repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
xmaruto/mcord | xos/synchronizers/base/steps/sync_controller_slices.py | 4225 | import os
import base64
from collections import defaultdict
from netaddr import IPAddress, IPNetwork
from django.db.models import F, Q
from xos.config import Config
from synchronizers.base.openstacksyncstep import OpenStackSyncStep
from synchronizers.base.syncstep import *
from core.models import *
from synchronizers.base.ansible import *
from openstack.driver import OpenStackDriver
from xos.logger import observer_logger as logger
import json
class SyncControllerSlices(OpenStackSyncStep):
    """Sync step that materializes a Slice as a tenant on its OpenStack controller.

    Translates ControllerSlice objects into the variable dicts consumed by the
    sync_controller_slices.yaml Ansible playbook (tenant create/update/delete),
    and records the resulting tenant id and instance quota afterwards.
    """
    provides = [Slice]
    requested_interval = 0
    observes = ControllerSlice
    playbook = 'sync_controller_slices.yaml'

    def map_sync_inputs(self, controller_slice):
        """Return the playbook vars for creating/updating the slice's tenant.

        Returns None (sync skipped) when the controller has no admin user.
        Raises Exception when the slice creator has no account at the
        controller, since the playbook needs a user to grant roles to.
        """
        logger.info("sync'ing slice controller %s" % controller_slice)
        if not controller_slice.controller.admin_user:
            logger.info("controller %r has no admin_user, skipping" % controller_slice.controller)
            return
        controller_users = ControllerUser.objects.filter(user=controller_slice.slice.creator,
                                                         controller=controller_slice.controller)
        if not controller_users:
            # Message fixed: original read "slice createor ... has not accout at".
            raise Exception("slice creator %s has no account at controller %s" % (controller_slice.slice.creator, controller_slice.controller.name))
        controller_user = controller_users[0]
        roles = ['admin']
        max_instances = int(controller_slice.slice.max_instances)
        tenant_fields = {'endpoint': controller_slice.controller.auth_url,
                         'endpoint_v3': controller_slice.controller.auth_url_v3,
                         'domain': controller_slice.controller.domain,
                         'admin_user': controller_slice.controller.admin_user,
                         'admin_password': controller_slice.controller.admin_password,
                         'admin_tenant': 'admin',
                         'tenant': controller_slice.slice.name,
                         'tenant_description': controller_slice.slice.description,
                         'roles': roles,
                         'name': controller_user.user.email,
                         'ansible_tag': '%s@%s' % (controller_slice.slice.name, controller_slice.controller.name),
                         'max_instances': max_instances}
        return tenant_fields

    def map_sync_outputs(self, controller_slice, res):
        """Record the tenant id created by the playbook and push the nova quota.

        Only runs the quota update / save the first time a tenant id is seen
        for this ControllerSlice.
        """
        tenant_id = res[0]['id']
        if not controller_slice.tenant_id:
            try:
                driver = OpenStackDriver().admin_driver(controller=controller_slice.controller)
                driver.shell.nova.quotas.update(tenant_id=tenant_id, instances=int(controller_slice.slice.max_instances))
            except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
                logger.log_exc('Could not update quota for %s' % controller_slice.slice.name)
                raise Exception('Could not update quota for %s' % controller_slice.slice.name)
            controller_slice.tenant_id = tenant_id
            controller_slice.backend_status = '1 - OK'
            controller_slice.save()

    def map_delete_inputs(self, controller_slice):
        """Return the playbook vars for deleting the slice's tenant."""
        controller_users = ControllerUser.objects.filter(user=controller_slice.slice.creator,
                                                         controller=controller_slice.controller)
        if not controller_users:
            # Message fixed: original read "slice createor ... has not accout at".
            raise Exception("slice creator %s has no account at controller %s" % (controller_slice.slice.creator, controller_slice.controller.name))
        controller_user = controller_users[0]
        tenant_fields = {'endpoint': controller_slice.controller.auth_url,
                         'admin_user': controller_slice.controller.admin_user,
                         'admin_password': controller_slice.controller.admin_password,
                         'admin_tenant': 'admin',
                         'tenant': controller_slice.slice.name,
                         'tenant_description': controller_slice.slice.description,
                         'name': controller_user.user.email,
                         'ansible_tag': '%s@%s' % (controller_slice.slice.name, controller_slice.controller.name),
                         'delete': True}
        return tenant_fields
| apache-2.0 |
alien11689/aries | jndi/jndi-url/src/main/java/org/apache/aries/jndi/url/Activator.java | 3962 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.jndi.url;
import org.apache.aries.proxy.ProxyManager;
import org.osgi.framework.BundleActivator;
import org.osgi.framework.BundleContext;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.jndi.JNDIConstants;
import javax.naming.spi.ObjectFactory;
import java.util.Hashtable;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Bundle activator for the Aries JNDI URL bundle.
 *
 * <p>Registers the {@link ObjectFactory} services backing the
 * {@code osgi:}/{@code aries:} URL schemes (driven by availability of a
 * {@code ProxyManager} service) and, when the Blueprint API is resolvable,
 * the {@code blueprint:comp/} scheme as well.
 */
public class Activator implements BundleActivator {

    // Static so BlueprintURLContext/OsgiURLContext factories can reach the
    // tracked ProxyManager via getProxyManager() without a reference to us.
    private static SingleServiceTracker<ProxyManager> proxyManager;
    private BundleContext ctx;
    // volatile: registrations are swapped from the service-tracker callback
    // thread and read from stop().
    private volatile ServiceRegistration<?> osgiUrlReg = null;
    private volatile ServiceRegistration<?> blueprintUrlReg = null;

    /** @return the currently tracked ProxyManager, or null if none is available. */
    public static ProxyManager getProxyManager() {
        return proxyManager == null ? null : proxyManager.getService();
    }

    @Override
    public void start(BundleContext context) throws InvalidSyntaxException {
        ctx = context;
        // The osgi:/aries: scheme registration is deferred until a
        // ProxyManager shows up; see serviceChanged().
        proxyManager = new SingleServiceTracker<>(context, ProxyManager.class, this::serviceChanged);
        proxyManager.open();
        // Blueprint URL scheme requires access to the BlueprintContainer service.
        // We have an optional import
        // on org.osgi.service.blueprint.container: only register the blueprint:comp/URL
        // scheme if it's present
        try {
            ctx.getBundle().loadClass("org.osgi.service.blueprint.container.BlueprintContainer");
            Hashtable<String, Object> blueprintURlSchemeProps = new Hashtable<>();
            blueprintURlSchemeProps.put(JNDIConstants.JNDI_URLSCHEME, new String[]{"blueprint"});
            blueprintUrlReg = ctx.registerService(ObjectFactory.class.getName(),
                    new BlueprintURLContextServiceFactory(), blueprintURlSchemeProps);
        } catch (ClassNotFoundException cnfe) {
            // The blueprint packages aren't available, so do nothing. That's fine.
            Logger logger = Logger.getLogger("org.apache.aries.jndi");
            logger.log(Level.INFO, "Blueprint support disabled: " + cnfe);
            logger.log(Level.FINE, "Blueprint support disabled", cnfe);
        }
    }

    @Override
    public void stop(BundleContext context) {
        proxyManager.close();
        safeUnregisterService(osgiUrlReg);
        safeUnregisterService(blueprintUrlReg);
    }

    /**
     * SingleServiceTracker callback: register the osgi:/aries: URL scheme
     * while a ProxyManager is available, unregister it when it goes away.
     */
    void serviceChanged(ProxyManager oldPm, ProxyManager newPm) {
        if (newPm == null) {
            safeUnregisterService(osgiUrlReg);
            osgiUrlReg = null;
        } else {
            Hashtable<String, Object> osgiUrlprops = new Hashtable<>();
            osgiUrlprops.put(JNDIConstants.JNDI_URLSCHEME, new String[]{"osgi", "aries"});
            osgiUrlReg = ctx.registerService(ObjectFactory.class.getName(),
                    new OsgiURLContextServiceFactory(), osgiUrlprops);
        }
    }

    /** Unregister, tolerating the registration already being dead (bundle stopping). */
    private static void safeUnregisterService(ServiceRegistration<?> reg) {
        if (reg != null) {
            try {
                reg.unregister();
            } catch (IllegalStateException e) {
                //This can be safely ignored
            }
        }
    }
}
} | apache-2.0 |
rmmh/kubernetes-test-infra | mungegithub/mungers/owner-label.go | 2788 | /*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mungers
import (
"k8s.io/kubernetes/pkg/util/sets"
"k8s.io/test-infra/mungegithub/features"
"k8s.io/test-infra/mungegithub/github"
"k8s.io/test-infra/mungegithub/options"
githubapi "github.com/google/go-github/github"
)
// OwnerLabelMunger will label issues as specified in OWNERS files.
type OwnerLabelMunger struct {
	labeler fileLabeler // source of path -> label lookups (backed by OWNERS files)
}

// fileLabeler is the subset of the repo feature this munger needs; an
// interface so it can be substituted in tests.
type fileLabeler interface {
	AllPossibleOwnerLabels() sets.String
	FindLabelsForPath(path string) sets.String
}
// init registers this munger with the framework at package load time.
func init() {
	ownerLabel := &OwnerLabelMunger{}
	RegisterMungerOrDie(ownerLabel)
}
// Name is the name usable in --pr-mungers
func (b *OwnerLabelMunger) Name() string { return "owner-label" }

// RequiredFeatures is a slice of 'features' that must be provided
func (b *OwnerLabelMunger) RequiredFeatures() []string {
	return []string{features.RepoFeatureName}
}

// Initialize will initialize the munger; it grabs the repo feature as the
// label source.
func (b *OwnerLabelMunger) Initialize(config *github.Config, features *features.Features) error {
	b.labeler = features.Repos
	return nil
}

// EachLoop is called at the start of every munge loop; nothing to do here.
func (b *OwnerLabelMunger) EachLoop() error { return nil }

// RegisterOptions registers options for this munger; returns any that require a restart when changed.
// This munger has no options.
func (b *OwnerLabelMunger) RegisterOptions(opts *options.Options) sets.String { return nil }
// getLabels returns the union of OWNERS-derived labels for every commit
// file that actually has changed lines; nil entries and zero-change files
// are skipped.
func (b *OwnerLabelMunger) getLabels(files []*githubapi.CommitFile) sets.String {
	collected := sets.String{}
	for _, commitFile := range files {
		if commitFile == nil || commitFile.Changes == nil || *commitFile.Changes == 0 {
			continue
		}
		collected = collected.Union(b.labeler.FindLabelsForPath(*commitFile.Filename))
	}
	return collected
}
// Munge is the workhorse that will actually make updates to the PR.
func (b *OwnerLabelMunger) Munge(obj *github.MungeObject) {
	if !obj.IsPR() {
		return
	}
	files, ok := obj.ListFiles()
	if !ok {
		return
	}
	needsLabels := b.getLabels(files)
	// Passing needsLabels as both arguments means (per the TODO below) that
	// labels are currently only ever added, never removed.
	// TODO: make sure no other munger considers itself to own a label in
	// AllPossibleOwnerLabels, and then pass that so that this will remove
	// as well as add labels.
	SyncLabels(needsLabels, needsLabels, obj)
	// SyncLabels(b.labeler.AllPossibleOwnerLabels(), needsLabels, obj)
}
| apache-2.0 |
huaban/phabricator | src/applications/conpherence/query/ConpherenceTransactionQuery.php | 268 | <?php
/**
 * Application-transaction query for Conpherence threads; supplies the
 * template object the base query uses to load transaction rows.
 */
final class ConpherenceTransactionQuery
  extends PhabricatorApplicationTransactionQuery {

  /** @return ConpherenceTransaction Template used to hydrate result rows. */
  public function getTemplateApplicationTransaction() {
    return new ConpherenceTransaction();
  }

  // Keep the default (non-reversed) paging order for transactions.
  protected function getReversePaging() {
    return false;
  }

}
| apache-2.0 |
elkingtonmcb/oryx | common/src/main/java/com/cloudera/oryx/common/iterator/ArrayIterable.java | 1536 | /*
* Copyright (c) 2013, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.oryx.common.iterator;
import com.google.common.base.Preconditions;
import java.util.Iterator;
/**
 * An {@link Iterable} that can provide an {@link Iterator} over an array,
 * optionally restricted to a sub-range of the array.
 *
 * @param <T> array value type
 * @author Sean Owen
 */
public final class ArrayIterable<T> implements Iterable<T> {

  private final T[] values;
  private final int from; // first index yielded (inclusive)
  private final int to;   // index iteration stops at (exclusive)

  /**
   * Iterates over the entire array.
   *
   * @param values array to iterate over
   */
  public ArrayIterable(T[] values) {
    this(values, 0, values.length);
  }

  /**
   * @param values array to iterate over
   * @param from index to start iteration from (inclusive)
   * @param to index to stop iterating at (exclusive); must be {@code >= from}
   */
  public ArrayIterable(T[] values, int from, int to) {
    Preconditions.checkArgument(from <= to);
    this.values = values;
    this.from = from;
    this.to = to;
  }

  @Override
  public Iterator<T> iterator() {
    return new ArrayIterator<>(values, from, to);
  }

}
| apache-2.0 |
zjy1170/zjy | zheng/zheng-cms/zheng-cms-dao/src/main/java/com/zheng/cms/dao/Generator.java | 1396 | package com.zheng.cms.dao;
import com.zheng.common.util.MybatisGeneratorUtil;
import com.zheng.common.util.PropertiesFileUtil;
import java.util.HashMap;
import java.util.Map;
/**
 * Code-generation entry point for the zheng-cms DAO module.
 * Created by ZhangShuzheng on 2017/1/10.
 */
public class Generator {

    // Per the naming convention, only these constants need editing when
    // adapting this generator to another module.
    private static String MODULE = "zheng-cms";
    private static String DATABASE = "zheng";
    private static String TABLE_PREFIX = "cms_";
    private static String PACKAGE_NAME = "com.zheng.cms";

    // JDBC connection settings come from the "generator" properties file.
    private static String JDBC_DRIVER = PropertiesFileUtil.getInstance("generator").get("generator.jdbc.driver");
    private static String JDBC_URL = PropertiesFileUtil.getInstance("generator").get("generator.jdbc.url");
    private static String JDBC_USERNAME = PropertiesFileUtil.getInstance("generator").get("generator.jdbc.username");
    private static String JDBC_PASSWORD = PropertiesFileUtil.getInstance("generator").get("generator.jdbc.password");

    // Tables that must return the generated primary key after insert;
    // key: table name, value: primary-key column name. (Currently empty.)
    private static Map<String, String> LAST_INSERT_ID_TABLES = new HashMap<>();
    static {
    }

    /**
     * Run automatic code generation.
     *
     * @param args unused
     */
    public static void main(String[] args) throws Exception {
        MybatisGeneratorUtil.generator(JDBC_DRIVER, JDBC_URL, JDBC_USERNAME, JDBC_PASSWORD, MODULE, DATABASE, TABLE_PREFIX, PACKAGE_NAME, LAST_INSERT_ID_TABLES);
    }
}
| apache-2.0 |
robbiet480/home-assistant | homeassistant/components/openuv/config_flow.py | 2114 | """Config flow to configure the OpenUV component."""
from pyopenuv import Client
from pyopenuv.errors import OpenUvError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_API_KEY,
CONF_ELEVATION,
CONF_LATITUDE,
CONF_LONGITUDE,
)
from homeassistant.helpers import aiohttp_client, config_validation as cv
from .const import DOMAIN # pylint: disable=unused-import
# User-supplied setup data: API key is required, latitude/longitude must be
# given together (vol.Inclusive), elevation is optional and coerced to float.
CONFIG_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_API_KEY): str,
        vol.Inclusive(CONF_LATITUDE, "coords"): cv.latitude,
        vol.Inclusive(CONF_LONGITUDE, "coords"): cv.longitude,
        vol.Optional(CONF_ELEVATION): vol.Coerce(float),
    }
)
class OpenUvFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle an OpenUV config flow."""

    VERSION = 2
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL

    async def _show_form(self, errors=None):
        """Show the form to the user."""
        return self.async_show_form(
            step_id="user",
            data_schema=CONFIG_SCHEMA,
            errors=errors or {},
        )

    async def async_step_import(self, import_config):
        """Import a config entry from configuration.yaml."""
        # YAML import takes exactly the same path as user-entered data.
        return await self.async_step_user(import_config)

    async def async_step_user(self, user_input=None):
        """Handle the start of the config flow."""
        if not user_input:
            return await self._show_form()

        # Derive a human-readable unique id from the coordinates, falling
        # back to a generic name when no coordinates were provided.
        latitude = user_input.get(CONF_LATITUDE)
        if latitude:
            identifier = f"{user_input[CONF_LATITUDE]}, {user_input[CONF_LONGITUDE]}"
        else:
            identifier = "Default Coordinates"

        await self.async_set_unique_id(identifier)
        self._abort_if_unique_id_configured()

        # Validate the API key with a throwaway client before creating the entry.
        session = aiohttp_client.async_get_clientsession(self.hass)
        api_client = Client(user_input[CONF_API_KEY], 0, 0, session)
        try:
            await api_client.uv_index()
        except OpenUvError:
            return await self._show_form({CONF_API_KEY: "invalid_api_key"})

        return self.async_create_entry(title=identifier, data=user_input)
| apache-2.0 |
stoksey69/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201502/DeleteContentMetadataKeyHierarchies.java | 1024 |
package com.google.api.ads.dfp.jaxws.v201502;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
*
* The action used for deleting {@link ContentMetadataKeyHierarchy} objects.
*
*
* <p>Java class for DeleteContentMetadataKeyHierarchies complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="DeleteContentMetadataKeyHierarchies">
* <complexContent>
* <extension base="{https://www.google.com/apis/ads/publisher/v201502}ContentMetadataKeyHierarchyAction">
* <sequence>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "DeleteContentMetadataKeyHierarchies")
public class DeleteContentMetadataKeyHierarchies
    extends ContentMetadataKeyHierarchyAction
{
    // JAXB-generated marker action: carries no fields of its own; the XML
    // type name alone tells the DFP API which bulk action to perform.
}
| apache-2.0 |
josejulio/manageiq | spec/models/manageiq/providers/embedded_ansible/automation_manager/playbook_runner_spec.rb | 13499 | describe ManageIQ::Providers::EmbeddedAnsible::AutomationManager::PlaybookRunner do
let(:manager) { FactoryGirl.create(:embedded_automation_manager_ansible) }
let(:playbook) { FactoryGirl.create(:embedded_playbook, :manager => manager) }
subject { ManageIQ::Providers::EmbeddedAnsible::AutomationManager::PlaybookRunner.create_job(options.merge(:playbook_id => playbook.id)) }
describe '#start' do
context 'localhost is used' do
let(:options) { {:hosts => 'localhost'} }
it 'moves on to create_job_template' do
expect(subject).to receive(:queue_signal).with(:create_job_template, :deliver_on => nil)
subject.start
end
end
context 'no host is given' do
let(:options) { {} }
it 'moves on to create_job_template' do
expect(subject).to receive(:queue_signal).with(:create_job_template, :deliver_on => nil)
subject.start
end
end
context 'hosts are given' do
let(:options) { {:hosts => 'host1,localhost'} }
it 'moves on to create inventory' do
expect(subject).to receive(:queue_signal).with(:create_inventory, :deliver_on => nil)
subject.start
end
end
end
describe '#create_inventory' do
context 'hosts are given' do
# Use string key to also test the indifferent accessibility
let(:options) { {'hosts' => 'host1,host2'} }
it 'creates an inventory and moves on to create_job_template' do
# Also test signal with queue
subject.send(:minimize_indirect=, false)
expect(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::Inventory).to receive(:raw_create_inventory).and_return(double(:id => 'inv1'))
expect(subject).to receive(:queue_signal).with(:create_job_template, :deliver_on => nil)
subject.create_inventory
expect(subject.options[:inventory]).to eq('inv1')
end
end
context 'error is raised' do
let(:options) { {:hosts => 'host1,host2'} }
it 'moves on to post_ansible_run' do
allow(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::Inventory).to receive(:raw_create_inventory).and_raise("can't complete the request")
expect(subject).to receive(:signal).with(:post_ansible_run, "can't complete the request", "error")
subject.create_inventory
end
end
end
describe '#create_job_template' do
before { allow(subject).to receive(:playbook).and_return(playbook) }
let(:options) { {:playbook_id => playbook.id, :inventory => 'inv1'} }
context 'options are enough to cretate job template' do
it 'creates a job template and moves on to launch_ansible_tower_job' do
allow(playbook).to receive(:raw_create_job_template).and_return(double(:id => 'jt_ref'))
expect(subject).to receive(:signal).with(:launch_ansible_tower_job)
subject.create_job_template
expect(subject.options).to include(:job_template_ref => 'jt_ref')
end
end
context 'error is raised' do
it 'moves on to post_ansible_run' do
allow(playbook).to receive(:raw_create_job_template).and_raise("can't complete the request")
expect(subject).to receive(:signal).with(:post_ansible_run, "can't complete the request", "error")
subject.create_job_template
end
end
end
describe '#launch_ansible_tower_job' do
let(:options) { {:job_template_ref => 'jt1'} }
context 'job template is ready' do
it 'launches a job and moves on to poll_ansible_tower_job_status' do
expect(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::Job).to receive(:create_job).and_return(double(:id => 'jb1'))
expect(subject).to receive(:queue_signal).with(:poll_ansible_tower_job_status, kind_of(Integer), kind_of(Hash))
subject.launch_ansible_tower_job
expect(subject.options[:tower_job_id]).to eq('jb1')
end
end
context 'error is raised' do
it 'moves on to post_ansible_run' do
allow(ManageIQ::Providers::EmbeddedAnsible::AutomationManager::Job).to receive(:create_job).and_raise("can't complete the request")
expect(subject).to receive(:signal).with(:post_ansible_run, "can't complete the request", "error")
subject.launch_ansible_tower_job
end
end
end
describe '#poll_ansible_tower_job_status' do
let(:options) { {:tower_job_id => 'jb1'} }
context 'tower job is still running' do
before { allow(subject).to receive(:ansible_job).and_return(double(:raw_status => double(:completed? => false))) }
it 'requeues for later poll' do
expect(subject).to receive(:queue_signal).with(:poll_ansible_tower_job_status, 20, kind_of(Hash))
subject.poll_ansible_tower_job_status(10)
end
end
context 'tower job finishes normally' do
let(:ansible_job) { double(:raw_status => double(:completed? => true, :succeeded? => true), :refresh_ems => nil) }
before { allow(subject).to receive(:ansible_job).and_return(ansible_job) }
context 'always log output' do
let(:options) { {:tower_job_id => 'jb1', :log_output => 'always'} }
it 'gets ansible output and moves on to post_ansible_run with ok status' do
expect(ansible_job).to receive(:raw_stdout)
expect(subject).to receive(:signal).with(:post_ansible_run, kind_of(String), 'ok')
subject.poll_ansible_tower_job_status(10)
end
end
context 'log output on error' do
let(:options) { {:tower_job_id => 'jb1', :log_output => 'on_error'} }
it 'moves on to post_ansible_run with ok status' do
expect(ansible_job).not_to receive(:raw_stdout)
expect(subject).to receive(:signal).with(:post_ansible_run, kind_of(String), 'ok')
subject.poll_ansible_tower_job_status(10)
end
end
end
context 'tower job fails' do
let(:ansible_job) { double(:raw_status => double(:completed? => true, :succeeded? => false), :refresh_ems => nil) }
before { allow(subject).to receive(:ansible_job).and_return(ansible_job) }
context 'log output on error' do
let(:options) { {:tower_job_id => 'jb1', :log_output => 'on_error'} }
it 'gets ansible outputs and moves on to post_ansible_run with error status' do
expect(ansible_job).to receive(:raw_stdout)
expect(subject).to receive(:signal).with(:post_ansible_run, kind_of(String), 'error')
subject.poll_ansible_tower_job_status(10)
end
end
context 'never log output' do
let(:options) { {:tower_job_id => 'jb1', :log_output => 'never'} }
it 'moves on to post_ansible_run with error status' do
expect(ansible_job).not_to receive(:raw_stdout)
expect(subject).to receive(:signal).with(:post_ansible_run, kind_of(String), 'error')
subject.poll_ansible_tower_job_status(10)
end
end
end
context 'error is raised' do
before { allow(subject).to receive(:ansible_job).and_raise('internal error') }
it 'moves on to post_ansible_run with error message' do
expect(subject).to receive(:signal).with(:post_ansible_run, 'internal error', 'error')
subject.poll_ansible_tower_job_status(10)
end
end
end
describe '#post_ansible_run' do
let(:options) { {:inventory => 'inv1', :job_template_ref => 'jt1'} }
context 'playbook runs successfully' do
it 'removes temporary inventory and job template and finishes the job' do
expect(subject).to receive(:delete_inventory)
expect(subject).to receive(:delete_job_template)
subject.post_ansible_run('Playbook ran successfully', 'ok')
expect(subject).to have_attributes(:state => 'finished', :status => 'ok')
end
end
context 'playbook runs with error' do
it 'removes temporary inventory and job template and finishes the job with error' do
expect(subject).to receive(:delete_inventory)
expect(subject).to receive(:delete_job_template)
subject.post_ansible_run('Ansible engine returned an error for the job', 'error')
expect(subject).to have_attributes(:state => 'finished', :status => 'error')
end
end
context 'cleaning up has error' do
it 'does fail the job but logs the error' do
expect(subject).to receive(:delete_inventory)
allow(subject).to receive(:temp_configuration_script).and_raise('fake error')
expect($log).to receive(:log_backtrace)
subject.post_ansible_run('Playbook ran successfully', 'ok')
expect(subject).to have_attributes(
:state => 'finished',
:status => 'ok',
:message => 'Playbook ran successfully; Cleanup encountered error'
)
end
end
end
describe 'state transitions' do
let(:options) { {} }
%w(start create_inventory create_job_template launch_ansible_tower_job poll_ansible_tower_job_status post_ansible_run finish abort_job cancel error).each do |signal|
shared_examples_for "allows #{signal} signal" do
it signal.to_s do
expect(subject).to receive(signal.to_sym)
subject.signal(signal.to_sym)
end
end
end
%w(start create_inventory create_job_template launch_ansible_tower_job poll_ansible_tower_job_status post_ansible_run).each do |signal|
shared_examples_for "does not allow #{signal} signal" do
it signal.to_s do
expect { subject.signal(signal.to_sym) }.to raise_error(RuntimeError, /#{signal} is not permitted at state #{subject.state}/)
end
end
end
context 'in waiting_to_start state' do
before { subject.state = 'waiting_to_start' }
it_behaves_like 'allows start signal'
it_behaves_like 'allows finish signal'
it_behaves_like 'allows abort_job signal'
it_behaves_like 'allows cancel signal'
it_behaves_like 'allows error signal'
it_behaves_like 'does not allow create_inventory signal'
it_behaves_like 'does not allow create_job_template signal'
it_behaves_like 'does not allow launch_ansible_tower_job signal'
it_behaves_like 'does not allow poll_ansible_tower_job_status signal'
it_behaves_like 'does not allow post_ansible_run signal'
end
context 'in running state' do
before { subject.state = 'running' }
it_behaves_like 'allows create_inventory signal'
it_behaves_like 'allows create_job_template signal'
it_behaves_like 'allows finish signal'
it_behaves_like 'allows abort_job signal'
it_behaves_like 'allows cancel signal'
it_behaves_like 'allows error signal'
it_behaves_like 'does not allow start signal'
it_behaves_like 'does not allow launch_ansible_tower_job signal'
it_behaves_like 'does not allow poll_ansible_tower_job_status signal'
it_behaves_like 'does not allow post_ansible_run signal'
end
context 'in inventory state' do
before { subject.state = 'inventory' }
it_behaves_like 'allows create_job_template signal'
it_behaves_like 'allows post_ansible_run signal'
it_behaves_like 'allows finish signal'
it_behaves_like 'allows abort_job signal'
it_behaves_like 'allows cancel signal'
it_behaves_like 'allows error signal'
it_behaves_like 'does not allow start signal'
it_behaves_like 'does not allow create_inventory signal'
it_behaves_like 'does not allow launch_ansible_tower_job signal'
it_behaves_like 'does not allow poll_ansible_tower_job_status signal'
end
context 'in job_template state' do
before { subject.state = 'job_template' }
it_behaves_like 'allows launch_ansible_tower_job signal'
it_behaves_like 'allows post_ansible_run signal'
it_behaves_like 'allows finish signal'
it_behaves_like 'allows abort_job signal'
it_behaves_like 'allows cancel signal'
it_behaves_like 'allows error signal'
it_behaves_like 'does not allow start signal'
it_behaves_like 'does not allow create_inventory signal'
it_behaves_like 'does not allow create_job_template signal'
it_behaves_like 'does not allow poll_ansible_tower_job_status signal'
end
context 'in ansible_job state' do
before { subject.state = 'ansible_job' }
it_behaves_like 'allows poll_ansible_tower_job_status signal'
it_behaves_like 'allows post_ansible_run signal'
it_behaves_like 'allows finish signal'
it_behaves_like 'allows abort_job signal'
it_behaves_like 'allows cancel signal'
it_behaves_like 'allows error signal'
it_behaves_like 'does not allow start signal'
it_behaves_like 'does not allow create_inventory signal'
it_behaves_like 'does not allow create_job_template signal'
it_behaves_like 'does not allow launch_ansible_tower_job signal'
end
context 'in ansible_done state' do
before { subject.state = 'ansible_done' }
it_behaves_like 'allows finish signal'
it_behaves_like 'allows abort_job signal'
it_behaves_like 'allows cancel signal'
it_behaves_like 'allows error signal'
it_behaves_like 'does not allow start signal'
it_behaves_like 'does not allow create_inventory signal'
it_behaves_like 'does not allow launch_ansible_tower_job signal'
it_behaves_like 'does not allow poll_ansible_tower_job_status signal'
it_behaves_like 'does not allow post_ansible_run signal'
end
end
end
| apache-2.0 |
mdanielwork/intellij-community | platform/util/src/com/intellij/ui/SeparatorComponent.java | 3170 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui;
import com.intellij.util.ui.JBUI;
import javax.swing.*;
import java.awt.*;
/**
 * A lightweight horizontal or vertical separator line, optionally drawn with a
 * one-pixel "shadow" line next to it. Gap fields control the empty margin
 * around the line; for horizontal separators the vertical gap is also applied
 * as an empty border in the gap-taking constructors.
 */
public class SeparatorComponent extends JComponent {
private int myVGap = 3; // vertical margin (pixels) above/below a horizontal line
private Color myColor = Color.lightGray; // main line color; null disables painting
private Color myShadow = Gray._240; // shadow line color; null disables the shadow
private int myHGap = 1; // horizontal margin (pixels) left/right of the line
private SeparatorOrientation myOrientation = SeparatorOrientation.HORIZONTAL;
public SeparatorComponent() {
}
public SeparatorComponent(int aVerticalGap) {
myVGap = aVerticalGap;
setBorder(JBUI.Borders.empty(myVGap, 0));
}
public SeparatorComponent(int aVerticalGap, int aHorizontalGap) {
myVGap = aVerticalGap;
myHGap = aHorizontalGap;
setBorder(JBUI.Borders.empty(myVGap, 0));
}
public SeparatorComponent(int aVerticalGap, Color aColor, Color aShadowColor) {
this(aVerticalGap, 1, aColor, aShadowColor);
}
public SeparatorComponent(int aVerticalGap, int horizontalGap, Color aColor, Color aShadowColor) {
myVGap = aVerticalGap;
myHGap = horizontalGap;
myColor = aColor;
myShadow = aShadowColor;
setBorder(JBUI.Borders.empty(myVGap, 0));
}
// Vertical-orientation constructor: no gaps, no shadow by default.
public SeparatorComponent(Color color, SeparatorOrientation orientation) {
myColor = color;
myOrientation = orientation;
myShadow = null;
myHGap = 0;
myVGap = 0;
}
@Override
protected void paintComponent(Graphics g) {
if (!isVisible()) return;
if (myColor == null) return; // null color means "draw nothing"
g.setColor(myColor);
if (myOrientation != SeparatorOrientation.VERTICAL) {
// Horizontal: line at y == myVGap, inset myHGap from both edges;
// shadow is drawn one pixel lower and shifted one pixel right.
g.drawLine(myHGap, myVGap, getWidth() - myHGap - 1, myVGap);
if (myShadow != null) {
g.setColor(myShadow);
g.drawLine(myHGap + 1, myVGap + 1, getWidth() - myHGap, myVGap + 1);
}
} else {
// Vertical: mirrored logic, line at x == myHGap.
g.drawLine(myHGap, myVGap, myHGap, getHeight() - myVGap - 1);
if (myShadow != null) {
g.setColor(myShadow);
g.drawLine(myHGap + 1, myVGap + 1, myHGap + 1, getHeight() - myVGap);
}
}
}
@Override
public Dimension getPreferredSize() {
if (myOrientation != SeparatorOrientation.VERTICAL) {
// NOTE(review): horizontal preferred height ignores the extra pixel the
// shadow paints; vertical adds the shadow pixel to *height* although the
// shadow widens the component — looks inverted, confirm before changing.
return new Dimension(0, myVGap * 2 + 1);
}
else {
return new Dimension(myHGap * 2 + 1, 1 + ((myShadow != null) ? 1 : 0));
}
}
@Override
public Dimension getMinimumSize() {
return getPreferredSize();
}
@Override
public Dimension getMaximumSize() {
// Stretch freely along the separator's long axis only.
Dimension size = getPreferredSize();
if (myOrientation != SeparatorOrientation.VERTICAL) {
size.width = Integer.MAX_VALUE;
}
else {
size.height = Integer.MAX_VALUE;
}
return size;
}
}
Rajik/gocd | server/webapp/WEB-INF/rails.new/spec/controllers/admin/package_repositories_controller_spec.rb | 24139 | ##########################GO-LICENSE-START################################
# Copyright 2014 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################GO-LICENSE-END##################################
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper')
describe Admin::PackageRepositoriesController do
include MockRegistryModule, ConfigSaveStubbing
# Verifies that each admin package-repository URL resolves to the expected
# controller action, and that the named path helpers generate those URLs
# (including plugin ids containing dots).
describe :routes do
it "should resolve route to the new package-repositories page" do
expect({:get => "/admin/package_repositories/new"}).to route_to(:controller => "admin/package_repositories", :action => "new")
expect(package_repositories_new_path).to eq("/admin/package_repositories/new")
end
it "should resolve route to the list package-repositories page" do
expect({:get => "/admin/package_repositories/list"}).to route_to(:controller => "admin/package_repositories", :action => "list")
expect(package_repositories_list_path).to eq("/admin/package_repositories/list")
end
it "should resolve route to the create package-repositories page" do
expect({:post => "/admin/package_repositories"}).to route_to(:controller => "admin/package_repositories", :action => "create")
expect(package_repositories_create_path).to eq("/admin/package_repositories")
end
it "should resolve route to the edit package-repositories page" do
expect({:get => "/admin/package_repositories/abcd-1234/edit"}).to route_to(:controller => "admin/package_repositories", :action => "edit", :id => "abcd-1234")
expect(package_repositories_edit_path(:id => "abcd-1234")).to eq("/admin/package_repositories/abcd-1234/edit")
end
it "should resolve route to the update package-repositories page" do
expect({:put => "/admin/package_repositories/abcd-1234"}).to route_to(:controller => "admin/package_repositories", :action => "update", :id => "abcd-1234")
expect(package_repositories_update_path(:id => "abcd-1234")).to eq("/admin/package_repositories/abcd-1234")
end
it "should resolve route to plugin config" do
expect({:get => "/admin/package_repositories/abcd-1234/config"}).to route_to(:controller => "admin/package_repositories", :action => "plugin_config", :plugin => "abcd-1234")
expect(package_repositories_plugin_config_path(:plugin => "abcd-1234")).to eq("/admin/package_repositories/abcd-1234/config")
end
it "should resolve route to plugin config for repo" do
expect({:get => "/admin/package_repositories/repoid/pluginid/config"}).to route_to(:controller => "admin/package_repositories", :action => "plugin_config_for_repo", :plugin => "pluginid", :id => "repoid")
expect(package_repositories_plugin_config_for_repo_path(:plugin => "pluginid", :id => "repoid")).to eq("/admin/package_repositories/repoid/pluginid/config")
end
it "should resolve route to check connection for repo" do
expect({:get => "/admin/package_repositories/check_connection?id=foo"}).to route_to(:controller => "admin/package_repositories", :action => "check_connection", :id => "foo")
expect(package_repositories_check_connection_path).to eq("/admin/package_repositories/check_connection")
end
it "should resolve route to deletion of repo" do
expect({:delete => "/admin/package_repositories/repo"}).to route_to(:controller => "admin/package_repositories", :action => "destroy", :id => "repo")
expect(package_repositories_delete_path(:id => "repo")).to eq("/admin/package_repositories/repo")
end
it "should allow dots in the name of a plugin in the route for a plugin's configuration" do
expect({:get => "/admin/package_repositories/plugin.id.with.dots/config"}).to route_to(:controller => "admin/package_repositories", :action => "plugin_config", :plugin => "plugin.id.with.dots")
expect(package_repositories_plugin_config_path(:plugin => "plugin.id.with.dots")).to eq("/admin/package_repositories/plugin.id.with.dots/config")
end
it "should allow dots in the name of a plugin in the route for a plugin config for a repository" do
expect({:get => "/admin/package_repositories/repoid/plugin.id.with.dots/config"}).to route_to(:controller => "admin/package_repositories", :action => "plugin_config_for_repo", :plugin => "plugin.id.with.dots", :id => "repoid")
expect(package_repositories_plugin_config_for_repo_path(:plugin => "plugin.id.with.dots", :id => "repoid")).to eq("/admin/package_repositories/repoid/plugin.id.with.dots/config")
end
end
describe :actions do
# Shared setup for all action specs: stub the go_config_service so the
# config file always validates, and stub the cloner the controller uses
# to deep-clone the editable config.
before :each do
config_validity = double('config validity')
config_validity.should_receive(:isValid).and_return(true)
@go_config_service = double('go config service')
controller.stub(:go_config_service).and_return(@go_config_service)
@go_config_service.should_receive(:checkConfigFileValid).and_return(config_validity)
@go_config_service.stub(:registry)
controller.stub(:populate_health_messages)
@cloner = double('cloner')
controller.stub(:get_cloner_instance).and_return(@cloner)
end
# GET /admin/package_repositories/new — renders the add-repository form.
describe "new" do
before(:each) do
controller.stub(:package_repository_service).with().and_return(@package_repository_service= double('Package Repository Service'))
@cruise_config = BasicCruiseConfig.new
@go_config_service.should_receive(:getConfigForEditing).and_return(@cruise_config)
@cloner.should_receive(:deepClone).at_least(1).times.with(@cruise_config).and_return(@cruise_config)
@user = current_user
end
it "should render form for addition of package repository" do
package_repositories = PackageRepositories.new
@cruise_config.setPackageRepositories(package_repositories)
get :new
expect(assigns[:tab_name]).to eq("package-repositories")
expect(assigns[:package_repository]).to_not be_nil
expect(assigns[:package_repositories]).to eq(package_repositories)
expect(assigns[:package_to_pipeline_map]).to eq(@cruise_config.getGroups().getPackageUsageInPipelines())
assert_template layout: "admin"
end
end
# GET /admin/package_repositories/list — renders the repository listing page.
describe "list" do
before(:each) do
controller.stub(:package_repository_service).with().and_return(@package_repository_service= double('Package Repository Service'))
@cruise_config = BasicCruiseConfig.new
@cloner.should_receive(:deepClone).at_least(1).times.with(@cruise_config).and_return(@cruise_config)
@go_config_service.should_receive(:getConfigForEditing).and_return(@cruise_config)
@user = current_user
end
it "should render page for package repository list" do
package_repositories = PackageRepositories.new
@cruise_config.setPackageRepositories(package_repositories)
get :list
expect(assigns[:tab_name]).to eq("package-repositories")
expect(assigns[:package_repository]).to_not be_nil
expect(assigns[:package_repositories]).to eq(package_repositories)
expect(assigns[:package_to_pipeline_map]).to eq(@cruise_config.getGroups().getPackageUsageInPipelines())
assert_template layout: "admin"
end
end
# plugin_config / plugin_config_for_repo — fetch plugin property metadata,
# without values for a fresh repo and with stored values for an existing one.
# NOTE(review): RepositoryMetadataStore.getInstance() is a JVM singleton, so
# metadata registered here leaks across examples — confirm tests stay isolated.
describe "config" do
before(:each) do
@cruise_config = BasicCruiseConfig.new
@go_config_service.should_receive(:getConfigForEditing).and_return(@cruise_config)
@cloner.should_receive(:deepClone).at_least(1).times.with(@cruise_config).and_return(@cruise_config)
repository1 = PackageRepositoryMother.create("repo1", "repo1-name", "pluginid", "version1.0", Configuration.new([ConfigurationPropertyMother.create("k1", false, "v1")].to_java(ConfigurationProperty)))
repos = PackageRepositories.new
repos.add(repository1)
@cruise_config.setPackageRepositories(repos)
repo_metadata_store = RepositoryMetadataStore.getInstance()
configurations = PackageConfigurations.new()
configurations.add(PackageConfiguration.new("k1").with(PackageConfiguration::DISPLAY_NAME, "Key 1").with(PackageConfiguration::DISPLAY_ORDER, 0))
repo_metadata_store.addMetadataFor("pluginid", configurations)
end
it "should get the configuration properties for a given plugin-id" do
get :plugin_config, :plugin => "pluginid"
expect(assigns[:repository_configuration]).to_not be_nil
expect(assigns[:repository_configuration].properties[0].display_name).to eq("Key 1")
expect(assigns[:repository_configuration].properties[0].value).to eq(nil)
expect(assigns[:plugin_id]).to eq("pluginid")
expect(assigns[:isNewRepo]).to eq(true)
assert_template layout: false
end
it "should get the configuration properties with values for a given repo-id associated with package material plugin" do
get :plugin_config_for_repo, :id => "repo1", :plugin => "pluginid"
expect(assigns[:repository_configuration]).to_not be_nil
expect(assigns[:repository_configuration].properties.size).to eq(1)
expect(assigns[:repository_configuration].properties[0].display_name).to eq("Key 1")
expect(assigns[:repository_configuration].properties[0].value).to eq("v1")
expect(assigns[:plugin_id]).to eq("pluginid")
expect(assigns[:isNewRepo]).to eq(false)
assert_template layout: false
end
end
# POST /admin/package_repositories — JSON success response sets a flash and
# redirect URL; failure returns 500 with the Go-Config-Error header and no flash.
describe "create" do
before(:each) do
controller.stub(:package_repository_service).with().and_return(@package_repository_service= double('Package Repository Service'))
@cruise_config = BasicCruiseConfig.new
@go_config_service.should_receive(:getConfigForEditing).and_return(@cruise_config)
@cloner.should_receive(:deepClone).at_least(1).times.with(@cruise_config).and_return(@cruise_config)
@user = current_user
end
it "should save package repository form" do
package_repository = PackageRepository.new
package_repository.setId("repo-id")
PackageRepository.stub(:new).and_return(package_repository)
@package_repository_service.should_receive(:savePackageRepositoryToConfig).with(package_repository, "1234abcd", @user).and_return(ConfigUpdateAjaxResponse::success("repo-id", 200, "success"))
post :create, :config_md5 => "1234abcd", :package_repository => {:name => "name", :pluginConfiguration => {:id => "yum"}, :configuration => {"0" => {:configurationKey => {:name => "key"}, :configurationValue => {:value => "value"}}}}
expect(response.body).to eq("{\"fieldErrors\":{},\"globalErrors\":[],\"message\":\"success\",\"isSuccessful\":true,\"subjectIdentifier\":\"repo-id\",\"redirectUrl\":\"/admin/package_repositories/repo-id/edit\"}")
expect(flash[:success]).to eq("success")
expect(response.response_code).to eq(200)
expect(response.location).to eq("/admin/package_repositories/repo-id/edit")
expect(response.headers["Go-Config-Error"]).to eq(nil)
end
it "should not add flash message when create fails" do
package_repository = PackageRepository.new
PackageRepository.stub(:new).and_return(package_repository)
@package_repository_service.should_receive(:savePackageRepositoryToConfig).with(package_repository, "1234abcd", @user).and_return(ConfigUpdateAjaxResponse::failure(nil, 500, "failed", nil, nil));
post :create, :config_md5 => "1234abcd", :package_repository => {:name => "name", :pluginConfiguration => {:id => "yum"}, :configuration => {"0" => {:configurationKey => {:name => "key"}, :configurationValue => {:value => "value"}}}}
expect(flash[:success]).to eq(nil)
expect(response.response_code).to eq(500)
expect(response.headers["Go-Config-Error"]).to eq("failed")
expect(response.location).to eq(nil)
end
end
# GET /admin/package_repositories/:id/edit — edit form with stored values,
# an error banner when the repo's plugin is not installed, and a 404 page
# when the repository id does not exist.
describe "edit" do
before(:each) do
controller.stub(:package_repository_service).with().and_return(@package_repository_service= double('Package Repository Service'))
@cruise_config = BasicCruiseConfig.new
@cloner.should_receive(:deepClone).at_least(1).times.with(@cruise_config).and_return(@cruise_config)
@go_config_service.should_receive(:getConfigForEditing).and_return(@cruise_config)
@user = current_user
@repository1 = PackageRepositoryMother.create("abcd-1234", "repo1-name", "pluginid", "version1.0", Configuration.new([ConfigurationPropertyMother.create("k1", false, "v1")].to_java(ConfigurationProperty)))
@repository2 = PackageRepositoryMother.create("with-missing-plugin", "repo2-name", "missing", "version1.0", Configuration.new([ConfigurationPropertyMother.create("k1", false, "v1")].to_java(ConfigurationProperty)))
@cruise_config.setPackageRepositories(PackageRepositories.new([@repository1, @repository2].to_java(PackageRepository)))
repo_metadata_store = RepositoryMetadataStore.getInstance()
configurations = PackageConfigurations.new()
configurations.add(PackageConfiguration.new("k1").with(PackageConfiguration::DISPLAY_NAME, "Key 1").with(PackageConfiguration::DISPLAY_ORDER, 0))
repo_metadata_store.addMetadataFor("pluginid", configurations)
end
it "should render form for editing package repository" do
get :edit, :id => "abcd-1234"
expect(assigns[:package_repository]).to eq(@repository1)
expect(assigns[:repository_configuration]).to_not be_nil
expect(assigns[:repository_configuration].properties.size).to eq(1)
expect(assigns[:repository_configuration].properties[0].display_name).to eq("Key 1")
expect(assigns[:repository_configuration].properties[0].value).to eq("v1")
expect(assigns[:package_repositories]).to eq(@cruise_config.getPackageRepositories())
expect(assigns[:tab_name]).to eq("package-repositories")
expect(assigns[:package_to_pipeline_map]).to eq(@cruise_config.getGroups().getPackageUsageInPipelines())
assert_template layout: "admin"
end
it "should render error if plugin is missing package repository" do
get :edit, :id => "with-missing-plugin"
expect(assigns[:package_repository]).to eq(@repository2)
expect(assigns[:repository_configuration]).to_not be_nil
expect(assigns[:repository_configuration].properties.size).to eq(0)
expect(assigns[:errors].size).to eq(1)
expect(assigns[:errors]).to include("Associated plugin 'missing' not found. Please contact the Go admin to install the plugin.")
expect(assigns[:package_repositories]).to eq(@cruise_config.getPackageRepositories())
expect(assigns[:tab_name]).to eq("package-repositories")
end
it "should render 404 page when repo is missing" do
get :edit, :id => "missing-repo-id"
expect(response.response_code).to eq(404)
expect(assigns[:message]).to eq("Could not find the repository with id 'missing-repo-id'. It might have been deleted.")
expect(assigns[:status]).to eq(404)
end
end
# PUT /admin/package_repositories/:id — mirrors "create": success sets flash
# and redirect; failure surfaces field/global errors in the JSON body.
describe "update" do
before(:each) do
controller.stub(:package_repository_service).with().and_return(@package_repository_service= double('Package Repository Service'))
@cruise_config = BasicCruiseConfig.new
@cloner.should_receive(:deepClone).at_least(1).times.with(@cruise_config).and_return(@cruise_config)
@go_config_service.should_receive(:getConfigForEditing).and_return(@cruise_config)
@user = current_user
end
it "should update package repository form" do
package_repository = PackageRepository.new
PackageRepository.stub(:new).and_return(package_repository)
@package_repository_service.should_receive(:savePackageRepositoryToConfig).with(package_repository, "1234abcd", @user).and_return(ConfigUpdateAjaxResponse::success("id", 200, "success"))
post :update, :config_md5 => "1234abcd", :id => "id", :package_repository => {:name => "name", :pluginConfiguration => {:id => "yum"}, :configuration => {"0" => {:configurationKey => {:name => "key"}, :configurationValue => {:value => "value"}}}}
expect(response.body).to eq("{\"fieldErrors\":{},\"globalErrors\":[],\"message\":\"success\",\"isSuccessful\":true,\"subjectIdentifier\":\"id\",\"redirectUrl\":\"/admin/package_repositories/id/edit\"}")
expect(flash[:success]).to eq("success")
expect(response.response_code).to eq(200)
expect(response.location).to eq("/admin/package_repositories/id/edit")
expect(response.headers["Go-Config-Error"]).to eq(nil)
end
it "should not add flash message when update fails" do
package_repository = PackageRepository.new
PackageRepository.stub(:new).and_return(package_repository)
fieldErrors = HashMap.new
fieldErrors.put("field1", Arrays.asList(["error 1"].to_java(java.lang.String)))
fieldErrors.put("field2", Arrays.asList(["error 2"].to_java(java.lang.String)))
ajax_response = ConfigUpdateAjaxResponse::failure("id", 500, "failed", fieldErrors, Arrays.asList(["global1", "global2"].to_java(java.lang.String)))
@package_repository_service.should_receive(:savePackageRepositoryToConfig).with(package_repository, "1234abcd", @user).and_return(ajax_response)
post :update, :config_md5 => "1234abcd", :id => "id", :package_repository => {:name => "name", :pluginConfiguration => {:id => "yum"}, :configuration => {"0" => {:configurationKey => {:name => "key"}, :configurationValue => {:value => "value"}}}}
expect(flash[:notice]).to eq(nil)
expect(response.body).to eq("{\"fieldErrors\":{\"field2\":[\"error 2\"],\"field1\":[\"error 1\"]},\"globalErrors\":[\"global1\",\"global2\"],\"message\":\"failed\",\"isSuccessful\":false,\"subjectIdentifier\":\"id\"}")
expect(flash[:success]).to eq(nil)
expect(response.response_code).to eq(500)
expect(response.headers["Go-Config-Error"]).to eq("failed")
expect(response.location).to eq(nil)
end
end
# GET check_connection — delegates to the service and renders a JSON body
# with either a "success" or an "error" key depending on the result.
describe "check connection" do
before(:each) do
controller.stub(:package_repository_service).with().and_return(@package_repository_service= double('Package Repository Service'))
@result = HttpLocalizedOperationResult.new
HttpLocalizedOperationResult.stub(:new).and_return(@result)
end
it "should check connection for given package repository" do
package_repository = PackageRepositoryMother.create("repo-id", "name", "yum", nil, Configuration.new([ConfigurationPropertyMother.create("key", false, "value")].to_java(ConfigurationProperty)))
@result.should_receive(:isSuccessful).and_return(true)
@result.should_receive(:message).with(anything).and_return("Connection OK from plugin.")
@package_repository_service.should_receive(:checkConnection).with(package_repository, @result)
get :check_connection, :package_repository => {:name => "name", :repoId => "repo-id", :pluginConfiguration => {:id => "yum"}, :configuration => {"0" => {:configurationKey => {:name => "key"}, :configurationValue => {:value => "value"}}}}
json = JSON.parse(response.body)
expect(json["success"]).to eq("Connection OK from plugin.")
expect(json["error"]).to eq(nil)
end
it "should show error when check connection fails for given package repository" do
package_repository = PackageRepositoryMother.create("repo-id", "name", "yum", nil, Configuration.new([ConfigurationPropertyMother.create("key", false, "value")].to_java(ConfigurationProperty)))
@result.should_receive(:isSuccessful).and_return(false)
@result.should_receive(:message).twice.with(anything).and_return("Connection To Repo Failed. Bad Url")
@package_repository_service.should_receive(:checkConnection).with(package_repository, @result)
get :check_connection, :package_repository => {:name => "name", :repoId => "repo-id", :pluginConfiguration => {:id => "yum"}, :configuration => {"0" => {:configurationKey => {:name => "key"}, :configurationValue => {:value => "value"}}}}
json = JSON.parse(response.body)
expect(json["success"]).to eq(nil)
expect(json["error"]).to eq("Connection To Repo Failed. Bad Url")
end
end
# DELETE /admin/package_repositories/:id — successful deletion redirects to
# the list with a flash; a failed config save re-renders "edit" with HTTP 400.
describe :destroy do
before :each do
@cruise_config = double('cruise config')
@cloner.should_receive(:deepClone).at_least(1).times.with(@cruise_config).and_return(@cruise_config)
@go_config_service.should_receive(:getConfigForEditing).at_least(1).times.and_return(@cruise_config)
@config_md5 = "1234abcd"
@update_response = double('update_response')
end
it "should delete repository successfully" do
@update_response.should_receive(:getCruiseConfig).and_return(@cruise_config)
@update_response.should_receive(:getNode).and_return(@cruise_config)
@update_response.should_receive(:getSubject).and_return(@cruise_config)
@update_response.should_receive(:configAfterUpdate).and_return(@cruise_config)
@update_response.should_receive(:wasMerged).and_return(false)
@go_config_service.should_receive(:updateConfigFromUI).with(anything, @config_md5, an_instance_of(Username), an_instance_of(HttpLocalizedOperationResult)).and_return(@update_response)
stub_service(:flash_message_service).should_receive(:add).with(FlashMessageModel.new("Saved successfully.", "success")).and_return("random-uuid")
delete :destroy, :id => "repo-id", :config_md5 => @config_md5
expect(response).to redirect_to package_repositories_list_path(:fm => 'random-uuid')
end
it "should render error when repository can not be deleted" do
repository_id = 'some_repository_id'
@update_response.should_receive(:getCruiseConfig).twice.and_return(@cruise_config)
@update_response.should_receive(:getNode).and_return(@cruise_config)
@update_response.should_receive(:getSubject).and_return(@cruise_config)
@update_response.should_receive(:configAfterUpdate).and_return(@cruise_config)
plugin_configuration = double(PluginConfiguration)
plugin_configuration.should_receive(:getId).and_return(repository_id)
package_repository = double(PackageRepository)
package_repository.should_receive(:getPluginConfiguration).and_return(plugin_configuration)
package_repositories = double(PackageRepositories)
package_repositories.should_receive(:find).with(repository_id).and_return(package_repository)
@cruise_config.should_receive(:getPackageRepositories).twice.and_return(package_repositories)
pipeline_groups = double(PipelineGroups)
pipeline_groups.should_receive(:getPackageUsageInPipelines).and_return(nil)
@cruise_config.should_receive(:getGroups).and_return(pipeline_groups)
@cruise_config.should_receive(:getAllErrorsExceptFor).and_return([])
# The updateConfigFromUI stub marks the save as a bad request so the
# controller takes its failure path.
@go_config_service.should_receive(:updateConfigFromUI).with(anything, @config_md5, an_instance_of(Username), an_instance_of(HttpLocalizedOperationResult)) do |action, md5, user, r|
r.badRequest(LocalizedMessage.string("SAVE_FAILED"))
end.and_return(@update_response)
delete :destroy, :id => repository_id, :config_md5 => @config_md5
expect(assigns[:tab_name]).to eq("package-repositories")
assert_template "edit"
assert_template layout: "admin"
expect(response.status).to eq(400)
assert_template layout: "admin"
end
end
end
end
| apache-2.0 |
TribeMedia/aura | aura-components/src/test/components/test/libraryTest/libraryTestHelper.js | 671 | /*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
({
// Intentionally empty: per the original note, a helper file needs to exist
// (even with no methods) so the component's libraries are exposed.
})
gyeongin/reef | lang/cs/Org.Apache.REEF.Bridge/InteropLogger.cpp | 2006 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#include "InteropLogger.h"
namespace Org {
namespace Apache {
namespace REEF {
namespace Bridge {
/// currently not being used
// C++/CLI bridge that forwards managed log messages to a Java-side
// InteropLogger object via JNI.
InteropLogger::InteropLogger(JNIEnv* env, jobject jobjectInteropLogger) {
// Cache the JNI environment, the Java logger object, and the method id
// of its log(int, String) method for use in Log().
_env = env;
_jobjectInteropLogger = jobjectInteropLogger;
_jclassInteropLogger = env->GetObjectClass(jobjectInteropLogger);
wchar_t formatBuf[1024];
// NOTE(review): these branches log only when the lookup FAILED (pointer is
// NULL), printing the (null) pointer value — presumably debug tracing.
if (NULL == _jclassInteropLogger) {
swprintf_s(formatBuf, sizeof(formatBuf) / sizeof(wchar_t), L"_jclassInteropLogger %p\n", _jclassInteropLogger);
fwprintf(stdout, formatBuf);
fflush(stdout);
}
_jmidLog = env->GetMethodID(_jclassInteropLogger, "log", "(ILjava/lang/String;)V");
if (NULL == _jmidLog) {
swprintf_s(formatBuf, sizeof(formatBuf) / sizeof(wchar_t), L"_jmidLog %p\n", _jmidLog);
fwprintf(stdout, formatBuf);
fflush(stdout);
}
}
// Convert the managed String to a JNI jstring (UTF-16 code units) and invoke
// the cached Java log(traceLevel, message) method.
void InteropLogger::Log(TraceLevel traceLevel, String^ message) {
pin_ptr<const wchar_t> wch = PtrToStringChars(message);
jstring msg = _env->NewString((const jchar*)wch, message->Length);
_env->CallObjectMethod(_jobjectInteropLogger, _jmidLog, (int)traceLevel, msg);
}
}
}
}
}
ghik/intellij-scala | src/org/jetbrains/plugins/scala/components/TypeAwareHighlightingApplicationState.scala | 1742 | package org.jetbrains.plugins.scala
package components
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.components._
import org.jetbrains.plugins.scala.statistics.CacheStatistics
/**
* User: Dmitry Naydanov
* Date: 11/19/12
*/
// Application-level IntelliJ component whose state (whether to keep
// suggesting type-aware highlighting) is persisted to scala_config.xml.
// The bean-property name in the inner settings class is part of the
// persisted format — do not rename it.
@State(name = "TypeAwareHighlightingApplicationState",
storages = Array(
new Storage(
id = "TypeAwareHighlightingApplicationState",
file = "$APP_CONFIG$/scala_config.xml"
)
))
class TypeAwareHighlightingApplicationState extends ApplicationComponent with
PersistentStateComponent[TypeAwareHighlightingApplicationState.TypeAwareHighlightingApplicationSettings] {
import org.jetbrains.plugins.scala.components.TypeAwareHighlightingApplicationState.TypeAwareHighlightingApplicationSettings
private var myState = new TypeAwareHighlightingApplicationSettings
// Accessors for the single persisted flag.
def suggest() = myState.getSUGGEST_TYPE_AWARE_HIGHLIGHTING_ENABLED
def setSuggest(b: Boolean) {
myState setSUGGEST_TYPE_AWARE_HIGHLIGHTING_ENABLED b
}
def getState: TypeAwareHighlightingApplicationSettings = myState
def loadState(state: TypeAwareHighlightingApplicationSettings) {
myState = state
}
def getComponentName = "TypeAwareHighlightingApplicationState"
def initComponent() {}
def disposeComponent(): Unit = {
// Flush cache statistics when the application component is disposed.
CacheStatistics.printStats()
}
}
object TypeAwareHighlightingApplicationState {
// Serialized settings bean; @BeanProperty generates the getter/setter pair
// the persistence framework reflects on.
class TypeAwareHighlightingApplicationSettings {
import scala.beans.BeanProperty
@BeanProperty
var SUGGEST_TYPE_AWARE_HIGHLIGHTING_ENABLED: Boolean = false
}
// Look up the singleton instance registered with the application.
def getInstance = ApplicationManager.getApplication getComponent classOf[TypeAwareHighlightingApplicationState]
}
smmribeiro/intellij-community | java/java-tests/testData/inspection/dataFlow/jspecify/OverrideParametersThatAreTypeVariables.java | 1402 | /*
* Copyright 2020 The JSpecify Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.jspecify.nullness.NullMarked;
import org.jspecify.nullness.Nullable;
@NullMarked
// JSpecify conformance fixture: Sub overrides Super's methods whose parameter
// types are (or contain) type variables, renaming E/F to T/U. The declarations
// themselves are the test data; there is no behavior to document beyond them.
class OverrideParametersThatAreTypeVariables {
interface Super<E extends @Nullable Object> {
void useE(E e);
<F extends E> void useF(F f);
void useLibE(Lib<E> e);
<F extends E> void useLibF(Lib<F> f);
void useArrayOfE(E[] e);
<F extends E> void useArrayOfF(F[] f);
}
interface Sub<T extends @Nullable Object> extends Super<T> {
@Override
void useE(T e);
@Override
<U extends T> void useF(U f);
@Override
void useLibE(Lib<T> e);
@Override
<U extends T> void useLibF(Lib<U> f);
@Override
void useArrayOfE(T[] e);
@Override
<U extends T> void useArrayOfF(U[] f);
}
interface Lib<T extends @Nullable Object> {}
}
| apache-2.0 |
nebril/fuel-web | fuelmenu/fuelmenu/common/modulehelper.py | 9219 | # -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fuelmenu.common.urwidwrapper as widget
from fuelmenu.settings import Settings
import logging
import netifaces
import re
import socket
import struct
import subprocess
import urwid
import urwid.raw_display
import urwid.web_display
# Module-level logger and a shared urwid spacer widget reused between fields.
log = logging.getLogger('fuelmenu.modulehelper')
blank = urwid.Divider()
class ModuleHelper(object):
@classmethod
def load(cls, modobj):
#Read in yaml
defaultsettings = Settings().read(modobj.parent.defaultsettingsfile)
oldsettings = defaultsettings.copy()
oldsettings.update(Settings().read(modobj.parent.settingsfile))
for setting in modobj.defaults.keys():
if "label" in setting:
continue
elif "/" in setting:
part1, part2 = setting.split("/")
modobj.defaults[setting]["value"] = oldsettings[part1][part2]
else:
modobj.defaults[setting]["value"] = oldsettings[setting]
if modobj.netsettings and oldsettings["ADMIN_NETWORK"]["interface"] \
in modobj.netsettings.keys():
modobj.activeiface = oldsettings["ADMIN_NETWORK"]["interface"]
return oldsettings
@classmethod
def save(cls, modobj, responses):
newsettings = dict()
for setting in responses.keys():
if "/" in setting:
part1, part2 = setting.split("/")
if part1 not in newsettings:
#We may not touch all settings, so copy oldsettings first
newsettings[part1] = modobj.oldsettings[part1]
newsettings[part1][part2] = responses[setting]
else:
newsettings[setting] = responses[setting]
return newsettings
@classmethod
def cancel(self, cls, button=None):
for index, fieldname in enumerate(cls.fields):
if fieldname != "blank" and "label" not in fieldname:
try:
cls.edits[index].set_edit_text(cls.defaults[fieldname][
'value'])
except AttributeError:
log.warning("Field %s unable to reset text" % fieldname)
    @classmethod
    def screenUI(cls, modobj, headertext, fields, defaults,
                 showallbuttons=False, buttons_visible=True):
        """Build and return the urwid ListBox screen for a fuelmenu module.

        :param modobj: module whose screen is being built; its ``edits``,
            ``walker`` and ``listbox_content`` attributes are set here.
        :param headertext: strings (wrapped in urwid.Text) or ready widgets
            shown at the top of the screen.
        :param fields: ordered field keys; "blank" inserts a spacer.
        :param defaults: per-field spec dicts ("label", "value", "tooltip",
            optionally "choices"); a "value" of "radio" or "label" selects
            a widget type instead of a text field.
        :param showallbuttons: when True, always show Check/Cancel/Apply
            even if the parent uses a global save.
        :param buttons_visible: when False, render no button row at all.
        :returns: urwid.ListBox wrapping a TabbedListWalker.
        """
        log.debug("Preparing screen UI for %s" % modobj.name)
        #Define text labels, text fields, and buttons first
        header_content = []
        for text in headertext:
            if isinstance(text, str):
                header_content.append(urwid.Text(text))
            else:
                header_content.append(text)
        edits = []
        toolbar = modobj.parent.footer
        for key in fields:
            #Example: key = hostname, label = Hostname, value = fuel-pm
            if key == "blank":
                edits.append(blank)
            elif defaults[key]["value"] == "radio":
                # Radio group row: label on the left, choices on the right.
                label = widget.TextLabel(defaults[key]["label"])
                if "choices" in defaults[key]:
                    choices_list = defaults[key]["choices"]
                else:
                    choices_list = ["Yes", "No"]
                choices = widget.ChoicesGroup(choices_list,
                                              default_value="Yes",
                                              fn=modobj.radioSelect)
                columns = widget.Columns([('weight', 2, label),
                                          ('weight', 3, choices)])
                #Attach choices rb_group so we can use it later
                columns.rb_group = choices.rb_group
                edits.append(columns)
            elif defaults[key]["value"] == "label":
                edits.append(widget.TextLabel(defaults[key]["label"]))
            else:
                # Plain text field; mask input for any key containing
                # "PASSWORD" (case-insensitive).
                ispassword = "PASSWORD" in key.upper()
                caption = defaults[key]["label"]
                default = defaults[key]["value"]
                tooltip = defaults[key]["tooltip"]
                edits.append(
                    widget.TextField(key, caption, 23, default, tooltip,
                                     toolbar, ispassword=ispassword))
        listbox_content = []
        listbox_content.extend(header_content)
        listbox_content.append(blank)
        listbox_content.extend(edits)
        listbox_content.append(blank)
        #Wrap buttons into Columns so it doesn't expand and look ugly
        if buttons_visible:
            #Button to check
            button_check = widget.Button("Check", modobj.check)
            #Button to revert to previously saved settings
            button_cancel = widget.Button("Cancel", modobj.cancel)
            #Button to apply (and check again)
            button_apply = widget.Button("Apply", modobj.apply)
            # With a global save, only the Check button is shown unless the
            # caller forces all buttons.
            if modobj.parent.globalsave and showallbuttons is False:
                check_col = widget.Columns([button_check])
            else:
                check_col = widget.Columns([button_check, button_cancel,
                                            button_apply, ('weight', 2, blank)])
            listbox_content.append(check_col)
        #Add everything into a ListBox and return it
        listwalker = widget.TabbedListWalker(listbox_content)
        screen = urwid.ListBox(listwalker)
        modobj.edits = edits
        modobj.walker = listwalker
        modobj.listbox_content = listbox_content
        return screen
@classmethod
def getNetwork(cls, modobj):
"""Returns addr, broadcast, netmask for each network interface."""
re_ifaces = re.compile(r"lo|vir|vbox|docker|veth")
for iface in netifaces.interfaces():
if re_ifaces.search(iface):
continue
try:
modobj.netsettings.update({iface: netifaces.ifaddresses(iface)[
netifaces.AF_INET][0]})
modobj.netsettings[iface]["onboot"] = "Yes"
except (TypeError, KeyError):
modobj.netsettings.update({iface: {"addr": "", "netmask": "",
"onboot": "no"}})
modobj.netsettings[iface]['mac'] = netifaces.ifaddresses(iface)[
netifaces.AF_LINK][0]['addr']
#Set link state
try:
with open("/sys/class/net/%s/operstate" % iface) as f:
content = f.readlines()
modobj.netsettings[iface]["link"] = content[0].strip()
except IOError:
log.warning("Unable to read operstate file for %s" % iface)
modobj.netsettings[iface]["link"] = "unknown"
#Change unknown link state to up if interface has an IP
if modobj.netsettings[iface]["link"] == "unknown":
if modobj.netsettings[iface]["addr"] != "":
modobj.netsettings[iface]["link"] = "up"
#Read bootproto from /etc/sysconfig/network-scripts/ifcfg-DEV
modobj.netsettings[iface]['bootproto'] = "none"
try:
with open("/etc/sysconfig/network-scripts/ifcfg-%s" % iface)\
as fh:
for line in fh:
if re.match("^BOOTPROTO=", line):
modobj.netsettings[iface]['bootproto'] = \
line.split('=').strip()
break
except Exception:
#Check for dhclient process running for this interface
if modobj.getDHCP(iface):
modobj.netsettings[iface]['bootproto'] = "dhcp"
else:
modobj.netsettings[iface]['bootproto'] = "none"
modobj.gateway = modobj.get_default_gateway_linux()
@classmethod
def getDHCP(cls, iface):
"""Returns True if the interface has a dhclient process running."""
noout = open('/dev/null', 'w')
dhclient_running = subprocess.call(["pgrep", "-f", "dhclient.*%s" %
(iface)], stdout=noout,
stderr=noout)
return (dhclient_running == 0)
@classmethod
def get_default_gateway_linux(cls):
"""Read the default gateway directly from /proc."""
with open("/proc/net/route") as fh:
for line in fh:
fields = line.strip().split()
if fields[1] != '00000000' or not int(fields[3], 16) & 2:
continue
return socket.inet_ntoa(struct.pack("<L", int(fields[2], 16)))
| apache-2.0 |
zaki50/droidkaigi2016 | app/src/main/java/io/github/droidkaigi/confsched/model/Speaker.java | 839 | package io.github.droidkaigi.confsched.model;
import com.google.gson.annotations.SerializedName;
import com.github.gfx.android.orma.annotation.Column;
import com.github.gfx.android.orma.annotation.PrimaryKey;
import com.github.gfx.android.orma.annotation.Table;
import org.parceler.Parcel;
import android.support.annotation.Nullable;
/**
 * A conference speaker.
 * Persisted with Orma ({@code @Table}/{@code @Column}), made parcelable via
 * Parceler ({@code @Parcel}), and populated from the session feed JSON
 * through Gson's {@code @SerializedName} mappings.
 */
@Parcel
@Table
public class Speaker {
    /** Stable identifier supplied by the feed (not auto-generated). */
    @PrimaryKey(auto = false)
    @Column(indexed = true)
    @SerializedName("id")
    public int id;
    /** Display name of the speaker. */
    @Column(indexed = true)
    @SerializedName("name")
    public String name;
    /** URL of the speaker's image; may be absent in the feed. */
    @Column
    @Nullable
    @SerializedName("image_url")
    public String imageUrl;
    /** Twitter account name; optional. */
    @Column
    @Nullable
    @SerializedName("twitter_name")
    public String twitterName;
    /** GitHub account name; optional. */
    @Column
    @Nullable
    @SerializedName("github_name")
    public String githubName;
}
| apache-2.0 |
ya7lelkom/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201502/CollectionSizeErrorReason.java | 1018 |
package com.google.api.ads.dfp.jaxws.v201502;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for CollectionSizeError.Reason.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="CollectionSizeError.Reason">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="TOO_LARGE"/>
* <enumeration value="UNKNOWN"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "CollectionSizeError.Reason")
@XmlEnum
public enum CollectionSizeErrorReason {
    /**
     * The value indicating that a supplied collection exceeded the
     * permitted size.
     */
    TOO_LARGE,
    /**
     *
     * The value returned if the actual value is not exposed by the requested API version.
     *
     *
     */
    UNKNOWN;
    /** Returns the XML string value of this constant (same as its name). */
    public String value() {
        return name();
    }
    /** Returns the constant matching the given XML string value. */
    public static CollectionSizeErrorReason fromValue(String v) {
        return valueOf(v);
    }
}
| apache-2.0 |
BigBoss424/portfolio | v7/development/node_modules/date-fns/locale/fr/_lib/formatters/index.js | 843 | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var formatters = {}; // Special case for day of month ordinals in long date format context:
// 1er mars, 2 mars, 3 mars, …
// See https://github.com/date-fns/date-fns/issues/437
var monthsTokens = ['MMM', 'MMMM'];
monthsTokens.forEach(function (monthToken) {
formatters['Do ' + monthToken] = function (date, options) {
var commonFormatters = options.formatters;
var dayOfMonthToken = date.getUTCDate() === 1 ? 'Do' : 'D';
var dayOfMonthFormatter = commonFormatters[dayOfMonthToken];
var monthFormatter = commonFormatters[monthToken];
return dayOfMonthFormatter(date, options) + ' ' + monthFormatter(date, options);
};
});
var _default = formatters;
exports.default = _default;
module.exports = exports.default; | apache-2.0 |
NaNi-Z/manageiq | app/models/manageiq/providers/ansible_tower/shared/automation_manager/event_parser.rb | 340 | module ManageIQ::Providers::AnsibleTower::Shared::AutomationManager::EventParser
def event_to_hash(event, ems_id)
{
:event_type => "#{event['object1']}_#{event['operation']}",
:source => "#{self.source}",
:timestamp => event['timestamp'],
:full_data => event,
:ems_id => ems_id
}
end
end
| apache-2.0 |
lyy4j/rmq4note | tools/src/main/java/org/apache/rocketmq/tools/command/broker/CleanExpiredCQSubCommand.java | 2790 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.tools.command.broker;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.rocketmq.remoting.RPCHook;
import org.apache.rocketmq.tools.admin.DefaultMQAdminExt;
import org.apache.rocketmq.tools.command.SubCommand;
/**
 * CLI subcommand that asks brokers to delete consume-queue data for expired
 * messages, either on a single broker (-b) or across a whole cluster (-c).
 */
public class CleanExpiredCQSubCommand implements SubCommand {
    @Override
    public String commandName() {
        return "cleanExpiredCQ";
    }

    @Override
    public String commandDesc() {
        return "Clean expired ConsumeQueue on broker.";
    }

    @Override
    public Options buildCommandlineOptions(Options options) {
        // Both options are optional here; execute() prefers -b over -c.
        Option brokerOption = new Option("b", "brokerAddr", true, "Broker address");
        brokerOption.setRequired(false);
        options.addOption(brokerOption);

        Option clusterOption = new Option("c", "cluster", true, "clustername");
        clusterOption.setRequired(false);
        options.addOption(clusterOption);
        return options;
    }

    @Override
    public void execute(CommandLine commandLine, Options options, RPCHook rpcHook) {
        DefaultMQAdminExt adminExt = new DefaultMQAdminExt(rpcHook);
        // Unique instance name so concurrent tool invocations do not clash.
        adminExt.setInstanceName(Long.toString(System.currentTimeMillis()));
        try {
            adminExt.start();
            boolean cleaned;
            if (commandLine.hasOption('b')) {
                // Clean the single broker identified by its address.
                String brokerAddr = commandLine.getOptionValue('b').trim();
                cleaned = adminExt.cleanExpiredConsumerQueueByAddr(brokerAddr);
            } else {
                // Clean every broker in the named cluster (null = default).
                String clusterName = commandLine.getOptionValue('c');
                if (null != clusterName) {
                    clusterName = clusterName.trim();
                }
                cleaned = adminExt.cleanExpiredConsumerQueue(clusterName);
            }
            System.out.printf(cleaned ? "success" : "false");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            adminExt.shutdown();
        }
    }
}
| apache-2.0 |
robzor92/hops | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/AllocateRequest.java | 9292 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api.protocolrecords;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerResourceIncreaseRequest;
import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.UpdateContainerRequest;
import org.apache.hadoop.yarn.util.Records;
/**
* <p>The core request sent by the <code>ApplicationMaster</code> to the
* <code>ResourceManager</code> to obtain resources in the cluster.</p>
*
* <p>The request includes:
* <ul>
* <li>A response id to track duplicate responses.</li>
* <li>Progress information.</li>
* <li>
* A list of {@link ResourceRequest} to inform the
* <code>ResourceManager</code> about the application's
* resource requirements.
* </li>
* <li>
* A list of unused {@link Container} which are being returned.
* </li>
* <li>
* A list of {@link UpdateContainerRequest} to inform
* the <code>ResourceManager</code> about the change in
* requirements of running containers.
* </li>
* </ul>
*
* @see ApplicationMasterProtocol#allocate(AllocateRequest)
*/
@Public
@Stable
public abstract class AllocateRequest {
  /**
   * Create an <code>AllocateRequest</code> with no container update
   * requests; delegates to the full factory with a <code>null</code>
   * update list.
   * @param responseID response id to detect duplicate responses
   * @param appProgress current application progress
   * @param resourceAsk resource requirements
   * @param containersToBeReleased containers being returned
   * @param resourceBlacklistRequest blacklist additions/removals
   * @return a new <code>AllocateRequest</code>
   */
  @Public
  @Stable
  public static AllocateRequest newInstance(int responseID, float appProgress,
      List<ResourceRequest> resourceAsk,
      List<ContainerId> containersToBeReleased,
      ResourceBlacklistRequest resourceBlacklistRequest) {
    return newInstance(responseID, appProgress, resourceAsk,
        containersToBeReleased, null, resourceBlacklistRequest);
  }
  /**
   * Use {@link AllocateRequest#newInstance(int, float, List, List,
   * ResourceBlacklistRequest, List)} instead
   * @param responseID responseId
   * @param appProgress appProgress
   * @param resourceAsk resourceAsk
   * @param containersToBeReleased containersToBeReleased
   * @param resourceBlacklistRequest resourceBlacklistRequest
   * @param increaseRequests increaseRequests
   * @return AllocateRequest
   */
  @Deprecated
  public static AllocateRequest newInstance(int responseID, float appProgress,
      List<ResourceRequest> resourceAsk,
      List<ContainerId> containersToBeReleased,
      ResourceBlacklistRequest resourceBlacklistRequest,
      List<ContainerResourceIncreaseRequest> increaseRequests) {
    AllocateRequest allocateRequest = Records.newRecord(AllocateRequest.class);
    allocateRequest.setResponseId(responseID);
    allocateRequest.setProgress(appProgress);
    allocateRequest.setAskList(resourceAsk);
    allocateRequest.setReleaseList(containersToBeReleased);
    allocateRequest.setResourceBlacklistRequest(resourceBlacklistRequest);
    allocateRequest.setIncreaseRequests(increaseRequests);
    return allocateRequest;
  }
  /**
   * Create a fully-populated <code>AllocateRequest</code>, including the
   * list of container update requests.
   * @param responseID response id to detect duplicate responses
   * @param appProgress current application progress
   * @param resourceAsk resource requirements
   * @param containersToBeReleased containers being returned
   * @param updateRequests container update requests
   * @param resourceBlacklistRequest blacklist additions/removals
   * @return a new <code>AllocateRequest</code>
   */
  @Public
  @Unstable
  public static AllocateRequest newInstance(int responseID, float appProgress,
      List<ResourceRequest> resourceAsk,
      List<ContainerId> containersToBeReleased,
      List<UpdateContainerRequest> updateRequests,
      ResourceBlacklistRequest resourceBlacklistRequest) {
    AllocateRequest allocateRequest = Records.newRecord(AllocateRequest.class);
    allocateRequest.setResponseId(responseID);
    allocateRequest.setProgress(appProgress);
    allocateRequest.setAskList(resourceAsk);
    allocateRequest.setReleaseList(containersToBeReleased);
    allocateRequest.setResourceBlacklistRequest(resourceBlacklistRequest);
    allocateRequest.setUpdateRequests(updateRequests);
    return allocateRequest;
  }
  /**
   * Get the <em>response id</em> used to track duplicate responses.
   * @return <em>response id</em>
   */
  @Public
  @Stable
  public abstract int getResponseId();
  /**
   * Set the <em>response id</em> used to track duplicate responses.
   * @param id <em>response id</em>
   */
  @Public
  @Stable
  public abstract void setResponseId(int id);
  /**
   * Get the <em>current progress</em> of application.
   * @return <em>current progress</em> of application
   */
  @Public
  @Stable
  public abstract float getProgress();
  /**
   * Set the <em>current progress</em> of application
   * @param progress <em>current progress</em> of application
   */
  @Public
  @Stable
  public abstract void setProgress(float progress);
  /**
   * Get the list of <code>ResourceRequest</code> to update the
   * <code>ResourceManager</code> about the application's resource requirements.
   * @return the list of <code>ResourceRequest</code>
   * @see ResourceRequest
   */
  @Public
  @Stable
  public abstract List<ResourceRequest> getAskList();
  /**
   * Set list of <code>ResourceRequest</code> to update the
   * <code>ResourceManager</code> about the application's resource requirements.
   * @param resourceRequests list of <code>ResourceRequest</code> to update the
   *                        <code>ResourceManager</code> about the application's
   *                        resource requirements
   * @see ResourceRequest
   */
  @Public
  @Stable
  public abstract void setAskList(List<ResourceRequest> resourceRequests);
  /**
   * Get the list of <code>ContainerId</code> of containers being
   * released by the <code>ApplicationMaster</code>.
   * @return list of <code>ContainerId</code> of containers being
   *         released by the <code>ApplicationMaster</code>
   */
  @Public
  @Stable
  public abstract List<ContainerId> getReleaseList();
  /**
   * Set the list of <code>ContainerId</code> of containers being
   * released by the <code>ApplicationMaster</code>
   * @param releaseContainers list of <code>ContainerId</code> of
   *                          containers being released by the
   *                          <code>ApplicationMaster</code>
   */
  @Public
  @Stable
  public abstract void setReleaseList(List<ContainerId> releaseContainers);
  /**
   * Get the <code>ResourceBlacklistRequest</code> being sent by the
   * <code>ApplicationMaster</code>.
   * @return the <code>ResourceBlacklistRequest</code> being sent by the
   *         <code>ApplicationMaster</code>
   * @see ResourceBlacklistRequest
   */
  @Public
  @Stable
  public abstract ResourceBlacklistRequest getResourceBlacklistRequest();
  /**
   * Set the <code>ResourceBlacklistRequest</code> to inform the
   * <code>ResourceManager</code> about the blacklist additions and removals
   * per the <code>ApplicationMaster</code>.
   *
   * @param resourceBlacklistRequest the <code>ResourceBlacklistRequest</code>
   *                         to inform the <code>ResourceManager</code> about
   *                         the blacklist additions and removals
   *                         per the <code>ApplicationMaster</code>
   * @see ResourceBlacklistRequest
   */
  @Public
  @Stable
  public abstract void setResourceBlacklistRequest(
      ResourceBlacklistRequest resourceBlacklistRequest);
  /**
   * Use {@link AllocateRequest#getUpdateRequests()} instead
   * @return ContainerResourceIncreaseRequests
   */
  @Deprecated
  public abstract List<ContainerResourceIncreaseRequest> getIncreaseRequests();
  /**
   * Use {@link AllocateRequest#setUpdateRequests(List)} instead
   * @param increaseRequests increaseRequests
   */
  @Deprecated
  public abstract void setIncreaseRequests(
      List<ContainerResourceIncreaseRequest> increaseRequests);
  /**
   * Get the list of container update requests being sent by the
   * <code>ApplicationMaster</code>.
   * @return list of {@link UpdateContainerRequest}
   *         being sent by the
   *         <code>ApplicationMaster</code>.
   */
  @Public
  @Unstable
  public abstract List<UpdateContainerRequest> getUpdateRequests();
  /**
   * Set the list of container update requests to inform the
   * <code>ResourceManager</code> about the containers that need to be
   * updated.
   * @param updateRequests list of <code>UpdateContainerRequest</code> for
   *                       containers to be updated
   */
  @Public
  @Unstable
  public abstract void setUpdateRequests(
      List<UpdateContainerRequest> updateRequests);
}
| apache-2.0 |
Melody/Webdav | tests/lock_if_header_tagged_list_test.php | 8381 | <?php
/**
* File containing the ezcWebdavFileBackendOptionsTestCase class.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* @package Webdav
* @version //autogen//
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache License, Version 2.0
* @subpackage Test
*/
require_once dirname( __FILE__ ) . '/property_test.php';
/**
* Test case for the ezcWebdavFileBackendOptions class.
*
* @package Webdav
* @version //autogen//
* @subpackage Test
*/
class ezcWebdavLockIfHeaderTaggedListTest extends ezcTestCase
{
    /**
     * Builds the PHPUnit suite for this test case class.
     */
    public static function suite()
    {
        return new PHPUnit_Framework_TestSuite( __CLASS__ );
    }
    /**
     * A freshly constructed list must contain no items.
     */
    public function testConstructor()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        $this->assertAttributeEquals(
            array(),
            'items',
            $list
        );
    }
    /**
     * Valid path offsets with arrays of list items must be stored as given.
     */
    public function testOffsetSetSuccess()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        $item1 = array( new ezcWebdavLockIfHeaderListItem() );
        $item2 = array( new ezcWebdavLockIfHeaderListItem() );
        $list['/some/path'] = $item1;
        $list['/'] = $item2;
        $this->assertAttributeEquals(
            array( '/some/path' => $item1, '/' => $item2 ),
            'items',
            $list
        );
    }
    /**
     * Invalid values (non-arrays) and invalid offsets (empty string,
     * integer) must raise ezcBaseValueException and leave the list empty.
     */
    public function testOffsetSetFailure()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        try
        {
            $list['/some/path'] = 23;
            $this->fail( 'Exception not thrown on invalid value.' );
        }
        catch ( ezcBaseValueException $e ) {}
        try
        {
            $list['/'] = new stdClass();
            $this->fail( 'Exception not thrown on invalid value.' );
        }
        catch ( ezcBaseValueException $e ) {}
        try
        {
            $list[''] = new ezcWebdavLockIfHeaderListItem();
            $this->fail( 'Exception not thrown on invalid offset.' );
        }
        catch ( ezcBaseValueException $e ) {}
        try
        {
            $list[23] = new ezcWebdavLockIfHeaderListItem();
            $this->fail( 'Exception not thrown on invalid offset.' );
        }
        catch ( ezcBaseValueException $e ) {}
        $this->assertAttributeEquals(
            array(),
            'items',
            $list
        );
    }
    /**
     * Reading a stored offset returns its items; reading an unknown path
     * returns an empty array rather than failing.
     */
    public function testOffsetGetSuccess()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        $item1 = array( new ezcWebdavLockIfHeaderListItem() );
        $item2 = array( new ezcWebdavLockIfHeaderListItem() );
        $list['/some/path'] = $item1;
        $list['/'] = $item2;
        $this->assertEquals(
            $item1,
            $list['/some/path']
        );
        $this->assertEquals(
            $item2,
            $list['/']
        );
        $this->assertEquals(
            array(),
            $list['/non/existent']
        );
    }
    /**
     * Reading with an invalid offset type must raise ezcBaseValueException.
     */
    public function testOffsetGetFailure()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        try
        {
            $list[''];
            $this->fail( 'Exception not thrown on invalid offset.' );
        }
        catch ( ezcBaseValueException $e ) {}
        try
        {
            $list[23];
            $this->fail( 'Exception not thrown on invalid value.' );
        }
        catch ( ezcBaseValueException $e ) {}
    }
    /**
     * isset() must report true only for stored paths.
     */
    public function testOffsetIssetSuccess()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        $item1 = array( new ezcWebdavLockIfHeaderListItem() );
        $item2 = array( new ezcWebdavLockIfHeaderListItem() );
        $list['/some/path'] = $item1;
        $list['/'] = $item2;
        $this->assertTrue(
            isset( $list['/'] )
        );
        $this->assertTrue(
            isset( $list['/some/path'] )
        );
        $this->assertFalse(
            isset( $list['/none/existent'] )
        );
    }
    /**
     * isset() with an invalid offset type must raise ezcBaseValueException.
     */
    public function testOffsetIssetFailure()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        try
        {
            isset( $list[''] );
            $this->fail( 'Exception not thrown on invalid offset.' );
        }
        catch ( ezcBaseValueException $e ) {}
        try
        {
            isset( $list[23] );
            $this->fail( 'Exception not thrown on invalid value.' );
        }
        catch ( ezcBaseValueException $e ) {}
    }
    /**
     * unset() removes stored paths and is a no-op for unknown ones.
     */
    public function testOffsetUnsetSuccess()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        $item1 = array( new ezcWebdavLockIfHeaderListItem() );
        $item2 = array( new ezcWebdavLockIfHeaderListItem() );
        $list['/some/path'] = $item1;
        $list['/'] = $item2;
        $this->assertTrue(
            isset( $list['/'] )
        );
        $this->assertTrue(
            isset( $list['/some/path'] )
        );
        $this->assertFalse(
            isset( $list['/none/existent'] )
        );
        unset( $list['/'] );
        unset( $list['/some/path'] );
        unset( $list['/none/existent'] );
        $this->assertFalse(
            isset( $list['/'] )
        );
        $this->assertFalse(
            isset( $list['/some/path'] )
        );
        $this->assertFalse(
            isset( $list['/none/existent'] )
        );
    }
    /**
     * unset() with an invalid offset type must raise ezcBaseValueException.
     */
    public function testOffsetUnsetFailure()
    {
        $list = new ezcWebdavLockIfHeaderTaggedList();
        try
        {
            unset( $list[''] );
            $this->fail( 'Exception not thrown on invalid offset.' );
        }
        catch ( ezcBaseValueException $e ) {}
        try
        {
            unset( $list[23] );
            $this->fail( 'Exception not thrown on invalid value.' );
        }
        catch ( ezcBaseValueException $e ) {}
    }
    /**
     * getLockTokens() must collect the lock tokens of all conditions across
     * all stored items, de-duplicated (note the preserved numeric keys: the
     * expectation asserts duplicates are skipped, not re-indexed).
     */
    public function testGetLockTokens()
    {
        $item1 = new ezcWebdavLockIfHeaderListItem(
            array(
                new ezcWebdavLockIfHeaderCondition( 'lock-token-1' ),
                new ezcWebdavLockIfHeaderCondition( 'lock-token-2', true ),
                new ezcWebdavLockIfHeaderCondition( 'lock-token-3' ),
            ),
            array(
                new ezcWebdavLockIfHeaderCondition( 'etag-1', true ),
                new ezcWebdavLockIfHeaderCondition( 'etag-2', true ),
                new ezcWebdavLockIfHeaderCondition( 'etag-3' ),
            )
        );
        $item2 = new ezcWebdavLockIfHeaderListItem(
            array(
                new ezcWebdavLockIfHeaderCondition( 'lock-token-1' ),
                new ezcWebdavLockIfHeaderCondition( 'lock-token-4' ),
            ),
            array(
                new ezcWebdavLockIfHeaderCondition( 'etag-1' ),
                new ezcWebdavLockIfHeaderCondition( 'etag-4', true ),
                new ezcWebdavLockIfHeaderCondition( 'etag-5' ),
            )
        );
        $item3 = new ezcWebdavLockIfHeaderListItem(
            array(
                new ezcWebdavLockIfHeaderCondition( 'lock-token-5', true ),
                new ezcWebdavLockIfHeaderCondition( 'lock-token-6', true ),
            ),
            array()
        );
        $list = new ezcWebdavLockIfHeaderTaggedList();
        $list['/'] = array( $item2 );
        $list['/some/path'] = array( $item1, $item3 );
        $list['/other/path'] = array( $item3, $item2 );
        $this->assertEquals(
            array(
                0 => 'lock-token-1',
                1 => 'lock-token-4',
                3 => 'lock-token-2',
                4 => 'lock-token-3',
                5 => 'lock-token-5',
                6 => 'lock-token-6',
            ),
            $list->getLockTokens()
        );
    }
}
?>
| apache-2.0 |
wwjiang007/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreationDelete.java | 3265 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;
import org.slf4j.event.Level;
public class TestFileCreationDelete {
  {
    // Instance initializer: maximize NameNode log verbosity for debugging.
    DFSTestUtil.setNameNodeLogLevel(Level.TRACE);
  }
  /**
   * Regression test: deleting the parent directory of a file that is still
   * open for write must also remove its lease. After two cluster restarts
   * (which force leases to be persisted to and recovered from the fsimage)
   * the file under the deleted directory must be gone while the unrelated
   * open file survives.
   */
  @Test
  public void testFileCreationDeleteParent() throws IOException {
    Configuration conf = new HdfsConfiguration();
    final int MAX_IDLE_TIME = 2000; // 2s
    conf.setInt("ipc.client.connection.maxidletime", MAX_IDLE_TIME);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
    // create cluster
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
    FileSystem fs = null;
    try {
      cluster.waitActive();
      fs = cluster.getFileSystem();
      // create file1.
      Path dir = new Path("/foo");
      Path file1 = new Path(dir, "file1");
      FSDataOutputStream stm1 = TestFileCreation.createFile(fs, file1, 1);
      System.out.println("testFileCreationDeleteParent: "
          + "Created file " + file1);
      TestFileCreation.writeFile(stm1, 1000);
      // hflush makes the data visible but leaves the file open (lease held).
      stm1.hflush();
      // create file2.
      Path file2 = new Path("/file2");
      FSDataOutputStream stm2 = TestFileCreation.createFile(fs, file2, 1);
      System.out.println("testFileCreationDeleteParent: "
          + "Created file " + file2);
      TestFileCreation.writeFile(stm2, 1000);
      stm2.hflush();
      // rm dir -- file1 is deleted while still open for write.
      fs.delete(dir, true);
      // restart cluster.
      // This ensures that leases are persisted in fsimage.
      cluster.shutdown();
      // Wait for client connections to expire before restarting.
      try {Thread.sleep(2*MAX_IDLE_TIME);} catch (InterruptedException e) {}
      cluster = new MiniDFSCluster.Builder(conf).format(false).build();
      cluster.waitActive();
      // restart cluster yet again. This triggers the code to read in
      // persistent leases from fsimage.
      cluster.shutdown();
      try {Thread.sleep(5000);} catch (InterruptedException e) {}
      cluster = new MiniDFSCluster.Builder(conf).format(false).build();
      cluster.waitActive();
      fs = cluster.getFileSystem();
      assertTrue(!fs.exists(file1));
      assertTrue(fs.exists(file2));
    } finally {
      fs.close();
      cluster.shutdown();
    }
  }
}
| apache-2.0 |
objectiser/camel | platforms/commands/commands-core/src/main/java/org/apache/camel/commands/AbstractCamelCommand.java | 1364 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.commands;
import org.apache.camel.util.ObjectHelper;
/**
* Abstract base command for {@link org.apache.camel.commands.CamelCommand}
*/
public abstract class AbstractCamelCommand implements CamelCommand {

    /**
     * Returns the given string unchanged, or an empty string when the value
     * is empty per {@link ObjectHelper#isEmpty(Object)}.
     */
    public String safeNull(String s) {
        return ObjectHelper.isEmpty(s) ? "" : s;
    }

    /**
     * Returns the {@code toString()} of the given value, or an empty string
     * when the value is empty per {@link ObjectHelper#isEmpty(Object)}.
     */
    public String safeNull(Object s) {
        return ObjectHelper.isEmpty(s) ? "" : s.toString();
    }
}
| apache-2.0 |
walteryang47/ovirt-engine | frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/ApplicationModeHelper.java | 3343 | package org.ovirt.engine.ui.uicommonweb.models;
import java.util.ArrayList;
import java.util.List;
import org.ovirt.engine.core.common.EventNotificationEntity;
import org.ovirt.engine.core.common.businessentities.ActionGroup;
import org.ovirt.engine.core.common.mode.ApplicationMode;
import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider;
import org.ovirt.engine.ui.uicommonweb.models.configure.roles_ui.RoleNode;
public class ApplicationModeHelper {
    // Current UI application mode; defaults to all modes enabled.
    private static ApplicationMode UI_MODE = ApplicationMode.AllModes;
    /**
     * Returns true when the given availability bitmask includes the
     * currently active UI mode.
     */
    public static boolean isAvailableInMode(int availableModes) {
        return (availableModes & UI_MODE.getValue()) > 0;
    }
    /** Returns true when the given mode overlaps the active UI mode. */
    public static boolean isModeSupported(ApplicationMode mode) {
        return isAvailableInMode(mode.getValue());
    }
    /** Returns the currently active UI mode. */
    public static ApplicationMode getUiMode() {
        return UI_MODE;
    }
    /** Sets the active UI mode; null values are silently ignored. */
    public static void setUiMode(ApplicationMode uiMode) {
        if (uiMode != null) {
            UI_MODE = uiMode;
        }
    }
    /**
     * Returns the event notification types whose availability bitmask
     * includes the active UI mode.
     */
    public static ArrayList<EventNotificationEntity> getModeSpecificEventNotificationTypeList() {
        ArrayList<EventNotificationEntity> subList = new ArrayList<EventNotificationEntity>();
        for (EventNotificationEntity entity : AsyncDataProvider.getInstance().getEventNotificationTypeList()) {
            if ((entity.getAvailableInModes() & UI_MODE.getValue()) > 0) {
                subList.add(entity);
            }
        }
        return subList;
    }
    /**
     * Recursively prunes action-group subtrees that are unavailable in the
     * active UI mode. Returns true when the given node ended up with no
     * remaining leaf roles (i.e. the caller should prune it too).
     */
    public static boolean filterActionGroupTreeByApplictionMode(RoleNode tree) {
        ArrayList<RoleNode> list = new ArrayList<RoleNode>();
        for (RoleNode node : tree.getLeafRoles()) {
            // NOTE(review): this returns based on the FIRST childless node
            // encountered, skipping any remaining siblings -- confirm this
            // is intended (it assumes childless nodes only appear alone).
            if (node.getLeafRoles() == null || node.getLeafRoles().isEmpty()) {
                return (ActionGroup.valueOf(node.getName()).getAvailableInModes() & getUiMode().getValue()) == 0;
            }
            if (filterActionGroupTreeByApplictionMode(node)) {
                list.add(node);
            }
        }
        // Remove the subtrees marked for pruning after iteration to avoid
        // concurrent modification.
        for (RoleNode roleNode : list) {
            tree.getLeafRoles().remove(roleNode);
        }
        return tree.getLeafRoles().size() == 0;
    }
    /**
     * Recursively filters the system tree for the active UI mode, hoisting
     * grandchildren of pruned nodes up to keep the tree connected. Returns
     * true when the given item itself is not available in the active mode.
     */
    public static boolean filterSystemTreeByApplictionMode(SystemTreeItemModel systemItem) {
        List<SystemTreeItemModel> list = new ArrayList<SystemTreeItemModel>();
        for (SystemTreeItemModel item : systemItem.getChildren()) {
            if (filterSystemTreeByApplictionMode(item)) {
                list.add(item);
            }
        }
        // If every child is pruned, replace the children with the union of
        // the grandchildren so descendants are not lost.
        if (list.size() > 0 && list.size() == systemItem.getChildren().size()) {
            List<SystemTreeItemModel> childItems = new ArrayList<SystemTreeItemModel>();
            for (SystemTreeItemModel item : list) {
                childItems.addAll(item.getChildren());
            }
            systemItem.setChildren(childItems);
        }
        // Otherwise remove pruned children individually, splicing in their
        // children when not already present.
        for (SystemTreeItemModel systemTreeItemModel : list) {
            systemItem.getChildren().remove(systemTreeItemModel);
            if(systemTreeItemModel.getChildren() != null && !systemItem.getChildren().containsAll(systemTreeItemModel.getChildren())) {
                systemItem.getChildren().addAll(systemTreeItemModel.getChildren());
            }
        }
        return !((systemItem.getApplicationMode().getValue() & getUiMode().getValue()) > 0);
    }
}
| apache-2.0 |
nssales/OG-Platform | projects/OG-Financial/src/main/java/com/opengamma/financial/comparison/PortfolioComparator.java | 2172 | /**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.comparison;
import java.util.Collection;
import java.util.LinkedList;
import org.fudgemsg.FudgeContext;
import com.opengamma.core.position.Portfolio;
import com.opengamma.core.position.PortfolioNode;
import com.opengamma.core.position.Position;
import com.opengamma.core.position.impl.AbstractPortfolioNodeTraversalCallback;
import com.opengamma.core.position.impl.PortfolioNodeTraverser;
import com.opengamma.id.UniqueId;
/**
* Provides comparison operations between {@link Portfolio} objects.
*/
public class PortfolioComparator extends PositionSetComparator {
  public PortfolioComparator(final FudgeContext fudgeContext) {
    super(fudgeContext);
  }
  /**
   * Collects every position in the portfolio into a single flat list by a
   * depth-first, pre-order traversal of the node tree.
   *
   * @param portfolio the portfolio to flatten, not null
   * @return all positions in traversal order
   */
  public static Collection<Position> getFlattenedPositions(final Portfolio portfolio) {
    final Collection<Position> positions = new LinkedList<Position>();
    PortfolioNodeTraverser.depthFirst(new AbstractPortfolioNodeTraversalCallback() {
      @Override
      public void preOrderOperation(final PortfolioNode parentNode, final Position position) {
        positions.add(position);
      }
    }).traverse(portfolio.getRootNode());
    return positions;
  }
  /**
   * Compares the flattened positions of the two portfolios.
   *
   * @param first the first portfolio, not null
   * @param second the second portfolio, not null
   * @return the comparison result, labeled with display names for both
   *         portfolios
   */
  public PortfolioComparison compare(final Portfolio first, final Portfolio second) {
    UniqueId firstId = first.getUniqueId();
    UniqueId secondId = second.getUniqueId();
    String firstName;
    String secondName;
    // if they are two versions of the same portfolio the names need to include the version otherwise the generated
    // portfolio name won't make much sense
    if (firstId != null && secondId != null && firstId.getObjectId().equals(secondId.getObjectId())) {
      firstName = first.getName() + " (version " + firstId.getVersion() + ")";
      secondName = second.getName() + " (version " + secondId.getVersion() + ")";
    } else {
      firstName = first.getName();
      secondName = second.getName();
    }
    return new PortfolioComparison(compare(getFlattenedPositions(first), getFlattenedPositions(second)), firstName, secondName);
  }
}
| apache-2.0 |
walteryang47/ovirt-engine | backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/ImportVmFromOvaCommand.java | 1578 | package org.ovirt.engine.core.bll;
import org.ovirt.engine.core.bll.tasks.CommandCoordinatorUtil;
import org.ovirt.engine.core.common.action.ConvertOvaParameters;
import org.ovirt.engine.core.common.action.ImportVmFromOvaParameters;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.compat.Guid;
@DisableInPrepareMode
@NonTransactiveCommandAttribute(forceCompensation = true)
public class ImportVmFromOvaCommand<T extends ImportVmFromOvaParameters> extends ImportVmFromExternalProviderCommand<T> {

    public ImportVmFromOvaCommand(Guid cmdId) {
        super(cmdId);
    }

    protected ImportVmFromOvaCommand(T parameters) {
        super(parameters);
    }

    /** Launches the asynchronous ConvertOva child command for this import. */
    @Override
    protected void convert() {
        ConvertOvaParameters conversionParameters = buildConvertOvaParameters();
        CommandCoordinatorUtil.executeAsyncCommand(
                VdcActionType.ConvertOva,
                conversionParameters,
                cloneContextAndDetachFromParent());
    }

    // Assembles the parameter holder for the ConvertOva child command from this
    // command's own state and its caller-supplied parameters.
    private ConvertOvaParameters buildConvertOvaParameters() {
        ConvertOvaParameters conversionParameters = new ConvertOvaParameters(getVmId());
        conversionParameters.setVmName(getVmName());
        conversionParameters.setOvaPath(getParameters().getOvaPath());
        conversionParameters.setVirtioIsoName(getParameters().getVirtioIsoName());
        conversionParameters.setProxyHostId(getParameters().getProxyHostId());
        conversionParameters.setDisks(getDisks());
        conversionParameters.setStoragePoolId(getStoragePoolId());
        conversionParameters.setStorageDomainId(getStorageDomainId());
        conversionParameters.setVdsGroupId(getVdsGroupId());
        return conversionParameters;
    }
}
| apache-2.0 |
pczarn/rust | src/test/run-pass/box-compare.rs | 795 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
use std::gc::GC;
pub fn main() {
    // Pre-1.0 Rust: `box(GC) x` allocates `x` on the managed (GC) heap.
    // Comparison operators on boxes compare the pointed-to values, recursing
    // through nested boxes, which is exactly what these asserts exercise.
    assert!((box(GC) 1i < box(GC) 3i));
    assert!((box(GC) box(GC) "hello ".to_string() >
             box(GC) box(GC) "hello".to_string()));
    assert!((box(GC) box(GC) box(GC) "hello".to_string() !=
             box(GC) box(GC) box(GC) "there".to_string()));
}
| apache-2.0 |
camilesing/zstack | utils/src/main/java/org/zstack/utils/data/ArrayHelper.java | 2115 | package org.zstack.utils.data;
import java.io.IOException;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.util.*;
import static org.zstack.utils.ObjectUtils.serializableCopy;
public class ArrayHelper {
    /**
     * Extracts the value of the named declared field from every element of the
     * collection and returns the values as an array of the requested type.
     *
     * @param c the source collection
     * @param fieldName the name of the declared field to read from each element
     * @param returnClassType the component type of the returned array
     * @return an array of the field values, in the collection's iteration order
     * @throws RuntimeException if the field is missing or inaccessible; the
     *         underlying reflection exception is chained as the cause
     */
    public static <T, K> T[] arrayFromField(Collection<K> c, String fieldName, Class<T> returnClassType) {
        try {
            List<T> lst = new ArrayList<T>();
            // Iterate the collection directly instead of through the legacy
            // Collections.enumeration adapter; the order is the same.
            for (K obj : c) {
                Field f = obj.getClass().getDeclaredField(fieldName);
                f.setAccessible(true);
                lst.add((T) f.get(obj));
            }
            return lst.toArray((T[]) Array.newInstance(returnClassType, lst.size()));
        } catch (Exception e) {
            // Chain the original exception so the real reflection failure is
            // not lost (it was previously discarded).
            throw new RuntimeException(String.format("Unable to extract field[%s] from collection[%s] to array of type[%s]", fieldName, c.toString(),
                    returnClassType.getName()), e);
        }
    }
    /**
     * Array overload of {@link #arrayFromField(Collection, String, Class)}:
     * extracts the named declared field from every element of the array.
     *
     * @param c the source array
     * @param fieldName the name of the declared field to read from each element
     * @param returnClassType the component type of the returned array
     * @return an array of the field values, in the source array's order
     * @throws RuntimeException if the field is missing or inaccessible; the
     *         underlying reflection exception is chained as the cause
     */
    public static <T, K> T[] arrayFromField(K[] c, String fieldName, Class<T> returnClassType) {
        try {
            List<T> lst = new ArrayList<T>();
            for (K k : c) {
                Field f = k.getClass().getDeclaredField(fieldName);
                f.setAccessible(true);
                lst.add((T) f.get(k));
            }
            return lst.toArray((T[]) Array.newInstance(returnClassType, lst.size()));
        } catch (Exception e) {
            throw new RuntimeException(String.format("Unable to extract field[%s] from array[%s] to array of type[%s]", fieldName, c.toString(),
                    returnClassType.getName()), e);
        }
    }
    /**
     * Returns a deep copy of the list, copying each element via Java
     * serialization, or {@code null} when the source list is {@code null}.
     *
     * @param sourceList the list to copy, may be null
     * @return a new list of serialized copies, or null
     * @throws IOException if serialization of an element fails
     * @throws ClassNotFoundException if deserialization of an element fails
     */
    public static <T> List<T> serializableCopyList(List<T> sourceList) throws IOException, ClassNotFoundException {
        if(sourceList == null){
            return null;
        }
        List<T> copyList = new ArrayList<>();
        for(T o : sourceList){
            copyList.add(serializableCopy(o));
        }
        return copyList;
    }
}
| apache-2.0 |
kabir/xnio | api/src/main/java/org/xnio/ChannelExceptionHandler.java | 536 |
package org.xnio;
import java.io.IOException;
import java.nio.channels.Channel;
import java.util.EventListener;
/**
 * An exception handler for utility channel listeners.  Gives the application
 * a single callback through which to decide how to react to an I/O failure
 * on a channel (for example by logging it or closing the channel).
 *
 * @param <T> the type of channel handled by this handler
 *
 * @author <a href="mailto:david.lloyd@redhat.com">David M. Lloyd</a>
 */
public interface ChannelExceptionHandler<T extends Channel> extends EventListener {
    /**
     * Handle an exception on the channel.
     *
     * @param channel the channel on which the exception occurred
     * @param exception the exception that was thrown
     */
    void handleException(T channel, IOException exception);
}
| apache-2.0 |
Squarespace/netty | transport/src/main/java/io/netty/channel/DefaultChannelConfig.java | 16072 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel;
import io.netty.buffer.ByteBufAllocator;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import static io.netty.channel.ChannelOption.ALLOCATOR;
import static io.netty.channel.ChannelOption.AUTO_CLOSE;
import static io.netty.channel.ChannelOption.AUTO_READ;
import static io.netty.channel.ChannelOption.CONNECT_TIMEOUT_MILLIS;
import static io.netty.channel.ChannelOption.MAX_MESSAGES_PER_READ;
import static io.netty.channel.ChannelOption.MESSAGE_SIZE_ESTIMATOR;
import static io.netty.channel.ChannelOption.RCVBUF_ALLOCATOR;
import static io.netty.channel.ChannelOption.SINGLE_EVENTEXECUTOR_PER_GROUP;
import static io.netty.channel.ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK;
import static io.netty.channel.ChannelOption.WRITE_BUFFER_LOW_WATER_MARK;
import static io.netty.channel.ChannelOption.WRITE_BUFFER_WATER_MARK;
import static io.netty.channel.ChannelOption.WRITE_SPIN_COUNT;
import static io.netty.util.internal.ObjectUtil.checkNotNull;
import static io.netty.util.internal.ObjectUtil.checkPositive;
import static io.netty.util.internal.ObjectUtil.checkPositiveOrZero;
/**
* The default {@link ChannelConfig} implementation.
*/
/**
 * The default {@link ChannelConfig} implementation.  All mutable settings are
 * held in volatile fields (or swapped via atomic field updaters) so that
 * getters and setters are safe to call from any thread.
 */
public class DefaultChannelConfig implements ChannelConfig {
    private static final MessageSizeEstimator DEFAULT_MSG_SIZE_ESTIMATOR = DefaultMessageSizeEstimator.DEFAULT;
    // Default connect timeout: 30 seconds, in milliseconds.
    private static final int DEFAULT_CONNECT_TIMEOUT = 30000;
    // autoRead is stored as an int (1 = on, 0 = off) so it can be flipped
    // atomically via getAndSet.
    private static final AtomicIntegerFieldUpdater<DefaultChannelConfig> AUTOREAD_UPDATER =
            AtomicIntegerFieldUpdater.newUpdater(DefaultChannelConfig.class, "autoRead");
    // The water mark is replaced wholesale via CAS so the low/high pair always
    // changes together and never observes a torn update.
    private static final AtomicReferenceFieldUpdater<DefaultChannelConfig, WriteBufferWaterMark> WATERMARK_UPDATER =
            AtomicReferenceFieldUpdater.newUpdater(
                    DefaultChannelConfig.class, WriteBufferWaterMark.class, "writeBufferWaterMark");
    protected final Channel channel;
    private volatile ByteBufAllocator allocator = ByteBufAllocator.DEFAULT;
    private volatile RecvByteBufAllocator rcvBufAllocator;
    private volatile MessageSizeEstimator msgSizeEstimator = DEFAULT_MSG_SIZE_ESTIMATOR;
    private volatile int connectTimeoutMillis = DEFAULT_CONNECT_TIMEOUT;
    private volatile int writeSpinCount = 16;
    @SuppressWarnings("FieldMayBeFinal")
    private volatile int autoRead = 1;
    private volatile boolean autoClose = true;
    private volatile WriteBufferWaterMark writeBufferWaterMark = WriteBufferWaterMark.DEFAULT;
    private volatile boolean pinEventExecutor = true;
    public DefaultChannelConfig(Channel channel) {
        this(channel, new AdaptiveRecvByteBufAllocator());
    }
    protected DefaultChannelConfig(Channel channel, RecvByteBufAllocator allocator) {
        setRecvByteBufAllocator(allocator, channel.metadata());
        this.channel = channel;
    }
    @Override
    @SuppressWarnings("deprecation")
    public Map<ChannelOption<?>, Object> getOptions() {
        return getOptions(
                null,
                CONNECT_TIMEOUT_MILLIS, MAX_MESSAGES_PER_READ, WRITE_SPIN_COUNT,
                ALLOCATOR, AUTO_READ, AUTO_CLOSE, RCVBUF_ALLOCATOR, WRITE_BUFFER_HIGH_WATER_MARK,
                WRITE_BUFFER_LOW_WATER_MARK, WRITE_BUFFER_WATER_MARK, MESSAGE_SIZE_ESTIMATOR,
                SINGLE_EVENTEXECUTOR_PER_GROUP);
    }
    // Collects the current value of each given option into the supplied map,
    // creating an identity-keyed map when none is supplied.  Intended for
    // subclasses to extend getOptions() with their own options.
    protected Map<ChannelOption<?>, Object> getOptions(
            Map<ChannelOption<?>, Object> result, ChannelOption<?>... options) {
        if (result == null) {
            result = new IdentityHashMap<ChannelOption<?>, Object>();
        }
        for (ChannelOption<?> o: options) {
            result.put(o, getOption(o));
        }
        return result;
    }
    @SuppressWarnings("unchecked")
    @Override
    public boolean setOptions(Map<ChannelOption<?>, ?> options) {
        if (options == null) {
            throw new NullPointerException("options");
        }
        // Returns false if any single option was not recognized, but still
        // applies all the ones that were.
        boolean setAllOptions = true;
        for (Entry<ChannelOption<?>, ?> e: options.entrySet()) {
            if (!setOption((ChannelOption<Object>) e.getKey(), e.getValue())) {
                setAllOptions = false;
            }
        }
        return setAllOptions;
    }
    @Override
    @SuppressWarnings({ "unchecked", "deprecation" })
    public <T> T getOption(ChannelOption<T> option) {
        if (option == null) {
            throw new NullPointerException("option");
        }
        if (option == CONNECT_TIMEOUT_MILLIS) {
            return (T) Integer.valueOf(getConnectTimeoutMillis());
        }
        if (option == MAX_MESSAGES_PER_READ) {
            return (T) Integer.valueOf(getMaxMessagesPerRead());
        }
        if (option == WRITE_SPIN_COUNT) {
            return (T) Integer.valueOf(getWriteSpinCount());
        }
        if (option == ALLOCATOR) {
            return (T) getAllocator();
        }
        if (option == RCVBUF_ALLOCATOR) {
            return (T) getRecvByteBufAllocator();
        }
        if (option == AUTO_READ) {
            return (T) Boolean.valueOf(isAutoRead());
        }
        if (option == AUTO_CLOSE) {
            return (T) Boolean.valueOf(isAutoClose());
        }
        if (option == WRITE_BUFFER_HIGH_WATER_MARK) {
            return (T) Integer.valueOf(getWriteBufferHighWaterMark());
        }
        if (option == WRITE_BUFFER_LOW_WATER_MARK) {
            return (T) Integer.valueOf(getWriteBufferLowWaterMark());
        }
        if (option == WRITE_BUFFER_WATER_MARK) {
            return (T) getWriteBufferWaterMark();
        }
        if (option == MESSAGE_SIZE_ESTIMATOR) {
            return (T) getMessageSizeEstimator();
        }
        if (option == SINGLE_EVENTEXECUTOR_PER_GROUP) {
            return (T) Boolean.valueOf(getPinEventExecutorPerGroup());
        }
        // Unknown option: null, matching setOption()'s "false" convention.
        return null;
    }
    @Override
    @SuppressWarnings("deprecation")
    public <T> boolean setOption(ChannelOption<T> option, T value) {
        validate(option, value);
        if (option == CONNECT_TIMEOUT_MILLIS) {
            setConnectTimeoutMillis((Integer) value);
        } else if (option == MAX_MESSAGES_PER_READ) {
            setMaxMessagesPerRead((Integer) value);
        } else if (option == WRITE_SPIN_COUNT) {
            setWriteSpinCount((Integer) value);
        } else if (option == ALLOCATOR) {
            setAllocator((ByteBufAllocator) value);
        } else if (option == RCVBUF_ALLOCATOR) {
            setRecvByteBufAllocator((RecvByteBufAllocator) value);
        } else if (option == AUTO_READ) {
            setAutoRead((Boolean) value);
        } else if (option == AUTO_CLOSE) {
            setAutoClose((Boolean) value);
        } else if (option == WRITE_BUFFER_HIGH_WATER_MARK) {
            setWriteBufferHighWaterMark((Integer) value);
        } else if (option == WRITE_BUFFER_LOW_WATER_MARK) {
            setWriteBufferLowWaterMark((Integer) value);
        } else if (option == WRITE_BUFFER_WATER_MARK) {
            setWriteBufferWaterMark((WriteBufferWaterMark) value);
        } else if (option == MESSAGE_SIZE_ESTIMATOR) {
            setMessageSizeEstimator((MessageSizeEstimator) value);
        } else if (option == SINGLE_EVENTEXECUTOR_PER_GROUP) {
            setPinEventExecutorPerGroup((Boolean) value);
        } else {
            // Option not supported by this config (a subclass may handle it).
            return false;
        }
        return true;
    }
    // Null-checks the option and delegates value validation to the option
    // itself; throws if the value is unsuitable.
    protected <T> void validate(ChannelOption<T> option, T value) {
        if (option == null) {
            throw new NullPointerException("option");
        }
        option.validate(value);
    }
    @Override
    public int getConnectTimeoutMillis() {
        return connectTimeoutMillis;
    }
    @Override
    public ChannelConfig setConnectTimeoutMillis(int connectTimeoutMillis) {
        checkPositiveOrZero(connectTimeoutMillis, "connectTimeoutMillis");
        this.connectTimeoutMillis = connectTimeoutMillis;
        return this;
    }
    /**
     * {@inheritDoc}
     * <p>
     * @throws IllegalStateException if {@link #getRecvByteBufAllocator()} does not return an object of type
     * {@link MaxMessagesRecvByteBufAllocator}.
     */
    @Override
    @Deprecated
    public int getMaxMessagesPerRead() {
        // Deprecated bridge: the value now lives on the RecvByteBufAllocator.
        try {
            MaxMessagesRecvByteBufAllocator allocator = getRecvByteBufAllocator();
            return allocator.maxMessagesPerRead();
        } catch (ClassCastException e) {
            throw new IllegalStateException("getRecvByteBufAllocator() must return an object of type " +
                    "MaxMessagesRecvByteBufAllocator", e);
        }
    }
    /**
     * {@inheritDoc}
     * <p>
     * @throws IllegalStateException if {@link #getRecvByteBufAllocator()} does not return an object of type
     * {@link MaxMessagesRecvByteBufAllocator}.
     */
    @Override
    @Deprecated
    public ChannelConfig setMaxMessagesPerRead(int maxMessagesPerRead) {
        // Deprecated bridge: forwards to the RecvByteBufAllocator.
        try {
            MaxMessagesRecvByteBufAllocator allocator = getRecvByteBufAllocator();
            allocator.maxMessagesPerRead(maxMessagesPerRead);
            return this;
        } catch (ClassCastException e) {
            throw new IllegalStateException("getRecvByteBufAllocator() must return an object of type " +
                    "MaxMessagesRecvByteBufAllocator", e);
        }
    }
    @Override
    public int getWriteSpinCount() {
        return writeSpinCount;
    }
    @Override
    public ChannelConfig setWriteSpinCount(int writeSpinCount) {
        checkPositive(writeSpinCount, "writeSpinCount");
        // Integer.MAX_VALUE is used as a special value in the channel implementations to indicate the channel cannot
        // accept any more data, and results in the writeOp being set on the selector (or execute a runnable which tries
        // to flush later because the writeSpinCount quantum has been exhausted). This strategy prevents additional
        // conditional logic in the channel implementations, and shouldn't be noticeable in practice.
        if (writeSpinCount == Integer.MAX_VALUE) {
            --writeSpinCount;
        }
        this.writeSpinCount = writeSpinCount;
        return this;
    }
    @Override
    public ByteBufAllocator getAllocator() {
        return allocator;
    }
    @Override
    public ChannelConfig setAllocator(ByteBufAllocator allocator) {
        if (allocator == null) {
            throw new NullPointerException("allocator");
        }
        this.allocator = allocator;
        return this;
    }
    @SuppressWarnings("unchecked")
    @Override
    public <T extends RecvByteBufAllocator> T getRecvByteBufAllocator() {
        return (T) rcvBufAllocator;
    }
    @Override
    public ChannelConfig setRecvByteBufAllocator(RecvByteBufAllocator allocator) {
        rcvBufAllocator = checkNotNull(allocator, "allocator");
        return this;
    }
    /**
     * Set the {@link RecvByteBufAllocator} which is used for the channel to allocate receive buffers.
     * @param allocator the allocator to set.
     * @param metadata Used to set the {@link ChannelMetadata#defaultMaxMessagesPerRead()} if {@code allocator}
     * is of type {@link MaxMessagesRecvByteBufAllocator}.
     */
    private void setRecvByteBufAllocator(RecvByteBufAllocator allocator, ChannelMetadata metadata) {
        if (allocator instanceof MaxMessagesRecvByteBufAllocator) {
            // Seed the allocator with the channel-type-specific default.
            ((MaxMessagesRecvByteBufAllocator) allocator).maxMessagesPerRead(metadata.defaultMaxMessagesPerRead());
        } else if (allocator == null) {
            throw new NullPointerException("allocator");
        }
        setRecvByteBufAllocator(allocator);
    }
    @Override
    public boolean isAutoRead() {
        return autoRead == 1;
    }
    @Override
    public ChannelConfig setAutoRead(boolean autoRead) {
        // Atomically flip the flag and act only on an actual transition:
        // off->on triggers an immediate read, on->off notifies the subclass.
        boolean oldAutoRead = AUTOREAD_UPDATER.getAndSet(this, autoRead ? 1 : 0) == 1;
        if (autoRead && !oldAutoRead) {
            channel.read();
        } else if (!autoRead && oldAutoRead) {
            autoReadCleared();
        }
        return this;
    }
    /**
     * Is called once {@link #setAutoRead(boolean)} is called with {@code false} and {@link #isAutoRead()} was
     * {@code true} before.
     */
    protected void autoReadCleared() { }
    @Override
    public boolean isAutoClose() {
        return autoClose;
    }
    @Override
    public ChannelConfig setAutoClose(boolean autoClose) {
        this.autoClose = autoClose;
        return this;
    }
    @Override
    public int getWriteBufferHighWaterMark() {
        return writeBufferWaterMark.high();
    }
    @Override
    public ChannelConfig setWriteBufferHighWaterMark(int writeBufferHighWaterMark) {
        checkPositiveOrZero(writeBufferHighWaterMark, "writeBufferHighWaterMark");
        // CAS loop: re-read the current mark, validate against its low value,
        // and retry if another thread swapped the mark concurrently.
        for (;;) {
            WriteBufferWaterMark waterMark = writeBufferWaterMark;
            if (writeBufferHighWaterMark < waterMark.low()) {
                throw new IllegalArgumentException(
                        "writeBufferHighWaterMark cannot be less than " +
                                "writeBufferLowWaterMark (" + waterMark.low() + "): " +
                                writeBufferHighWaterMark);
            }
            if (WATERMARK_UPDATER.compareAndSet(this, waterMark,
                    new WriteBufferWaterMark(waterMark.low(), writeBufferHighWaterMark, false))) {
                return this;
            }
        }
    }
    @Override
    public int getWriteBufferLowWaterMark() {
        return writeBufferWaterMark.low();
    }
    @Override
    public ChannelConfig setWriteBufferLowWaterMark(int writeBufferLowWaterMark) {
        checkPositiveOrZero(writeBufferLowWaterMark, "writeBufferLowWaterMark");
        // CAS loop mirroring setWriteBufferHighWaterMark, validating against
        // the current high value.
        for (;;) {
            WriteBufferWaterMark waterMark = writeBufferWaterMark;
            if (writeBufferLowWaterMark > waterMark.high()) {
                throw new IllegalArgumentException(
                        "writeBufferLowWaterMark cannot be greater than " +
                                "writeBufferHighWaterMark (" + waterMark.high() + "): " +
                                writeBufferLowWaterMark);
            }
            if (WATERMARK_UPDATER.compareAndSet(this, waterMark,
                    new WriteBufferWaterMark(writeBufferLowWaterMark, waterMark.high(), false))) {
                return this;
            }
        }
    }
    @Override
    public ChannelConfig setWriteBufferWaterMark(WriteBufferWaterMark writeBufferWaterMark) {
        this.writeBufferWaterMark = checkNotNull(writeBufferWaterMark, "writeBufferWaterMark");
        return this;
    }
    @Override
    public WriteBufferWaterMark getWriteBufferWaterMark() {
        return writeBufferWaterMark;
    }
    @Override
    public MessageSizeEstimator getMessageSizeEstimator() {
        return msgSizeEstimator;
    }
    @Override
    public ChannelConfig setMessageSizeEstimator(MessageSizeEstimator estimator) {
        if (estimator == null) {
            throw new NullPointerException("estimator");
        }
        msgSizeEstimator = estimator;
        return this;
    }
    // Only reachable through the SINGLE_EVENTEXECUTOR_PER_GROUP channel option.
    private ChannelConfig setPinEventExecutorPerGroup(boolean pinEventExecutor) {
        this.pinEventExecutor = pinEventExecutor;
        return this;
    }
    private boolean getPinEventExecutorPerGroup() {
        return pinEventExecutor;
    }
}
| apache-2.0 |
cnopens/scrms | src/main/java/com/mossle/core/hibernate/HibernateUtils.java | 8195 | package com.mossle.core.hibernate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.mossle.core.util.BeanUtils;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.criterion.CriteriaSpecification;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Projection;
import org.hibernate.criterion.Restrictions;
import org.hibernate.internal.CriteriaImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.Assert;
/**
* hibernate utils.
*
* @author Lingo
*/
/**
 * Static helpers for building and manipulating Hibernate queries and criteria.
 *
 * @author Lingo
 */
public class HibernateUtils {
    /** logger. */
    private static Logger logger = LoggerFactory
            .getLogger(HibernateUtils.class);
    /** protected constructor. */
    protected HibernateUtils() {
    }
    /**
     * Converts a count-query result to an int, treating null as zero.
     *
     * @param result
     *            Object
     * @return Integer
     */
    public static Integer getNumber(Object result) {
        if (result == null) {
            return 0;
        } else {
            return ((Number) result).intValue();
        }
    }
    /**
     * Strips the select clause from an HQL string (union queries are not
     * handled); used by paged count queries.
     *
     * @param hql
     *            HQL string
     * @return the string with the select clause removed
     * @see HibernatePagingDao#pagedQuery(String,int,int,Object[])
     */
    public static String removeSelect(String hql) {
        Assert.hasText(hql);
        if (hql.toLowerCase(Locale.ENGLISH).indexOf("distinct") != -1) {
            logger.warn(
                    "there is a distinct in paged query hql : [{}], this maybe cause an unexpected result",
                    hql);
        }
        // Use Locale.ENGLISH consistently (was Locale.CHINA); keyword matching
        // must not depend on locale-specific case mappings.
        int beginPos = hql.toLowerCase(Locale.ENGLISH).indexOf("from");
        Assert.isTrue(beginPos != -1, " hql : " + hql
                + " must has a keyword 'from'");
        return hql.substring(beginPos);
    }
    /**
     * Strips the order-by clause from an HQL string; used by paged count
     * queries.
     *
     * @param hql
     *            HQL string
     * @return the string with any order-by clause removed
     * @see HibernatePagingDao#pagedQuery(String,int,int,Object[])
     */
    public static String removeOrders(String hql) {
        Assert.hasText(hql);
        // Matches "order by" and everything after it, case-insensitively.
        Pattern p = Pattern.compile("order\\s*by[\\w|\\W|\\s|\\S]*",
                Pattern.CASE_INSENSITIVE);
        Matcher m = p.matcher(hql);
        StringBuffer sb = new StringBuffer();
        while (m.find()) {
            m.appendReplacement(sb, "");
        }
        m.appendTail(sb);
        return sb.toString();
    }
    /**
     * Configures the query to return distinct root entities.
     *
     * @param query
     *            Query
     * @return Query
     */
    public static Query distinct(Query query) {
        query.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
        return query;
    }
    /**
     * Configures the criteria to return distinct root entities.
     *
     * @param criteria
     *            Criteria
     * @return Criteria
     */
    public static Criteria distinct(Criteria criteria) {
        criteria.setResultTransformer(CriteriaSpecification.DISTINCT_ROOT_ENTITY);
        return criteria;
    }
    /**
     * Reads the projection off a criteria instance.
     *
     * @param criteria
     *            Criteria
     * @return Projection
     * @throws IllegalArgumentException
     *             if the criteria is not a CriteriaImpl
     */
    public static Projection findProjection(Criteria criteria) {
        if (criteria instanceof CriteriaImpl) {
            return ((CriteriaImpl) criteria).getProjection();
        } else {
            throw new IllegalArgumentException(criteria
                    + " is not a CriteriaImpl");
        }
    }
    /**
     * Reads the order entries off a criteria instance via reflection.
     *
     * @param criteria
     *            Criteria
     * @return List
     */
    public static List findOrderEntries(Criteria criteria) {
        return (List) BeanUtils.safeGetFieldValue(criteria, "orderEntries");
    }
    /**
     * Writes the order entries onto a criteria instance via reflection.
     *
     * @param criteria
     *            Criteria
     * @param orderEntries
     *            List
     */
    public static void setOrderEntries(Criteria criteria, List orderEntries) {
        BeanUtils.safeSetFieldValue(criteria, "orderEntries", orderEntries);
    }
    /**
     * Builds a single Criterion from a property name, value and match type.
     *
     * @param propertyName
     *            String
     * @param propertyValue
     *            Object
     * @param matchType
     *            MatchType
     * @return Criterion
     */
    public static Criterion buildCriterion(String propertyName,
            Object propertyValue, MatchType matchType) {
        Assert.hasText(propertyName, "propertyName不能为空");
        Criterion criterion = null;
        // Construct the criterion according to the match type.
        switch (matchType) {
        case EQ:
            criterion = Restrictions.eq(propertyName, propertyValue);
            break;
        case LIKE:
            criterion = Restrictions.like(propertyName, (String) propertyValue,
                    MatchMode.ANYWHERE);
            break;
        case LE:
            criterion = Restrictions.le(propertyName, propertyValue);
            break;
        case LT:
            criterion = Restrictions.lt(propertyName, propertyValue);
            break;
        case GE:
            criterion = Restrictions.ge(propertyName, propertyValue);
            break;
        case GT:
            criterion = Restrictions.gt(propertyName, propertyValue);
            break;
        case IN:
            criterion = Restrictions.in(propertyName,
                    (Collection) propertyValue);
            break;
        default:
            criterion = Restrictions.eq(propertyName, propertyValue);
            break;
        }
        return criterion;
    }
    /**
     * Builds a Criterion array from a list of property filters.
     *
     * @param filters
     *            List
     * @return Criterion[]
     */
    public static Criterion[] buildCriterion(List<PropertyFilter> filters) {
        List<Criterion> criterionList = new ArrayList<Criterion>();
        for (PropertyFilter filter : filters) {
            // A single property to compare against.
            if (!filter.hasMultiProperties()) {
                Criterion criterion = buildCriterion(filter.getPropertyName(),
                        filter.getMatchValue(), filter.getMatchType());
                criterionList.add(criterion);
            } else {
                // Multiple properties: combine them with OR.
                Disjunction disjunction = Restrictions.disjunction();
                for (String param : filter.getPropertyNames()) {
                    Criterion criterion = buildCriterion(param,
                            filter.getMatchValue(), filter.getMatchType());
                    disjunction.add(criterion);
                }
                criterionList.add(disjunction);
            }
        }
        return criterionList.toArray(new Criterion[criterionList.size()]);
    }
    /**
     * Appends a condition for the given filter to the HQL/SQL being built,
     * inserting "where" or "and" as appropriate.
     *
     * @param buff
     *            the query string under construction
     * @param propertyFilter
     *            the filter to render
     */
    public static void buildQuery(StringBuilder buff,
            PropertyFilter propertyFilter) {
        // Use Locale.ENGLISH explicitly: the default-locale toLowerCase() is
        // broken under locales with special case mappings (e.g. Turkish 'I'),
        // which would make the "where" check fail and corrupt the query.
        if (buff.toString().toLowerCase(Locale.ENGLISH).indexOf("where") == -1) {
            buff.append(" where ");
        } else {
            buff.append(" and ");
        }
        buff.append(propertyFilter.getPropertyName());
        switch (propertyFilter.getMatchType()) {
        case EQ:
            buff.append(" =:");
            break;
        case LIKE:
            buff.append(" like:");
            break;
        case LE:
            buff.append(" <=:");
            break;
        case LT:
            buff.append(" <:");
            break;
        case GE:
            buff.append(" >=:");
            break;
        case GT:
            buff.append(" >:");
            break;
        case IN:
            buff.append(" in :");
            break;
        default:
            buff.append(" =:");
            break;
        }
        buff.append(propertyFilter.getPropertyName().replaceAll("\\.", "_"));
    }
}
| apache-2.0 |
nunezro2/cassandra_cs597 | src/java/org/apache/cassandra/cql3/functions/FunctionCall.java | 5914 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.functions;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.apache.cassandra.cql3.ColumnSpecification;
import org.apache.cassandra.cql3.Constants;
import org.apache.cassandra.cql3.Lists;
import org.apache.cassandra.cql3.Maps;
import org.apache.cassandra.cql3.Sets;
import org.apache.cassandra.cql3.Term;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.CollectionType;
import org.apache.cassandra.db.marshal.ListType;
import org.apache.cassandra.db.marshal.MapType;
import org.apache.cassandra.db.marshal.SetType;
import org.apache.cassandra.exceptions.InvalidRequestException;
/**
 * A CQL3 term that is a call to a native function, e.g. {@code token(...)}.
 * Holds the resolved {@link Function} and its (possibly non-terminal)
 * argument terms; binding evaluates the arguments and executes the function.
 */
public class FunctionCall extends Term.NonTerminal
{
    private final Function fun;
    private final List<Term> terms;
    private FunctionCall(Function fun, List<Term> terms)
    {
        this.fun = fun;
        this.terms = terms;
    }
    public void collectMarkerSpecification(ColumnSpecification[] boundNames)
    {
        // Delegate to the arguments: any bind markers live inside them.
        for (Term t : terms)
            t.collectMarkerSpecification(boundNames);
    }
    public Term.Terminal bind(List<ByteBuffer> values) throws InvalidRequestException
    {
        return makeTerminal(fun, bindAndGet(values));
    }
    public ByteBuffer bindAndGet(List<ByteBuffer> values) throws InvalidRequestException
    {
        List<ByteBuffer> buffers = new ArrayList<ByteBuffer>(terms.size());
        for (Term t : terms)
        {
            // For now, we don't allow nulls as argument as no existing function needs it and it
            // simplify things.
            ByteBuffer val = t.bindAndGet(values);
            if (val == null)
                throw new InvalidRequestException(String.format("Invalid null value for argument to %s", fun));
            buffers.add(val);
        }
        return fun.execute(buffers);
    }
    // Wraps the raw function result in the terminal type matching the
    // function's return type (plain constant, or list/set/map value).
    private static Term.Terminal makeTerminal(Function fun, ByteBuffer result) throws InvalidRequestException
    {
        if (!(fun.returnType() instanceof CollectionType))
            return new Constants.Value(result);
        switch (((CollectionType)fun.returnType()).kind)
        {
            case LIST: return Lists.Value.fromSerialized(result, (ListType)fun.returnType());
            case SET:  return Sets.Value.fromSerialized(result, (SetType)fun.returnType());
            case MAP:  return Maps.Value.fromSerialized(result, (MapType)fun.returnType());
        }
        // Unreachable: the switch above covers every collection kind.
        throw new AssertionError();
    }
    /**
     * The unprepared form of a function call: just a name and raw argument
     * terms, resolved against the receiver column during prepare().
     */
    public static class Raw implements Term.Raw
    {
        private final String functionName;
        private final List<Term.Raw> terms;
        public Raw(String functionName, List<Term.Raw> terms)
        {
            this.functionName = functionName;
            this.terms = terms;
        }
        public Term prepare(ColumnSpecification receiver) throws InvalidRequestException
        {
            Function fun = Functions.get(functionName, terms, receiver);
            List<Term> parameters = new ArrayList<Term>(terms.size());
            boolean allTerminal = true;
            for (int i = 0; i < terms.size(); i++)
            {
                Term t = terms.get(i).prepare(Functions.makeArgSpec(receiver, fun, i));
                if (t instanceof NonTerminal)
                    allTerminal = false;
                parameters.add(t);
            }
            // Constant-fold: if every argument is already a terminal, execute
            // the function at prepare time instead of at bind time.
            return allTerminal
                ? makeTerminal(fun, execute(fun, parameters))
                : new FunctionCall(fun, parameters);
        }
        // All parameters must be terminal
        private static ByteBuffer execute(Function fun, List<Term> parameters) throws InvalidRequestException
        {
            List<ByteBuffer> buffers = new ArrayList<ByteBuffer>(parameters.size());
            for (Term t : parameters)
            {
                assert t instanceof Term.Terminal;
                buffers.add(((Term.Terminal)t).get());
            }
            return fun.execute(buffers);
        }
        public boolean isAssignableTo(ColumnSpecification receiver)
        {
            AbstractType<?> returnType = Functions.getReturnType(functionName, receiver.ksName, receiver.cfName);
            // Note: if returnType == null, it means the function doesn't exist. We may get this if an undefined function
            // is used as argument of another, existing, function. In that case, we return true here because we'll catch
            // the fact that the method is undefined latter anyway and with a more helpful error message that if we were
            // to return false here.
            return returnType == null || receiver.type.asCQL3Type().equals(returnType.asCQL3Type());
        }
        @Override
        public String toString()
        {
            StringBuilder sb = new StringBuilder();
            sb.append(functionName).append("(");
            for (int i = 0; i < terms.size(); i++)
            {
                if (i > 0) sb.append(", ");
                sb.append(terms.get(i));
            }
            return sb.append(")").toString();
        }
    }
}
| apache-2.0 |
craigyam/amalgam8 | vendor/github.com/shirou/gopsutil/net/net_test.go | 5046 | package net
import (
"fmt"
"os"
"runtime"
"testing"
"github.com/shirou/gopsutil/internal/common"
)
func TestAddrString(t *testing.T) {
	// The String form must be the JSON encoding of the struct.
	addr := Addr{IP: "192.168.0.1", Port: 8000}
	want := "{\"ip\":\"192.168.0.1\",\"port\":8000}"
	if got := fmt.Sprintf("%v", addr); got != want {
		t.Errorf("Addr string is invalid: %v", addr)
	}
}
func TestNetIOCountersStatString(t *testing.T) {
	// The String form must be the JSON encoding of the struct, with all
	// zero-valued fields rendered explicitly.
	stat := IOCountersStat{
		Name:      "test",
		BytesSent: 100,
	}
	want := `{"name":"test","bytesSent":100,"bytesRecv":0,"packetsSent":0,"packetsRecv":0,"errin":0,"errout":0,"dropin":0,"dropout":0,"fifoin":0,"fifoout":0}`
	if fmt.Sprintf("%v", stat) != want {
		t.Errorf("NetIOCountersStat string is invalid: %v", stat)
	}
}
func TestNetProtoCountersStatString(t *testing.T) {
	// Stats keys must serialize in sorted order for a stable String form.
	stat := ProtoCountersStat{
		Protocol: "tcp",
		Stats: map[string]int64{
			"MaxConn":      -1,
			"ActiveOpens":  4000,
			"PassiveOpens": 3000,
		},
	}
	want := `{"protocol":"tcp","stats":{"ActiveOpens":4000,"MaxConn":-1,"PassiveOpens":3000}}`
	if fmt.Sprintf("%v", stat) != want {
		t.Errorf("NetProtoCountersStat string is invalid: %v", stat)
	}
}
func TestNetConnectionStatString(t *testing.T) {
	// Zero-valued address fields must still appear in the JSON form.
	conn := ConnectionStat{
		Fd:     10,
		Family: 10,
		Type:   10,
	}
	want := `{"fd":10,"family":10,"type":10,"localaddr":{"ip":"","port":0},"remoteaddr":{"ip":"","port":0},"status":"","pid":0}`
	if fmt.Sprintf("%v", conn) != want {
		t.Errorf("NetConnectionStat string is invalid: %v", conn)
	}
}
// TestNetIOCountersAll checks that the aggregated ("all") counters equal the
// sum of the per-NIC counters.
func TestNetIOCountersAll(t *testing.T) {
	// BUG FIX: the error from the first IOCounters call was previously
	// overwritten by the second call before being checked.
	v, err := IOCounters(false)
	if err != nil {
		t.Fatalf("Could not get NetIOCounters: %v", err)
	}
	per, err := IOCounters(true)
	if err != nil {
		t.Fatalf("Could not get NetIOCounters: %v", err)
	}
	// Fatalf (not Errorf) here: indexing v[0] below would panic on an
	// empty slice if we continued.
	if len(v) != 1 {
		t.Fatalf("Could not get NetIOCounters: %v", v)
	}
	if v[0].Name != "all" {
		t.Errorf("Invalid NetIOCounters: %v", v)
	}
	// The "all" entry must equal the sum of the per-interface counters.
	var pr uint64
	for _, p := range per {
		pr += p.PacketsRecv
	}
	if v[0].PacketsRecv != pr {
		t.Errorf("invalid sum value: %v, %v", v[0].PacketsRecv, pr)
	}
}
// TestNetIOCountersPerNic checks that per-interface counters are returned
// and that every entry carries an interface name.
func TestNetIOCountersPerNic(t *testing.T) {
	stats, err := IOCounters(true)
	if err != nil {
		t.Errorf("Could not get NetIOCounters: %v", err)
	}
	if len(stats) == 0 {
		t.Errorf("Could not get NetIOCounters: %v", stats)
	}
	for _, s := range stats {
		if s.Name == "" {
			t.Errorf("Invalid NetIOCounters: %v", s)
		}
	}
}
// TestGetNetIOCountersAll verifies that getIOCountersAll folds per-NIC stats
// into a single "all" entry whose numeric fields are the per-NIC sums.
func TestGetNetIOCountersAll(t *testing.T) {
	// Idiom fix: the element type in the composite literal was repeated
	// (flagged by `go vet` / `gofmt -s`).
	n := []IOCountersStat{
		{
			Name:        "a",
			BytesRecv:   10,
			PacketsRecv: 10,
		},
		{
			Name:        "b",
			BytesRecv:   10,
			PacketsRecv: 10,
			Errin:       10,
		},
	}
	ret, err := getIOCountersAll(n)
	if err != nil {
		t.Error(err)
	}
	// Fatalf: indexing ret[0] below would panic on an empty result if we
	// continued after Errorf.
	if len(ret) != 1 {
		t.Fatalf("invalid return count")
	}
	if ret[0].Name != "all" {
		t.Errorf("invalid return name")
	}
	if ret[0].BytesRecv != 20 {
		t.Errorf("invalid count bytesrecv")
	}
	if ret[0].Errin != 10 {
		t.Errorf("invalid count errin")
	}
}
// TestNetInterfaces checks that Interfaces returns at least one interface
// and that every entry has a name.
func TestNetInterfaces(t *testing.T) {
	v, err := Interfaces()
	if err != nil {
		t.Errorf("Could not get NetInterfaceStat: %v", err)
	}
	// BUG FIX: this branch previously logged err, which is necessarily nil
	// here; log the (empty) result instead.
	if len(v) == 0 {
		t.Errorf("Could not get NetInterfaceStat: %v", v)
	}
	for _, vv := range v {
		if vv.Name == "" {
			t.Errorf("Invalid NetInterface: %v", vv)
		}
	}
}
// TestNetProtoCountersStatsAll checks that ProtoCounters(nil) returns stats
// for all protocols, each with a non-empty name and stats map.
func TestNetProtoCountersStatsAll(t *testing.T) {
	v, err := ProtoCounters(nil)
	if err != nil {
		t.Fatalf("Could not get NetProtoCounters: %v", err)
	}
	// BUG FIX: this branch previously logged err, which is necessarily nil
	// here; log the (empty) result instead.
	if len(v) == 0 {
		t.Fatalf("Could not get NetProtoCounters: %v", v)
	}
	for _, vv := range v {
		if vv.Protocol == "" {
			t.Errorf("Invalid NetProtoCountersStat: %v", vv)
		}
		if len(vv.Stats) == 0 {
			t.Errorf("Invalid NetProtoCountersStat: %v", vv)
		}
	}
}
// TestNetProtoCountersStats checks that ProtoCounters returns exactly the
// requested protocols ("tcp" and "ip") with non-empty stats.
func TestNetProtoCountersStats(t *testing.T) {
	v, err := ProtoCounters([]string{"tcp", "ip"})
	if err != nil {
		t.Fatalf("Could not get NetProtoCounters: %v", err)
	}
	// BUG FIX: message typo ("Go" -> "Got") and it previously printed err,
	// which is necessarily nil here. The separate len(v)==0 check was
	// unreachable given this one and has been folded in.
	if len(v) != 2 {
		t.Fatalf("Got incorrect number of NetProtoCounters: %v", len(v))
	}
	for _, vv := range v {
		if vv.Protocol != "tcp" && vv.Protocol != "ip" {
			t.Errorf("Invalid NetProtoCountersStat: %v", vv)
		}
		if len(vv.Stats) == 0 {
			t.Errorf("Invalid NetProtoCountersStat: %v", vv)
		}
	}
}
// TestNetConnections checks that Connections("inet") returns at least one
// connection, each with a valid address family.
func TestNetConnections(t *testing.T) {
	if os.Getenv("CI") != "" {
		// skip when running under CI (e.g. drone.io)
		return
	}
	conns, err := Connections("inet")
	if err != nil {
		t.Errorf("could not get NetConnections: %v", err)
	}
	if len(conns) == 0 {
		t.Errorf("could not get NetConnections: %v", conns)
	}
	for _, c := range conns {
		if c.Family == 0 {
			t.Errorf("invalid NetConnections: %v", c)
		}
	}
}
// TestNetFilterCounters checks that FilterCounters returns netfilter
// conntrack counters with a positive maximum.
func TestNetFilterCounters(t *testing.T) {
	if ci := os.Getenv("CI"); ci != "" { // skip if test on drone.io
		return
	}
	if runtime.GOOS == "linux" {
		// Some test environments do not expose the conntrack proc files.
		// BUG FIX: the path previously read "nf_conntrackCount"; the real
		// procfs entry is nf_conntrack_count.
		if !common.PathExists("/proc/sys/net/netfilter/nf_conntrack_count") {
			t.SkipNow()
		}
	}
	v, err := FilterCounters()
	// BUG FIX: messages previously said "NetConnections" (copy-paste from
	// the previous test).
	if err != nil {
		t.Errorf("could not get NetFilterCounters: %v", err)
	}
	if len(v) == 0 {
		t.Errorf("could not get NetFilterCounters: %v", v)
	}
	for _, vv := range v {
		if vv.ConnTrackMax == 0 {
			t.Errorf("nf_conntrack_max needs to be greater than zero: %v", vv)
		}
	}
}
| apache-2.0 |
ricepanda/rice-git2 | rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/view/ExpressionEvaluator.java | 10859 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.uif.view;
import org.kuali.rice.krad.datadictionary.uif.UifDictionaryBean;
import java.util.List;
import java.util.Map;
/**
 * Provides evaluation of expression language statements against a given context.
 *
 * <p>
 * Used within the UI framework to allow conditional logic to be configured through
 * the XML which can alter the values of component properties
 * </p>
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public interface ExpressionEvaluator {
    /**
     * Indicator that can be added to a property name to indicate the expression result should be added to the
     * property (assumed to be a collection) instead of replaced
     */
    String EMBEDDED_PROPERTY_NAME_ADD_INDICATOR = ".add";
    /**
     * Initializes the expression context for the given expression context object
     *
     * <p>
     * The object given here will form the default context for expression terms (terms without any
     * variable prefix)
     * </p>
     *
     * @param contextObject instance of an Object
     */
    void initializeEvaluationContext(Object contextObject);
    /**
     * Evaluates any el expressions that are found as a string property value
     * for the object
     *
     * <p>
     * Using reflection the properties for the object are retrieved and if of
     * <code>String</code> type the corresponding value is retrieved. If the
     * value is not empty and contains the el placeholder see
     * {@link #containsElPlaceholder(String)} then the expression is evaluated
     * using the given context object and parameters. The evaluated string is
     * then set as the new property value, or in the case of a template
     * (expression contained within a literal string), the expression part is
     * replaced in the property value.
     * </p>
     *
     * <p>
     * In addition to evaluating any property expressions, any configured
     * <code>PropertyReplacer</code> for the object are also evaluated and if a
     * match occurs those property replacements are made
     * </p>
     *
     * @param view view instance being rendered
     * @param expressionConfigurable object whose properties should be checked for expressions
     * and evaluated
     * @param evaluationParameters map of parameters that may appear in expressions, the map
     * key gives the parameter name that may appear in the expression, and the map value is the object that expression
     * should evaluate against when that name is found
     */
    void evaluateExpressionsOnConfigurable(View view, UifDictionaryBean expressionConfigurable,
            Map<String, Object> evaluationParameters);
    /**
     * Evaluates the given expression template string against the context object
     * and map of parameters
     *
     * <p>
     * If the template string contains one or more el placeholders (see
     * {@link #containsElPlaceholder(String)}), the expression contained within
     * the placeholder will be evaluated and the corresponding value will be
     * substituted back into the property value where the placeholder occurred.
     * If no placeholders are found, the string will be returned unchanged
     * </p>
     *
     * @param evaluationParameters map of parameters that may appear in expressions, the map
     * key gives the parameter name that may appear in the expression, and the map value is the object that expression
     * should evaluate against when that name is found
     * @param expressionTemplate string that should be evaluated for el expressions
     * @return String formed by replacing any el expressions in the original expression template with
     * their corresponding evaluation results
     */
    String evaluateExpressionTemplate(Map<String, Object> evaluationParameters, String expressionTemplate);
    /**
     * Evaluates the configured expression for the given property name (if one exists) on the given configurable
     *
     * @param view view instance the configurable is associated with, used to adjust binding prefixes
     * @param evaluationParameters map that will be exposed as EL parameters
     * @param expressionConfigurable configurable object to pull and evaluate the expression on
     * @param propertyName name of the property whose expression should be evaluated
     * @param removeExpression boolean that indicates whether the expression should be removed after evaluation
     */
    void evaluatePropertyExpression(View view, Map<String, Object> evaluationParameters,
            UifDictionaryBean expressionConfigurable, String propertyName, boolean removeExpression);
    /**
     * Evaluates the given el expression against the context object and
     * parameters, and returns the result of the evaluation
     *
     * <p>
     * The given expression string is assumed to be one el expression and should
     * not contain the el placeholders. The returned result depends on the
     * evaluation and what type it returns; for instance a boolean will be
     * returned for a boolean expression, or a string for a string expression
     * </p>
     *
     * @param evaluationParameters map of parameters that may appear in expressions, the map
     * key gives the parameter name that may appear in the expression, and the map value is the object that expression
     * should evaluate against when that name is found
     * @param expression el expression to evaluate
     * @return Object result of the expression evaluation
     */
    Object evaluateExpression(Map<String, Object> evaluationParameters, String expression);
    /**
     * Indicates whether the given string contains the el placeholder
     * (begin and end delimiters)
     *
     * @param value String to check for contained placeholders
     * @return boolean true if the string contains one or more placeholders, false if it contains none
     * @see org.kuali.rice.krad.uif.UifConstants#EL_PLACEHOLDER_PREFIX
     * @see org.kuali.rice.krad.uif.UifConstants#EL_PLACEHOLDER_SUFFIX
     */
    boolean containsElPlaceholder(String value);
    /**
     * Adjusts the property expressions for a given object
     *
     * <p>
     * The {@link org.kuali.rice.krad.uif.UifConstants#NO_BIND_ADJUST_PREFIX} prefix will be removed
     * as this is a placeholder indicating that the property is directly on the form.
     * The {@link org.kuali.rice.krad.uif.UifConstants#FIELD_PATH_BIND_ADJUST_PREFIX} prefix will be replaced by
     * the object's field path - this is only applicable to DataFields. The
     * {@link org.kuali.rice.krad.uif.UifConstants#DEFAULT_PATH_BIND_ADJUST_PREFIX} prefix will be replaced
     * by the view's default path if it is set.
     * </p>
     *
     * @param view the parent view of the object
     * @param object Object to adjust property expressions on
     * @param expression The expression to adjust
     * @return the adjusted expression String
     */
    String replaceBindingPrefixes(View view, Object object, String expression);
    /**
     * Pulls expressions within the expressionConfigurable's expression graph and moves them to the property
     * expressions
     * map for the expressionConfigurable or a nested expressionConfigurable (for the case of nested expression property
     * names)
     *
     * <p>
     * Expressions that are configured on properties are pulled out by the {@link org.kuali.rice.krad.uif.util.UifBeanFactoryPostProcessor}
     * and put in the {@link org.kuali.rice.krad.datadictionary.uif.UifDictionaryBean#getExpressionGraph()} for the bean
     * that is
     * at root (non nested) level. Before evaluating the expressions, they need to be moved to the
     * {@link org.kuali.rice.krad.datadictionary.uif.UifDictionaryBean#getPropertyExpressions()} map for the
     * expressionConfigurable that
     * property
     * is on.
     * </p>
     *
     * @param expressionConfigurable expressionConfigurable instance to process expressions for
     * @param buildRefreshGraphs indicates whether the expression graphs for component refresh should be built
     */
    void populatePropertyExpressionsFromGraph(UifDictionaryBean expressionConfigurable,
            boolean buildRefreshGraphs);
    /**
     * Takes in an expression and a list to be filled in with names(property names)
     * of controls found in the expression.
     *
     * <p>This method returns a js expression which can
     * be executed on the client to determine if the original exp was satisfied before
     * interacting with the server - ie, this js expression is equivalent to the one passed in.</p>
     *
     * <p>There are limitations on the Spring expression language that can be used as this method.
     * It is only used to parse expressions which are valid case statements for determining if
     * some action/processing should be performed. ONLY Properties, comparison operators, booleans,
     * strings, matches expression, and boolean logic are supported. Server constants and calls will be evaluated
     * early. The isValueEmpty, listContains, and emptyList custom KRAD functions, however, will be converted
     * to a js equivalent function. Properties must be a valid property on the form, and should have a visible control
     * within the view. </p>
     *
     * <p>Example valid exp: "account.name == 'Account Name'"</p>
     *
     * @param exp the expression to convert to a js condition
     * @param controlNames the list to populate with control names found in the expression (these may later be used
     * to add js change handlers)
     * @param context context map used when pre-evaluating the server-side portions of the expression
     * @return the converted expression into an equivalent js condition
     */
    String parseExpression(String exp, List<String> controlNames, Map<String, Object> context);
    /**
     * Find the control names (ie, propertyNames) used in the passed in expression
     *
     * @param exp the expression to search
     * @return the list of control names found (ie, propertyNames)
     */
    List<String> findControlNamesInExpression(String exp);
}
| apache-2.0 |
fouasnon/camunda-bpm-platform | engine/src/main/java/org/camunda/bpm/engine/FilterService.java | 6062 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine;
import java.util.List;
import org.camunda.bpm.engine.authorization.Permissions;
import org.camunda.bpm.engine.authorization.Resources;
import org.camunda.bpm.engine.filter.Filter;
import org.camunda.bpm.engine.filter.FilterQuery;
import org.camunda.bpm.engine.query.Query;
/**
 * Service for creating, querying, executing and deleting filters.
 *
 * @author Sebastian Menski
 */
public interface FilterService {
  /**
   * Creates a new task filter.
   *
   * @return a new task filter
   * @throws AuthorizationException if the user has no {@link Permissions#CREATE} permissions on {@link Resources#FILTER}.
   */
  Filter newTaskFilter();
  /**
   * Creates a new task filter with a given name.
   *
   * @return a new task filter with a name
   * @throws AuthorizationException if the user has no {@link Permissions#CREATE} permissions on {@link Resources#FILTER}.
   */
  Filter newTaskFilter(String filterName);
  /**
   * Creates a new filter query.
   *
   * @return a new query for filters
   */
  FilterQuery createFilterQuery();
  /**
   * Creates a new task filter query.
   *
   * @return a new query for task filters
   */
  FilterQuery createTaskFilterQuery();
  /**
   * Saves the filter in the database.
   *
   * @param filter the filter to save
   * @return the saved filter
   * @throws AuthorizationException if the user has no {@link Permissions#CREATE} permissions on {@link Resources#FILTER} (save new filter)
   * or if user has no {@link Permissions#UPDATE} permissions on {@link Resources#FILTER} (update existing filter).
   */
  Filter saveFilter(Filter filter);
  /**
   * Returns the filter for the given filter id.
   *
   * @param filterId the id of the filter
   * @return the filter
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  Filter getFilter(String filterId);
  /**
   * Deletes a filter by its id.
   *
   * @param filterId the id of the filter
   * @throws AuthorizationException if the user has no {@link Permissions#DELETE} permissions on {@link Resources#FILTER}.
   */
  void deleteFilter(String filterId);
  /**
   * Executes the query of the filter and returns the result as a list.
   *
   * @param filterId the id of the filter
   * @return the query result as a list
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  <T> List<T> list(String filterId);
  /**
   * Executes the extended query of a filter and returns the result as a list.
   *
   * @param filterId the id of the filter
   * @param extendingQuery additional query to extend the filter query
   * @return the query result as a list
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  <T, Q extends Query<?, T>> List<T> list(String filterId, Q extendingQuery);
  /**
   * Executes the query of the filter and returns the result in the given boundaries as a list.
   *
   * @param filterId the id of the filter
   * @param firstResult first result to select
   * @param maxResults maximal number of results
   * @return the query result as a list
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  <T> List<T> listPage(String filterId, int firstResult, int maxResults);
  /**
   * Executes the extended query of a filter and returns the result in the given boundaries as a list.
   *
   * @param extendingQuery additional query to extend the filter query
   * @param filterId the id of the filter
   * @param firstResult first result to select
   * @param maxResults maximal number of results
   * @return the query result as a list
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  <T, Q extends Query<?, T>> List<T> listPage(String filterId, Q extendingQuery, int firstResult, int maxResults);
  /**
   * Executes the query of the filter and returns a single result.
   *
   * @param filterId the id of the filter
   * @return the single query result
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  <T> T singleResult(String filterId);
  /**
   * Executes the extended query of the filter and returns a single result.
   *
   * @param filterId the id of the filter
   * @param extendingQuery additional query to extend the filter query
   * @return the single query result
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  <T, Q extends Query<?, T>> T singleResult(String filterId, Q extendingQuery);
  /**
   * Executes the query of the filter and returns the result count.
   *
   * @param filterId the id of the filter
   * @return the result count
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  Long count(String filterId);
  /**
   * Executes the extended query of the filter and returns the result count.
   *
   * @param filterId the id of the filter
   * @param extendingQuery additional query to extend the filter query
   * @return the result count
   * @throws AuthorizationException if the user has no {@link Permissions#READ} permissions on {@link Resources#FILTER}.
   */
  Long count(String filterId, Query<?, ?> extendingQuery);
}
| apache-2.0 |
nssales/OG-Platform | projects/OG-Master/src/main/java/com/opengamma/master/impl/AbstractSearchIterator.java | 5327 | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.impl;
import java.util.Iterator;
import java.util.NoSuchElementException;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.master.AbstractDocument;
import com.opengamma.master.AbstractMaster;
import com.opengamma.master.AbstractSearchRequest;
import com.opengamma.master.AbstractSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.paging.PagingRequest;
/**
 * An iterator that exposes a master's search results as an iterator.
 * <p>
 * Large systems may store a large amount of data in each master.
 * A simple search request that pulls back the entire database is unrealistic.
 * This remote iterator allows the database to be queried in a consistent way remotely,
 * fetching documents in small batches and falling back to one-at-a-time fetching
 * when a batch fails to load.
 *
 * @param <D> the type of the document
 * @param <M> the type of the master
 * @param <R> the type of the search request
 */
public abstract class AbstractSearchIterator<D extends AbstractDocument, M extends AbstractMaster<D>, R extends AbstractSearchRequest>
    implements Iterator<D> {

  /**
   * The master that is being used.
   */
  private M _master;
  /**
   * The request object that is being used.
   * Its paging request (and version-correction) is mutated during iteration.
   */
  private final R _request;
  /**
   * The last result object (the most recently fetched batch).
   */
  private AbstractSearchResult<D> _currentBatch;
  /**
   * The index of the next object within the batch result.
   */
  private int _currentBatchIndex;
  /**
   * The current document, null if not fetched, at end or removed.
   */
  private D _current;
  /**
   * The overall index of the last retrieved object across all batches.
   */
  private int _overallIndex;

  /**
   * Creates an instance based on a request.
   * <p>
   * The request will be altered during the iteration.
   *
   * @param master  the underlying master, not null
   * @param request  the request object, not null
   */
  protected AbstractSearchIterator(M master, R request) {
    ArgumentChecker.notNull(master, "master");
    ArgumentChecker.notNull(request, "request");
    _master = master;
    _request = request;
  }

  //-------------------------------------------------------------------------
  @Override
  public boolean hasNext() {
    // fetch the next batch when nothing has been fetched yet or the current batch is exhausted
    if (_currentBatch == null || _currentBatchIndex >= _currentBatch.getDocuments().size()) {
      doFetch();
    }
    return (_currentBatch != null && _currentBatchIndex < _currentBatch.getDocuments().size());
  }

  @Override
  public D next() {
    if (hasNext() == false) {
      throw new NoSuchElementException("No more elements found");
    }
    return doNext();
  }

  /**
   * Removes the last seen document.
   * <p>
   * NOTE(review): this method only validates that {@code next()} has been called;
   * it does not actually delete the document from the master, nor does it clear
   * {@code _current}, so repeated calls do not throw - confirm intended behavior.
   */
  @Override
  public void remove() {
    if (_current == null) {
      throw new IllegalStateException();
    }
  }

  /**
   * Gets the overall index of the next entry.
   * <p>
   * This number may skip if a bad entry is found.
   *
   * @return the overall index of the next entry, 0 if next() not called yet
   */
  public int nextIndex() {
    return _overallIndex;
  }

  // fetches the next batch of documents, falling back to single fetches on failure
  private void doFetch() {
    try {
      // try to fetch a batch of 20 documents
      _request.setPagingRequest(PagingRequest.ofIndex(_overallIndex, 20));
      _currentBatch = doSearch(_request);
    } catch (RuntimeException ex) {
      doFetchOne(ex);
    }
    // ensure same vc for whole iterator
    _request.setVersionCorrection(_currentBatch.getVersionCorrection());
    // check results: align the in-batch index with the overall position
    if (_currentBatch.getPaging().getFirstItem() < _overallIndex) {
      _currentBatchIndex = (_overallIndex - _currentBatch.getPaging().getFirstItem());
    } else {
      _currentBatchIndex = 0;
    }
  }

  /**
   * Fetches the next one document, skipping documents that fail to load.
   * <p>
   * Up to a bounded number of consecutive failing documents are abandoned
   * before the original exception is rethrown wrapped.
   *
   * @param ex  the original exception, not null
   */
  private void doFetchOne(RuntimeException ex) {
    // try to load just the next document
    int maxFailures = 5;
    if (_currentBatch != null) {
      maxFailures = _currentBatch.getPaging().getTotalItems() - _overallIndex;  // if we have results, use maximum count
      maxFailures = Math.min(maxFailures, 20);
    }
    while (maxFailures > 0) {
      try {
        _request.setPagingRequest(PagingRequest.ofIndex(_overallIndex, 1));
        _currentBatch = doSearch(_request);
        return;
      } catch (RuntimeException ex2) {
        _overallIndex++;  // abandon this document
        maxFailures--;
      }
    }
    throw new OpenGammaRuntimeException("Multiple documents failed to load", ex);
  }

  // advances to and returns the next document, updating both indices
  private D doNext() {
    _current = _currentBatch.getDocuments().get(_currentBatchIndex);
    _currentBatchIndex++;
    _overallIndex++;
    return _current;
  }

  /**
   * Performs the search on the master.
   *
   * @param request  the request to send, not null
   * @return the search result, not null
   * @throws RuntimeException if an error occurs
   */
  protected abstract AbstractSearchResult<D> doSearch(R request);

  //-----------------------------------------------------------------------
  /**
   * Gets the underlying master.
   *
   * @return the master, not null
   */
  public M getMaster() {
    return _master;
  }

  /**
   * Gets the request object that is being used.
   *
   * @return the request, not null
   */
  public R getRequest() {
    return _request;
  }
}
| apache-2.0 |
calvinjia/tachyon | job/server/src/main/java/alluxio/job/plan/transform/format/orc/OrcRow.java | 4887 | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.job.plan.transform.format.orc;
import alluxio.job.plan.transform.FieldSchema;
import alluxio.job.plan.transform.HiveConstants;
import alluxio.job.plan.transform.format.TableRow;
import alluxio.job.plan.transform.format.csv.Decimal;
import alluxio.job.plan.transform.format.parquet.ParquetRow;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VoidColumnVector;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * A row in an ORC table, backed by one position inside a vectorized row batch.
 */
public class OrcRow implements TableRow {
  // The batch this row belongs to; columns are stored as ColumnVectors.
  private final VectorizedRowBatch mBatch;
  // Index of this row within mBatch.
  private final int mPosition;
  // Maps a field name to its column index in mBatch.cols.
  private final Map<String, Integer> mColumnNamePosition;
  // Combined ORC/Alluxio/Parquet schema information for this table.
  private final OrcSchema mSchema;

  /**
   * Constructor for OrcRow.
   * @param schema the schema
   * @param batch the vectorized row batch
   * @param position the row position inside the vectorized row batch
   * @param fieldNames ordered list of field names (order must match batch columns)
   */
  public OrcRow(OrcSchema schema, VectorizedRowBatch batch, int position,
                List<String> fieldNames) {
    mSchema = schema;
    mBatch = batch;
    mPosition = position;
    mColumnNamePosition = new HashMap<>();
    // Build the name -> column-index lookup from the ordered field list.
    for (int i = 0; i < fieldNames.size(); i++) {
      final String fieldName = fieldNames.get(i);
      mColumnNamePosition.put(fieldName, i);
    }
  }

  /**
   * Converts this ORC row to a Parquet row by copying each field through
   * {@link #getColumn(String)} and {@link #convert(Object, String, String)}.
   */
  @Override
  public ParquetRow toParquet() throws IOException {
    Schema writeSchema = mSchema.getWriteSchema();
    GenericRecordBuilder recordBuilder = new GenericRecordBuilder(writeSchema);
    for (FieldSchema field : mSchema.getAlluxioSchema()) {
      String name = field.getName();
      String type = field.getType();
      Object value = getColumn(name);
      recordBuilder.set(writeSchema.getField(name), convert(value, name, type));
    }
    return new ParquetRow(recordBuilder.build());
  }

  /**
   * Returns the raw value of the named column at this row's position.
   * The Java type depends on the ColumnVector subclass:
   * timestamps become epoch milliseconds, byte columns become a copied
   * byte[] slice, numeric columns become long/double, decimals become
   * HiveDecimal, and null cells (or void columns) return null.
   */
  @Override
  public Object getColumn(String column) {
    final Integer columnPosition = mColumnNamePosition.get(column);
    if (columnPosition == null) {
      throw new IllegalArgumentException("Invalid column name: " + column);
    }
    final ColumnVector col = mBatch.cols[columnPosition];
    if (col.isNull[mPosition]) {
      return null;
    }
    if (col instanceof TimestampColumnVector) {
      // epoch milliseconds of the timestamp at this row
      return ((TimestampColumnVector) col).asScratchTimestamp(mPosition).getTime();
    } else if (col instanceof VoidColumnVector) {
      return null;
    } else if (col instanceof DecimalColumnVector) {
      final HiveDecimal hiveDecimal = ((DecimalColumnVector) col).vector[mPosition]
          .getHiveDecimal();
      return hiveDecimal;
    } else if (col instanceof LongColumnVector) {
      return ((LongColumnVector) col).vector[mPosition];
    } else if (col instanceof BytesColumnVector) {
      // copy only this row's slice out of the shared backing buffer
      BytesColumnVector bcv = (BytesColumnVector) col;
      return Arrays.copyOfRange(bcv.vector[mPosition], bcv.start[mPosition],
          bcv.start[mPosition] + bcv.length[mPosition]);
    } else if (col instanceof DoubleColumnVector) {
      return ((DoubleColumnVector) col).vector[mPosition];
    }
    throw new UnsupportedOperationException("Unsupported column vector: "
        + col.getClass().getName());
  }

  /**
   * Converts a raw column value to the representation expected by the Parquet
   * writer for the given Hive type: decimals become scaled big-integer bytes,
   * char-like types become Strings, everything else passes through unchanged.
   * NOTE(review): the byte[]-to-String conversion uses the platform default
   * charset - confirm the data is always encoded compatibly.
   */
  private Object convert(Object value, String name, String type) throws IOException {
    if (value == null) {
      return null;
    }
    switch (HiveConstants.Types.getHiveConstantType(type)) {
      case HiveConstants.Types.DECIMAL:
        final Decimal decimal = new Decimal(type);
        return ((HiveDecimal) value).bigIntegerBytesScaled(decimal.getScale());
      case HiveConstants.Types.VARCHAR:
      case HiveConstants.Types.CHAR:
      case HiveConstants.Types.STRING:
        return new String((byte[]) value);
      default:
        return value;
    }
  }
}
| apache-2.0 |
robbertvanginkel/buck | src/com/facebook/buck/apple/AbstractCodeSignIdentity.java | 2751 | /*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import com.facebook.buck.rules.RuleKeyAppendable;
import com.facebook.buck.rules.RuleKeyObjectSink;
import com.facebook.buck.util.immutables.BuckStyleImmutable;
import com.google.common.hash.HashCode;
import java.util.Optional;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.immutables.value.Value;
/** Represents an identity used in code signing. */
@Value.Immutable
@BuckStyleImmutable
abstract class AbstractCodeSignIdentity implements RuleKeyAppendable {

  /** Matches exactly 40 hex characters - the SHA-1 fingerprint of a signing certificate. */
  private static final Pattern STRICT_HASH_PATTERN = Pattern.compile("(^[A-Fa-f0-9]{40}$)");

  /**
   * A pseudo-identity for ad hoc code signing.
   *
   * <p>See the <a
   * href="https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man1/codesign.1.html">codesign
   * man page</a>.
   *
   * <p>Binaries signed with this identity will not be installable on real devices. This is only
   * intended for Buck unit tests.
   */
  public static final CodeSignIdentity AD_HOC =
      CodeSignIdentity.builder()
          .setFingerprint(Optional.empty())
          .setSubjectCommonName("Ad Hoc")
          .build();

  /**
   * Returns the identity's certificate hash, defined to be unique for each identity.
   *
   * <p>An absent value denotes an ad-hoc signing identity.
   */
  public abstract Optional<HashCode> getFingerprint();

  /**
   * Returns the full name of the identity, e.g. "iPhone Developer: John Doe (ABCDE12345)".
   *
   * <p>Not guaranteed to be unique.
   */
  public abstract String getSubjectCommonName();

  /** Parses {@code identifier} into a fingerprint {@code HashCode} if it is a 40-char hex hash. */
  public static Optional<HashCode> toFingerprint(String identifier) {
    if (STRICT_HASH_PATTERN.matcher(identifier).matches()) {
      return Optional.of(HashCode.fromString(identifier.toLowerCase()));
    }
    return Optional.empty();
  }

  @Override
  public void appendToRuleKey(RuleKeyObjectSink sink) {
    // Ad-hoc identities contribute an absent value to the rule key.
    sink.setReflectively("code-sign-identity", getFingerprint().map(hash -> hash.toString()));
  }
}
| apache-2.0 |
scnakandala/derby | java/engine/org/apache/derby/iapi/types/UserDataValue.java | 1214 | /*
Derby - Class org.apache.derby.iapi.types.UserDataValue
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.iapi.types;
import org.apache.derby.iapi.error.StandardException;
/**
 * Interface for data values that wrap an arbitrary Java {@code Object}
 * as their underlying value.
 */
public interface UserDataValue extends DataValueDescriptor
{
	/**
	 * Set the value of this UserDataValue to the given Object.
	 *
	 * @param theValue	The value to set this UserDataValue to
	 *
	 * @throws StandardException if the value cannot be set
	 */
	public void setValue(Object theValue) throws StandardException;
}
| apache-2.0 |
saydulk/horizon | openstack_dashboard/api/rest/nova.py | 10595 |
# Copyright 2014, Rackspace, US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API over the nova service.
"""
from django.utils import http as utils_http
from django.views import generic
from openstack_dashboard import api
from openstack_dashboard.api.rest import urls
from openstack_dashboard.api.rest import utils as rest_utils
@urls.register
class Keypairs(generic.View):
    """API for nova keypairs."""
    url_regex = r'nova/keypairs/$'

    @rest_utils.ajax()
    def get(self, request):
        """List the keypairs that belong to the logged-in account.

        The listing result is an object with property "items".
        """
        keypairs = api.nova.keypair_list(request)
        return {'items': [kp.to_dict() for kp in keypairs]}

    @rest_utils.ajax(data_required=True)
    def post(self, request):
        """Create (or import) a keypair.

        Parameters in the POST application/json object:

        :param name: the name to give the keypair
        :param public_key: (optional) a key to import

        Returns the new keypair object on success.
        """
        data = request.DATA
        if 'public_key' in data:
            keypair = api.nova.keypair_import(request, data['name'],
                                              data['public_key'])
        else:
            keypair = api.nova.keypair_create(request, data['name'])
        return rest_utils.CreatedResponse(
            '/api/nova/keypairs/%s' % utils_http.urlquote(keypair.name),
            keypair.to_dict()
        )
@urls.register
class AvailabilityZones(generic.View):
    """API for nova availability zones."""
    url_regex = r'nova/availzones/$'

    @rest_utils.ajax()
    def get(self, request):
        """List availability zones.

        GET parameters:

        :param detailed: pass "true" to include more detail in the result.

        The listing result is an object with property "items".
        """
        want_detail = request.GET.get('detailed') == 'true'
        zones = api.nova.availability_zone_list(request, want_detail)
        return {'items': [zone.to_dict() for zone in zones]}
@urls.register
class Limits(generic.View):
    """API for nova limits."""
    url_regex = r'nova/limits/$'

    @rest_utils.ajax()
    def get(self, request):
        """Describe the current project's limits.

        Note: the Horizon API doesn't support any other project (tenant) but
        the underlying client does...

        GET parameters:

        :param reserved: may be set to "true"; forwarded to the nova API as-is.

        The result is an object with limits as properties.
        """
        include_reserved = request.GET.get('reserved') == 'true'
        return api.nova.tenant_absolute_limits(request, include_reserved)
@urls.register
class Servers(generic.View):
    """API over all servers."""
    url_regex = r'nova/servers/$'

    # Optional novaclient arguments that are forwarded verbatim when present
    # in the POST body.
    _optional_create = [
        'block_device_mapping', 'block_device_mapping_v2', 'nics', 'meta',
        'availability_zone', 'instance_count', 'admin_pass', 'disk_config',
        'config_drive'
    ]

    @rest_utils.ajax(data_required=True)
    def post(self, request):
        """Create a server.

        Required parameters (per the underlying novaclient):

        :param name: The new server name.
        :param source_id: The ID of the image to use.
        :param flavor_id: The ID of the flavor to use.
        :param key_name: (optional extension) name of previously created
                      keypair to inject into the instance.
        :param user_data: user data to pass to be exposed by the metadata
                      server this can be a file type object as well or a
                      string.
        :param security_groups: An array of one or more objects with a "name"
            attribute.

        Any of the names in ``_optional_create`` are also accepted and passed
        through to novaclient.

        Returns the new server object on success.
        """
        data = request.DATA
        try:
            required = (
                request,
                data['name'],
                data['source_id'],
                data['flavor_id'],
                data['key_name'],
                data['user_data'],
                data['security_groups'],
            )
        except KeyError as e:
            raise rest_utils.AjaxError(400, 'missing required parameter '
                                       "'%s'" % e.args[0])
        extras = {key: data[key]
                  for key in self._optional_create if key in data}
        server = api.nova.server_create(*required, **extras)
        return rest_utils.CreatedResponse(
            '/api/nova/servers/%s' % utils_http.urlquote(server.id),
            server.to_dict()
        )
@urls.register
class Server(generic.View):
    """API for retrieving a single server."""
    url_regex = r'nova/servers/(?P<server_id>.+|default)$'

    @rest_utils.ajax()
    def get(self, request, server_id):
        """Get one server by id.

        Example GET:
        http://localhost/api/nova/servers/1
        """
        server = api.nova.server_get(request, server_id)
        return server.to_dict()
@urls.register
class Extensions(generic.View):
    """API for nova extensions."""
    url_regex = r'nova/extensions/$'

    @rest_utils.ajax()
    def get(self, request):
        """List the available nova extensions.

        The listing result is an object with property "items", one entry per
        extension.

        Example GET:
        http://localhost/api/nova/extensions
        """
        extensions = api.nova.list_extensions(request)
        return {'items': [ext.to_dict() for ext in extensions]}
@urls.register
class Flavors(generic.View):
    """API for nova flavors."""
    url_regex = r'nova/flavors/$'

    @rest_utils.ajax()
    def get(self, request):
        """List flavors.

        The listing result is an object with property "items", one entry per
        flavor. By default this returns the flavors for the user's current
        project; an admin additionally sees public flavors.

        :param is_public: For a regular user, set to True to see all public
            flavors. For an admin user, set to False to not see public flavors.
        :param get_extras: Also retrieve the extra specs.

        Example GET:
        http://localhost/api/nova/flavors?is_public=true
        """
        # Tri-state on purpose: absent -> None (nova default), otherwise bool.
        raw_public = request.GET.get('is_public')
        is_public = raw_public and raw_public.lower() == 'true'
        raw_extras = request.GET.get('get_extras')
        get_extras = bool(raw_extras and raw_extras.lower() == 'true')
        flavors = api.nova.flavor_list(request, is_public=is_public,
                                       get_extras=get_extras)
        items = []
        for flavor in flavors:
            entry = flavor.to_dict()
            if get_extras:
                entry['extras'] = flavor.extras
            items.append(entry)
        return {'items': items}
@urls.register
class Flavor(generic.View):
    """API for retrieving a single flavor."""
    url_regex = r'nova/flavors/(?P<flavor_id>[^/]+)/$'

    @rest_utils.ajax()
    def get(self, request, flavor_id):
        """Get one flavor by id.

        :param get_extras: Also retrieve the extra specs.

        Example GET:
        http://localhost/api/nova/flavors/1
        """
        raw_extras = request.GET.get('get_extras')
        with_extras = bool(raw_extras and raw_extras.lower() == 'true')
        flavor = api.nova.flavor_get(request, flavor_id,
                                     get_extras=with_extras)
        payload = flavor.to_dict()
        if with_extras:
            payload['extras'] = flavor.extras
        return payload
@urls.register
class FlavorExtraSpecs(generic.View):
    """API for managing flavor extra specs."""
    url_regex = r'nova/flavors/(?P<flavor_id>[^/]+)/extra-specs/$'

    @rest_utils.ajax()
    def get(self, request, flavor_id):
        """Get a specific flavor's extra specs.

        Example GET:
        http://localhost/api/nova/flavors/1/extra-specs
        """
        return api.nova.flavor_get_extras(request, flavor_id, raw=True)

    @rest_utils.ajax(data_required=True)
    def patch(self, request, flavor_id):
        """Update a specific flavor's extra specs.

        The PATCH application/json object may contain:

        :param removed: list of extra spec keys to delete
        :param updated: object of extra spec key/value pairs to set

        Either key may be omitted. This method returns HTTP 204 (no content)
        on success.
        """
        if request.DATA.get('removed'):
            api.nova.flavor_extra_delete(
                request, flavor_id, request.DATA.get('removed')
            )
        # Guard the lookup: a removal-only request previously raised KeyError
        # (HTTP 500) on the missing 'updated' key.
        updated = request.DATA.get('updated')
        if updated:
            api.nova.flavor_extra_set(request, flavor_id, updated)
@urls.register
class AggregateExtraSpecs(generic.View):
    """API for managing aggregate extra specs (metadata)."""
    url_regex = r'nova/aggregates/(?P<aggregate_id>[^/]+)/extra-specs/$'

    @rest_utils.ajax()
    def get(self, request, aggregate_id):
        """Get a specific aggregate's extra specs.

        Example GET:
        http://localhost/api/nova/aggregates/1/extra-specs
        """
        return api.nova.aggregate_get(request, aggregate_id).metadata

    @rest_utils.ajax(data_required=True)
    def patch(self, request, aggregate_id):
        """Update a specific aggregate's extra specs.

        The PATCH application/json object may contain:

        :param removed: list of metadata keys to remove (nova removes a key
            when its value is set to None)
        :param updated: object of metadata key/value pairs to set

        Either key may be omitted. This method returns HTTP 204 (no content)
        on success.
        """
        # Copy so the request payload is not mutated in place, and tolerate a
        # removal-only request (previously the missing 'updated' key raised
        # KeyError -> HTTP 500).
        updated = dict(request.DATA.get('updated') or {})
        for name in request.DATA.get('removed') or []:
            updated[name] = None
        api.nova.aggregate_set_metadata(request, aggregate_id, updated)
| apache-2.0 |
kingthorin/zap-extensions | addOns/fuzz/src/test/java/org/zaproxy/zap/extension/fuzz/httpfuzzer/processors/RequestContentLengthUpdaterProcessorUnitTest.java | 10958 | /*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2016 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.fuzz.httpfuzzer.processors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.mock;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.network.HttpMalformedHeaderException;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.network.HttpRequestHeader;
import org.zaproxy.zap.extension.fuzz.httpfuzzer.HttpFuzzerTaskProcessorUtils;
import org.zaproxy.zap.network.HttpRequestBody;
import org.zaproxy.zap.utils.I18N;
/** Unit test for {@link RequestContentLengthUpdaterProcessor}. */
class RequestContentLengthUpdaterProcessorUnitTest {
    // The processor resolves localized strings through Constant.messages; stub
    // it with an empty-string I18N so the tests run without ZAP's resources.
    @BeforeEach
    void setUp() throws Exception {
        I18N i18n = mock(I18N.class);
        given(i18n.getString(any())).willReturn("");
        Constant.messages = i18n;
    }
    @Test
    void shouldReturnANonNullInstance() {
        // Given
        RequestContentLengthUpdaterProcessor processor =
                RequestContentLengthUpdaterProcessor.getInstance();
        // When / Then
        assertThat(processor, is(notNullValue()));
    }
    @Test
    void shouldReturnsAlwaysSameInstance() {
        // Given
        RequestContentLengthUpdaterProcessor processor =
                RequestContentLengthUpdaterProcessor.getInstance();
        RequestContentLengthUpdaterProcessor processor2 =
                RequestContentLengthUpdaterProcessor.getInstance();
        // When / Then
        assertThat(processor, is(equalTo(processor2)));
    }
    @Test
    void shouldHaveANonNullName() {
        // Given
        RequestContentLengthUpdaterProcessor processor = new RequestContentLengthUpdaterProcessor();
        // When
        String name = processor.getName();
        // Then
        assertThat(name, is(notNullValue()));
    }
    @Test
    void shouldCreateProcessorWithUndefinedMethod() {
        // Given: a null method means "apply to all HTTP methods".
        String undefinedMethod = null;
        // When / Then
        assertDoesNotThrow(() -> new RequestContentLengthUpdaterProcessor(undefinedMethod));
    }
    @Test
    void shouldFailToProcessAnUndefinedMessage() {
        // Given
        RequestContentLengthUpdaterProcessor processor = new RequestContentLengthUpdaterProcessor();
        HttpMessage undefinedMessage = null;
        HttpFuzzerTaskProcessorUtils utils = createUtils();
        // When / Then
        assertThrows(
                NullPointerException.class,
                () -> processor.processMessage(utils, undefinedMessage));
    }
    @Test
    void shouldNotRequireUtilsToProcessMessage() {
        // Given
        RequestContentLengthUpdaterProcessor processor = new RequestContentLengthUpdaterProcessor();
        // When / Then
        assertDoesNotThrow(() -> processor.processMessage(null, new HttpMessage()));
    }
    @Test
    void shouldReturnSameMessageWhenProcessing() {
        // Given
        RequestContentLengthUpdaterProcessor processor = new RequestContentLengthUpdaterProcessor();
        HttpMessage message = new HttpMessage();
        // When
        HttpMessage processedMessage = processor.processMessage(createUtils(), message);
        // Then: the processor mutates and returns the same message instance.
        assertThat(processedMessage, is(equalTo(message)));
    }
    @Test
    void shouldNotAddContentLengthIfEmptyBody() {
        // Given
        RequestContentLengthUpdaterProcessor processor =
                new RequestContentLengthUpdaterProcessor("POST");
        HttpMessage messageEmptyBody = createHttpMessage("POST");
        // When
        processor.processMessage(createUtils(), messageEmptyBody);
        // Then: no Content-Length header is introduced for an empty body.
        assertThat(
                messageEmptyBody.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo(null)));
    }
    @Test
    void shouldAddContentLengthIfNotEmptyBody() {
        // Given
        RequestContentLengthUpdaterProcessor processor =
                new RequestContentLengthUpdaterProcessor("POST");
        HttpMessage messageWithBody = createHttpMessage("POST", null, "body");
        // When
        processor.processMessage(createUtils(), messageWithBody);
        // Then: "body" is 4 bytes.
        assertThat(
                messageWithBody.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("4")));
    }
    @Test
    void shouldUpdateExistingContentLengthIfEmptyBody() {
        // Given: a stale Content-Length of 15 with an empty body.
        RequestContentLengthUpdaterProcessor processor =
                new RequestContentLengthUpdaterProcessor("POST");
        HttpMessage messageEmptyBody = createHttpMessage("POST", 15);
        // When
        processor.processMessage(createUtils(), messageEmptyBody);
        // Then
        assertThat(
                messageEmptyBody.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("0")));
    }
    @Test
    void shouldUpdateContentLengthForAnyMethodWhenNoMethodIsSpecified() {
        // Given
        RequestContentLengthUpdaterProcessor processor = new RequestContentLengthUpdaterProcessor();
        String body = "RequestBody";
        HttpMessage postMessage = createHttpMessage("POST", 5, body);
        HttpMessage getMessage = createHttpMessage("GET", 80, body);
        HttpMessage xyzMessage = createHttpMessage("XYZ", 0, body);
        // When
        processor.processMessage(createUtils(), postMessage);
        processor.processMessage(createUtils(), getMessage);
        processor.processMessage(createUtils(), xyzMessage);
        // Then: "RequestBody" is 11 bytes, regardless of HTTP method.
        assertThat(
                postMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("11")));
        assertThat(
                getMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("11")));
        assertThat(
                xyzMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("11")));
    }
    @Test
    void shouldUpdateContentLengthForAnyMethodWithInstance() {
        // Given: the shared instance behaves like the no-method constructor.
        RequestContentLengthUpdaterProcessor processor =
                RequestContentLengthUpdaterProcessor.getInstance();
        String body = "RequestBody";
        HttpMessage postMessage = createHttpMessage("POST", 5, body);
        HttpMessage getMessage = createHttpMessage("GET", 80, body);
        HttpMessage xyzMessage = createHttpMessage("XYZ", 0, body);
        // When
        processor.processMessage(createUtils(), postMessage);
        processor.processMessage(createUtils(), getMessage);
        processor.processMessage(createUtils(), xyzMessage);
        // Then
        assertThat(
                postMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("11")));
        assertThat(
                getMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("11")));
        assertThat(
                xyzMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("11")));
    }
    @Test
    void shouldUpdateContentLengthForSpecifiedMethodOnly() {
        // Given: processor restricted to POST; other methods keep their header.
        RequestContentLengthUpdaterProcessor processor =
                new RequestContentLengthUpdaterProcessor("POST");
        String body = "RequestBody";
        HttpMessage postMessage = createHttpMessage("POST", 50, body);
        HttpMessage getMessage = createHttpMessage("GET", 4, body);
        HttpMessage xyzMessage = createHttpMessage("XYZ", 8, body);
        // When
        processor.processMessage(createUtils(), postMessage);
        processor.processMessage(createUtils(), getMessage);
        processor.processMessage(createUtils(), xyzMessage);
        // Then
        assertThat(
                postMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("11")));
        assertThat(
                getMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("4")));
        assertThat(
                xyzMessage.getRequestHeader().getHeader(HttpRequestHeader.CONTENT_LENGTH),
                is(equalTo("8")));
    }
    @Test
    void shouldAcceptResultsAlways() {
        // Given
        RequestContentLengthUpdaterProcessor processor = new RequestContentLengthUpdaterProcessor();
        // When
        boolean acceptResult = processor.processResult(null, null);
        // Then
        assertThat(acceptResult, is(equalTo(true)));
    }
    // --- helpers: build a request to http://example.org/ with the given
    // method, optional (possibly stale) Content-Length header, and body. ---
    private static HttpMessage createHttpMessage(String method) {
        return createHttpMessage(method, null);
    }
    private static HttpMessage createHttpMessage(String method, Integer contentLength) {
        return createHttpMessage(method, contentLength, "");
    }
    private static HttpMessage createHttpMessage(
            String method, Integer contentLength, String body) {
        StringBuilder sb = new StringBuilder(150);
        sb.append(method).append(" http://example.org/ HTTP/1.1\r\n");
        if (contentLength != null) {
            sb.append(HttpRequestHeader.CONTENT_LENGTH)
                    .append(": ")
                    .append(contentLength)
                    .append("\r\n");
        }
        try {
            return new HttpMessage(new HttpRequestHeader(sb.toString()), new HttpRequestBody(body));
        } catch (HttpMalformedHeaderException e) {
            throw new RuntimeException(e);
        }
    }
    private static HttpFuzzerTaskProcessorUtils createUtils() {
        return new HttpFuzzerTaskProcessorTestUtils();
    }
    // Minimal utils stub: the processor under test never touches the
    // fuzzer/task state, so all constructor arguments can be null/0.
    private static class HttpFuzzerTaskProcessorTestUtils extends HttpFuzzerTaskProcessorUtils {
        protected HttpFuzzerTaskProcessorTestUtils() {
            super(null, null, 0, null);
        }
    }
}
| apache-2.0 |
nssales/OG-Platform | projects/OG-Util/src/main/java/com/opengamma/transport/FudgeSynchronousClient.java | 6779 | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.transport;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.fudgemsg.FudgeContext;
import org.fudgemsg.FudgeMsg;
import org.fudgemsg.FudgeMsgEnvelope;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.util.ArgumentChecker;
/**
* Allows synchronous RPC-style semantics to be applied over a {@link FudgeRequestSender}
* or {@link FudgeConnection}. This class also supports multiplexing different clients over
* the same underlying transport channel using correlation IDs to multiplex the requests
* and responses.
*/
public abstract class FudgeSynchronousClient implements FudgeMessageReceiver {

  /** Logger. */
  private static final Logger s_logger = LoggerFactory.getLogger(FudgeSynchronousClient.class);

  /**
   * The default timeout.
   */
  private static final long DEFAULT_TIMEOUT_IN_MILLISECONDS = 30 * 1000L;

  /**
   * The generator of correlation ids.
   */
  private final AtomicLong _nextCorrelationId = new AtomicLong();
  /**
   * The Fudge message sender.
   */
  private final FudgeMessageSender _messageSender;
  /**
   * The map of pending requests keyed by correlation id.
   */
  private final Map<Long, ClientRequestHolder> _pendingRequests = new ConcurrentHashMap<Long, ClientRequestHolder>();
  /**
   * The timeout.
   */
  private long _timeoutInMilliseconds = DEFAULT_TIMEOUT_IN_MILLISECONDS;
  /**
   * Handler for asynchronous messages.
   */
  private FudgeMessageReceiver _asynchronousMessageReceiver;

  /**
   * Creates the client.
   * @param requestSender the sender, not null
   */
  protected FudgeSynchronousClient(final FudgeRequestSender requestSender) {
    ArgumentChecker.notNull(requestSender, "requestSender");
    _messageSender = new FudgeMessageSender() {

      @Override
      public FudgeContext getFudgeContext() {
        return requestSender.getFudgeContext();
      }

      @Override
      public void send(FudgeMsg message) {
        requestSender.sendRequest(message, FudgeSynchronousClient.this);
      }

    };
  }

  /**
   * Creates the client over a bidirectional connection, registering itself as
   * the receiver for inbound messages.
   * @param connection the connection, not null
   */
  protected FudgeSynchronousClient(final FudgeConnection connection) {
    ArgumentChecker.notNull(connection, "connection");
    connection.setFudgeMessageReceiver(this);
    _messageSender = connection.getFudgeMessageSender();
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the message sender.
   *
   * @return the message sender, not null
   */
  public FudgeMessageSender getMessageSender() {
    return _messageSender;
  }

  /**
   * Gets the timeout in milliseconds.
   *
   * @return the timeout
   */
  public long getTimeoutInMilliseconds() {
    return _timeoutInMilliseconds;
  }

  public void setTimeoutInMilliseconds(final long timeoutMilliseconds) {
    _timeoutInMilliseconds = timeoutMilliseconds;
  }

  public void setAsynchronousMessageReceiver(final FudgeMessageReceiver asynchronousMessageReceiver) {
    _asynchronousMessageReceiver = asynchronousMessageReceiver;
  }

  public FudgeMessageReceiver getAsynchronousMessageReceiver() {
    return _asynchronousMessageReceiver;
  }

  /**
   * Gets the next id.
   *
   * @return the next numeric id
   */
  protected long getNextCorrelationId() {
    return _nextCorrelationId.incrementAndGet();
  }

  //-------------------------------------------------------------------------
  /**
   * Sends the message and blocks until the correlated response arrives or the
   * timeout elapses.
   *
   * @param requestMsg the message, not null
   * @param correlationId the message id
   * @return the result, not null
   * @throws OpenGammaRuntimeException if no response arrives within the timeout
   */
  protected FudgeMsg sendRequestAndWaitForResponse(FudgeMsg requestMsg, long correlationId) {
    ClientRequestHolder requestHolder = new ClientRequestHolder();
    _pendingRequests.put(correlationId, requestHolder);
    try {
      s_logger.debug("Sending message {}", correlationId);
      getMessageSender().send(requestMsg);
      try {
        s_logger.debug("Blocking for message result");
        requestHolder.latch.await(getTimeoutInMilliseconds(), TimeUnit.MILLISECONDS);
      } catch (InterruptedException e) {
        // Restore the interrupt status so callers can observe the interruption.
        // The previous Thread.interrupted() call cleared and discarded the flag.
        Thread.currentThread().interrupt();
        s_logger.error("Interrupted");
      }
      // A null result covers both timeout and interruption.
      if (requestHolder.resultValue == null) {
        s_logger.warn("Didn't get response to {} in {}ms", correlationId, getTimeoutInMilliseconds());
        throw new OpenGammaRuntimeException("Didn't receive a response message to " + correlationId + " in " + getTimeoutInMilliseconds() + "ms");
      }
      assert getCorrelationIdFromReply(requestHolder.resultValue) == correlationId;
      s_logger.debug("Received result {}", requestHolder.resultValue);
      return requestHolder.resultValue;
    } finally {
      _pendingRequests.remove(correlationId);
      s_logger.debug("Request {} complete", correlationId);
    }
  }

  protected void sendMessage(FudgeMsg message) {
    getMessageSender().send(message);
  }

  /**
   * Receives a message from Fudge, routing correlated replies to the waiting
   * caller and uncorrelated messages to the asynchronous receiver (if any).
   *
   * @param fudgeContext the Fudge context, not null
   * @param msgEnvelope the message, not null
   */
  @Override
  public void messageReceived(FudgeContext fudgeContext, FudgeMsgEnvelope msgEnvelope) {
    final FudgeMsg reply = msgEnvelope.getMessage();
    final Long correlationId = getCorrelationIdFromReply(reply);
    if (correlationId == null) {
      final FudgeMessageReceiver receiver = getAsynchronousMessageReceiver();
      if (receiver == null) {
        s_logger.info("Unhandled asynchronous message {}", msgEnvelope);
      } else {
        receiver.messageReceived(fudgeContext, msgEnvelope);
      }
      return;
    }
    final ClientRequestHolder requestHolder = _pendingRequests.remove(correlationId);
    if (requestHolder == null) {
      // Likely a response that arrived after its request timed out.
      s_logger.warn("Got a response on non-pending correlation Id {}", correlationId);
      return;
    }
    // Publish the result before releasing the waiting thread.
    requestHolder.resultValue = reply;
    requestHolder.latch.countDown();
  }

  /**
   * Extracts the correlation id from the reply object.
   *
   * @param reply the reply
   * @return the id, null if it's an asynchronous message (over {@link FudgeConnection} transport only)
   */
  protected abstract Long getCorrelationIdFromReply(FudgeMsg reply);

  //-------------------------------------------------------------------------
  /**
   * Data holder pairing a one-shot latch with the reply slot it guards.
   */
  private static final class ClientRequestHolder {
    public FudgeMsg resultValue;  // CSIGNORE: simple holder object
    public final CountDownLatch latch = new CountDownLatch(1);  // CSIGNORE: simple holder object
  }

}
| apache-2.0 |
miarmak/CloudFerry | cloudferrylib/os/actions/map_compute_info.py | 1797 | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and#
# limitations under the License.
from cloudferrylib.base.action import action
from cloudferrylib.utils import utils as utl
import copy
# Keys used in the migration "info" structure handled by this action.
INSTANCES = 'instances'
DIFF = 'diff'
# Keys written into each instance's DIFF entry: the destination-side file
# path and the destination host reachable over SSH.
PATH_DST = 'path_dst'
HOST_DST = 'host_dst'
class MapComputeInfo(action.Action):
    """Rewrite compute info gathered on the source cloud so it applies to
    the destination cloud: flavor ids are translated by flavor name, and
    each instance's diff entry gets a destination path and host.
    """

    def run(self, info=None, **kwargs):
        mapped_info = copy.deepcopy(info)
        src_compute = self.src_cloud.resources[utl.COMPUTE_RESOURCE]
        dst_compute = self.dst_cloud.resources[utl.COMPUTE_RESOURCE]
        # Flavor names are the common key between the two clouds.
        src_flavor_names = {}
        for flavor in src_compute.get_flavor_list():
            src_flavor_names[flavor.id] = flavor.name
        dst_flavor_ids = {}
        for flavor in dst_compute.get_flavor_list():
            dst_flavor_ids[flavor.name] = flavor.id
        for instance_id, instance in mapped_info[utl.INSTANCES_TYPE].iteritems():
            _instance = instance['instance']
            flavor_name = src_flavor_names[_instance['flavor_id']]
            _instance['flavor_id'] = dst_flavor_ids[flavor_name]
            temp_dir = self.dst_cloud.cloud_config.cloud.temp
            instance[DIFF][PATH_DST] = '%s/temp%s_base' % (temp_dir, instance_id)
            instance[DIFF][HOST_DST] = self.dst_cloud.getIpSsh()
        return {
            'info': mapped_info
        }
| apache-2.0 |
Darsstar/framework | server/src/main/java/com/vaadin/ui/SingleSelect.java | 887 | /*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.ui;
import com.vaadin.data.HasValue;
/**
* Single selection component whose selection is treated as a value.
*
* @author Vaadin Ltd
* @since 8.0
*
* @param <V>
* the selection value type
*
*/
public interface SingleSelect<V> extends HasValue<V> {
    // Intentionally empty: the single selection is exposed purely as the
    // component's value through the inherited HasValue<V> API.
}
| apache-2.0 |
ftomassetti/java-symbol-solver | java-symbol-solver-testing/src/test/resources/javaparser_new_src/javaparser-core/com/github/javaparser/ast/expr/EnclosedExpr.java | 1719 | /*
* Copyright (C) 2007-2010 Júlio Vilmar Gesser.
* Copyright (C) 2011, 2013-2016 The JavaParser Team.
*
* This file is part of JavaParser.
*
* JavaParser can be used either under the terms of
* a) the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* b) the terms of the Apache License
*
* You should have received a copy of both licenses in LICENCE.LGPL and
* LICENCE.APACHE. Please refer to those files for details.
*
* JavaParser is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*/
package com.github.javaparser.ast.expr;
import com.github.javaparser.Range;
import com.github.javaparser.ast.visitor.GenericVisitor;
import com.github.javaparser.ast.visitor.VoidVisitor;
/**
* @author Julio Vilmar Gesser
*/
public final class EnclosedExpr extends Expression {
    // The expression inside the parentheses.
    private Expression inner;
    public EnclosedExpr() {
    }
    public EnclosedExpr(final Expression inner) {
        setInner(inner);
    }
    public EnclosedExpr(final Range range, final Expression inner) {
        super(range);
        setInner(inner);
    }
    /** Visitor dispatch (visitor with a result). */
    @Override public <R, A> R accept(final GenericVisitor<R, A> v, final A arg) {
        return v.visit(this, arg);
    }
    /** Visitor dispatch (visitor without a result). */
    @Override public <A> void accept(final VoidVisitor<A> v, final A arg) {
        v.visit(this, arg);
    }
    public Expression getInner() {
        return inner;
    }
    /**
     * Sets the enclosed expression and re-parents it to this node.
     *
     * @param inner the new enclosed expression
     * @return this node, for chaining
     */
    public EnclosedExpr setInner(final Expression inner) {
        this.inner = inner;
        setAsParentNodeOf(this.inner);
        return this;
    }
}
| apache-2.0 |
stoksey69/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201505/ProposalLink.java | 4513 |
package com.google.api.ads.dfp.jaxws.v201505;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
*
* A link that can be added as a resource to a {@link Proposal}.
*
*
* <p>Java class for ProposalLink complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ProposalLink">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="creatorId" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="url" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="creationDateTime" type="{https://www.google.com/apis/ads/publisher/v201505}DateTime" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated DTO (see the schema fragment in the class javadoc).
// Hand edits beyond documentation are likely to be lost on regeneration.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ProposalLink", propOrder = {
    "id",
    "name",
    "creatorId",
    "url",
    "description",
    "creationDateTime"
})
public class ProposalLink {

    // Wrapper types throughout: a null field means "unset" on the wire.
    protected Long id;
    protected String name;
    protected Long creatorId;
    protected String url;
    protected String description;
    protected DateTime creationDateTime;

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    public Long getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    public void setId(Long value) {
        this.id = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the creatorId property.
     *
     * @return
     *     possible object is
     *     {@link Long }
     *
     */
    public Long getCreatorId() {
        return creatorId;
    }

    /**
     * Sets the value of the creatorId property.
     *
     * @param value
     *     allowed object is
     *     {@link Long }
     *
     */
    public void setCreatorId(Long value) {
        this.creatorId = value;
    }

    /**
     * Gets the value of the url property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getUrl() {
        return url;
    }

    /**
     * Sets the value of the url property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setUrl(String value) {
        this.url = value;
    }

    /**
     * Gets the value of the description property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getDescription() {
        return description;
    }

    /**
     * Sets the value of the description property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setDescription(String value) {
        this.description = value;
    }

    /**
     * Gets the value of the creationDateTime property.
     *
     * @return
     *     possible object is
     *     {@link DateTime }
     *
     */
    public DateTime getCreationDateTime() {
        return creationDateTime;
    }

    /**
     * Sets the value of the creationDateTime property.
     *
     * @param value
     *     allowed object is
     *     {@link DateTime }
     *
     */
    public void setCreationDateTime(DateTime value) {
        this.creationDateTime = value;
    }

}
| apache-2.0 |
balloob/home-assistant | homeassistant/components/python_script/__init__.py | 7610 | """Component to allow running Python scripts."""
import datetime
import glob
import logging
import os
import time
from RestrictedPython import (
compile_restricted_exec,
limited_builtins,
safe_builtins,
utility_builtins,
)
from RestrictedPython.Eval import default_guarded_getitem
from RestrictedPython.Guards import (
full_write_guard,
guarded_iter_unpack_sequence,
guarded_unpack_sequence,
)
import voluptuous as vol
from homeassistant.const import SERVICE_RELOAD
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.service import async_set_service_schema
from homeassistant.loader import bind_hass
from homeassistant.util import sanitize_filename
import homeassistant.util.dt as dt_util
from homeassistant.util.yaml.loader import load_yaml
_LOGGER = logging.getLogger(__name__)
DOMAIN = "python_script"
# Scripts are loaded from <config dir>/python_scripts/.
FOLDER = "python_scripts"
CONFIG_SCHEMA = vol.Schema({DOMAIN: vol.Schema(dict)}, extra=vol.ALLOW_EXTRA)
# Attribute whitelists enforced by protected_getattr() in execute():
# sandboxed scripts may only access the listed members of each object.
ALLOWED_HASS = {"bus", "services", "states"}
ALLOWED_EVENTBUS = {"fire"}
ALLOWED_STATEMACHINE = {
    "entity_ids",
    "all",
    "get",
    "is_state",
    "is_state_attr",
    "remove",
    "set",
}
ALLOWED_SERVICEREGISTRY = {"services", "has_service", "call"}
# time-module functions reachable through the TimeWrapper proxy.
ALLOWED_TIME = {
    "sleep",
    "strftime",
    "strptime",
    "gmtime",
    "localtime",
    "ctime",
    "time",
    "mktime",
}
ALLOWED_DATETIME = {"date", "time", "datetime", "timedelta", "tzinfo"}
ALLOWED_DT_UTIL = {
    "utcnow",
    "now",
    "as_utc",
    "as_timestamp",
    "as_local",
    "utc_from_timestamp",
    "start_of_local_day",
    "parse_datetime",
    "parse_date",
    "get_age",
}
class ScriptError(HomeAssistantError):
    """Raised when a python_script fails or violates a sandbox restriction."""
def setup(hass, config):
    """Set up the python_script component.

    Discovers scripts once at startup and registers a reload service.
    Returns False when the python_scripts folder is missing.
    """
    scripts_dir = hass.config.path(FOLDER)
    if not os.path.isdir(scripts_dir):
        _LOGGER.warning("Folder %s not found in configuration folder", FOLDER)
        return False

    discover_scripts(hass)

    def handle_reload(call):
        """Re-scan the scripts folder when the reload service is called."""
        discover_scripts(hass)

    hass.services.register(DOMAIN, SERVICE_RELOAD, handle_reload)
    return True
def discover_scripts(hass):
    """Scan the python_scripts folder and (re)register one service per script."""
    path = hass.config.path(FOLDER)
    if not os.path.isdir(path):
        _LOGGER.warning("Folder %s not found in configuration folder", FOLDER)
        return False

    def handle_script_call(call):
        """Run the script whose name matches the called service."""
        execute_script(hass, call.service, call.data)

    # Drop previously registered script services, keeping only reload.
    # NOTE(review): this assumes hass.services.services returns a snapshot,
    # so removing entries while iterating is safe — confirm against
    # the ServiceRegistry implementation.
    for svc in hass.services.services.get(DOMAIN, {}).keys():
        if svc != SERVICE_RELOAD:
            hass.services.remove(DOMAIN, svc)

    # Optional user-provided service descriptions.
    services_yaml = os.path.join(path, "services.yaml")
    services_dict = load_yaml(services_yaml) if os.path.exists(services_yaml) else {}

    for script_path in glob.iglob(os.path.join(path, "*.py")):
        name = os.path.splitext(os.path.basename(script_path))[0]
        hass.services.register(DOMAIN, name, handle_script_call)
        desc = services_dict.get(name, {})
        async_set_service_schema(
            hass,
            DOMAIN,
            name,
            {
                "description": desc.get("description", ""),
                "fields": desc.get("fields", {}),
            },
        )
@bind_hass
def execute_script(hass, name, data=None):
    """Load <config>/python_scripts/<name>.py and execute it.

    ``data`` is passed through to the script as its ``data`` global.
    Raises ScriptError if the requested name maps to an unsafe filename.
    """
    filename = f"{name}.py"
    # Fix: previously the sanitized name was used silently, so a name that
    # sanitization would alter (e.g. one containing path separators) could
    # open a *different* file than the one requested. Fail loudly instead.
    if sanitize_filename(filename) != filename:
        raise ScriptError(f"Invalid filename: {filename}")
    with open(hass.config.path(FOLDER, filename)) as fil:
        source = fil.read()
    execute(hass, filename, source, data)
@bind_hass
def execute(hass, filename, source, data=None):
    """Compile *source* with RestrictedPython and run it in a sandbox.

    Compilation errors abort the run (logged); warnings only log. The script
    executes with a restricted builtin set plus ``hass``, ``data`` and a
    per-script ``logger`` in its globals.
    """
    compiled = compile_restricted_exec(source, filename=filename)
    if compiled.errors:
        _LOGGER.error(
            "Error loading script %s: %s", filename, ", ".join(compiled.errors)
        )
        return
    if compiled.warnings:
        _LOGGER.warning(
            "Warning loading script %s: %s", filename, ", ".join(compiled.warnings)
        )

    def protected_getattr(obj, name, default=None):
        """Restricted method to get attributes."""
        # The sync sandbox must never reach into Home Assistant's async API.
        if name.startswith("async_"):
            raise ScriptError("Not allowed to access async methods")
        # Whitelist which attributes scripts may read on sensitive objects;
        # the ALLOWED_* sets are defined at module level.
        if (
            obj is hass
            and name not in ALLOWED_HASS
            or obj is hass.bus
            and name not in ALLOWED_EVENTBUS
            or obj is hass.states
            and name not in ALLOWED_STATEMACHINE
            or obj is hass.services
            and name not in ALLOWED_SERVICEREGISTRY
            or obj is dt_util
            and name not in ALLOWED_DT_UTIL
            or obj is datetime
            and name not in ALLOWED_DATETIME
            or isinstance(obj, TimeWrapper)
            and name not in ALLOWED_TIME
        ):
            raise ScriptError(f"Not allowed to access {obj.__class__.__name__}.{name}")
        return getattr(obj, name, default)

    extra_builtins = {
        "datetime": datetime,
        "sorted": sorted,
        "time": TimeWrapper(),
        "dt_util": dt_util,
        "min": min,
        "max": max,
        "sum": sum,
        "any": any,
        "all": all,
    }
    builtins = safe_builtins.copy()
    builtins.update(utility_builtins)
    builtins.update(limited_builtins)
    builtins.update(extra_builtins)
    # Fix: name the logger after the executing script instead of the literal
    # "(unknown)", so log records identify which script emitted them.
    logger = logging.getLogger(f"{__name__}.{filename}")
    restricted_globals = {
        "__builtins__": builtins,
        "_print_": StubPrinter,
        "_getattr_": protected_getattr,
        "_write_": full_write_guard,
        "_getiter_": iter,
        "_getitem_": default_guarded_getitem,
        "_iter_unpack_sequence_": guarded_iter_unpack_sequence,
        "_unpack_sequence_": guarded_unpack_sequence,
        "hass": hass,
        "data": data or {},
        "logger": logger,
    }
    try:
        _LOGGER.info("Executing %s: %s", filename, data)
        # pylint: disable=exec-used
        exec(compiled.code, restricted_globals)
    except ScriptError as err:
        logger.error("Error executing script: %s", err)
    except Exception as err:  # pylint: disable=broad-except
        logger.exception("Error executing script: %s", err)
class StubPrinter:
    """RestrictedPython print collector that discourages use of print()."""

    def __init__(self, _getattr_):
        """Accept the guarded getattr hook; nothing needs to be stored."""

    # pylint: disable=no-self-use
    def _call_print(self, *objects, **kwargs):
        """Swallow the print call and emit a warning instead."""
        _LOGGER.warning("Don't use print() inside scripts. Use logger.info() instead")
class TimeWrapper:
    """Proxy for the time module handed to sandboxed scripts."""

    # Shared flag so the sleep warning fires at most once per process.
    warned = False

    def sleep(self, *args, **kwargs):  # pylint: disable=no-self-use
        """Delegate to time.sleep, warning on first use."""
        if not TimeWrapper.warned:
            TimeWrapper.warned = True
            _LOGGER.warning(
                "Using time.sleep can reduce the performance of Home Assistant"
            )
        time.sleep(*args, **kwargs)

    def __getattr__(self, attr):
        """Resolve any other attribute against the real time module."""
        target = getattr(time, attr)
        if not callable(target):
            return target

        def call_through(*args, **kw):
            """Forward the call to the wrapped time function."""
            return target(*args, **kw)

        return call_through
| apache-2.0 |
exosite-ready/libcoap | doc/html/search/groups_61.js | 115 | var searchData=
[
['asynchronous_20messaging',['Asynchronous Messaging',['../group__coap__async.html',1,'']]]
];
| bsd-2-clause |
wmorin/homebrew-cask | Casks/codekit.rb | 472 | cask :v1 => 'codekit' do
version '2.3.7-18917'
sha256 '5958b170026f37bb78b31a9251cd1ccafb2239d9a85e9729593948b9d00255fc'
url "http://incident57.com/codekit/files/codekit-#{version.sub(%r{.*-},'')}.zip"
appcast 'https://incident57.com/codekit/appcast/ck2appcast.xml',
:sha256 => 'fba4e9552ebabca2b700f6bdcdbb83132856d6c467f536250fc34beed9a8f104'
name 'CodeKit'
homepage 'http://incident57.com/codekit/'
license :commercial
app 'CodeKit.app'
end
| bsd-2-clause |
cfillion/homebrew-cask | Casks/mcedit.rb | 570 | cask 'mcedit' do
version '1.5.4.1'
sha256 '29bde806dc415435296e14613f9052a38891e87189f63a30bd69494e13708953'
# github.com/Khroki/MCEdit-Unified was verified as official when first introduced to the cask
url "https://github.com/Khroki/MCEdit-Unified/releases/download/#{version}/MCEdit.v#{version}.OSX.64bit.zip"
appcast 'https://github.com/Khroki/MCEdit-Unified/releases.atom',
checkpoint: '5006f92d219e88b424172f139c433f225ec4e6255010c59b6feef67420ee1b93'
name 'MCEdit-Unified'
homepage 'http://www.mcedit-unified.net/'
app 'mcedit.app'
end
| bsd-2-clause |
nmfzone/gopherjs | compiler/filter/assign.go | 2415 | package filter
import (
"go/ast"
"go/token"
"github.com/gopherjs/gopherjs/compiler/analysis"
"github.com/gopherjs/gopherjs/compiler/astutil"
)
// Assign desugars an augmented assignment statement (x op= y) into the
// equivalent plain assignment x = x op (y). Since the rewritten form
// evaluates the left-hand side twice, operands that may have side effects
// (index, selector and dereference sub-expressions) are first captured in
// temporary variables; all generated statements are returned as one block.
// Statements that are not augmented assignments are returned unchanged.
func Assign(stmt ast.Stmt, info *analysis.Info) ast.Stmt {
	if s, ok := stmt.(*ast.AssignStmt); ok && s.Tok != token.ASSIGN && s.Tok != token.DEFINE {
		// Map the augmented-assignment token to its plain binary operator.
		var op token.Token
		switch s.Tok {
		case token.ADD_ASSIGN:
			op = token.ADD
		case token.SUB_ASSIGN:
			op = token.SUB
		case token.MUL_ASSIGN:
			op = token.MUL
		case token.QUO_ASSIGN:
			op = token.QUO
		case token.REM_ASSIGN:
			op = token.REM
		case token.AND_ASSIGN:
			op = token.AND
		case token.OR_ASSIGN:
			op = token.OR
		case token.XOR_ASSIGN:
			op = token.XOR
		case token.SHL_ASSIGN:
			op = token.SHL
		case token.SHR_ASSIGN:
			op = token.SHR
		case token.AND_NOT_ASSIGN:
			op = token.AND_NOT
		default:
			panic(s.Tok)
		}
		var list []ast.Stmt
		// viaTmpVars returns an expression equivalent to expr in which every
		// sub-expression with potential side effects has been replaced by a
		// fresh temporary; the `tmp := subexpr` definitions are appended to
		// list so they execute exactly once, before the rewritten assignment.
		var viaTmpVars func(expr ast.Expr, name string) ast.Expr
		viaTmpVars = func(expr ast.Expr, name string) ast.Expr {
			switch e := astutil.RemoveParens(expr).(type) {
			case *ast.IndexExpr:
				return astutil.SetType(info.Info, info.Types[e].Type, &ast.IndexExpr{
					X:     viaTmpVars(e.X, "_slice"),
					Index: viaTmpVars(e.Index, "_index"),
				})
			case *ast.SelectorExpr:
				sel, ok := info.Selections[e]
				if !ok {
					// qualified identifier
					return e
				}
				newSel := &ast.SelectorExpr{
					X:   viaTmpVars(e.X, "_struct"),
					Sel: e.Sel,
				}
				// Carry the selection info over to the rebuilt selector node.
				info.Selections[newSel] = sel
				return astutil.SetType(info.Info, info.Types[e].Type, newSel)
			case *ast.StarExpr:
				return astutil.SetType(info.Info, info.Types[e].Type, &ast.StarExpr{
					X: viaTmpVars(e.X, "_ptr"),
				})
			case *ast.Ident, *ast.BasicLit:
				// Identifiers and literals are side-effect free; use as is.
				return e
			default:
				tmpVar := astutil.NewIdent(name, info.Types[e].Type, info.Info, info.Pkg)
				list = append(list, &ast.AssignStmt{
					Lhs: []ast.Expr{tmpVar},
					Tok: token.DEFINE,
					Rhs: []ast.Expr{e},
				})
				return tmpVar
			}
		}
		lhs := viaTmpVars(s.Lhs[0], "_val")
		list = append(list, &ast.AssignStmt{
			Lhs: []ast.Expr{lhs},
			Tok: token.ASSIGN,
			Rhs: []ast.Expr{
				astutil.SetType(info.Info, info.Types[s.Lhs[0]].Type, &ast.BinaryExpr{
					X:  lhs,
					Op: op,
					Y: astutil.SetType(info.Info, info.Types[s.Rhs[0]].Type, &ast.ParenExpr{
						X: s.Rhs[0],
					}),
				}),
			},
		})
		return &ast.BlockStmt{
			List: list,
		}
	}
	return stmt
}
alexg0/homebrew-cask | Casks/filemaker-pro.rb | 576 | cask "filemaker-pro" do
version "19.2.1.14"
sha256 "7e57f30e4f1a72063824240eff78b795de148a5233bfbfc6aaf10c101a269496"
url "https://downloads.claris.com/esd/fmp_#{version}.dmg"
appcast "https://www.filemaker.com/redirects/ss.txt"
name "FileMaker Pro"
desc "Relational database and rapid application development platform"
homepage "https://www.claris.com/filemaker/"
auto_updates true
depends_on macos: ">= :mojave"
app "FileMaker Pro.app"
caveats do
license "https://www.claris.com/company/legal/docs/eula/filemaker-pro/fmp_eula_en.pdf"
end
end
| bsd-2-clause |
axodys/homebrew-cask | Casks/mailmate.rb | 360 | cask 'mailmate' do
version :latest
sha256 :no_check
# app.com is the official download host per the vendor homepage
url 'http://dl.mailmate-app.com/MailMate.tbz'
name 'MailMate'
homepage 'http://freron.com/'
license :commercial
app 'MailMate.app'
binary 'MailMate.app/Contents/Resources/emate'
caveats do
files_in_usr_local
end
end
| bsd-2-clause |
jirkadanek/Strongtalk | vm/recompiler/recompiler.hpp | 3308 | /* Copyright 1994, 1995 LongView Technologies L.L.C. $Revision: 1.12 $ */
/* Copyright (c) 2006, Sun Microsystems, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Sun Microsystems nor the names of its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
*/
#ifdef DELTA_COMPILER
// The RecompilationPolicy selects which method (if any) should be recompiled.
class RecompilationPolicy : public ResourceObj {
 protected:
  GrowableArray<RFrame*>* stack;  // frames examined so far; extended on demand by senderOf()
  char* msg;  // for (performance) debugging: reason for not going up, etc.
  RFrame* senderOf(RFrame* rf);  // return rf->sender() and update stack if necessary
  RFrame* parentOfBlock(blockClosureOop blk);  // block's parent frame (or NULL)
  RFrame* parentOf(RFrame* rf);  // same for rf->parent()
  RFrame* senderOrParentOf(RFrame* rf);  // either sender or parent, depending on various factors
  RFrame* findTopInlinableFrame();
  void checkCurrent(RFrame*& current, RFrame*& prev, RFrame*& prevMethod);
  void fixBlockParent(RFrame* rf);
  void printStack();
 public:
  RecompilationPolicy(RFrame* first);
  Recompilee* findRecompilee();  // determine what to recompile
  void cleanupStaleInlineCaches();  // clean up inline caches of top methods
  static bool needRecompileCounter(Compiler* c);  // does this compilation (nmethod) need an invocation counter?
  static bool shouldRecompileAfterUncommonTrap(nmethod* nm);
  // nm encountered an uncommon case; should it be recompiled?
  static bool shouldRecompileUncommonNMethod(nmethod* nm);
  // nm is in uncommon mode; ok to recompile and reoptimize it?
  static char* shouldNotRecompileNMethod(nmethod* nm);
  // is nm fit to be recompiled? return NULL if yes, reason otherwise
  static int uncommonNMethodInvocationLimit(int version);
  // return invocation counter limit for an uncommon nmethod
  static int uncommonNMethodAgeLimit(int version);
  // return nmethod age limit for an uncommon nmethod
};
#endif
| bsd-3-clause |
pozdnyakov/chromium-crosswalk | chrome/browser/media_galleries/fileapi/native_media_file_util.cc | 24084 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/media_galleries/fileapi/native_media_file_util.h"
#include "base/bind.h"
#include "base/file_util.h"
#include "base/files/file_enumerator.h"
#include "base/strings/string_util.h"
#include "base/task_runner_util.h"
#include "chrome/browser/media_galleries/fileapi/media_file_system_mount_point_provider.h"
#include "chrome/browser/media_galleries/fileapi/media_path_filter.h"
#include "content/public/browser/browser_thread.h"
#include "googleurl/src/gurl.h"
#include "net/base/mime_sniffer.h"
#include "webkit/browser/fileapi/file_system_context.h"
#include "webkit/browser/fileapi/file_system_operation_context.h"
#include "webkit/browser/fileapi/file_system_task_runners.h"
#include "webkit/browser/fileapi/native_file_util.h"
#include "webkit/common/blob/shareable_file_reference.h"
namespace chrome {
namespace {
// Modelled after ScopedFILEClose.
// Deleter for scoped_ptr: closes a PlatformFile unless it is the invalid
// sentinel value.
struct ScopedPlatformFileClose {
  void operator()(base::PlatformFile* file) {
    if (file && *file != base::kInvalidPlatformFileValue)
      base::ClosePlatformFile(*file);
  }
};
typedef scoped_ptr<base::PlatformFile, ScopedPlatformFileClose>
    ScopedPlatformFile;
// Used to skip the hidden folders and files. Returns true if the file specified
// by |path| should be skipped.
bool ShouldSkip(const base::FilePath& path) {
  const base::FilePath::StringType base_name = path.BaseName().value();
  if (base_name.empty())
    return false;
  // Dot files (aka hidden files)
  if (base_name[0] == '.')
    return true;
  // Mac OS X file.
  if (base_name == FILE_PATH_LITERAL("__MACOSX"))
    return true;
#if defined(OS_WIN)
  DWORD file_attributes = ::GetFileAttributes(path.value().c_str());
  if ((file_attributes != INVALID_FILE_ATTRIBUTES) &&
      ((file_attributes & FILE_ATTRIBUTE_HIDDEN) != 0))
    return true;
#else
  // Windows always creates a recycle bin folder in the attached device to store
  // all the deleted contents. On non-windows operating systems, there is no way
  // to get the hidden attribute of windows recycle bin folders that are present
  // on the attached device. Therefore, compare the file path name to the
  // recycle bin name and exclude those folders. For more details, please refer
  // to http://support.microsoft.com/kb/171694.
  const char win_98_recycle_bin_name[] = "RECYCLED";
  const char win_xp_recycle_bin_name[] = "RECYCLER";
  const char win_vista_recycle_bin_name[] = "$Recycle.bin";
  if ((base::strncasecmp(base_name.c_str(),
                         win_98_recycle_bin_name,
                         strlen(win_98_recycle_bin_name)) == 0) ||
      (base::strncasecmp(base_name.c_str(),
                         win_xp_recycle_bin_name,
                         strlen(win_xp_recycle_bin_name)) == 0) ||
      (base::strncasecmp(base_name.c_str(),
                         win_vista_recycle_bin_name,
                         strlen(win_vista_recycle_bin_name)) == 0))
    return true;
#endif
  return false;
}
// Returns true if the current thread is capable of doing IO.
bool IsOnTaskRunnerThread(fileapi::FileSystemOperationContext* context) {
  return context->task_runner()->RunsTasksOnCurrentThread();
}
// Fetches the MediaPathFilter that the mount point provider stashed on the
// operation context.
MediaPathFilter* GetMediaPathFilter(
    fileapi::FileSystemOperationContext* context) {
  return context->GetUserValue<MediaPathFilter*>(
      MediaFileSystemMountPointProvider::kMediaPathFilterKey);
}
}  // namespace
NativeMediaFileUtil::NativeMediaFileUtil() : weak_factory_(this) {
}
// Trivial destructor; the WeakPtrFactory invalidates outstanding weak
// pointers on destruction.
NativeMediaFileUtil::~NativeMediaFileUtil() {
}
// static
// Opens |path|, sniffs its leading bytes, and reports PLATFORM_FILE_OK only
// when the content looks like media (image/audio/video or Flash).
base::PlatformFileError NativeMediaFileUtil::IsMediaFile(
    const base::FilePath& path) {
  base::PlatformFile file_handle;
  const int flags = base::PLATFORM_FILE_OPEN | base::PLATFORM_FILE_READ;
  base::PlatformFileError error =
      fileapi::NativeFileUtil::CreateOrOpen(path, flags, &file_handle, NULL);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  // Closes |file_handle| on every return path below.
  ScopedPlatformFile scoped_platform_file(&file_handle);
  char buffer[net::kMaxBytesToSniff];
  // Read as much as net::SniffMimeTypeFromLocalData() will bother looking at.
  int64 len =
      base::ReadPlatformFile(file_handle, 0, buffer, net::kMaxBytesToSniff);
  if (len < 0)
    return base::PLATFORM_FILE_ERROR_FAILED;
  // Empty files cannot be sniffed, so they are rejected as non-media.
  if (len == 0)
    return base::PLATFORM_FILE_ERROR_SECURITY;
  std::string mime_type;
  if (!net::SniffMimeTypeFromLocalData(buffer, len, &mime_type))
    return base::PLATFORM_FILE_ERROR_SECURITY;
  if (StartsWithASCII(mime_type, "image/", true) ||
      StartsWithASCII(mime_type, "audio/", true) ||
      StartsWithASCII(mime_type, "video/", true) ||
      mime_type == "application/x-shockwave-flash") {
    return base::PLATFORM_FILE_OK;
  }
  return base::PLATFORM_FILE_ERROR_SECURITY;
}
bool NativeMediaFileUtil::CreateOrOpen(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    int file_flags,
    const CreateOrOpenCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Only called by NaCl, which should not have access to media file systems.
  // Always answer with a security error and an invalid file handle.
  base::PlatformFile invalid_file(base::kInvalidPlatformFileValue);
  if (callback.is_null())
    return true;
  callback.Run(base::PLATFORM_FILE_ERROR_SECURITY,
               base::PassPlatformFile(&invalid_file),
               false);
  return true;
}
bool NativeMediaFileUtil::EnsureFileExists(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    const EnsureFileExistsCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // File creation on media file systems is denied outright.
  if (callback.is_null())
    return true;
  callback.Run(base::PLATFORM_FILE_ERROR_SECURITY, false);
  return true;
}
bool NativeMediaFileUtil::CreateDirectory(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    bool exclusive,
    bool recursive,
    const StatusCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Hop to the media task runner; base::Owned transfers ownership of the
  // released |context| to the posted task.
  fileapi::FileSystemOperationContext* context_ptr = context.release();
  return context_ptr->task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&NativeMediaFileUtil::CreateDirectoryOnTaskRunnerThread,
                 weak_factory_.GetWeakPtr(), base::Owned(context_ptr),
                 url, exclusive, recursive, callback));
}
bool NativeMediaFileUtil::GetFileInfo(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    const GetFileInfoCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Stat work runs on the media task runner; base::Owned keeps |context|
  // alive for the task.
  fileapi::FileSystemOperationContext* context_ptr = context.release();
  return context_ptr->task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&NativeMediaFileUtil::GetFileInfoOnTaskRunnerThread,
                 weak_factory_.GetWeakPtr(), base::Owned(context_ptr),
                 url, callback));
}
bool NativeMediaFileUtil::ReadDirectory(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    const ReadDirectoryCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Directory enumeration runs on the media task runner.
  fileapi::FileSystemOperationContext* context_ptr = context.release();
  return context_ptr->task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&NativeMediaFileUtil::ReadDirectoryOnTaskRunnerThread,
                 weak_factory_.GetWeakPtr(), base::Owned(context_ptr),
                 url, callback));
}
bool NativeMediaFileUtil::Touch(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    const base::Time& last_access_time,
    const base::Time& last_modified_time,
    const StatusCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Timestamp updates on media file systems are denied.
  if (callback.is_null())
    return true;
  callback.Run(base::PLATFORM_FILE_ERROR_SECURITY);
  return true;
}
bool NativeMediaFileUtil::Truncate(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    int64 length,
    const StatusCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Truncation on media file systems is denied.
  if (callback.is_null())
    return true;
  callback.Run(base::PLATFORM_FILE_ERROR_SECURITY);
  return true;
}
bool NativeMediaFileUtil::CopyFileLocal(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& src_url,
    const fileapi::FileSystemURL& dest_url,
    const StatusCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Shares CopyOrMoveFileLocalOnTaskRunnerThread with MoveFileLocal;
  // the |copy| flag (true here) selects copy semantics.
  fileapi::FileSystemOperationContext* context_ptr = context.release();
  return context_ptr->task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&NativeMediaFileUtil::CopyOrMoveFileLocalOnTaskRunnerThread,
                 weak_factory_.GetWeakPtr(), base::Owned(context_ptr),
                 src_url, dest_url, true /* copy */, callback));
}
bool NativeMediaFileUtil::MoveFileLocal(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& src_url,
    const fileapi::FileSystemURL& dest_url,
    const StatusCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Same worker as CopyFileLocal; |copy| == false selects move semantics.
  fileapi::FileSystemOperationContext* context_ptr = context.release();
  return context_ptr->task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&NativeMediaFileUtil::CopyOrMoveFileLocalOnTaskRunnerThread,
                 weak_factory_.GetWeakPtr(), base::Owned(context_ptr),
                 src_url, dest_url, false /* copy */, callback));
}
bool NativeMediaFileUtil::CopyInForeignFile(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const base::FilePath& src_file_path,
    const fileapi::FileSystemURL& dest_url,
    const StatusCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Imports a platform file from outside this file system; work happens on
  // the media task runner.
  fileapi::FileSystemOperationContext* context_ptr = context.release();
  return context_ptr->task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&NativeMediaFileUtil::CopyInForeignFileOnTaskRunnerThread,
                 weak_factory_.GetWeakPtr(), base::Owned(context_ptr),
                 src_file_path, dest_url, callback));
}
bool NativeMediaFileUtil::DeleteFile(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    const StatusCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // File deletion on media file systems is denied.
  if (callback.is_null())
    return true;
  callback.Run(base::PLATFORM_FILE_ERROR_SECURITY);
  return true;
}
// This is needed to support Copy and Move.
bool NativeMediaFileUtil::DeleteDirectory(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    const StatusCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Unlike DeleteFile, directory deletion is allowed and runs on the media
  // task runner.
  fileapi::FileSystemOperationContext* context_ptr = context.release();
  return context_ptr->task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&NativeMediaFileUtil::DeleteDirectoryOnTaskRunnerThread,
                 weak_factory_.GetWeakPtr(), base::Owned(context_ptr),
                 url, callback));
}
bool NativeMediaFileUtil::CreateSnapshotFile(
    scoped_ptr<fileapi::FileSystemOperationContext> context,
    const fileapi::FileSystemURL& url,
    const CreateSnapshotFileCallback& callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::IO));
  // Snapshot creation runs on the media task runner.
  fileapi::FileSystemOperationContext* context_ptr = context.release();
  return context_ptr->task_runner()->PostTask(
      FROM_HERE,
      base::Bind(&NativeMediaFileUtil::CreateSnapshotFileOnTaskRunnerThread,
                 weak_factory_.GetWeakPtr(), base::Owned(context_ptr),
                 url, callback));
}
void NativeMediaFileUtil::CreateDirectoryOnTaskRunnerThread(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    bool exclusive,
    bool recursive,
    const StatusCallback& callback) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::PlatformFileError error =
      CreateDirectorySync(context, url, exclusive, recursive);
  if (callback.is_null())
    return;
  // Bounce the result back to the IO thread where the caller lives.
  content::BrowserThread::PostTask(
      content::BrowserThread::IO,
      FROM_HERE,
      base::Bind(callback, error));
}
void NativeMediaFileUtil::GetFileInfoOnTaskRunnerThread(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    const GetFileInfoCallback& callback) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::PlatformFileInfo file_info;
  // TODO(thestig): remove this.
  base::FilePath platform_path;
  base::PlatformFileError error =
      GetFileInfoSync(context, url, &file_info, &platform_path);
  if (callback.is_null())
    return;
  // Bounce the result back to the IO thread; |platform_path| is discarded.
  content::BrowserThread::PostTask(
      content::BrowserThread::IO,
      FROM_HERE,
      base::Bind(callback, error, file_info));
}
void NativeMediaFileUtil::ReadDirectoryOnTaskRunnerThread(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    const ReadDirectoryCallback& callback) {
  DCHECK(IsOnTaskRunnerThread(context));
  EntryList entry_list;
  base::PlatformFileError error =
      ReadDirectorySync(context, url, &entry_list);
  if (callback.is_null())
    return;
  // The entire listing is returned in one batch (has_more == false).
  content::BrowserThread::PostTask(
      content::BrowserThread::IO,
      FROM_HERE,
      base::Bind(callback, error, entry_list, false /* has_more */));
}
void NativeMediaFileUtil::CopyOrMoveFileLocalOnTaskRunnerThread(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& src_url,
    const fileapi::FileSystemURL& dest_url,
    bool copy,
    const StatusCallback& callback) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::PlatformFileError error =
      CopyOrMoveFileSync(context, src_url, dest_url, copy);
  if (callback.is_null())
    return;
  // Bounce the result back to the IO thread.
  content::BrowserThread::PostTask(
      content::BrowserThread::IO,
      FROM_HERE,
      base::Bind(callback, error));
}
void NativeMediaFileUtil::CopyInForeignFileOnTaskRunnerThread(
    fileapi::FileSystemOperationContext* context,
    const base::FilePath& src_file_path,
    const fileapi::FileSystemURL& dest_url,
    const StatusCallback& callback) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::PlatformFileError error =
      CopyInForeignFileSync(context, src_file_path, dest_url);
  if (callback.is_null())
    return;
  // Bounce the result back to the IO thread.
  content::BrowserThread::PostTask(
      content::BrowserThread::IO,
      FROM_HERE,
      base::Bind(callback, error));
}
void NativeMediaFileUtil::DeleteDirectoryOnTaskRunnerThread(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    const StatusCallback& callback) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::PlatformFileError error = DeleteDirectorySync(context, url);
  if (callback.is_null())
    return;
  // Bounce the result back to the IO thread.
  content::BrowserThread::PostTask(
      content::BrowserThread::IO,
      FROM_HERE,
      base::Bind(callback, error));
}
void NativeMediaFileUtil::CreateSnapshotFileOnTaskRunnerThread(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    const CreateSnapshotFileCallback& callback) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::PlatformFileInfo file_info;
  base::FilePath platform_path;
  scoped_refptr<webkit_blob::ShareableFileReference> file_ref;
  base::PlatformFileError error =
      CreateSnapshotFileSync(context, url, &file_info, &platform_path,
                             &file_ref);
  if (callback.is_null())
    return;
  // Bounce the snapshot info (and file reference) back to the IO thread.
  content::BrowserThread::PostTask(
      content::BrowserThread::IO,
      FROM_HERE,
      base::Bind(callback, error, file_info, platform_path, file_ref));
}
base::PlatformFileError NativeMediaFileUtil::CreateDirectorySync(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    bool exclusive,
    bool recursive) {
  // Resolve the URL to a local path and delegate to the generic file util.
  base::FilePath file_path;
  base::PlatformFileError error = GetLocalFilePath(context, url, &file_path);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  return fileapi::NativeFileUtil::CreateDirectory(file_path, exclusive,
                                                  recursive);
}
base::PlatformFileError NativeMediaFileUtil::CopyOrMoveFileSync(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& src_url,
    const fileapi::FileSystemURL& dest_url,
    bool copy) {
  DCHECK(IsOnTaskRunnerThread(context));
  // The source must resolve to an existing entry that passes the filter.
  base::FilePath src_file_path;
  base::PlatformFileError error =
      GetFilteredLocalFilePathForExistingFileOrDirectory(
          context, src_url,
          base::PLATFORM_FILE_ERROR_NOT_FOUND,
          &src_file_path);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  // Directories cannot be copied/moved through this file-level API.
  if (fileapi::NativeFileUtil::DirectoryExists(src_file_path))
    return base::PLATFORM_FILE_ERROR_NOT_A_FILE;
  base::FilePath dest_file_path;
  error = GetLocalFilePath(context, dest_url, &dest_file_path);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  base::PlatformFileInfo file_info;
  error = fileapi::NativeFileUtil::GetFileInfo(dest_file_path, &file_info);
  // NOT_FOUND is acceptable: the destination may not exist yet.
  if (error != base::PLATFORM_FILE_OK &&
      error != base::PLATFORM_FILE_ERROR_NOT_FOUND)
    return error;
  if (error == base::PLATFORM_FILE_OK && file_info.is_directory)
    return base::PLATFORM_FILE_ERROR_INVALID_OPERATION;
  // The destination name must match the media path filter.
  if (!GetMediaPathFilter(context)->Match(dest_file_path))
    return base::PLATFORM_FILE_ERROR_SECURITY;
  return fileapi::NativeFileUtil::CopyOrMoveFile(src_file_path, dest_file_path,
                                                 copy);
}
base::PlatformFileError NativeMediaFileUtil::CopyInForeignFileSync(
    fileapi::FileSystemOperationContext* context,
    const base::FilePath& src_file_path,
    const fileapi::FileSystemURL& dest_url) {
  DCHECK(IsOnTaskRunnerThread(context));
  if (src_file_path.empty())
    return base::PLATFORM_FILE_ERROR_INVALID_OPERATION;
  // The destination is resolved via GetFilteredLocalFilePath (defined
  // elsewhere in this file), which validates it before the copy.
  base::FilePath dest_file_path;
  base::PlatformFileError error =
      GetFilteredLocalFilePath(context, dest_url, &dest_file_path);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  // Foreign files are always copied in (last argument: copy == true).
  return fileapi::NativeFileUtil::CopyOrMoveFile(src_file_path, dest_file_path,
                                                 true);
}
base::PlatformFileError NativeMediaFileUtil::GetFileInfoSync(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    base::PlatformFileInfo* file_info,
    base::FilePath* platform_path) {
  DCHECK(context);
  DCHECK(IsOnTaskRunnerThread(context));
  DCHECK(file_info);
  DCHECK(GetMediaPathFilter(context));
  base::FilePath file_path;
  base::PlatformFileError error = GetLocalFilePath(context, url, &file_path);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  // Symlinks are hidden from media galleries; treat them as missing.
  if (file_util::IsLink(file_path))
    return base::PLATFORM_FILE_ERROR_NOT_FOUND;
  error = fileapi::NativeFileUtil::GetFileInfo(file_path, file_info);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  if (platform_path)
    *platform_path = file_path;
  // Directories always pass; files must match the media path filter.
  if (file_info->is_directory ||
      GetMediaPathFilter(context)->Match(file_path)) {
    return base::PLATFORM_FILE_OK;
  }
  return base::PLATFORM_FILE_ERROR_NOT_FOUND;
}
// Maps |url| directly onto a local file path. Performs no media filtering;
// callers that need filtering use the GetFilteredLocalFilePath* variants.
base::PlatformFileError NativeMediaFileUtil::GetLocalFilePath(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    base::FilePath* local_file_path) {
  DCHECK(local_file_path);
  DCHECK(url.is_valid());
  if (url.path().empty()) {
    // Root directory case, which should not be accessed.
    return base::PLATFORM_FILE_ERROR_ACCESS_DENIED;
  }
  *local_file_path = url.path();
  return base::PLATFORM_FILE_OK;
}
// Enumerates the directory at |url| into |file_list|, one level deep.
// Symlinks, entries rejected by ShouldSkip(), and non-media files are
// omitted; subdirectories are always listed.
base::PlatformFileError NativeMediaFileUtil::ReadDirectorySync(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    EntryList* file_list) {
  DCHECK(IsOnTaskRunnerThread(context));
  DCHECK(file_list);
  DCHECK(file_list->empty());
  base::PlatformFileInfo file_info;
  base::FilePath dir_path;
  // GetFileInfoSync also applies the media filter / symlink checks to the
  // directory itself.
  base::PlatformFileError error =
      GetFileInfoSync(context, url, &file_info, &dir_path);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  if (!file_info.is_directory)
    return base::PLATFORM_FILE_ERROR_NOT_A_DIRECTORY;
  base::FileEnumerator file_enum(
      dir_path,
      false /* recursive */,
      base::FileEnumerator::FILES | base::FileEnumerator::DIRECTORIES);
  for (base::FilePath enum_path = file_enum.Next();
       !enum_path.empty();
       enum_path = file_enum.Next()) {
    // Skip symlinks.
    if (file_util::IsLink(enum_path))
      continue;
    base::FileEnumerator::FileInfo info = file_enum.GetInfo();
    // NativeMediaFileUtil skip criteria.
    if (ShouldSkip(enum_path))
      continue;
    // Plain files must pass the media filter; directories always qualify.
    if (!info.IsDirectory() && !GetMediaPathFilter(context)->Match(enum_path))
      continue;
    fileapi::DirectoryEntry entry;
    entry.is_directory = info.IsDirectory();
    entry.name = enum_path.BaseName().value();
    entry.size = info.GetSize();
    entry.last_modified_time = info.GetLastModifiedTime();
    file_list->push_back(entry);
  }
  return base::PLATFORM_FILE_OK;
}
// Deletes the directory at |url|. Runs on the task runner thread.
base::PlatformFileError NativeMediaFileUtil::DeleteDirectorySync(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url) {
  DCHECK(IsOnTaskRunnerThread(context));
  // Map the URL onto the local filesystem; resolution failures propagate
  // straight to the caller.
  base::FilePath dir_path;
  base::PlatformFileError status = GetLocalFilePath(context, url, &dir_path);
  return status == base::PLATFORM_FILE_OK
             ? fileapi::NativeFileUtil::DeleteDirectory(dir_path)
             : status;
}
// Produces a "snapshot" for |url|. Since the data already lives on local
// disk, no copy is made: the local path and metadata are returned and
// |file_ref| is left empty. Directories and non-media content are rejected.
base::PlatformFileError NativeMediaFileUtil::CreateSnapshotFileSync(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& url,
    base::PlatformFileInfo* file_info,
    base::FilePath* platform_path,
    scoped_refptr<webkit_blob::ShareableFileReference>* file_ref) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::PlatformFileError error =
      GetFileInfoSync(context, url, file_info, platform_path);
  if (error == base::PLATFORM_FILE_OK && file_info->is_directory)
    error = base::PLATFORM_FILE_ERROR_NOT_A_FILE;
  // Verify the file content actually is media, not just its name.
  if (error == base::PLATFORM_FILE_OK)
    error = NativeMediaFileUtil::IsMediaFile(*platform_path);
  // We're just returning the local file information.
  *file_ref = scoped_refptr<webkit_blob::ShareableFileReference>();
  return error;
}
// Resolves |file_system_url| to a local path, rejecting any path that the
// media filter does not accept.
base::PlatformFileError NativeMediaFileUtil::GetFilteredLocalFilePath(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& file_system_url,
    base::FilePath* local_file_path) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::FilePath candidate_path;
  base::PlatformFileError status =
      GetLocalFilePath(context, file_system_url, &candidate_path);
  if (status != base::PLATFORM_FILE_OK)
    return status;
  // Anything that does not look like a media file is a security error.
  if (!GetMediaPathFilter(context)->Match(candidate_path))
    return base::PLATFORM_FILE_ERROR_SECURITY;
  *local_file_path = candidate_path;
  return base::PLATFORM_FILE_OK;
}
// Resolves |file_system_url| to a local path that must already exist.
// Directories always pass; files must additionally match the media filter.
// A missing or filtered-out entry is reported as |failure_error| so callers
// can choose the error they surface.
base::PlatformFileError
NativeMediaFileUtil::GetFilteredLocalFilePathForExistingFileOrDirectory(
    fileapi::FileSystemOperationContext* context,
    const fileapi::FileSystemURL& file_system_url,
    base::PlatformFileError failure_error,
    base::FilePath* local_file_path) {
  DCHECK(IsOnTaskRunnerThread(context));
  base::FilePath file_path;
  base::PlatformFileError error =
      GetLocalFilePath(context, file_system_url, &file_path);
  if (error != base::PLATFORM_FILE_OK)
    return error;
  if (!file_util::PathExists(file_path))
    return failure_error;
  base::PlatformFileInfo file_info;
  if (!file_util::GetFileInfo(file_path, &file_info))
    return base::PLATFORM_FILE_ERROR_FAILED;
  // Only non-directories are subject to the media filter.
  if (!file_info.is_directory &&
      !GetMediaPathFilter(context)->Match(file_path)) {
    return failure_error;
  }
  *local_file_path = file_path;
  return base::PLATFORM_FILE_OK;
}
} // namespace chrome
| bsd-3-clause |
jeffblack360/mojito | tests/func/examples/developerguide/test_hello.js | 548 | /*
* This is a basic func test for a UseCase application.
*/
YUI({
    useConsoleOutput: true,
    useBrowserConsole: true,
    logInclude: { TestRunner: true }
}).use('node', 'node-event-simulate', 'test', 'console', function (Y) {
    // One suite, one case: the sample app renders its greeting in a <pre>.
    var helloSuite = new Y.Test.Suite("DeveloperGuide: hello");
    var helloCase = new Y.Test.Case({
        "test hello": function () {
            Y.Assert.areEqual("Hello World!", Y.one('pre').get('innerHTML'));
        }
    });
    helloSuite.add(helloCase);
    Y.Test.Runner.add(helloSuite);
});
| bsd-3-clause |
jp2masa/Cosmos | Users/Orvid/Orvid.Concurrent/WeakKeyDictionary.2.cs | 11201 | /*
Copyright 2008 The 'A Concurrent Hashtable' development team
(http://www.codeplex.com/CH/People/ProjectPeople.aspx)
This library is licensed under the GNU Library General Public License (LGPL). You should
have received a copy of the license along with the source code. If not, an online copy
of the license can be found at http://www.codeplex.com/CH/license.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Collections;
using System.Runtime.Serialization;
using System.Security;
namespace Orvid.Concurrent.Collections
{
#if !SILVERLIGHT
[Serializable]
#endif
/// <summary>
/// A concurrent dictionary keyed by two weakly referenced keys plus one
/// strongly held key. The weak keys do not keep their referents alive;
/// entries whose weak keys have been collected are purged by the registered
/// maintenance worker.
/// </summary>
public class WeakKeyDictionary<TWeakKey1, TWeakKey2, TStrongKey, TValue> : DictionaryBase<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue>
#if !SILVERLIGHT
    , ISerializable
#endif
    where TWeakKey1 : class
    where TWeakKey2 : class
    where TValue : class
{
    // Bridges the public key forms (Tuple for external use, Stacktype for
    // allocation-free lookups) to the internal weak-key representation
    // required by the shared dictionary base class.
    sealed class InternalWeakDictionary :
        InternalWeakDictionaryStrongValueBase<
            Key<TWeakKey1, TWeakKey2, TStrongKey>,
            Tuple<TWeakKey1, TWeakKey2, TStrongKey>,
            TValue,
            Stacktype<TWeakKey1, TWeakKey2, TStrongKey>
        >
    {
        public InternalWeakDictionary(int concurrencyLevel, int capacity, KeyComparer<TWeakKey1, TWeakKey2, TStrongKey> keyComparer)
            : base(concurrencyLevel, capacity, keyComparer)
        {
            _comparer = keyComparer;
            // Enroll in periodic cleanup of entries with collected weak keys.
            MaintenanceWorker.Register(this);
        }
        public InternalWeakDictionary(KeyComparer<TWeakKey1, TWeakKey2, TStrongKey> keyComparer)
            : base(keyComparer)
        {
            _comparer = keyComparer;
            MaintenanceWorker.Register(this);
        }
        public KeyComparer<TWeakKey1, TWeakKey2, TStrongKey> _comparer;
        // Conversions between external key forms and the internal
        // search/storage key objects.
        protected override Key<TWeakKey1, TWeakKey2, TStrongKey> FromExternalKeyToSearchKey(Tuple<TWeakKey1, TWeakKey2, TStrongKey> externalKey)
        { return new SearchKey<TWeakKey1, TWeakKey2, TStrongKey>().Set(externalKey, _comparer); }
        protected override Key<TWeakKey1, TWeakKey2, TStrongKey> FromExternalKeyToStorageKey(Tuple<TWeakKey1, TWeakKey2, TStrongKey> externalKey)
        { return new StorageKey<TWeakKey1, TWeakKey2, TStrongKey>().Set(externalKey, _comparer); }
        protected override Key<TWeakKey1, TWeakKey2, TStrongKey> FromStackKeyToSearchKey(Stacktype<TWeakKey1, TWeakKey2, TStrongKey> externalKey)
        { return new SearchKey<TWeakKey1, TWeakKey2, TStrongKey>().Set(externalKey, _comparer); }
        protected override Key<TWeakKey1, TWeakKey2, TStrongKey> FromStackKeyToStorageKey(Stacktype<TWeakKey1, TWeakKey2, TStrongKey> externalKey)
        { return new StorageKey<TWeakKey1, TWeakKey2, TStrongKey>().Set(externalKey, _comparer); }
        protected override bool FromInternalKeyToExternalKey(Key<TWeakKey1, TWeakKey2, TStrongKey> internalKey, out Tuple<TWeakKey1, TWeakKey2, TStrongKey> externalKey)
        { return internalKey.Get(out externalKey); }
        protected override bool FromInternalKeyToStackKey(Key<TWeakKey1, TWeakKey2, TStrongKey> internalKey, out Stacktype<TWeakKey1, TWeakKey2, TStrongKey> externalKey)
        { return internalKey.Get(out externalKey); }
    }
    readonly InternalWeakDictionary _internalDictionary;
    protected override IDictionary<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue> InternalDictionary
    { get { return _internalDictionary; } }
#if !SILVERLIGHT
    // Deserialization constructor: restores the comparer and re-inserts the
    // saved key/value pairs.
    WeakKeyDictionary(SerializationInfo serializationInfo, StreamingContext streamingContext)
    {
        var comparer = (KeyComparer<TWeakKey1, TWeakKey2, TStrongKey>)serializationInfo.GetValue("Comparer", typeof(KeyComparer<TWeakKey1, TWeakKey2, TStrongKey>));
        var items = (List<KeyValuePair<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue>>)serializationInfo.GetValue("Items", typeof(List<KeyValuePair<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue>>));
        _internalDictionary = new InternalWeakDictionary(comparer);
        _internalDictionary.InsertContents(items);
    }
    #region ISerializable Members
    [SecurityCritical]
    void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context)
    {
        info.AddValue("Comparer", _internalDictionary._comparer);
        info.AddValue("Items", _internalDictionary.GetContents());
    }
    #endregion
#endif
    /// <summary>Creates an empty dictionary using default comparers.</summary>
    public WeakKeyDictionary()
        : this(EqualityComparer<TWeakKey1>.Default, EqualityComparer<TWeakKey2>.Default, EqualityComparer<TStrongKey>.Default)
    {}
    /// <summary>Creates an empty dictionary with the given key comparers.</summary>
    public WeakKeyDictionary(IEqualityComparer<TWeakKey1> weakKey1Comparer, IEqualityComparer<TWeakKey2> weakKey2Comparer, IEqualityComparer<TStrongKey> strongKeyComparer)
        : this(Enumerable.Empty<KeyValuePair<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue>>(), weakKey1Comparer, weakKey2Comparer, strongKeyComparer)
    {}
    /// <summary>Creates a dictionary pre-populated from <paramref name="collection"/>.</summary>
    public WeakKeyDictionary(IEnumerable<KeyValuePair<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue>> collection)
        : this(collection, EqualityComparer<TWeakKey1>.Default, EqualityComparer<TWeakKey2>.Default, EqualityComparer<TStrongKey>.Default)
    {}
    public WeakKeyDictionary(IEnumerable<KeyValuePair<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue>> collection, IEqualityComparer<TWeakKey1> weakKey1Comparer, IEqualityComparer<TWeakKey2> weakKey2Comparer, IEqualityComparer<TStrongKey> strongKeyComparer)
    {
        _internalDictionary =
            new InternalWeakDictionary(
                new KeyComparer<TWeakKey1, TWeakKey2, TStrongKey>(weakKey1Comparer, weakKey2Comparer, strongKeyComparer)
            )
        ;
        _internalDictionary.InsertContents(collection);
    }
    /// <summary>Creates an empty dictionary with an expected concurrency level and capacity.</summary>
    public WeakKeyDictionary(int concurrencyLevel, int capacity)
        : this(concurrencyLevel, capacity, EqualityComparer<TWeakKey1>.Default, EqualityComparer<TWeakKey2>.Default, EqualityComparer<TStrongKey>.Default)
    {}
    public WeakKeyDictionary(int concurrencyLevel, IEnumerable<KeyValuePair<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue>> collection, IEqualityComparer<TWeakKey1> weakKey1Comparer, IEqualityComparer<TWeakKey2> weakKey2Comparer, IEqualityComparer<TStrongKey> strongKeyComparer)
    {
        // Materialize the source once so its count can size the dictionary.
        var contentsList = collection.ToList();
        _internalDictionary =
            new InternalWeakDictionary(
                concurrencyLevel,
                contentsList.Count,
                new KeyComparer<TWeakKey1, TWeakKey2, TStrongKey>(weakKey1Comparer, weakKey2Comparer, strongKeyComparer)
            )
        ;
        _internalDictionary.InsertContents(contentsList);
    }
    public WeakKeyDictionary(int concurrencyLevel, int capacity, IEqualityComparer<TWeakKey1> weakKey1Comparer, IEqualityComparer<TWeakKey2> weakKey2Comparer, IEqualityComparer<TStrongKey> strongKeyComparer)
    {
        _internalDictionary =
            new InternalWeakDictionary(
                concurrencyLevel,
                capacity,
                new KeyComparer<TWeakKey1, TWeakKey2, TStrongKey>(weakKey1Comparer, weakKey2Comparer, strongKeyComparer)
            )
        ;
    }
    // Key-triple overloads mirroring the ConcurrentDictionary surface; each
    // builds a Stacktype key to avoid allocating a Tuple per call.
    public bool ContainsKey(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey)
    { return _internalDictionary.ContainsKey(Stacktype.Create(weakKey1, weakKey2, strongKey)); }
    public bool TryGetValue(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey, out TValue value)
    { return _internalDictionary.TryGetValue(Stacktype.Create(weakKey1, weakKey2, strongKey), out value); }
    public TValue this[TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey]
    {
        get { return _internalDictionary.GetItem(Stacktype.Create(weakKey1, weakKey2, strongKey)); }
        set { _internalDictionary.SetItem(Stacktype.Create(weakKey1, weakKey2, strongKey), value); }
    }
    public bool IsEmpty
    { get { return _internalDictionary.IsEmpty; } }
    public TValue AddOrUpdate(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey, Func<TWeakKey1, TWeakKey2, TStrongKey, TValue> addValueFactory, Func<TWeakKey1, TWeakKey2, TStrongKey, TValue, TValue> updateValueFactory)
    {
        if (null == addValueFactory)
            throw new ArgumentNullException("addValueFactory");
        if (null == updateValueFactory)
            throw new ArgumentNullException("updateValueFactory");
        return
            _internalDictionary.AddOrUpdate(
                Stacktype.Create(weakKey1, weakKey2, strongKey),
                hr => addValueFactory(hr.Item1, hr.Item2, hr.Item3),
                (hr, v) => updateValueFactory(hr.Item1, hr.Item2, hr.Item3, v)
            )
        ;
    }
    public TValue AddOrUpdate(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey, TValue addValue, Func<TWeakKey1, TWeakKey2, TStrongKey, TValue, TValue> updateValueFactory)
    {
        if (null == updateValueFactory)
            throw new ArgumentNullException("updateValueFactory");
        return
            _internalDictionary.AddOrUpdate(
                Stacktype.Create(weakKey1, weakKey2, strongKey),
                addValue,
                (hr, v) => updateValueFactory(hr.Item1, hr.Item2, hr.Item3, v)
            )
        ;
    }
    public TValue GetOrAdd(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey, TValue value)
    { return _internalDictionary.GetOrAdd(Stacktype.Create(weakKey1, weakKey2, strongKey), value); }
    public TValue GetOrAdd(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey, Func<TWeakKey1, TWeakKey2, TStrongKey, TValue> valueFactory)
    {
        if (null == valueFactory)
            throw new ArgumentNullException("valueFactory");
        return _internalDictionary.GetOrAdd(Stacktype.Create(weakKey1, weakKey2, strongKey), hr => valueFactory(hr.Item1, hr.Item2, hr.Item3));
    }
    /// <summary>Snapshot of all live key/value pairs.</summary>
    public KeyValuePair<Tuple<TWeakKey1, TWeakKey2, TStrongKey>, TValue>[] ToArray()
    { return _internalDictionary.ToArray(); }
    public bool TryAdd(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey, TValue value)
    { return _internalDictionary.TryAdd(Stacktype.Create(weakKey1, weakKey2, strongKey), value); }
    public bool TryRemove(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey, out TValue value)
    { return _internalDictionary.TryRemove(Stacktype.Create(weakKey1, weakKey2, strongKey), out value); }
    public bool TryUpdate(TWeakKey1 weakKey1, TWeakKey2 weakKey2, TStrongKey strongKey, TValue newValue, TValue comparisonValue)
    { return _internalDictionary.TryUpdate(Stacktype.Create(weakKey1, weakKey2, strongKey), newValue, comparisonValue ); }
}
}
| bsd-3-clause |
ltilve/ChromiumGStreamerBackend | chrome/browser/custom_home_pages_table_model.cc | 10521 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/custom_home_pages_table_model.h"
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/i18n/rtl.h"
#include "base/prefs/pref_service.h"
#include "base/strings/utf_string_conversions.h"
#include "chrome/browser/history/history_service_factory.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_iterator.h"
#include "chrome/browser/ui/browser_list.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/common/pref_names.h"
#include "chrome/common/url_constants.h"
#include "chrome/grit/generated_resources.h"
#include "components/history/core/browser/history_service.h"
#include "components/url_formatter/url_formatter.h"
#include "content/public/browser/web_contents.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/base/models/table_model_observer.h"
#include "ui/gfx/codec/png_codec.h"
#include "url/gurl.h"
namespace {
// Checks whether the given URL should count as one of the "current" pages.
// Returns true for all pages except dev tools and settings.
bool ShouldAddPage(const GURL& url) {
  if (url.is_empty())
    return false;
  if (url.SchemeIs(content::kChromeDevToolsScheme))
    return false;
  // Non-chrome:// URLs are always eligible.
  if (!url.SchemeIs(content::kChromeUIScheme))
    return true;
  // Direct settings page.
  if (url.host() == chrome::kChromeUISettingsHost)
    return false;
  // Settings embedded in the uber page: the path is "/settings..." so the
  // settings host string starts at offset 1, not 0.
  const bool is_uber_settings =
      url.host() == chrome::kChromeUIUberHost &&
      url.path().find(chrome::kChromeUISettingsHost) == 1;
  return !is_uber_settings;
}
} // namespace
// Internal bookkeeping for a single table row.
struct CustomHomePagesTableModel::Entry {
  Entry() : task_id(base::CancelableTaskTracker::kBadTaskId) {}
  // URL of the page.
  GURL url;
  // Page title. If this is empty, we'll display the URL as the entry.
  base::string16 title;
  // If not |base::CancelableTaskTracker::kBadTaskId|, indicates we're loading
  // the title for the page.
  base::CancelableTaskTracker::TaskId task_id;
};
// |profile| supplies the history service and prefs used for title lookups
// and URL formatting. The observer starts out unset.
CustomHomePagesTableModel::CustomHomePagesTableModel(Profile* profile)
    : profile_(profile),
      observer_(NULL),
      num_outstanding_title_lookups_(0) {
}

// |task_tracker_|'s destruction cancels any in-flight history queries.
CustomHomePagesTableModel::~CustomHomePagesTableModel() {
}
void CustomHomePagesTableModel::SetURLs(const std::vector<GURL>& urls) {
entries_.resize(urls.size());
for (size_t i = 0; i < urls.size(); ++i) {
entries_[i].url = urls[i];
entries_[i].title.erase();
}
LoadAllTitles();
}
/**
 * Move a number of existing entries to a new position, reordering the table.
 *
 * We determine the range of elements affected by the move, save the moved
 * elements, compact the remaining ones, and re-insert moved elements.
 * Expects |index_list| to be ordered ascending.
 */
void CustomHomePagesTableModel::MoveURLs(int insert_before,
                                         const std::vector<int>& index_list) {
  if (index_list.empty()) return;
  DCHECK(insert_before >= 0 && insert_before <= RowCount());
  // The range of elements that needs to be reshuffled is [ |first|, |last| ).
  int first = std::min(insert_before, index_list.front());
  int last = std::max(insert_before, index_list.back() + 1);
  // Save the dragged elements. Also, adjust insertion point if it is before a
  // dragged element.
  std::vector<Entry> moved_entries;
  for (size_t i = 0; i < index_list.size(); ++i) {
    moved_entries.push_back(entries_[index_list[i]]);
    if (index_list[i] == insert_before)
      insert_before++;
  }
  // Compact the range between beginning and insertion point, moving downwards.
  // |skip_count| counts dragged elements already passed; survivors shift left
  // by that amount.
  size_t skip_count = 0;
  for (int i = first; i < insert_before; ++i) {
    if (skip_count < index_list.size() && index_list[skip_count] == i)
      skip_count++;
    else
      entries_[i - skip_count] = entries_[i];
  }
  // Moving items down created a gap. We start compacting up after it.
  first = insert_before;
  insert_before -= skip_count;
  // Now compact up for elements after the insertion point, walking the
  // dragged-index list from its tail.
  skip_count = 0;
  for (int i = last - 1; i >= first; --i) {
    if (skip_count < index_list.size() &&
        index_list[index_list.size() - skip_count - 1] == i) {
      skip_count++;
    } else {
      entries_[i + skip_count] = entries_[i];
    }
  }
  // Insert moved elements.
  std::copy(moved_entries.begin(), moved_entries.end(),
            entries_.begin() + insert_before);
  // Possibly large change, so tell the view to just rebuild itself.
  if (observer_)
    observer_->OnModelChanged();
}
void CustomHomePagesTableModel::AddWithoutNotification(
int index, const GURL& url) {
DCHECK(index >= 0 && index <= RowCount());
entries_.insert(entries_.begin() + static_cast<size_t>(index), Entry());
entries_[index].url = url;
}
// Inserts a row for |url| at |index|, starts its title lookup, and notifies
// the observer (if any).
void CustomHomePagesTableModel::Add(int index, const GURL& url) {
  AddWithoutNotification(index, url);
  LoadTitle(&(entries_[index]));
  if (!observer_)
    return;
  observer_->OnItemsAdded(index, 1);
}
// Removes the row at |index| without notifying the observer. Cancels any
// in-flight title query for that row first.
void CustomHomePagesTableModel::RemoveWithoutNotification(int index) {
  DCHECK(index >= 0 && index < RowCount());
  Entry* entry = &(entries_[index]);
  // Cancel any pending load requests now so we don't deref a bogus pointer when
  // we get the loaded notification.
  if (entry->task_id != base::CancelableTaskTracker::kBadTaskId) {
    task_tracker_.TryCancel(entry->task_id);
    entry->task_id = base::CancelableTaskTracker::kBadTaskId;
  }
  entries_.erase(entries_.begin() + static_cast<size_t>(index));
}
// Removes the row at |index| and notifies the observer (if any).
void CustomHomePagesTableModel::Remove(int index) {
  RemoveWithoutNotification(index);
  if (!observer_)
    return;
  observer_->OnItemsRemoved(index, 1);
}
// Replaces the model's contents with the URLs of all eligible tabs across
// this profile's browsers, then fetches their titles.
void CustomHomePagesTableModel::SetToCurrentlyOpenPages() {
  // Remove the current entries.
  while (RowCount())
    RemoveWithoutNotification(0);
  // And add all tabs for all open browsers with our profile.
  int add_index = 0;
  for (chrome::BrowserIterator it; !it.done(); it.Next()) {
    Browser* browser = *it;
    if (browser->profile() != profile_)
      continue;  // Skip incognito browsers.
    for (int tab_index = 0;
         tab_index < browser->tab_strip_model()->count();
         ++tab_index) {
      const GURL url =
          browser->tab_strip_model()->GetWebContentsAt(tab_index)->GetURL();
      // ShouldAddPage() filters out devtools and settings pages.
      if (ShouldAddPage(url))
        AddWithoutNotification(add_index++, url);
    }
  }
  // LoadAllTitles() issues the observer notification when done.
  LoadAllTitles();
}
// Returns the URLs of all rows, in table order.
std::vector<GURL> CustomHomePagesTableModel::GetURLs() {
  std::vector<GURL> urls;
  urls.reserve(entries_.size());
  for (std::vector<Entry>::const_iterator it = entries_.begin();
       it != entries_.end(); ++it) {
    urls.push_back(it->url);
  }
  return urls;
}
int CustomHomePagesTableModel::RowCount() {
return static_cast<int>(entries_.size());
}
// Cell text: the page title when known, otherwise the formatted URL.
base::string16 CustomHomePagesTableModel::GetText(int row, int column_id) {
  DCHECK(column_id == 0);
  DCHECK(row >= 0 && row < RowCount());
  const Entry& entry = entries_[row];
  if (entry.title.empty())
    return FormattedURL(row);
  return entry.title;
}
// Tooltip combining the page title with its formatted URL; empty when the
// title is unknown (the cell already shows the URL in that case).
base::string16 CustomHomePagesTableModel::GetTooltip(int row) {
  if (entries_[row].title.empty())
    return base::string16();
  return l10n_util::GetStringFUTF16(IDS_OPTIONS_STARTUP_PAGE_TOOLTIP,
                                    entries_[row].title, FormattedURL(row));
}
// Registers (or clears, with NULL) the single observer notified of model
// mutations.
void CustomHomePagesTableModel::SetObserver(ui::TableModelObserver* observer) {
  observer_ = observer;
}
// Starts an async history lookup for |entry|'s title. The result arrives in
// OnGotTitle() with |observable| == true, so the single row is refreshed.
void CustomHomePagesTableModel::LoadTitle(Entry* entry) {
  history::HistoryService* history_service =
      HistoryServiceFactory::GetForProfile(profile_,
                                           ServiceAccessType::EXPLICIT_ACCESS);
  // The history service may be unavailable; the entry then keeps an empty
  // title and the view falls back to showing the URL.
  if (history_service) {
    entry->task_id = history_service->QueryURL(
        entry->url,
        false,
        base::Bind(&CustomHomePagesTableModel::OnGotTitle,
                   base::Unretained(this),
                   entry->url,
                   false),
        &task_tracker_);
  }
}
// Starts async title lookups for every entry. A single OnModelChanged()
// notification is issued once all lookups have resolved (see
// OnGotOneOfManyTitles()), or immediately when there are no entries.
void CustomHomePagesTableModel::LoadAllTitles() {
  history::HistoryService* history_service =
      HistoryServiceFactory::GetForProfile(profile_,
                                           ServiceAccessType::EXPLICIT_ACCESS);
  // It's possible for multiple LoadAllTitles() queries to be inflight we want
  // to make sure everything is resolved before updating the observer or we risk
  // getting rendering glitches.
  num_outstanding_title_lookups_ += entries_.size();
  for (Entry& entry : entries_) {
    if (history_service) {
      entry.task_id = history_service->QueryURL(
          entry.url,
          false,
          base::Bind(&CustomHomePagesTableModel::OnGotOneOfManyTitles,
                     base::Unretained(this),
                     entry.url),
          &task_tracker_);
    }
  }
  // No entries means no callbacks will fire, so notify right away. Guard
  // |observer_|: it is NULL until SetObserver() is called, and every other
  // notification site in this file checks it before use.
  if (entries_.empty() && observer_)
    observer_->OnModelChanged();
}
// Completion callback for the per-entry queries issued by LoadAllTitles().
// Suppresses per-row notifications and instead fires one OnModelChanged()
// when the last outstanding lookup lands.
void CustomHomePagesTableModel::OnGotOneOfManyTitles(const GURL& entry_url,
                                                     bool found_url,
                                                     const history::URLRow& row,
                                                     const history::VisitVector& visits) {
  // |observable| == false: no per-item notification from OnGotTitle().
  OnGotTitle(entry_url, false, found_url, row, visits);
  DCHECK_GE(num_outstanding_title_lookups_, 1);
  if (--num_outstanding_title_lookups_ == 0 && observer_)
    observer_->OnModelChanged();
}
// Shared completion callback for title queries. |observable| controls
// whether a per-row OnItemsChanged() notification is sent on success.
void CustomHomePagesTableModel::OnGotTitle(const GURL& entry_url,
                                           bool observable,
                                           bool found_url,
                                           const history::URLRow& row,
                                           const history::VisitVector& visits) {
  // Re-locate the entry by URL: the table may have been edited while the
  // query was in flight.
  Entry* entry = NULL;
  size_t entry_index = 0;
  for (size_t i = 0; i < entries_.size(); ++i) {
    if (entries_[i].url == entry_url) {
      entry = &entries_[i];
      entry_index = i;
      break;
    }
  }
  if (!entry) {
    // The URLs changed before we were called back.
    return;
  }
  entry->task_id = base::CancelableTaskTracker::kBadTaskId;
  // Keep the existing (empty) title when the lookup found nothing useful.
  if (found_url && !row.title().empty()) {
    entry->title = row.title();
    if (observer_ && observable)
      observer_->OnItemsChanged(static_cast<int>(entry_index), 1);
  }
}
// Formats the row's URL for display, honoring the user's accept-languages
// pref and forcing LTR directionality so the string embeds cleanly in RTL
// locales.
base::string16 CustomHomePagesTableModel::FormattedURL(int row) const {
  const std::string languages =
      profile_->GetPrefs()->GetString(prefs::kAcceptLanguages);
  return base::i18n::GetDisplayStringInLTRDirectionality(
      url_formatter::FormatUrl(entries_[row].url, languages));
}
| bsd-3-clause |
ltilve/ChromiumGStreamerBackend | ash/wm/workspace/workspace_window_resizer.cc | 38828 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/wm/workspace/workspace_window_resizer.h"
#include <algorithm>
#include <cmath>
#include <utility>
#include <vector>
#include "ash/display/window_tree_host_manager.h"
#include "ash/metrics/user_metrics_recorder.h"
#include "ash/root_window_controller.h"
#include "ash/screen_util.h"
#include "ash/shell.h"
#include "ash/shell_window_ids.h"
#include "ash/wm/default_window_resizer.h"
#include "ash/wm/dock/docked_window_layout_manager.h"
#include "ash/wm/dock/docked_window_resizer.h"
#include "ash/wm/drag_window_resizer.h"
#include "ash/wm/panels/panel_window_resizer.h"
#include "ash/wm/window_state.h"
#include "ash/wm/window_util.h"
#include "ash/wm/wm_event.h"
#include "ash/wm/workspace/phantom_window_controller.h"
#include "ash/wm/workspace/two_step_edge_cycler.h"
#include "base/command_line.h"
#include "base/memory/weak_ptr.h"
#include "ui/aura/client/aura_constants.h"
#include "ui/aura/client/screen_position_client.h"
#include "ui/aura/window.h"
#include "ui/aura/window_delegate.h"
#include "ui/aura/window_event_dispatcher.h"
#include "ui/base/hit_test.h"
#include "ui/compositor/layer.h"
#include "ui/gfx/screen.h"
#include "ui/gfx/transform.h"
#include "ui/wm/core/coordinate_conversion.h"
#include "ui/wm/core/window_util.h"
#include "ui/wm/public/window_types.h"
namespace ash {
// Builds the chain of resizers appropriate for dragging/resizing |window|
// from |window_component|. Returns an empty scoped_ptr when the window
// cannot be dragged or resized right now.
scoped_ptr<WindowResizer> CreateWindowResizer(
    aura::Window* window,
    const gfx::Point& point_in_parent,
    int window_component,
    aura::client::WindowMoveSource source) {
  DCHECK(window);
  wm::WindowState* window_state = wm::GetWindowState(window);
  // No need to return a resizer when the window cannot get resized or when a
  // resizer already exists for this window.
  if ((!window_state->CanResize() && window_component != HTCAPTION) ||
      window_state->drag_details()) {
    return scoped_ptr<WindowResizer>();
  }
  if (window_component == HTCAPTION && !window_state->can_be_dragged())
    return scoped_ptr<WindowResizer>();
  // TODO(varkha): The chaining of window resizers causes some of the logic
  // to be repeated and the logic flow difficult to control. With some windows
  // classes using reparenting during drag operations it becomes challenging to
  // implement proper transition from one resizer to another during or at the
  // end of the drag. This also causes http://crbug.com/247085.
  // It seems the only thing the panel or dock resizer needs to do is notify the
  // layout manager when a docked window is being dragged. We should have a
  // better way of doing this, perhaps by having a way of observing drags or
  // having a generic drag window wrapper which informs a layout manager that a
  // drag has started or stopped.
  // It may be possible to refactor and eliminate chaining.
  WindowResizer* window_resizer = NULL;
  if (!window_state->IsNormalOrSnapped() && !window_state->IsDocked())
    return scoped_ptr<WindowResizer>();
  int bounds_change = WindowResizer::GetBoundsChangeForWindowComponent(
      window_component);
  if (bounds_change == WindowResizer::kBoundsChangeDirection_None)
    return scoped_ptr<WindowResizer>();
  // From here on a drag definitely starts; record its parameters on the
  // WindowState for the resizers below to consult.
  window_state->CreateDragDetails(window, point_in_parent, window_component,
                                  source);
  // Windows parented to the default, docked, or panel container get the full
  // workspace resizer; everything else uses the default resizer.
  if (window->parent() &&
      (window->parent()->id() == kShellWindowId_DefaultContainer ||
       window->parent()->id() == kShellWindowId_DockedContainer ||
       window->parent()->id() == kShellWindowId_PanelContainer)) {
    window_resizer = WorkspaceWindowResizer::Create(
        window_state, std::vector<aura::Window*>());
  } else {
    window_resizer = DefaultWindowResizer::Create(window_state);
  }
  // Wrap with decorators: drag-window handling first, then panel- and
  // dock-specific behavior where applicable.
  window_resizer = DragWindowResizer::Create(window_resizer, window_state);
  if (window->type() == ui::wm::WINDOW_TYPE_PANEL)
    window_resizer = PanelWindowResizer::Create(window_resizer, window_state);
  if (window_resizer && window->parent() &&
      !::wm::GetTransientParent(window) &&
      (window->parent()->id() == kShellWindowId_DefaultContainer ||
       window->parent()->id() == kShellWindowId_DockedContainer ||
       window->parent()->id() == kShellWindowId_PanelContainer)) {
    window_resizer = DockedWindowResizer::Create(window_resizer, window_state);
  }
  return make_scoped_ptr<WindowResizer>(window_resizer);
}
namespace {
// Snapping distance used instead of WorkspaceWindowResizer::kScreenEdgeInset
// when resizing a window using touchscreen. Touch drags get a larger inset
// than mouse drags.
const int kScreenEdgeInsetForTouchDrag = 32;

// Current instance for use by the WorkspaceWindowResizerTest.
WorkspaceWindowResizer* instance = NULL;
// Returns true if the window should stick to the edge.
bool ShouldStickToEdge(int distance_from_edge, int sticky_size) {
  // Stick while within |sticky_size| in front of the edge, or no more than
  // twice that distance past it.
  const bool before_limit = distance_from_edge < sticky_size;
  const bool past_limit_ok = distance_from_edge > -sticky_size * 2;
  return before_limit && past_limit_ok;
}
// Returns the coordinate along the secondary axis to snap to: |leading|,
// |trailing|, or |none| depending on which secondary edge matched.
int CoordinateAlongSecondaryAxis(SecondaryMagnetismEdge edge,
                                 int leading,
                                 int trailing,
                                 int none) {
  switch (edge) {
    case SECONDARY_MAGNETISM_EDGE_LEADING:
      return leading;
    case SECONDARY_MAGNETISM_EDGE_TRAILING:
      return trailing;
    case SECONDARY_MAGNETISM_EDGE_NONE:
      return none;
  }
  // All enum values are handled above; reaching here is a programming error.
  NOTREACHED();
  return none;
}
// Returns the origin for |src| when magnetically attaching to |attach_to| along
// the edges |edges|. |edges| is a bitmask of the MagnetismEdges.
gfx::Point OriginForMagneticAttach(const gfx::Rect& src,
                                   const gfx::Rect& attach_to,
                                   const MatchedEdge& edge) {
  int x = 0, y = 0;
  // The primary edge fixes the coordinate perpendicular to it so the two
  // rects abut (e.g. attaching to the TOP edge puts |src| below it).
  switch (edge.primary_edge) {
    case MAGNETISM_EDGE_TOP:
      y = attach_to.bottom();
      break;
    case MAGNETISM_EDGE_LEFT:
      x = attach_to.right();
      break;
    case MAGNETISM_EDGE_BOTTOM:
      y = attach_to.y() - src.height();
      break;
    case MAGNETISM_EDGE_RIGHT:
      x = attach_to.x() - src.width();
      break;
  }
  // The secondary edge aligns the remaining coordinate: flush with the
  // leading edge, flush with the trailing edge, or unchanged.
  switch (edge.primary_edge) {
    case MAGNETISM_EDGE_TOP:
    case MAGNETISM_EDGE_BOTTOM:
      x = CoordinateAlongSecondaryAxis(
          edge.secondary_edge, attach_to.x(), attach_to.right() - src.width(),
          src.x());
      break;
    case MAGNETISM_EDGE_LEFT:
    case MAGNETISM_EDGE_RIGHT:
      y = CoordinateAlongSecondaryAxis(
          edge.secondary_edge, attach_to.y(), attach_to.bottom() - src.height(),
          src.y());
      break;
  }
  return gfx::Point(x, y);
}
// Returns the bounds for a magnetic attach when resizing. |src| is the bounds
// of window being resized, |attach_to| the bounds of the window to attach to
// and |edge| identifies the edge to attach to.
gfx::Rect BoundsForMagneticResizeAttach(const gfx::Rect& src,
                                        const gfx::Rect& attach_to,
                                        const MatchedEdge& edge) {
  int x = src.x();
  int y = src.y();
  int w = src.width();
  int h = src.height();
  gfx::Point attach_origin(OriginForMagneticAttach(src, attach_to, edge));
  // Unlike a move-attach, resizing keeps the opposite edge fixed and grows
  // or shrinks the rect toward the attach point along the primary axis.
  switch (edge.primary_edge) {
    case MAGNETISM_EDGE_LEFT:
      x = attach_origin.x();
      w = src.right() - x;
      break;
    case MAGNETISM_EDGE_RIGHT:
      w += attach_origin.x() - src.x();
      break;
    case MAGNETISM_EDGE_TOP:
      y = attach_origin.y();
      h = src.bottom() - y;
      break;
    case MAGNETISM_EDGE_BOTTOM:
      h += attach_origin.y() - src.y();
      break;
  }
  // A matched secondary edge resizes along the secondary axis as well, so a
  // corner drag can snap on both axes.
  switch (edge.primary_edge) {
    case MAGNETISM_EDGE_LEFT:
    case MAGNETISM_EDGE_RIGHT:
      if (edge.secondary_edge == SECONDARY_MAGNETISM_EDGE_LEADING) {
        y = attach_origin.y();
        h = src.bottom() - y;
      } else if (edge.secondary_edge == SECONDARY_MAGNETISM_EDGE_TRAILING) {
        h += attach_origin.y() - src.y();
      }
      break;
    case MAGNETISM_EDGE_TOP:
    case MAGNETISM_EDGE_BOTTOM:
      if (edge.secondary_edge == SECONDARY_MAGNETISM_EDGE_LEADING) {
        x = attach_origin.x();
        w = src.right() - x;
      } else if (edge.secondary_edge == SECONDARY_MAGNETISM_EDGE_TRAILING) {
        w += attach_origin.x() - src.x();
      }
      break;
  }
  return gfx::Rect(x, y, w, h);
}
// Converts a window component (HT*) to the bitmask of magnetic edges to snap
// to. Corner components map to the union of their two edges; unknown
// components map to 0.
uint32 WindowComponentToMagneticEdge(int window_component) {
  uint32 edges = 0;
  if (window_component == HTLEFT || window_component == HTTOPLEFT ||
      window_component == HTBOTTOMLEFT) {
    edges |= MAGNETISM_EDGE_LEFT;
  }
  if (window_component == HTRIGHT || window_component == HTTOPRIGHT ||
      window_component == HTBOTTOMRIGHT) {
    edges |= MAGNETISM_EDGE_RIGHT;
  }
  if (window_component == HTTOP || window_component == HTTOPLEFT ||
      window_component == HTTOPRIGHT) {
    edges |= MAGNETISM_EDGE_TOP;
  }
  if (window_component == HTBOTTOM || window_component == HTBOTTOMLEFT ||
      window_component == HTBOTTOMRIGHT) {
    edges |= MAGNETISM_EDGE_BOTTOM;
  }
  return edges;
}
} // namespace
// static
// Minimum size (along the resized axis) an attached window may be shrunk to;
// used when computing per-window minimums in the constructor.
const int WorkspaceWindowResizer::kMinOnscreenSize = 20;
// static
// Minimum number of pixels kept visible at the bottom of the work area when
// dragging a window by its caption (see AdjustBoundsForMainWindow).
const int WorkspaceWindowResizer::kMinOnscreenHeight = 32;
// static
// Distance (px) from a work-area edge within which a mouse drag sticks.
const int WorkspaceWindowResizer::kScreenEdgeInset = 8;
// Exposes the live resizer singleton so tests can inspect drag state.
WorkspaceWindowResizer* WorkspaceWindowResizer::GetInstanceForTest() {
  return instance;
}
// Represents the width or height of a window with constraints on its minimum
// and maximum size. 0 represents a lack of a constraint.
class WindowSize {
 public:
  // |size| is the starting size; |min|/|max| are the constraints (0 == none).
  WindowSize(int size, int min, int max)
      : size_(size),
        min_(min),
        max_(max) {
    // Grow the min/max bounds to include the starting size.
    if (is_underflowing())
      min_ = size_;
    if (is_overflowing())
      max_ = size_;
  }

  // True when the size can no longer move in the requested direction.
  bool is_at_capacity(bool shrinking) {
    return size_ == (shrinking ? min_ : max_);
  }

  int size() const {
    return size_;
  }

  bool has_min() const {
    return min_ != 0;
  }

  bool has_max() const {
    return max_ != 0;
  }

  // A size is valid when it lies within [min_, max_] (ignoring unset bounds).
  bool is_valid() const {
    return !is_overflowing() && !is_underflowing();
  }

  bool is_overflowing() const {
    return has_max() && size_ > max_;
  }

  bool is_underflowing() const {
    return has_min() && size_ < min_;
  }

  // Add |amount| to this WindowSize not exceeding min or max size constraints.
  // Returns by how much |size_| + |amount| exceeds the min/max constraints.
  int Add(int amount) {
    DCHECK(is_valid());
    int new_value = size_ + amount;
    if (has_min() && new_value < min_) {
      size_ = min_;
      return new_value - min_;  // Negative: amount that could not be removed.
    }
    if (has_max() && new_value > max_) {
      size_ = max_;
      return new_value - max_;  // Positive: amount that could not be added.
    }
    size_ = new_value;
    return 0;
  }

 private:
  int size_;
  int min_;
  int max_;
};
WorkspaceWindowResizer::~WorkspaceWindowResizer() {
  // Balance the LockCursor() taken in the constructor for mouse drags.
  if (did_lock_cursor_) {
    Shell* shell = Shell::GetInstance();
    shell->cursor_manager()->UnlockCursor();
  }
  // Clear the singleton only if it still points at us.
  if (instance == this)
    instance = NULL;
}
// static
// Factory for a resizer handling |window_state|'s window plus any
// |attached_windows| that must be laid out alongside it during the drag.
WorkspaceWindowResizer* WorkspaceWindowResizer::Create(
    wm::WindowState* window_state,
    const std::vector<aura::Window*>& attached_windows) {
  return new WorkspaceWindowResizer(window_state, attached_windows);
}
// Handles one pointer move during the drag: computes and applies new bounds,
// lays out attached windows, and updates the snap/dock phantom window.
void WorkspaceWindowResizer::Drag(const gfx::Point& location_in_parent,
                                  int event_flags) {
  last_mouse_location_ = location_in_parent;
  int sticky_size;
  if (event_flags & ui::EF_CONTROL_DOWN) {
    // Holding Control disables edge/window stickiness entirely.
    sticky_size = 0;
  } else if ((details().bounds_change & kBoundsChange_Resizes) &&
             details().source == aura::client::WINDOW_MOVE_SOURCE_TOUCH) {
    sticky_size = kScreenEdgeInsetForTouchDrag;
  } else {
    sticky_size = kScreenEdgeInset;
  }
  // |bounds| is in |GetTarget()->parent()|'s coordinates.
  gfx::Rect bounds = CalculateBoundsForDrag(location_in_parent);
  AdjustBoundsForMainWindow(sticky_size, &bounds);
  if (bounds != GetTarget()->bounds()) {
    if (!did_move_or_resize_) {
      // First actual movement: drop any stale restore bounds and bring the
      // dragged group to a consistent stacking order.
      if (!details().restore_bounds.IsEmpty())
        window_state()->ClearRestoreBounds();
      RestackWindows();
    }
    did_move_or_resize_ = true;
  }
  gfx::Point location_in_screen = location_in_parent;
  ::wm::ConvertPointToScreen(GetTarget()->parent(), &location_in_screen);
  aura::Window* root = NULL;
  gfx::Display display =
      ScreenUtil::FindDisplayContainingPoint(location_in_screen);
  // Track the last screen that the pointer was on to keep the snap phantom
  // window there.
  if (display.is_valid()) {
    root = Shell::GetInstance()
               ->window_tree_host_manager()
               ->GetRootWindowForDisplayId(display.id());
  }
  if (!attached_windows_.empty())
    LayoutAttachedWindows(&bounds);
  if (bounds != GetTarget()->bounds()) {
    // SetBounds needs to be called to update the layout which affects where the
    // phantom window is drawn. Keep track if the window was destroyed during
    // the drag and quit early if so.
    base::WeakPtr<WorkspaceWindowResizer> resizer(
        weak_ptr_factory_.GetWeakPtr());
    GetTarget()->SetBounds(bounds);
    if (!resizer)
      return;
  }
  const bool in_original_root = !root || root == GetTarget()->GetRootWindow();
  // Hide a phantom window for snapping if the cursor is in another root window.
  if (in_original_root) {
    UpdateSnapPhantomWindow(location_in_parent, bounds);
  } else {
    snap_type_ = SNAP_NONE;
    snap_phantom_window_controller_.reset();
    edge_cycler_.reset();
    SetDraggedWindowDocked(false);
  }
}
// Finalizes the drag: applies snapping if the pointer ended in a snap zone,
// otherwise normalizes the window's state/restore bounds.
void WorkspaceWindowResizer::CompleteDrag() {
  if (!did_move_or_resize_)
    return;
  window_state()->set_bounds_changed_by_user(true);
  snap_phantom_window_controller_.reset();
  // If the window's state type changed over the course of the drag do not snap
  // the window. This happens when the user minimizes or maximizes the window
  // using a keyboard shortcut while dragging it.
  if (window_state()->GetStateType() != details().initial_state_type)
    return;
  bool snapped = false;
  if (snap_type_ == SNAP_LEFT || snap_type_ == SNAP_RIGHT) {
    if (!window_state()->HasRestoreBounds()) {
      // Remember where the window came from so un-snapping can restore it.
      gfx::Rect initial_bounds = ScreenUtil::ConvertRectToScreen(
          GetTarget()->parent(), details().initial_bounds_in_parent);
      window_state()->SetRestoreBoundsInScreen(
          details().restore_bounds.IsEmpty() ?
          initial_bounds :
          details().restore_bounds);
    }
    // Docked windows are handled by the dock layout manager, not by snapping.
    if (!dock_layout_->is_dragged_window_docked()) {
      UserMetricsRecorder* metrics = Shell::GetInstance()->metrics();
      // TODO(oshima): Add event source type to WMEvent and move
      // metrics recording inside WindowState::OnWMEvent.
      const wm::WMEvent event(snap_type_ == SNAP_LEFT ?
                              wm::WM_EVENT_SNAP_LEFT : wm::WM_EVENT_SNAP_RIGHT);
      window_state()->OnWMEvent(&event);
      metrics->RecordUserMetricsAction(
          snap_type_ == SNAP_LEFT ?
          UMA_DRAG_MAXIMIZE_LEFT : UMA_DRAG_MAXIMIZE_RIGHT);
      snapped = true;
    }
  }
  if (!snapped) {
    if (window_state()->IsSnapped()) {
      // Keep the window snapped if the user resizes the window such that the
      // window has valid bounds for a snapped window. Always unsnap the window
      // if the user dragged the window via the caption area because doing this
      // is slightly less confusing.
      if (details().window_component == HTCAPTION ||
          !AreBoundsValidSnappedBounds(window_state()->GetStateType(),
                                       GetTarget()->bounds())) {
        // Set the window to WINDOW_STATE_TYPE_NORMAL but keep the
        // window at the bounds that the user has moved/resized the
        // window to. ClearRestoreBounds() is used instead of
        // SaveCurrentBoundsForRestore() because most of the restore
        // logic is skipped because we are still in the middle of a
        // drag. TODO(pkotwicz): Fix this and use
        // SaveCurrentBoundsForRestore().
        window_state()->ClearRestoreBounds();
        window_state()->Restore();
      }
    } else if (!dock_layout_->is_dragged_window_docked()) {
      // The window was not snapped and is not snapped. This is a user
      // resize/drag and so the current bounds should be maintained, clearing
      // any prior restore bounds. When the window is docked the restore bound
      // must be kept so the docked state can be reverted properly.
      window_state()->ClearRestoreBounds();
    }
  }
}
// Undoes the drag: restores the dragged window and every attached window to
// their pre-drag bounds, re-packing attached windows flush along the drag
// axis (x for HTRIGHT drags, y otherwise).
void WorkspaceWindowResizer::RevertDrag() {
  window_state()->set_bounds_changed_by_user(initial_bounds_changed_by_user_);
  snap_phantom_window_controller_.reset();
  if (!did_move_or_resize_)
    return;
  GetTarget()->SetBounds(details().initial_bounds_in_parent);
  if (!details().restore_bounds.IsEmpty()) {
    window_state()->SetRestoreBoundsInScreen(details().restore_bounds);
  }
  if (details().window_component == HTRIGHT) {
    // Re-stack the attached windows left-to-right from the target's original
    // right edge, restoring each one's original width.
    int last_x = details().initial_bounds_in_parent.right();
    for (size_t i = 0; i < attached_windows_.size(); ++i) {
      gfx::Rect bounds(attached_windows_[i]->bounds());
      bounds.set_x(last_x);
      bounds.set_width(initial_size_[i]);
      attached_windows_[i]->SetBounds(bounds);
      last_x = attached_windows_[i]->bounds().right();
    }
  } else {
    // Same, but top-to-bottom for a bottom-edge drag.
    int last_y = details().initial_bounds_in_parent.bottom();
    for (size_t i = 0; i < attached_windows_.size(); ++i) {
      gfx::Rect bounds(attached_windows_[i]->bounds());
      bounds.set_y(last_y);
      bounds.set_height(initial_size_[i]);
      attached_windows_[i]->SetBounds(bounds);
      last_y = attached_windows_[i]->bounds().bottom();
    }
  }
}
// Captures the drag's starting state: locks the cursor for mouse drags,
// resolves the dock layout manager, and records the initial/minimum sizes of
// all attached windows so they can be resized proportionally.
WorkspaceWindowResizer::WorkspaceWindowResizer(
    wm::WindowState* window_state,
    const std::vector<aura::Window*>& attached_windows)
    : WindowResizer(window_state),
      attached_windows_(attached_windows),
      did_lock_cursor_(false),
      did_move_or_resize_(false),
      initial_bounds_changed_by_user_(window_state_->bounds_changed_by_user()),
      total_min_(0),
      total_initial_size_(0),
      snap_type_(SNAP_NONE),
      num_mouse_moves_since_bounds_change_(0),
      magnetism_window_(NULL),
      weak_ptr_factory_(this) {
  DCHECK(details().is_resizable);
  // A mousemove should still show the cursor even if the window is
  // being moved or resized with touch, so do not lock the cursor.
  if (details().source != aura::client::WINDOW_MOVE_SOURCE_TOUCH) {
    Shell* shell = Shell::GetInstance();
    shell->cursor_manager()->LockCursor();
    did_lock_cursor_ = true;
  }
  aura::Window* dock_container = Shell::GetContainer(
      GetTarget()->GetRootWindow(), kShellWindowId_DockedContainer);
  dock_layout_ = static_cast<DockedWindowLayoutManager*>(
      dock_container->layout_manager());
  // Only support attaching to the right/bottom.
  DCHECK(attached_windows_.empty() ||
         (details().window_component == HTRIGHT ||
          details().window_component == HTBOTTOM));
  // TODO: figure out how to deal with window going off the edge.
  // Calculate sizes so that we can maintain the ratios if we need to resize.
  int total_available = 0;
  for (size_t i = 0; i < attached_windows_.size(); ++i) {
    gfx::Size min(attached_windows_[i]->delegate()->GetMinimumSize());
    int initial_size = PrimaryAxisSize(attached_windows_[i]->bounds().size());
    initial_size_.push_back(initial_size);
    // If current size is smaller than the min, use the current size as the min.
    // This way we don't snap on resize.
    int min_size = std::min(initial_size,
                            std::max(PrimaryAxisSize(min), kMinOnscreenSize));
    total_min_ += min_size;
    total_initial_size_ += initial_size;
    total_available += std::max(min_size, initial_size) - min_size;
  }
  instance = this;
}
// Repositions/resizes the attached windows so they stay packed flush against
// the dragged edge of the main window, distributing the main window's size
// change among them. May shrink |bounds| when the attached windows cannot
// absorb the change.
void WorkspaceWindowResizer::LayoutAttachedWindows(
    gfx::Rect* bounds) {
  gfx::Rect work_area(ScreenUtil::GetDisplayWorkAreaBoundsInParent(
      GetTarget()));
  int initial_size = PrimaryAxisSize(details().initial_bounds_in_parent.size());
  int current_size = PrimaryAxisSize(bounds->size());
  // Space between the dragged edge and the work-area edge, along the drag axis.
  int start = PrimaryAxisCoordinate(bounds->right(), bounds->bottom());
  int end = PrimaryAxisCoordinate(work_area.right(), work_area.bottom());
  int delta = current_size - initial_size;
  int available_size = end - start;
  std::vector<int> sizes;
  int leftovers = CalculateAttachedSizes(delta, available_size, &sizes);
  // leftovers > 0 means that the attached windows can't grow to compensate for
  // the shrinkage of the main window. This line causes the attached windows to
  // be moved so they are still flush against the main window, rather than the
  // main window being prevented from shrinking.
  leftovers = std::min(0, leftovers);
  // Reallocate any leftover pixels back into the main window. This is
  // necessary when, for example, the main window shrinks, but none of the
  // attached windows can grow without exceeding their max size constraints.
  // Adding the pixels back to the main window effectively prevents the main
  // window from resizing too far.
  if (details().window_component == HTRIGHT)
    bounds->set_width(bounds->width() + leftovers);
  else
    bounds->set_height(bounds->height() + leftovers);
  DCHECK_EQ(attached_windows_.size(), sizes.size());
  // Lay the attached windows out end-to-end from the (possibly adjusted)
  // dragged edge.
  int last = PrimaryAxisCoordinate(bounds->right(), bounds->bottom());
  for (size_t i = 0; i < attached_windows_.size(); ++i) {
    gfx::Rect attached_bounds(attached_windows_[i]->bounds());
    if (details().window_component == HTRIGHT) {
      attached_bounds.set_x(last);
      attached_bounds.set_width(sizes[i]);
    } else {
      attached_bounds.set_y(last);
      attached_bounds.set_height(sizes[i]);
    }
    attached_windows_[i]->SetBounds(attached_bounds);
    last += sizes[i];
  }
}
// Computes the new primary-axis size of each attached window after the main
// window changed by |delta| pixels with |available_size| pixels of work area
// remaining. Fills |sizes| (one entry per attached window) and returns the
// pixels that could not be distributed due to min/max constraints.
int WorkspaceWindowResizer::CalculateAttachedSizes(
    int delta,
    int available_size,
    std::vector<int>* sizes) const {
  std::vector<WindowSize> window_sizes;
  CreateBucketsForAttached(&window_sizes);
  // How much we need to grow the attached by (collectively).
  int grow_attached_by = 0;
  if (delta > 0) {
    // If the attached windows don't fit when at their initial size, we will
    // have to shrink them by how much they overflow.
    if (total_initial_size_ >= available_size)
      grow_attached_by = available_size - total_initial_size_;
  } else {
    // If we're shrinking, we grow the attached so the total size remains
    // constant.
    grow_attached_by = -delta;
  }
  int leftover_pixels = 0;
  // Iterate because each GrowFairly pass may saturate some windows at their
  // min/max, freeing their share to be redistributed on the next pass.
  while (grow_attached_by != 0) {
    int leftovers = GrowFairly(grow_attached_by, window_sizes);
    if (leftovers == grow_attached_by) {
      // Nothing could be distributed this pass; everything left is stuck.
      leftover_pixels = leftovers;
      break;
    }
    grow_attached_by = leftovers;
  }
  for (size_t i = 0; i < window_sizes.size(); ++i)
    sizes->push_back(window_sizes[i].size());
  return leftover_pixels;
}
// Distributes |pixels| (may be negative to shrink) among the windows in
// |sizes| proportionally to their current size, respecting each window's
// min/max. Returns the pixels that could not be allocated in this pass.
int WorkspaceWindowResizer::GrowFairly(
    int pixels,
    std::vector<WindowSize>& sizes) const {
  bool shrinking = pixels < 0;
  // Only windows that can still move in the requested direction participate.
  std::vector<WindowSize*> nonfull_windows;
  for (size_t i = 0; i < sizes.size(); ++i) {
    if (!sizes[i].is_at_capacity(shrinking))
      nonfull_windows.push_back(&sizes[i]);
  }
  std::vector<float> ratios;
  CalculateGrowthRatios(nonfull_windows, &ratios);
  int remaining_pixels = pixels;
  bool add_leftover_pixels_to_last = true;
  for (size_t i = 0; i < nonfull_windows.size(); ++i) {
    // Truncation toward zero here is intentional; rounding losses are handed
    // to the last window (or deferred, see below).
    int grow_by = pixels * ratios[i];
    // Put any leftover pixels into the last window.
    if (i == nonfull_windows.size() - 1 && add_leftover_pixels_to_last)
      grow_by = remaining_pixels;
    int remainder = nonfull_windows[i]->Add(grow_by);
    int consumed = grow_by - remainder;
    remaining_pixels -= consumed;
    if (nonfull_windows[i]->is_at_capacity(shrinking) && remainder > 0) {
      // Because this window overflowed, some of the pixels in
      // |remaining_pixels| aren't there due to rounding errors. Rather than
      // unfairly giving all those pixels to the last window, we refrain from
      // allocating them so that this function can be called again to distribute
      // the pixels fairly.
      add_leftover_pixels_to_last = false;
    }
  }
  return remaining_pixels;
}
// Fills |out_ratios| with each window's share of the total size, so larger
// windows absorb proportionally more of a resize.
// NOTE(review): if every window's size were 0 this would divide by zero;
// callers appear to only pass real window sizes — confirm.
void WorkspaceWindowResizer::CalculateGrowthRatios(
    const std::vector<WindowSize*>& sizes,
    std::vector<float>* out_ratios) const {
  DCHECK(out_ratios->empty());
  int total_value = 0;
  for (size_t i = 0; i < sizes.size(); ++i)
    total_value += sizes[i]->size();
  for (size_t i = 0; i < sizes.size(); ++i)
    out_ratios->push_back(
        (static_cast<float>(sizes[i]->size())) / total_value);
}
// Builds one WindowSize per attached window from its recorded initial size
// and its delegate-reported min/max along the drag axis.
void WorkspaceWindowResizer::CreateBucketsForAttached(
    std::vector<WindowSize>* sizes) const {
  for (size_t i = 0; i < attached_windows_.size(); i++) {
    int initial_size = initial_size_[i];
    aura::WindowDelegate* delegate = attached_windows_[i]->delegate();
    int min = PrimaryAxisSize(delegate->GetMinimumSize());
    int max = PrimaryAxisSize(delegate->GetMaximumSize());
    sizes->push_back(WindowSize(initial_size, min, max));
  }
}
// During a move, snaps |bounds|'s origin onto a nearby window's edge if one
// is within magnetism range (any edge may match).
void WorkspaceWindowResizer::MagneticallySnapToOtherWindows(gfx::Rect* bounds) {
  if (UpdateMagnetismWindow(*bounds, kAllMagnetismEdges)) {
    // Attachment is computed in screen coordinates, then converted back to
    // the parent's coordinate space before being applied.
    gfx::Point point = OriginForMagneticAttach(
        ScreenUtil::ConvertRectToScreen(GetTarget()->parent(), *bounds),
        magnetism_window_->GetBoundsInScreen(),
        magnetism_edge_);
    aura::client::GetScreenPositionClient(GetTarget()->GetRootWindow())->
        ConvertPointFromScreen(GetTarget()->parent(), &point);
    bounds->set_origin(point);
  }
}
// During a resize, snaps the dragged edge(s) of |bounds| onto a nearby
// window; only the edges implied by the grabbed window component may match.
void WorkspaceWindowResizer::MagneticallySnapResizeToOtherWindows(
    gfx::Rect* bounds) {
  const uint32 edges = WindowComponentToMagneticEdge(
      details().window_component);
  if (UpdateMagnetismWindow(*bounds, edges)) {
    *bounds = ScreenUtil::ConvertRectFromScreen(
        GetTarget()->parent(),
        BoundsForMagneticResizeAttach(
            ScreenUtil::ConvertRectToScreen(GetTarget()->parent(), *bounds),
            magnetism_window_->GetBoundsInScreen(),
            magnetism_edge_));
  }
}
// Finds a window whose edge is within magnetism range of |bounds| for any of
// |edges|. On success sets |magnetism_window_|/|magnetism_edge_| and returns
// true; otherwise clears |magnetism_window_| and returns false.
bool WorkspaceWindowResizer::UpdateMagnetismWindow(const gfx::Rect& bounds,
                                                   uint32 edges) {
  // |bounds| are in coordinates of original window's parent.
  gfx::Rect bounds_in_screen =
      ScreenUtil::ConvertRectToScreen(GetTarget()->parent(), bounds);
  MagnetismMatcher matcher(bounds_in_screen, edges);
  // If we snapped to a window then check it first. That way we don't bounce
  // around when close to multiple edges.
  if (magnetism_window_) {
    if (window_tracker_.Contains(magnetism_window_) &&
        matcher.ShouldAttach(magnetism_window_->GetBoundsInScreen(),
                             &magnetism_edge_)) {
      return true;
    }
    window_tracker_.Remove(magnetism_window_);
    magnetism_window_ = NULL;
  }
  // Avoid magnetically snapping windows that are not resizable.
  // TODO(oshima): change this to window.type() == TYPE_NORMAL.
  if (!window_state()->CanResize())
    return false;
  aura::Window::Windows root_windows = Shell::GetAllRootWindows();
  for (aura::Window::Windows::iterator iter = root_windows.begin();
       iter != root_windows.end(); ++iter) {
    const aura::Window* root_window = *iter;
    // Test all children from the desktop in each root window.
    const aura::Window::Windows& children = Shell::GetContainer(
        root_window, kShellWindowId_DefaultContainer)->children();
    // Reverse order == topmost first; stop once all edges are obscured.
    for (aura::Window::Windows::const_reverse_iterator i = children.rbegin();
         i != children.rend() && !matcher.AreEdgesObscured(); ++i) {
      wm::WindowState* other_state = wm::GetWindowState(*i);
      // Skip the dragged window itself and windows that can't be snapped to.
      if (other_state->window() == GetTarget() ||
          !other_state->window()->IsVisible() ||
          !other_state->IsNormalOrSnapped() ||
          !other_state->CanResize()) {
        continue;
      }
      if (matcher.ShouldAttach(
              other_state->window()->GetBoundsInScreen(), &magnetism_edge_)) {
        magnetism_window_ = other_state->window();
        // Track the window so we notice if it is destroyed mid-drag.
        window_tracker_.Add(magnetism_window_);
        return true;
      }
    }
  }
  return false;
}
// Constrains and snaps |bounds| for the dragged window: keeps a caption drag
// on-screen, applies work-area/window stickiness when |sticky_size| > 0, and
// reserves room for attached windows' minimum sizes.
// Fix: removed the redundant `sticky_size > 0` re-check on the magnetism
// fallback — that branch is only reachable from `else if (sticky_size > 0)`.
void WorkspaceWindowResizer::AdjustBoundsForMainWindow(
    int sticky_size,
    gfx::Rect* bounds) {
  gfx::Point last_mouse_location_in_screen = last_mouse_location_;
  ::wm::ConvertPointToScreen(GetTarget()->parent(),
                             &last_mouse_location_in_screen);
  gfx::Display display = Shell::GetScreen()->GetDisplayNearestPoint(
      last_mouse_location_in_screen);
  gfx::Rect work_area =
      ScreenUtil::ConvertRectFromScreen(GetTarget()->parent(),
                                        display.work_area());
  if (details().window_component == HTCAPTION) {
    // Adjust the bounds to the work area where the mouse cursor is located.
    // Always keep kMinOnscreenHeight or the window height (whichever is less)
    // on the bottom.
    int max_y = work_area.bottom() - std::min(kMinOnscreenHeight,
                                              bounds->height());
    if (bounds->y() > max_y) {
      bounds->set_y(max_y);
    } else if (bounds->y() <= work_area.y()) {
      // Don't allow dragging above the top of the display until the mouse
      // cursor reaches the work area above if any.
      bounds->set_y(work_area.y());
    }
    if (sticky_size > 0) {
      // Possibly stick to edge except when a mouse pointer is outside the
      // work area.
      if (display.work_area().Contains(last_mouse_location_in_screen))
        StickToWorkAreaOnMove(work_area, sticky_size, bounds);
      MagneticallySnapToOtherWindows(bounds);
    }
  } else if (sticky_size > 0) {
    MagneticallySnapResizeToOtherWindows(bounds);
    // Window magnetism takes priority; only fall back to the work-area edges
    // when no window attracted the dragged edge.
    if (!magnetism_window_)
      StickToWorkAreaOnResize(work_area, sticky_size, bounds);
  }
  if (attached_windows_.empty())
    return;
  // Leave enough room after the dragged edge for the attached windows to
  // shrink no further than their combined minimum size.
  if (details().window_component == HTRIGHT) {
    bounds->set_width(std::min(bounds->width(),
                               work_area.right() - total_min_ - bounds->x()));
  } else {
    DCHECK_EQ(HTBOTTOM, details().window_component);
    bounds->set_height(std::min(bounds->height(),
                                work_area.bottom() - total_min_ - bounds->y()));
  }
}
// During a move, translates |bounds| onto a work-area edge when it is within
// |sticky_size| pixels of one. Returns whether |bounds| was changed.
bool WorkspaceWindowResizer::StickToWorkAreaOnMove(
    const gfx::Rect& work_area,
    int sticky_size,
    gfx::Rect* bounds) const {
  const int left_edge = work_area.x();
  const int right_edge = work_area.right();
  const int top_edge = work_area.y();
  const int bottom_edge = work_area.bottom();
  bool updated = false;
  // Horizontal: left edge wins over right when both are in range.
  if (ShouldStickToEdge(bounds->x() - left_edge, sticky_size)) {
    bounds->set_x(left_edge);
    updated = true;
  } else if (ShouldStickToEdge(right_edge - bounds->right(), sticky_size)) {
    bounds->set_x(right_edge - bounds->width());
    updated = true;
  }
  // Vertical: top edge wins over bottom when both are in range.
  if (ShouldStickToEdge(bounds->y() - top_edge, sticky_size)) {
    bounds->set_y(top_edge);
    updated = true;
  } else if (ShouldStickToEdge(bottom_edge - bounds->bottom(), sticky_size) &&
             bounds->height() < (bottom_edge - top_edge)) {
    // Only snap to the bottom if the window is smaller than the work area.
    // Doing otherwise can lead to window snapping in weird ways as it bounces
    // between snapping to top then bottom.
    bounds->set_y(bottom_edge - bounds->height());
    updated = true;
  }
  return updated;
}
// During a resize, stretches the dragged edge(s) of |bounds| to the matching
// work-area edge when within |sticky_size| pixels. Only the edges implied by
// the grabbed window component are considered.
void WorkspaceWindowResizer::StickToWorkAreaOnResize(
    const gfx::Rect& work_area,
    int sticky_size,
    gfx::Rect* bounds) const {
  const uint32 edges = WindowComponentToMagneticEdge(
      details().window_component);
  const int left_edge = work_area.x();
  const int right_edge = work_area.right();
  const int top_edge = work_area.y();
  const int bottom_edge = work_area.bottom();
  // Top/left attach moves the origin and grows the size; bottom/right attach
  // only grows the size (origin stays fixed).
  if (edges & MAGNETISM_EDGE_TOP &&
      ShouldStickToEdge(bounds->y() - top_edge, sticky_size)) {
    bounds->set_height(bounds->bottom() - top_edge);
    bounds->set_y(top_edge);
  }
  if (edges & MAGNETISM_EDGE_LEFT &&
      ShouldStickToEdge(bounds->x() - left_edge, sticky_size)) {
    bounds->set_width(bounds->right() - left_edge);
    bounds->set_x(left_edge);
  }
  if (edges & MAGNETISM_EDGE_BOTTOM &&
      ShouldStickToEdge(bottom_edge - bounds->bottom(), sticky_size)) {
    bounds->set_height(bottom_edge - bounds->y());
  }
  if (edges & MAGNETISM_EDGE_RIGHT &&
      ShouldStickToEdge(right_edge - bounds->right(), sticky_size)) {
    bounds->set_width(right_edge - bounds->x());
  }
}
// Returns |size|'s extent along the drag axis (width for HTRIGHT drags,
// height for HTBOTTOM drags).
int WorkspaceWindowResizer::PrimaryAxisSize(const gfx::Size& size) const {
  return PrimaryAxisCoordinate(size.width(), size.height());
}
// Selects the coordinate along the drag axis: |x| for a right-edge drag,
// |y| for a bottom-edge drag. Any other component is a programming error.
int WorkspaceWindowResizer::PrimaryAxisCoordinate(int x, int y) const {
  switch (details().window_component) {
    case HTRIGHT:
      return x;
    case HTBOTTOM:
      return y;
    default:
      NOTREACHED();
  }
  return 0;
}
// Updates |snap_type_| from the pointer |location| and shows/positions the
// phantom preview window for either snapping or docking. Only applies to
// caption drags that have actually moved.
void WorkspaceWindowResizer::UpdateSnapPhantomWindow(const gfx::Point& location,
                                                     const gfx::Rect& bounds) {
  if (!did_move_or_resize_ || details().window_component != HTCAPTION)
    return;
  SnapType last_type = snap_type_;
  snap_type_ = GetSnapType(location);
  // Reset the preview whenever the snap target changes or disappears.
  if (snap_type_ == SNAP_NONE || snap_type_ != last_type) {
    snap_phantom_window_controller_.reset();
    edge_cycler_.reset();
    if (snap_type_ == SNAP_NONE) {
      SetDraggedWindowDocked(false);
      return;
    }
  }
  DCHECK(snap_type_ == SNAP_LEFT || snap_type_ == SNAP_RIGHT);
  DockedAlignment desired_alignment = (snap_type_ == SNAP_LEFT) ?
      DOCKED_ALIGNMENT_LEFT : DOCKED_ALIGNMENT_RIGHT;
  const bool can_dock =
      dock_layout_->CanDockWindow(GetTarget(), desired_alignment) &&
      dock_layout_->GetAlignmentOfWindow(GetTarget()) != DOCKED_ALIGNMENT_NONE;
  if (!can_dock) {
    // If the window cannot be docked, undock the window. This may change the
    // workspace bounds and hence |snap_type_|.
    SetDraggedWindowDocked(false);
    snap_type_ = GetSnapType(location);
  }
  const bool can_snap = snap_type_ != SNAP_NONE && window_state()->CanSnap();
  if (!can_snap && !can_dock) {
    snap_type_ = SNAP_NONE;
    snap_phantom_window_controller_.reset();
    edge_cycler_.reset();
    return;
  }
  // The edge cycler distinguishes the first dwell at a screen edge (snap)
  // from a second one (dock).
  if (!edge_cycler_) {
    edge_cycler_.reset(new TwoStepEdgeCycler(
        location, snap_type_ == SNAP_LEFT
                      ? TwoStepEdgeCycler::DIRECTION_LEFT
                      : TwoStepEdgeCycler::DIRECTION_RIGHT));
  } else {
    edge_cycler_->OnMove(location);
  }
  // Update phantom window with snapped or docked guide bounds.
  // Windows that cannot be snapped or are less wide than kMaxDockWidth can get
  // docked without going through a snapping sequence.
  gfx::Rect phantom_bounds;
  const bool should_dock = can_dock &&
      (!can_snap ||
       GetTarget()->bounds().width() <=
           DockedWindowLayoutManager::kMaxDockWidth ||
       edge_cycler_->use_second_mode() ||
       dock_layout_->is_dragged_window_docked());
  if (should_dock) {
    SetDraggedWindowDocked(true);
    phantom_bounds = ScreenUtil::ConvertRectFromScreen(
        GetTarget()->parent(), dock_layout_->dragged_bounds());
  } else {
    phantom_bounds = (snap_type_ == SNAP_LEFT) ?
        wm::GetDefaultLeftSnappedWindowBoundsInParent(GetTarget()) :
        wm::GetDefaultRightSnappedWindowBoundsInParent(GetTarget());
  }
  if (!snap_phantom_window_controller_) {
    snap_phantom_window_controller_.reset(
        new PhantomWindowController(GetTarget()));
  }
  snap_phantom_window_controller_->Show(ScreenUtil::ConvertRectToScreen(
      GetTarget()->parent(), phantom_bounds));
}
// Restacks the dragged window and its attached windows so they keep their
// relative z-order but sit contiguously at the top of their parent. Bails out
// if any attached window lives in a different parent.
void WorkspaceWindowResizer::RestackWindows() {
  if (attached_windows_.empty())
    return;
  // Build a map from index in children to window, returning if there is a
  // window with a different parent.
  typedef std::map<size_t, aura::Window*> IndexToWindowMap;
  IndexToWindowMap map;
  aura::Window* parent = GetTarget()->parent();
  const aura::Window::Windows& windows(parent->children());
  map[std::find(windows.begin(), windows.end(), GetTarget()) -
      windows.begin()] = GetTarget();
  for (std::vector<aura::Window*>::const_iterator i =
           attached_windows_.begin(); i != attached_windows_.end(); ++i) {
    if ((*i)->parent() != parent)
      return;
    size_t index =
        std::find(windows.begin(), windows.end(), *i) - windows.begin();
    map[index] = *i;
  }
  // Reorder the windows starting at the topmost.
  parent->StackChildAtTop(map.rbegin()->second);
  for (IndexToWindowMap::const_reverse_iterator i = map.rbegin();
       i != map.rend(); ) {
    aura::Window* window = i->second;
    ++i;
    if (i != map.rend())
      parent->StackChildBelow(i->second, window);
  }
}
// Classifies the pointer |location| as a left-snap, right-snap or no-snap
// zone within the current display's work area.
WorkspaceWindowResizer::SnapType WorkspaceWindowResizer::GetSnapType(
    const gfx::Point& location) const {
  // TODO: this likely only wants total display area, not the area of a single
  // display.
  gfx::Rect area(ScreenUtil::GetDisplayWorkAreaBoundsInParent(GetTarget()));
  if (details().source == aura::client::WINDOW_MOVE_SOURCE_TOUCH) {
    // Increase tolerance for touch-snapping near the screen edges. This is only
    // necessary when the work area left or right edge is same as screen edge.
    gfx::Rect display_bounds(ScreenUtil::GetDisplayBoundsInParent(GetTarget()));
    int inset_left = 0;
    if (area.x() == display_bounds.x())
      inset_left = kScreenEdgeInsetForTouchDrag;
    int inset_right = 0;
    if (area.right() == display_bounds.right())
      inset_right = kScreenEdgeInsetForTouchDrag;
    area.Inset(inset_left, 0, inset_right, 0);
  }
  if (location.x() <= area.x())
    return SNAP_LEFT;
  if (location.x() >= area.right() - 1)
    return SNAP_RIGHT;
  return SNAP_NONE;
}
// Docks or undocks the dragged window via the dock layout manager. No-ops
// when the window is already in the requested state. The bounds-changed-by-
// user flag is cleared while docked (the dock owns the bounds) and restored
// on undock.
void WorkspaceWindowResizer::SetDraggedWindowDocked(bool should_dock) {
  const bool is_docked = dock_layout_->is_dragged_window_docked();
  if (should_dock == is_docked)
    return;
  if (should_dock) {
    window_state()->set_bounds_changed_by_user(false);
    dock_layout_->DockDraggedWindow(GetTarget());
  } else {
    dock_layout_->UndockDraggedWindow();
    window_state()->set_bounds_changed_by_user(true);
  }
}
// Returns true if |bounds_in_parent| exactly matches the bounds a window of
// that width would occupy when snapped to the given side: full work-area
// height, flush against the left or right work-area edge.
bool WorkspaceWindowResizer::AreBoundsValidSnappedBounds(
    wm::WindowStateType snapped_type,
    const gfx::Rect& bounds_in_parent) const {
  DCHECK(snapped_type == wm::WINDOW_STATE_TYPE_LEFT_SNAPPED ||
         snapped_type == wm::WINDOW_STATE_TYPE_RIGHT_SNAPPED);
  gfx::Rect snapped_bounds = ScreenUtil::GetDisplayWorkAreaBoundsInParent(
      GetTarget());
  if (snapped_type == wm::WINDOW_STATE_TYPE_RIGHT_SNAPPED)
    snapped_bounds.set_x(snapped_bounds.right() - bounds_in_parent.width());
  snapped_bounds.set_width(bounds_in_parent.width());
  return bounds_in_parent == snapped_bounds;
}
} // namespace ash
| bsd-3-clause |
kebot/GiBeans | application/uc_client/control/pm.php | 7854 | <?php
/*
[UCenter] (C)2001-2009 Comsenz Inc.
This is NOT a freeware, use is subject to license terms
$Id: pm.php 836 2008-12-05 02:25:48Z monkey $
*/
// Block direct access; this script must be included by the UCenter front
// controller, which defines IN_UC.
!defined('IN_UC') && exit('Access Denied');
// Negative return codes for onsendpm():
define('PMLIMIT1DAY_ERROR', -1);   // daily send quota exceeded
define('PMFLOODCTRL_ERROR', -2);   // sending too fast (flood control)
define('PMMSGTONOTFRIEND', -3);    // mass message to non-friends
define('PMSENDREGDAYS', -4);       // account too new to send PMs
class pmcontrol extends base {
// PHP5 constructor; delegates to the PHP4-style constructor for
// backward compatibility.
function __construct() {
    $this->pmcontrol();
}
// Legacy (PHP4-style) constructor: runs base initialization and loads the
// user and pm models used by every action method.
function pmcontrol() {
    parent::__construct();
    $this->load('user');
    $this->load('pm');
}
// Checks whether the given uid has new private messages.
// Input: uid, more (detail level; 3 also returns the last message compiled
// from uccode to HTML). Returns whatever the pm model reports.
function oncheck_newpm() {
    $this->init_input();
    $this->user['uid'] = intval($this->input('uid'));
    $more = $this->input('more');
    $result = $_ENV['pm']->check_newpm($this->user['uid'], $more);
    if($more == 3) {
        require_once UC_ROOT.'lib/uccode.class.php';
        $this->uccode = new uccode();
        $result['lastmsg'] = $this->uccode->complie($result['lastmsg']);
    }
    return $result;
}
// Sends a private message from fromuid to one or more recipients.
// Input: fromuid (0 == system message), msgto (comma-separated ids or names),
// subject, message, replypmid (reply target, forces name resolution),
// isusername (msgto contains usernames, not uids).
// Returns the last created pmid (0 if nothing sent) or a negative
// PM*_ERROR code on policy rejection.
function onsendpm() {
    $this->init_input();
    $fromuid = $this->input('fromuid');
    $msgto = $this->input('msgto');
    $subject = $this->input('subject');
    $message = $this->input('message');
    $replypmid = $this->input('replypmid');
    $isusername = $this->input('isusername');
    if($fromuid) {
        $user = $_ENV['user']->get_user_by_uid($fromuid);
        $user = daddslashes($user, 1);
        if(!$user) {
            return 0;
        }
        $this->user['uid'] = $user['uid'];
        $this->user['username'] = $user['username'];
    } else {
        // fromuid 0: system-originated message, no sender account.
        $this->user['uid'] = 0;
        $this->user['username'] = '';
    }
    if($replypmid) {
        // Replying: derive the single recipient from the original message,
        // taking the other party of the thread.
        $isusername = 1;
        $pms = $_ENV['pm']->get_pm_by_pmid($this->user['uid'], $replypmid);
        if($pms[0]['msgfromid'] == $this->user['uid']) {
            $user = $_ENV['user']->get_user_by_uid($pms[0]['msgtoid']);
            $msgto = $user['username'];
        } else {
            $msgto = $pms[0]['msgfrom'];
        }
    }
    $msgto = array_unique(explode(',', $msgto));
    // Resolve usernames to uids when needed.
    $isusername && $msgto = $_ENV['user']->name2id($msgto);
    // Per-recipient blacklists; '{ALL}' means "block everyone".
    $blackls = $_ENV['pm']->get_blackls($this->user['uid'], $msgto);
    if($fromuid) {
        // Policy checks apply only to real users, not system messages.
        if($this->settings['pmsendregdays']) {
            if($user['regdate'] > $this->time - $this->settings['pmsendregdays'] * 86400) {
                return PMSENDREGDAYS;
            }
        }
        $this->load('friend');
        // Mass messages are restricted to friends.
        if(count($msgto) > 1 && !($is_friend = $_ENV['friend']->is_friend($fromuid, $msgto, 3))) {
            return PMMSGTONOTFRIEND;
        }
        // Quota / flood control; friends and ongoing reply threads are exempt.
        $pmlimit1day = $this->settings['pmlimit1day'] && $_ENV['pm']->count_pm_by_fromuid($this->user['uid'], 86400) > $this->settings['pmlimit1day'];
        if($pmlimit1day || ($this->settings['pmfloodctrl'] && $_ENV['pm']->count_pm_by_fromuid($this->user['uid'], $this->settings['pmfloodctrl']))) {
            if(!$_ENV['friend']->is_friend($fromuid, $msgto, 3)) {
                if(!$_ENV['pm']->is_reply_pm($fromuid, $msgto)) {
                    if($pmlimit1day) {
                        return PMLIMIT1DAY_ERROR;
                    } else {
                        return PMFLOODCTRL_ERROR;
                    }
                }
            }
        }
    }
    $lastpmid = 0;
    // Deliver to each recipient unless the sender is blacklisted by them.
    foreach($msgto as $uid) {
        if(!$fromuid || !in_array('{ALL}', $blackls[$uid])) {
            $blackls[$uid] = $_ENV['user']->name2id($blackls[$uid]);
            if(!$fromuid || isset($blackls[$uid]) && !in_array($this->user['uid'], $blackls[$uid])) {
                $lastpmid = $_ENV['pm']->sendpm($subject, $message, $this->user, $uid, $replypmid);
            }
        }
    }
    return $lastpmid;
}
// Deletes the given pmids from uid's message boxes.
// Returns the pm model's result.
function ondelete() {
    $this->init_input();
    $this->user['uid'] = intval($this->input('uid'));
    $id = $_ENV['pm']->deletepm($this->user['uid'], $this->input('pmids'));
    return $id;
}
// Deletes entire conversations between uid and each uid in touids.
// Returns the pm model's result.
function ondeleteuser() {
    $this->init_input();
    $uid = intval($this->input('uid'));
    $this->user['uid'] = $uid;
    return $_ENV['pm']->deleteuidpm($uid, $this->input('touids'));
}
// Sets the read/unread status of messages identified by uids (conversation
// partners) and/or pmids for the given uid.
function onreadstatus() {
    $this->init_input();
    $this->user['uid'] = intval($this->input('uid'));
    $_ENV['pm']->set_pm_status($this->user['uid'], $this->input('uids'), $this->input('pmids'), $this->input('status'));
}
// Marks all of uid's new private messages as ignored.
// Returns the pm model's result.
function onignore() {
    $this->init_input();
    $uid = intval($this->input('uid'));
    $this->user['uid'] = $uid;
    return $_ENV['pm']->set_ignore($uid);
}
// Lists a page of private messages from one of uid's folders.
// Input: uid, folder (newbox|inbox|outbox|searchbox; default inbox),
// filter (newpm|privatepm|systempm|announcepm; ignored for searchbox),
// page, pagesize, msglen (preview length; 0 omits message bodies).
// Returns array('data' => rows (only when pagesize > 0), 'count' => total).
function onls() {
    $this->init_input();
    $pagesize = $this->input('pagesize');
    $folder = $this->input('folder');
    $filter = $this->input('filter');
    $page = $this->input('page');
    $folder = in_array($folder, array('newbox', 'inbox', 'outbox', 'searchbox')) ? $folder : 'inbox';
    if($folder != 'searchbox') {
        $filter = $filter ? (in_array($filter, array('newpm', 'privatepm', 'systempm', 'announcepm')) ? $filter : '') : '';
    }
    $msglen = $this->input('msglen');
    $this->user['uid'] = intval($this->input('uid'));
    if($folder != 'searchbox') {
        $pmnum = $_ENV['pm']->get_num($this->user['uid'], $folder, $filter);
        $start = $this->page_get_start($page, $pagesize, $pmnum);
    } else {
        // Search results are not counted server-side; assume a full page.
        $pmnum = $pagesize;
        $start = ($page - 1) * $pagesize;
    }
    if($pagesize > 0) {
        $pms = $_ENV['pm']->get_pm_list($this->user['uid'], $pmnum, $folder, $filter, $start, $pagesize);
        if(is_array($pms) && !empty($pms)) {
            foreach($pms as $key => $pm) {
                if($msglen) {
                    // Strip uccode and truncate for list previews.
                    $pms[$key]['message'] = htmlspecialchars($_ENV['pm']->removecode($pms[$key]['message'], $msglen));
                } else {
                    unset($pms[$key]['message']);
                }
                unset($pms[$key]['folder']);
            }
        }
        $result['data'] = $pms;
    }
    $result['count'] = $pmnum;
    return $result;
}
function onviewnode() {
	$this->init_input();
	$uid = intval($this->input('uid'));
	$this->user['uid'] = $uid;
	$pmid = $_ENV['pm']->pmintval($this->input('pmid'));
	$type = $this->input('type');
	// Fetch a single message node; bail out (implicit null) when not found.
	$pm = $_ENV['pm']->get_pmnode_by_pmid($uid, $pmid, $type);
	if(!$pm) {
		return;
	}
	// Render the bbcode body before returning the record.
	require_once UC_ROOT.'lib/uccode.class.php';
	$this->uccode = new uccode();
	$pm['message'] = $this->uccode->complie($pm['message']);
	return $pm;
}
function onview() {
	$this->init_input();
	$this->user['uid'] = intval($this->input('uid'));
	$touid = $this->input('touid');
	$pmid = $_ENV['pm']->pmintval($this->input('pmid'));
	$daterange = $this->input('daterange');
	if(empty($pmid)) {
		$daterange = empty($daterange) ? 1 : $daterange;
		// Midnight of the current day, adjusted by the configured timezone offset.
		$today = $this->time - ($this->time + $this->settings['timeoffset']) % 86400;
		if($daterange == 1) {
			$starttime = $today;
		} elseif($daterange == 2) {
			$starttime = $today - 86400;
		} elseif($daterange == 3) {
			$starttime = $today - 172800;
		} elseif($daterange == 4) {
			$starttime = $today - 604800;
		} elseif($daterange == 5) {
			$starttime = 0;
		} else {
			// FIX: an unrecognized range previously left $starttime undefined;
			// treat it like the default range (today).
			$starttime = $today;
		}
		$endtime = $this->time;
		$pms = $_ENV['pm']->get_pm_by_touid($this->user['uid'], $touid, $starttime, $endtime);
	} else {
		$pms = $_ENV['pm']->get_pm_by_pmid($this->user['uid'], $pmid);
	}
	require_once UC_ROOT.'lib/uccode.class.php';
	$this->uccode = new uccode();
	$status = FALSE;
	foreach($pms as $key => $pm) {
		$pms[$key]['message'] = $this->uccode->complie($pms[$key]['message']);
		// Remember whether any incoming message in the thread is still unread.
		!$status && $status = $pm['msgtoid'] && $pm['new'];
	}
	// Mark the conversation read only if an unread incoming message was seen.
	$status && $_ENV['pm']->set_pm_status($this->user['uid'], $touid, $pmid);
	return $pms;
}
function onblackls_get() {
	$this->init_input();
	$uid = intval($this->input('uid'));
	$this->user['uid'] = $uid;
	// Return the caller's ignore (black) list.
	return $_ENV['pm']->get_blackls($uid);
}
function onblackls_set() {
	$this->init_input();
	$uid = intval($this->input('uid'));
	$this->user['uid'] = $uid;
	// Replace the whole ignore list with the submitted one.
	return $_ENV['pm']->set_blackls($uid, $this->input('blackls'));
}
function onblackls_add() {
	$this->init_input();
	$uid = intval($this->input('uid'));
	$this->user['uid'] = $uid;
	// Mode 1: add the given username(s) (cf. onblackls_delete, which passes 2).
	return $_ENV['pm']->update_blackls($uid, $this->input('username'), 1);
}
function onblackls_delete($arr) {
	// NOTE(review): the $arr parameter is unused; kept for call-compatibility
	// with existing dispatchers — confirm before removing.
	$this->init_input();
	$this->user['uid'] = intval($this->input('uid'));
	$username = $this->input('username');
	// Mode 2 appears to mean "remove" (onblackls_add passes 1) — verify in pm model.
	return $_ENV['pm']->update_blackls($this->user['uid'], $username, 2);
}
}
?> | bsd-3-clause |
joone/chromium-crosswalk | chrome/browser/chromeos/policy/user_cloud_policy_manager_chromeos.cc | 16861 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/policy/user_cloud_policy_manager_chromeos.h"
#include <set>
#include <utility>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/logging.h"
#include "base/metrics/histogram.h"
#include "base/metrics/sparse_histogram.h"
#include "base/sequenced_task_runner.h"
#include "base/values.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/chromeos/login/helper.h"
#include "chrome/browser/chromeos/login/session/user_session_manager.h"
#include "chrome/browser/chromeos/login/users/affiliation.h"
#include "chrome/browser/chromeos/login/users/chrome_user_manager_impl.h"
#include "chrome/browser/chromeos/policy/policy_oauth2_token_fetcher.h"
#include "chrome/browser/chromeos/policy/user_cloud_policy_manager_factory_chromeos.h"
#include "chrome/browser/chromeos/policy/wildcard_login_checker.h"
#include "chrome/browser/lifetime/application_lifetime.h"
#include "chrome/common/chrome_content_client.h"
#include "components/policy/core/common/cloud/cloud_external_data_manager.h"
#include "components/policy/core/common/cloud/cloud_policy_refresh_scheduler.h"
#include "components/policy/core/common/cloud/device_management_service.h"
#include "components/policy/core/common/cloud/system_policy_request_context.h"
#include "components/policy/core/common/policy_map.h"
#include "components/policy/core/common/policy_pref_names.h"
#include "components/policy/core/common/policy_types.h"
#include "components/user_manager/user_manager.h"
#include "net/url_request/url_request_context_getter.h"
#include "policy/policy_constants.h"
#include "url/gurl.h"
namespace em = enterprise_management;
namespace policy {
namespace {
// UMA histogram names.
const char kUMADelayInitialization[] =
"Enterprise.UserPolicyChromeOS.DelayInitialization";
const char kUMAInitialFetchClientError[] =
"Enterprise.UserPolicyChromeOS.InitialFetch.ClientError";
const char kUMAInitialFetchDelayClientRegister[] =
"Enterprise.UserPolicyChromeOS.InitialFetch.DelayClientRegister";
const char kUMAInitialFetchDelayOAuth2Token[] =
"Enterprise.UserPolicyChromeOS.InitialFetch.DelayOAuth2Token";
const char kUMAInitialFetchDelayPolicyFetch[] =
"Enterprise.UserPolicyChromeOS.InitialFetch.DelayPolicyFetch";
const char kUMAInitialFetchDelayTotal[] =
"Enterprise.UserPolicyChromeOS.InitialFetch.DelayTotal";
const char kUMAInitialFetchOAuth2Error[] =
"Enterprise.UserPolicyChromeOS.InitialFetch.OAuth2Error";
const char kUMAInitialFetchOAuth2NetworkError[] =
"Enterprise.UserPolicyChromeOS.InitialFetch.OAuth2NetworkError";
// Callback for the in-session wildcard login check: on RESULT_BLOCKED the
// user is removed from the login screen list and the session is terminated.
void OnWildcardCheckCompleted(const std::string& username,
                              WildcardLoginChecker::Result result) {
  if (result == WildcardLoginChecker::RESULT_BLOCKED) {
    LOG(ERROR) << "Online wildcard login check failed, terminating session.";

    // TODO(mnissler): This only removes the user pod from the login screen, but
    // the cryptohome remains. This is because deleting the cryptohome for a
    // logged-in session is not possible. Fix this either by delaying the
    // cryptohome deletion operation or by getting rid of the in-session
    // wildcard check.
    user_manager::UserManager::Get()->RemoveUserFromList(
        AccountId::FromUserEmail(username));
    chrome::AttemptUserExit();
  }
}
} // namespace
// Builds the manager on top of CloudPolicyManager: takes ownership of the
// store and external data manager and, when blocking on the initial policy
// fetch, arms a timeout that unblocks profile initialization if the fetch
// takes too long.
UserCloudPolicyManagerChromeOS::UserCloudPolicyManagerChromeOS(
    scoped_ptr<CloudPolicyStore> store,
    scoped_ptr<CloudExternalDataManager> external_data_manager,
    const base::FilePath& component_policy_cache_path,
    bool wait_for_policy_fetch,
    base::TimeDelta initial_policy_fetch_timeout,
    const scoped_refptr<base::SequencedTaskRunner>& task_runner,
    const scoped_refptr<base::SequencedTaskRunner>& file_task_runner,
    const scoped_refptr<base::SequencedTaskRunner>& io_task_runner)
    // The base class is initialized with the raw pointer before |store| is
    // moved into |store_| (members initialize after the base), so the pointer
    // stays valid.
    : CloudPolicyManager(dm_protocol::kChromeUserPolicyType,
                         std::string(),
                         store.get(),
                         task_runner,
                         file_task_runner,
                         io_task_runner),
      store_(std::move(store)),
      external_data_manager_(std::move(external_data_manager)),
      component_policy_cache_path_(component_policy_cache_path),
      wait_for_policy_fetch_(wait_for_policy_fetch),
      policy_fetch_timeout_(false, false) {
  time_init_started_ = base::Time::Now();
  // A max() timeout means "block indefinitely"; only start the timer otherwise.
  if (wait_for_policy_fetch_ && !initial_policy_fetch_timeout.is_max()) {
    policy_fetch_timeout_.Start(
        FROM_HERE,
        initial_policy_fetch_timeout,
        base::Bind(&UserCloudPolicyManagerChromeOS::OnBlockingFetchTimeout,
                   base::Unretained(this)));
  }
}
UserCloudPolicyManagerChromeOS::~UserCloudPolicyManagerChromeOS() {}
// Wires the manager to the device management service: builds the
// CloudPolicyClient and component policy service, connects the external data
// manager, then either finishes initialization immediately or observes the
// CloudPolicyService until it is ready.
void UserCloudPolicyManagerChromeOS::Connect(
    PrefService* local_state,
    DeviceManagementService* device_management_service,
    scoped_refptr<net::URLRequestContextGetter> system_request_context) {
  DCHECK(device_management_service);
  DCHECK(local_state);
  local_state_ = local_state;
  scoped_refptr<net::URLRequestContextGetter> request_context;
  if (system_request_context.get()) {
    // |system_request_context| can be null for tests.

    // Use the system request context here instead of a context derived
    // from the Profile because Connect() is called before the profile is
    // fully initialized (required so we can perform the initial policy load).
    // TODO(atwilson): Change this to use a UserPolicyRequestContext once
    // Connect() is called after profile initialization. http://crbug.com/323591
    request_context = new SystemPolicyRequestContext(
        system_request_context, GetUserAgent());
  }
  scoped_ptr<CloudPolicyClient> cloud_policy_client(new CloudPolicyClient(
      std::string(), std::string(), kPolicyVerificationKeyHash,
      device_management_service, request_context));
  CreateComponentCloudPolicyService(component_policy_cache_path_,
                                    request_context, cloud_policy_client.get());
  core()->Connect(std::move(cloud_policy_client));
  client()->AddObserver(this);
  external_data_manager_->Connect(request_context);

  // Determine the next step after the CloudPolicyService initializes.
  if (service()->IsInitializationComplete()) {
    OnInitializationCompleted(service());
  } else {
    service()->AddObserver(this);
  }
}
// Receives an OAuth2 access token: kicks off the wildcard login check (if one
// was enabled) and, if the client still needs registration and the service is
// ready, reuses the token to register.
void UserCloudPolicyManagerChromeOS::OnAccessTokenAvailable(
    const std::string& access_token) {
  access_token_ = access_token;

  if (!wildcard_username_.empty()) {
    wildcard_login_checker_.reset(new WildcardLoginChecker());
    wildcard_login_checker_->StartWithAccessToken(
        access_token,
        base::Bind(&OnWildcardCheckCompleted, wildcard_username_));
  }

  if (service() && service()->IsInitializationComplete() &&
      client() && !client()->is_registered()) {
    // Feed the token into the registration path as a successful fetch.
    OnOAuth2PolicyTokenFetched(
        access_token, GoogleServiceAuthError(GoogleServiceAuthError::NONE));
  }
}
bool UserCloudPolicyManagerChromeOS::IsClientRegistered() const {
return client() && client()->is_registered();
}
// Arms the in-session wildcard login check for |username|. Must be called
// before any access token arrives (the check is started from
// OnAccessTokenAvailable()).
void UserCloudPolicyManagerChromeOS::EnableWildcardLoginCheck(
    const std::string& username) {
  DCHECK(access_token_.empty());
  wildcard_username_ = username;
}
// Detaches this object from the client and service, drops the token fetcher
// and external data connection, then defers to the base-class shutdown.
void UserCloudPolicyManagerChromeOS::Shutdown() {
  if (client())
    client()->RemoveObserver(this);
  if (service())
    service()->RemoveObserver(this);
  token_fetcher_.reset();
  external_data_manager_->Disconnect();
  CloudPolicyManager::Shutdown();
}
// A domain is complete once the base class says so; the Chrome domain
// additionally waits out the blocking initial policy fetch.
bool UserCloudPolicyManagerChromeOS::IsInitializationComplete(
    PolicyDomain domain) const {
  if (!CloudPolicyManager::IsInitializationComplete(domain))
    return false;
  return domain != POLICY_DOMAIN_CHROME || !wait_for_policy_fetch_;
}
// Invoked once the CloudPolicyService becomes ready: records timing UMA,
// triggers client registration if needed, and possibly starts the refresh
// scheduler.
void UserCloudPolicyManagerChromeOS::OnInitializationCompleted(
    CloudPolicyService* cloud_policy_service) {
  DCHECK_EQ(service(), cloud_policy_service);
  cloud_policy_service->RemoveObserver(this);

  time_init_completed_ = base::Time::Now();
  UMA_HISTOGRAM_MEDIUM_TIMES(kUMADelayInitialization,
                             time_init_completed_ - time_init_started_);

  // If the CloudPolicyClient isn't registered at this stage then it needs an
  // OAuth token for the initial registration.
  //
  // If |wait_for_policy_fetch_| is true then Profile initialization is blocking
  // on the initial policy fetch, so the token must be fetched immediately.
  // In that case, the signin Profile is used to authenticate a Gaia request to
  // fetch a refresh token, and then the policy token is fetched.
  //
  // If |wait_for_policy_fetch_| is false then the UserCloudPolicyTokenForwarder
  // service will eventually call OnAccessTokenAvailable() once an access token
  // is available. That call may have already happened while waiting for
  // initialization of the CloudPolicyService, so in that case check if an
  // access token is already available.
  if (!client()->is_registered()) {
    if (wait_for_policy_fetch_) {
      FetchPolicyOAuthToken();
    } else if (!access_token_.empty()) {
      OnAccessTokenAvailable(access_token_);
    }
  }

  if (!wait_for_policy_fetch_) {
    // If this isn't blocking on a policy fetch then
    // CloudPolicyManager::OnStoreLoaded() already published the cached policy.
    // Start the refresh scheduler now, which will eventually refresh the
    // cached policy or make the first fetch once the OAuth2 token is
    // available.
    StartRefreshSchedulerIfReady();
  }
}
// CloudPolicyClient::Observer: a policy fetch finished.
void UserCloudPolicyManagerChromeOS::OnPolicyFetched(
    CloudPolicyClient* client) {
  // No action required. If we're blocked on a policy fetch, we'll learn about
  // completion of it through OnInitialPolicyFetchComplete().
}
// During a blocking startup: once the client registers, issue the initial
// policy fetch; if it became unregistered instead, abort the wait.
void UserCloudPolicyManagerChromeOS::OnRegistrationStateChanged(
    CloudPolicyClient* cloud_policy_client) {
  DCHECK_EQ(client(), cloud_policy_client);

  if (wait_for_policy_fetch_) {
    time_client_registered_ = base::Time::Now();
    if (!time_token_available_.is_null()) {
      UMA_HISTOGRAM_MEDIUM_TIMES(
          kUMAInitialFetchDelayClientRegister,
          time_client_registered_ - time_token_available_);
    }

    // If we're blocked on the policy fetch, now is a good time to issue it.
    if (client()->is_registered()) {
      service()->RefreshPolicy(
          base::Bind(
              &UserCloudPolicyManagerChromeOS::OnInitialPolicyFetchComplete,
              base::Unretained(this)));
    } else {
      // If the client has switched to not registered, we bail out as this
      // indicates the cloud policy setup flow has been aborted.
      CancelWaitForPolicyFetch();
    }
  }
}
// A client error ends any blocking initial fetch so the session can start
// (with whatever policy is cached). The DM status code is recorded in UMA.
void UserCloudPolicyManagerChromeOS::OnClientError(
    CloudPolicyClient* cloud_policy_client) {
  DCHECK_EQ(client(), cloud_policy_client);
  if (wait_for_policy_fetch_) {
    UMA_HISTOGRAM_SPARSE_SLOWLY(kUMAInitialFetchClientError,
                                cloud_policy_client->status());
  }
  CancelWaitForPolicyFetch();
}
// Component policy becoming ready may be the last missing prerequisite for
// starting the refresh scheduler, so re-check after the base-class handling.
void UserCloudPolicyManagerChromeOS::OnComponentCloudPolicyUpdated() {
  CloudPolicyManager::OnComponentCloudPolicyUpdated();
  StartRefreshSchedulerIfReady();
}
// In addition to the base-class handling, propagates the user's affiliation
// IDs from the loaded policy blob to the user manager.
void UserCloudPolicyManagerChromeOS::OnStoreLoaded(
    CloudPolicyStore* cloud_policy_store) {
  CloudPolicyManager::OnStoreLoaded(cloud_policy_store);

  em::PolicyData const* const policy_data = cloud_policy_store->policy();
  if (policy_data) {
    chromeos::AffiliationIDSet set_of_user_affiliation_ids(
        policy_data->user_affiliation_ids().begin(),
        policy_data->user_affiliation_ids().end());
    chromeos::ChromeUserManager::Get()->SetUserAffiliation(
        policy_data->username(), set_of_user_affiliation_ids);
  }
}
// Extends the base implementation with enterprise defaults for settings the
// administrator left unset.
void UserCloudPolicyManagerChromeOS::GetChromePolicy(PolicyMap* policy_map) {
  CloudPolicyManager::GetChromePolicy(policy_map);

  // If the store has a verified policy blob received from the server then apply
  // the defaults for policies that haven't been configured by the administrator
  // given that this is an enterprise user.
  if (!store()->has_policy())
    return;
  SetEnterpriseUsersDefaults(policy_map);
}
// Fetches the policy OAuth token, preferring the refresh token cached in the
// user context; otherwise falls back to the signin profile's request context.
// Both paths report through OnOAuth2PolicyTokenFetched().
void UserCloudPolicyManagerChromeOS::FetchPolicyOAuthToken() {
  const std::string& refresh_token = chromeos::UserSessionManager::GetInstance()
                                         ->user_context()
                                         .GetRefreshToken();
  if (!refresh_token.empty()) {
    token_fetcher_.reset(new PolicyOAuth2TokenFetcher());
    token_fetcher_->StartWithRefreshToken(
        refresh_token, g_browser_process->system_request_context(),
        base::Bind(&UserCloudPolicyManagerChromeOS::OnOAuth2PolicyTokenFetched,
                   base::Unretained(this)));
    return;
  }

  scoped_refptr<net::URLRequestContextGetter> signin_context =
      chromeos::login::GetSigninContext();
  if (!signin_context.get()) {
    LOG(ERROR) << "No signin context for policy oauth token fetch!";
    // Report an empty token; downstream this cancels the blocking fetch.
    OnOAuth2PolicyTokenFetched(
        std::string(), GoogleServiceAuthError(GoogleServiceAuthError::NONE));
    return;
  }
  token_fetcher_.reset(new PolicyOAuth2TokenFetcher());
  token_fetcher_->StartWithSigninContext(
      signin_context.get(), g_browser_process->system_request_context(),
      base::Bind(&UserCloudPolicyManagerChromeOS::OnOAuth2PolicyTokenFetched,
                 base::Unretained(this)));
}
// Receives the policy OAuth token (or an error). On success, starts client
// registration; on failure, cancels any blocking fetch and records UMA.
void UserCloudPolicyManagerChromeOS::OnOAuth2PolicyTokenFetched(
    const std::string& policy_token,
    const GoogleServiceAuthError& error) {
  DCHECK(!client()->is_registered());
  time_token_available_ = base::Time::Now();
  if (wait_for_policy_fetch_) {
    UMA_HISTOGRAM_MEDIUM_TIMES(kUMAInitialFetchDelayOAuth2Token,
                               time_token_available_ - time_init_completed_);
  }

  if (error.state() == GoogleServiceAuthError::NONE) {
    // Start client registration. Either OnRegistrationStateChanged() or
    // OnClientError() will be called back.
    client()->Register(em::DeviceRegisterRequest::USER,
                       em::DeviceRegisterRequest::FLAVOR_USER_REGISTRATION,
                       policy_token, std::string(), std::string(),
                       std::string());
  } else {
    // Failed to get a token, stop waiting and use an empty policy.
    CancelWaitForPolicyFetch();

    UMA_HISTOGRAM_ENUMERATION(kUMAInitialFetchOAuth2Error,
                              error.state(),
                              GoogleServiceAuthError::NUM_STATES);
    if (error.state() == GoogleServiceAuthError::CONNECTION_FAILED) {
      // Network errors are negative in the code, but the histogram data type
      // expects the corresponding positive value.
      UMA_HISTOGRAM_SPARSE_SLOWLY(kUMAInitialFetchOAuth2NetworkError,
                                  -error.network_error());
    }
  }

  token_fetcher_.reset();
}
// Records timing UMA for the initial fetch and unblocks the session,
// regardless of |success| (an empty policy is acceptable).
void UserCloudPolicyManagerChromeOS::OnInitialPolicyFetchComplete(
    bool success) {
  const base::Time now = base::Time::Now();
  UMA_HISTOGRAM_MEDIUM_TIMES(kUMAInitialFetchDelayPolicyFetch,
                             now - time_client_registered_);
  UMA_HISTOGRAM_MEDIUM_TIMES(kUMAInitialFetchDelayTotal,
                             now - time_init_started_);
  CancelWaitForPolicyFetch();
}
// Fires when the initial-fetch timer expires; gives up on the blocking fetch
// so the session can start without fresh policy.
void UserCloudPolicyManagerChromeOS::OnBlockingFetchTimeout() {
  if (!wait_for_policy_fetch_)
    return;
  LOG(WARNING) << "Timed out while waiting for the initial policy fetch. "
               << "The first session will start without policy.";
  CancelWaitForPolicyFetch();
}
// Stops blocking on the initial fetch: publishes whatever policy is available
// and allows the refresh scheduler to start. Idempotent.
void UserCloudPolicyManagerChromeOS::CancelWaitForPolicyFetch() {
  if (!wait_for_policy_fetch_)
    return;
  wait_for_policy_fetch_ = false;
  policy_fetch_timeout_.Stop();
  CheckAndPublishPolicy();
  // Now that |wait_for_policy_fetch_| is guaranteed to be false, the scheduler
  // can be started.
  StartRefreshSchedulerIfReady();
}
// Starts the policy refresh scheduler once all prerequisites are met. Safe to
// call repeatedly from the various completion paths; a no-op until ready.
void UserCloudPolicyManagerChromeOS::StartRefreshSchedulerIfReady() {
  if (core()->refresh_scheduler())
    return;  // Already started.

  if (wait_for_policy_fetch_)
    return;  // Still waiting for the initial, blocking fetch.

  if (!service() || !local_state_)
    return;  // Not connected.

  if (component_policy_service() &&
      !component_policy_service()->is_initialized()) {
    // If the client doesn't have the list of components to fetch yet then don't
    // start the scheduler. The |component_policy_service_| will call back into
    // OnComponentCloudPolicyUpdated() once it's ready.
    return;
  }

  core()->StartRefreshScheduler();
  core()->TrackRefreshDelayPref(local_state_,
                                policy_prefs::kUserPolicyRefreshRate);
}
} // namespace policy
| bsd-3-clause |
pozdnyakov/chromium-crosswalk | chromeos/dbus/shill_device_client_stub.cc | 12584 | // Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chromeos/dbus/shill_device_client_stub.h"
#include "base/bind.h"
#include "base/message_loop.h"
#include "base/stl_util.h"
#include "base/values.h"
#include "chromeos/dbus/dbus_thread_manager.h"
#include "chromeos/dbus/shill_manager_client.h"
#include "chromeos/dbus/shill_property_changed_observer.h"
#include "dbus/bus.h"
#include "dbus/message.h"
#include "dbus/object_path.h"
#include "dbus/object_proxy.h"
#include "dbus/values_util.h"
#include "third_party/cros_system_api/dbus/service_constants.h"
namespace chromeos {
namespace {
// Default error handler used by the stub's internal calls: just logs.
void ErrorFunction(const std::string& error_name,
                   const std::string& error_message) {
  LOG(ERROR) << "Shill Error: " << error_name << " : " << error_message;
}
} // namespace
// Seeds the stub with a default wifi and cellular device (see
// SetDefaultProperties()).
ShillDeviceClientStub::ShillDeviceClientStub() : weak_ptr_factory_(this) {
  SetDefaultProperties();
}
// Frees the heap-allocated per-device observer lists owned by
// |observer_list_| (see GetObserverList()).
ShillDeviceClientStub::~ShillDeviceClientStub() {
  STLDeleteContainerPairSecondPointers(
      observer_list_.begin(), observer_list_.end());
}
// ShillDeviceClient overrides.
// Registers |observer| for property-change notifications on |device_path|.
void ShillDeviceClientStub::AddPropertyChangedObserver(
    const dbus::ObjectPath& device_path,
    ShillPropertyChangedObserver* observer){
  GetObserverList(device_path).AddObserver(observer);
}
// Unregisters |observer| from the per-device observer list.
void ShillDeviceClientStub::RemovePropertyChangedObserver(
    const dbus::ObjectPath& device_path,
    ShillPropertyChangedObserver* observer){
  GetObserverList(device_path).RemoveObserver(observer);
}
// Asynchronously delivers the stub's stored properties for |device_path|
// through |callback| (via PassStubDeviceProperties()).
void ShillDeviceClientStub::GetProperties(
    const dbus::ObjectPath& device_path,
    const DictionaryValueCallback& callback){
  if (callback.is_null())
    return;
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&ShillDeviceClientStub::PassStubDeviceProperties,
                 weak_ptr_factory_.GetWeakPtr(),
                 device_path, callback));
}
// Synchronous variant of GetProperties(): returns the stored dictionary or
// NULL for an unknown device. Ownership stays with |stub_devices_|.
base::DictionaryValue* ShillDeviceClientStub::CallGetPropertiesAndBlock(
    const dbus::ObjectPath& device_path){
  base::DictionaryValue* device_properties = NULL;
  stub_devices_.GetDictionaryWithoutPathExpansion(
      device_path.value(), &device_properties);
  return device_properties;
}
// Stub scan: immediately reports success without doing anything.
void ShillDeviceClientStub::ProposeScan(const dbus::ObjectPath& device_path,
                                        const VoidDBusMethodCallback& callback){
  PostVoidCallback(callback, DBUS_METHOD_CALL_SUCCESS);
}
// Stores |value| under |name| for |device_path| and notifies observers.
// Reports a generic flimflam failure through |error_callback| when the device
// is unknown.
void ShillDeviceClientStub::SetProperty(const dbus::ObjectPath& device_path,
                                        const std::string& name,
                                        const base::Value& value,
                                        const base::Closure& callback,
                                        const ErrorCallback& error_callback){
  base::DictionaryValue* device_properties = NULL;
  if (!stub_devices_.GetDictionary(device_path.value(), &device_properties)) {
    std::string error_name("org.chromium.flimflam.Error.Failure");
    std::string error_message("Failed");
    if (!error_callback.is_null()) {
      base::MessageLoop::current()->PostTask(FROM_HERE,
                                             base::Bind(error_callback,
                                                        error_name,
                                                        error_message));
    }
    return;
  }
  // The stub keeps its own copy of |value|.
  device_properties->Set(name, value.DeepCopy());
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&ShillDeviceClientStub::NotifyObserversPropertyChanged,
                 weak_ptr_factory_.GetWeakPtr(), device_path, name));
  if (callback.is_null())
    return;
  base::MessageLoop::current()->PostTask(FROM_HERE, callback);
}
// Removes |name| from the device's stored properties; reports failure when
// the device is unknown.
void ShillDeviceClientStub::ClearProperty(
    const dbus::ObjectPath& device_path,
    const std::string& name,
    const VoidDBusMethodCallback& callback){
  base::DictionaryValue* device_properties = NULL;
  if (!stub_devices_.GetDictionary(device_path.value(), &device_properties)) {
    PostVoidCallback(callback, DBUS_METHOD_CALL_FAILURE);
    return;
  }
  device_properties->Remove(name, NULL);
  PostVoidCallback(callback, DBUS_METHOD_CALL_SUCCESS);
}
// Stub: reports success with an empty object path.
void ShillDeviceClientStub::AddIPConfig(
    const dbus::ObjectPath& device_path,
    const std::string& method,
    const ObjectPathDBusMethodCallback& callback){
  if (!callback.is_null()) {
    base::MessageLoop::current()->PostTask(
        FROM_HERE,
        base::Bind(callback, DBUS_METHOD_CALL_SUCCESS, dbus::ObjectPath()));
  }
}
// Stub: unconditionally succeeds; |error_callback| is never invoked.
void ShillDeviceClientStub::RequirePin(const dbus::ObjectPath& device_path,
                                       const std::string& pin,
                                       bool require,
                                       const base::Closure& callback,
                                       const ErrorCallback& error_callback){
  if (!callback.is_null())
    base::MessageLoop::current()->PostTask(FROM_HERE, callback);
}
// Stub: unconditionally succeeds; |error_callback| is never invoked.
void ShillDeviceClientStub::EnterPin(const dbus::ObjectPath& device_path,
                                     const std::string& pin,
                                     const base::Closure& callback,
                                     const ErrorCallback& error_callback){
  if (!callback.is_null())
    base::MessageLoop::current()->PostTask(FROM_HERE, callback);
}
// Stub: unconditionally succeeds; |error_callback| is never invoked.
void ShillDeviceClientStub::UnblockPin(const dbus::ObjectPath& device_path,
                                       const std::string& puk,
                                       const std::string& pin,
                                       const base::Closure& callback,
                                       const ErrorCallback& error_callback){
  if (!callback.is_null())
    base::MessageLoop::current()->PostTask(FROM_HERE, callback);
}
// Stub: unconditionally succeeds; |error_callback| is never invoked.
void ShillDeviceClientStub::ChangePin(const dbus::ObjectPath& device_path,
                                      const std::string& old_pin,
                                      const std::string& new_pin,
                                      const base::Closure& callback,
                                      const ErrorCallback& error_callback){
  if (!callback.is_null())
    base::MessageLoop::current()->PostTask(FROM_HERE, callback);
}
// Stub: unconditionally succeeds; |error_callback| is never invoked.
void ShillDeviceClientStub::Register(const dbus::ObjectPath& device_path,
                                     const std::string& network_id,
                                     const base::Closure& callback,
                                     const ErrorCallback& error_callback){
  if (!callback.is_null())
    base::MessageLoop::current()->PostTask(FROM_HERE, callback);
}
// Stub: unconditionally succeeds; |error_callback| is never invoked.
void ShillDeviceClientStub::SetCarrier(const dbus::ObjectPath& device_path,
                                       const std::string& carrier,
                                       const base::Closure& callback,
                                       const ErrorCallback& error_callback){
  if (!callback.is_null())
    base::MessageLoop::current()->PostTask(FROM_HERE, callback);
}
// Stub: unconditionally succeeds; |error_callback| is never invoked.
void ShillDeviceClientStub::Reset(const dbus::ObjectPath& device_path,
                                  const base::Closure& callback,
                                  const ErrorCallback& error_callback){
  if (!callback.is_null())
    base::MessageLoop::current()->PostTask(FROM_HERE, callback);
}
// The stub doubles as its own test interface.
ShillDeviceClient::TestInterface* ShillDeviceClientStub::GetTestInterface(){
  return this;
}
// ShillDeviceClient::TestInterface overrides.
// Registers a fake device with the stub manager and seeds its Type and D-Bus
// identification properties.
void ShillDeviceClientStub::AddDevice(const std::string& device_path,
                                      const std::string& type,
                                      const std::string& object_path){
  DBusThreadManager::Get()->GetShillManagerClient()->GetTestInterface()->
      AddDevice(device_path);

  base::DictionaryValue* properties = GetDeviceProperties(device_path);
  properties->SetWithoutPathExpansion(
      flimflam::kTypeProperty,
      base::Value::CreateStringValue(type));
  properties->SetWithoutPathExpansion(
      flimflam::kDBusObjectProperty,
      base::Value::CreateStringValue(object_path));
  properties->SetWithoutPathExpansion(
      flimflam::kDBusConnectionProperty,
      base::Value::CreateStringValue("/stub"));
}
// Removes the fake device from both the stub manager and local storage.
void ShillDeviceClientStub::RemoveDevice(const std::string& device_path){
  DBusThreadManager::Get()->GetShillManagerClient()->GetTestInterface()->
      RemoveDevice(device_path);
  stub_devices_.RemoveWithoutPathExpansion(device_path, NULL);
}
// Drops every fake device from the stub manager and local storage.
void ShillDeviceClientStub::ClearDevices(){
  DBusThreadManager::Get()->GetShillManagerClient()->GetTestInterface()->
      ClearDevices();
  stub_devices_.Clear();
}
// Test helper: logs then sets a property via the regular SetProperty() path;
// failures are routed to ErrorFunction (logged only).
void ShillDeviceClientStub::SetDeviceProperty(const std::string& device_path,
                                              const std::string& name,
                                              const base::Value& value){
  VLOG(1) << "SetDeviceProperty: " << device_path
          << ": " << name << " = " << value;
  SetProperty(dbus::ObjectPath(device_path), name, value,
              base::Bind(&base::DoNothing),
              base::Bind(&ErrorFunction));
}
// Returns the path of the first stub device whose Type property equals
// |type|, or an empty string when none matches.
std::string ShillDeviceClientStub::GetDevicePathForType(
    const std::string& type) {
  for (base::DictionaryValue::Iterator iter(stub_devices_);
       !iter.IsAtEnd(); iter.Advance()) {
    const base::DictionaryValue* properties = NULL;
    if (!iter.value().GetAsDictionary(&properties))
      continue;
    std::string prop_type;
    if (!properties->GetStringWithoutPathExpansion(
            flimflam::kTypeProperty, &prop_type) ||
        prop_type != type)
      continue;
    return iter.key();
  }
  return std::string();
}
// Seeds the stub's initial device set.
void ShillDeviceClientStub::SetDefaultProperties() {
  // Add a wifi device. Note: path matches Manager entry.
  AddDevice("stub_wifi_device1", flimflam::kTypeWifi, "/device/wifi1");

  // Add a cellular device. Used in SMS stub. Note: path matches
  // Manager entry.
  AddDevice("stub_cellular_device1", flimflam::kTypeCellular,
            "/device/cellular1");
}
// Runs |callback| with the stored properties for |device_path|, or with an
// empty dictionary and a failure status for an unknown device.
void ShillDeviceClientStub::PassStubDeviceProperties(
    const dbus::ObjectPath& device_path,
    const DictionaryValueCallback& callback) const {
  const base::DictionaryValue* device_properties = NULL;
  if (!stub_devices_.GetDictionaryWithoutPathExpansion(
          device_path.value(), &device_properties)) {
    base::DictionaryValue empty_dictionary;
    callback.Run(DBUS_METHOD_CALL_FAILURE, empty_dictionary);
    return;
  }
  callback.Run(DBUS_METHOD_CALL_SUCCESS, *device_properties);
}
// Posts a task to run a void callback with status code |status|.
// Posts a task to run a void callback with status code |status|; a null
// callback is silently ignored.
void ShillDeviceClientStub::PostVoidCallback(
    const VoidDBusMethodCallback& callback,
    DBusMethodCallStatus status) {
  if (callback.is_null())
    return;
  base::MessageLoop::current()->PostTask(FROM_HERE,
                                         base::Bind(callback, status));
}
// Fires OnPropertyChanged on every observer of |device_path| with the current
// value of |property|. Logs and bails on unknown device or property.
void ShillDeviceClientStub::NotifyObserversPropertyChanged(
    const dbus::ObjectPath& device_path,
    const std::string& property) {
  base::DictionaryValue* dict = NULL;
  std::string path = device_path.value();
  if (!stub_devices_.GetDictionaryWithoutPathExpansion(path, &dict)) {
    LOG(ERROR) << "Notify for unknown service: " << path;
    return;
  }
  base::Value* value = NULL;
  if (!dict->GetWithoutPathExpansion(property, &value)) {
    LOG(ERROR) << "Notify for unknown property: "
               << path << " : " << property;
    return;
  }
  FOR_EACH_OBSERVER(ShillPropertyChangedObserver,
                    GetObserverList(device_path),
                    OnPropertyChanged(property, *value));
}
// Returns the mutable property dictionary for |device_path|, creating an
// empty one (owned by |stub_devices_|) on first use.
base::DictionaryValue* ShillDeviceClientStub::GetDeviceProperties(
    const std::string& device_path) {
  base::DictionaryValue* properties = NULL;
  if (!stub_devices_.GetDictionaryWithoutPathExpansion(
          device_path, &properties)) {
    properties = new base::DictionaryValue;
    stub_devices_.Set(device_path, properties);
  }
  return properties;
}
// Returns the observer list for |device_path|, lazily creating it. The lists
// are heap-allocated and freed in the destructor.
ShillDeviceClientStub::PropertyObserverList&
ShillDeviceClientStub::GetObserverList(const dbus::ObjectPath& device_path) {
  std::map<dbus::ObjectPath, PropertyObserverList*>::iterator iter =
      observer_list_.find(device_path);
  if (iter != observer_list_.end())
    return *(iter->second);
  PropertyObserverList* observer_list = new PropertyObserverList();
  observer_list_[device_path] = observer_list;
  return *observer_list;
}
} // namespace chromeos
| bsd-3-clause |
chromium/chromium | third_party/blink/web_tests/wpt_internal/handwriting/resources/mock-handwriting-recognition-service.js | 3396 | import {
CreateHandwritingRecognizerResult,
HandwritingRecognitionService,
HandwritingRecognitionServiceReceiver,
HandwritingRecognizerReceiver,
HandwritingRecognizerRemote,
HandwritingRecognitionType,
HandwritingInputType,
} from '/gen/third_party/blink/public/mojom/handwriting/handwriting.mojom.m.js';
// Generates the prediction result based on strokes and hints.
// The segmentation result is empty.
// Converts a Mojo handwriting stroke into the plain {x, y, t} form used by
// the echoed prediction: coordinates are rounded and the BigInt microsecond
// timestamp becomes integer milliseconds.
function transformHandwritingMojoStroke(stroke) {
  const converted = [];
  for (const point of stroke.points) {
    converted.push({
      x: Math.round(point.location.x),
      y: Math.round(point.location.y),
      t: Math.round(Number(point.t.microseconds) / 1000),
    });
  }
  return converted;
}
// Converts an IDL HandwritingStroke (exposing getPoints()) into the plain
// {x, y, t} form, rounding every component.
function transformHandwritingIDLStroke(stroke) {
  const converted = [];
  for (const point of stroke.getPoints()) {
    converted.push({
      x: Math.round(point.x),
      y: Math.round(point.y),
      t: Math.round(point.t),
    });
  }
  return converted;
}
// We need to export this function because we will verify whether the prediction
// result is as expected.
// We need to export this function because we will verify whether the prediction
// result is as expected.
export function generateHandwritingPrediction(strokes, hints) {
  // Distinguish Mojo strokes (which carry a `points` member) from IDL ones.
  // Note that `stroke instanceof HandwritingStroke` does not work here.
  const converted = strokes.map(
      (stroke) => 'points' in stroke ? transformHandwritingMojoStroke(stroke) :
                                       transformHandwritingIDLStroke(stroke));
  const result = {strokes: converted, hints: hints};
  // A single candidate echoing the input; the segmentation result is empty.
  return [{text: JSON.stringify(result), segmentationResult: []}];
}
// Fake HandwritingRecognizer mojo implementation. Predictions echo the input
// strokes/hints (see generateHandwritingPrediction) so tests can verify what
// the renderer sent.
class MockHandwritingRecognizer {
  // In this mock impl, we ignore the `modelConstraint`.
  constructor(modelConstraint) {}

  // Binds this object as the implementation behind the given pipe handle.
  bind(request) {
    this.receiver_ = new HandwritingRecognizerReceiver(this);
    this.receiver_.$.bindHandle(request.handle);
  }

  async getPrediction(strokes, hints) {
    return {prediction: generateHandwritingPrediction(strokes, hints)};
  }
}
let mockHandwritingRecognizer =
new MockHandwritingRecognizer({languages: ['en']});
// Intercepts browser-side HandwritingRecognitionService interface requests
// and serves them with the shared mock recognizer above.
class MockHandwritingRecognitionService {
  constructor() {
    // Route all HandwritingRecognitionService requests in this frame to |this|.
    this.interceptor_ = new MojoInterfaceInterceptor(
        HandwritingRecognitionService.$interfaceName);
    this.interceptor_.oninterfacerequest = e => this.bind(e.handle);
    this.receiver_ = new HandwritingRecognitionServiceReceiver(this);
    this.interceptor_.start();
  }

  bind(handle) {
    this.receiver_.$.bindHandle(handle);
  }

  // Always succeeds, handing back a remote bound to the shared mock recognizer.
  async createHandwritingRecognizer(modelConstraint) {
    const handwritingRecognizer = new HandwritingRecognizerRemote();
    mockHandwritingRecognizer.bind(
        handwritingRecognizer.$.bindNewPipeAndPassReceiver());
    return {
      result: CreateHandwritingRecognizerResult.kOk,
      handwritingRecognizer: handwritingRecognizer,
    };
  }

  async queryHandwritingRecognizer(constraint) {
    // Pretend to support all features.
    let desc = {
      textAlternatives: true,
      textSegmentation: true,
      hints: {
        recognitionType: [
          HandwritingRecognitionType.kText,
        ],
        inputType: [
          HandwritingInputType.kMouse,
          HandwritingInputType.kStylus,
          HandwritingInputType.kTouch,
        ],
        textContext: true,
        alternatives: true,
      }
    };
    return { result: desc };
  }
}
let mockHandwritingRecognitionService = new MockHandwritingRecognitionService();
| bsd-3-clause |
if1live/litesql | src/tests/test-datetime.cpp | 883 | /* LiteSQL - test-datetime
*
* The list of contributors at http://litesql.sf.net/
*
* See LICENSE for copyright information. */
#include <assert.h>
#include "litesql/datetime.hpp"
/*
Datetime unit tester
TC1: test for equality on load/ save (see ticket #13)
*/
using namespace litesql;
// TC1 (ticket #13): serializing a DateTime/Date/Time to its string form and
// converting back must preserve the underlying value.
int main(int argc, char *argv[]) {
  // TC1 for DateTime
  DateTime dt;
  std::string dtstring = dt.asString();
  DateTime dt2 = convert<const string&, DateTime>(dtstring);
  assert(dt.timeStamp() == dt2.timeStamp());

  // TC1 for Date
  Date d;
  std::string dstring = d.asString();
  Date d2 = convert<const string&, Date>(dstring);
  assert(d.timeStamp() == d2.timeStamp());

  // TC1 for Time
  Time t;
  std::string tstring = t.asString();
  Time t2 = convert<const string&, Time>(tstring);
  assert(t.secs() == t2.secs());
  return 0;
}
| bsd-3-clause |
amyefev/AnnaShop | vendor/squizlabs/php_codesniffer/src/Standards/Squiz/Sniffs/CSS/SemicolonSpacingSniff.php | 2130 | <?php
/**
* Ensure each style definition has a semi-colon and it is spaced correctly.
*
* @author Greg Sherwood <gsherwood@squiz.net>
* @copyright 2006-2015 Squiz Pty Ltd (ABN 77 084 670 600)
* @license https://github.com/squizlabs/PHP_CodeSniffer/blob/master/licence.txt BSD Licence
*/
namespace PHP_CodeSniffer\Standards\Squiz\Sniffs\CSS;
use PHP_CodeSniffer\Sniffs\Sniff;
use PHP_CodeSniffer\Files\File;
class SemicolonSpacingSniff implements Sniff
{

    /**
     * Tokenizers supported by this sniff.
     *
     * @var array
     */
    public $supportedTokenizers = array('CSS');


    /**
     * Registers the token types this sniff listens for.
     *
     * @return int[]
     */
    public function register()
    {
        return array(T_STYLE);

    }//end register()


    /**
     * Checks that the style definition at $stackPtr ends with a semicolon
     * that is not preceded by whitespace.
     *
     * @param \PHP_CodeSniffer\Files\File $phpcsFile The file where the token was found.
     * @param int                         $stackPtr  The position in the stack where
     *                                               the token was found.
     *
     * @return void
     */
    public function process(File $phpcsFile, $stackPtr)
    {
        $tokens = $phpcsFile->getTokens();

        $semicolonPtr = $phpcsFile->findNext(T_SEMICOLON, ($stackPtr + 1));
        if ($semicolonPtr === false
            || $tokens[$semicolonPtr]['line'] !== $tokens[$stackPtr]['line']
        ) {
            // No semicolon on the same line as the style definition.
            $phpcsFile->addError('Style definitions must end with a semicolon', $stackPtr, 'NotAtEnd');
            return;
        }

        $prevPtr = ($semicolonPtr - 1);
        if ($tokens[$prevPtr]['code'] !== T_WHITESPACE) {
            return;
        }

        // Whitespace directly before the semicolon is fixable by removal.
        $error = 'Expected 0 spaces before semicolon in style definition; %s found';
        $data  = array(strlen($tokens[$prevPtr]['content']));
        $fix   = $phpcsFile->addFixableError($error, $stackPtr, 'SpaceFound', $data);
        if ($fix === true) {
            $phpcsFile->fixer->replaceToken($prevPtr, '');
        }

    }//end process()

}//end class
| bsd-3-clause |
l8s/Eto | Source/Eto.Direct2D/Drawing/SolidBrushHandler.cs | 918 | using Eto.Drawing;
using sd = SharpDX.Direct2D1;
namespace Eto.Direct2D.Drawing
{
/// <summary>
/// Handler for <see cref="ISolidBrush"/>
/// </summary>
/// <copyright>(c) 2013 by Vivek Jhaveri</copyright>
/// <license type="BSD-3">See LICENSE for full terms</license>
public class SolidBrushHandler : SolidBrush.IHandler
{
public class SolidBrushData : BrushData
{
public Color Color { get; set; }
protected override sd.Brush Create(sd.RenderTarget target)
{
return new sd.SolidColorBrush(target, Color.ToDx());
}
}
public object Create(Color color)
{
return new SolidBrushData { Color = color };
}
public Color GetColor(SolidBrush widget)
{
return ((SolidBrushData)widget.ControlObject).Color;
}
public void SetColor(SolidBrush widget, Color color)
{
var brush = ((SolidBrushData)widget.ControlObject);
brush.Reset();
brush.Color = color;
}
}
}
| bsd-3-clause |
chromium2014/src | chrome/browser/chromeos/file_manager/filesystem_api_util.cc | 8916 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/file_manager/filesystem_api_util.h"
#include "base/callback.h"
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/memory/scoped_ptr.h"
#include "chrome/browser/chromeos/drive/file_errors.h"
#include "chrome/browser/chromeos/drive/file_system_interface.h"
#include "chrome/browser/chromeos/drive/file_system_util.h"
#include "chrome/browser/chromeos/file_manager/app_id.h"
#include "chrome/browser/chromeos/file_manager/fileapi_util.h"
#include "chrome/browser/chromeos/file_system_provider/mount_path_util.h"
#include "chrome/browser/chromeos/file_system_provider/provided_file_system_interface.h"
#include "chrome/browser/extensions/extension_util.h"
#include "chrome/browser/profiles/profile.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/storage_partition.h"
#include "google_apis/drive/task_util.h"
#include "webkit/browser/fileapi/file_system_context.h"
namespace file_manager {
namespace util {
namespace {
// Helper function used to implement GetNonNativeLocalPathMimeType. It extracts
// the mime type from the passed Drive resource entry.
void GetMimeTypeAfterGetResourceEntryForDrive(
    const base::Callback<void(bool, const std::string&)>& callback,
    drive::FileError error,
    scoped_ptr<drive::ResourceEntry> entry) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);

  // && short-circuits, so |entry| is only dereferenced on success.
  if (error == drive::FILE_ERROR_OK && entry->has_file_specific_info())
    callback.Run(true, entry->file_specific_info().content_mime_type());
  else
    callback.Run(false, std::string());
}
// Helper function used to implement GetNonNativeLocalPathMimeType. It extracts
// the mime type from the passed metadata from a providing extension.
void GetMimeTypeAfterGetMetadataForProvidedFileSystem(
    const base::Callback<void(bool, const std::string&)>& callback,
    const chromeos::file_system_provider::EntryMetadata& metadata,
    base::File::Error result) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);

  if (result == base::File::FILE_OK)
    callback.Run(true, metadata.mime_type);
  else
    callback.Run(false, std::string());
}
// Helper function converting a callback that takes a boolean value to one
// that takes File::Error, regarding FILE_OK as the only successful value.
void BoolCallbackAsFileErrorCallback(
    const base::Callback<void(bool)>& callback,
    base::File::Error error) {
  const bool succeeded = (error == base::File::FILE_OK);
  callback.Run(succeeded);
}
// Part of PrepareFileOnIOThread. It tries to create a new file if the given
// |url| is not already inhabited.
void PrepareFileAfterCheckExistOnIOThread(
    scoped_refptr<fileapi::FileSystemContext> file_system_context,
    const fileapi::FileSystemURL& url,
    const fileapi::FileSystemOperation::StatusCallback& callback,
    base::File::Error error) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);

  // Any result other than "not found" — including FILE_OK, meaning the file
  // already exists — is forwarded to the callback unchanged.
  if (error != base::File::FILE_ERROR_NOT_FOUND) {
    callback.Run(error);
    return;
  }

  // Call with the second argument |exclusive| set to false, meaning that it
  // is not an error even if the file already exists (it can happen if the file
  // is created after the previous FileExists call and before this CreateFile.)
  //
  // Note that the preceding call to FileExists is necessary for handling
  // read only filesystems that blindly rejects handling CreateFile().
  file_system_context->operation_runner()->CreateFile(url, false, callback);
}
// Checks whether a file exists at the given |url|, and try creating it if it
// is not already there. |callback| receives true iff the chained operation
// finishes with base::File::FILE_OK.
void PrepareFileOnIOThread(
    scoped_refptr<fileapi::FileSystemContext> file_system_context,
    const fileapi::FileSystemURL& url,
    const base::Callback<void(bool)>& callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  file_system_context->operation_runner()->FileExists(
      url,
      base::Bind(&PrepareFileAfterCheckExistOnIOThread,
                 file_system_context,
                 url,
                 base::Bind(&BoolCallbackAsFileErrorCallback, callback)));
}
} // namespace
// Returns true when |path| cracks to a valid FileSystemURL whose type is
// neither of the native-local variants, i.e. it belongs to a virtual mount
// point (Drive, provided file systems, ...).
bool IsUnderNonNativeLocalPath(Profile* profile,
                               const base::FilePath& path) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);

  GURL url;
  // Cracking is done on behalf of the file manager app.
  if (!util::ConvertAbsoluteFilePathToFileSystemUrl(
          profile, path, kFileManagerAppId, &url)) {
    return false;
  }

  fileapi::FileSystemURL filesystem_url =
      GetFileSystemContextForExtensionId(profile,
                                         kFileManagerAppId)->CrackURL(url);
  if (!filesystem_url.is_valid())
    return false;

  switch (filesystem_url.type()) {
    case fileapi::kFileSystemTypeNativeLocal:
    case fileapi::kFileSystemTypeRestrictedNativeLocal:
      return false;
    default:
      // The path indeed corresponds to a mount point not associated with a
      // native local path.
      return true;
  }
}
// Resolves the mime type of a non-native |path| asynchronously; |callback|
// always runs on the UI thread. Drive paths consult the Drive resource
// entry, provided-file-system paths consult the providing extension's
// metadata, and everything else reports success with an empty mime type.
void GetNonNativeLocalPathMimeType(
    Profile* profile,
    const base::FilePath& path,
    const base::Callback<void(bool, const std::string&)>& callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  DCHECK(IsUnderNonNativeLocalPath(profile, path));

  if (drive::util::IsUnderDriveMountPoint(path)) {
    drive::FileSystemInterface* file_system =
        drive::util::GetFileSystemByProfile(profile);
    if (!file_system) {
      // Post instead of running directly so the callback is always invoked
      // asynchronously.
      content::BrowserThread::PostTask(
          content::BrowserThread::UI,
          FROM_HERE,
          base::Bind(callback, false, std::string()));
      return;
    }

    file_system->GetResourceEntry(
        drive::util::ExtractDrivePath(path),
        base::Bind(&GetMimeTypeAfterGetResourceEntryForDrive, callback));
    return;
  }

  if (chromeos::file_system_provider::util::IsFileSystemProviderLocalPath(
          path)) {
    chromeos::file_system_provider::util::LocalPathParser parser(profile, path);
    if (!parser.Parse()) {
      content::BrowserThread::PostTask(
          content::BrowserThread::UI,
          FROM_HERE,
          base::Bind(callback, false, std::string()));
      return;
    }

    parser.file_system()->GetMetadata(
        parser.file_path(),
        base::Bind(&GetMimeTypeAfterGetMetadataForProvidedFileSystem,
                   callback));
    return;
  }

  // As a fallback just return success with an empty mime type value.
  content::BrowserThread::PostTask(
      content::BrowserThread::UI,
      FROM_HERE,
      base::Bind(callback, true /* success */, std::string()));
}
// Asynchronously reports via |callback| whether |path| is an existing
// directory, using the file manager app's file system context.
void IsNonNativeLocalPathDirectory(
    Profile* profile,
    const base::FilePath& path,
    const base::Callback<void(bool)>& callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  DCHECK(IsUnderNonNativeLocalPath(profile, path));

  GURL url;
  if (!util::ConvertAbsoluteFilePathToFileSystemUrl(
          profile, path, kFileManagerAppId, &url)) {
    // Posting to the current thread, so that we always call back asynchronously
    // independent from whether or not the operation succeeds.
    content::BrowserThread::PostTask(content::BrowserThread::UI,
                                     FROM_HERE,
                                     base::Bind(callback, false));
    return;
  }

  util::CheckIfDirectoryExists(
      GetFileSystemContextForExtensionId(profile, kFileManagerAppId),
      url,
      base::Bind(&BoolCallbackAsFileErrorCallback, callback));
}
// Ensures a file exists at |path| (creating it when missing) so that a
// writable app can open it. The existence check / creation runs on the IO
// thread; |callback| reports overall success.
void PrepareNonNativeLocalFileForWritableApp(
    Profile* profile,
    const base::FilePath& path,
    const base::Callback<void(bool)>& callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  DCHECK(IsUnderNonNativeLocalPath(profile, path));

  GURL url;
  if (!util::ConvertAbsoluteFilePathToFileSystemUrl(
          profile, path, kFileManagerAppId, &url)) {
    // Posting to the current thread, so that we always call back asynchronously
    // independent from whether or not the operation succeeds.
    content::BrowserThread::PostTask(content::BrowserThread::UI,
                                     FROM_HERE,
                                     base::Bind(callback, false));
    return;
  }

  fileapi::FileSystemContext* const context =
      GetFileSystemContextForExtensionId(profile, kFileManagerAppId);
  DCHECK(context);

  // Check the existence of a file using file system API implementation on
  // behalf of the file manager app. We need to grant access beforehand.
  context->external_backend()->GrantFullAccessToExtension(kFileManagerAppId);

  content::BrowserThread::PostTask(
      content::BrowserThread::IO,
      FROM_HERE,
      base::Bind(&PrepareFileOnIOThread,
                 make_scoped_refptr(context),
                 context->CrackURL(url),
                 google_apis::CreateRelayCallback(callback)));
}
} // namespace util
} // namespace file_manager
| bsd-3-clause |
wuhengzhi/chromium-crosswalk | content/browser/media/midi_host.cc | 9158 | // Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/media/midi_host.h"
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/metrics/histogram_macros.h"
#include "base/process/process.h"
#include "base/trace_event/trace_event.h"
#include "content/browser/bad_message.h"
#include "content/browser/browser_main_loop.h"
#include "content/browser/child_process_security_policy_impl.h"
#include "content/browser/media/media_internals.h"
#include "content/common/media/midi_messages.h"
#include "content/public/browser/content_browser_client.h"
#include "content/public/browser/media_observer.h"
#include "content/public/browser/user_metrics.h"
#include "media/midi/midi_manager.h"
#include "media/midi/midi_message_queue.h"
#include "media/midi/midi_message_util.h"
namespace content {
namespace {
// The total number of bytes which we're allowed to send to the OS
// before knowing that they have been successfully sent.
// (Enforced in OnSendData; decremented in AccumulateMidiBytesSent.)
const size_t kMaxInFlightBytes = 10 * 1024 * 1024;  // 10 MB.

// We keep track of the number of bytes successfully sent to
// the hardware. Every once in a while we report back to the renderer
// the number of bytes sent since the last report. This threshold determines
// how many bytes will be sent before reporting back to the renderer.
const size_t kAcknowledgementThresholdBytes = 1024 * 1024;  // 1 MB.
// True for MIDI data bytes, i.e. values 0x00-0x7f (high bit clear).
bool IsDataByte(uint8_t data) {
  return data < 0x80;
}
// True for MIDI system real-time status bytes (0xf8-0xff).
bool IsSystemRealTimeMessage(uint8_t data) {
  // uint8_t cannot exceed 0xff, so only the lower bound needs checking.
  return data >= 0xf8;
}
} // namespace
using media::midi::kSysExByte;
using media::midi::kEndOfSysExByte;
// |midi_manager| must be non-null; the pointer is later cleared through
// Detach() when the manager goes away.
MidiHost::MidiHost(int renderer_process_id,
                   media::midi::MidiManager* midi_manager)
    : BrowserMessageFilter(MidiMsgStart),
      renderer_process_id_(renderer_process_id),
      has_sys_ex_permission_(false),
      is_session_requested_(false),
      midi_manager_(midi_manager),
      sent_bytes_in_flight_(0),
      bytes_sent_since_last_acknowledgement_(0),
      output_port_count_(0) {
  DCHECK(midi_manager_);
}
// Destruction is routed to the IO thread via OnDestruct() below.
MidiHost::~MidiHost() = default;
// Ends any active MIDI session when the renderer channel goes away.
void MidiHost::OnChannelClosing() {
  // If we get here the MidiHost is going to be destroyed soon. Prevent any
  // subsequent calls from MidiManager by closing our session.
  // If Send() is called from a different thread (e.g. a separate thread owned
  // by the MidiManager implementation), it will get posted to the IO thread.
  // There is a race condition here if our refcount is 0 and we're about to or
  // have already entered OnDestruct().
  if (is_session_requested_ && midi_manager_) {
    midi_manager_->EndSession(this);
    is_session_requested_ = false;
  }
}
// Ensures this object is always deleted on the IO thread.
void MidiHost::OnDestruct() const {
  BrowserThread::DeleteOnIOThread::Destruct(this);
}
// IPC Messages handler
// Dispatches renderer->browser MIDI IPC messages to the handlers below;
// returns false for messages this filter does not handle.
bool MidiHost::OnMessageReceived(const IPC::Message& message) {
  bool handled = true;
  IPC_BEGIN_MESSAGE_MAP(MidiHost, message)
    IPC_MESSAGE_HANDLER(MidiHostMsg_StartSession, OnStartSession)
    IPC_MESSAGE_HANDLER(MidiHostMsg_SendData, OnSendData)
    IPC_MESSAGE_HANDLER(MidiHostMsg_EndSession, OnEndSession)
    IPC_MESSAGE_UNHANDLED(handled = false)
  IPC_END_MESSAGE_MAP()
  return handled;
}
// Requests a platform MIDI session on behalf of the renderer; the result is
// delivered later through CompleteStartSession().
void MidiHost::OnStartSession() {
  is_session_requested_ = true;
  if (midi_manager_)
    midi_manager_->StartSession(this);
}
// Validates MIDI |data| coming from the renderer and forwards it to the
// platform MIDI manager. Kills the renderer for an out-of-range port or an
// unauthorized system-exclusive message; silently drops malformed data.
void MidiHost::OnSendData(uint32_t port,
                          const std::vector<uint8_t>& data,
                          double timestamp) {
  {
    base::AutoLock auto_lock(output_port_count_lock_);
    if (output_port_count_ <= port) {
      bad_message::ReceivedBadMessage(this, bad_message::MH_INVALID_MIDI_PORT);
      return;
    }
  }

  if (data.empty())
    return;

  // Blink running in a renderer checks permission to raise a SecurityError
  // in JavaScript. The actual permission check for security purposes
  // happens here in the browser process.
  if (!has_sys_ex_permission_ &&
      std::find(data.begin(), data.end(), kSysExByte) != data.end()) {
    bad_message::ReceivedBadMessage(this, bad_message::MH_SYS_EX_PERMISSION);
    return;
  }

  if (!IsValidWebMIDIData(data))
    return;

  {
    base::AutoLock auto_lock(in_flight_lock_);
    // Sanity check that we won't send too much data.
    // TODO(yukawa): Consider to send an error event back to the renderer
    // after some future discussion in W3C.
    if (data.size() + sent_bytes_in_flight_ > kMaxInFlightBytes)
      return;
    sent_bytes_in_flight_ += data.size();
  }
  if (midi_manager_)
    midi_manager_->DispatchSendMidiData(this, port, data, timestamp);
}
// Tears down the session previously requested via OnStartSession().
void MidiHost::OnEndSession() {
  is_session_requested_ = false;
  if (midi_manager_)
    midi_manager_->EndSession(this);
}
// Called by the MidiManager when session setup finishes; caches the sysex
// permission on success and reports |result| to the renderer.
void MidiHost::CompleteStartSession(media::midi::Result result) {
  DCHECK(is_session_requested_);
  if (result == media::midi::Result::OK) {
    // ChildSecurityPolicy is set just before OnStartSession by
    // MidiDispatcherHost. So we can safely cache the policy.
    has_sys_ex_permission_ = ChildProcessSecurityPolicyImpl::GetInstance()->
        CanSendMidiSysExMessage(renderer_process_id_);
  }
  Send(new MidiMsg_SessionStarted(result));
}
// Registers a new input port and notifies the renderer about it.
void MidiHost::AddInputPort(const media::midi::MidiPortInfo& info) {
  base::AutoLock auto_lock(messages_queues_lock_);
  // MidiMessageQueue is created later in ReceiveMidiData().
  received_messages_queues_.push_back(nullptr);
  Send(new MidiMsg_AddInputPort(info));
}
// Registers a new output port (bumping the count used to validate
// OnSendData port indices) and notifies the renderer.
void MidiHost::AddOutputPort(const media::midi::MidiPortInfo& info) {
  base::AutoLock auto_lock(output_port_count_lock_);
  output_port_count_++;
  Send(new MidiMsg_AddOutputPort(info));
}
// Forwards an input-port state change to the renderer.
void MidiHost::SetInputPortState(uint32_t port,
                                 media::midi::MidiPortState state) {
  Send(new MidiMsg_SetInputPortState(port, state));
}
// Forwards an output-port state change to the renderer.
void MidiHost::SetOutputPortState(uint32_t port,
                                  media::midi::MidiPortState state) {
  Send(new MidiMsg_SetOutputPortState(port, state));
}
// Accumulates raw bytes from the hardware into a per-port queue and forwards
// each complete MIDI message to the renderer, silently dropping sysex
// messages when the renderer lacks the sysex permission.
void MidiHost::ReceiveMidiData(uint32_t port,
                               const uint8_t* data,
                               size_t length,
                               double timestamp) {
  TRACE_EVENT0("midi", "MidiHost::ReceiveMidiData");

  base::AutoLock auto_lock(messages_queues_lock_);
  if (received_messages_queues_.size() <= port)
    return;

  // Lazy initialization
  if (received_messages_queues_[port] == nullptr)
    received_messages_queues_[port] = new media::midi::MidiMessageQueue(true);

  received_messages_queues_[port]->Add(data, length);
  std::vector<uint8_t> message;
  while (true) {
    received_messages_queues_[port]->Get(&message);
    if (message.empty())
      break;

    // MIDI devices may send a system exclusive messages even if the renderer
    // doesn't have a permission to receive it. Don't kill the renderer as
    // OnSendData() does.
    if (message[0] == kSysExByte && !has_sys_ex_permission_)
      continue;

    // Send to the renderer.
    Send(new MidiMsg_DataReceived(port, message, timestamp));
  }
}
// Records |n| bytes as successfully delivered to the hardware, shrinking the
// in-flight budget and periodically acknowledging progress to the renderer.
void MidiHost::AccumulateMidiBytesSent(size_t n) {
  {
    base::AutoLock auto_lock(in_flight_lock_);
    if (n <= sent_bytes_in_flight_)
      sent_bytes_in_flight_ -= n;
  }

  // Accumulate only when the unsigned addition does not wrap around.
  if (bytes_sent_since_last_acknowledgement_ + n >=
      bytes_sent_since_last_acknowledgement_)
    bytes_sent_since_last_acknowledgement_ += n;

  if (bytes_sent_since_last_acknowledgement_ >=
      kAcknowledgementThresholdBytes) {
    Send(new MidiMsg_AcknowledgeSentData(
        bytes_sent_since_last_acknowledgement_));
    bytes_sent_since_last_acknowledgement_ = 0;
  }
}
// Drops the reference to the MidiManager; subsequent calls that would use it
// become no-ops.
void MidiHost::Detach() {
  midi_manager_ = nullptr;
}
// static
// Returns true iff |data| is a sequence of complete MIDI messages: every
// status byte is followed by exactly the data bytes its message type
// requires, every system-exclusive message (0xf0) is terminated by 0xf7,
// and system real-time bytes (0xf8-0xff) may appear anywhere.
bool MidiHost::IsValidWebMIDIData(const std::vector<uint8_t>& data) {
  bool in_sysex = false;            // Inside an unterminated sysex message.
  size_t sysex_start_offset = 0;    // Offset of the current sysex's 0xf0.
  size_t waiting_data_length = 0;   // Data bytes still owed to a status byte.
  for (size_t i = 0; i < data.size(); ++i) {
    const uint8_t current = data[i];
    if (IsSystemRealTimeMessage(current))
      continue;  // Real time message can be placed at any point.
    if (waiting_data_length > 0) {
      if (!IsDataByte(current))
        return false;  // Error: |current| should have been data byte.
      --waiting_data_length;
      continue;  // Found data byte as expected.
    }
    if (in_sysex) {
      if (data[i] == kEndOfSysExByte) {
        in_sysex = false;
        UMA_HISTOGRAM_COUNTS("Media.Midi.SysExMessageSizeUpTo1MB",
                             i - sysex_start_offset + 1);
      } else if (!IsDataByte(current)) {
        return false;  // Error: |current| should have been data byte.
      }
      continue;  // Found data byte as expected.
    }
    if (current == kSysExByte) {
      in_sysex = true;
      sysex_start_offset = i;
      continue;  // Found SysEX
    }
    waiting_data_length = media::midi::GetMidiMessageLength(current);
    if (waiting_data_length == 0)
      return false;  // Error: |current| should have been a valid status byte.
    --waiting_data_length;  // Found status byte
  }
  // Valid only if no message is left incomplete.
  return waiting_data_length == 0 && !in_sysex;
}
} // namespace content
| bsd-3-clause |
sidred10/json | json_tests/benches/bench_json.rs | 2200 | use std::{f64, i64, u64};
use test::Bencher;
use serde_json;
#[bench]
fn bench_deserializer_i64(b: &mut Bencher) {
    // Measure deserialization of the most negative i64.
    let payload = serde_json::to_string(&i64::MIN).unwrap();
    b.bytes = payload.len() as u64;
    b.iter(|| {
        let _parsed: i64 = serde_json::from_str(&payload).unwrap();
    });
}
#[bench]
fn bench_deserializer_u64(b: &mut Bencher) {
    // Measure deserialization of the largest u64.
    let payload = serde_json::to_string(&u64::MAX).unwrap();
    b.bytes = payload.len() as u64;
    b.iter(|| {
        let _parsed: u64 = serde_json::from_str(&payload).unwrap();
    });
}
#[bench]
fn bench_deserializer_f64_epsilon(b: &mut Bencher) {
    // Measure deserialization of f64 machine epsilon.
    let payload = serde_json::to_string(&f64::EPSILON).unwrap();
    b.bytes = payload.len() as u64;
    b.iter(|| {
        let _parsed: f64 = serde_json::from_str(&payload).unwrap();
    });
}
#[bench]
fn bench_deserializer_f64_min(b: &mut Bencher) {
    // Measure deserialization of the most negative finite f64.
    let payload = serde_json::to_string(&f64::MIN).unwrap();
    b.bytes = payload.len() as u64;
    b.iter(|| {
        let _parsed: f64 = serde_json::from_str(&payload).unwrap();
    });
}
#[bench]
fn bench_deserializer_f64_max(b: &mut Bencher) {
    // Measure deserialization of the largest finite f64.
    // Fix: removed a dead string literal that was immediately shadowed by the
    // serialized value, and a leftover debug `println!` that polluted bench
    // output and distorted the setup cost.
    let s = serde_json::to_string(&f64::MAX).unwrap();
    b.bytes = s.len() as u64;
    b.iter(|| {
        let _s: f64 = serde_json::from_str(&s).unwrap();
    });
}
/// Builds a JSON string literal: `pattern` repeated 1000 times, wrapped in
/// double quotes.
fn make_string(pattern: &str) -> String {
    let times = 1000;
    format!("\"{}\"", pattern.repeat(times))
}
#[bench]
fn bench_deserializer_string(b: &mut Bencher) {
    // Long ASCII string with no escapes: exercises the fast path.
    let payload = make_string("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz123456790");
    b.bytes = payload.len() as u64;
    b.iter(|| {
        let _parsed: String = serde_json::from_str(&payload).unwrap();
    });
}
#[bench]
fn bench_deserializer_escapes(b: &mut Bencher) {
    // Backslash escape sequences force the unescaping slow path.
    let payload = make_string(r"\b\f\n\r\t");
    b.bytes = payload.len() as u64;
    b.iter(|| {
        let _parsed: String = serde_json::from_str(&payload).unwrap();
    });
}
#[bench]
fn bench_deserializer_unicode(b: &mut Bencher) {
    // \uD834\uDD1E is a UTF-16 surrogate pair (U+1D11E), exercising
    // surrogate-pair decoding.
    let payload = make_string(r"\uD834\uDD1E");
    b.bytes = payload.len() as u64;
    b.iter(|| {
        let _parsed: String = serde_json::from_str(&payload).unwrap();
    });
}
| bsd-3-clause |
hujiajie/chromium-crosswalk | content/renderer/p2p/ipc_network_manager_unittest.cc | 4889 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/memory/scoped_ptr.h"
#include "content/renderer/p2p/ipc_network_manager.h"
#include "content/renderer/p2p/network_list_manager.h"
#include "net/base/ip_address_number.h"
#include "net/base/net_util.h"
#include "net/base/network_change_notifier.h"
#include "net/base/network_interfaces.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace content {
namespace {
// No-op NetworkListManager used to construct IpcNetworkManager in tests;
// observer registration is intentionally ignored.
class MockP2PSocketDispatcher : public NetworkListManager {
 public:
  void AddNetworkListObserver(
      NetworkListObserver* network_list_observer) override {}

  void RemoveNetworkListObserver(
      NetworkListObserver* network_list_observer) override {}

  ~MockP2PSocketDispatcher() override {}
};
} // namespace
// 2 IPv6 addresses with only last digit different.
static const char kIPv6PublicAddrString1[] =
    "2401:fa00:4:1000:be30:5b30:50e5:c3";
static const char kIPv6PublicAddrString2[] =
    "2401:fa00:4:1000:be30:5b30:50e5:c4";
// IPv4-mapped IPv6 literal, pushed as an "unknown" default address below.
static const char kIPv4MappedAddrString[] = "::ffff:38.32.0.0";
// Fixture wiring an IpcNetworkManager to the no-op dispatcher above.
class IpcNetworkManagerTest : public testing::Test {
 public:
  IpcNetworkManagerTest()
      : network_list_manager_(new MockP2PSocketDispatcher()),
        network_manager_(new IpcNetworkManager(network_list_manager_.get())) {}

 protected:
  // Declared before |network_manager_| so it outlives it.
  scoped_ptr<MockP2PSocketDispatcher> network_list_manager_;
  scoped_ptr<IpcNetworkManager> network_manager_;
};
// Test overall logic of IpcNetworkManager on OnNetworkListChanged
// that it should group addresses with the same network key under
// single Network class. This also tests the logic inside
// IpcNetworkManager in addition to MergeNetworkList.
// TODO(guoweis): disable this test case for now until fix for webrtc
// issue 19249005 integrated into chromium
TEST_F(IpcNetworkManagerTest, TestMergeNetworkList) {
  net::NetworkInterfaceList list;
  net::IPAddressNumber ip_number;
  std::vector<rtc::Network*> networks;
  rtc::IPAddress ip_address;

  // Add 2 networks with the same prefix and prefix length.
  EXPECT_TRUE(net::ParseIPLiteralToNumber(kIPv6PublicAddrString1, &ip_number));
  list.push_back(
      net::NetworkInterface("em1",
                            "em1",
                            0,
                            net::NetworkChangeNotifier::CONNECTION_UNKNOWN,
                            ip_number,
                            64,
                            net::IP_ADDRESS_ATTRIBUTE_NONE));
  EXPECT_TRUE(net::ParseIPLiteralToNumber(kIPv6PublicAddrString2, &ip_number));
  list.push_back(
      net::NetworkInterface("em1",
                            "em1",
                            0,
                            net::NetworkChangeNotifier::CONNECTION_UNKNOWN,
                            ip_number,
                            64,
                            net::IP_ADDRESS_ATTRIBUTE_NONE));

  network_manager_->OnNetworkListChanged(list, net::IPAddressNumber(),
                                         net::IPAddressNumber());
  network_manager_->GetNetworks(&networks);
  // Both addresses share one network key, so they merge into one network.
  EXPECT_EQ(1uL, networks.size());
  EXPECT_EQ(2uL, networks[0]->GetIPs().size());

  // Add another network with different prefix length, should result in
  // a different network.
  networks.clear();
  list.push_back(
      net::NetworkInterface("em1",
                            "em1",
                            0,
                            net::NetworkChangeNotifier::CONNECTION_UNKNOWN,
                            ip_number,
                            48,
                            net::IP_ADDRESS_ATTRIBUTE_NONE));

  // Push an unknown address as the default address.
  EXPECT_TRUE(net::ParseIPLiteralToNumber(kIPv4MappedAddrString, &ip_number));
  network_manager_->OnNetworkListChanged(list, net::IPAddressNumber(),
                                         ip_number);

  // The unknown default address should be ignored.
  EXPECT_FALSE(network_manager_->GetDefaultLocalAddress(AF_INET6, &ip_address));

  network_manager_->GetNetworks(&networks);

  // Verify we have 2 networks now.
  EXPECT_EQ(2uL, networks.size());
  // Verify the network with prefix length of 64 has 2 IP addresses.
  EXPECT_EQ(64, networks[1]->prefix_length());
  EXPECT_EQ(2uL, networks[1]->GetIPs().size());
  EXPECT_TRUE(rtc::IPFromString(kIPv6PublicAddrString1, &ip_address));
  EXPECT_EQ(networks[1]->GetIPs()[0], ip_address);
  EXPECT_TRUE(rtc::IPFromString(kIPv6PublicAddrString2, &ip_address));
  EXPECT_EQ(networks[1]->GetIPs()[1], ip_address);
  // Verify the network with prefix length of 48 has 1 IP address.
  // (NOTE(review): original comment said "2 IP addresses", contradicting the
  // EXPECT_EQ(1uL, ...) below; only one interface was added with prefix 48.)
  EXPECT_EQ(48, networks[0]->prefix_length());
  EXPECT_EQ(1uL, networks[0]->GetIPs().size());
  EXPECT_TRUE(rtc::IPFromString(kIPv6PublicAddrString2, &ip_address));
  EXPECT_EQ(networks[0]->GetIPs()[0], ip_address);
}
} // namespace content
| bsd-3-clause |
xizi-xu/Argus | ArgusWeb/app/js/services/tags.js | 320 | /*global angular:false */
angular.module('argus.services.tags', [])
.service('Tags', ['CONFIG', '$http', function(CONFIG, $http) {
	// Fetches the known values for a tag key, used to populate dropdowns.
	// Returns the $http promise so callers can chain on the response.
	this.getDropdownOptions = function(key) {
		return $http({
			method: 'GET',
			url: CONFIG.wsUrl + 'schema/tags',
			params: {tagk: key}
		});
	};
}]);
| bsd-3-clause |
adobe/chromium | chrome/browser/ui/gtk/one_click_signin_dialog_gtk.cc | 3877 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/gtk/one_click_signin_dialog_gtk.h"
#include <gtk/gtk.h>
#include "base/basictypes.h"
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/logging.h"
#include "chrome/browser/ui/gtk/gtk_util.h"
#include "grit/chromium_strings.h"
#include "grit/generated_resources.h"
#include "ui/base/gtk/gtk_hig_constants.h"
#include "ui/base/l10n/l10n_util.h"
// Builds and shows the modal sign-in dialog. The object owns its own
// lifetime: it deletes itself in OnResponse().
OneClickSigninDialogGtk::OneClickSigninDialogGtk(
    GtkWindow* parent_window,
    const OneClickAcceptCallback& accept_callback)
    : dialog_(NULL),
      use_default_settings_checkbox_(NULL),
      accept_callback_(accept_callback) {
  // Lay out the dialog.
  dialog_ = gtk_dialog_new_with_buttons(
      l10n_util::GetStringUTF8(IDS_ONE_CLICK_SIGNIN_DIALOG_TITLE).c_str(),
      parent_window,
      GTK_DIALOG_MODAL,
      NULL);
  ignore_result(gtk_dialog_add_button(
      GTK_DIALOG(dialog_),
      l10n_util::GetStringUTF8(IDS_CANCEL).c_str(),
      GTK_RESPONSE_CLOSE));
  GtkWidget* ok_button = gtk_dialog_add_button(
      GTK_DIALOG(dialog_),
      l10n_util::GetStringUTF8(IDS_ONE_CLICK_SIGNIN_DIALOG_OK_BUTTON).c_str(),
      GTK_RESPONSE_ACCEPT);

#if !GTK_CHECK_VERSION(2, 22, 0)
  gtk_dialog_set_has_separator(GTK_DIALOG(dialog_), FALSE);
#endif

  GtkWidget* const content_area =
      gtk_dialog_get_content_area(GTK_DIALOG(dialog_));
  gtk_box_set_spacing(GTK_BOX(content_area), ui::kContentAreaSpacing);

  // Heading.
  GtkWidget* heading_label = gtk_util::CreateBoldLabel(
      l10n_util::GetStringUTF8(IDS_ONE_CLICK_SIGNIN_DIALOG_HEADING).c_str());
  gtk_label_set_line_wrap(GTK_LABEL(heading_label), TRUE);
  gtk_misc_set_alignment(GTK_MISC(heading_label), 0.0, 0.5);
  gtk_box_pack_start(GTK_BOX(content_area), heading_label, FALSE, FALSE, 0);

  // Message.
  GtkWidget* message_label = gtk_label_new(
      l10n_util::GetStringUTF8(IDS_ONE_CLICK_SIGNIN_DIALOG_MESSAGE).c_str());
  gtk_label_set_line_wrap(GTK_LABEL(message_label), TRUE);
  gtk_misc_set_alignment(GTK_MISC(message_label), 0.0, 0.5);
  gtk_box_pack_start(GTK_BOX(content_area), message_label, FALSE, FALSE, 0);

  // Checkbox. Defaults to checked ("use default settings").
  use_default_settings_checkbox_ = gtk_check_button_new_with_label(
      l10n_util::GetStringUTF8(
          IDS_ONE_CLICK_SIGNIN_DIALOG_CHECKBOX).c_str());
  gtk_toggle_button_set_active(
      GTK_TOGGLE_BUTTON(use_default_settings_checkbox_), TRUE);
  gtk_box_pack_start(GTK_BOX(content_area),
                     use_default_settings_checkbox_, FALSE, FALSE, 0);

  g_signal_connect(dialog_, "response", G_CALLBACK(OnResponseThunk), this);

  gtk_window_set_resizable(GTK_WINDOW(dialog_), FALSE);
  gtk_dialog_set_default_response(GTK_DIALOG(dialog_), GTK_RESPONSE_ACCEPT);
  gtk_widget_show_all(dialog_);
  gtk_widget_grab_focus(ok_button);
}
// Overrides the "use default settings" checkbox state from tests.
// Bug fix: the checkbox was previously hard-coded to FALSE, silently
// ignoring the |use_default_settings| argument.
void OneClickSigninDialogGtk::SetUseDefaultSettingsForTest(
    bool use_default_settings) {
  gtk_toggle_button_set_active(
      GTK_TOGGLE_BUTTON(use_default_settings_checkbox_),
      use_default_settings);
}
// Simulates the user responding to the dialog; note OnResponse() may delete
// this object (see OnResponse).
void OneClickSigninDialogGtk::SendResponseForTest(int response_id) {
  OnResponse(dialog_, response_id);
}
// Private; invoked via "delete this" in OnResponse().
OneClickSigninDialogGtk::~OneClickSigninDialogGtk() {}
// Runs the accept callback (with the checkbox state) on accept, then always
// destroys the dialog widget and this object.
void OneClickSigninDialogGtk::OnResponse(GtkWidget* dialog, int response_id) {
  if (response_id == GTK_RESPONSE_ACCEPT) {
    const bool use_default_settings =
        gtk_toggle_button_get_active(
            GTK_TOGGLE_BUTTON(use_default_settings_checkbox_));
    accept_callback_.Run(use_default_settings);
  }

  gtk_widget_destroy(dialog_);
  delete this;
}
// Shows the dialog; the allocated object is self-owned and frees itself when
// the user responds (see OneClickSigninDialogGtk::OnResponse).
void ShowOneClickSigninDialog(
    gfx::NativeWindow parent_window,
    const OneClickAcceptCallback& accept_callback) {
  ignore_result(
      new OneClickSigninDialogGtk(parent_window, accept_callback));
}
| bsd-3-clause |
akutz/go | src/net/http/http_test.go | 4536 | // Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Tests of internal functions and things with no better homes.
package http
import (
"bytes"
"internal/testenv"
"net/url"
"os/exec"
"reflect"
"testing"
"time"
)
// Shorten the shutdown poll interval so Shutdown-related tests in this
// package finish quickly.
func init() {
	shutdownPollInterval = 5 * time.Millisecond
}
// TestForeachHeaderElement checks that comma-separated header values are
// split into trimmed, non-empty elements with case preserved.
func TestForeachHeaderElement(t *testing.T) {
	tests := []struct {
		in   string
		want []string
	}{
		{"Foo", []string{"Foo"}},
		{" Foo", []string{"Foo"}},
		{"Foo ", []string{"Foo"}},
		{" Foo ", []string{"Foo"}},

		{"foo", []string{"foo"}},
		{"anY-cAsE", []string{"anY-cAsE"}},

		{"", nil},
		{",,,, , ,, ,,, ,", nil},

		{" Foo,Bar, Baz,lower,,Quux ", []string{"Foo", "Bar", "Baz", "lower", "Quux"}},
	}
	for _, tt := range tests {
		var elems []string
		foreachHeaderElement(tt.in, func(v string) {
			elems = append(elems, v)
		})
		if !reflect.DeepEqual(elems, tt.want) {
			t.Errorf("foreachHeaderElement(%q) = %q; want %q", tt.in, elems, tt.want)
		}
	}
}
// TestCleanHost verifies host canonicalization: junk after the host is
// dropped, and IDNA hosts are lowercased, NFC-normalized, and punycoded.
func TestCleanHost(t *testing.T) {
	tests := []struct {
		in, want string
	}{
		{"www.google.com", "www.google.com"},
		{"www.google.com foo", "www.google.com"},
		{"www.google.com/foo", "www.google.com"},
		{" first character is a space", ""},
		{"[1::6]:8080", "[1::6]:8080"},

		// Punycode:
		{"гофер.рф/foo", "xn--c1ae0ajs.xn--p1ai"},
		{"bücher.de", "xn--bcher-kva.de"},
		{"bücher.de:8080", "xn--bcher-kva.de:8080"},
		// Verify we convert to lowercase before punycode:
		{"BÜCHER.de", "xn--bcher-kva.de"},
		{"BÜCHER.de:8080", "xn--bcher-kva.de:8080"},
		// Verify we normalize to NFC before punycode:
		{"gophér.nfc", "xn--gophr-esa.nfc"},            // NFC input; no work needed
		{"goph\u0065\u0301r.nfd", "xn--gophr-esa.nfd"}, // NFD input
	}
	for _, tc := range tests {
		if cleaned := cleanHost(tc.in); cleaned != tc.want {
			t.Errorf("cleanHost(%q) = %q, want %q", tc.in, cleaned, tc.want)
		}
	}
}
// Test that cmd/go doesn't link in the HTTP server.
//
// This catches accidental dependencies between the HTTP transport and
// server code.
func TestCmdGoNoHTTPServer(t *testing.T) {
	t.Parallel()
	goBin := testenv.GoToolPath(t)
	// Dump cmd/go's symbol table and grep it for the symbols below.
	out, err := exec.Command(goBin, "tool", "nm", goBin).CombinedOutput()
	if err != nil {
		t.Fatalf("go tool nm: %v: %s", err, out)
	}
	wantSym := map[string]bool{
		// Verify these exist: (sanity checking this test)
		"net/http.(*Client).Get":          true,
		"net/http.(*Transport).RoundTrip": true,

		// Verify these don't exist:
		"net/http.http2Server":           false,
		"net/http.(*Server).Serve":       false,
		"net/http.(*ServeMux).ServeHTTP": false,
		"net/http.DefaultServeMux":       false,
	}
	for sym, want := range wantSym {
		got := bytes.Contains(out, []byte(sym))
		if !want && got {
			t.Errorf("cmd/go unexpectedly links in HTTP server code; found symbol %q in cmd/go", sym)
		}
		if want && !got {
			t.Errorf("expected to find symbol %q in cmd/go; not found", sym)
		}
	}
}
// Tests that the nethttpomithttp2 build tag doesn't rot too much,
// even if there's not a regular builder on it.
func TestOmitHTTP2(t *testing.T) {
if testing.Short() {
t.Skip("skipping in short mode")
}
t.Parallel()
goTool := testenv.GoToolPath(t)
out, err := exec.Command(goTool, "test", "-short", "-tags=nethttpomithttp2", "net/http").CombinedOutput()
if err != nil {
t.Fatalf("go test -short failed: %v, %s", err, out)
}
}
// Tests that the nethttpomithttp2 build tag at least type checks
// in short mode.
// The TestOmitHTTP2 test above actually runs tests (in long mode).
func TestOmitHTTP2Vet(t *testing.T) {
t.Parallel()
goTool := testenv.GoToolPath(t)
out, err := exec.Command(goTool, "vet", "-tags=nethttpomithttp2", "net/http").CombinedOutput()
if err != nil {
t.Fatalf("go vet failed: %v, %s", err, out)
}
}
// valuesCount receives a result from BenchmarkCopyValues; storing into a
// package-level variable keeps the compiler from eliminating the measured
// work as dead code.
var valuesCount int

// BenchmarkCopyValues measures copyValues merging a source url.Values into a
// destination that already holds entries under the same keys.
func BenchmarkCopyValues(b *testing.B) {
	b.ReportAllocs()
	src := url.Values{
		"a": {"1", "2", "3", "4", "5"},
		"b": {"2", "2", "3", "4", "5"},
		"c": {"3", "2", "3", "4", "5"},
		"d": {"4", "2", "3", "4", "5"},
		"e": {"1", "1", "2", "3", "4", "5", "6", "7", "abcdef", "l", "a", "b", "c", "d", "z"},
		"j": {"1", "2"},
		"m": nil,
	}
	for i := 0; i < b.N; i++ {
		dst := url.Values{"a": {"b"}, "b": {"2"}, "c": {"3"}, "d": {"4"}, "j": nil, "m": {"x"}}
		copyValues(dst, src)
		// Expect 6 = 1 pre-existing value under "a" plus the 5 copied from src.
		if valuesCount = len(dst["a"]); valuesCount != 6 {
			b.Fatalf(`%d items in dst["a"] but expected 6`, valuesCount)
		}
	}
	if valuesCount == 0 {
		b.Fatal("Benchmark wasn't run")
	}
}
| bsd-3-clause |
dwing4g/leveldb | db/version_edit_test.cc | 1338 | // Copyright (c) 2011 The LevelDB Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. See the AUTHORS file for names of contributors.
#include "db/version_edit.h"
#include "gtest/gtest.h"
namespace leveldb {
static void TestEncodeDecode(const VersionEdit& edit) {
std::string encoded, encoded2;
edit.EncodeTo(&encoded);
VersionEdit parsed;
Status s = parsed.DecodeFrom(encoded);
ASSERT_TRUE(s.ok()) << s.ToString();
parsed.EncodeTo(&encoded2);
ASSERT_EQ(encoded, encoded2);
}
// Builds up a VersionEdit over several rounds -- adding files, removals and
// compaction pointers with values past 2^50 to exercise varint encoding --
// round-tripping it after each mutation and once more after the metadata
// fields are set.
TEST(VersionEditTest, EncodeDecode) {
  static const uint64_t kBig = 1ull << 50;

  VersionEdit edit;
  for (int round = 0; round < 4; round++) {
    TestEncodeDecode(edit);
    edit.AddFile(3, kBig + 300 + round, kBig + 400 + round,
                 InternalKey("foo", kBig + 500 + round, kTypeValue),
                 InternalKey("zoo", kBig + 600 + round, kTypeDeletion));
    edit.RemoveFile(4, kBig + 700 + round);
    edit.SetCompactPointer(round, InternalKey("x", kBig + 900 + round, kTypeValue));
  }

  edit.SetComparatorName("foo");
  edit.SetLogNumber(kBig + 100);
  edit.SetNextFile(kBig + 200);
  edit.SetLastSequence(kBig + 1000);
  TestEncodeDecode(edit);
}
} // namespace leveldb
// gtest entry point: initialize the framework and run every registered test.
int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
| bsd-3-clause |
Livit/Mailbird.Gong.WPF.DragDrop | GongSolutions.Wpf.DragDrop/IDropInfo.cs | 4514 | using System;
using System.Collections;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
namespace GongSolutions.Wpf.DragDrop
{
    /// <summary>
    /// Describes the state of an in-progress drag-and-drop operation from the drop target's
    /// point of view. Instances are handed to drop handlers so they can inspect the dragged
    /// data and decide whether, where and how it should be inserted.
    /// </summary>
    public interface IDropInfo
    {
        /// <summary>
        /// Gets the drag data.
        /// </summary>
        ///
        /// <remarks>
        /// If the drag came from within the framework, this will hold:
        ///
        /// - The dragged data if a single item was dragged.
        /// - A typed IEnumerable if multiple items were dragged.
        /// </remarks>
        object Data { get; }

        /// <summary>
        /// Gets a <see cref="DragInfo"/> object holding information about the source of the drag,
        /// if the drag came from within the framework.
        /// </summary>
        IDragInfo DragInfo { get; }

        /// <summary>
        /// Gets the mouse position relative to the VisualTarget
        /// </summary>
        Point DropPosition { get; }

        /// <summary>
        /// Gets or sets the class of drop target to display.
        /// </summary>
        ///
        /// <remarks>
        /// The standard drop target adorner classes are held in the <see cref="DropTargetAdorners"/>
        /// class.
        /// </remarks>
        Type DropTargetAdorner { get; set; }

        /// <summary>
        /// Gets or sets the allowed effects for the drop.
        /// </summary>
        ///
        /// <remarks>
        /// This must be set to a value other than <see cref="DragDropEffects.None"/> by a drop handler in order
        /// for a drop to be possible.
        /// </remarks>
        DragDropEffects Effects { get; set; }

        /// <summary>
        /// Gets the current insert position within <see cref="TargetCollection"/>.
        /// </summary>
        int InsertIndex { get; }

        /// <summary>
        /// Gets the current insert position within the source (unfiltered) <see cref="TargetCollection"/>.
        /// </summary>
        /// <remarks>
        /// This should be only used in a Drop action.
        /// This works only correct with different objects (string, int, etc won't work correct).
        /// </remarks>
        int UnfilteredInsertIndex { get; }

        /// <summary>
        /// Gets the collection that the target ItemsControl is bound to.
        /// </summary>
        ///
        /// <remarks>
        /// If the current drop target is unbound or not an ItemsControl, this will be null.
        /// </remarks>
        IEnumerable TargetCollection { get; }

        /// <summary>
        /// Gets the object that the current drop target is bound to.
        /// </summary>
        ///
        /// <remarks>
        /// If the current drop target is unbound or not an ItemsControl, this will be null.
        /// </remarks>
        object TargetItem { get; }

        /// <summary>
        /// Gets the current group target.
        /// </summary>
        ///
        /// <remarks>
        /// If the drag is currently over an ItemsControl with groups, describes the group that
        /// the drag is currently over.
        /// </remarks>
        CollectionViewGroup TargetGroup { get; }

        /// <summary>
        /// Gets the control that is the current drop target.
        /// </summary>
        UIElement VisualTarget { get; }

        /// <summary>
        /// Gets the item in an ItemsControl that is the current drop target.
        /// </summary>
        ///
        /// <remarks>
        /// If the current drop target is unbound or not an ItemsControl, this will be null.
        /// </remarks>
        UIElement VisualTargetItem { get; }

        /// <summary>
        /// Gets the orientation of the current drop target.
        /// </summary>
        Orientation VisualTargetOrientation { get; }

        /// <summary>
        /// Gets the FlowDirection of the current drop target.
        /// </summary>
        FlowDirection VisualTargetFlowDirection { get; }

        /// <summary>
        /// Gets and sets the text displayed in the DragDropEffects adorner.
        /// </summary>
        string DestinationText { get; set; }

        /// <summary>
        /// Gets the relative position the item will be inserted to compared to the TargetItem
        /// </summary>
        RelativeInsertPosition InsertPosition { get; }

        /// <summary>
        /// Gets a flag enumeration indicating the current state of the SHIFT, CTRL, and ALT keys, as well as the state of the mouse buttons.
        /// </summary>
        DragDropKeyStates KeyStates { get; }

        /// <summary>
        /// Indicates if the drop info should be handled by itself (useful for child elements)
        /// </summary>
        bool NotHandled { get; set; }

        /// <summary>
        /// Gets a value indicating whether the target is in the same context as the source, <see cref="DragDrop.DragDropContextProperty" />.
        /// </summary>
        bool IsSameDragDropContextAsSource { get; }
    }
}
hujiajie/chromium-crosswalk | third_party/WebKit/Source/core/svg/SVGFELightElement.cpp | 5285 | /*
* Copyright (C) 2004, 2005, 2007 Nikolas Zimmermann <zimmermann@kde.org>
* Copyright (C) 2004, 2005, 2006 Rob Buis <buis@kde.org>
* Copyright (C) 2005 Oliver Hunt <oliver@nerget.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "core/svg/SVGFELightElement.h"
#include "core/SVGNames.h"
#include "core/dom/ElementTraversal.h"
#include "core/layout/LayoutObject.h"
#include "core/svg/SVGFEDiffuseLightingElement.h"
#include "core/svg/SVGFESpecularLightingElement.h"
namespace blink {
// Shared base constructor for the SVG light source elements. Creates and
// registers the animated number properties common to all light types;
// specularExponent starts at 1 (SVGNumber::create(1)), the rest use the
// default-constructed SVGNumber.
SVGFELightElement::SVGFELightElement(const QualifiedName& tagName, Document& document)
    : SVGElement(tagName, document)
    , m_azimuth(SVGAnimatedNumber::create(this, SVGNames::azimuthAttr, SVGNumber::create()))
    , m_elevation(SVGAnimatedNumber::create(this, SVGNames::elevationAttr, SVGNumber::create()))
    , m_x(SVGAnimatedNumber::create(this, SVGNames::xAttr, SVGNumber::create()))
    , m_y(SVGAnimatedNumber::create(this, SVGNames::yAttr, SVGNumber::create()))
    , m_z(SVGAnimatedNumber::create(this, SVGNames::zAttr, SVGNumber::create()))
    , m_pointsAtX(SVGAnimatedNumber::create(this, SVGNames::pointsAtXAttr, SVGNumber::create()))
    , m_pointsAtY(SVGAnimatedNumber::create(this, SVGNames::pointsAtYAttr, SVGNumber::create()))
    , m_pointsAtZ(SVGAnimatedNumber::create(this, SVGNames::pointsAtZAttr, SVGNumber::create()))
    , m_specularExponent(SVGAnimatedNumber::create(this, SVGNames::specularExponentAttr, SVGNumber::create(1)))
    , m_limitingConeAngle(SVGAnimatedNumber::create(this, SVGNames::limitingConeAngleAttr, SVGNumber::create()))
{
    // Register each property so attribute/animation changes reach it.
    addToPropertyMap(m_azimuth);
    addToPropertyMap(m_elevation);
    addToPropertyMap(m_x);
    addToPropertyMap(m_y);
    addToPropertyMap(m_z);
    addToPropertyMap(m_pointsAtX);
    addToPropertyMap(m_pointsAtY);
    addToPropertyMap(m_pointsAtZ);
    addToPropertyMap(m_specularExponent);
    addToPropertyMap(m_limitingConeAngle);
}
// Garbage-collection tracing: visit every animated property member, then
// delegate to the base class so inherited members are traced too.
DEFINE_TRACE(SVGFELightElement)
{
    visitor->trace(m_azimuth);
    visitor->trace(m_elevation);
    visitor->trace(m_x);
    visitor->trace(m_y);
    visitor->trace(m_z);
    visitor->trace(m_pointsAtX);
    visitor->trace(m_pointsAtY);
    visitor->trace(m_pointsAtZ);
    visitor->trace(m_specularExponent);
    visitor->trace(m_limitingConeAngle);
    SVGElement::trace(visitor);
}
// Returns the first SVGFELightElement child of |svgElement|, or null if it
// has none.
SVGFELightElement* SVGFELightElement::findLightElement(const SVGElement& svgElement)
{
    return Traversal<SVGFELightElement>::firstChild(svgElement);
}
// The light's position, assembled from the current animated values of the
// x, y and z properties.
FloatPoint3D SVGFELightElement::position() const
{
    float px = x()->currentValue()->value();
    float py = y()->currentValue()->value();
    float pz = z()->currentValue()->value();
    return FloatPoint3D(px, py, pz);
}
// The point the light is aimed at, assembled from the current animated
// values of the pointsAtX/Y/Z properties.
FloatPoint3D SVGFELightElement::pointsAt() const
{
    float px = pointsAtX()->currentValue()->value();
    float py = pointsAtY()->currentValue()->value();
    float pz = pointsAtZ()->currentValue()->value();
    return FloatPoint3D(px, py, pz);
}
// Reacts to changes of any light-source attribute by notifying the parent
// lighting filter primitive; all other attributes fall through to the base
// class handler.
void SVGFELightElement::svgAttributeChanged(const QualifiedName& attrName)
{
    if (attrName == SVGNames::azimuthAttr
        || attrName == SVGNames::elevationAttr
        || attrName == SVGNames::xAttr
        || attrName == SVGNames::yAttr
        || attrName == SVGNames::zAttr
        || attrName == SVGNames::pointsAtXAttr
        || attrName == SVGNames::pointsAtYAttr
        || attrName == SVGNames::pointsAtZAttr
        || attrName == SVGNames::specularExponentAttr
        || attrName == SVGNames::limitingConeAngleAttr) {
        // Without a parent that renders a filter primitive there is nothing
        // to invalidate; note these early returns skip the base handler.
        ContainerNode* parent = parentNode();
        if (!parent)
            return;
        LayoutObject* layoutObject = parent->layoutObject();
        if (!layoutObject || !layoutObject->isSVGResourceFilterPrimitive())
            return;
        SVGElement::InvalidationGuard invalidationGuard(this);
        // Forward the change to whichever lighting element owns this light.
        if (isSVGFEDiffuseLightingElement(*parent)) {
            toSVGFEDiffuseLightingElement(*parent).lightElementAttributeChanged(this, attrName);
            return;
        }
        if (isSVGFESpecularLightingElement(*parent)) {
            toSVGFESpecularLightingElement(*parent).lightElementAttributeChanged(this, attrName);
            return;
        }
        // A light element inside a filter primitive should always have one
        // of the two lighting parents above.
        ASSERT_NOT_REACHED();
    }

    SVGElement::svgAttributeChanged(attrName);
}
// After the base class has processed the child change, invalidate the
// parent filter primitive's layout -- but only for changes made by script
// or editing, not by the parser.
void SVGFELightElement::childrenChanged(const ChildrenChange& change)
{
    SVGElement::childrenChanged(change);

    if (change.byParser)
        return;

    ContainerNode* parent = parentNode();
    if (!parent)
        return;

    LayoutObject* layoutObject = parent->layoutObject();
    if (layoutObject && layoutObject->isSVGResourceFilterPrimitive())
        markForLayoutAndParentResourceInvalidation(layoutObject);
}
}
| bsd-3-clause |
ottramst/TamaraIntranet | user/plugins/login-oauth/vendor/lusitanian/oauth/examples/init.example.php | 4522 | <?php
/**
* This file sets up the information needed to test the examples in different environments.
*
* PHP version 5.4
*
* @author David Desberg <david@daviddesberg.com>
* @author Pieter Hordijk <info@pieterhordijk.com>
* @copyright Copyright (c) 2012 The authors
* @license http://www.opensource.org/licenses/mit-license.html MIT License
*/
/**
* @var array A list of all the credentials to be used by the different services in the examples
*/
// Maps each service name (as understood by the ServiceFactory) to the OAuth
// consumer credentials used by the corresponding example script. Fill these
// in before running an example.
$servicesCredentials = array(
    'amazon' => array(
        'key' => '',
        'secret' => '',
    ),
    'bitbucket' => array(
        'key' => '',
        'secret' => '',
    ),
    'bitly' => array(
        'key' => '',
        'secret' => '',
    ),
    'bitrix24' => array(
        'key' => '',
        'secret' => '',
    ),
    'box' => array(
        'key' => '',
        'secret' => '',
    ),
    'buffer' => array(
        'key' => '',
        'secret' => '',
    ),
    'dailymotion' => array(
        'key' => '',
        'secret' => '',
    ),
    'delicious' => array(
        'key' => '',
        'secret' => '',
    ),
    'deezer' => array(
        'key' => '',
        'secret' => '',
    ),
    'deviantart' => array(
        'key' => '',
        'secret' => '',
    ),
    'dropbox' => array(
        'key' => '',
        'secret' => '',
    ),
    'etsy' => array(
        'key' => '',
        'secret' => '',
    ),
    'eveonline' => array(
        'key' => '',
        'secret' => '',
    ),
    'facebook' => array(
        'key' => '',
        'secret' => '',
    ),
    'fitbit' => array(
        'key' => '',
        'secret' => '',
    ),
    'fivehundredpx' => array(
        'key' => '',
        'secret' => '',
    ),
    'flickr' => array(
        'key' => '',
        'secret' => '',
    ),
    'foursquare' => array(
        'key' => '',
        'secret' => '',
    ),
    'github' => array(
        'key' => '',
        'secret' => '',
    ),
    'google' => array(
        'key' => '',
        'secret' => '',
    ),
    'hubic' => array(
        'key' => '',
        'secret' => '',
    ),
    'instagram' => array(
        'key' => '',
        'secret' => '',
    ),
    'linkedin' => array(
        'key' => '',
        'secret' => '',
    ),
    'mailchimp' => array(
        'key' => '',
        'secret' => '',
    ),
    'microsoft' => array(
        'key' => '',
        'secret' => '',
    ),
    'nest' => array(
        'key' => '',
        'secret' => '',
    ),
    'netatmo' => array(
        'key' => '',
        'secret' => '',
    ),
    'parrotFlowerPower' => array(
        'key' => '',
        'secret' => '',
    ),
    'paypal' => array(
        'key' => '',
        'secret' => '',
    ),
    'pinterest' => array(
        'key' => '',
        'secret' => '',
    ),
    // NOTE(review): pocket is the only entry without a 'secret'; presumably
    // its flow uses just a consumer key -- verify against the Pocket example.
    'pocket' => array(
        'key' => '',
    ),
    'quickbooks' => array(
        'key' => '',
        'secret' => '',
    ),
    'reddit' => array(
        'key' => '',
        'secret' => '',
    ),
    'redmine' => array(
        'key' => '',
        'secret' => ''
    ),
    'runkeeper' => array(
        'key' => '',
        'secret' => '',
    ),
    'salesforce' => array(
        'key' => '',
        'secret' => ''
    ),
    'scoopit' => array(
        'key' => '',
        'secret' => ''
    ),
    'soundcloud' => array(
        'key' => '',
        'secret' => '',
    ),
    'spotify' => array(
        'key' => '',
        'secret' => '',
    ),
    'strava' => array(
        'key' => '',
        'secret' => '',
    ),
    'tumblr' => array(
        'key' => '',
        'secret' => '',
    ),
    'twitter' => array(
        'key' => '',
        'secret' => '',
    ),
    'ustream' => array(
        'key' => '',
        'secret' => '',
    ),
    'vimeo' => array(
        'key' => '',
        'secret' => '',
    ),
    'yahoo' => array(
        'key' => '',
        'secret' => ''
    ),
    'yammer' => array(
        'key' => '',
        'secret' => ''
    ),
);

/** @var $serviceFactory \OAuth\ServiceFactory An OAuth service factory. */
$serviceFactory = new \OAuth\ServiceFactory();
| mit |
mylibero/cli | test/crossgen.Tests/crossgen.Tests.cs | 3199 | // Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection.PortableExecutable;
using Microsoft.DotNet.ProjectModel;
using Microsoft.DotNet.Tools.Test.Utilities;
using FluentAssertions;
using Xunit;
namespace Microsoft.DotNet.Tests
{
    /// <summary>
    /// Static checks that the shipped CLI and shared-framework assemblies have
    /// been crossgened, performed by inspecting their PE metadata.
    /// </summary>
    public class CrossgenTests : TestBase
    {
        [Fact(Skip="https://github.com/dotnet/cli/issues/3059")]
        public void CLI_SDK_assemblies_must_be_crossgened()
        {
            string dotnetDir = FindDotnetDirInPath();
            // Locate dotnet.dll anywhere under the host directory; its folder
            // holds the CLI SDK assemblies.
            string cliPath = Path.GetDirectoryName(
                Directory.EnumerateFiles(dotnetDir, "dotnet.dll", SearchOption.AllDirectories).First());
            CheckDirectoryIsCrossgened(cliPath);
        }

        [Fact(Skip="https://github.com/dotnet/cli/issues/3059")]
        public void Shared_Fx_assemblies_must_be_crossgened()
        {
            string dotnetDir = FindDotnetDirInPath();
            // mscorlib marks the shared framework directory.
            string sharedFxPath = Path.GetDirectoryName(
                Directory.EnumerateFiles(dotnetDir, "mscorlib*.dll", SearchOption.AllDirectories).First());
            CheckDirectoryIsCrossgened(sharedFxPath);
        }

        // Asserts that every managed .dll/.exe directly under pathToAssemblies
        // is crossgened; files without managed metadata are skipped.
        private static void CheckDirectoryIsCrossgened(string pathToAssemblies)
        {
            Console.WriteLine($"Checking directory '{pathToAssemblies}' for crossgened assemblies");

            var assemblies =
                Directory.EnumerateFiles(pathToAssemblies, "*.dll", SearchOption.TopDirectoryOnly)
                    .Concat(Directory.EnumerateFiles(pathToAssemblies, "*.exe", SearchOption.TopDirectoryOnly));

            assemblies.Count().Should().NotBe(0, $"No assemblies found at directory '{pathToAssemblies}'");

            foreach (var assembly in assemblies)
            {
                using (var asmStream = File.OpenRead(assembly))
                using (var peReader = new PEReader(asmStream))
                {
                    if (peReader.HasMetadata)
                    {
                        peReader.IsCrossgened().Should().BeTrue($"Managed assembly '{assembly}' is not crossgened.");
                    }
                }
            }
        }

        // Walks $PATH looking for the dotnet host executable and returns the
        // directory containing it.
        private static string FindDotnetDirInPath()
        {
            string dotnetExecutable = $"dotnet{FileNameSuffixes.CurrentPlatform.Exe}";
            string pathVariable = Environment.GetEnvironmentVariable("PATH") ?? "";

            foreach (string path in pathVariable.Split(Path.PathSeparator))
            {
                string candidate = Path.Combine(path, dotnetExecutable);
                if (File.Exists(candidate))
                {
                    return Path.GetDirectoryName(candidate);
                }
            }

            throw new FileNotFoundException($"Unable to find '{dotnetExecutable}' in the $PATH");
        }
    }
}
| mit |
kewaunited/passenger | ext/boost/smart_ptr/make_shared_object.hpp | 39230 | #ifndef BOOST_SMART_PTR_MAKE_SHARED_OBJECT_HPP_INCLUDED
#define BOOST_SMART_PTR_MAKE_SHARED_OBJECT_HPP_INCLUDED
// make_shared_object.hpp
//
// Copyright (c) 2007, 2008, 2012 Peter Dimov
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
//
// See http://www.boost.org/libs/smart_ptr/make_shared.html
// for documentation.
#include <boost/config.hpp>
#include <boost/smart_ptr/shared_ptr.hpp>
#include <boost/smart_ptr/detail/sp_forward.hpp>
#include <boost/type_traits/type_with_alignment.hpp>
#include <boost/type_traits/alignment_of.hpp>
#include <cstddef>
#include <new>
namespace boost
{
namespace detail
{
// Raw storage of N bytes aligned at least as strictly as A: a char buffer
// unioned with a type of the required alignment (pre-C++11 substitute for
// std::aligned_storage).
template< std::size_t N, std::size_t A > struct sp_aligned_storage
{
    union type
    {
        char data_[ N ];
        typename boost::type_with_alignment< A >::type align_;
    };
};
// Deleter used by make_shared/allocate_shared ("ms" = make_shared): owns
// suitably aligned in-place storage for a T and runs its destructor when the
// shared_ptr releases the object. The T itself is placement-constructed into
// address() by the make_shared functions below.
template< class T > class sp_ms_deleter
{
private:

    typedef typename sp_aligned_storage< sizeof( T ), ::boost::alignment_of< T >::value >::type storage_type;

    bool initialized_;      // true once a T has been constructed in storage_
    storage_type storage_;  // raw, aligned space for the T

private:

    // Destroy the stored T, at most once; a no-op before set_initialized().
    void destroy()
    {
        if( initialized_ )
        {
#if defined( __GNUC__ )

            // fixes incorrect aliasing warning
            T * p = reinterpret_cast< T* >( storage_.data_ );
            p->~T();

#else

            reinterpret_cast< T* >( storage_.data_ )->~T();

#endif

            initialized_ = false;
        }
    }

public:

    sp_ms_deleter() BOOST_NOEXCEPT : initialized_( false )
    {
    }

    // optimization: do not copy storage_
    sp_ms_deleter( sp_ms_deleter const & ) BOOST_NOEXCEPT : initialized_( false )
    {
    }

    ~sp_ms_deleter()
    {
        destroy();
    }

    void operator()( T * )
    {
        destroy();
    }

    static void operator_fn( T* ) // operator() can't be static
    {
    }

    // Address of the raw storage where the T is placement-new'ed.
    void * address() BOOST_NOEXCEPT
    {
        return storage_.data_;
    }

    // Marks construction complete so destroy()/operator() will run ~T().
    void set_initialized() BOOST_NOEXCEPT
    {
        initialized_ = true;
    }
};
// sp_if_not_array< T >::type is shared_ptr<T> only when T is not an array;
// the array specializations below omit ::type, removing these object-form
// overloads from overload resolution (SFINAE) for T[] and T[N].
template< class T > struct sp_if_not_array
{
    typedef boost::shared_ptr< T > type;
};

#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )

template< class T > struct sp_if_not_array< T[] >
{
};

#if !defined( __BORLANDC__ ) || !BOOST_WORKAROUND( __BORLANDC__, < 0x600 )

template< class T, std::size_t N > struct sp_if_not_array< T[N] >
{
};

#endif

#endif
} // namespace detail
// BOOST_SP_MSD( T ) expands to the deleter argument handed to the shared_ptr
// constructor: an in-place construction tag where supported, otherwise a
// plain sp_ms_deleter temporary.
#if !defined( BOOST_NO_FUNCTION_TEMPLATE_ORDERING )
# define BOOST_SP_MSD( T ) boost::detail::sp_inplace_tag< boost::detail::sp_ms_deleter< T > >()
#else
# define BOOST_SP_MSD( T ) boost::detail::sp_ms_deleter< T >()
#endif
// Zero-argument versions
//
// Used even when variadic templates are available because of the new T() vs new T issue
// All four variants follow the same sequence: create a shared_ptr owning a
// null pointer plus an sp_ms_deleter, placement-construct the T inside the
// deleter's storage, mark it initialized (so the deleter destroys it even if
// a later step throws), wire up enable_shared_from_this, and return an
// aliasing shared_ptr to the in-place object.

// make_shared<T>(): value-initializes the object ("new T()").
template< class T > typename boost::detail::sp_if_not_array< T >::type make_shared()
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T();
    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// make_shared_noinit<T>(): default-initializes the object ("new T"), leaving
// PODs uninitialized.
template< class T > typename boost::detail::sp_if_not_array< T >::type make_shared_noinit()
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T;
    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// allocate_shared<T>( a ): as make_shared(), but the control block is
// allocated through allocator |a|.
template< class T, class A > typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T();
    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// allocate_shared_noinit<T>( a ): allocator-backed, default-initializing.
template< class T, class A > typename boost::detail::sp_if_not_array< T >::type allocate_shared_noinit( A const & a )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T;
    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
#if !defined( BOOST_NO_CXX11_VARIADIC_TEMPLATES ) && !defined( BOOST_NO_CXX11_RVALUE_REFERENCES )
// Variadic templates, rvalue reference
// Variadic forms: perfect-forward the constructor arguments into the
// placement-new. A separate Arg1 keeps these from ambiguously matching the
// zero-argument overloads above.
template< class T, class Arg1, class... Args > typename boost::detail::sp_if_not_array< T >::type make_shared( Arg1 && arg1, Args && ... args )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T( boost::detail::sp_forward<Arg1>( arg1 ), boost::detail::sp_forward<Args>( args )... );
    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// Allocator-backed variadic form.
template< class T, class A, class Arg1, class... Args > typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, Arg1 && arg1, Args && ... args )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T( boost::detail::sp_forward<Arg1>( arg1 ), boost::detail::sp_forward<Args>( args )... );
    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
#elif !defined( BOOST_NO_CXX11_RVALUE_REFERENCES )
// For example MSVC 10.0
// One-argument expansion of the variadic pattern, for compilers with rvalue
// references but without variadic templates (e.g. MSVC 10.0).
template< class T, class A1 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// Allocator-backed one-argument form.
template< class T, class A, class A1 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
// Two-argument expansion of the variadic pattern (non-variadic compilers).
template< class T, class A1, class A2 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// Allocator-backed two-argument form.
template< class T, class A, class A1, class A2 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
// Three-argument expansion of the variadic pattern (non-variadic compilers).
template< class T, class A1, class A2, class A3 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2, A3 && a3 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// Allocator-backed three-argument form.
template< class T, class A, class A1, class A2, class A3 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2, A3 && a3 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
// Four-argument expansion of the variadic pattern (non-variadic compilers).
template< class T, class A1, class A2, class A3, class A4 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2, A3 && a3, A4 && a4 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 ),
        boost::detail::sp_forward<A4>( a4 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// Allocator-backed four-argument form.
template< class T, class A, class A1, class A2, class A3, class A4 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2, A3 && a3, A4 && a4 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 ),
        boost::detail::sp_forward<A4>( a4 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
// Five-argument expansion of the variadic pattern (non-variadic compilers).
template< class T, class A1, class A2, class A3, class A4, class A5 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 ),
        boost::detail::sp_forward<A4>( a4 ),
        boost::detail::sp_forward<A5>( a5 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// Allocator-backed five-argument form.
template< class T, class A, class A1, class A2, class A3, class A4, class A5 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 ),
        boost::detail::sp_forward<A4>( a4 ),
        boost::detail::sp_forward<A5>( a5 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
// Six-argument expansion of the variadic pattern (non-variadic compilers).
template< class T, class A1, class A2, class A3, class A4, class A5, class A6 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 ),
        boost::detail::sp_forward<A4>( a4 ),
        boost::detail::sp_forward<A5>( a5 ),
        boost::detail::sp_forward<A6>( a6 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}

// Allocator-backed six-argument form.
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6 )
{
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );

    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );

    void * pv = pd->address();

    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 ),
        boost::detail::sp_forward<A4>( a4 ),
        boost::detail::sp_forward<A5>( a5 ),
        boost::detail::sp_forward<A6>( a6 )
        );

    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );

    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A1, class A2, class A3, class A4, class A5, class A6, class A7 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6, A7 && a7 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T(
boost::detail::sp_forward<A1>( a1 ),
boost::detail::sp_forward<A2>( a2 ),
boost::detail::sp_forward<A3>( a3 ),
boost::detail::sp_forward<A4>( a4 ),
boost::detail::sp_forward<A5>( a5 ),
boost::detail::sp_forward<A6>( a6 ),
boost::detail::sp_forward<A7>( a7 )
);
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6, class A7 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6, A7 && a7 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T(
boost::detail::sp_forward<A1>( a1 ),
boost::detail::sp_forward<A2>( a2 ),
boost::detail::sp_forward<A3>( a3 ),
boost::detail::sp_forward<A4>( a4 ),
boost::detail::sp_forward<A5>( a5 ),
boost::detail::sp_forward<A6>( a6 ),
boost::detail::sp_forward<A7>( a7 )
);
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6, A7 && a7, A8 && a8 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T(
boost::detail::sp_forward<A1>( a1 ),
boost::detail::sp_forward<A2>( a2 ),
boost::detail::sp_forward<A3>( a3 ),
boost::detail::sp_forward<A4>( a4 ),
boost::detail::sp_forward<A5>( a5 ),
boost::detail::sp_forward<A6>( a6 ),
boost::detail::sp_forward<A7>( a7 ),
boost::detail::sp_forward<A8>( a8 )
);
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6, A7 && a7, A8 && a8 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T(
boost::detail::sp_forward<A1>( a1 ),
boost::detail::sp_forward<A2>( a2 ),
boost::detail::sp_forward<A3>( a3 ),
boost::detail::sp_forward<A4>( a4 ),
boost::detail::sp_forward<A5>( a5 ),
boost::detail::sp_forward<A6>( a6 ),
boost::detail::sp_forward<A7>( a7 ),
boost::detail::sp_forward<A8>( a8 )
);
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6, A7 && a7, A8 && a8, A9 && a9 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T(
boost::detail::sp_forward<A1>( a1 ),
boost::detail::sp_forward<A2>( a2 ),
boost::detail::sp_forward<A3>( a3 ),
boost::detail::sp_forward<A4>( a4 ),
boost::detail::sp_forward<A5>( a5 ),
boost::detail::sp_forward<A6>( a6 ),
boost::detail::sp_forward<A7>( a7 ),
boost::detail::sp_forward<A8>( a8 ),
boost::detail::sp_forward<A9>( a9 )
);
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6, A7 && a7, A8 && a8, A9 && a9 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T(
boost::detail::sp_forward<A1>( a1 ),
boost::detail::sp_forward<A2>( a2 ),
boost::detail::sp_forward<A3>( a3 ),
boost::detail::sp_forward<A4>( a4 ),
boost::detail::sp_forward<A5>( a5 ),
boost::detail::sp_forward<A6>( a6 ),
boost::detail::sp_forward<A7>( a7 ),
boost::detail::sp_forward<A8>( a8 ),
boost::detail::sp_forward<A9>( a9 )
);
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9, class A10 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6, A7 && a7, A8 && a8, A9 && a9, A10 && a10 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T(
boost::detail::sp_forward<A1>( a1 ),
boost::detail::sp_forward<A2>( a2 ),
boost::detail::sp_forward<A3>( a3 ),
boost::detail::sp_forward<A4>( a4 ),
boost::detail::sp_forward<A5>( a5 ),
boost::detail::sp_forward<A6>( a6 ),
boost::detail::sp_forward<A7>( a7 ),
boost::detail::sp_forward<A8>( a8 ),
boost::detail::sp_forward<A9>( a9 ),
boost::detail::sp_forward<A10>( a10 )
);
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// allocate_shared: 10 forwarded constructor arguments, allocator 'a'
// (C++11 rvalue-reference path of the variadic-emulation overload family).
// FIX: the tenth argument was forwarded as sp_forward<A9>( a10 ); forwarding
// a10 with the wrong template parameter (A9 instead of A10) produces the wrong
// reference category / type for the tenth constructor argument. It must be
// sp_forward<A10>( a10 ), matching the make_shared 10-argument twin above.
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9, class A10 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 && a1, A2 && a2, A3 && a3, A4 && a4, A5 && a5, A6 && a6, A7 && a7, A8 && a8, A9 && a9, A10 && a10 )
{
    // Obtain uninitialized, allocator-provided storage; T is not constructed yet.
    boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
    boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
    void * pv = pd->address();

    // Placement-construct T, perfectly forwarding every argument.
    ::new( pv ) T(
        boost::detail::sp_forward<A1>( a1 ),
        boost::detail::sp_forward<A2>( a2 ),
        boost::detail::sp_forward<A3>( a3 ),
        boost::detail::sp_forward<A4>( a4 ),
        boost::detail::sp_forward<A5>( a5 ),
        boost::detail::sp_forward<A6>( a6 ),
        boost::detail::sp_forward<A7>( a7 ),
        boost::detail::sp_forward<A8>( a8 ),
        boost::detail::sp_forward<A9>( a9 ),
        boost::detail::sp_forward<A10>( a10 )
        );

    // Arm the deleter so it destroys T when the ownership group dies.
    pd->set_initialized();

    T * pt2 = static_cast< T* >( pv );
    boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
    return boost::shared_ptr< T >( pt, pt2 );
}
#else
// C++03 version
// C++03 fallback of the same generated overload family: no rvalue references,
// so every constructor argument is taken by const reference (1..10 args).
// Same four-step pattern as the C++11 section: allocate via sp_ms_deleter,
// placement-new T, set_initialized(), then wire enable_shared_from_this and
// return the aliased shared_ptr.
// make_shared / allocate_shared: 1 argument.
template< class T, class A1 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 2 arguments.
template< class T, class A1, class A2 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 3 arguments.
template< class T, class A1, class A2, class A3 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2, A3 const & a3 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2, A3 const & a3 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 4 arguments.
template< class T, class A1, class A2, class A3, class A4 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 5 arguments.
template< class T, class A1, class A2, class A3, class A4, class A5 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 6 arguments.
template< class T, class A1, class A2, class A3, class A4, class A5, class A6 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 7 arguments.
template< class T, class A1, class A2, class A3, class A4, class A5, class A6, class A7 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6, A7 const & a7 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6, a7 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6, class A7 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6, A7 const & a7 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6, a7 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 8 arguments.
template< class T, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6, A7 const & a7, A8 const & a8 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6, a7, a8 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6, A7 const & a7, A8 const & a8 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6, a7, a8 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 9 arguments.
template< class T, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6, A7 const & a7, A8 const & a8, A9 const & a9 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6, a7, a8, a9 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6, A7 const & a7, A8 const & a8, A9 const & a9 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6, a7, a8, a9 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
// make_shared / allocate_shared: 10 arguments.
template< class T, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9, class A10 >
typename boost::detail::sp_if_not_array< T >::type make_shared( A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6, A7 const & a7, A8 const & a8, A9 const & a9, A10 const & a10 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ) );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6, a7, a8, a9, a10 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
template< class T, class A, class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9, class A10 >
typename boost::detail::sp_if_not_array< T >::type allocate_shared( A const & a, A1 const & a1, A2 const & a2, A3 const & a3, A4 const & a4, A5 const & a5, A6 const & a6, A7 const & a7, A8 const & a8, A9 const & a9, A10 const & a10 )
{
boost::shared_ptr< T > pt( static_cast< T* >( 0 ), BOOST_SP_MSD( T ), a );
boost::detail::sp_ms_deleter< T > * pd = static_cast<boost::detail::sp_ms_deleter< T > *>( pt._internal_get_untyped_deleter() );
void * pv = pd->address();
::new( pv ) T( a1, a2, a3, a4, a5, a6, a7, a8, a9, a10 );
pd->set_initialized();
T * pt2 = static_cast< T* >( pv );
boost::detail::sp_enable_shared_from_this( &pt, pt2, pt2 );
return boost::shared_ptr< T >( pt, pt2 );
}
#endif
#undef BOOST_SP_MSD
} // namespace boost
#endif // #ifndef BOOST_SMART_PTR_MAKE_SHARED_OBJECT_HPP_INCLUDED
| mit |
hyonholee/azure-sdk-for-net | sdk/recoveryservices-siterecovery/Microsoft.Azure.Management.RecoveryServices.SiteRecovery/src/Generated/Models/KeyEncryptionKeyInfo.cs | 2007 | // <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.Management.RecoveryServices.SiteRecovery.Models
{
using Newtonsoft.Json;
using System.Linq;
/// <summary>
/// Key Encryption Key (KEK) information.
/// </summary>
// NOTE(review): AutoRest-generated model (see file header) — regenerate from
// the API spec rather than hand-editing; manual changes will be overwritten.
public partial class KeyEncryptionKeyInfo
{
/// <summary>
/// Initializes a new instance of the KeyEncryptionKeyInfo class.
/// </summary>
public KeyEncryptionKeyInfo()
{
// Extension hook for the partial class; a no-op unless implemented elsewhere.
CustomInit();
}
/// <summary>
/// Initializes a new instance of the KeyEncryptionKeyInfo class.
/// </summary>
/// <param name="keyIdentifier">The key url / identifier.</param>
/// <param name="keyVaultResourceArmId">The KeyVault resource ARM id
/// for key.</param>
public KeyEncryptionKeyInfo(string keyIdentifier = default(string), string keyVaultResourceArmId = default(string))
{
KeyIdentifier = keyIdentifier;
KeyVaultResourceArmId = keyVaultResourceArmId;
CustomInit();
}
/// <summary>
/// An initialization method that performs custom operations like setting defaults
/// </summary>
partial void CustomInit();
/// <summary>
/// Gets or sets the key url / identifier.
/// </summary>
[JsonProperty(PropertyName = "keyIdentifier")]
public string KeyIdentifier { get; set; }
/// <summary>
/// Gets or sets the KeyVault resource ARM id for key.
/// </summary>
[JsonProperty(PropertyName = "keyVaultResourceArmId")]
public string KeyVaultResourceArmId { get; set; }
}
}
| mit |
adorr/mongoid | lib/mongoid/extensions/object/conversions.rb | 515 | # encoding: utf-8
module Mongoid #:nodoc:
  module Extensions #:nodoc:
    module Object #:nodoc:
      # Converts arbitrary objects to and from their Mongoid storage form.
      module Conversions #:nodoc:
        extend ActiveSupport::Concern

        module ClassMethods
          # Prepare a value for storage: document-like objects are reduced
          # to their raw attribute hash, anything else passes through.
          def set(value)
            if value.respond_to?(:raw_attributes)
              value.raw_attributes
            else
              value
            end
          end

          # Rehydrate a stored value: nil/false pass through untouched,
          # otherwise instantiate a document from the attributes.
          def get(value)
            return value unless value
            self.instantiate(value)
          end
        end
      end
    end
  end
end
| mit |
kivatu/kivy-bak | kivy/animation.py | 22342 | '''
Animation
=========
:class:`Animation` and :class:`AnimationTransition` are used to animate
:class:`~kivy.uix.widget.Widget` properties. You must specify at least a
property name and target value. To use an Animation, follow these steps:
* Setup an Animation object
* Use the Animation object on a Widget
Simple animation
----------------
To animate a Widget's x or y position, simply specify the target x/y values
where you want the widget positioned at the end of the animation::
anim = Animation(x=100, y=100)
anim.start(widget)
The animation will last for 1 second unless :attr:`duration` is specified.
When anim.start() is called, the Widget will move smoothly from the current
x/y position to (100, 100).
Multiple properties and transitions
-----------------------------------
You can animate multiple properties and use built-in or custom transition
functions using :attr:`transition` (or the `t=` shortcut). For example,
to animate the position and size using the 'in_quad' transition::
anim = Animation(x=50, size=(80, 80), t='in_quad')
anim.start(widget)
Note that the `t=` parameter can be the string name of a method in the
:class:`AnimationTransition` class or your own animation function.
Sequential animation
--------------------
To join animations sequentially, use the '+' operator. The following example
will animate to x=50 over 1 second, then animate the size to (80, 80) over the
next two seconds::
anim = Animation(x=50) + Animation(size=(80, 80), duration=2.)
anim.start(widget)
Parallel animation
------------------
To join animations in parallel, use the '&' operator. The following example
will animate the position to (80, 10) over 1 second, whilst in parallel
animating the size to (800, 800)::
anim = Animation(pos=(80, 10))
anim &= Animation(size=(800, 800), duration=2.)
anim.start(widget)
Repeating animation
-------------------
.. versionadded:: 1.8.0
.. note::
This is currently only implemented for 'Sequence' animations.
To set an animation to repeat, simply set the :attr:`Sequence.repeat`
property to `True`::
anim = Animation(...) + Animation(...)
anim.repeat = True
anim.start(widget)
For flow control of animations such as stopping and cancelling, use the methods
already in place in the animation module.
'''
__all__ = ('Animation', 'AnimationTransition')
from math import sqrt, cos, sin, pi
from kivy.event import EventDispatcher
from kivy.clock import Clock
from kivy.compat import string_types, iterkeys
class Animation(EventDispatcher):
'''Create an animation definition that can be used to animate a Widget.
:Parameters:
`duration` or `d`: float, defaults to 1.
Duration of the animation, in seconds.
`transition` or `t`: str or func
Transition function for animate properties. It can be the name of a
method from :class:`AnimationTransition`.
`step` or `s`: float
Step in milliseconds of the animation. Defaults to 1 / 60.
:Events:
`on_start`: widget
Fired when the animation is started on a widget.
`on_complete`: widget
Fired when the animation is completed or stopped on a widget.
`on_progress`: widget, progression
Fired when the progression of the animation is changing.
.. versionchanged:: 1.4.0
Added s/step parameter.
'''
_instances = set()
__events__ = ('on_start', 'on_progress', 'on_complete')
def __init__(self, **kw):
super(Animation, self).__init__(**kw)
# Initialize
self._clock_installed = False
self._duration = kw.get('d', kw.get('duration', 1.))
self._transition = kw.get('t', kw.get('transition', 'linear'))
self._step = kw.get('s', kw.get('step', 1. / 60.))
if isinstance(self._transition, string_types):
self._transition = getattr(AnimationTransition, self._transition)
for key in ('d', 't', 's', 'step', 'duration', 'transition'):
kw.pop(key, None)
self._animated_properties = kw
self._widgets = {}
@property
def duration(self):
'''Return the duration of the animation.
'''
return self._duration
@property
def transition(self):
'''Return the transition of the animation.
'''
return self._transition
@property
def animated_properties(self):
'''Return the properties used to animate.
'''
return self._animated_properties
@staticmethod
def stop_all(widget, *largs):
'''Stop all animations that concern a specific widget / list of
properties.
Example::
anim = Animation(x=50)
anim.start(widget)
# and later
Animation.stop_all(widget, 'x')
'''
if len(largs):
for animation in list(Animation._instances):
for x in largs:
animation.stop_property(widget, x)
else:
for animation in set(Animation._instances):
animation.stop(widget)
@staticmethod
def cancel_all(widget, *largs):
'''Cancel all animations that concern a specific widget / list of
properties. See :attr:`cancel`.
Example::
anim = Animation(x=50)
anim.start(widget)
# and later
Animation.cancel_all(widget, 'x')
.. versionadded:: 1.4.0
'''
if len(largs):
for animation in list(Animation._instances):
for x in largs:
animation.cancel_property(widget, x)
else:
for animation in set(Animation._instances):
animation.cancel(widget)
def start(self, widget):
'''Start the animation on a widget.
'''
self.stop(widget)
self._initialize(widget)
self._register()
self.dispatch('on_start', widget)
def stop(self, widget):
'''Stop the animation previously applied to a widget, triggering the
`on_complete` event.'''
props = self._widgets.pop(widget.uid, None)
if props:
self.dispatch('on_complete', widget)
self.cancel(widget)
def cancel(self, widget):
'''Cancel the animation previously applied to a widget. Same
effect as :attr:`stop`, except the `on_complete` event will
*not* be triggered!
.. versionadded:: 1.4.0
'''
self._widgets.pop(widget.uid, None)
self._clock_uninstall()
if not self._widgets:
self._unregister()
def stop_property(self, widget, prop):
'''Even if an animation is running, remove a property. It will not be
animated futher. If it was the only/last property being animated,
the animation will be stopped (see :attr:`stop`).
'''
props = self._widgets.get(widget.uid, None)
if not props:
return
props['properties'].pop(prop, None)
# no more properties to animation ? kill the animation.
if not props['properties']:
self.stop(widget)
def cancel_property(self, widget, prop):
'''Even if an animation is running, remove a property. It will not be
animated further. If it was the only/last property being animated,
the animation will be canceled (see :attr:`cancel`)
.. versionadded:: 1.4.0
'''
props = self._widgets.get(widget.uid, None)
if not props:
return
props['properties'].pop(prop, None)
# no more properties to animation ? kill the animation.
if not props['properties']:
self.cancel(widget)
def have_properties_to_animate(self, widget):
'''Return True if a widget still has properties to animate.
.. versionadded:: 1.8.0
'''
props = self._widgets.get(widget.uid, None)
if props and props['properties']:
return True
#
# Private
#
def _register(self):
Animation._instances.add(self)
def _unregister(self):
if self in Animation._instances:
Animation._instances.remove(self)
def _initialize(self, widget):
d = self._widgets[widget.uid] = {
'widget': widget,
'properties': {},
'time': None}
# get current values
p = d['properties']
for key, value in self._animated_properties.items():
p[key] = (getattr(widget, key), value)
# install clock
self._clock_install()
def _clock_install(self):
if self._clock_installed:
return
Clock.schedule_interval(self._update, self._step)
self._clock_installed = True
def _clock_uninstall(self):
if self._widgets or not self._clock_installed:
return
self._clock_installed = False
Clock.unschedule(self._update)
    def _update(self, dt):
        # Clock callback: advance every animated widget by `dt` seconds,
        # apply the eased interpolation, and stop finished animations.
        widgets = self._widgets
        transition = self._transition
        calculate = self._calculate
        # Iterate over a copy of the keys: self.stop() below mutates
        # self._widgets while we loop.
        for uid in list(widgets.keys())[:]:
            anim = widgets[uid]
            widget = anim['widget']
            # The first tick only arms the timer; interpolation starts on
            # the next frame.
            if anim['time'] is None:
                anim['time'] = 0.
            else:
                anim['time'] += dt
            # calculate progression
            if self._duration:
                progress = min(1., anim['time'] / self._duration)
            else:
                # Zero-duration animation completes immediately.
                progress = 1
            t = transition(progress)
            # apply progression on widget
            for key, values in anim['properties'].items():
                a, b = values
                value = calculate(a, b, t)
                setattr(widget, key, value)
            self.dispatch('on_progress', widget, progress)
            # time to stop ?
            if progress >= 1.:
                self.stop(widget)
def _calculate(self, a, b, t):
_calculate = self._calculate
if isinstance(a, list) or isinstance(a, tuple):
if isinstance(a, list):
tp = list
else:
tp = tuple
return tp([_calculate(a[x], b[x], t) for x in range(len(a))])
elif isinstance(a, dict):
d = {}
for x in iterkeys(a):
if x not in b:
# User requested to animate only part of the dict.
# Copy the rest
d[x] = a[x]
else:
d[x] = _calculate(a[x], b[x], t)
return d
else:
return (a * (1. - t)) + (b * t)
#
# Default handlers
#
    def on_start(self, widget):
        # Default no-op handler; users bind to the 'on_start' event instead.
        pass
    def on_progress(self, widget, progress):
        # Default no-op handler for the per-frame 'on_progress' dispatch.
        pass
    def on_complete(self, widget):
        # Default no-op handler fired when an animation finishes normally
        # (cancel() explicitly skips this event -- see its docstring).
        pass
    def __add__(self, animation):
        # anim1 + anim2 -> play the two animations one after the other.
        return Sequence(self, animation)
    def __and__(self, animation):
        # anim1 & anim2 -> play the two animations at the same time.
        return Parallel(self, animation)
class Sequence(Animation):
    '''Play two animations one after the other. Instances are produced by
    the ``+`` operator on :class:`Animation`.
    '''
    def __init__(self, anim1, anim2):
        super(Sequence, self).__init__()
        #: Repeat the sequence. See 'Repeating animation' in the header
        #: documentation.
        self.repeat = False
        self.anim1 = anim1
        self.anim2 = anim2
        # Relay the children's events so the sequence reports start /
        # progress / completion as if it were a single animation.
        self.anim1.bind(on_start=self.on_anim1_start,
                        on_complete=self.on_anim1_complete,
                        on_progress=self.on_anim1_progress)
        self.anim2.bind(on_complete=self.on_anim2_complete,
                        on_progress=self.on_anim2_progress)
    @property
    def duration(self):
        return self.anim1.duration + self.anim2.duration
    def start(self, widget):
        self.stop(widget)
        # The per-widget entry is only a marker here; the child animations
        # hold the real interpolation state.
        self._widgets[widget.uid] = True
        self._register()
        self.anim1.start(widget)
    def stop(self, widget):
        self.anim1.stop(widget)
        self.anim2.stop(widget)
        props = self._widgets.pop(widget.uid, None)
        if props:
            self.dispatch('on_complete', widget)
        # Use the base-class cancel() for cleanup so on_complete is not
        # dispatched a second time by Animation.stop().
        super(Sequence, self).cancel(widget)
    def stop_property(self, widget, prop):
        self.anim1.stop_property(widget, prop)
        self.anim2.stop_property(widget, prop)
        if (not self.anim1.have_properties_to_animate(widget) and
            not self.anim2.have_properties_to_animate(widget)):
            self.stop(widget)
    def cancel(self, widget):
        self.anim1.cancel(widget)
        self.anim2.cancel(widget)
        super(Sequence, self).cancel(widget)
    def on_anim1_start(self, instance, widget):
        self.dispatch('on_start', widget)
    def on_anim1_complete(self, instance, widget):
        # Chain: the second animation begins when the first finishes.
        self.anim2.start(widget)
    def on_anim1_progress(self, instance, widget, progress):
        # NOTE(review): overall progress weighs both halves equally,
        # regardless of the children's actual durations.
        self.dispatch('on_progress', widget, progress / 2.)
    def on_anim2_complete(self, instance, widget):
        '''Repeating logic used with boolean variable "repeat".
        .. versionadded:: 1.7.1
        '''
        if self.repeat:
            self.anim1.start(widget)
        else:
            self.dispatch('on_complete', widget)
    def on_anim2_progress(self, instance, widget, progress):
        self.dispatch('on_progress', widget, .5 + progress / 2.)
class Parallel(Animation):
    '''Play two animations at the same time. Instances are produced by
    the ``&`` operator on :class:`Animation`.
    '''
    def __init__(self, anim1, anim2):
        super(Parallel, self).__init__()
        self.anim1 = anim1
        self.anim2 = anim2
        # Both children report into the same handler; the parallel group
        # completes when the second completion arrives.
        self.anim1.bind(on_complete=self.on_anim_complete)
        self.anim2.bind(on_complete=self.on_anim_complete)
    @property
    def duration(self):
        return max(self.anim1.duration, self.anim2.duration)
    def start(self, widget):
        self.stop(widget)
        self.anim1.start(widget)
        self.anim2.start(widget)
        # Per-widget counter of how many children have completed.
        self._widgets[widget.uid] = {'complete': 0}
        self._register()
        self.dispatch('on_start', widget)
    def stop(self, widget):
        self.anim1.stop(widget)
        self.anim2.stop(widget)
        props = self._widgets.pop(widget.uid, None)
        if props:
            self.dispatch('on_complete', widget)
        # cancel() (not stop()) so the base class does not re-dispatch.
        super(Parallel, self).cancel(widget)
    def stop_property(self, widget, prop):
        self.anim1.stop_property(widget, prop)
        self.anim2.stop_property(widget, prop)
        if (not self.anim1.have_properties_to_animate(widget) and
            not self.anim2.have_properties_to_animate(widget)):
            self.stop(widget)
    def cancel(self, widget):
        self.anim1.cancel(widget)
        self.anim2.cancel(widget)
        super(Parallel, self).cancel(widget)
    def on_anim_complete(self, instance, widget):
        # NOTE(review): assumes an entry still exists for widget.uid; a
        # stray child completion after stop() would raise KeyError --
        # confirm the children cannot fire on_complete at that point.
        self._widgets[widget.uid]['complete'] += 1
        if self._widgets[widget.uid]['complete'] == 2:
            self.stop(widget)
class AnimationTransition(object):
    '''Collection of animation functions to be used with the Animation object.
    Easing Functions ported to Kivy from the Clutter Project
    http://www.clutter-project.org/docs/clutter/stable/ClutterAlpha.html
    The `progress` parameter in each animation function is in the range 0-1.
    '''
    # Naming convention: in_* accelerates from rest, out_* decelerates to
    # rest, in_out_* does both halves; each returns the eased progress,
    # nominally in the 0-1 range (elastic/back overshoot by design).
    @staticmethod
    def linear(progress):
        '''.. image:: images/anim_linear.png'''
        return progress
    @staticmethod
    def in_quad(progress):
        '''.. image:: images/anim_in_quad.png
        '''
        return progress * progress
    @staticmethod
    def out_quad(progress):
        '''.. image:: images/anim_out_quad.png
        '''
        return -1.0 * progress * (progress - 2.0)
    @staticmethod
    def in_out_quad(progress):
        '''.. image:: images/anim_in_out_quad.png
        '''
        p = progress * 2
        if p < 1:
            return 0.5 * p * p
        p -= 1.0
        return -0.5 * (p * (p - 2.0) - 1.0)
    @staticmethod
    def in_cubic(progress):
        '''.. image:: images/anim_in_cubic.png
        '''
        return progress * progress * progress
    @staticmethod
    def out_cubic(progress):
        '''.. image:: images/anim_out_cubic.png
        '''
        p = progress - 1.0
        return p * p * p + 1.0
    @staticmethod
    def in_out_cubic(progress):
        '''.. image:: images/anim_in_out_cubic.png
        '''
        p = progress * 2
        if p < 1:
            return 0.5 * p * p * p
        p -= 2
        return 0.5 * (p * p * p + 2.0)
    @staticmethod
    def in_quart(progress):
        '''.. image:: images/anim_in_quart.png
        '''
        return progress * progress * progress * progress
    @staticmethod
    def out_quart(progress):
        '''.. image:: images/anim_out_quart.png
        '''
        p = progress - 1.0
        return -1.0 * (p * p * p * p - 1.0)
    @staticmethod
    def in_out_quart(progress):
        '''.. image:: images/anim_in_out_quart.png
        '''
        p = progress * 2
        if p < 1:
            return 0.5 * p * p * p * p
        p -= 2
        return -0.5 * (p * p * p * p - 2.0)
    @staticmethod
    def in_quint(progress):
        '''.. image:: images/anim_in_quint.png
        '''
        return progress * progress * progress * progress * progress
    @staticmethod
    def out_quint(progress):
        '''.. image:: images/anim_out_quint.png
        '''
        p = progress - 1.0
        return p * p * p * p * p + 1.0
    @staticmethod
    def in_out_quint(progress):
        '''.. image:: images/anim_in_out_quint.png
        '''
        p = progress * 2
        if p < 1:
            return 0.5 * p * p * p * p * p
        p -= 2.0
        return 0.5 * (p * p * p * p * p + 2.0)
    @staticmethod
    def in_sine(progress):
        '''.. image:: images/anim_in_sine.png
        '''
        return -1.0 * cos(progress * (pi / 2.0)) + 1.0
    @staticmethod
    def out_sine(progress):
        '''.. image:: images/anim_out_sine.png
        '''
        return sin(progress * (pi / 2.0))
    @staticmethod
    def in_out_sine(progress):
        '''.. image:: images/anim_in_out_sine.png
        '''
        return -0.5 * (cos(pi * progress) - 1.0)
    @staticmethod
    def in_expo(progress):
        '''.. image:: images/anim_in_expo.png
        '''
        # Exponential curves never reach exactly 0/1; the endpoints are
        # special-cased so the animation starts and ends precisely.
        if progress == 0:
            return 0.0
        return pow(2, 10 * (progress - 1.0))
    @staticmethod
    def out_expo(progress):
        '''.. image:: images/anim_out_expo.png
        '''
        if progress == 1.0:
            return 1.0
        return -pow(2, -10 * progress) + 1.0
    @staticmethod
    def in_out_expo(progress):
        '''.. image:: images/anim_in_out_expo.png
        '''
        if progress == 0:
            return 0.0
        if progress == 1.:
            return 1.0
        p = progress * 2
        if p < 1:
            return 0.5 * pow(2, 10 * (p - 1.0))
        p -= 1.0
        return 0.5 * (-pow(2, -10 * p) + 2.0)
    @staticmethod
    def in_circ(progress):
        '''.. image:: images/anim_in_circ.png
        '''
        return -1.0 * (sqrt(1.0 - progress * progress) - 1.0)
    @staticmethod
    def out_circ(progress):
        '''.. image:: images/anim_out_circ.png
        '''
        p = progress - 1.0
        return sqrt(1.0 - p * p)
    @staticmethod
    def in_out_circ(progress):
        '''.. image:: images/anim_in_out_circ.png
        '''
        p = progress * 2
        if p < 1:
            return -0.5 * (sqrt(1.0 - p * p) - 1.0)
        p -= 2.0
        return 0.5 * (sqrt(1.0 - p * p) + 1.0)
    @staticmethod
    def in_elastic(progress):
        '''.. image:: images/anim_in_elastic.png
        '''
        # p: period of the sine oscillation, s: phase shift of its argument.
        p = .3
        s = p / 4.0
        q = progress
        if q == 1:
            return 1.0
        q -= 1.0
        return -(pow(2, 10 * q) * sin((q - s) * (2 * pi) / p))
    @staticmethod
    def out_elastic(progress):
        '''.. image:: images/anim_out_elastic.png
        '''
        p = .3
        s = p / 4.0
        q = progress
        if q == 1:
            return 1.0
        return pow(2, -10 * q) * sin((q - s) * (2 * pi) / p) + 1.0
    @staticmethod
    def in_out_elastic(progress):
        '''.. image:: images/anim_in_out_elastic.png
        '''
        p = .3 * 1.5
        s = p / 4.0
        q = progress * 2
        if q == 2:
            return 1.0
        if q < 1:
            q -= 1.0
            return -.5 * (pow(2, 10 * q) * sin((q - s) * (2.0 * pi) / p))
        else:
            q -= 1.0
            return pow(2, -10 * q) * sin((q - s) * (2.0 * pi) / p) * .5 + 1.0
    @staticmethod
    def in_back(progress):
        '''.. image:: images/anim_in_back.png
        '''
        # 1.70158 is the conventional overshoot factor of the classic
        # 'back' easing equations.
        return progress * progress * ((1.70158 + 1.0) * progress - 1.70158)
    @staticmethod
    def out_back(progress):
        '''.. image:: images/anim_out_back.png
        '''
        p = progress - 1.0
        return p * p * ((1.70158 + 1) * p + 1.70158) + 1.0
    @staticmethod
    def in_out_back(progress):
        '''.. image:: images/anim_in_out_back.png
        '''
        p = progress * 2.
        s = 1.70158 * 1.525
        if p < 1:
            return 0.5 * (p * p * ((s + 1.0) * p - s))
        p -= 2.0
        return 0.5 * (p * p * ((s + 1.0) * p + s) + 2.0)
    @staticmethod
    def _out_bounce_internal(t, d):
        # Piecewise parabolic arcs shared by the bounce easings; t is the
        # elapsed portion, d the total duration (callers pass d=1).
        p = t / d
        if p < (1.0 / 2.75):
            return 7.5625 * p * p
        elif p < (2.0 / 2.75):
            p -= (1.5 / 2.75)
            return 7.5625 * p * p + .75
        elif p < (2.5 / 2.75):
            p -= (2.25 / 2.75)
            return 7.5625 * p * p + .9375
        else:
            p -= (2.625 / 2.75)
            return 7.5625 * p * p + .984375
    @staticmethod
    def _in_bounce_internal(t, d):
        # Mirror of the out-bounce curve around the midpoint.
        return 1.0 - AnimationTransition._out_bounce_internal(d - t, d)
    @staticmethod
    def in_bounce(progress):
        '''.. image:: images/anim_in_bounce.png
        '''
        return AnimationTransition._in_bounce_internal(progress, 1.)
    @staticmethod
    def out_bounce(progress):
        '''.. image:: images/anim_out_bounce.png
        '''
        return AnimationTransition._out_bounce_internal(progress, 1.)
    @staticmethod
    def in_out_bounce(progress):
        '''.. image:: images/anim_in_out_bounce.png
        '''
        p = progress * 2.
        if p < 1.:
            return AnimationTransition._in_bounce_internal(p, 1.) * .5
        return AnimationTransition._out_bounce_internal(p - 1., 1.) * .5 + .5
| mit |
yenbekbay/clinic | assets/components/tinymce/jscripts/tiny_mce/plugins/modxlink/langs/es.js | 68 | tinyMCE.addI18n('es.modxlink',{
link_desc:"Insert/edit link"
}); | mit |
darshanhs90/Android-SuperUtiliser-App | ResideMenuDemo/src/com/vinsol/expensetracker/utils/DisplayTimeForChronometer.java | 718 | /**
* Copyright (c) 2012 Vinayak Solutions Private Limited
* See the file license.txt for copying permission.
*/
package com.vinsol.expensetracker.utils;
public class DisplayTimeForChronometer {

	/**
	 * Formats a chronometer reading as zero-padded "MM:SS".
	 *
	 * @param timeinmillis elapsed time in milliseconds; assumed non-negative.
	 * @return minutes and seconds, e.g. 65000 -> "01:05". Runs longer than
	 *         99 minutes simply widen the minutes field ("100:00").
	 */
	public String getDisplayTime(long timeinmillis) {
		// Integer division/modulo split the duration; %02d zero-pads both
		// fields, replacing the previous boxed Long and manual "0" + n
		// string concatenation.
		long minutes = timeinmillis / 60000;
		long seconds = (timeinmillis % 60000) / 1000;
		return String.format("%02d:%02d", minutes, seconds);
	}
}
| mit |
bitpay/bitcore | packages/bitcore-p2p/lib/messages/commands/headers.js | 1915 | 'use strict';
var Message = require('../message');
var inherits = require('util').inherits;
var bitcore = require('bitcore-lib');
var utils = require('../utils');
var BufferReader = bitcore.encoding.BufferReader;
var BufferWriter = bitcore.encoding.BufferWriter;
var _ = bitcore.deps._;
var $ = bitcore.util.preconditions;
/**
* Sent in response to a `getheaders` message. It contains information about
* block headers.
* @param {Array} arg - An array of BlockHeader instances
* @param {Object=} options
* @param {Array=} options.headers - array of block headers
* @param {Function} options.BlockHeader - a BlockHeader constructor
* @extends Message
* @constructor
*/
function HeadersMessage(arg, options) {
  Message.call(this, options);
  // The BlockHeader constructor is injected via options so the caller
  // controls which class is used for parsing/serializing headers.
  this.BlockHeader = options.BlockHeader;
  this.command = 'headers';
  // Accept either no headers yet (undefined) or an array of BlockHeader
  // instances; only the first element is type-checked.
  $.checkArgument(
    _.isUndefined(arg) || (Array.isArray(arg) && arg[0] instanceof this.BlockHeader),
    'First argument is expected to be an array of BlockHeader instances'
  );
  this.headers = arg;
}
inherits(HeadersMessage, Message);
HeadersMessage.prototype.setPayload = function(payload) {
  $.checkArgument(payload && payload.length > 0, 'No data found to create Headers message');
  var parser = new BufferReader(payload);
  // Wire format: a varint header count, then each block header (parsed by
  // the injected BlockHeader class) followed by a one-byte transaction
  // count that a headers message requires to be zero.
  var count = parser.readVarintNum();
  this.headers = [];
  for (var i = 0; i < count; i++) {
    var header = this.BlockHeader.fromBufferReader(parser);
    this.headers.push(header);
    var txn_count = parser.readUInt8();
    $.checkState(txn_count === 0, 'txn_count should always be 0');
  }
  // Throws if unparsed bytes remain after the declared header count.
  utils.checkFinished(parser);
};
HeadersMessage.prototype.getPayload = function() {
  // Serialize: varint header count, then each header's raw bytes plus a
  // zero transaction-count byte (mirror of setPayload).
  var writer = new BufferWriter();
  writer.writeVarintNum(this.headers.length);
  this.headers.forEach(function(header) {
    writer.write(header.toBuffer());
    writer.writeUInt8(0);
  });
  return writer.concat();
};
module.exports = HeadersMessage;
| mit |
hanjin66/communitycause | comments.php | 4633 | <?php
/**
* The template for displaying comments
*
* The area of the page that contains both current comments
* and the comment form.
*
* @package FoundationPress
* @since FoundationPress 1.0.0
*/
// Render the existing comment list, but only on singular pages/posts
// (never on the blog index or front page).
if ( have_comments() ) :
	if ( (is_page() || is_single()) && ( ! is_home() && ! is_front_page()) ) :
	?>
	<section id="comments"><?php
	// Walk the comment tree with the theme's custom walker: HTML5 markup,
	// ordered-list style, 48px avatars, all comment types.
	wp_list_comments(
		array(
			'walker' => new Foundationpress_Comments(),
			'max_depth' => '',
			'style' => 'ol',
			'callback' => null,
			'end-callback' => null,
			'type' => 'all',
			'reply_text' => __( 'Reply', 'foundationpress' ),
			'page' => '',
			'per_page' => '',
			'avatar_size' => 48,
			'reverse_top_level' => null,
			'reverse_children' => '',
			'format' => 'html5',
			'short_ping' => false,
			'echo' => true,
			'moderation' => __( 'Your comment is awaiting moderation.', 'foundationpress' ),
		)
	);
	?>
	</section>
	<?php
	endif;
endif;
?>
<?php
/*
Do not delete these lines.
Prevent access to this file directly
*/
// NOTE(review): this direct-access guard only runs AFTER the comment list
// above has already executed; conventionally it belongs at the very top of
// the file -- confirm before relying on it for protection.
defined( 'ABSPATH' ) || die( __( 'Please do not load this page directly. Thanks!', 'foundationpress' ) );
// Password-protected post: show a notice instead of comments and bail out.
if ( post_password_required() ) { ?>
	<section id="comments">
		<div class="notice">
			<p class="bottom"><?php _e( 'This post is password protected. Enter the password to view comments.', 'foundationpress' ); ?></p>
		</div>
	</section>
	<?php
	return;
}
?>
<?php
// Render the reply form, again restricted to singular pages/posts.
// NOTE(review): $req, $user_identity, $comment_author, $comment_author_email
// and $comment_author_url are assumed to be in scope from the classic
// WordPress comment-template globals -- confirm they are populated here.
if ( comments_open() ) :
	if ( (is_page() || is_single()) && ( ! is_home() && ! is_front_page()) ) :
	?>
	<section id="respond">
		<h3>
			<?php
			comment_form_title(
				__( 'Leave a Reply', 'foundationpress' ),
				/* translators: %s: author of comment being replied to */
				__( 'Leave a Reply to %s', 'foundationpress' )
			);
			?>
		</h3>
		<p class="cancel-comment-reply"><?php cancel_comment_reply_link(); ?></p>
		<?php if ( get_option( 'comment_registration' ) && ! is_user_logged_in() ) : ?>
			<p>
			<?php
			/* translators: %s: login url */
			printf( __(
				'You must be <a href="%s">logged in</a> to post a comment.', 'foundationpress' ),
				wp_login_url( get_permalink() )
			);
			?>
			</p>
		<?php else : ?>
			<form action="<?php echo get_option( 'siteurl' ); ?>/wp-comments-post.php" method="post" id="commentform">
				<?php if ( is_user_logged_in() ) : ?>
					<p>
					<?php
					/* translators: %1$s: site url, %2$s: user identity */
					printf( __(
						'Logged in as <a href="%1$s/wp-admin/profile.php">%2$s</a>.', 'foundationpress' ),
						get_option( 'siteurl' ),
						$user_identity
					);
					?> <a href="<?php echo wp_logout_url( get_permalink() ); ?>" title="<?php __( 'Log out of this account', 'foundationpress' ); ?>"><?php _e( 'Log out »', 'foundationpress' ); ?></a>
					</p>
				<?php else : ?>
					<p>
						<label for="author">
						<?php
						_e( 'Name', 'foundationpress' ); if ( $req ) { _e( ' (required)', 'foundationpress' ); }
						?>
						</label>
						<input type="text" class="five" name="author" id="author" value="<?php echo esc_attr( $comment_author ); ?>" size="22" tabindex="1" <?php if ( $req ) { echo "aria-required='true'"; } ?>>
					</p>
					<p>
						<label for="email">
						<?php
						_e( 'Email (will not be published)', 'foundationpress' ); if ( $req ) { _e( ' (required)', 'foundationpress' ); }
						?>
						</label>
						<input type="text" class="five" name="email" id="email" value="<?php echo esc_attr( $comment_author_email ); ?>" size="22" tabindex="2" <?php if ( $req ) { echo "aria-required='true'"; } ?>>
					</p>
					<p>
						<label for="url">
						<?php
						_e( 'Website', 'foundationpress' );
						?>
						</label>
						<input type="text" class="five" name="url" id="url" value="<?php echo esc_attr( $comment_author_url ); ?>" size="22" tabindex="3">
					</p>
				<?php endif; ?>
				<p>
					<label for="comment">
					<?php
					_e( 'Comment', 'foundationpress' );
					?>
					</label>
					<textarea name="comment" id="comment" tabindex="4"></textarea>
				</p>
				<p id="allowed_tags" class="small"><strong>XHTML:</strong>
				<?php
				_e( 'You can use these tags:','foundationpress' );
				?>
				<code>
				<?php echo allowed_tags(); ?>
				</code>
				</p>
				<p><input name="submit" class="button" type="submit" id="submit" tabindex="5" value="<?php esc_attr_e( 'Submit Comment', 'foundationpress' ); ?>"></p>
				<?php comment_id_fields(); ?>
				<?php do_action( 'comment_form', $post->ID ); ?>
			</form>
		<?php endif; // If registration required and not logged in. ?>
	</section>
	<?php
	endif; // If you delete this the sky will fall on your head.
endif; // If you delete this the sky will fall on your head.
mdoviedor/InscripcionVirtual | web/kendo/src/js/cultures/kendo.culture.ha-Latn.js | 2567 | /*
* Kendo UI Web v2013.3.1119 (http://kendoui.com)
* Copyright 2013 Telerik AD. All rights reserved.
*
* Kendo UI Web commercial licenses may be obtained at
* https://www.kendoui.com/purchase/license-agreement/kendo-ui-web-commercial.aspx
* If you do not own a commercial license, this file shall be governed by the
* GNU General Public License (GPL) version 3.
* For GPL requirements, please review: http://www.gnu.org/copyleft/gpl.html
*/
(function( window, undefined ) {
    // Culture definition for Hausa (Latin script): number, percent and
    // currency formats plus Gregorian calendar names and date patterns.
    kendo.cultures["ha-Latn"] = {
        name: "ha-Latn",
        numberFormat: {
            pattern: ["-n"],
            decimals: 2,
            ",": ",",
            ".": ".",
            groupSize: [3],
            percent: {
                pattern: ["-n %","n %"],
                decimals: 2,
                ",": ",",
                ".": ".",
                groupSize: [3],
                symbol: "%"
            },
            currency: {
                pattern: ["$-n","$ n"],
                decimals: 2,
                ",": ",",
                ".": ".",
                groupSize: [3],
                symbol: "N"
            }
        },
        calendars: {
            standard: {
                days: {
                    names: ["Lahadi","Litinin","Talata","Laraba","Alhamis","Juma\u0027a","Asabar"],
                    namesAbbr: ["Lah","Lit","Tal","Lar","Alh","Jum","Asa"],
                    namesShort: ["L","L","T","L","A","J","A"]
                },
                months: {
                    names: ["Januwaru","Febreru","Maris","Afrilu","Mayu","Yuni","Yuli","Agusta","Satumba","Oktocba","Nuwamba","Disamba",""],
                    namesAbbr: ["Jan","Feb","Mar","Afr","May","Yun","Yul","Agu","Sat","Okt","Nuw","Dis",""]
                },
                AM: ["Safe","safe","SAFE"],
                PM: ["Yamma","yamma","YAMMA"],
                patterns: {
                    d: "d/M/yyyy",
                    D: "dddd, MMMM dd, yyyy",
                    F: "dddd, MMMM dd, yyyy h:mm:ss tt",
                    g: "d/M/yyyy h:mm tt",
                    G: "d/M/yyyy h:mm:ss tt",
                    m: "MMMM dd",
                    M: "MMMM dd",
                    s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
                    t: "h:mm tt",
                    T: "h:mm:ss tt",
                    u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
                    y: "MMMM, yyyy",
                    Y: "MMMM, yyyy"
                },
                "/": "/",
                ":": ":",
                firstDay: 0
            }
        }
    }
})(this);
| mit |
Azure/azure-sdk-for-net | sdk/search/Azure.Search.Documents/src/Generated/Models/SentimentSkill.Serialization.cs | 4865 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System.Collections.Generic;
using System.Text.Json;
using Azure.Core;
namespace Azure.Search.Documents.Indexes.Models
{
    // Auto-generated JSON (de)serialization half of the SentimentSkill
    // partial class; regenerating the SDK will discard hand edits here.
    public partial class SentimentSkill : IUtf8JsonSerializable
    {
        // Writes this skill as a JSON object in the REST wire format.
        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
        {
            writer.WriteStartObject();
            if (Optional.IsDefined(DefaultLanguageCode))
            {
                // The language code is nullable: a cleared value is emitted
                // as an explicit JSON null rather than omitted.
                if (DefaultLanguageCode != null)
                {
                    writer.WritePropertyName("defaultLanguageCode");
                    writer.WriteStringValue(DefaultLanguageCode.Value.ToString());
                }
                else
                {
                    writer.WriteNull("defaultLanguageCode");
                }
            }
            writer.WritePropertyName("@odata.type");
            writer.WriteStringValue(ODataType);
            if (Optional.IsDefined(Name))
            {
                writer.WritePropertyName("name");
                writer.WriteStringValue(Name);
            }
            if (Optional.IsDefined(Description))
            {
                writer.WritePropertyName("description");
                writer.WriteStringValue(Description);
            }
            if (Optional.IsDefined(Context))
            {
                writer.WritePropertyName("context");
                writer.WriteStringValue(Context);
            }
            writer.WritePropertyName("inputs");
            writer.WriteStartArray();
            foreach (var item in Inputs)
            {
                writer.WriteObjectValue(item);
            }
            writer.WriteEndArray();
            writer.WritePropertyName("outputs");
            writer.WriteStartArray();
            foreach (var item in Outputs)
            {
                writer.WriteObjectValue(item);
            }
            writer.WriteEndArray();
            writer.WriteEndObject();
        }
        // Parses a SentimentSkill from a JSON element; unknown properties
        // are silently ignored.
        internal static SentimentSkill DeserializeSentimentSkill(JsonElement element)
        {
            Optional<SentimentSkillLanguage?> defaultLanguageCode = default;
            string odataType = default;
            Optional<string> name = default;
            Optional<string> description = default;
            Optional<string> context = default;
            IList<InputFieldMappingEntry> inputs = default;
            IList<OutputFieldMappingEntry> outputs = default;
            foreach (var property in element.EnumerateObject())
            {
                if (property.NameEquals("defaultLanguageCode"))
                {
                    // Distinguish explicit JSON null from an absent property.
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        defaultLanguageCode = null;
                        continue;
                    }
                    defaultLanguageCode = new SentimentSkillLanguage(property.Value.GetString());
                    continue;
                }
                if (property.NameEquals("@odata.type"))
                {
                    odataType = property.Value.GetString();
                    continue;
                }
                if (property.NameEquals("name"))
                {
                    name = property.Value.GetString();
                    continue;
                }
                if (property.NameEquals("description"))
                {
                    description = property.Value.GetString();
                    continue;
                }
                if (property.NameEquals("context"))
                {
                    context = property.Value.GetString();
                    continue;
                }
                if (property.NameEquals("inputs"))
                {
                    List<InputFieldMappingEntry> array = new List<InputFieldMappingEntry>();
                    foreach (var item in property.Value.EnumerateArray())
                    {
                        array.Add(InputFieldMappingEntry.DeserializeInputFieldMappingEntry(item));
                    }
                    inputs = array;
                    continue;
                }
                if (property.NameEquals("outputs"))
                {
                    List<OutputFieldMappingEntry> array = new List<OutputFieldMappingEntry>();
                    foreach (var item in property.Value.EnumerateArray())
                    {
                        array.Add(OutputFieldMappingEntry.DeserializeOutputFieldMappingEntry(item));
                    }
                    outputs = array;
                    continue;
                }
            }
            return new SentimentSkill(odataType, name.Value, description.Value, context.Value, inputs, outputs, Optional.ToNullable(defaultLanguageCode));
        }
    }
}
| mit |
uhjish/bx-python | lib/bx/bbi/bigwig_tests.py | 3467 | import sys, os
import unittest
import numpy
try:
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
except:
sys.path.insert(0, os.path.dirname(os.path.abspath(".")))
from bx.bbi.bigwig_file import BigWigFile
def allclose( a, b, tol=0.00001 ):
    """
    Like numpy.allclose, except positions where the difference is NaN
    (i.e. either input is NaN) also count as matching.
    """
    diff = numpy.absolute(a - b)
    close_enough = diff < tol
    diff_is_nan = numpy.isnan(diff)
    return numpy.all(close_enough | diff_is_nan)
class TestBigWig(unittest.TestCase):
    # Python 2 test case: relies on map() returning a list and compares it
    # to list literals; reads a fixture BigWig file from test_data/.
    def setUp(self):
        # NOTE(review): the file handle is never closed; relies on process
        # teardown. The BigWigFile keeps reading from it lazily.
        f = open( "test_data/bbi_tests/test.bw" )
        self.bw = BigWigFile(file=f)
    def test_get_summary(self):
        # 10 summary bins over chr1:10000-20000; expected means were
        # captured from a reference run.
        data = self.bw.query("chr1", 10000, 20000, 10)
        means = [ x['mean'] for x in data ]
        assert numpy.allclose( map(float, means), [-0.17557571594973645, -0.054009292602539061, -0.056892242431640622, -0.03650328826904297, 0.036112907409667966, 0.0064466032981872557, 0.036949024200439454, 0.076638259887695306, 0.043518108367919923, 0.01554749584197998] )
        # Summarize variant
        sd = self.bw.summarize( "chr1", 10000, 20000, 10)
        assert numpy.allclose( sd.sum_data / sd.valid_count, [-0.17557571594973645, -0.054009292602539061, -0.056892242431640622, -0.03650328826904297, 0.036112907409667966, 0.0064466032981872557, 0.036949024200439454, 0.076638259887695306, 0.043518108367919923, 0.01554749584197998] )
        # Test min and max for this entire summary region
        data = self.bw.query("chr1", 10000, 20000, 1)
        maxs = [ x['max'] for x in data ]
        mins = [ x['min'] for x in data ]
        self.assertEqual( map(float, maxs), [0.289000004529953] )
        self.assertEqual( map(float, mins), [-3.9100000858306885] )
    def test_get_leaf(self):
        # Region small enough to be served from a single R-tree leaf.
        data = self.bw.query("chr1", 11000, 11005, 5)
        means = [ x['mean'] for x in data ]
        assert numpy.allclose( map(float, means), [0.050842501223087311, -2.4589500427246094, 0.050842501223087311, 0.050842501223087311, 0.050842501223087311] )
        # Test min and max for this entire leaf region
        data = self.bw.query("chr1", 11000, 11005, 1)
        maxs = [ x['max'] for x in data ]
        mins = [ x['min'] for x in data ]
        self.assertEqual( map(float, maxs), [0.050842501223087311] )
        self.assertEqual( map(float, mins), [-2.4589500427246094] )
    def test_wrong_nochrom(self):
        # Querying a chromosome absent from the file returns None,
        # not an empty list.
        data = self.bw.query("chr2", 0, 10000, 10)
        self.assertEqual( data, None )
# Nose test generator
# Nose test generator
def test_summaries_from_file():
    # Python 2 / nose-style generator: yields one (check, line) pair per
    # expectation row; each row is "chrom start end nbins type v1 v2 ...".
    bw = BigWigFile( file=open( "test_data/bbi_tests/test.bw" ) )
    def check_summary( line ):
        fields = line.split()
        chrom = fields[0]
        start = int( fields[1] )
        end = int( fields[2] )
        n = int( fields[3] )
        t = fields[4]
        # Expectation files use 'n/a' for bins with no data; map to NaN so
        # allclose (NaN-tolerant, defined above) can compare them.
        values = [ float( v.replace( 'n/a', 'NaN' ) ) for v in fields[5:] ]
        sd = bw.summarize( chrom, start, end, n )
        if t == 'mean':
            print sd.sum_data / sd.valid_count
            print values
            assert allclose( sd.sum_data / sd.valid_count, values )
        elif t == 'min':
            assert allclose( sd.min_val, values )
        elif t == 'max':
            assert allclose( sd.max_val, values )
        #elif t == 'std':
        #    assert numpy.allclose( sd.max_val, values )
    for line in open( "test_data/bbi_tests/test.expectation" ):
        yield check_summary, line
# Run the unittest suite when executed as a script (the nose generator
# above requires a nose runner instead).
if __name__ == '__main__':
    unittest.main()
| mit |
jianpingw/sails | lib/hooks/http/start.js | 6081 | module.exports = function (sails) {
/**
* Module dependencies.
*/
var async = require('async');
return function startServer (cb) {
// Used to warn about possible issues if starting the server is taking a long time
var liftAbortTimer;
var liftTimeout = sails.config.liftTimeout || 4000;
async.auto({
// Start Express server
start: function (cb) {
var explicitHost = sails.config.explicitHost;
// If host is explicitly declared, include it in express's listen() call
if (explicitHost) {
sails.log.verbose('Restricting access to explicit host: '+explicitHost);
sails.hooks.http.server.listen(sails.config.port, explicitHost, cb);
}
else {
// Listen for error events that may be emitted as the server attempts to start
sails.hooks.http.server.on('error', failedToStart);
sails.hooks.http.server.listen(sails.config.port, function(err) {
// Remove the error listener so future error events emitted by the server
// don't get handled by the "failedToStart" function below.
sails.hooks.http.server.removeListener('error', failedToStart);
cb(err);
});
}
// Start timer in case this takes suspiciously long...
liftAbortTimer = setTimeout(failedToStart, liftTimeout);
// If the server fails to start because of an error, or if it's just taking
// too long, show some troubleshooting notes and bail out.
function failedToStart(err) {
// If this was called because of an actual error, clear the timeout
// so failedToStart doesn't get called again.
if (err) {
clearTimeout(liftAbortTimer);
}
// If sails is exiting already, don't worry about the timer going off.
if (sails._exiting) {return;}
// Figure out if this user is on Windows
var isWin = !!process.platform.match(/^win/);
// If server isn't starting, provide general troubleshooting information,
// sharpened with a few simple heuristics:
console.log('');
if (err) {
sails.log.error('Server failed to start.');
if (err.code) {
sails.log.error('(received error: ' + err.code + ')');
}
} else {
sails.log.error('Server is taking a while to start up (it\'s been 4 seconds).');
}
sails.log.error();
sails.log.error('Troubleshooting tips:');
sails.log.error();
// 0. Just a slow Grunt task
if (sails.hooks.grunt && ! (err && err.code == 'EADDRINUSE')) {
if (process.env.NODE_ENV === 'production') {
sails.log.error(
' -> Do you have a slow Grunt task? You are running in production mode where, by default, tasks are configured to minify the JavaScript and CSS/LESS files in your assets/ directory. Sometimes, these processes can be slow, particularly if you have lots of these types of files.'
);
}
else {
sails.log.error(
' -> Do you have a slow Grunt task, or lots of assets?'
);
}
sails.log.error();
}
// 1. Unauthorized
if (sails.config.port < 1024) {
sails.log.error(
' -> Do you have permission to use port ' + sails.config.port + ' on this system?',
// Don't mention `sudo` to Windows users-- I hear you guys get touchy about that sort of thing :)
(isWin) ? '' : '(you might try `sudo`)'
);
sails.log.error();
}
// 2. Invalid or unauthorized explicitHost configuration.
if (explicitHost) {
sails.log.error(
' -> You might remove your explicit host configuration and try lifting again (you specified',
'`'+explicitHost+'`',
'.)');
sails.log.error();
}
// 3. Something else is running on this port
sails.log.error(
' -> Is something else already running on port', sails.config.port,
(explicitHost ? (' with hostname ' + explicitHost) : '') + '?'
);
sails.log.error();
// 4. invalid explicitHost
if (!explicitHost) {
sails.log.error(
' -> Are you deploying on a platform that requires an explicit hostname,',
'like OpenShift?');
sails.log.error(
' (Try setting the `explicitHost` config to the hostname where the server will be accessible.)'
);
sails.log.error(
' (e.g. `mydomain.com` or `183.24.244.42`)'
);
}
console.log('');
// Lower Sails to do any necessary cleanup
sails.lower(function(){
// Exit with a non-zero value to indicate an error
process.exit(1);
});
}
},
verify: ['start', function (cb) {
var explicitHost = sails.config.explicitHost;
// Check for port conflicts
// Ignore this check if explicit host is set, since other more complicated things might be going on.
if( !explicitHost && !sails.hooks.http.server.address() ) {
var portBusyError = '';
portBusyError += 'Trying to start server on port ' + sails.config.port + ' but can\'t...';
portBusyError += 'Something else is probably running on that port!' + '\n';
portBusyError += 'Please disable the other server, or choose a different port and try again.';
sails.log.error(portBusyError);
throw new Error(portBusyError);
}
cb();
}]
}, function expressListening (err) {
clearTimeout(liftAbortTimer);
if (err) return cb(err);
// Announce that express is now listening on a port
sails.emit('hook:http:listening');
cb && cb(err);
});
};
};
| mit |
ericmartinezr/angular | modules/@angular/compiler/test/directive_resolver_spec.ts | 6317 | import {ddescribe, describe, it, iit, expect, beforeEach} from '@angular/core/testing';
import {DirectiveResolver} from '@angular/compiler/src/directive_resolver';
import {
DirectiveMetadata,
Directive,
Input,
Output,
HostBinding,
HostListener,
ContentChildren,
ContentChildrenMetadata,
ViewChildren,
ViewChildrenMetadata,
ContentChild,
ContentChildMetadata,
ViewChild,
ViewChildMetadata
} from '@angular/core/src/metadata';
// --- Test fixtures -------------------------------------------------------
// Minimal directive classes, one per metadata channel the DirectiveResolver
// must pick up: inputs/outputs (decorator and list forms, including
// renames and duplicates), host bindings/listeners, deprecated
// properties/events aliases, and content/view queries.
@Directive({selector: 'someDirective'})
class SomeDirective {
}
// Child used to verify that parent metadata is NOT inherited.
@Directive({selector: 'someChildDirective'})
class SomeChildDirective extends SomeDirective {
}
@Directive({selector: 'someDirective', inputs: ['c']})
class SomeDirectiveWithInputs {
  @Input() a;
  @Input("renamed") b;
  c;
}
@Directive({selector: 'someDirective', outputs: ['c']})
class SomeDirectiveWithOutputs {
  @Output() a;
  @Output("renamed") b;
  c;
}
// 'a' is declared both in the outputs list and via @Output (duplicate case).
@Directive({selector: 'someDirective', outputs: ['a']})
class SomeDirectiveWithDuplicateOutputs {
  @Output() a;
}
@Directive({selector: 'someDirective', properties: ['a']})
class SomeDirectiveWithProperties {
}
@Directive({selector: 'someDirective', events: ['a']})
class SomeDirectiveWithEvents {
}
@Directive({selector: 'someDirective'})
class SomeDirectiveWithSetterProps {
  @Input("renamed")
  set a(value) {
  }
}
@Directive({selector: 'someDirective'})
class SomeDirectiveWithGetterOutputs {
  @Output("renamed")
  get a() {
    return null;
  }
}
@Directive({selector: 'someDirective', host: {'[c]': 'c'}})
class SomeDirectiveWithHostBindings {
  @HostBinding() a;
  @HostBinding("renamed") b;
  c;
}
@Directive({selector: 'someDirective', host: {'(c)': 'onC()'}})
class SomeDirectiveWithHostListeners {
  @HostListener('a')
  onA() {
  }
  @HostListener('b', ['$event.value'])
  onB(value) {
  }
}
@Directive({selector: 'someDirective', queries: {"cs": new ContentChildren("c")}})
class SomeDirectiveWithContentChildren {
  @ContentChildren("a") as: any;
  c;
}
@Directive({selector: 'someDirective', queries: {"cs": new ViewChildren("c")}})
class SomeDirectiveWithViewChildren {
  @ViewChildren("a") as: any;
  c;
}
@Directive({selector: 'someDirective', queries: {"c": new ContentChild("c")}})
class SomeDirectiveWithContentChild {
  @ContentChild("a") a: any;
  c;
}
@Directive({selector: 'someDirective', queries: {"c": new ViewChild("c")}})
class SomeDirectiveWithViewChild {
  @ViewChild("a") a: any;
  c;
}
// Directive with no decorator at all: resolver must throw for it.
class SomeDirectiveWithoutMetadata {}
// Entry point for the DirectiveResolver test suite: verifies that
// DirectiveResolver.resolve() merges decorator-level metadata with
// property-decorator metadata (inputs, outputs, host bindings/listeners,
// queries) on the fixture classes defined above.
export function main() {
  describe("DirectiveResolver", () => {
    var resolver: DirectiveResolver;
    beforeEach(() => { resolver = new DirectiveResolver(); });
    it('should read out the Directive metadata', () => {
      var directiveMetadata = resolver.resolve(SomeDirective);
      expect(directiveMetadata)
          .toEqual(new DirectiveMetadata(
              {selector: 'someDirective', inputs: [], outputs: [], host: {}, queries: {}}));
    });
    it('should throw if not matching metadata is found', () => {
      expect(() => { resolver.resolve(SomeDirectiveWithoutMetadata); })
          .toThrowError('No Directive annotation found on SomeDirectiveWithoutMetadata');
    });
    // Resolution is per-class only: SomeChildDirective must NOT pick up
    // metadata from its SomeDirective parent.
    it('should not read parent class Directive metadata', function() {
      var directiveMetadata = resolver.resolve(SomeChildDirective);
      expect(directiveMetadata)
          .toEqual(new DirectiveMetadata(
              {selector: 'someChildDirective', inputs: [], outputs: [], host: {}, queries: {}}));
    });
    describe('inputs', () => {
      // Decorator-declared inputs come first, property-decorator inputs are
      // appended (renamed ones as 'prop: bindingName').
      it('should append directive inputs', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithInputs);
        expect(directiveMetadata.inputs).toEqual(['c', 'a', 'b: renamed']);
      });
      it('should work with getters and setters', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithSetterProps);
        expect(directiveMetadata.inputs).toEqual(['a: renamed']);
      });
    });
    describe('outputs', () => {
      it('should append directive outputs', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithOutputs);
        expect(directiveMetadata.outputs).toEqual(['c', 'a', 'b: renamed']);
      });
      it('should work with getters and setters', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithGetterOutputs);
        expect(directiveMetadata.outputs).toEqual(['a: renamed']);
      });
      // Duplicate output declarations (decorator + @Output) must be rejected.
      it('should throw if duplicate outputs', () => {
        expect(() => { resolver.resolve(SomeDirectiveWithDuplicateOutputs); })
            .toThrowError(
                `Output event 'a' defined multiple times in 'SomeDirectiveWithDuplicateOutputs'`);
      });
    });
    describe('host', () => {
      it('should append host bindings', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithHostBindings);
        expect(directiveMetadata.host).toEqual({'[c]': 'c', '[a]': 'a', '[renamed]': 'b'});
      });
      it('should append host listeners', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithHostListeners);
        expect(directiveMetadata.host)
            .toEqual({'(c)': 'onC()', '(a)': 'onA()', '(b)': 'onB($event.value)'});
      });
    });
    describe('queries', () => {
      it('should append ContentChildren', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithContentChildren);
        expect(directiveMetadata.queries)
            .toEqual({"cs": new ContentChildren("c"), "as": new ContentChildren("a")});
      });
      it('should append ViewChildren', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithViewChildren);
        expect(directiveMetadata.queries)
            .toEqual({"cs": new ViewChildren("c"), "as": new ViewChildren("a")});
      });
      it('should append ContentChild', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithContentChild);
        expect(directiveMetadata.queries)
            .toEqual({"c": new ContentChild("c"), "a": new ContentChild("a")});
      });
      it('should append ViewChild', () => {
        var directiveMetadata = resolver.resolve(SomeDirectiveWithViewChild);
        expect(directiveMetadata.queries)
            .toEqual({"c": new ViewChild("c"), "a": new ViewChild("a")});
      });
    });
  });
}
| mit |
ealbertos/dotfiles | vscode.symlink/extensions/ms-mssql.mssql-1.11.1/node_modules/underscore/cjs/isMap.js | 305 | var _tagTester = require('./_tagTester.js');
var _stringTagBug = require('./_stringTagBug.js');
var _methodFingerprint = require('./_methodFingerprint.js');
var isMap = _stringTagBug.isIE11 ? _methodFingerprint.ie11fingerprint(_methodFingerprint.mapMethods) : _tagTester('Map');
module.exports = isMap;
| mit |
beni55/vektor | sonar-scan/public/js/node_modules/jeesh/node_modules/qwery/tests/tests.js | 25258 | // silly custom pseudo just for tests
Q.pseudos.humanoid = function(e, v) { return Q.is(e, 'li:contains(human)') || Q.is(e, 'ol:contains(human)') }
var hasQSA = !!document.querySelectorAll
, sinkSuite = function (label, suite) {
sink(label + (hasQSA ? ' [qSA]' : ''), function () {
hasQSA && Q.configure({ useNativeQSA: true })
suite.apply(null, arguments)
})
hasQSA && sink(label + ' [non-QSA]', function () {
Q.configure({ useNativeQSA: false })
suite.apply(null, arguments)
})
}
sinkSuite('Contexts', function (test, ok) {
test('should be able to pass optional context', 2, function () {
ok(Q('.a').length === 3, 'no context found 3 elements (.a)');
ok(Q('.a', Q('#boosh')).length === 2, 'context found 2 elements (#boosh .a)');
});
test('should be able to pass string as context', 5, function() {
ok(Q('.a', '#boosh').length == 2, 'context found 2 elements(.a, #boosh)');
ok(Q('.a', '.a').length == 0, 'context found 0 elements(.a, .a)');
ok(Q('.a', '.b').length == 1, 'context found 1 elements(.a, .b)');
ok(Q('.a', '#boosh .b').length == 1, 'context found 1 elements(.a, #boosh .b)');
ok(Q('.b', '#boosh .b').length == 0, 'context found 0 elements(.b, #boosh .b)');
});
test('should be able to pass qwery result as context', 5, function() {
ok(Q('.a', Q('#boosh')).length == 2, 'context found 2 elements(.a, #boosh)');
ok(Q('.a', Q('.a')).length == 0, 'context found 0 elements(.a, .a)');
ok(Q('.a', Q('.b')).length == 1, 'context found 1 elements(.a, .b)');
ok(Q('.a', Q('#boosh .b')).length == 1, 'context found 1 elements(.a, #boosh .b)');
ok(Q('.b', Q('#boosh .b')).length == 0, 'context found 0 elements(.b, #boosh .b)');
});
test('should not return duplicates from combinators', 2, function () {
ok(Q('#boosh,#boosh').length == 1, 'two booshes dont make a thing go right');
ok(Q('#boosh,.apples,#boosh').length == 1, 'two booshes and an apple dont make a thing go right');
});
test('byId sub-queries within context', 6, function() {
ok(Q('#booshTest', Q('#boosh')).length == 1, 'found "#id #id"')
ok(Q('.a.b #booshTest', Q('#boosh')).length == 1, 'found ".class.class #id"')
ok(Q('.a>#booshTest', Q('#boosh')).length == 1, 'found ".class>#id"')
ok(Q('>.a>#booshTest', Q('#boosh')).length == 1, 'found ">.class>#id"')
ok(!Q('#boosh', Q('#booshTest')).length, 'shouldn\'t find #boosh (ancestor) within #booshTest (descendent)')
ok(!Q('#boosh', Q('#lonelyBoosh')).length, 'shouldn\'t find #boosh within #lonelyBoosh (unrelated)')
})
})
// Suite: CSS level 1 selectors (ids, classes, tags, descendant combinators).
sinkSuite('CSS 1', function (test, ok) {
  test('get element by id', 2, function () {
    var result = Q('#boosh');
    ok(!!result[0], 'found element with id=boosh');
    ok(!!Q('h1')[0], 'found 1 h1');
  });
  test('byId sub-queries', 4, function() {
    ok(Q('#boosh #booshTest').length == 1, 'found "#id #id"')
    ok(Q('.a.b #booshTest').length == 1, 'found ".class.class #id"')
    ok(Q('#boosh>.a>#booshTest').length == 1, 'found "#id>.class>#id"')
    ok(Q('.a>#booshTest').length == 1, 'found ".class>#id"')
  })
  test('get elements by class', 6, function () {
    ok(Q('#boosh .a').length == 2, 'found two elements');
    ok(!!Q('#boosh div.a')[0], 'found one element');
    ok(Q('#boosh div').length == 2, 'found two {div} elements');
    ok(!!Q('#boosh span')[0], 'found one {span} element');
    ok(!!Q('#boosh div div')[0], 'found a single div');
    ok(Q('a.odd').length == 1, 'found single a');
  });
  test('combos', 1, function () {
    ok(Q('#boosh div,#boosh span').length == 3, 'found 2 divs and 1 span');
  });
  test('class with dashes', 1, function() {
    ok(Q('.class-with-dashes').length == 1, 'found something');
  });
  test('should ignore comment nodes', 1, function() {
    ok(Q('#boosh *').length === 4, 'found only 4 elements under #boosh')
  });
  test('deep messy relationships', 6, function() {
    // these are mostly characterised by a combination of tight relationships and loose relationships
    // on the right side of the query it's easy to find matches but they tighten up quickly as you
    // go to the left
    // they are useful for making sure the dom crawler doesn't stop short or over-extend as it works
    // up the tree the crawl needs to be comprehensive
    // FIX: the assertion checks for 5 matches but the message said "four" — message corrected.
    ok(Q('div#fixtures > div a').length == 5, 'found five results for "div#fixtures > div a"')
    ok(Q('.direct-descend > .direct-descend .lvl2').length == 1, 'found one result for ".direct-descend > .direct-descend .lvl2"')
    // NOTE(review): the next two assertions are identical duplicates; both kept
    // because the declared assertion count for this test is 6 — confirm intent.
    ok(Q('.direct-descend > .direct-descend div').length == 1, 'found one result for ".direct-descend > .direct-descend div"')
    ok(Q('.direct-descend > .direct-descend div').length == 1, 'found one result for ".direct-descend > .direct-descend div"')
    ok(Q('div#fixtures div ~ a div').length == 0, 'found no results for odd query')
    ok(Q('.direct-descend > .direct-descend > .direct-descend ~ .lvl2').length == 0, 'found no results for another odd query')
  });
});
// Suite: CSS level 2 attribute selectors.
sinkSuite('CSS 2', function (test, ok) {
  test('get elements by attribute', 4, function () {
    var wanted = Q('#boosh div[test]')[0];
    var expected = document.getElementById('booshTest');
    ok(wanted == expected, 'found attribute');
    ok(Q('#boosh div[test=fg]')[0] == expected, 'found attribute with value');
    ok(Q('em[rel~="copyright"]').length == 1, 'found em[rel~="copyright"]');
    // FIX: this asserts that NO element matches, so the success message must
    // not claim one was found (previously read 'found em[nopass~="copyright"]').
    ok(Q('em[nopass~="copyright"]').length == 0, 'did not find em[nopass~="copyright"]');
  });
  test('should not throw error by attribute selector', 1, function () {
    ok(Q('[foo^="bar"]').length === 1, 'found 1 element');
  });
  test('crazy town', 1, function () {
    var el = document.getElementById('attr-test3');
    ok(Q('div#attr-test3.found.you[title="whatup duders"]')[0] == el, 'found the right element');
  });
});
// Suite: CSS 2 and CSS 3 attribute operators plus sibling/descendant combinators.
sinkSuite('attribute selectors', function (test, ok, b, a, assert) {
  /* CSS 2 SPEC */
  test('[attr]', 1, function () {
    var expected = document.getElementById('attr-test-1');
    ok(Q('#attributes div[unique-test]')[0] == expected, 'found attribute with [attr]');
  });
  // All three quoting styles (double, single, bare) must behave the same.
  test('[attr=val]', 3, function () {
    var expected = document.getElementById('attr-test-2');
    ok(Q('#attributes div[test="two-foo"]')[0] == expected, 'found attribute with =');
    ok(Q("#attributes div[test='two-foo']")[0] == expected, 'found attribute with =');
    ok(Q('#attributes div[test=two-foo]')[0] == expected, 'found attribute with =');
  });
  test('[attr~=val]', 1, function () {
    var expected = document.getElementById('attr-test-3');
    ok(Q('#attributes div[test~=three]')[0] == expected, 'found attribute with ~=');
  });
  test('[attr|=val]', 2, function () {
    var expected = document.getElementById('attr-test-2');
    ok(Q('#attributes div[test|="two-foo"]')[0] == expected, 'found attribute with |=');
    ok(Q('#attributes div[test|=two]')[0] == expected, 'found attribute with |=');
  });
  // href="#x" is special-cased because some old IEs return the resolved URL.
  test('[href=#x] special case', 1, function () {
    var expected = document.getElementById('attr-test-4');
    ok(Q('#attributes a[href="#aname"]')[0] == expected, 'found attribute with href=#x');
  });
  /* CSS 3 SPEC */
  test('[attr^=val]', 1, function () {
    var expected = document.getElementById('attr-test-2');
    ok(Q('#attributes div[test^=two]')[0] == expected, 'found attribute with ^=');
  });
  test('[attr$=val]', 1, function () {
    var expected = document.getElementById('attr-test-2');
    ok(Q('#attributes div[test$=foo]')[0] == expected, 'found attribute with $=');
  });
  test('[attr*=val]', 1, function () {
    var expected = document.getElementById('attr-test-3');
    ok(Q('#attributes div[test*=hree]')[0] == expected, 'found attribute with *=');
  });
  test('direct descendants', 2, function () {
    ok(Q('#direct-descend > .direct-descend').length == 2, 'found two direct descendents');
    ok(Q('#direct-descend > .direct-descend > .lvl2').length == 3, 'found three second-level direct descendents');
  });
  // ~ matches all following siblings, + only the immediately adjacent one;
  // neither may match the element itself or older siblings.
  test('sibling elements', 17, function () {
    assert(Q('#sibling-selector ~ .sibling-selector').length, 2, 'found two siblings')
    assert(Q('#sibling-selector ~ div.sibling-selector').length, 2, 'found two siblings')
    assert(Q('#sibling-selector + div.sibling-selector').length, 1, 'found one sibling')
    assert(Q('#sibling-selector + .sibling-selector').length, 1, 'found one sibling')
    assert(Q('.parent .oldest ~ .sibling').length, 4, 'found four younger siblings')
    assert(Q('.parent .middle ~ .sibling').length, 2, 'found two younger siblings')
    assert(Q('.parent .middle ~ h4').length, 1, 'found next sibling by tag')
    assert(Q('.parent .middle ~ h4.younger').length, 1, 'found next sibling by tag and class')
    assert(Q('.parent .middle ~ h3').length, 0, 'an element can\'t be its own sibling')
    assert(Q('.parent .middle ~ h2').length, 0, 'didn\'t find an older sibling')
    assert(Q('.parent .youngest ~ .sibling').length, 0, 'found no younger siblings')
    assert(Q('.parent .oldest + .sibling').length, 1, 'found next sibling')
    assert(Q('.parent .middle + .sibling').length, 1, 'found next sibling')
    assert(Q('.parent .middle + h4').length, 1, 'found next sibling by tag')
    assert(Q('.parent .middle + h3').length, 0, 'an element can\'t be its own sibling')
    assert(Q('.parent .middle + h2').length, 0, 'didn\'t find an older sibling')
    assert(Q('.parent .youngest + .sibling').length, 0, 'found no younger siblings')
  });
});

// Suite: the Q.uniq() de-duplication helper.
sinkSuite('Uniq', function (test, ok) {
  test('duplicates arent found in arrays', 2, function () {
    ok(Q.uniq(['a', 'b', 'c', 'd', 'e', 'a', 'b', 'c', 'd', 'e']).length == 5, 'result should be a, b, c, d, e')
    ok(Q.uniq(['a', 'b', 'c', 'c', 'c']).length == 3, 'result should be a, b, c')
  })
})

// Suite: queries rooted at a DOM element, including relationship-first
// selectors ("> x", "~ x", "+ x") and detached (not-yet-inserted) fragments.
sinkSuite('element-context queries', function(test, ok) {
  test('relationship-first queries', 5, function() {
    var pass = false
    try { pass = Q('> .direct-descend', Q('#direct-descend')).length == 2 } catch (e) { }
    ok(pass, 'found two direct descendents using > first');
    pass = false
    try { pass = Q('~ .sibling-selector', Q('#sibling-selector')).length == 2 } catch (e) { }
    ok(pass, 'found two siblings with ~ first')
    pass = false
    try { pass = Q('+ .sibling-selector', Q('#sibling-selector')).length == 1 } catch (e) { }
    ok(pass, 'found one sibling with + first')
    pass = false
    var ctx = Q('.idless')[0]
    try { pass = Q('> .tokens a', ctx).length == 1 } catch (e) { }
    ok(pass, 'found one sibling from a root with no id')
    // The engine must not mutate the context element (e.g. by assigning a
    // temporary id) as a side effect of scoping the query.
    ok(!ctx.getAttribute('id'), 'root element used for selection still has no id')
  })
  // should be able to query on an element that hasn't been inserted into the dom
  var frag = document.createElement('div')
  frag.innerHTML = '<div class="d i v"><p id="oooo"><em></em><em id="emem"></em></p></div><p id="sep"><div class="a"><span></span></div></p>'
  test('detached fragments', 2, function() {
    ok(Q('.a span', frag).length == 1, 'should find child elements of fragment')
    ok(Q('> div p em', frag).length == 2, 'should find child elements of fragment, relationship first')
  })
  // #id lookups can't use document.getElementById on detached nodes.
  test('byId sub-queries within detached fragment', 6, function () {
    ok(Q('#emem', frag).length == 1, 'found "#id" in fragment')
    ok(Q('.d.i #emem', frag).length == 1, 'found ".class.class #id" in fragment')
    ok(Q('.d #oooo #emem', frag).length == 1, 'found ".class #id #id" in fragment')
    ok(Q('> div #oooo', frag).length == 1, 'found "> .class #id" in fragment')
    ok(!Q('#oooo', Q('#emem', frag)).length, 'shouldn\'t find #oooo (ancestor) within #emem (descendent)')
    ok(!Q('#sep', Q('#emem', frag)).length, 'shouldn\'t find #sep within #emem (unrelated)')
  })
  test('exclude self in match', 1, function() {
    ok(Q('.order-matters', Q('#order-matters')).length == 4, 'should not include self in element-context queries')
  });
  // because form's have .length
  test('forms can be used as contexts', 1, function() {
    ok(Q('*', Q('form')[0]).length === 3, 'found 3 elements under <form>')
  })
})

// Suite: selector tokenizer — attribute values containing spaces, quotes
// and punctuation must not split into bogus tokens.
sinkSuite('tokenizer', function (test, ok) {
  test('should not get weird tokens', 5, function () {
    ok(Q('div .tokens[title="one"]')[0] == document.getElementById('token-one'), 'found div .tokens[title="one"]');
    ok(Q('div .tokens[title="one two"]')[0] == document.getElementById('token-two'), 'found div .tokens[title="one two"]');
    ok(Q('div .tokens[title="one two three #%"]')[0] == document.getElementById('token-three'), 'found div .tokens[title="one two three #%"]');
    ok(Q("div .tokens[title='one two three #%'] a")[0] == document.getElementById('token-four'), 'found div .tokens[title=\'one two three #%\'] a');
    ok(Q('div .tokens[title="one two three #%"] a[href$=foo] div')[0] == document.getElementById('token-five'), 'found div .tokens[title="one two three #%"] a[href=foo] div');
  });
});

// Suite: selectors with irregular whitespace must still parse.
sinkSuite('interesting syntaxes', function (test, ok) {
  test('should parse bad selectors', 1, function () {
    ok(Q('#spaced-tokens    p    em    a').length, 'found element with funny tokens')
  });
});

// Suite: results must come back in document order, not match order.
sinkSuite('order matters', function (test, ok) {
  function tag(el) {
    return el.tagName.toLowerCase();
  }
  // <div id="order-matters">
  //   <p class="order-matters"></p>
  //   <a class="order-matters">
  //     <em class="order-matters"></em><b class="order-matters"></b>
  //   </a>
  // </div>
  test('the order of elements return matters', 4, function () {
    var els = Q('#order-matters .order-matters');
    ok(tag(els[0]) == 'p', 'first element matched is a {p} tag');
    ok(tag(els[1]) == 'a', 'first element matched is a {a} tag');
    ok(tag(els[2]) == 'em', 'first element matched is a {em} tag');
    ok(tag(els[3]) == 'b', 'first element matched is a {b} tag');
  });
});
// Suite: pseudo-class selectors (:contains, :not, structural :nth-* family,
// :target and user-registered custom pseudos).
sinkSuite('pseudo-selectors', function (test, ok) {
  test(':contains', 4, function() {
    ok(Q('li:contains(humans)').length == 1, 'found by "element:contains(text)"')
    ok(Q(':contains(humans)').length == 5, 'found by ":contains(text)", including all ancestors')
    // * is an important case, can cause weird errors
    ok(Q('*:contains(humans)').length == 5, 'found by "*:contains(text)", including all ancestors')
    ok(Q('ol:contains(humans)').length == 1, 'found by "ancestor:contains(text)"')
  })
  test(':not', 1, function() {
    ok(Q('.odd:not(div)').length == 1, 'found one .odd :not an <a>')
  })
  test(':first-child', 2, function () {
    ok(Q('#pseudos div:first-child')[0] == document.getElementById('pseudos').getElementsByTagName('*')[0], 'found first child')
    ok(Q('#pseudos div:first-child').length == 1, 'found only 1')
  });
  test(':last-child', 2, function () {
    var all = document.getElementById('pseudos').getElementsByTagName('div');
    ok(Q('#pseudos div:last-child')[0] == all[all.length - 1], 'found last child')
    ok(Q('#pseudos div:last-child').length == 1, 'found only 1')
  });
  test('ol > li[attr="boosh"]:last-child', 2, function () {
    var expected = document.getElementById('attr-child-boosh');
    ok(Q('ol > li[attr="boosh"]:last-child').length == 1, 'only 1 element found');
    ok(Q('ol > li[attr="boosh"]:last-child')[0] == expected, 'found correct element');
  });
  test(':nth-child(odd|even|x)', 4, function () {
    var second = document.getElementById('pseudos').getElementsByTagName('div')[1];
    ok(Q('#pseudos :nth-child(odd)').length == 4, 'found 4 odd elements');
    ok(Q('#pseudos div:nth-child(odd)').length == 3, 'found 3 odd elements with div tag');
    ok(Q('#pseudos div:nth-child(even)').length == 3, 'found 3 even elements with div tag');
    ok(Q('#pseudos div:nth-child(2)')[0] == second, 'found 2nd nth-child of pseudos');
  });
  // an+b expressions, including negative coefficients.
  test(':nth-child(expr)', 6, function () {
    var fifth = document.getElementById('pseudos').getElementsByTagName('a')[0];
    var sixth = document.getElementById('pseudos').getElementsByTagName('div')[4];
    ok(Q('#pseudos :nth-child(3n+1)').length == 3, 'found 3 elements');
    ok(Q('#pseudos :nth-child(3n-2)').length == 3, 'found 3 elements'); // was +3n-2 but older safari no likey +
    ok(Q('#pseudos :nth-child(-n+6)').length == 6, 'found 6 elements');
    ok(Q('#pseudos :nth-child(-n+5)').length == 5, 'found 5 elements');
    ok(Q('#pseudos :nth-child(3n+2)')[1] == fifth, 'second :nth-child(3n+2) is the fifth child');
    ok(Q('#pseudos :nth-child(3n)')[1] == sixth, 'second :nth-child(3n) is the sixth child');
  });
  test(':nth-last-child(odd|even|x)', 4, function () {
    var second = document.getElementById('pseudos').getElementsByTagName('div')[1];
    ok(Q('#pseudos :nth-last-child(odd)').length == 4, 'found 4 odd elements');
    ok(Q('#pseudos div:nth-last-child(odd)').length == 3, 'found 3 odd elements with div tag');
    ok(Q('#pseudos div:nth-last-child(even)').length == 3, 'found 3 even elements with div tag');
    ok(Q('#pseudos div:nth-last-child(6)')[0] == second, '6th nth-last-child should be 2nd of 7 elements');
  });
  test(':nth-last-child(expr)', 5, function () {
    var third = document.getElementById('pseudos').getElementsByTagName('div')[2];
    ok(Q('#pseudos :nth-last-child(3n+1)').length == 3, 'found 3 elements');
    ok(Q('#pseudos :nth-last-child(3n-2)').length == 3, 'found 3 elements');
    ok(Q('#pseudos :nth-last-child(-n+6)').length == 6, 'found 6 elements');
    ok(Q('#pseudos :nth-last-child(-n+5)').length == 5, 'found 5 elements');
    ok(Q('#pseudos :nth-last-child(3n+2)')[0] == third, 'first :nth-last-child(3n+2) is the third child');
  });
  test(':nth-of-type(expr)', 6, function () {
    var a = document.getElementById('pseudos').getElementsByTagName('a')[0];
    ok(Q('#pseudos div:nth-of-type(3n+1)').length == 2, 'found 2 div elements');
    ok(Q('#pseudos a:nth-of-type(3n+1)').length == 1, 'found 1 a element');
    ok(Q('#pseudos a:nth-of-type(3n+1)')[0] == a, 'found the right a element');
    ok(Q('#pseudos a:nth-of-type(3n)').length == 0, 'no matches for every third a');
    ok(Q('#pseudos a:nth-of-type(odd)').length == 1, 'found the odd a');
    ok(Q('#pseudos a:nth-of-type(1)').length == 1, 'found the first a');
  });
  test(':nth-last-of-type(expr)', 3, function () {
    var second = document.getElementById('pseudos').getElementsByTagName('div')[1];
    ok(Q('#pseudos div:nth-last-of-type(3n+1)').length == 2, 'found 2 div elements');
    ok(Q('#pseudos a:nth-last-of-type(3n+1)').length == 1, 'found 1 a element');
    ok(Q('#pseudos div:nth-last-of-type(5)')[0] == second, '5th nth-last-of-type should be 2nd of 7 elements');
  });
  test(':first-of-type', 2, function () {
    ok(Q('#pseudos a:first-of-type')[0] == document.getElementById('pseudos').getElementsByTagName('a')[0], 'found first a element')
    ok(Q('#pseudos a:first-of-type').length == 1, 'found only 1')
  });
  test(':last-of-type', 2, function () {
    var all = document.getElementById('pseudos').getElementsByTagName('div');
    ok(Q('#pseudos div:last-of-type')[0] == all[all.length - 1], 'found last div element')
    ok(Q('#pseudos div:last-of-type').length == 1, 'found only 1')
  });
  test(':only-of-type', 2, function () {
    ok(Q('#pseudos a:only-of-type')[0] == document.getElementById('pseudos').getElementsByTagName('a')[0], 'found the only a element')
    ok(Q('#pseudos a:first-of-type').length == 1, 'found only 1')
  });
  // :target depends on location.hash; reset it afterwards so other tests are
  // unaffected.
  test(':target', 2, function () {
    location.hash = '';
    ok(Q('#pseudos:target').length == 0, '#pseudos is not the target');
    location.hash = '#pseudos';
    ok(Q('#pseudos:target').length == 1, 'now #pseudos is the target');
    location.hash = '';
  });
  test('custom pseudos', 1, function() {
    // :humanoid implemented just for testing purposes
    ok(Q(':humanoid').length == 2, 'selected using custom pseudo')
  });
});

// Suite: non-string arguments to Q() — nodes, window/document, and arrays of
// previous results (flattened and de-duplicated).
sinkSuite('argument types', function (test, ok) {
  test('should be able to pass in nodes as arguments', 5, function () {
    var el = document.getElementById('boosh');
    ok(Q(el)[0] == el, 'Q(el)[0] == el');
    ok(Q(el, 'body')[0] == el, "Q(el, 'body')[0] == el");
    ok(Q(el, document)[0] == el, "Q(el, document)[0] == el");
    ok(Q(window)[0] == window, 'Q(window)[0] == window');
    ok(Q(document)[0] == document, 'Q(document)[0] == document');
  });
  test('should be able to pass in an array of results as arguments', 5, function () {
    var el = document.getElementById('boosh');
    var result = Q([Q('#boosh'), Q(document), Q(window)]);
    ok(result.length == 3, '3 elements in the combined set');
    ok(result[0] == el, "result[0] == el");
    ok(result[1] == document, "result[0] == document");
    ok(result[2] == window, 'result[0] == window');
    ok(Q([Q('#pseudos div.odd'), Q('#pseudos div.even')]).length == 6, 'found all the odd and even divs');
  });
});

// Suite: Q.is(element, selector[, context]) — the standalone matcher.
sinkSuite('is()', function (test, ok) {
  var el = document.getElementById('attr-child-boosh');
  test('simple selectors', 9, function () {
    ok(Q.is(el, 'li'), 'tag');
    ok(Q.is(el, '*'), 'wildcard');
    ok(Q.is(el, '#attr-child-boosh'), '#id');
    ok(Q.is(el, '[attr]'), '[attr]');
    ok(Q.is(el, '[attr=boosh]'), '[attr=val]');
    ok(!Q.is(el, 'div'), 'wrong tag');
    ok(!Q.is(el, '#foo'), 'wrong #id');
    ok(!Q.is(el, '[foo]'), 'wrong [attr]');
    ok(!Q.is(el, '[attr=foo]'), 'wrong [attr=val]');
  });
  test('selector sequences', 2, function () {
    ok(Q.is(el, 'li#attr-child-boosh[attr=boosh]'), 'tag#id[attr=val]');
    ok(!Q.is(el, 'div#attr-child-boosh[attr=boosh]'), 'wrong tag#id[attr=val]');
  });
  // Combinator matching has to walk ancestors/siblings, including backtracking
  // when an intermediate segment is ambiguous.
  test('selector sequences combinators', 7, function () {
    ok(Q.is(el, 'ol li'), 'tag tag');
    ok(Q.is(el, 'ol>li'), 'tag>tag');
    ok(Q.is(el, 'ol>li+li'), 'tab>tag+tag');
    ok(Q.is(el, 'ol#list li#attr-child-boosh[attr=boosh]'), 'tag#id tag#id[attr=val]');
    ok(!Q.is(el, 'ol#list>li#attr-child-boosh[attr=boosh]'), 'wrong tag#id>tag#id[attr=val]');
    ok(Q.is(el, 'ol ol li#attr-child-boosh[attr=boosh]'), 'tag tag tag#id[attr=val]');
    ok(Q.is(Q('#token-four')[0], 'div#fixtures>div a'), 'tag#id>tag tag where ambiguous middle tag requires backtracking');
  });
  test('pseudos', 4, function() {
    //TODO: more tests!
    ok(Q.is(el, 'li:contains(hello)'), 'matching :contains(text)')
    ok(!Q.is(el, 'li:contains(human)'), 'non-matching :contains(text)')
    ok(Q.is(Q('#list>li')[2], ':humanoid'), 'matching custom pseudo')
    ok(!Q.is(Q('#list>li')[1], ':humanoid'), 'non-matching custom pseudo')
  })
  test('context', 2, function () {
    ok(Q.is(el, 'li#attr-child-boosh[attr=boosh]', Q('#list')[0]), 'context');
    ok(!Q.is(el, 'ol#list li#attr-child-boosh[attr=boosh]', Q('#boosh')[0]), 'wrong context');
  });
});
// Suite: querying inside another document (an iframe), which exercises the
// cross-document code paths (ownerDocument lookups, byId fast path, etc.).
sinkSuite('selecting elements in other documents', function (test, ok) {
  var doc = document.getElementById('frame').contentWindow.document
  doc.body.innerHTML =
    '<div id="hsoob">' +
      '<div class="a b">' +
        '<div class="d e sib" test="fg" id="booshTest"><p><span id="spanny"></span></p></div>' +
        '<em nopass="copyrighters" rel="copyright booshrs" test="f g" class="sib"></em>' +
        '<span class="h i a sib"></span>' +
      '</div>' +
      '<p class="odd"></p>' +
    '</div>' +
    '<div id="lonelyHsoob"></div>'
  test('get element by id', 1, function () {
    var result = Q('#hsoob', doc);
    ok(!!result[0], 'found element with id=hsoob');
  });
  test('get elements by class', 6, function () {
    ok(Q('#hsoob .a', doc).length == 2, 'found two elements');
    ok(!!Q('#hsoob div.a', doc)[0], 'found one element');
    ok(Q('#hsoob div', doc).length == 2, 'found two {div} elements');
    ok(!!Q('#hsoob span', doc)[0], 'found one {span} element');
    ok(!!Q('#hsoob div div', doc)[0], 'found a single div');
    // FIX: the selector targets p.odd, but the message claimed "br".
    ok(Q('p.odd', doc).length == 1, 'found single p.odd');
  });
  test('complex selectors', 4, function () {
    // FIX: the first two messages were swapped relative to the expected
    // counts (2 for the ~ combinator, 1 for the + combinator).
    ok(Q('.d ~ .sib', doc).length === 2, 'found two ~ siblings')
    ok(Q('.a .d + .sib', doc).length === 1, 'found one + sibling')
    ok(Q('#hsoob > div > .h', doc).length === 1, 'found span using child selectors')
    ok(Q('.a .d ~ .sib[test="f g"]', doc).length === 1, 'found 1 ~ sibling with test attribute')
  });
  test('byId sub-queries', 3, function () {
    ok(Q('#hsoob #spanny', doc).length == 1, 'found "#id #id" in frame')
    ok(Q('.a #spanny', doc).length == 1, 'found ".class #id" in frame')
    ok(Q('.a #booshTest #spanny', doc).length == 1, 'found ".class #id #id" in frame')
    //ok(Q('> #hsoob', doc).length == 1, 'found "> #id" in frame') --> would be good to support this, needs some tweaking though
  })
  test('byId sub-queries within sub-context', 6, function () {
    ok(Q('#spanny', Q('#hsoob', doc)).length == 1, 'found "#id -> #id" in frame')
    ok(Q('.a #spanny', Q('#hsoob', doc)).length == 1, 'found ".class #id" in frame')
    ok(Q('.a #booshTest #spanny', Q('#hsoob', doc)).length == 1, 'found ".class #id #id" in frame')
    ok(Q('.a > #booshTest', Q('#hsoob', doc)).length == 1, 'found "> .class #id" in frame')
    ok(!Q('#booshTest', Q('#spanny', doc)).length, 'shouldn\'t find #booshTest (ancestor) within #spanny (descendent)')
    ok(!Q('#booshTest', Q('#lonelyHsoob', doc)).length, 'shouldn\'t find #booshTest within #lonelyHsoob (unrelated)')
  })
});

// Kick off the sink test runner.
start();
| mit |
knownasilya/ember-inspector | app/views/list.js | 697 | import Ember from "ember";
import ListItemView from "ember-inspector/views/list-item";
import ListView from "list-view/list-view";
const { computed } = Ember;
// Virtualized list used by the inspector panes; sizes itself to the
// application's reported content height.
export default ListView.extend({
  classNames: ["list-tree"],

  // Height (px) the application controller says is available for content.
  // CONSISTENCY FIX: use the already-destructured `computed` instead of
  // reaching through `Ember.computed` (the file destructures it above).
  contentHeight: computed.alias('controller.controllers.application.contentHeight'),

  // List height = available content height minus the fixed table header.
  height: computed('contentHeight', function() {
    let headerHeight = 31,
        contentHeight = this.get('contentHeight');

    // In testing list-view is created before `contentHeight` is set
    // which will trigger an exception
    if (!contentHeight) {
      return 1;
    }
    return contentHeight - headerHeight;
  }),

  // Fixed row height (px) required by list-view for virtualization.
  rowHeight: 30,
  itemViewClass: ListItemView
});
| mit |
Mozan/Sylius | src/Sylius/Behat/Page/Shop/Checkout/AddressPage.php | 14882 | <?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Sylius\Behat\Page\Shop\Checkout;
use Behat\Mink\Driver\Selenium2Driver;
use Behat\Mink\Element\NodeElement;
use Behat\Mink\Exception\ElementNotFoundException;
use Behat\Mink\Session;
use Sylius\Behat\Page\SymfonyPage;
use Sylius\Component\Core\Factory\AddressFactoryInterface;
use Sylius\Component\Core\Model\AddressInterface;
use Symfony\Component\Routing\RouterInterface;
use Webmozart\Assert\Assert;
class AddressPage extends SymfonyPage implements AddressPageInterface
{
public const TYPE_BILLING = 'billing';
public const TYPE_SHIPPING = 'shipping';
    /**
     * Factory used to build Address models from data entered on the page.
     *
     * @var AddressFactoryInterface
     */
    private $addressFactory;

    /**
     * @param Session $session Mink browser session driving the page
     * @param array $parameters Page parameters passed to the base SymfonyPage
     * @param RouterInterface $router Router used to generate the page URL
     * @param AddressFactoryInterface $addressFactory
     */
    public function __construct(
        Session $session,
        array $parameters,
        RouterInterface $router,
        AddressFactoryInterface $addressFactory
    ) {
        parent::__construct($session, $parameters, $router);

        $this->addressFactory = $addressFactory;
    }
    /**
     * Route of the checkout addressing step this page object represents.
     *
     * {@inheritdoc}
     */
    public function getRouteName()
    {
        return 'sylius_shop_checkout_address';
    }

    /**
     * Toggles the "use a different billing address" switch on.
     *
     * {@inheritdoc}
     */
    public function chooseDifferentBillingAddress()
    {
        $driver = $this->getDriver();
        if ($driver instanceof Selenium2Driver) {
            // Real browsers render a styled toggle whose checkbox input is not
            // directly interactable, so the label is clicked instead.
            $this->getElement('different_billing_address_label')->click();

            return;
        }

        // Headless drivers can check the underlying checkbox directly; guard
        // against double-toggling by asserting it was previously off.
        $billingAddressSwitch = $this->getElement('different_billing_address');
        Assert::false(
            $billingAddressSwitch->isChecked(),
            'Previous state of different billing address switch was true expected to be false'
        );

        $billingAddressSwitch->check();
    }

    /**
     * Waits (up to 5s) for the login-form validation label to become visible,
     * then checks it carries the "Invalid credentials." message.
     *
     * {@inheritdoc}
     */
    public function checkInvalidCredentialsValidation()
    {
        $this->getElement('login_password')->waitFor(5, function () {
            $validationElement = $this->getElement('login_password')->getParent()->find('css', '.red.label');
            if (null === $validationElement) {
                return false;
            }

            return $validationElement->isVisible();
        });

        return $this->checkValidationMessageFor('login_password', 'Invalid credentials.');
    }
/**
* {@inheritdoc}
*
* @throws ElementNotFoundException
*/
public function checkValidationMessageFor($element, $message)
{
$foundElement = $this->getFieldElement($element);
if (null === $foundElement) {
throw new ElementNotFoundException($this->getSession(), 'Validation message', 'css', '.sylius-validation-error');
}
$validationMessage = $foundElement->find('css', '.sylius-validation-error');
if (null === $validationMessage) {
throw new ElementNotFoundException($this->getSession(), 'Validation message', 'css', '.sylius-validation-error');
}
return $message === $validationMessage->getText();
}
/**
* {@inheritdoc}
*/
public function specifyShippingAddress(AddressInterface $shippingAddress)
{
$this->specifyAddress($shippingAddress, self::TYPE_SHIPPING);
}
/**
 * {@inheritdoc}
 *
 * Waits for the shipping province <select> (rendered when the chosen
 * country has predefined provinces) and picks the given option.
 */
public function selectShippingAddressProvince($province)
{
    $this->waitForElement(5, 'shipping_country_province');
    $this->getElement('shipping_country_province')->selectOption($province);
}
/**
 * {@inheritdoc}
 *
 * Fills the billing side of the address form; see specifyAddress().
 */
public function specifyBillingAddress(AddressInterface $billingAddress)
{
    $this->specifyAddress($billingAddress, self::TYPE_BILLING);
}
/**
 * {@inheritdoc}
 *
 * Waits for the billing province <select> (rendered when the chosen
 * country has predefined provinces) and picks the given option.
 */
public function selectBillingAddressProvince($province)
{
    $this->waitForElement(5, 'billing_country_province');
    $this->getElement('billing_country_province')->selectOption($province);
}
/**
 * {@inheritdoc}
 *
 * Types the customer e-mail into the guest-checkout e-mail field.
 */
public function specifyEmail($email)
{
    $this->getElement('customer_email')->setValue($email);
}
/**
 * {@inheritdoc}
 *
 * Splits the full name on the first space only, so multi-word surnames
 * ("Anne van der Berg") stay intact; a missing surname is written as an
 * empty string instead of triggering an undefined-offset notice.
 */
public function specifyShippingAddressFullName(string $fullName)
{
    // explode with a limit of 2 keeps everything after the first space as
    // the last name; array_pad covers single-word input.
    list($firstName, $lastName) = array_pad(explode(' ', $fullName, 2), 2, '');

    $this->getElement('shipping_first_name')->setValue($firstName);
    $this->getElement('shipping_last_name')->setValue($lastName);
}
/**
 * {@inheritdoc}
 *
 * Waits up to 5 seconds for the sign-in button to appear; the returned
 * value reports whether it showed up in time.
 */
public function canSignIn()
{
    return $this->waitForElement(5, 'login_button');
}
/**
 * {@inheritdoc}
 *
 * Submits the inline sign-in form and waits for the password field to
 * disappear, which signals that the login round-trip finished.
 */
public function signIn()
{
    $this->waitForElement(5, 'login_button');

    try {
        $this->getElement('login_button')->press();
    } catch (ElementNotFoundException $pressFailure) {
        // Some drivers do not expose the element as a pressable button;
        // fall back to a plain click on the same node.
        $this->getElement('login_button')->click();
    }

    $this->waitForLoginAction();
}
/**
 * {@inheritdoc}
 *
 * Waits (up to 5s) for the password input to become visible — it is shown
 * asynchronously — then types the password into it.
 */
public function specifyPassword($password)
{
    $this->getDocument()->waitFor(5, function () {
        return $this->getElement('login_password')->isVisible();
    });
    $this->getElement('login_password')->setValue($password);
}
/**
 * {@inheritdoc}
 *
 * Reads the rendered subtotal cell for the given item. The item name is
 * turned into the slug used by the template: spaces become dashes, escaped
 * quotes are dropped, and the result is lower-cased.
 */
public function getItemSubtotal($itemName)
{
    $itemSlug = strtolower(str_replace([' ', '\"'], ['-', ''], $itemName));
    $cellSelector = sprintf('#sylius-item-%s-subtotal', $itemSlug);

    return $this->getElement('checkout_subtotal')->find('css', $cellSelector)->getText();
}
/**
 * {@inheritdoc}
 *
 * Returns the visible label of the currently selected shipping country.
 * NOTE(review): "option:selected" is not a standard CSS pseudo-class —
 * presumably supported by the driver's selector engine; verify.
 */
public function getShippingAddressCountry()
{
    return $this->getElement('shipping_country')->find('css', 'option:selected')->getText();
}
// Submits the addressing step and advances the checkout.
public function nextStep()
{
    $this->getElement('next_step')->press();
}
// Follows the "Back to store" link, leaving the checkout.
public function backToStore()
{
    $this->getDocument()->clickLink('Back to store');
}
/**
 * {@inheritdoc}
 *
 * Types into the free-text billing provinceName input (countries without
 * predefined provinces).
 */
public function specifyBillingAddressProvince($provinceName)
{
    $this->waitForElement(5, 'billing_province');
    $this->getElement('billing_province')->setValue($provinceName);
}
/**
 * {@inheritdoc}
 *
 * Types into the free-text shipping provinceName input (countries without
 * predefined provinces).
 */
public function specifyShippingAddressProvince($provinceName)
{
    $this->waitForElement(5, 'shipping_province');
    $this->getElement('shipping_province')->setValue($provinceName);
}
/**
 * {@inheritdoc}
 *
 * Reports whether the free-text shipping province input appeared within 5s.
 */
public function hasShippingAddressInput()
{
    return $this->waitForElement(5, 'shipping_province');
}
/**
 * {@inheritdoc}
 *
 * Reports whether the free-text billing province input appeared within 5s.
 */
public function hasBillingAddressInput()
{
    return $this->waitForElement(5, 'billing_province');
}
/**
 * {@inheritdoc}
 *
 * Opens the shipping address-book dropdown and clicks the entry whose
 * data-id matches the given address.
 *
 * @throws ElementNotFoundException when the entry never appears
 */
public function selectShippingAddressFromAddressBook(AddressInterface $address)
{
    $this->waitForElement(2, sprintf('%s_province', self::TYPE_SHIPPING));

    $entrySelector = sprintf('.item[data-id="%s"]', $address->getId());

    $dropdown = $this->getElement('shipping_address_book');
    $dropdown->click();

    // Poll until the dropdown has rendered the matching entry.
    $entry = $dropdown->waitFor(5, function () use ($dropdown, $entrySelector) {
        return $dropdown->find('css', $entrySelector);
    });

    if (null === $entry) {
        throw new ElementNotFoundException($this->getDriver(), 'option', 'css', $entrySelector);
    }

    $entry->click();
}
/**
 * {@inheritdoc}
 *
 * Opens the billing address-book dropdown and clicks the entry whose
 * data-id matches the given address.
 *
 * @throws ElementNotFoundException when the entry never appears
 */
public function selectBillingAddressFromAddressBook(AddressInterface $address)
{
    $this->waitForElement(2, sprintf('%s_province', self::TYPE_BILLING));

    $entrySelector = sprintf('.item[data-id="%s"]', $address->getId());

    $dropdown = $this->getElement('billing_address_book');
    $dropdown->click();

    // Poll until the dropdown has rendered the matching entry.
    $entry = $dropdown->waitFor(5, function () use ($dropdown, $entrySelector) {
        return $dropdown->find('css', $entrySelector);
    });

    if (null === $entry) {
        throw new ElementNotFoundException($this->getDriver(), 'option', 'css', $entrySelector);
    }

    $entry->click();
}
/**
 * {@inheritdoc}
 *
 * Builds an Address object from the current shipping form values.
 */
public function getPreFilledShippingAddress()
{
    return $this->getPreFilledAddress(self::TYPE_SHIPPING);
}
/**
 * {@inheritdoc}
 *
 * Builds an Address object from the current billing form values.
 */
public function getPreFilledBillingAddress()
{
    return $this->getPreFilledAddress(self::TYPE_BILLING);
}
/**
 * {@inheritdoc}
 *
 * Maps logical element names to CSS selectors. Billing/shipping entries
 * follow the "<type>_<field>" naming that specifyAddress() and
 * getPreFilledAddress() rely on when building names with sprintf().
 */
protected function getDefinedElements()
{
    return array_merge(parent::getDefinedElements(), [
        'billing_address_book' => '#sylius-billing-address .ui.dropdown',
        'billing_first_name' => '#sylius_checkout_address_billingAddress_firstName',
        'billing_last_name' => '#sylius_checkout_address_billingAddress_lastName',
        'billing_street' => '#sylius_checkout_address_billingAddress_street',
        'billing_city' => '#sylius_checkout_address_billingAddress_city',
        'billing_country' => '#sylius_checkout_address_billingAddress_countryCode',
        'billing_country_province' => '[name="sylius_checkout_address[billingAddress][provinceCode]"]',
        'billing_postcode' => '#sylius_checkout_address_billingAddress_postcode',
        'billing_province' => '[name="sylius_checkout_address[billingAddress][provinceName]"]',
        'checkout_subtotal' => '#sylius-checkout-subtotal',
        'customer_email' => '#sylius_checkout_address_customer_email',
        'different_billing_address' => '#sylius_checkout_address_differentBillingAddress',
        'different_billing_address_label' => '#sylius_checkout_address_differentBillingAddress ~ label',
        'login_button' => '#sylius-api-login-submit',
        'login_password' => 'input[type=\'password\']',
        'next_step' => '#next-step',
        'shipping_address_book' => '#sylius-shipping-address .ui.dropdown',
        'shipping_city' => '#sylius_checkout_address_shippingAddress_city',
        'shipping_country' => '#sylius_checkout_address_shippingAddress_countryCode',
        'shipping_country_province' => '[name="sylius_checkout_address[shippingAddress][provinceCode]"]',
        'shipping_first_name' => '#sylius_checkout_address_shippingAddress_firstName',
        'shipping_last_name' => '#sylius_checkout_address_shippingAddress_lastName',
        'shipping_postcode' => '#sylius_checkout_address_shippingAddress_postcode',
        'shipping_province' => '[name="sylius_checkout_address[shippingAddress][provinceName]"]',
        'shipping_street' => '#sylius_checkout_address_shippingAddress_street',
    ]);
}
/**
 * Builds a new Address from the values currently present in the billing or
 * shipping form fields.
 *
 * @param string $type one of self::TYPE_BILLING / self::TYPE_SHIPPING
 *
 * @return AddressInterface
 */
private function getPreFilledAddress($type)
{
    $this->assertAddressType($type);

    // Shorthand for reading the value of the "<type>_<field>" form element.
    $valueOf = function ($field) use ($type) {
        return $this->getElement(sprintf('%s_%s', $type, $field))->getValue();
    };

    /** @var AddressInterface $address */
    $address = $this->addressFactory->createNew();

    $address->setFirstName($valueOf('first_name'));
    $address->setLastName($valueOf('last_name'));
    $address->setStreet($valueOf('street'));
    $address->setCountryCode($valueOf('country'));
    $address->setCity($valueOf('city'));
    $address->setPostcode($valueOf('postcode'));

    $this->waitForElement(5, sprintf('%s_province', $type));
    try {
        $address->setProvinceName($valueOf('province'));
    } catch (ElementNotFoundException $exception) {
        // Countries with predefined provinces render a select (provinceCode)
        // instead of the free-text province input.
        $address->setProvinceCode($valueOf('country_province'));
    }

    return $address;
}
/**
 * Fills the billing or shipping address form with the given address.
 *
 * @param AddressInterface $address
 * @param string $type one of self::TYPE_BILLING / self::TYPE_SHIPPING
 */
private function specifyAddress(AddressInterface $address, $type)
{
    $this->assertAddressType($type);

    // Shorthand for resolving the "<type>_<field>" form element.
    $fieldFor = function ($field) use ($type) {
        return $this->getElement(sprintf('%s_%s', $type, $field));
    };

    $fieldFor('first_name')->setValue($address->getFirstName());
    $fieldFor('last_name')->setValue($address->getLastName());
    $fieldFor('street')->setValue($address->getStreet());
    $fieldFor('country')->selectOption($address->getCountryCode() ?: 'Select');
    $fieldFor('city')->setValue($address->getCity());
    $fieldFor('postcode')->setValue($address->getPostcode());

    if (null !== $address->getProvinceName()) {
        // Free-text province input.
        $this->waitForElement(5, sprintf('%s_province', $type));
        $fieldFor('province')->setValue($address->getProvinceName());
    }

    if (null !== $address->getProvinceCode()) {
        // Predefined-province select.
        $this->waitForElement(5, sprintf('%s_country_province', $type));
        $fieldFor('country_province')->selectOption($address->getProvinceCode());
    }
}
/**
 * Returns the closest ancestor of the named element that carries the
 * "field" CSS class, walking up the DOM from the element itself.
 *
 * Returns null when no ancestor has the class (getParent() eventually
 * yields null at the document root).
 *
 * @param string $element
 *
 * @return NodeElement|null
 *
 * @throws ElementNotFoundException when the named element itself does not exist
 */
private function getFieldElement($element)
{
    $element = $this->getElement($element);
    while (null !== $element && !$element->hasClass('field')) {
        $element = $element->getParent();
    }
    return $element;
}
/**
 * Waits (up to 5s) for the password field to disappear, which signals
 * that the sign-in round-trip has completed.
 *
 * @return bool
 */
private function waitForLoginAction()
{
    return $this->getDocument()->waitFor(5, function () {
        return !$this->hasElement('login_password');
    });
}
/**
 * Polls until the named element exists on the page or the timeout elapses.
 *
 * @param int $timeout timeout in seconds
 * @param string $elementName logical element name from getDefinedElements()
 *
 * @return bool true when the element appeared within the timeout
 */
private function waitForElement($timeout, $elementName)
{
    return $this->getDocument()->waitFor($timeout, function () use ($elementName) {
        return $this->hasElement($elementName);
    });
}
/**
 * Guards helpers that build element names from $type: only the billing and
 * shipping type constants are valid.
 *
 * @param string $type
 */
private function assertAddressType($type)
{
    Assert::oneOf(
        $type,
        [self::TYPE_BILLING, self::TYPE_SHIPPING],
        sprintf('There are only two available types %s, %s. %s given', self::TYPE_BILLING, self::TYPE_SHIPPING, $type)
    );
}
}
| mit |
diimpp/Sylius | src/Sylius/Component/Core/Locale/Context/StorageBasedLocaleContext.php | 2109 | <?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace Sylius\Component\Core\Locale\Context;
use Sylius\Component\Channel\Context\ChannelContextInterface;
use Sylius\Component\Channel\Context\ChannelNotFoundException;
use Sylius\Component\Core\Locale\LocaleStorageInterface;
use Sylius\Component\Locale\Context\LocaleContextInterface;
use Sylius\Component\Locale\Context\LocaleNotFoundException;
use Sylius\Component\Locale\Provider\LocaleProviderInterface;
final class StorageBasedLocaleContext implements LocaleContextInterface
{
    /**
     * @var ChannelContextInterface
     */
    private $channelContext;

    /**
     * @var LocaleStorageInterface
     */
    private $localeStorage;

    /**
     * @var LocaleProviderInterface
     */
    private $localeProvider;

    /**
     * @param ChannelContextInterface $channelContext
     * @param LocaleStorageInterface $localeStorage
     * @param LocaleProviderInterface $localeProvider
     */
    public function __construct(
        ChannelContextInterface $channelContext,
        LocaleStorageInterface $localeStorage,
        LocaleProviderInterface $localeProvider
    ) {
        $this->channelContext = $channelContext;
        $this->localeStorage = $localeStorage;
        $this->localeProvider = $localeProvider;
    }

    /**
     * {@inheritdoc}
     *
     * Resolves the locale persisted for the current channel and validates it
     * against the provider's list of available locales.
     */
    public function getLocaleCode(): string
    {
        $availableLocalesCodes = $this->localeProvider->getAvailableLocalesCodes();

        try {
            $storedLocaleCode = $this->localeStorage->get($this->channelContext->getChannel());
        } catch (ChannelNotFoundException $exception) {
            // Without a channel there is no storage key to read from.
            throw new LocaleNotFoundException(null, $exception);
        }

        // A stale stored code (e.g. a locale disabled since it was stored)
        // must not leak out of the context.
        if (!in_array($storedLocaleCode, $availableLocalesCodes, true)) {
            throw LocaleNotFoundException::notAvailable($storedLocaleCode, $availableLocalesCodes);
        }

        return $storedLocaleCode;
    }
}
| mit |
fcool/flow-development-collection | Neos.Utility.Unicode/Classes/Functions.php | 8249 | <?php
namespace Neos\Utility\Unicode;
/*
* This file is part of the Neos.Utility.Unicode package.
*
* (c) Contributors of the Neos Project - www.neos.io
*
* This package is Open Source Software. For the full copyright and license
* information, please view the LICENSE file which was distributed with this
* source code.
*/
/**
* A class with UTF-8 string functions, some inspired by what might be in some
* future PHP version...
*
* @api
*/
abstract class Functions
{
    /**
     * Converts the first character of each word to uppercase and all remaining characters
     * to lowercase.
     *
     * @param string $string The string to convert
     * @return string The converted string
     * @api
     */
    public static function strtotitle(string $string): string
    {
        $result = '';
        // PREG_SPLIT_DELIM_CAPTURE keeps the whitespace delimiters in the
        // result so the original spacing is reproduced verbatim.
        $splitIntoLowerCaseWords = preg_split("/([\n\r\t ])/", self::strtolower($string), -1, PREG_SPLIT_DELIM_CAPTURE);
        foreach ($splitIntoLowerCaseWords as $delimiterOrValue) {
            $result .= self::strtoupper(self::substr($delimiterOrValue, 0, 1)) . self::substr($delimiterOrValue, 1);
        }
        return $result;
    }

    /**
     * Unicode variant of substr()
     *
     * @param string $string The string to crop
     * @param integer $start Position of the left boundary
     * @param integer|null $length (optional) Length of the returned string
     * @return string The processed string
     * @api
     */
    public static function substr(string $string, int $start, ?int $length = null)
    {
        // Explicitly nullable $length: implicitly-nullable parameter
        // declarations are deprecated as of PHP 8.4.
        if ($length === 0) {
            return '';
        }

        // Cannot omit $length, when specifying charset
        if ($length === null) {
            // save internal encoding
            $enc = mb_internal_encoding();
            mb_internal_encoding('UTF-8');
            $str = mb_substr($string, $start);
            // restore internal encoding
            mb_internal_encoding($enc);

            return $str;
        }

        return mb_substr($string, $start, $length, 'UTF-8');
    }

    /**
     * Unicode variant of strtoupper()
     *
     * @param string $string The string to uppercase
     * @return string The processed string
     * @api
     */
    public static function strtoupper(string $string): string
    {
        // "ß" has no single uppercase code point; map it to "SS" explicitly.
        return str_replace('ß', 'SS', mb_strtoupper($string, 'UTF-8'));
    }

    /**
     * Unicode variant of strtolower()
     *
     * @param string $string The string to lowercase
     * @return string The processed string
     * @api
     */
    public static function strtolower(string $string): string
    {
        return mb_strtolower($string, 'UTF-8');
    }

    /**
     * Unicode variant of strlen() - assumes that the string is a Unicode string, not binary
     *
     * @param string $string The string to count the characters of
     * @return integer The number of characters
     * @api
     */
    public static function strlen(string $string): int
    {
        return mb_strlen($string, 'UTF-8');
    }

    /**
     * Unicode variant of ucfirst() - assumes that the string is a Unicode string, not binary
     *
     * @param string $string The string whose first letter should be uppercased
     * @return string The same string, first character uppercased
     * @api
     */
    public static function ucfirst(string $string): string
    {
        return self::strtoupper(self::substr($string, 0, 1)) . self::substr($string, 1);
    }

    /**
     * Unicode variant of lcfirst() - assumes that the string is a Unicode string, not binary
     *
     * @param string $string The string whose first letter should be lowercased
     * @return string The same string, first character lowercased
     * @api
     */
    public static function lcfirst(string $string): string
    {
        return self::strtolower(self::substr($string, 0, 1)) . self::substr($string, 1);
    }

    /**
     * Unicode variant of strpos() - assumes that the string is a Unicode string, not binary
     *
     * @param string $haystack UTF-8 string to search in
     * @param string $needle UTF-8 string to search for
     * @param integer $offset Position to start the search
     * @return integer The character position
     * @api
     */
    public static function strpos(string $haystack, string $needle, int $offset = 0)
    {
        return mb_strpos($haystack, $needle, $offset, 'UTF-8');
    }

    /**
     * Unicode variant of pathinfo()
     * pathinfo() function is not unicode-friendly
     * if setlocale is not set. It's sufficient to set it
     * to any UTF-8 locale to correctly handle unicode strings.
     * This wrapper function temporarily sets locale to 'en_US.UTF-8'
     * and then restores original locale.
     * It's not necessary to use this function in cases,
     * where only file extension is determined, as it's
     * hard to imagine a unicode file extension.
     * @see http://www.php.net/manual/en/function.pathinfo.php
     *
     * @param string $path
     * @param integer|null $options Optional, one of PATHINFO_DIRNAME, PATHINFO_BASENAME, PATHINFO_EXTENSION or PATHINFO_FILENAME.
     * @return string|array
     * @api
     */
    public static function pathinfo(string $path, ?int $options = null)
    {
        $currentLocale = setlocale(LC_CTYPE, 0);
        // Before we have a setting for setlocale, this should suffice for pathinfo
        // to work correctly on Unicode strings
        setlocale(LC_CTYPE, 'en_US.UTF-8');
        // Strict null check: the PATHINFO_* constants are non-zero, but a
        // loose "== null" would also (wrongly) match a literal 0.
        $pathinfo = $options === null ? pathinfo($path) : pathinfo($path, $options);
        setlocale(LC_CTYPE, $currentLocale);
        return $pathinfo;
    }

    /**
     * Parse a URL and return its components, UTF-8 safe
     *
     * @param string $url The URL to parse. Invalid characters are replaced by _.
     * @param integer $component Specify one of PHP_URL_SCHEME, PHP_URL_HOST, PHP_URL_PORT, PHP_URL_USER, PHP_URL_PASS, PHP_URL_PATH, PHP_URL_QUERY or PHP_URL_FRAGMENT to retrieve just a specific URL component as a string (except when PHP_URL_PORT is given, in which case the return value will be an integer).
     * @return mixed
     */
    public static function parse_url(string $url, int $component = -1)
    {
        // the host and port must be used as is, to allow IPv6 syntax, e.g.: [3b00:f59:1008::212:183:20]:8080
        // thus we parse here, before url-encoding
        $componentsFromUrl = parse_url($url);
        if ($componentsFromUrl === false) {
            return false;
        }
        // Percent-encode everything except URL structure characters so that
        // the built-in parse_url() can cope with multi-byte input.
        $encodedUrl = preg_replace_callback('%[^:@/?#&=\.]+%usD', function ($matches) {
            return urlencode($matches[0]);
        }, $url);
        $components = parse_url($encodedUrl);
        if ($components === false) {
            return false;
        }
        foreach ($components as &$currentComponent) {
            $currentComponent = urldecode((string)$currentComponent);
        }
        // the host and port must be used as is, to allow IPv6 syntax, e.g.: [3b00:f59:1008::212:183:20]:8080
        if (array_key_exists('host', $componentsFromUrl)) {
            $components['host'] = $componentsFromUrl['host'];
        }
        if (array_key_exists('port', $componentsFromUrl)) {
            $components['port'] = (integer)$componentsFromUrl['port'];
        } else {
            unset($components['port']);
        }
        switch ($component) {
            case -1:
                return $components;
            case PHP_URL_SCHEME:
                return $components['scheme'] ?? null;
            case PHP_URL_HOST:
                return $components['host'] ?? null;
            case PHP_URL_PORT:
                return $components['port'] ?? null;
            case PHP_URL_USER:
                return $components['user'] ?? null;
            case PHP_URL_PASS:
                return $components['pass'] ?? null;
            case PHP_URL_PATH:
                return $components['path'] ?? null;
            case PHP_URL_QUERY:
                return $components['query'] ?? null;
            case PHP_URL_FRAGMENT:
                return $components['fragment'] ?? null;
            default:
                throw new \InvalidArgumentException('Invalid component requested for URL parsing.', 1406280743);
        }
    }
}
| mit |
jslhs/Windows-Driver-Frameworks | src/framework/shared/irphandlers/pnp/km/fxpkgfdokm.cpp | 15564 | /*++
Copyright (c) Microsoft Corporation
Module Name:
FxPkgFdo.cpp
Abstract:
This module implements the pnp/power package for the driver
framework.
Author:
Environment:
Kernel mode only
Revision History:
--*/
#include "..\pnppriv.hpp"
#include <initguid.h>
#include <wdmguid.h>
#if defined(EVENT_TRACING)
// Tracing support
extern "C" {
#include "FxPkgFdoKm.tmh"
}
#endif
_Must_inspect_result_
NTSTATUS
FxPkgFdo::PnpFilterResourceRequirements(
    __inout FxIrp *Irp
    )
/*++

Routine Description:
    This method is invoked in response to a Pnp FilterResourceRequirements IRP.

    Flow: (1) if the driver registered EvtDeviceRemoveAddedResources, wrap
    the WDM requirements list in a framework FxIoResReqList object, invoke
    the callback, and if the list changed write a freshly allocated WDM list
    back into the IRP; (2) send the IRP down the stack synchronously;
    (3) on the way back up, let the framework filter first and then invoke
    the driver's add-resources callback the same way; (4) complete the IRP.

Arguments:
    Irp - a pointer to the FxIrp

Returns:
    NTSTATUS

--*/
{
    PIO_RESOURCE_REQUIREMENTS_LIST pWdmRequirementsList;
    PIO_RESOURCE_REQUIREMENTS_LIST pNewWdmList;
    NTSTATUS status;
    FxIoResReqList *pIoResReqList;
    WDFIORESREQLIST reqlist;

    DoTraceLevelMessage(GetDriverGlobals(), TRACE_LEVEL_VERBOSE, TRACINGPNP,
                        "Entering FilterResourceRequirements handler");

    // Down path: only runs when the driver registered a remove-resources callback.
    if (m_DeviceFilterRemoveResourceRequirements.m_Method != NULL) {
        pWdmRequirementsList = (PIO_RESOURCE_REQUIREMENTS_LIST) Irp->GetInformation();

        status = STATUS_INSUFFICIENT_RESOURCES;

        pIoResReqList = FxIoResReqList::_CreateFromWdmList(GetDriverGlobals(),
                                                           pWdmRequirementsList,
                                                           FxResourceAllAccessAllowed);

        if (pIoResReqList != NULL) {
            status = pIoResReqList->Commit(NULL, (PWDFOBJECT) &reqlist);

            // Commit should never fail because we own all object state
            ASSERT(NT_SUCCESS(status));
            UNREFERENCED_PARAMETER(status);

            status = m_DeviceFilterRemoveResourceRequirements.Invoke(
                m_Device->GetHandle(), pIoResReqList->GetHandle());

            // Only rebuild the WDM list if the callback actually changed it.
            if (NT_SUCCESS(status) && pIoResReqList->IsChanged()) {
                pNewWdmList = pIoResReqList->CreateWdmList();

                if (pNewWdmList != NULL) {
                    //
                    // List could be missing previously
                    //
                    if (pWdmRequirementsList != NULL) {
                        //
                        // Propagate BusNumber to our new list.
                        //
                        pNewWdmList->BusNumber = pWdmRequirementsList->BusNumber;
                        MxMemory::MxFreePool(pWdmRequirementsList);
                    }

                    Irp->SetInformation((ULONG_PTR) pNewWdmList);
                }
                else {
                    status = STATUS_INSUFFICIENT_RESOURCES;
                }
            }

            //
            // No matter what, free the resource requirements list object.  If
            // we need another one when adding resources, another one will be
            // allocated.
            //
            pIoResReqList->DeleteObject();
            pIoResReqList = NULL;
        }
    }
    else {
        //
        // No filtering on the way down, set status to STATUS_SUCCESS so we
        // send the irp down the stack.
        //
        status = STATUS_SUCCESS;
    }

    if (NT_SUCCESS(status)) {
        status = SendIrpSynchronously(Irp);
    }

    //
    // If we do not handle the IRP on the way down and the PDO does not handle
    // the IRP, we can have a status of STATUS_NOT_SUPPORTED.  We still want to
    // process the irp in this state.
    //
    if (NT_SUCCESS(status) || status == STATUS_NOT_SUPPORTED) {
        NTSTATUS filterStatus;

        //
        // Give the Framework objects a pass at the list.
        //
        filterStatus = FxPkgPnp::FilterResourceRequirements(
            (PIO_RESOURCE_REQUIREMENTS_LIST*)(&Irp->GetIrp()->IoStatus.Information)
            );

        if (!NT_SUCCESS(filterStatus)) {
            status = filterStatus;
        }
        else if (m_DeviceFilterAddResourceRequirements.m_Method != NULL) {
            //
            // Now give the driver a shot at it.
            //
            pWdmRequirementsList = (PIO_RESOURCE_REQUIREMENTS_LIST)
                Irp->GetInformation();

            pIoResReqList = FxIoResReqList::_CreateFromWdmList(
                GetDriverGlobals(), pWdmRequirementsList, FxResourceAllAccessAllowed);

            if (pIoResReqList != NULL) {
                status = pIoResReqList->Commit(NULL, (PWDFOBJECT) &reqlist);
                UNREFERENCED_PARAMETER(status);

                //
                // Since we absolutely control the lifetime of pIoResReqList, this
                // should never fail
                //
                ASSERT(NT_SUCCESS(status));

                status = m_DeviceFilterAddResourceRequirements.Invoke(
                    m_Device->GetHandle(), reqlist);

                //
                // It is possible the child driver modified the resource list,
                // and if so we need to update the requirements list.
                //
                if (NT_SUCCESS(status) && pIoResReqList->IsChanged()) {
                    pNewWdmList = pIoResReqList->CreateWdmList();

                    if (pNewWdmList != NULL) {
                        //
                        // List could be missing previously
                        //
                        if (pWdmRequirementsList != NULL) {
                            //
                            // Propagate BusNumber to our new list.
                            //
                            pNewWdmList->BusNumber = pWdmRequirementsList->BusNumber;
                            // NOTE(review): the down path frees the old list with
                            // MxMemory::MxFreePool while this path uses ExFreePool —
                            // presumably equivalent in kernel mode, but confirm.
                            ExFreePool(pWdmRequirementsList);
                        }

                        Irp->SetInformation((ULONG_PTR) pNewWdmList);
                    }
                    else {
                        status = STATUS_INSUFFICIENT_RESOURCES;
                    }
                }

                pIoResReqList->DeleteObject();
                pIoResReqList = NULL;
            }
            else {
                status = STATUS_INSUFFICIENT_RESOURCES;
            }
        }
    }

    CompletePnpRequest(Irp, status);

    DoTraceLevelMessage(GetDriverGlobals(), TRACE_LEVEL_VERBOSE, TRACINGPNP,
                        "Exiting FilterResourceRequirements handler, %!STATUS!",
                        status);

    return status;
}
_Must_inspect_result_
NTSTATUS
FxPkgFdo::_PnpQueryCapabilitiesCompletionRoutine(
    __in    MdDeviceObject DeviceObject,
    __inout MdIrp Irp,
    __inout PVOID Context
    )
// Stub: KMDF handles query-capabilities synchronously (see
// PnpQueryCapabilities below), so this completion routine is never
// registered; it asserts and fails if ever reached.
{
    UNREFERENCED_PARAMETER(DeviceObject);
    UNREFERENCED_PARAMETER(Irp);
    UNREFERENCED_PARAMETER(Context);

    ASSERTMSG("Not implemented for KMDF\n", FALSE);

    return STATUS_NOT_IMPLEMENTED;
}
_Must_inspect_result_
NTSTATUS
FxPkgFdo::PnpQueryCapabilities(
    __inout FxIrp *Irp
    )
/*++

Routine Description:
    This method is invoked in response to a Pnp QueryCapabilities IRP.

    Pre-processes the capabilities, forwards the IRP down the stack
    synchronously, then post-processes the values the bus driver filled in.

Arguments:
    Irp - a pointer to the FxIrp

Returns:
    NTSTATUS

--*/
{
    NTSTATUS status;

    HandleQueryCapabilities(Irp);

    status = SendIrpSynchronously(Irp);

    //
    // Now that the IRP has returned to us, we modify what the bus driver
    // set up.
    //
    if (NT_SUCCESS(status)) {
        HandleQueryCapabilitiesCompletion(Irp);
    }

    CompletePnpRequest(Irp, status);

    return status;
}
_Must_inspect_result_
NTSTATUS
FxPkgFdo::_PnpQueryPnpDeviceStateCompletionRoutine(
    __in    MdDeviceObject DeviceObject,
    __inout MdIrp Irp,
    __inout PVOID Context
    )
// Stub: KMDF handles query-pnp-device-state synchronously (see
// _PnpQueryPnpDeviceState below), so this completion routine is never
// registered; it asserts and fails if ever reached.
{
    UNREFERENCED_PARAMETER(DeviceObject);
    UNREFERENCED_PARAMETER(Irp);
    UNREFERENCED_PARAMETER(Context);

    ASSERTMSG("Not implemented for KMDF\n", FALSE);

    return STATUS_NOT_IMPLEMENTED;
}
_Must_inspect_result_
NTSTATUS
FxPkgFdo::_PnpQueryPnpDeviceState(
    __inout FxPkgPnp* This,
    __inout FxIrp *Irp
    )
/*++

Routine Description:
    This method is invoked in response to a Pnp QueryPnpDeviceState IRP.

    Sends the IRP down first so lower drivers can seed the state bits, then
    merges in this device's own state on the way back up.

Arguments:
    This - the FxPkgFdo instance (static dispatch thunk)

    Irp - a pointer to the FxIrp

Returns:
    NTSTATUS

--*/
{
    FxPkgFdo* pThis;
    NTSTATUS status;

    pThis = (FxPkgFdo*) This;

    status = pThis->SendIrpSynchronously(Irp);

    if (status == STATUS_NOT_SUPPORTED) {
        //
        // Morph into a successful code so that we process the request
        //
        status = STATUS_SUCCESS;
        Irp->SetStatus(status);
    }

    if (NT_SUCCESS(status)) {
        pThis->HandleQueryPnpDeviceStateCompletion(Irp);
    }
    else {
        DoTraceLevelMessage(
            This->GetDriverGlobals(), TRACE_LEVEL_ERROR, TRACINGPNP,
            "Lower stack returned error for query pnp device state, %!STATUS!",
            status);
    }

    //
    // Since we already sent the request down the stack, we must complete it
    // now.
    //
    return pThis->CompletePnpRequest(Irp, status);
}
_Must_inspect_result_
NTSTATUS
FxPkgFdo::Initialize(
    __in PWDFDEVICE_INIT DeviceInit
    )
/*++

Routine Description:
    After creating a FxPkgFdo, the driver writer will initialize it by passing
    a set of driver callbacks that allow the driver writer to customize the
    behavior when handling certain IRPs.

    This is the place to do any initialization that might fail.

    Steps: base FxPkgPnp initialization, enumeration-info allocation, then
    creation and commit of the static child list used for statically
    enumerated PDOs.

Arguments:
    DeviceInit - device initialization parameters from the driver

Returns:
    NTSTATUS

--*/
{
    PFX_DRIVER_GLOBALS pGlobals;
    WDF_CHILD_LIST_CONFIG config;
    size_t totalDescriptionSize = 0;
    WDFCHILDLIST hList;
    NTSTATUS status;

    pGlobals = GetDriverGlobals();

    status = FxPkgPnp::Initialize(DeviceInit);
    if (!NT_SUCCESS(status)) {
        return status;
    }

    status = AllocateEnumInfo();
    if (!NT_SUCCESS(status)) {
        return status;
    }

    #pragma prefast(suppress: __WARNING_PASSING_FUNCTION_UNEXPECTED_NULL, "Static child lists do not use the EvtChildListCreateDevice callback")
    WDF_CHILD_LIST_CONFIG_INIT(&config,
                               sizeof(FxStaticChildDescription),
                               NULL);

    status = FxChildList::_ComputeTotalDescriptionSize(pGlobals,
                                                       &config,
                                                       &totalDescriptionSize);
    if (!NT_SUCCESS(status)) {
        return status;
    }

    status = FxChildList::_CreateAndInit(&m_StaticDeviceList,
                                         pGlobals,
                                         WDF_NO_OBJECT_ATTRIBUTES,
                                         totalDescriptionSize,
                                         m_Device,
                                         &config,
                                         TRUE);
    if (!NT_SUCCESS(status)) {
        return status;
    }

    status = m_StaticDeviceList->Commit(WDF_NO_OBJECT_ATTRIBUTES,
                                        (WDFOBJECT*) &hList,
                                        m_Device);
    if (!NT_SUCCESS(status)) {
        // Commit failed: tear down the half-created list and clear the
        // member so later code cannot touch a stale pointer.
        m_StaticDeviceList->DeleteFromFailedCreate();
        m_StaticDeviceList = NULL;

        return status;
    }

    //
    // This will be released in the destructor
    //
    m_StaticDeviceList->ADDREF(this);

    return status;
}
_Must_inspect_result_
NTSTATUS
FxPkgFdo::QueryForDsfInterface(
    VOID
    )
/*++

Routine Description:
    Queries the attached device (only — not the top of the stack) for the
    WDF DSF (Device Simulation Framework) interface and, when present,
    captures its interrupt-connection and interrupt-active-reporting
    function pointers for later use.

    Absence of the interface (STATUS_NOT_SUPPORTED) is not an error.

Returns:
    NTSTATUS

--*/
{
    WDF_DSF_INTERFACE dsfInterface;
    NTSTATUS status;
    BOOLEAN derefQI = FALSE;

    RtlZeroMemory(&dsfInterface, sizeof(dsfInterface));

    //
    // Since there are some stacks that are not PnP re-entrant (like USBHUB,
    // xpsp2), we specify that the QI goes only to our attached device and
    // not to the top of the stack as a normal QI irp would.
    //
    // We also do this a preventative measure for other stacks we don't know
    // about internally and do not have access to when testing.
    //
    status = m_Device->QueryForInterface(&GUID_WDF_DSF_INTERFACE,
                                         (PINTERFACE) &dsfInterface,
                                         sizeof(dsfInterface),
                                         WDM_DSF_INTERFACE_V1_0,
                                         NULL,
                                         m_Device->GetAttachedDevice()
                                         );

    if (status == STATUS_NOT_SUPPORTED) {
        DoTraceLevelMessage(
            GetDriverGlobals(), TRACE_LEVEL_WARNING, TRACINGPNP,
            "Lower stack does not have a DSF interface");
        // Not an error: most stacks are not DSF-simulated.
        status = STATUS_SUCCESS;
        goto Done;
    }

    if (!NT_SUCCESS(status)) {
        DoTraceLevelMessage(
            GetDriverGlobals(), TRACE_LEVEL_ERROR, TRACINGPNP,
            "Lower stack returned an error for query DSF interface, %!STATUS!",
            status);
        goto Done;
    }

    // The interface was granted a reference; remember to release it on exit.
    derefQI = TRUE;

    //
    // Basic run time checks.
    //
    if (dsfInterface.Interface.Version != WDM_DSF_INTERFACE_V1_0) {
        status = STATUS_REVISION_MISMATCH;
        DoTraceLevelMessage(
            GetDriverGlobals(), TRACE_LEVEL_ERROR, TRACINGPNP,
            "Lower DSF stack supports v(%x), requested v(%x), %!STATUS!",
            dsfInterface.Interface.Version,
            WDM_DSF_INTERFACE_V1_0,
            status);
        goto Done;
    }

    //
    // Ex functions should be both set or cleared.
    // Active/Inactive functions should be both set or cleared.
    // Ex function must be present.
    // Note: !!(ptr) expression below converts ptr value to true/false value.
    //       I.e., ptr==NULL to false and ptr!=NULL to true.
    //
    if (!((!!(dsfInterface.IoConnectInterruptEx) ==
                !!(dsfInterface.IoDisconnectInterruptEx)) &&
          (!!(dsfInterface.IoReportInterruptActive) ==
                !!(dsfInterface.IoReportInterruptInactive)) &&
          (dsfInterface.IoConnectInterruptEx != NULL)
          )) {
        status = STATUS_DATA_ERROR;
        DoTraceLevelMessage(
            GetDriverGlobals(), TRACE_LEVEL_ERROR, TRACINGPNP,
            "Function mismatch detected in DSF interface, %!STATUS!",
            status);
        goto Done;
    }

    //
    // Info is correct.
    //
    m_IoConnectInterruptEx      = dsfInterface.IoConnectInterruptEx;
    m_IoDisconnectInterruptEx   = dsfInterface.IoDisconnectInterruptEx;

    //
    // If DSF interface provides active/inactive functions then use them
    //
    if (dsfInterface.IoReportInterruptActive != NULL)
    {
        m_IoReportInterruptActive = dsfInterface.IoReportInterruptActive;
        m_IoReportInterruptInactive = dsfInterface.IoReportInterruptInactive;
    }

Done:

    //
    // The contract with the DSF layer is to release the interface right away;
    // the embedded interrupt function ptrs will be valid until this driver is
    // unloaded.
    //
    if (derefQI) {
        dsfInterface.Interface.InterfaceDereference(dsfInterface.Interface.Context);
    }

    return status;
}
_Must_inspect_result_
NTSTATUS
FxPkgFdo::AskParentToRemoveAndReenumerate(
    VOID
    )
/*++

Routine Description:
    This routine asks the PDO to ask its parent bus driver to Surprise-Remove
    and re-enumerate the PDO.  This will be done only at the point of
    catastrophic software failure, and occasionally after catastrophic hardware
    failure.

Arguments:
    None

Return Value:
    STATUS_SUCCESS when the reenumeration interface was available and invoked,
    STATUS_NOT_SUPPORTED when the PDO never exposed the interface.

--*/
{
    PREENUMERATE_SELF_INTERFACE_STANDARD pInterface;

    pInterface = &m_SurpriseRemoveAndReenumerateSelfInterface;

    if (pInterface->SurpriseRemoveAndReenumerateSelf != NULL) {
        pInterface->SurpriseRemoveAndReenumerateSelf(pInterface->Context);

        return STATUS_SUCCESS;
    }

    return STATUS_NOT_SUPPORTED;
}
| mit |