code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
#
# Author:: John Keiser (<jkeiser@opscode.com>)
# Copyright:: Copyright (c) 2013 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'support/shared/integration/integration_helper'
require 'chef/knife/upload'
require 'chef/knife/diff'
require 'chef/knife/raw'
describe 'knife upload' do
extend IntegrationSupport
include KnifeSupport
context 'without versioned cookbooks' do
when_the_chef_server "has one of each thing" do
client 'x', {}
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"' }
data_bag 'x', { 'y' => {} }
environment 'x', {}
node 'x', {}
role 'x', {}
user 'x', {}
when_the_repository 'has only top-level directories' do
directory 'clients'
directory 'cookbooks'
directory 'data_bags'
directory 'environments'
directory 'nodes'
directory 'roles'
directory 'users'
it 'knife upload does nothing' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed <<EOM
D\t/clients/chef-validator.json
D\t/clients/chef-webui.json
D\t/clients/x.json
D\t/cookbooks/x
D\t/data_bags/x
D\t/environments/_default.json
D\t/environments/x.json
D\t/nodes/x.json
D\t/roles/x.json
D\t/users/admin.json
D\t/users/x.json
EOM
end
it 'knife upload --purge deletes everything' do
knife('upload --purge /').should_succeed(<<EOM, :stderr => "WARNING: /environments/_default.json cannot be deleted (default environment cannot be modified).\n")
Deleted extra entry /clients/chef-validator.json (purge is on)
Deleted extra entry /clients/chef-webui.json (purge is on)
Deleted extra entry /clients/x.json (purge is on)
Deleted extra entry /cookbooks/x (purge is on)
Deleted extra entry /data_bags/x (purge is on)
Deleted extra entry /environments/x.json (purge is on)
Deleted extra entry /nodes/x.json (purge is on)
Deleted extra entry /roles/x.json (purge is on)
Deleted extra entry /users/admin.json (purge is on)
Deleted extra entry /users/x.json (purge is on)
EOM
knife('diff --name-status /').should_succeed <<EOM
D\t/environments/_default.json
EOM
end
end
when_the_repository 'has an identical copy of each thing' do
file 'clients/chef-validator.json', { 'validator' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'clients/chef-webui.json', { 'admin' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'clients/x.json', { 'public_key' => ChefZero::PUBLIC_KEY }
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
file 'data_bags/x/y.json', {}
file 'environments/_default.json', { "description" => "The default Chef environment" }
file 'environments/x.json', {}
file 'nodes/x.json', {}
file 'roles/x.json', {}
file 'users/admin.json', { 'admin' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'users/x.json', { 'public_key' => ChefZero::PUBLIC_KEY }
it 'knife upload makes no changes' do
knife('upload /cookbooks/x').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
it 'knife upload --purge makes no changes' do
knife('upload --purge /').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
context 'except the role file' do
file 'roles/x.json', { 'description' => 'blarghle' }
it 'knife upload changes the role' do
knife('upload /').should_succeed "Updated /roles/x.json\n"
knife('diff --name-status /').should_succeed ''
end
it 'knife upload --no-diff does not change the role' do
knife('upload --no-diff /').should_succeed ''
knife('diff --name-status /').should_succeed "M\t/roles/x.json\n"
end
end
context 'except the role file is textually different, but not ACTUALLY different' do
file 'roles/x.json', <<EOM
{
"chef_type": "role",
"default_attributes": {
},
"env_run_lists": {
},
"json_class": "Chef::Role",
"name": "x",
"description": "",
"override_attributes": {
},
"run_list": [
]
}
EOM
it 'knife upload / does not change anything' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
end
context 'as well as one extra copy of each thing' do
file 'clients/y.json', { 'public_key' => ChefZero::PUBLIC_KEY }
file 'cookbooks/x/blah.rb', ''
file 'cookbooks/y/metadata.rb', 'version "1.0.0"'
file 'data_bags/x/z.json', {}
file 'data_bags/y/zz.json', {}
file 'environments/y.json', {}
file 'nodes/y.json', {}
file 'roles/y.json', {}
file 'users/y.json', { 'public_key' => ChefZero::PUBLIC_KEY }
it 'knife upload adds the new files' do
knife('upload /').should_succeed <<EOM
Created /clients/y.json
Updated /cookbooks/x
Created /cookbooks/y
Created /data_bags/x/z.json
Created /data_bags/y
Created /data_bags/y/zz.json
Created /environments/y.json
Created /nodes/y.json
Created /roles/y.json
Created /users/y.json
EOM
knife('diff --name-status /').should_succeed ''
end
it 'knife upload --no-diff adds the new files' do
knife('upload --no-diff /').should_succeed <<EOM
Created /clients/y.json
Updated /cookbooks/x
Created /cookbooks/y
Created /data_bags/x/z.json
Created /data_bags/y
Created /data_bags/y/zz.json
Created /environments/y.json
Created /nodes/y.json
Created /roles/y.json
Created /users/y.json
EOM
knife('diff --name-status /').should_succeed ''
end
end
end
when_the_repository 'is empty' do
it 'knife upload does nothing' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed <<EOM
D\t/clients
D\t/cookbooks
D\t/data_bags
D\t/environments
D\t/nodes
D\t/roles
D\t/users
EOM
end
it 'knife upload --purge deletes nothing' do
knife('upload --purge /').should_fail <<EOM
ERROR: /clients cannot be deleted.
ERROR: /cookbooks cannot be deleted.
ERROR: /data_bags cannot be deleted.
ERROR: /environments cannot be deleted.
ERROR: /nodes cannot be deleted.
ERROR: /roles cannot be deleted.
ERROR: /users cannot be deleted.
EOM
knife('diff --name-status /').should_succeed <<EOM
D\t/clients
D\t/cookbooks
D\t/data_bags
D\t/environments
D\t/nodes
D\t/roles
D\t/users
EOM
end
context 'when current directory is top level' do
cwd '.'
it 'knife upload with no parameters reports an error' do
knife('upload').should_fail "FATAL: Must specify at least one argument. If you want to upload everything in this directory, type \"knife upload .\"\n", :stdout => /USAGE/
end
end
end
end
when_the_chef_server 'is empty' do
when_the_repository 'has a data bag item' do
file 'data_bags/x/y.json', { 'foo' => 'bar' }
it 'knife upload of the data bag uploads only the values in the data bag item and no other' do
knife('upload /data_bags/x/y.json').should_succeed <<EOM
Created /data_bags/x
Created /data_bags/x/y.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
EOM
JSON.parse(knife('raw /data/x/y').stdout, :create_additions => false).keys.sort.should == [ 'foo', 'id' ]
end
end
when_the_repository 'has a data bag item with keys chef_type and data_bag' do
file 'data_bags/x/y.json', { 'chef_type' => 'aaa', 'data_bag' => 'bbb' }
it 'upload preserves chef_type and data_bag' do
knife('upload /data_bags/x/y.json').should_succeed <<EOM
Created /data_bags/x
Created /data_bags/x/y.json
EOM
knife('diff --name-status /data_bags').should_succeed ''
result = JSON.parse(knife('raw /data/x/y').stdout, :create_additions => false)
result.keys.sort.should == [ 'chef_type', 'data_bag', 'id' ]
result['chef_type'].should == 'aaa'
result['data_bag'].should == 'bbb'
end
end
# Test upload of an item when the other end doesn't even have the container
when_the_repository 'has two data bag items' do
file 'data_bags/x/y.json', {}
file 'data_bags/x/z.json', {}
it 'knife upload of one data bag item itself succeeds' do
knife('upload /data_bags/x/y.json').should_succeed <<EOM
Created /data_bags/x
Created /data_bags/x/y.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
A\t/data_bags/x/z.json
EOM
end
end
end
when_the_chef_server 'has three data bag items' do
data_bag 'x', { 'deleted' => {}, 'modified' => {}, 'unmodified' => {} }
when_the_repository 'has a modified, unmodified, added and deleted data bag item' do
file 'data_bags/x/added.json', {}
file 'data_bags/x/modified.json', { 'foo' => 'bar' }
file 'data_bags/x/unmodified.json', {}
it 'knife upload of the modified file succeeds' do
knife('upload /data_bags/x/modified.json').should_succeed <<EOM
Updated /data_bags/x/modified.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the unmodified file does nothing' do
knife('upload /data_bags/x/unmodified.json').should_succeed ''
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the added file succeeds' do
knife('upload /data_bags/x/added.json').should_succeed <<EOM
Created /data_bags/x/added.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
EOM
end
it 'knife upload of the deleted file does nothing' do
knife('upload /data_bags/x/deleted.json').should_succeed ''
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload --purge of the deleted file deletes it' do
knife('upload --purge /data_bags/x/deleted.json').should_succeed <<EOM
Deleted extra entry /data_bags/x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the entire data bag uploads everything' do
knife('upload /data_bags/x').should_succeed <<EOM
Created /data_bags/x/added.json
Updated /data_bags/x/modified.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
EOM
end
it 'knife upload --purge of the entire data bag uploads everything' do
knife('upload --purge /data_bags/x').should_succeed <<EOM
Created /data_bags/x/added.json
Updated /data_bags/x/modified.json
Deleted extra entry /data_bags/x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
context 'when cwd is the /data_bags directory' do
cwd 'data_bags'
it 'knife upload fails' do
knife('upload').should_fail "FATAL: Must specify at least one argument. If you want to upload everything in this directory, type \"knife upload .\"\n", :stdout => /USAGE/
end
it 'knife upload --purge . uploads everything' do
knife('upload --purge .').should_succeed <<EOM
Created x/added.json
Updated x/modified.json
Deleted extra entry x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
it 'knife upload --purge * uploads everything' do
knife('upload --purge *').should_succeed <<EOM
Created x/added.json
Updated x/modified.json
Deleted extra entry x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
end
end
end
# Cookbook upload is a funny thing ... direct cookbook upload works, but
# upload of a file is designed not to work at present. Make sure that is the
# case.
when_the_chef_server 'has a cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'z.rb' => '' }
when_the_repository 'has a modified, extra and missing file for the cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x/y.rb', 'hi'
it 'knife upload of any individual file fails' do
knife('upload /cookbooks/x/metadata.rb').should_fail "ERROR: /cookbooks/x/metadata.rb cannot be updated.\n"
knife('upload /cookbooks/x/y.rb').should_fail "ERROR: /cookbooks/x cannot have a child created under it.\n"
knife('upload --purge /cookbooks/x/z.rb').should_fail "ERROR: /cookbooks/x/z.rb cannot be deleted.\n"
end
# TODO this is a bit of an inconsistency: if we didn't specify --purge,
# technically we shouldn't have deleted missing files. But ... cookbooks
# are a special case.
it 'knife upload of the cookbook itself succeeds' do
knife('upload /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
it 'knife upload --purge of the cookbook itself succeeds' do
knife('upload /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has a missing file for the cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
it 'knife upload of the cookbook succeeds' do
knife('upload /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has an extra file for the cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x/z.rb', ''
file 'cookbooks/x/blah.rb', ''
it 'knife upload of the cookbook succeeds' do
knife('upload /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has a different file in the cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
it 'knife upload --freeze freezes the cookbook' do
knife('upload --freeze /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
# Modify a file and attempt to upload
file 'cookbooks/x/metadata.rb', 'version "1.0.0" # This is different'
knife('upload /cookbooks/x').should_fail "ERROR: /cookbooks failed to write: Cookbook x is frozen\n"
end
end
end
when_the_chef_server 'has a frozen cookbook' do
cookbook 'frozencook', '1.0.0', {
'metadata.rb' => 'version "1.0.0"'
}, :frozen => true
when_the_repository 'has an update to said cookbook' do
file 'cookbooks/frozencook/metadata.rb', 'version "1.0.0" # This is different'
it 'knife upload fails to upload the frozen cookbook' do
knife('upload /cookbooks/frozencook').should_fail "ERROR: /cookbooks failed to write: Cookbook frozencook is frozen\n"
end
it 'knife upload --force uploads the frozen cookbook' do
knife('upload --force /cookbooks/frozencook').should_succeed <<EOM
Updated /cookbooks/frozencook
EOM
end
end
end
when_the_repository 'has a cookbook' do
file 'cookbooks/x/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x/onlyin1.0.0.rb', 'old_text'
when_the_chef_server 'has a later version for the cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'onlyin1.0.0.rb' => '' }
cookbook 'x', '1.0.1', { 'metadata.rb' => 'version "1.0.1"', 'onlyin1.0.1.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the local version' do
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x/metadata.rb
D\t/cookbooks/x/onlyin1.0.1.rb
A\t/cookbooks/x/onlyin1.0.0.rb
EOM
knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x/metadata.rb
D\t/cookbooks/x/onlyin1.0.1.rb
A\t/cookbooks/x/onlyin1.0.0.rb
EOM
end
end
when_the_chef_server 'has an earlier version for the cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'onlyin1.0.0.rb' => ''}
cookbook 'x', '0.9.9', { 'metadata.rb' => 'version "0.9.9"', 'onlyin0.9.9.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the local version' do
knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_chef_server 'has a later version for the cookbook, and no current version' do
cookbook 'x', '1.0.1', { 'metadata.rb' => 'version "1.0.1"', 'onlyin1.0.1.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the local version' do
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x/metadata.rb
D\t/cookbooks/x/onlyin1.0.1.rb
A\t/cookbooks/x/onlyin1.0.0.rb
EOM
knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x/metadata.rb
D\t/cookbooks/x/onlyin1.0.1.rb
A\t/cookbooks/x/onlyin1.0.0.rb
EOM
end
end
when_the_chef_server 'has an earlier version for the cookbook, and no current version' do
cookbook 'x', '0.9.9', { 'metadata.rb' => 'version "0.9.9"', 'onlyin0.9.9.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the new version' do
knife('upload --purge /cookbooks/x').should_succeed <<EOM
Updated /cookbooks/x
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
end
when_the_chef_server 'has an environment' do
environment 'x', {}
when_the_repository 'has an environment with bad JSON' do
file 'environments/x.json', '{'
it 'knife upload tries and fails' do
knife('upload /environments/x.json').should_fail "WARN: Parse error reading #{path_to('environments/x.json')} as JSON: A JSON text must at least contain two octets!\nERROR: /environments/x.json failed to write: Parse error reading JSON: A JSON text must at least contain two octets!\n"
knife('diff --name-status /environments/x.json').should_succeed "M\t/environments/x.json\n", :stderr => "WARN: Parse error reading #{path_to('environments/x.json')} as JSON: A JSON text must at least contain two octets!\n"
end
end
when_the_repository 'has the same environment with the wrong name in the file' do
file 'environments/x.json', { 'name' => 'y' }
it 'knife upload fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments/x.json failed to write: Name must be 'x' (is 'y')\n"
knife('diff --name-status /environments/x.json').should_succeed "M\t/environments/x.json\n"
end
end
when_the_repository 'has the same environment with no name in the file' do
file 'environments/x.json', { 'description' => 'hi' }
it 'knife upload succeeds' do
knife('upload /environments/x.json').should_succeed "Updated /environments/x.json\n"
knife('diff --name-status /environments/x.json').should_succeed ''
end
end
end
when_the_chef_server 'is empty' do
when_the_repository 'has an environment with bad JSON' do
file 'environments/x.json', '{'
it 'knife upload tries and fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments failed to create_child: Parse error reading JSON creating child 'x.json': A JSON text must at least contain two octets!\n"
knife('diff --name-status /environments/x.json').should_succeed "A\t/environments/x.json\n"
end
end
when_the_repository 'has an environment with the wrong name in the file' do
file 'environments/x.json', { 'name' => 'y' }
it 'knife upload fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments failed to create_child: Error creating 'x.json': Name must be 'x' (is 'y')\n"
knife('diff --name-status /environments/x.json').should_succeed "A\t/environments/x.json\n"
end
end
when_the_repository 'has an environment with no name in the file' do
file 'environments/x.json', { 'description' => 'hi' }
it 'knife upload succeeds' do
knife('upload /environments/x.json').should_succeed "Created /environments/x.json\n"
knife('diff --name-status /environments/x.json').should_succeed ''
end
end
when_the_repository 'has a data bag with no id in the file' do
file 'data_bags/bag/x.json', { 'foo' => 'bar' }
it 'knife upload succeeds' do
knife('upload /data_bags/bag/x.json').should_succeed "Created /data_bags/bag\nCreated /data_bags/bag/x.json\n"
knife('diff --name-status /data_bags/bag/x.json').should_succeed ''
end
end
end
end # without versioned cookbooks
with_versioned_cookbooks do
when_the_chef_server "has one of each thing" do
client 'x', {}
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"' }
data_bag 'x', { 'y' => {} }
environment 'x', {}
node 'x', {}
role 'x', {}
user 'x', {}
when_the_repository 'has only top-level directories' do
directory 'clients'
directory 'cookbooks'
directory 'data_bags'
directory 'environments'
directory 'nodes'
directory 'roles'
directory 'users'
it 'knife upload does nothing' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed <<EOM
D\t/clients/chef-validator.json
D\t/clients/chef-webui.json
D\t/clients/x.json
D\t/cookbooks/x-1.0.0
D\t/data_bags/x
D\t/environments/_default.json
D\t/environments/x.json
D\t/nodes/x.json
D\t/roles/x.json
D\t/users/admin.json
D\t/users/x.json
EOM
end
it 'knife upload --purge deletes everything' do
knife('upload --purge /').should_succeed(<<EOM, :stderr => "WARNING: /environments/_default.json cannot be deleted (default environment cannot be modified).\n")
Deleted extra entry /clients/chef-validator.json (purge is on)
Deleted extra entry /clients/chef-webui.json (purge is on)
Deleted extra entry /clients/x.json (purge is on)
Deleted extra entry /cookbooks/x-1.0.0 (purge is on)
Deleted extra entry /data_bags/x (purge is on)
Deleted extra entry /environments/x.json (purge is on)
Deleted extra entry /nodes/x.json (purge is on)
Deleted extra entry /roles/x.json (purge is on)
Deleted extra entry /users/admin.json (purge is on)
Deleted extra entry /users/x.json (purge is on)
EOM
knife('diff --name-status /').should_succeed <<EOM
D\t/environments/_default.json
EOM
end
end
when_the_repository 'has an identical copy of each thing' do
file 'clients/chef-validator.json', { 'validator' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'clients/chef-webui.json', { 'admin' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'clients/x.json', { 'public_key' => ChefZero::PUBLIC_KEY }
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
file 'data_bags/x/y.json', {}
file 'environments/_default.json', { 'description' => 'The default Chef environment' }
file 'environments/x.json', {}
file 'nodes/x.json', {}
file 'roles/x.json', {}
file 'users/admin.json', { 'admin' => true, 'public_key' => ChefZero::PUBLIC_KEY }
file 'users/x.json', { 'public_key' => ChefZero::PUBLIC_KEY }
it 'knife upload makes no changes' do
knife('upload /cookbooks/x-1.0.0').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
it 'knife upload --purge makes no changes' do
knife('upload --purge /').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
context 'except the role file' do
file 'roles/x.json', { 'description' => 'blarghle' }
it 'knife upload changes the role' do
knife('upload /').should_succeed "Updated /roles/x.json\n"
knife('diff --name-status /').should_succeed ''
end
end
context 'except the role file is textually different, but not ACTUALLY different' do
file 'roles/x.json', <<EOM
{
"chef_type": "role",
"default_attributes": {
},
"env_run_lists": {
},
"json_class": "Chef::Role",
"name": "x",
"description": "",
"override_attributes": {
},
"run_list": [
]
}
EOM
it 'knife upload / does not change anything' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed ''
end
end
context 'as well as one extra copy of each thing' do
file 'clients/y.json', { 'public_key' => ChefZero::PUBLIC_KEY }
file 'cookbooks/x-1.0.0/blah.rb', ''
file 'cookbooks/x-2.0.0/metadata.rb', 'version "2.0.0"'
file 'cookbooks/y-1.0.0/metadata.rb', 'version "1.0.0"'
file 'data_bags/x/z.json', {}
file 'data_bags/y/zz.json', {}
file 'environments/y.json', {}
file 'nodes/y.json', {}
file 'roles/y.json', {}
file 'users/y.json', { 'public_key' => ChefZero::PUBLIC_KEY }
it 'knife upload adds the new files' do
knife('upload /').should_succeed <<EOM
Created /clients/y.json
Updated /cookbooks/x-1.0.0
Created /cookbooks/x-2.0.0
Created /cookbooks/y-1.0.0
Created /data_bags/x/z.json
Created /data_bags/y
Created /data_bags/y/zz.json
Created /environments/y.json
Created /nodes/y.json
Created /roles/y.json
Created /users/y.json
EOM
knife('diff --name-status /').should_succeed ''
end
end
end
when_the_repository 'is empty' do
it 'knife upload does nothing' do
knife('upload /').should_succeed ''
knife('diff --name-status /').should_succeed <<EOM
D\t/clients
D\t/cookbooks
D\t/data_bags
D\t/environments
D\t/nodes
D\t/roles
D\t/users
EOM
end
it 'knife upload --purge deletes nothing' do
knife('upload --purge /').should_fail <<EOM
ERROR: /clients cannot be deleted.
ERROR: /cookbooks cannot be deleted.
ERROR: /data_bags cannot be deleted.
ERROR: /environments cannot be deleted.
ERROR: /nodes cannot be deleted.
ERROR: /roles cannot be deleted.
ERROR: /users cannot be deleted.
EOM
knife('diff --name-status /').should_succeed <<EOM
D\t/clients
D\t/cookbooks
D\t/data_bags
D\t/environments
D\t/nodes
D\t/roles
D\t/users
EOM
end
context 'when current directory is top level' do
cwd '.'
it 'knife upload with no parameters reports an error' do
knife('upload').should_fail "FATAL: Must specify at least one argument. If you want to upload everything in this directory, type \"knife upload .\"\n", :stdout => /USAGE/
end
end
end
end
# Test upload of an item when the other end doesn't even have the container
when_the_chef_server 'is empty' do
when_the_repository 'has two data bag items' do
file 'data_bags/x/y.json', {}
file 'data_bags/x/z.json', {}
it 'knife upload of one data bag item itself succeeds' do
knife('upload /data_bags/x/y.json').should_succeed <<EOM
Created /data_bags/x
Created /data_bags/x/y.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
A\t/data_bags/x/z.json
EOM
end
end
end
when_the_chef_server 'has three data bag items' do
data_bag 'x', { 'deleted' => {}, 'modified' => {}, 'unmodified' => {} }
when_the_repository 'has a modified, unmodified, added and deleted data bag item' do
file 'data_bags/x/added.json', {}
file 'data_bags/x/modified.json', { 'foo' => 'bar' }
file 'data_bags/x/unmodified.json', {}
it 'knife upload of the modified file succeeds' do
knife('upload /data_bags/x/modified.json').should_succeed <<EOM
Updated /data_bags/x/modified.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the unmodified file does nothing' do
knife('upload /data_bags/x/unmodified.json').should_succeed ''
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the added file succeeds' do
knife('upload /data_bags/x/added.json').should_succeed <<EOM
Created /data_bags/x/added.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
EOM
end
it 'knife upload of the deleted file does nothing' do
knife('upload /data_bags/x/deleted.json').should_succeed ''
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload --purge of the deleted file deletes it' do
knife('upload --purge /data_bags/x/deleted.json').should_succeed <<EOM
Deleted extra entry /data_bags/x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
M\t/data_bags/x/modified.json
A\t/data_bags/x/added.json
EOM
end
it 'knife upload of the entire data bag uploads everything' do
knife('upload /data_bags/x').should_succeed <<EOM
Created /data_bags/x/added.json
Updated /data_bags/x/modified.json
EOM
knife('diff --name-status /data_bags').should_succeed <<EOM
D\t/data_bags/x/deleted.json
EOM
end
it 'knife upload --purge of the entire data bag uploads everything' do
knife('upload --purge /data_bags/x').should_succeed <<EOM
Created /data_bags/x/added.json
Updated /data_bags/x/modified.json
Deleted extra entry /data_bags/x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
context 'when cwd is the /data_bags directory' do
cwd 'data_bags'
it 'knife upload fails' do
knife('upload').should_fail "FATAL: Must specify at least one argument. If you want to upload everything in this directory, type \"knife upload .\"\n", :stdout => /USAGE/
end
it 'knife upload --purge . uploads everything' do
knife('upload --purge .').should_succeed <<EOM
Created x/added.json
Updated x/modified.json
Deleted extra entry x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
it 'knife upload --purge * uploads everything' do
knife('upload --purge *').should_succeed <<EOM
Created x/added.json
Updated x/modified.json
Deleted extra entry x/deleted.json (purge is on)
EOM
knife('diff --name-status /data_bags').should_succeed ''
end
end
end
end
# Cookbook upload is a funny thing ... direct cookbook upload works, but
# upload of a file is designed not to work at present. Make sure that is the
# case.
when_the_chef_server 'has a cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'z.rb' => '' }
when_the_repository 'has a modified, extra and missing file for the cookbook' do
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x-1.0.0/y.rb', 'hi'
it 'knife upload of any individual file fails' do
knife('upload /cookbooks/x-1.0.0/metadata.rb').should_fail "ERROR: /cookbooks/x-1.0.0/metadata.rb cannot be updated.\n"
knife('upload /cookbooks/x-1.0.0/y.rb').should_fail "ERROR: /cookbooks/x-1.0.0 cannot have a child created under it.\n"
knife('upload --purge /cookbooks/x-1.0.0/z.rb').should_fail "ERROR: /cookbooks/x-1.0.0/z.rb cannot be deleted.\n"
end
# TODO this is a bit of an inconsistency: if we didn't specify --purge,
# technically we shouldn't have deleted missing files. But ... cookbooks
# are a special case.
it 'knife upload of the cookbook itself succeeds' do
knife('upload /cookbooks/x-1.0.0').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
it 'knife upload --purge of the cookbook itself succeeds' do
knife('upload /cookbooks/x-1.0.0').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has a missing file for the cookbook' do
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
it 'knife upload of the cookbook succeeds' do
knife('upload /cookbooks/x-1.0.0').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_repository 'has an extra file for the cookbook' do
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x-1.0.0/z.rb', ''
file 'cookbooks/x-1.0.0/blah.rb', ''
it 'knife upload of the cookbook succeeds' do
knife('upload /cookbooks/x-1.0.0').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
end
when_the_repository 'has a cookbook' do
file 'cookbooks/x-1.0.0/metadata.rb', 'version "1.0.0"'
file 'cookbooks/x-1.0.0/onlyin1.0.0.rb', 'old_text'
when_the_chef_server 'has a later version for the cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'onlyin1.0.0.rb' => '' }
cookbook 'x', '1.0.1', { 'metadata.rb' => 'version "1.0.1"', 'onlyin1.0.1.rb' => 'hi' }
it 'knife upload /cookbooks uploads the local version' do
knife('diff --name-status /cookbooks').should_succeed <<EOM
M\t/cookbooks/x-1.0.0/onlyin1.0.0.rb
D\t/cookbooks/x-1.0.1
EOM
knife('upload --purge /cookbooks').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
Deleted extra entry /cookbooks/x-1.0.1 (purge is on)
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_chef_server 'has an earlier version for the cookbook' do
cookbook 'x', '1.0.0', { 'metadata.rb' => 'version "1.0.0"', 'onlyin1.0.0.rb' => ''}
cookbook 'x', '0.9.9', { 'metadata.rb' => 'version "0.9.9"', 'onlyin0.9.9.rb' => 'hi' }
it 'knife upload /cookbooks uploads the local version' do
knife('upload --purge /cookbooks').should_succeed <<EOM
Updated /cookbooks/x-1.0.0
Deleted extra entry /cookbooks/x-0.9.9 (purge is on)
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_chef_server 'has a later version for the cookbook, and no current version' do
cookbook 'x', '1.0.1', { 'metadata.rb' => 'version "1.0.1"', 'onlyin1.0.1.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the local version' do
knife('diff --name-status /cookbooks').should_succeed <<EOM
D\t/cookbooks/x-1.0.1
A\t/cookbooks/x-1.0.0
EOM
knife('upload --purge /cookbooks').should_succeed <<EOM
Created /cookbooks/x-1.0.0
Deleted extra entry /cookbooks/x-1.0.1 (purge is on)
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
when_the_chef_server 'has an earlier version for the cookbook, and no current version' do
cookbook 'x', '0.9.9', { 'metadata.rb' => 'version "0.9.9"', 'onlyin0.9.9.rb' => 'hi' }
it 'knife upload /cookbooks/x uploads the new version' do
knife('upload --purge /cookbooks').should_succeed <<EOM
Created /cookbooks/x-1.0.0
Deleted extra entry /cookbooks/x-0.9.9 (purge is on)
EOM
knife('diff --name-status /cookbooks').should_succeed ''
end
end
end
when_the_chef_server 'has an environment' do
environment 'x', {}
when_the_repository 'has an environment with bad JSON' do
file 'environments/x.json', '{'
it 'knife upload tries and fails' do
knife('upload /environments/x.json').should_fail "WARN: Parse error reading #{path_to('environments/x.json')} as JSON: A JSON text must at least contain two octets!\nERROR: /environments/x.json failed to write: Parse error reading JSON: A JSON text must at least contain two octets!\n"
knife('diff --name-status /environments/x.json').should_succeed "M\t/environments/x.json\n", :stderr => "WARN: Parse error reading #{path_to('environments/x.json')} as JSON: A JSON text must at least contain two octets!\n"
end
end
when_the_repository 'has the same environment with the wrong name in the file' do
file 'environments/x.json', { 'name' => 'y' }
it 'knife upload fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments/x.json failed to write: Name must be 'x' (is 'y')\n"
knife('diff --name-status /environments/x.json').should_succeed "M\t/environments/x.json\n"
end
end
when_the_repository 'has the same environment with no name in the file' do
file 'environments/x.json', { 'description' => 'hi' }
it 'knife upload succeeds' do
knife('upload /environments/x.json').should_succeed "Updated /environments/x.json\n"
knife('diff --name-status /environments/x.json').should_succeed ''
end
end
end
when_the_chef_server 'is empty' do
when_the_repository 'has an environment with bad JSON' do
file 'environments/x.json', '{'
it 'knife upload tries and fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments failed to create_child: Parse error reading JSON creating child 'x.json': A JSON text must at least contain two octets!\n"
knife('diff --name-status /environments/x.json').should_succeed "A\t/environments/x.json\n"
end
end
when_the_repository 'has an environment with the wrong name in the file' do
file 'environments/x.json', { 'name' => 'y' }
it 'knife upload fails' do
knife('upload /environments/x.json').should_fail "ERROR: /environments failed to create_child: Error creating 'x.json': Name must be 'x' (is 'y')\n"
knife('diff --name-status /environments/x.json').should_succeed "A\t/environments/x.json\n"
end
end
when_the_repository 'has an environment with no name in the file' do
file 'environments/x.json', { 'description' => 'hi' }
it 'knife upload succeeds' do
knife('upload /environments/x.json').should_succeed "Created /environments/x.json\n"
knife('diff --name-status /environments/x.json').should_succeed ''
end
end
when_the_repository 'has a data bag with no id in the file' do
file 'data_bags/bag/x.json', { 'foo' => 'bar' }
it 'knife upload succeeds' do
knife('upload /data_bags/bag/x.json').should_succeed "Created /data_bags/bag\nCreated /data_bags/bag/x.json\n"
knife('diff --name-status /data_bags/bag/x.json').should_succeed ''
end
end
end
end # with versioned cookbooks
end
| luna1x/chef-server | vendor/ruby/1.9.1/gems/chef-11.6.2/spec/integration/knife/upload_spec.rb | Ruby | apache-2.0 | 41,092 |
/*
* Copyright 2002-2015 Drew Noakes
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* More information about this project is available at:
*
* https://drewnoakes.com/code/exif/
* https://github.com/drewnoakes/metadata-extractor
*/
package com.drew.metadata.exif.makernotes;
import com.drew.lang.annotations.NotNull;
import com.drew.lang.annotations.Nullable;
import com.drew.metadata.TagDescriptor;
import static com.drew.metadata.exif.makernotes.PentaxMakernoteDirectory.*;
/**
* Provides human-readable string representations of tag values stored in a {@link PentaxMakernoteDirectory}.
* <p>
* Some information about this makernote taken from here:
* http://www.ozhiker.com/electronics/pjmt/jpeg_info/pentax_mn.html
*
* @author Drew Noakes https://drewnoakes.com
*/
public class PentaxMakernoteDescriptor extends TagDescriptor<PentaxMakernoteDirectory>
{
public PentaxMakernoteDescriptor(@NotNull PentaxMakernoteDirectory directory)
{
super(directory);
}
@Override
@Nullable
public String getDescription(int tagType)
{
switch (tagType) {
case TAG_CAPTURE_MODE:
return getCaptureModeDescription();
case TAG_QUALITY_LEVEL:
return getQualityLevelDescription();
case TAG_FOCUS_MODE:
return getFocusModeDescription();
case TAG_FLASH_MODE:
return getFlashModeDescription();
case TAG_WHITE_BALANCE:
return getWhiteBalanceDescription();
case TAG_DIGITAL_ZOOM:
return getDigitalZoomDescription();
case TAG_SHARPNESS:
return getSharpnessDescription();
case TAG_CONTRAST:
return getContrastDescription();
case TAG_SATURATION:
return getSaturationDescription();
case TAG_ISO_SPEED:
return getIsoSpeedDescription();
case TAG_COLOUR:
return getColourDescription();
default:
return super.getDescription(tagType);
}
}
@Nullable
public String getColourDescription()
{
return getIndexedDescription(TAG_COLOUR, 1, "Normal", "Black & White", "Sepia");
}
@Nullable
public String getIsoSpeedDescription()
{
Integer value = _directory.getInteger(TAG_ISO_SPEED);
if (value == null)
return null;
switch (value) {
// TODO there must be other values which aren't catered for here
case 10: return "ISO 100";
case 16: return "ISO 200";
case 100: return "ISO 100";
case 200: return "ISO 200";
default: return "Unknown (" + value + ")";
}
}
@Nullable
public String getSaturationDescription()
{
return getIndexedDescription(TAG_SATURATION, "Normal", "Low", "High");
}
@Nullable
public String getContrastDescription()
{
return getIndexedDescription(TAG_CONTRAST, "Normal", "Low", "High");
}
@Nullable
public String getSharpnessDescription()
{
return getIndexedDescription(TAG_SHARPNESS, "Normal", "Soft", "Hard");
}
@Nullable
public String getDigitalZoomDescription()
{
Float value = _directory.getFloatObject(TAG_DIGITAL_ZOOM);
if (value == null)
return null;
if (value == 0)
return "Off";
return Float.toString(value);
}
@Nullable
public String getWhiteBalanceDescription()
{
return getIndexedDescription(TAG_WHITE_BALANCE,
"Auto", "Daylight", "Shade", "Tungsten", "Fluorescent", "Manual");
}
@Nullable
public String getFlashModeDescription()
{
return getIndexedDescription(TAG_FLASH_MODE,
1, "Auto", "Flash On", null, "Flash Off", null, "Red-eye Reduction");
}
@Nullable
public String getFocusModeDescription()
{
return getIndexedDescription(TAG_FOCUS_MODE, 2, "Custom", "Auto");
}
@Nullable
public String getQualityLevelDescription()
{
return getIndexedDescription(TAG_QUALITY_LEVEL, "Good", "Better", "Best");
}
@Nullable
public String getCaptureModeDescription()
{
return getIndexedDescription(TAG_CAPTURE_MODE,
"Auto", "Night-scene", "Manual", null, "Multiple");
}
}
| wswenyue/metadata-extractor | Source/com/drew/metadata/exif/makernotes/PentaxMakernoteDescriptor.java | Java | apache-2.0 | 4,966 |
/*
* Hibernate Validator, declare and validate application constraints
*
* License: Apache License, Version 2.0
* See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
*/
package org.hibernate.validator.test.internal.engine.methodvalidation.service;
import java.util.List;
import java.util.Map;
import javax.validation.Valid;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import org.joda.time.DateMidnight;
import org.hibernate.validator.constraints.NotEmpty;
import org.hibernate.validator.test.internal.engine.methodvalidation.model.Customer;
/**
* @author Gunnar Morling
*/
public interface CustomerRepository extends RepositoryBase<Customer> {
@Valid
Customer findCustomerByName(@NotNull String name);
void persistCustomer(@NotNull @Valid Customer customer);
void cascadingMapParameter(@Valid Map<String, Customer> customer);
void cascadingIterableParameter(@Valid List<Customer> customer);
void cascadingArrayParameter(@Valid Customer... customer);
void findCustomerByAgeAndName(@Min(5) Integer age, @NotNull String name);
void cascadingParameter(@NotNull @Valid Customer param1, @NotNull @Valid Customer param2);
@Override
void foo(Long id);
@Override
void bar(Customer customer);
void boz();
@Min(10)
int baz();
@Valid
Customer cascadingReturnValue();
@Valid
List<Customer> cascadingIterableReturnValue();
@Valid
Map<String, Customer> cascadingMapReturnValue();
@Valid
Customer[] cascadingArrayReturnValue();
@Override
Customer overriddenMethodWithCascadingReturnValue();
void parameterConstraintInGroup(@NotNull(groups = { ValidationGroup.class }) String name);
@Override
@Min(10)
int overriddenMethodWithReturnValueConstraint();
int getFoo(int i);
int getFoo(@NotEmpty String s);
@ConsistentDateParameters
void methodWithCrossParameterConstraint(@NotNull DateMidnight start, @NotNull DateMidnight end);
public interface ValidationGroup {
}
}
| mxrenkin/hibernate-validator | engine/src/test/java/org/hibernate/validator/test/internal/engine/methodvalidation/service/CustomerRepository.java | Java | apache-2.0 | 2,001 |
package org.zstack.network.service.lb;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.db.UpdateQuery;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.thread.ThreadFacade;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.network.l3.L3NetworkVO;
import org.zstack.header.network.service.NetworkServiceL3NetworkRefVO;
import org.zstack.header.vm.*;
import org.zstack.identity.AccountManager;
import org.zstack.network.service.vip.*;
import org.zstack.tag.TagManager;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.DebugUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.logging.CLogger;
import static org.zstack.core.Platform.operr;
import javax.persistence.TypedQuery;
import java.util.*;
import java.util.stream.Collectors;
import static java.util.Arrays.asList;
/**
* Created by frank on 8/8/2015.
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class LoadBalancerBase {
private static final CLogger logger = Utils.getLogger(LoadBalancerBase.class);
@Autowired
private CloudBus bus;
@Autowired
private DatabaseFacade dbf;
@Autowired
private LoadBalancerManager lbMgr;
@Autowired
private ThreadFacade thdf;
@Autowired
private ErrorFacade errf;
@Autowired
private AccountManager acntMgr;
@Autowired
private TagManager tagMgr;
private LoadBalancerVO self;
private String getSyncId() {
return String.format("operate-lb-%s", self.getUuid());
}
protected LoadBalancerInventory getInventory() {
return LoadBalancerInventory.valueOf(self);
}
private LoadBalancerInventory reloadAndGetInventory() {
self = dbf.reload(self);
return getInventory();
}
public LoadBalancerBase(LoadBalancerVO self) {
this.self = self;
}
void handleMessage(Message msg) {
if (msg instanceof APIMessage) {
handleApiMessage((APIMessage) msg);
} else {
handleLocalMessage(msg);
}
}
private void handleLocalMessage(Message msg) {
if (msg instanceof LoadBalancerActiveVmNicMsg) {
handle((LoadBalancerActiveVmNicMsg) msg);
} else if (msg instanceof LoadBalancerDeactiveVmNicMsg) {
handle((LoadBalancerDeactiveVmNicMsg) msg);
} else if (msg instanceof LoadBalancerRemoveVmNicMsg) {
handle((LoadBalancerRemoveVmNicMsg) msg);
} else if (msg instanceof RefreshLoadBalancerMsg) {
handle((RefreshLoadBalancerMsg) msg);
} else if (msg instanceof DeleteLoadBalancerMsg) {
handle((DeleteLoadBalancerMsg) msg);
} else if (msg instanceof DeleteLoadBalancerOnlyMsg) {
handle((DeleteLoadBalancerOnlyMsg) msg);
} else {
bus.dealWithUnknownMessage(msg);
}
}
private void handle(DeleteLoadBalancerOnlyMsg msg) {
DeleteLoadBalancerOnlyReply reply = new DeleteLoadBalancerOnlyReply();
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(SyncTaskChain chain) {
if (self.getProviderType() == null) {
// not initialized yet
dbf.remove(self);
bus.reply(msg, reply);
chain.next();
return;
}
LoadBalancerBackend bkd = getBackend();
bkd.destroyLoadBalancer(makeStruct(), new Completion(msg, chain) {
@Override
public void success() {
dbf.remove(self);
bus.reply(msg, reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
chain.next();
}
});
}
@Override
public String getName() {
return "delete-load-balancer-only";
}
});
}
private void handle(final DeleteLoadBalancerMsg msg) {
final DeleteLoadBalancerReply reply = new DeleteLoadBalancerReply();
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
delete(new Completion(msg, chain) {
@Override
public void success() {
bus.reply(msg ,reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg ,reply);
chain.next();
}
});
}
@Override
public String getName() {
return "delete-lb";
}
});
}
private void handle(final RefreshLoadBalancerMsg msg) {
final RefreshLoadBalancerReply reply = new RefreshLoadBalancerReply();
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
refresh(new Completion(msg, chain) {
@Override
public void success() {
reply.setInventory(getInventory());
bus.reply(msg, reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
chain.next();
}
});
}
@Override
public String getName() {
return "refresh-lb";
}
});
}
private void refresh(final Completion completion) {
LoadBalancerBackend bkd = getBackend();
bkd.refresh(makeStruct(), completion);
}
private void handle(final LoadBalancerRemoveVmNicMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
final LoadBalancerRemoveVmNicReply reply = new LoadBalancerRemoveVmNicReply();
removeNics(msg.getListenerUuid(), msg.getVmNicUuids(), new Completion(msg, chain) {
@Override
public void success() {
bus.reply(msg, reply);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
reply.setError(errorCode);
bus.reply(msg, reply);
chain.next();
}
});
}
@Override
public String getName() {
return "remove-nic-from-lb";
}
});
}
private void checkIfNicIsAdded(List<String> nicUuids) {
List<String> allNicUuids = new ArrayList<String>();
for (LoadBalancerListenerVO l : self.getListeners()) {
allNicUuids.addAll(CollectionUtils.transformToList(l.getVmNicRefs(), new Function<String, LoadBalancerListenerVmNicRefVO>() {
@Override
public String call(LoadBalancerListenerVmNicRefVO arg) {
return arg.getVmNicUuid();
}
}));
}
for (String nicUuid : nicUuids) {
if (!allNicUuids.contains(nicUuid)) {
throw new CloudRuntimeException(String.format("the load balancer[uuid: %s] doesn't have a vm nic[uuid: %s] added", self.getUuid(), nicUuid));
}
}
}
private void handle(final LoadBalancerDeactiveVmNicMsg msg) {
checkIfNicIsAdded(msg.getVmNicUuids());
LoadBalancerListenerVO l = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVO, LoadBalancerListenerVO>() {
@Override
public LoadBalancerListenerVO call(LoadBalancerListenerVO arg) {
return arg.getUuid().equals(msg.getListenerUuid()) ? arg : null;
}
});
final List<LoadBalancerListenerVmNicRefVO> refs = CollectionUtils.transformToList(l.getVmNicRefs(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVmNicRefVO>() {
@Override
public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVmNicRefVO arg) {
return msg.getVmNicUuids().contains(arg.getVmNicUuid()) ? arg : null;
}
});
final LoadBalancerDeactiveVmNicReply reply = new LoadBalancerDeactiveVmNicReply();
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("deactive-vm-nics-on-lb-%s", self.getUuid()));
chain.then(new ShareFlow() {
@Override
public void setup() {
flow(new Flow() {
String __name__ = "set-nics-to-inactive-in-db";
@Override
public void run(FlowTrigger trigger, Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Inactive);
dbf.update(ref);
}
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Active);
dbf.update(ref);
}
trigger.rollback();
}
});
flow(new NoRollbackFlow() {
String __name__ = "deactive-nics-on-backend";
@Override
public void run(final FlowTrigger trigger, Map data) {
SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
q.add(VmNicVO_.uuid, Op.IN, CollectionUtils.transformToList(refs, new Function<String, LoadBalancerListenerVmNicRefVO>() {
@Override
public String call(LoadBalancerListenerVmNicRefVO arg) {
return arg.getVmNicUuid();
}
}));
List<VmNicVO> nicvos = q.list();
LoadBalancerBackend bkd = getBackend();
bkd.removeVmNics(makeStruct(), VmNicInventory.valueOf(nicvos), new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
bus.reply(msg, reply);
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
reply.setError(errCode);
bus.reply(msg, reply);
}
});
}
}).start();
}
private void activeVmNic(final LoadBalancerActiveVmNicMsg msg, final NoErrorCompletion completion) {
checkIfNicIsAdded(msg.getVmNicUuids());
LoadBalancerListenerVO l = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVO, LoadBalancerListenerVO>() {
@Override
public LoadBalancerListenerVO call(LoadBalancerListenerVO arg) {
return arg.getUuid().equals(msg.getListenerUuid()) ? arg : null;
}
});
final List<LoadBalancerListenerVmNicRefVO> refs = CollectionUtils.transformToList(l.getVmNicRefs(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVmNicRefVO>() {
@Override
public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVmNicRefVO arg) {
return msg.getVmNicUuids().contains(arg.getVmNicUuid()) ? arg : null;
}
});
final LoadBalancerActiveVmNicReply reply = new LoadBalancerActiveVmNicReply();
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("active-vm-nics-on-lb-%s", self.getUuid()));
chain.then(new ShareFlow() {
@Override
public void setup() {
flow(new Flow() {
String __name__ = "set-nics-to-active-in-db";
@Override
public void run(FlowTrigger trigger, Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Active);
dbf.update(ref);
}
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Inactive);
dbf.update(ref);
}
trigger.rollback();
}
});
flow(new NoRollbackFlow() {
String __name__ = "active-nics-on-backend";
@Override
public void run(final FlowTrigger trigger, Map data) {
SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
q.add(VmNicVO_.uuid, Op.IN, CollectionUtils.transformToList(refs, new Function<String, LoadBalancerListenerVmNicRefVO>() {
@Override
public String call(LoadBalancerListenerVmNicRefVO arg) {
return arg.getVmNicUuid();
}
}));
List<VmNicVO> nicvos = q.list();
LoadBalancerBackend bkd = getBackend();
bkd.addVmNics(makeStruct(), VmNicInventory.valueOf(nicvos), new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg) {
@Override
public void handle(Map data) {
bus.reply(msg, reply);
completion.done();
}
});
error(new FlowErrorHandler(msg) {
@Override
public void handle(ErrorCode errCode, Map data) {
reply.setError(errCode);
bus.reply(msg, reply);
completion.done();
}
});
}
}).start();
}
private void handle(final LoadBalancerActiveVmNicMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
activeVmNic(msg, new NoErrorCompletion(msg, chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return "deactive-nic";
}
});
}
private void handleApiMessage(APIMessage msg) {
if (msg instanceof APICreateLoadBalancerListenerMsg) {
handle((APICreateLoadBalancerListenerMsg) msg);
} else if (msg instanceof APIAddVmNicToLoadBalancerMsg) {
handle((APIAddVmNicToLoadBalancerMsg) msg);
} else if (msg instanceof APIRemoveVmNicFromLoadBalancerMsg) {
handle((APIRemoveVmNicFromLoadBalancerMsg) msg);
} else if (msg instanceof APIDeleteLoadBalancerListenerMsg) {
handle((APIDeleteLoadBalancerListenerMsg) msg);
} else if (msg instanceof APIDeleteLoadBalancerMsg) {
handle((APIDeleteLoadBalancerMsg) msg);
} else if (msg instanceof APIRefreshLoadBalancerMsg) {
handle((APIRefreshLoadBalancerMsg) msg);
} else if (msg instanceof APIGetCandidateVmNicsForLoadBalancerMsg) {
handle((APIGetCandidateVmNicsForLoadBalancerMsg) msg);
} else {
bus.dealWithUnknownMessage(msg);
}
}
@Transactional(readOnly = true)
private void handle(APIGetCandidateVmNicsForLoadBalancerMsg msg) {
APIGetCandidateVmNicsForLoadBalancerReply reply = new APIGetCandidateVmNicsForLoadBalancerReply();
String sql = "select vip.peerL3NetworkUuid from VipVO vip where vip.uuid = :uuid";
TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class);
q.setParameter("uuid", self.getVipUuid());
List<String> ret = q.getResultList();
String peerL3Uuid = ret.isEmpty() ? null : ret.get(0);
if (peerL3Uuid != null) {
// the load balancer has been bound to a private L3 network
sql = "select nic from VmNicVO nic, VmInstanceVO vm where nic.l3NetworkUuid = :l3Uuid and nic.uuid not in (select ref.vmNicUuid from LoadBalancerListenerVmNicRefVO ref" +
" where ref.listenerUuid = :luuid) and nic.vmInstanceUuid = vm.uuid and vm.type = :vmType and vm.state in (:vmStates)";
TypedQuery<VmNicVO> pq = dbf.getEntityManager().createQuery(sql, VmNicVO.class);
pq.setParameter("l3Uuid", peerL3Uuid);
pq.setParameter("luuid", msg.getListenerUuid());
pq.setParameter("vmType", VmInstanceConstant.USER_VM_TYPE);
pq.setParameter("vmStates", asList(VmInstanceState.Running, VmInstanceState.Stopped));
List<VmNicVO> nics = pq.getResultList();
reply.setInventories(VmNicInventory.valueOf(nics));
bus.reply(msg, reply);
return;
}
// the load balancer has not been bound to any private L3 network
sql = "select l3.uuid from L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref where l3.uuid = ref.l3NetworkUuid" +
" and ref.networkServiceType = :type";
q = dbf.getEntityManager().createQuery(sql, String.class);
q.setParameter("type", LoadBalancerConstants.LB_NETWORK_SERVICE_TYPE_STRING);
List<String> l3Uuids = q.getResultList();
if (l3Uuids.isEmpty()) {
reply.setInventories(new ArrayList<>());
bus.reply(msg, reply);
return;
}
sql = "select nic from VmNicVO nic, VmInstanceVO vm where nic.l3NetworkUuid in (select l3.uuid from L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref where l3.uuid = ref.l3NetworkUuid" +
" and ref.networkServiceType = :type) and nic.vmInstanceUuid = vm.uuid and vm.type = :vmType and vm.state in (:vmStates)";
TypedQuery<VmNicVO> nq = dbf.getEntityManager().createQuery(sql, VmNicVO.class);
nq.setParameter("type", LoadBalancerConstants.LB_NETWORK_SERVICE_TYPE_STRING);
nq.setParameter("vmType", VmInstanceConstant.USER_VM_TYPE);
nq.setParameter("vmStates", asList(VmInstanceState.Running, VmInstanceState.Stopped));
List<VmNicVO> nics = nq.getResultList();
reply.setInventories(VmNicInventory.valueOf(nics));
bus.reply(msg, reply);
}
private void handle(final APIRefreshLoadBalancerMsg msg) {
final APIRefreshLoadBalancerEvent evt = new APIRefreshLoadBalancerEvent(msg.getId());
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
refresh(new Completion(msg, chain) {
@Override
public void success() {
evt.setInventory(getInventory());
bus.publish(evt);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
evt.setError(errorCode);
bus.publish(evt);
chain.next();
}
});
}
@Override
public String getName() {
return "refresh-lb";
}
});
}
private void handle(final APIDeleteLoadBalancerMsg msg) {
final APIDeleteLoadBalancerEvent evt = new APIDeleteLoadBalancerEvent(msg.getId());
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
delete(new Completion(msg, chain) {
@Override
public void success() {
bus.publish(evt);
chain.next();
}
@Override
public void fail(ErrorCode errorCode) {
evt.setError(errorCode);
bus.publish(evt);
chain.next();
}
});
}
@Override
public String getName() {
return "delete-lb";
}
});
}
private void delete(final Completion completion) {
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("delete-lb-%s", self.getUuid()));
chain.then(new ShareFlow() {
@Override
public void setup() {
flow(new NoRollbackFlow() {
String __name__ = "delete-lb";
@Override
public void run(final FlowTrigger trigger, Map data) {
if (self.getProviderType() == null) {
trigger.next();
// not initialized yet
return;
}
LoadBalancerBackend bkd = getBackend();
bkd.destroyLoadBalancer(makeStruct(), new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
flow(new NoRollbackFlow() {
String __name__ = "release-vip";
@Override
public void run(FlowTrigger trigger, Map data) {
new Vip(self.getVipUuid()).release(new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(completion) {
@Override
public void handle(Map data) {
dbf.remove(self);
completion.success();
}
});
error(new FlowErrorHandler(completion) {
@Override
public void handle(ErrorCode errCode, Map data) {
completion.fail(errCode);
}
});
}
}).start();
}
private void handle(final APIDeleteLoadBalancerListenerMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
deleteListener(msg, new NoErrorCompletion(msg, chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return "delete-listener";
}
});
}
private LoadBalancerStruct removeListenerStruct(LoadBalancerListenerInventory listener) {
LoadBalancerStruct s = makeStruct();
for (LoadBalancerListenerInventory l : s.getListeners()) {
if (l.getUuid().equals(listener.getUuid())) {
l.setVmNicRefs(new ArrayList<>());
}
}
return s;
}
private void deleteListener(APIDeleteLoadBalancerListenerMsg msg, final NoErrorCompletion completion) {
final APIDeleteLoadBalancerListenerEvent evt = new APIDeleteLoadBalancerListenerEvent(msg.getId());
final LoadBalancerListenerVO vo = dbf.findByUuid(msg.getUuid(), LoadBalancerListenerVO.class);
if (vo == null) {
evt.setInventory(getInventory());
bus.publish(evt);
completion.done();
return;
}
if (!needAction()) {
dbf.remove(vo);
evt.setInventory(reloadAndGetInventory());
bus.publish(evt);
completion.done();
return;
}
LoadBalancerListenerInventory listener = LoadBalancerListenerInventory.valueOf(vo);
LoadBalancerBackend bkd = getBackend();
bkd.removeListener(removeListenerStruct(listener), listener, new Completion(msg, completion) {
@Override
public void success() {
dbf.remove(vo);
evt.setInventory(reloadAndGetInventory());
bus.publish(evt);
completion.done();
}
@Override
public void fail(ErrorCode errorCode) {
evt.setError(errorCode);
bus.publish(evt);
completion.done();
}
});
}
private void handle(final APIRemoveVmNicFromLoadBalancerMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
removeNic(msg, new NoErrorCompletion(msg, chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return "remove-nic";
}
});
}
private LoadBalancerStruct removeNicStruct(String listenerUuid, List<String> nicUuids) {
LoadBalancerStruct s = makeStruct();
Optional<LoadBalancerListenerInventory> opt = s.getListeners().stream().filter(it -> it.getUuid().equals(listenerUuid)).findAny();
DebugUtils.Assert(opt.isPresent(), String.format("cannot find listener[uuid:%s]", listenerUuid));
LoadBalancerListenerInventory l = opt.get();
l.getVmNicRefs().removeIf(loadBalancerListenerVmNicRefInventory -> nicUuids.contains(loadBalancerListenerVmNicRefInventory.getVmNicUuid()));
return s;
}
private void removeNics(String listenerUuid, final List<String> vmNicUuids, final Completion completion) {
SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
q.add(VmNicVO_.uuid, Op.IN, vmNicUuids);
List<VmNicVO> vos = q.list();
List<VmNicInventory> nics = VmNicInventory.valueOf(vos);
LoadBalancerBackend bkd = getBackend();
bkd.removeVmNics(removeNicStruct(listenerUuid, vmNicUuids), nics, new Completion(completion) {
@Override
public void success() {
UpdateQuery.New(LoadBalancerListenerVmNicRefVO.class)
.condAnd(LoadBalancerListenerVmNicRefVO_.vmNicUuid, Op.IN, vmNicUuids)
.condAnd(LoadBalancerListenerVmNicRefVO_.listenerUuid, Op.EQ, listenerUuid)
.delete();
completion.success();
}
@Override
public void fail(ErrorCode errorCode) {
completion.fail(errorCode);
}
});
}
private void removeNic(APIRemoveVmNicFromLoadBalancerMsg msg, final NoErrorCompletion completion) {
final APIRemoveVmNicFromLoadBalancerEvent evt = new APIRemoveVmNicFromLoadBalancerEvent(msg.getId());
removeNics(msg.getListenerUuid(), msg.getVmNicUuids(), new Completion(msg, completion) {
@Override
public void success() {
evt.setInventory(reloadAndGetInventory());
bus.publish(evt);
completion.done();
}
@Override
public void fail(ErrorCode errorCode) {
evt.setError(errorCode);
bus.publish(evt);
completion.done();
}
});
}
@Transactional(readOnly = true)
private String findProviderTypeByVmNicUuid(String nicUuid) {
String sql = "select l3 from L3NetworkVO l3, VmNicVO nic where nic.l3NetworkUuid = l3.uuid and nic.uuid = :uuid";
TypedQuery<L3NetworkVO> q = dbf.getEntityManager().createQuery(sql, L3NetworkVO.class);
q.setParameter("uuid", nicUuid);
L3NetworkVO l3 = q.getSingleResult();
for (NetworkServiceL3NetworkRefVO ref : l3.getNetworkServices()) {
if (LoadBalancerConstants.LB_NETWORK_SERVICE_TYPE_STRING.equals(ref.getNetworkServiceType())) {
sql = "select p.type from NetworkServiceProviderVO p where p.uuid = :uuid";
TypedQuery<String> nq = dbf.getEntityManager().createQuery(sql, String.class);
nq.setParameter("uuid", ref.getNetworkServiceProviderUuid());
return nq.getSingleResult();
}
}
return null;
}
private void handle(final APIAddVmNicToLoadBalancerMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
addVmNicToListener(msg, new NoErrorCompletion(chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return getSyncSignature();
}
});
}
private void addVmNicToListener(final APIAddVmNicToLoadBalancerMsg msg, final NoErrorCompletion completion) {
final APIAddVmNicToLoadBalancerEvent evt = new APIAddVmNicToLoadBalancerEvent(msg.getId());
final String providerType = findProviderTypeByVmNicUuid(msg.getVmNicUuids().get(0));
if (providerType == null) {
throw new OperationFailureException(operr("the L3 network of vm nic[uuid:%s] doesn't have load balancer service enabled", msg.getVmNicUuids().get(0)));
}
SimpleQuery<VmNicVO> q = dbf.createQuery(VmNicVO.class);
q.add(VmNicVO_.uuid, Op.IN, msg.getVmNicUuids());
List<VmNicVO> nicVOs = q.list();
final List<VmNicInventory> nics = VmNicInventory.valueOf(nicVOs);
FlowChain chain = FlowChainBuilder.newShareFlowChain();
chain.setName(String.format("add-vm-nic-to-lb-listener-%s", msg.getListenerUuid()));
chain.then(new ShareFlow() {
List<LoadBalancerListenerVmNicRefVO> refs = new ArrayList<LoadBalancerListenerVmNicRefVO>();
boolean init = false;
@Override
public void setup() {
flow(new Flow() {
String __name__ = "check-provider-type";
@Override
public void run(FlowTrigger trigger, Map data) {
if (self.getProviderType() == null) {
self.setProviderType(providerType);
self = dbf.updateAndRefresh(self);
init = true;
} else {
if (!providerType.equals(self.getProviderType())) {
throw new OperationFailureException(operr("service provider type mismatching. The load balancer[uuid:%s] is provided by the service provider[type:%s]," +
" but the L3 network of vm nic[uuid:%s] is enabled with the service provider[type: %s]", self.getUuid(), self.getProviderType(),
msg.getVmNicUuids().get(0), providerType));
}
}
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (init) {
self = dbf.reload(self);
self.setProviderType(null);
dbf.update(self);
}
trigger.rollback();
}
});
flow(new Flow() {
String __name__ = "write-nic-to-db";
boolean s = false;
@Override
public void run(FlowTrigger trigger, Map data) {
for (String nicUuid : msg.getVmNicUuids()) {
LoadBalancerListenerVmNicRefVO ref = new LoadBalancerListenerVmNicRefVO();
ref.setListenerUuid(msg.getListenerUuid());
ref.setVmNicUuid(nicUuid);
ref.setStatus(LoadBalancerVmNicStatus.Pending);
refs.add(ref);
}
dbf.persistCollection(refs);
s = true;
trigger.next();
}
@Override
public void rollback(FlowRollback trigger, Map data) {
if (s) {
dbf.removeCollection(refs, LoadBalancerListenerVmNicRefVO.class);
}
trigger.rollback();
}
});
flow(new NoRollbackFlow() {
String __name__ = "add-nic-to-lb";
@Override
public void run(final FlowTrigger trigger, Map data) {
LoadBalancerBackend bkd = getBackend();
LoadBalancerStruct s = makeStruct();
s.setInit(init);
bkd.addVmNics(s, nics, new Completion(trigger) {
@Override
public void success() {
trigger.next();
}
@Override
public void fail(ErrorCode errorCode) {
trigger.fail(errorCode);
}
});
}
});
done(new FlowDoneHandler(msg, completion) {
@Override
public void handle(Map data) {
for (LoadBalancerListenerVmNicRefVO ref : refs) {
ref.setStatus(LoadBalancerVmNicStatus.Active);
}
dbf.updateCollection(refs);
evt.setInventory(LoadBalancerListenerInventory.valueOf(dbf.findByUuid(msg.getListenerUuid(), LoadBalancerListenerVO.class)));
bus.publish(evt);
completion.done();
}
});
error(new FlowErrorHandler(msg, completion) {
@Override
public void handle(ErrorCode errCode, Map data) {
evt.setError(errCode);
bus.publish(evt);
completion.done();
}
});
}
}).start();
}
private boolean needAction() {
if (self.getProviderType() == null) {
return false;
}
LoadBalancerListenerVmNicRefVO activeNic = CollectionUtils.find(self.getListeners(), new Function<LoadBalancerListenerVmNicRefVO, LoadBalancerListenerVO>() {
@Override
public LoadBalancerListenerVmNicRefVO call(LoadBalancerListenerVO arg) {
for (LoadBalancerListenerVmNicRefVO ref : arg.getVmNicRefs()) {
if (ref.getStatus() == LoadBalancerVmNicStatus.Active || ref.getStatus() == LoadBalancerVmNicStatus.Pending) {
return ref;
}
}
return null;
}
});
if (activeNic == null) {
return false;
}
return true;
}
private LoadBalancerBackend getBackend() {
DebugUtils.Assert(self.getProviderType() != null, "providerType cannot be null");
return lbMgr.getBackend(self.getProviderType());
}
private LoadBalancerStruct makeStruct() {
LoadBalancerStruct struct = new LoadBalancerStruct();
struct.setLb(reloadAndGetInventory());
List<String> activeNicUuids = new ArrayList<String>();
for (LoadBalancerListenerVO l : self.getListeners()) {
activeNicUuids.addAll(CollectionUtils.transformToList(l.getVmNicRefs(), new Function<String, LoadBalancerListenerVmNicRefVO>() {
@Override
public String call(LoadBalancerListenerVmNicRefVO arg) {
return arg.getStatus() == LoadBalancerVmNicStatus.Active || arg.getStatus() == LoadBalancerVmNicStatus.Pending ? arg.getVmNicUuid() : null;
}
}));
}
if (activeNicUuids.isEmpty()) {
struct.setVmNics(new HashMap<String, VmNicInventory>());
} else {
SimpleQuery<VmNicVO> nq = dbf.createQuery(VmNicVO.class);
nq.add(VmNicVO_.uuid, Op.IN, activeNicUuids);
List<VmNicVO> nicvos = nq.list();
Map<String, VmNicInventory> m = new HashMap<String, VmNicInventory>();
for (VmNicVO n : nicvos) {
m.put(n.getUuid(), VmNicInventory.valueOf(n));
}
struct.setVmNics(m);
}
struct.setListeners(LoadBalancerListenerInventory.valueOf(self.getListeners()));
return struct;
}
private void handle(final APICreateLoadBalancerListenerMsg msg) {
thdf.chainSubmit(new ChainTask(msg) {
@Override
public String getSyncSignature() {
return getSyncId();
}
@Override
public void run(final SyncTaskChain chain) {
createListener(msg, new NoErrorCompletion(chain) {
@Override
public void done() {
chain.next();
}
});
}
@Override
public String getName() {
return "create-listener";
}
});
}
private void createListener(final APICreateLoadBalancerListenerMsg msg, final NoErrorCompletion completion) {
final APICreateLoadBalancerListenerEvent evt = new APICreateLoadBalancerListenerEvent(msg.getId());
LoadBalancerListenerVO vo = new LoadBalancerListenerVO();
vo.setLoadBalancerUuid(self.getUuid());
vo.setUuid(msg.getResourceUuid() == null ? Platform.getUuid() : msg.getResourceUuid());
vo.setDescription(vo.getDescription());
vo.setName(msg.getName());
vo.setInstancePort(msg.getInstancePort());
vo.setLoadBalancerPort(msg.getLoadBalancerPort());
vo.setProtocol(msg.getProtocol());
vo = dbf.persistAndRefresh(vo);
acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), vo.getUuid(), LoadBalancerListenerVO.class);
tagMgr.createNonInherentSystemTags(msg.getSystemTags(), vo.getUuid(), LoadBalancerListenerVO.class.getSimpleName());
evt.setInventory(LoadBalancerListenerInventory.valueOf(vo));
bus.publish(evt);
completion.done();
}
}
| winger007/zstack | plugin/loadBalancer/src/main/java/org/zstack/network/service/lb/LoadBalancerBase.java | Java | apache-2.0 | 44,011 |
class WorkspaceSearchController < ApplicationController
before_filter :require_full_search
def show
workspace = Workspace.find(params[:workspace_id])
authorize! :show, workspace
present WorkspaceSearch.new(current_user, params)
end
end
| nvoron23/chorus | app/controllers/workspace_search_controller.rb | Ruby | apache-2.0 | 255 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.iceberg;
import org.apache.iceberg.PartitionField;
import org.apache.iceberg.PartitionSpec;
import org.apache.iceberg.Schema;
import java.util.List;
import java.util.function.Consumer;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.lang.Integer.parseInt;
import static java.lang.String.format;
public final class PartitionFields
{
private static final String NAME = "[a-z_][a-z0-9_]*";
private static final String FUNCTION_NAME = "\\((" + NAME + ")\\)";
private static final String FUNCTION_NAME_INT = "\\((" + NAME + "), *(\\d+)\\)";
private static final Pattern IDENTITY_PATTERN = Pattern.compile(NAME);
private static final Pattern YEAR_PATTERN = Pattern.compile("year" + FUNCTION_NAME);
private static final Pattern MONTH_PATTERN = Pattern.compile("month" + FUNCTION_NAME);
private static final Pattern DAY_PATTERN = Pattern.compile("day" + FUNCTION_NAME);
private static final Pattern HOUR_PATTERN = Pattern.compile("hour" + FUNCTION_NAME);
private static final Pattern BUCKET_PATTERN = Pattern.compile("bucket" + FUNCTION_NAME_INT);
private static final Pattern TRUNCATE_PATTERN = Pattern.compile("truncate" + FUNCTION_NAME_INT);
private static final Pattern ICEBERG_BUCKET_PATTERN = Pattern.compile("bucket\\[(\\d+)]");
private static final Pattern ICEBERG_TRUNCATE_PATTERN = Pattern.compile("truncate\\[(\\d+)]");
private PartitionFields() {}
public static PartitionSpec parsePartitionFields(Schema schema, List<String> fields)
{
PartitionSpec.Builder builder = PartitionSpec.builderFor(schema);
for (String field : fields) {
parsePartitionField(builder, field);
}
return builder.build();
}
public static void parsePartitionField(PartitionSpec.Builder builder, String field)
{
@SuppressWarnings("PointlessBooleanExpression")
boolean matched = false ||
tryMatch(field, IDENTITY_PATTERN, match -> builder.identity(match.group())) ||
tryMatch(field, YEAR_PATTERN, match -> builder.year(match.group(1))) ||
tryMatch(field, MONTH_PATTERN, match -> builder.month(match.group(1))) ||
tryMatch(field, DAY_PATTERN, match -> builder.day(match.group(1))) ||
tryMatch(field, HOUR_PATTERN, match -> builder.hour(match.group(1))) ||
tryMatch(field, BUCKET_PATTERN, match -> builder.bucket(match.group(1), parseInt(match.group(2)))) ||
tryMatch(field, TRUNCATE_PATTERN, match -> builder.truncate(match.group(1), parseInt(match.group(2))));
if (!matched) {
throw new IllegalArgumentException("Invalid partition field declaration: " + field);
}
}
private static boolean tryMatch(CharSequence value, Pattern pattern, Consumer<MatchResult> match)
{
Matcher matcher = pattern.matcher(value);
if (matcher.matches()) {
match.accept(matcher.toMatchResult());
return true;
}
return false;
}
public static List<String> toPartitionFields(PartitionSpec spec)
{
return spec.fields().stream()
.map(field -> toPartitionField(spec, field))
.collect(toImmutableList());
}
private static String toPartitionField(PartitionSpec spec, PartitionField field)
{
String name = spec.schema().findColumnName(field.sourceId());
String transform = field.transform().toString();
switch (transform) {
case "identity":
return name;
case "year":
case "month":
case "day":
case "hour":
return format("%s(%s)", transform, name);
}
Matcher matcher = ICEBERG_BUCKET_PATTERN.matcher(transform);
if (matcher.matches()) {
return format("bucket(%s, %s)", name, matcher.group(1));
}
matcher = ICEBERG_TRUNCATE_PATTERN.matcher(transform);
if (matcher.matches()) {
return format("truncate(%s, %s)", name, matcher.group(1));
}
throw new UnsupportedOperationException("Unsupported partition transform: " + field);
}
}
| mvp/presto | presto-iceberg/src/main/java/com/facebook/presto/iceberg/PartitionFields.java | Java | apache-2.0 | 4,926 |
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.play.internal.run;
import org.gradle.api.tasks.compile.BaseForkOptions;
import java.io.File;
public interface PlayRunSpec {
BaseForkOptions getForkOptions();
Iterable<File> getClasspath();
Iterable<File> getChangingClasspath();
File getApplicationJar();
File getAssetsJar();
Iterable<File> getAssetsDirs();
File getProjectPath();
int getHttpPort();
}
| gstevey/gradle | subprojects/platform-play/src/main/java/org/gradle/play/internal/run/PlayRunSpec.java | Java | apache-2.0 | 1,031 |
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
from boxsdk import Client
from boxsdk.exception import BoxAPIException
from boxsdk.object.collaboration import CollaborationRole
from auth import authenticate
def run_user_example(client):
# 'me' is a handy value to get info on the current authenticated user.
me = client.user(user_id='me').get(fields=['login'])
print('The email of the user is: {0}'.format(me['login']))
def run_folder_examples(client):
root_folder = client.folder(folder_id='0').get()
print('The root folder is owned by: {0}'.format(root_folder.owned_by['login']))
items = root_folder.get_items(limit=100, offset=0)
print('This is the first 100 items in the root folder:')
for item in items:
print(" " + item.name)
def run_collab_examples(client):
root_folder = client.folder(folder_id='0')
collab_folder = root_folder.create_subfolder('collab folder')
try:
print('Folder {0} created'.format(collab_folder.get()['name']))
collaboration = collab_folder.add_collaborator('someone@example.com', CollaborationRole.VIEWER)
print('Created a collaboration')
try:
modified_collaboration = collaboration.update_info(role=CollaborationRole.EDITOR)
print('Modified a collaboration: {0}'.format(modified_collaboration.role))
finally:
collaboration.delete()
print('Deleted a collaboration')
finally:
# Clean up
print('Delete folder collab folder succeeded: {0}'.format(collab_folder.delete()))
def rename_folder(client):
root_folder = client.folder(folder_id='0')
foo = root_folder.create_subfolder('foo')
try:
print('Folder {0} created'.format(foo.get()['name']))
bar = foo.rename('bar')
print('Renamed to {0}'.format(bar.get()['name']))
finally:
print('Delete folder bar succeeded: {0}'.format(foo.delete()))
def get_folder_shared_link(client):
root_folder = client.folder(folder_id='0')
collab_folder = root_folder.create_subfolder('shared link folder')
try:
print('Folder {0} created'.format(collab_folder.get().name))
shared_link = collab_folder.get_shared_link()
print('Got shared link:' + shared_link)
finally:
print('Delete folder collab folder succeeded: {0}'.format(collab_folder.delete()))
def upload_file(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='i-am-a-file.txt')
try:
print('{0} uploaded: '.format(a_file.get()['name']))
finally:
print('Delete i-am-a-file.txt succeeded: {0}'.format(a_file.delete()))
def upload_accelerator(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='i-am-a-file.txt', upload_using_accelerator=True)
try:
print('{0} uploaded via Accelerator: '.format(a_file.get()['name']))
file_v2_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file_v2.txt')
a_file = a_file.update_contents(file_v2_path, upload_using_accelerator=True)
print('{0} updated via Accelerator: '.format(a_file.get()['name']))
finally:
print('Delete i-am-a-file.txt succeeded: {0}'.format(a_file.delete()))
def rename_file(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
foo = root_folder.upload(file_path, file_name='foo.txt')
try:
print('{0} uploaded '.format(foo.get()['name']))
bar = foo.rename('bar.txt')
print('Rename succeeded: {0}'.format(bool(bar)))
finally:
foo.delete()
def update_file(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
file_v1 = root_folder.upload(file_path, file_name='file_v1.txt')
try:
# print 'File content after upload: {}'.format(file_v1.content())
file_v2_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file_v2.txt')
file_v2 = file_v1.update_contents(file_v2_path)
# print 'File content after update: {}'.format(file_v2.content())
finally:
file_v1.delete()
def search_files(client):
search_results = client.search(
'i-am-a-file.txt',
limit=2,
offset=0,
ancestor_folders=[client.folder(folder_id='0')],
file_extensions=['txt'],
)
for item in search_results:
item_with_name = item.get(fields=['name'])
print('matching item: ' + item_with_name.id)
else:
print('no matching items')
def copy_item(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='a file.txt')
try:
subfolder1 = root_folder.create_subfolder('copy_sub')
try:
a_file.copy(subfolder1)
print(subfolder1.get_items(limit=10, offset=0))
subfolder2 = root_folder.create_subfolder('copy_sub2')
try:
subfolder1.copy(subfolder2)
print(subfolder2.get_items(limit=10, offset=0))
finally:
subfolder2.delete()
finally:
subfolder1.delete()
finally:
a_file.delete()
def move_item(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
a_file = root_folder.upload(file_path, file_name='a file.txt')
try:
subfolder1 = root_folder.create_subfolder('move_sub')
try:
a_file.move(subfolder1)
print(subfolder1.get_items(limit=10, offset=0))
subfolder2 = root_folder.create_subfolder('move_sub2')
try:
subfolder1.move(subfolder2)
print(subfolder2.get_items(limit=10, offset=0))
finally:
subfolder2.delete()
finally:
try:
subfolder1.delete()
except BoxAPIException:
pass
finally:
try:
a_file.delete()
except BoxAPIException:
pass
def get_events(client):
print(client.events().get_events(limit=100, stream_position='now'))
def get_latest_stream_position(client):
print(client.events().get_latest_stream_position())
def long_poll(client):
print(client.events().long_poll())
def _delete_leftover_group(existing_groups, group_name):
"""
delete group if it already exists
"""
existing_group = next((g for g in existing_groups if g.name == group_name), None)
if existing_group:
existing_group.delete()
def run_groups_example(client):
"""
Shows how to interact with 'Groups' in the Box API. How to:
- Get info about all the Groups to which the current user belongs
- Create a Group
- Rename a Group
- Add a member to the group
- Remove a member from a group
- Delete a Group
"""
try:
# First delete group if it already exists
original_groups = client.groups()
_delete_leftover_group(original_groups, 'box_sdk_demo_group')
_delete_leftover_group(original_groups, 'renamed_box_sdk_demo_group')
new_group = client.create_group('box_sdk_demo_group')
except BoxAPIException as ex:
if ex.status != 403:
raise
print('The authenticated user does not have permissions to manage groups. Skipping the test of this demo.')
return
print('New group:', new_group.name, new_group.id)
new_group = new_group.update_info({'name': 'renamed_box_sdk_demo_group'})
print("Group's new name:", new_group.name)
me_dict = client.user().get(fields=['login'])
me = client.user(user_id=me_dict['id'])
group_membership = new_group.add_member(me, 'member')
members = list(new_group.membership())
print('The group has a membership of: ', len(members))
print('The id of that membership: ', group_membership.object_id)
group_membership.delete()
print('After deleting that membership, the group has a membership of: ', len(list(new_group.membership())))
new_group.delete()
groups_after_deleting_demo = client.groups()
has_been_deleted = not any(g.name == 'renamed_box_sdk_demo_group' for g in groups_after_deleting_demo)
print('The new group has been deleted: ', has_been_deleted)
def run_metadata_example(client):
root_folder = client.folder(folder_id='0')
file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt')
foo = root_folder.upload(file_path, file_name='foo.txt')
print('{0} uploaded '.format(foo.get()['name']))
try:
metadata = foo.metadata()
metadata.create({'foo': 'bar'})
print('Created metadata: {0}'.format(metadata.get()))
update = metadata.start_update()
update.update('/foo', 'baz', 'bar')
print('Updated metadata: {0}'.format(metadata.update(update)))
finally:
foo.delete()
def run_examples(oauth):
client = Client(oauth)
run_user_example(client)
run_folder_examples(client)
run_collab_examples(client)
rename_folder(client)
get_folder_shared_link(client)
upload_file(client)
rename_file(client)
update_file(client)
search_files(client)
copy_item(client)
move_item(client)
get_events(client)
get_latest_stream_position(client)
# long_poll(client)
# Enterprise accounts only
run_groups_example(client)
run_metadata_example(client)
# Premium Apps only
upload_accelerator(client)
def main():
# Please notice that you need to put in your client id and client secret in demo/auth.py in order to make this work.
oauth = authenticate()
run_examples(oauth)
os._exit(0)
if __name__ == '__main__':
main()
| lkabongoVC/box-python-sdk | demo/example.py | Python | apache-2.0 | 10,222 |
class Foo<R> {
public interface Factory<U> {
U make();
}
interface ASink<R, K extends ASink<R, K>> {
public void combine(K other);
}
static <R, S extends ASink<R, S>> R reduce(Factory<S> factory) {
return null;
}
public void foo() {
reduce(Moo::new);
reduce<error descr="'reduce(Foo.Factory<Foo.ASink>)' in 'Foo' cannot be applied to '(<method reference>)'">(AMoo::new)</error>;
reduce(AAMoo::new);
reduce(AAAMoo::new);
}
private class Moo implements ASink<R, Moo> {
@Override
public void combine(Moo other) {
}
}
private class AMoo {
}
private class AAMoo implements ASink<AAMoo, AAMoo> {
@Override
public void combine(AAMoo other) {
}
}
private class AAAMoo implements ASink<R, AAAMoo> {
private AAAMoo() {
}
@Override
public void combine(AAAMoo other) {
}
}
} | android-ia/platform_tools_idea | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/lambda/methodRef/ConstructorAssignability.java | Java | apache-2.0 | 978 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.processing.loading.converter;
import org.apache.carbondata.core.datastore.row.CarbonRow;
import org.apache.carbondata.processing.loading.exception.CarbonDataLoadingException;
/**
* This interface converts/transforms the column field.
*/
public interface FieldConverter {
/**
* It converts the column field and updates the data in same location/index in row.
* @param row
* @return the status whether it could be loaded or not, usually when record is added
* to bad records then it returns false.
* @throws CarbonDataLoadingException
*/
void convert(CarbonRow row, BadRecordLogHolder logHolder) throws CarbonDataLoadingException;
/**
* It convert the literal value to carbon internal value
*/
Object convert(Object value, BadRecordLogHolder logHolder) throws RuntimeException;
/**
* This method clears all the dictionary caches being acquired.
*/
void clear();
}
| jatin9896/incubator-carbondata | processing/src/main/java/org/apache/carbondata/processing/loading/converter/FieldConverter.java | Java | apache-2.0 | 1,743 |
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.transport.wsr;
import static java.util.Collections.singleton;
import java.util.Collection;
import org.kaazing.gateway.transport.dispatch.ProtocolDispatcher;
class RtmpProtocolDispatcher implements ProtocolDispatcher {
private static final String RTMP_PROTOCOL = "rtmp/1.0";
private static final Collection<byte[]> RTMP_DISCRIMINATORS = singleton(new byte[] { 0x03 });
@Override
public int compareTo(ProtocolDispatcher pd) {
return protocolDispatchComparator.compare(this, pd);
}
@Override
public String getProtocolName() {
return RTMP_PROTOCOL;
}
@Override
public Collection<byte[]> getDiscriminators() {
return RTMP_DISCRIMINATORS;
}
}
| EArdeleanu/gateway | transport/wsr/src/main/java/org/kaazing/gateway/transport/wsr/RtmpProtocolDispatcher.java | Java | apache-2.0 | 1,627 |
"""
GeoJSON example using addItem
Python 2/3
ArcREST version 3.5.0
"""
from __future__ import print_function
import arcrest
if __name__ == "__main__":
username = ""
password = ""
geojsonFile = r""
sh = arcrest.AGOLTokenSecurityHandler(username, password)
admin = arcrest.manageorg.Administration(securityHandler=sh)
user = admin.content.users.user()
ip = arcrest.manageorg.ItemParameter()
ip.title = "MyGeoJSONTestFile"
ip.type = "GeoJson"
ip.tags = "Geo1,Geo2"
ip.description = "Publishing a geojson file"
addedItem = user.addItem(itemParameters=ip, filePath=geojsonFile)
itemId = addedItem.id
pp = arcrest.manageorg.PublishGeoJSONParameter()
pp.name = "Geojsonrocks"
pp.hasStaticData = True
print( user.publishItem(fileType="geojson", publishParameters=pp, itemId=itemId, wait=True)) | Esri/ArcREST | samples/publishingGeoJSON.py | Python | apache-2.0 | 864 |
/*
* Copyright 2017 Amadeus s.a.s.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
Aria.classDefinition({
$classpath : "test.aria.widgets.container.tab.focusTab.FocusTabTestCase",
$extends : "aria.jsunit.TemplateTestCase",
$prototype : {
runTemplateTest : function () {
this.templateCtxt._tpl.$focus("summaryTab");
var domElt = this.getElementById("summaryTab");
var anchor = domElt.getElementsByTagName("a")[0];
this.waitForDomEltFocus(anchor, function () {
this.templateCtxt._tpl.$focus("mapTab");
var span = this.getElementById("mapTab");
this.waitForDomEltFocus(span, this.end());
});
}
}
});
| fbasso/ariatemplates | test/aria/widgets/container/tab/focusTab/FocusTabTestCase.js | JavaScript | apache-2.0 | 1,249 |
//
// immer: immutable data structures for C++
// Copyright (C) 2016, 2017, 2018 Juan Pedro Bolivar Puente
//
// This software is distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE or copy at http://boost.org/LICENSE_1_0.txt
//
#include <immer/set.hpp>
template <typename T,
typename Hash = std::hash<T>,
typename Eq = std::equal_to<T>>
using test_set_t = immer::set<T, Hash, Eq, immer::default_memory_policy, 3u>;
#define SET_T test_set_t
#include "generic.ipp"
| wiltonlazary/arangodb | 3rdParty/immer/v0.7.0/test/set/B3.cpp | C++ | apache-2.0 | 529 |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.rules.macros;
import static org.hamcrest.MatcherAssert.assertThat;
import com.facebook.buck.core.cell.CellPathResolver;
import com.facebook.buck.core.cell.TestCellPathResolver;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.CompositeArg;
import com.facebook.buck.rules.args.SanitizedArg;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.rules.args.WriteToFileArg;
import com.facebook.buck.shell.Genrule;
import com.facebook.buck.shell.GenruleBuilder;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import java.util.Optional;
import org.hamcrest.Matchers;
import org.junit.Test;
public class StringWithMacrosConverterTest {
private static final BuildTarget TARGET = BuildTargetFactory.newInstance("//:rule");
private static final CellPathResolver CELL_ROOTS =
TestCellPathResolver.get(new FakeProjectFilesystem());
private static final ImmutableList<MacroExpander<? extends Macro, ?>> MACRO_EXPANDERS =
ImmutableList.of(LocationMacroExpander.INSTANCE);
@Test
public void noMacros() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET, CELL_ROOTS.getCellNameResolver(), graphBuilder, MACRO_EXPANDERS);
assertThat(
converter.convert(StringWithMacrosUtils.format("something")),
Matchers.equalTo(StringArg.of("something")));
}
@Test
public void macro() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
Genrule genrule =
GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:dep"))
.setOut("out")
.build(graphBuilder);
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET, CELL_ROOTS.getCellNameResolver(), graphBuilder, MACRO_EXPANDERS);
assertThat(
converter.convert(
StringWithMacrosUtils.format("%s", LocationMacro.of(genrule.getBuildTarget()))),
Matchers.equalTo(
SourcePathArg.of(Preconditions.checkNotNull(genrule.getSourcePathToOutput()))));
}
@Test
public void macroAndString() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
Genrule genrule =
GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:dep"))
.setOut("out")
.build(graphBuilder);
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET, CELL_ROOTS.getCellNameResolver(), graphBuilder, MACRO_EXPANDERS);
assertThat(
converter.convert(
StringWithMacrosUtils.format("--foo=%s", LocationMacro.of(genrule.getBuildTarget()))),
Matchers.equalTo(
CompositeArg.of(
ImmutableList.of(
StringArg.of("--foo="),
SourcePathArg.of(
Preconditions.checkNotNull(genrule.getSourcePathToOutput()))))));
}
@Test
public void sanitization() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET,
CELL_ROOTS.getCellNameResolver(),
graphBuilder,
MACRO_EXPANDERS,
Optional.of(s -> "something else"));
assertThat(
converter.convert(StringWithMacrosUtils.format("something")),
Matchers.equalTo(SanitizedArg.create(s -> "something else", "something")));
}
@Test
public void outputToFileMacro() {
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
Genrule genrule =
GenruleBuilder.newGenruleBuilder(BuildTargetFactory.newInstance("//:dep"))
.setOut("out")
.build(graphBuilder);
StringWithMacrosConverter converter =
StringWithMacrosConverter.of(
TARGET, CELL_ROOTS.getCellNameResolver(), graphBuilder, MACRO_EXPANDERS);
Arg result =
converter.convert(
StringWithMacrosUtils.format(
"%s", MacroContainer.of(LocationMacro.of(genrule.getBuildTarget()), true)));
assertThat(result, Matchers.instanceOf(WriteToFileArg.class));
}
}
| facebook/buck | test/com/facebook/buck/rules/macros/StringWithMacrosConverterTest.java | Java | apache-2.0 | 5,254 |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.common.BuildableSupport;
import com.facebook.buck.core.util.immutables.BuckStyleValueWithBuilder;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import java.util.Optional;
/** The components that get contributed to a top-level run of the C++ preprocessor. */
@BuckStyleValueWithBuilder
public abstract class CxxPreprocessorInput {
private static final CxxPreprocessorInput INSTANCE =
ImmutableCxxPreprocessorInput.builder().build();
public abstract Multimap<CxxSource.Type, Arg> getPreprocessorFlags();
public abstract ImmutableList<CxxHeaders> getIncludes();
// Framework paths.
public abstract ImmutableSet<FrameworkPath> getFrameworks();
// The build rules which produce headers found in the includes below.
protected abstract ImmutableSet<BuildTarget> getRules();
public Iterable<BuildRule> getDeps(BuildRuleResolver ruleResolver) {
ImmutableList.Builder<BuildRule> builder = ImmutableList.builder();
for (CxxHeaders cxxHeaders : getIncludes()) {
cxxHeaders.getDeps(ruleResolver).forEachOrdered(builder::add);
}
builder.addAll(ruleResolver.getAllRules(getRules()));
for (FrameworkPath frameworkPath : getFrameworks()) {
if (frameworkPath.getSourcePath().isPresent()) {
Optional<BuildRule> frameworkRule =
ruleResolver.getRule(frameworkPath.getSourcePath().get());
if (frameworkRule.isPresent()) {
builder.add(frameworkRule.get());
}
}
}
for (Arg arg : getPreprocessorFlags().values()) {
builder.addAll(BuildableSupport.getDepsCollection(arg, ruleResolver));
}
return builder.build();
}
public static CxxPreprocessorInput concat(Iterable<CxxPreprocessorInput> inputs) {
CxxPreprocessorInput.Builder builder = CxxPreprocessorInput.builder();
for (CxxPreprocessorInput input : inputs) {
builder.putAllPreprocessorFlags(input.getPreprocessorFlags());
builder.addAllIncludes(input.getIncludes());
builder.addAllFrameworks(input.getFrameworks());
builder.addAllRules(input.getRules());
}
return builder.build();
}
public static CxxPreprocessorInput of() {
return INSTANCE;
}
public static Builder builder() {
return new Builder();
}
public static class Builder extends ImmutableCxxPreprocessorInput.Builder {
@Override
public CxxPreprocessorInput build() {
CxxPreprocessorInput cxxPreprocessorInput = super.build();
if (cxxPreprocessorInput.equals(INSTANCE)) {
return INSTANCE;
}
return cxxPreprocessorInput;
}
}
}
| facebook/buck | src/com/facebook/buck/cxx/CxxPreprocessorInput.java | Java | apache-2.0 | 3,593 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.embedding;
import com.intellij.lang.ASTNode;
import com.intellij.lang.LighterLazyParseableNode;
import com.intellij.lang.ParserDefinition;
import com.intellij.lang.PsiBuilder;
import com.intellij.lang.impl.DelegateMarker;
import com.intellij.lang.impl.PsiBuilderAdapter;
import com.intellij.lang.impl.PsiBuilderImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.TokenType;
import com.intellij.psi.tree.IElementType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
* A delegate PsiBuilder that hides or substitutes some tokens (namely, the ones provided by {@link MasqueradingLexer})
* from a parser, however, _still inserting_ them into a production tree in their initial appearance.
* @see MasqueradingLexer
*/
public class MasqueradingPsiBuilderAdapter extends PsiBuilderAdapter {
private final static Logger LOG = Logger.getInstance(MasqueradingPsiBuilderAdapter.class);
private List<MyShiftedToken> myShrunkSequence;
private CharSequence myShrunkCharSequence;
private int myLexPosition;
private final PsiBuilderImpl myBuilderDelegate;
private final MasqueradingLexer myLexer;
public MasqueradingPsiBuilderAdapter(@NotNull final Project project,
@NotNull final ParserDefinition parserDefinition,
@NotNull final MasqueradingLexer lexer,
@NotNull final ASTNode chameleon,
@NotNull final CharSequence text) {
this(new PsiBuilderImpl(project, parserDefinition, lexer, chameleon, text));
}
public MasqueradingPsiBuilderAdapter(@NotNull final Project project,
@NotNull final ParserDefinition parserDefinition,
@NotNull final MasqueradingLexer lexer,
@NotNull final LighterLazyParseableNode chameleon,
@NotNull final CharSequence text) {
this(new PsiBuilderImpl(project, parserDefinition, lexer, chameleon, text));
}
private MasqueradingPsiBuilderAdapter(PsiBuilderImpl builder) {
super(builder);
LOG.assertTrue(myDelegate instanceof PsiBuilderImpl);
myBuilderDelegate = ((PsiBuilderImpl)myDelegate);
LOG.assertTrue(myBuilderDelegate.getLexer() instanceof MasqueradingLexer);
myLexer = ((MasqueradingLexer)myBuilderDelegate.getLexer());
initShrunkSequence();
}
@Override
public CharSequence getOriginalText() {
return myShrunkCharSequence;
}
@Override
public void advanceLexer() {
myLexPosition++;
skipWhitespace();
synchronizePositions(false);
}
/**
* @param exact if true then positions should be equal;
* else delegate should be behind, not including exactly all foreign (skipped) or whitespace tokens
*/
private void synchronizePositions(boolean exact) {
final PsiBuilder delegate = getDelegate();
if (myLexPosition >= myShrunkSequence.size() || delegate.eof()) {
myLexPosition = myShrunkSequence.size();
while (!delegate.eof()) {
delegate.advanceLexer();
}
return;
}
if (delegate.getCurrentOffset() > myShrunkSequence.get(myLexPosition).realStart) {
LOG.error("delegate is ahead of my builder!");
return;
}
final int keepUpPosition = getKeepUpPosition(exact);
while (!delegate.eof()) {
final int delegatePosition = delegate.getCurrentOffset();
if (delegatePosition < keepUpPosition) {
delegate.advanceLexer();
}
else {
break;
}
}
}
private int getKeepUpPosition(boolean exact) {
if (exact) {
return myShrunkSequence.get(myLexPosition).realStart;
}
int lexPosition = myLexPosition;
while (lexPosition > 0 && (myShrunkSequence.get(lexPosition - 1).shrunkStart == myShrunkSequence.get(lexPosition).shrunkStart
|| isWhiteSpaceOnPos(lexPosition - 1))) {
lexPosition--;
}
if (lexPosition == 0) {
return myShrunkSequence.get(lexPosition).realStart;
}
return myShrunkSequence.get(lexPosition - 1).realStart + 1;
}
@Override
public IElementType lookAhead(int steps) {
if (eof()) { // ensure we skip over whitespace if it's needed
return null;
}
int cur = myLexPosition;
while (steps > 0) {
++cur;
while (cur < myShrunkSequence.size() && isWhiteSpaceOnPos(cur)) {
cur++;
}
steps--;
}
return cur < myShrunkSequence.size() ? myShrunkSequence.get(cur).elementType : null;
}
@Override
public IElementType rawLookup(int steps) {
int cur = myLexPosition + steps;
return cur >= 0 && cur < myShrunkSequence.size() ? myShrunkSequence.get(cur).elementType : null;
}
@Override
public int rawTokenTypeStart(int steps) {
int cur = myLexPosition + steps;
if (cur < 0) return -1;
if (cur >= myShrunkSequence.size()) return getOriginalText().length();
return myShrunkSequence.get(cur).shrunkStart;
}
@Override
public int rawTokenIndex() {
return myLexPosition;
}
@Override
public int getCurrentOffset() {
return myLexPosition < myShrunkSequence.size() ? myShrunkSequence.get(myLexPosition).shrunkStart : myShrunkCharSequence.length();
}
@Nullable
@Override
public IElementType getTokenType() {
if (allIsEmpty()) {
return TokenType.DUMMY_HOLDER;
}
skipWhitespace();
return myLexPosition < myShrunkSequence.size() ? myShrunkSequence.get(myLexPosition).elementType : null;
}
@Nullable
@Override
public String getTokenText() {
if (allIsEmpty()) {
return getDelegate().getOriginalText().toString();
}
skipWhitespace();
if (myLexPosition >= myShrunkSequence.size()) {
return null;
}
final MyShiftedToken token = myShrunkSequence.get(myLexPosition);
return myShrunkCharSequence.subSequence(token.shrunkStart, token.shrunkEnd).toString();
}
@Override
public boolean eof() {
boolean isEof = myLexPosition >= myShrunkSequence.size();
if (!isEof) {
return false;
}
synchronizePositions(true);
return true;
}
@Override
public Marker mark() {
// In the case of the topmost node all should be inserted
if (myLexPosition != 0) {
synchronizePositions(true);
}
final Marker mark = super.mark();
return new MyMarker(mark, myLexPosition);
}
private boolean allIsEmpty() {
return myShrunkSequence.isEmpty() && getDelegate().getOriginalText().length() != 0;
}
private void skipWhitespace() {
while (myLexPosition < myShrunkSequence.size() && isWhiteSpaceOnPos(myLexPosition)) {
myLexPosition++;
}
}
private boolean isWhiteSpaceOnPos(int pos) {
return myBuilderDelegate.whitespaceOrComment(myShrunkSequence.get(pos).elementType);
}
protected void initShrunkSequence() {
initTokenListAndCharSequence(myLexer);
myLexPosition = 0;
}
private void initTokenListAndCharSequence(MasqueradingLexer lexer) {
lexer.start(getDelegate().getOriginalText());
myShrunkSequence = new ArrayList<MyShiftedToken>();
StringBuilder charSequenceBuilder = new StringBuilder();
int realPos = 0;
int shrunkPos = 0;
while (lexer.getTokenType() != null) {
final IElementType masqueTokenType = lexer.getMasqueTokenType();
final String masqueTokenText = lexer.getMasqueTokenText();
final int realLength = lexer.getTokenEnd() - lexer.getTokenStart();
if (masqueTokenType != null) {
assert masqueTokenText != null;
final int masqueLength = masqueTokenText.length();
myShrunkSequence.add(new MyShiftedToken(masqueTokenType,
realPos, realPos + realLength,
shrunkPos, shrunkPos + masqueLength));
charSequenceBuilder.append(masqueTokenText);
shrunkPos += masqueLength;
}
realPos += realLength;
lexer.advance();
}
myShrunkCharSequence = charSequenceBuilder.toString();
}
@SuppressWarnings({"StringConcatenationInsideStringBufferAppend", "UnusedDeclaration"})
private void logPos() {
StringBuilder sb = new StringBuilder();
sb.append("\nmyLexPosition=" + myLexPosition + "/" + myShrunkSequence.size());
if (myLexPosition < myShrunkSequence.size()) {
final MyShiftedToken token = myShrunkSequence.get(myLexPosition);
sb.append("\nshrunk:" + token.shrunkStart + "," + token.shrunkEnd);
sb.append("\nreal:" + token.realStart + "," + token.realEnd);
sb.append("\nTT:" + getTokenText());
}
sb.append("\ndelegate:");
sb.append("eof=" + myDelegate.eof());
if (!myDelegate.eof()) {
//noinspection ConstantConditions
sb.append("\nposition:" + myDelegate.getCurrentOffset() + "," + (myDelegate.getCurrentOffset() + myDelegate.getTokenText().length()));
sb.append("\nTT:" + myDelegate.getTokenText());
}
LOG.info(sb.toString());
}
private static class MyShiftedToken {
public final IElementType elementType;
public final int realStart;
public final int realEnd;
public final int shrunkStart;
public final int shrunkEnd;
public MyShiftedToken(IElementType elementType, int realStart, int realEnd, int shrunkStart, int shrunkEnd) {
this.elementType = elementType;
this.realStart = realStart;
this.realEnd = realEnd;
this.shrunkStart = shrunkStart;
this.shrunkEnd = shrunkEnd;
}
@Override
public String toString() {
return "MSTk: [" + realStart + ", " + realEnd + "] -> [" + shrunkStart + ", " + shrunkEnd + "]: " + elementType.toString();
}
}
private class MyMarker extends DelegateMarker {
private final int myBuilderPosition;
public MyMarker(Marker delegate, int builderPosition) {
super(delegate);
myBuilderPosition = builderPosition;
}
@Override
public void rollbackTo() {
super.rollbackTo();
myLexPosition = myBuilderPosition;
}
@Override
public void doneBefore(IElementType type, Marker before) {
super.doneBefore(type, getDelegateOrThis(before));
}
@Override
public void doneBefore(IElementType type, Marker before, String errorMessage) {
super.doneBefore(type, getDelegateOrThis(before), errorMessage);
}
@NotNull
private Marker getDelegateOrThis(@NotNull Marker marker) {
if (marker instanceof DelegateMarker) {
return ((DelegateMarker)marker).getDelegate();
}
else {
return marker;
}
}
}
}
| akosyakov/intellij-community | xml/xml-psi-impl/src/com/intellij/embedding/MasqueradingPsiBuilderAdapter.java | Java | apache-2.0 | 11,304 |
package storage
import (
"fmt"
)
// ErrOldVersion is returned when a newer version of TUF metadata is already available
type ErrOldVersion struct{}
// ErrOldVersion is returned when a newer version of TUF metadata is already available
func (err ErrOldVersion) Error() string {
return fmt.Sprintf("Error updating metadata. A newer version is already available")
}
// ErrNotFound is returned when TUF metadata isn't found for a specific record
type ErrNotFound struct{}
// Error implements error
func (err ErrNotFound) Error() string {
return fmt.Sprintf("No record found")
}
// ErrKeyExists is returned when a key already exists
type ErrKeyExists struct {
gun string
role string
}
// ErrKeyExists is returned when a key already exists
func (err ErrKeyExists) Error() string {
return fmt.Sprintf("Error, timestamp key already exists for %s:%s", err.gun, err.role)
}
// ErrNoKey is returned when no timestamp key is found
type ErrNoKey struct {
gun string
}
// ErrNoKey is returned when no timestamp key is found
func (err ErrNoKey) Error() string {
return fmt.Sprintf("Error, no timestamp key found for %s", err.gun)
}
// ErrBadQuery is used when the parameters provided cannot be appropriately
// coerced.
type ErrBadQuery struct {
msg string
}
func (err ErrBadQuery) Error() string {
return fmt.Sprintf("did not recognize parameters: %s", err.msg)
}
| jfrazelle/notary | server/storage/errors.go | GO | apache-2.0 | 1,372 |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.nio.serialization;
/**
* This interface adds the possibility for the class to act as if it is another class when being deserialized using
* DataSerializable
*/
public interface TypedDataSerializable extends DataSerializable {
/**
*
* @return The class type that this serializable wants to act as.
*/
Class getClassType();
}
| emre-aydin/hazelcast | hazelcast/src/main/java/com/hazelcast/nio/serialization/TypedDataSerializable.java | Java | apache-2.0 | 1,000 |
/**
* View attribute injection library for Android which generates the obtainStyledAttributes() and
* TypedArray boilerplate code for you at compile time.
* <p>
* No more handing to deal with context.obtainStyledAttributes(...) or manually retrieving values
* from the resulting {@link android.content.res.TypedArray TypedArray} instance. Just annotate your
* field or method with {@link io.sweers.barber.StyledAttr @StyledAttr}.
*/
package io.sweers.barber; | lord19871207/barber | api/src/main/java/io/sweers/barber/package-info.java | Java | apache-2.0 | 465 |
/*******************************************************************************
* Copyright 2015 Ivan Shubin http://galenframework.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.galenframework.components.report;
import static java.lang.String.format;
import java.io.PrintStream;
import java.io.PrintWriter;
public class FakeException extends RuntimeException {
/**
*
*/
private static final long serialVersionUID = -4840622707009032748L;
public FakeException(String string) {
super(string);
}
@Override
public StackTraceElement[] getStackTrace() {
return new StackTraceElement[]{
new StackTraceElement("net.mindengine.someclass.SomeClass", "method1", "SomeClass.java", 4),
new StackTraceElement("net.mindengine.someclass.SomeClass2", "method2", "SomeClass2.java", 5),
new StackTraceElement("net.mindengine.someclass.SomeClass3", "method3", "SomeClass3.java", 6)
};
}
@Override
public void printStackTrace(PrintStream ps) {
ps.println(getClass().getName() + ": " + getMessage());
for (StackTraceElement element : getStackTrace()) {
ps.println(format("\tat %s.%s(%s:%d)", element.getClassName(), element.getMethodName(), element.getFileName(), element.getLineNumber()));
}
}
@Override
public void printStackTrace(PrintWriter s) {
s.println(getClass().getName() + ": " + getMessage());
for (StackTraceElement element : getStackTrace()) {
s.println(format("\tat %s.%s(%s:%d)", element.getClassName(), element.getMethodName(), element.getFileName(), element.getLineNumber()));
}
}
}
| thhiep/galen | galen-core/src/test/java/com/galenframework/components/report/FakeException.java | Java | apache-2.0 | 2,298 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections4.comparators;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
/**
* Tests for ComparableComparator.
*
* @version $Id$
*
*/
@SuppressWarnings("boxing")
public class ComparableComparatorTest extends AbstractComparatorTest<Integer> {
public ComparableComparatorTest(final String testName) {
super(testName);
}
@Override
public Comparator<Integer> makeObject() {
return new ComparableComparator<Integer>();
}
@Override
public List<Integer> getComparableObjectsOrdered() {
final List<Integer> list = new LinkedList<Integer>();
list.add(1);
list.add(2);
list.add(3);
list.add(4);
list.add(5);
return list;
}
@Override
public String getCompatibilityVersion() {
return "4";
}
// public void testCreate() throws Exception {
// writeExternalFormToDisk((java.io.Serializable) makeObject(), "src/test/resources/data/test/ComparableComparator.version4.obj");
// }
}
| MuShiiii/commons-collections | src/test/java/org/apache/commons/collections4/comparators/ComparableComparatorTest.java | Java | apache-2.0 | 1,878 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.slm;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.XPackField;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction;
import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction;
public class SLMInfoTransportAction extends XPackInfoFeatureTransportAction {
private final boolean enabled;
private final XPackLicenseState licenseState;
@Inject
public SLMInfoTransportAction(TransportService transportService, ActionFilters actionFilters,
Settings settings, XPackLicenseState licenseState) {
super(XPackInfoFeatureAction.SNAPSHOT_LIFECYCLE.name(), transportService, actionFilters);
this.enabled = XPackSettings.SNAPSHOT_LIFECYCLE_ENABLED.get(settings);
this.licenseState = licenseState;
}
@Override
public String name() {
return XPackField.SNAPSHOT_LIFECYCLE;
}
@Override
public boolean available() {
return licenseState.isIndexLifecycleAllowed();
}
@Override
public boolean enabled() {
return enabled;
}
}
| HonzaKral/elasticsearch | x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/SLMInfoTransportAction.java | Java | apache-2.0 | 1,649 |
/*
* Copyright 2016 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.javascript.jscomp.PolymerBehaviorExtractor.BehaviorDefinition;
import com.google.javascript.rhino.Node;
/**
* Unit tests for {@link PolymerBehaviorExtractor}.
*/
public class PolymerBehaviorExtractorTest extends CompilerTypeTestCase {
private PolymerBehaviorExtractor extractor;
private Node behaviorArray;
@Override
protected void setUp() {
super.setUp();
behaviorArray = null;
}
public void testArrayBehavior() {
parseAndInitializeExtractor(
LINE_JOINER.join(
"/** @polymerBehavior */",
"var FunBehavior = {",
" properties: {",
" isFun: Boolean",
" },",
" /** @param {string} funAmount */",
" doSomethingFun: function(funAmount) { alert('Something ' + funAmount + ' fun!'); },",
" /** @override */",
" created: function() {}",
"};",
"/** @polymerBehavior */",
"var RadBehavior = {",
" properties: {",
" howRad: Number",
" },",
" /** @param {number} radAmount */",
" doSomethingRad: function(radAmount) { alert('Something ' + radAmount + ' rad!'); },",
" /** @override */",
" ready: function() {}",
"};",
"/** @polymerBehavior */",
"var SuperCoolBehaviors = [FunBehavior, RadBehavior];",
"/** @polymerBehavior */",
"var BoringBehavior = {",
" properties: {",
" boringString: String",
" },",
" /** @param {boolean} boredYet */",
" doSomething: function(boredYet) { alert(boredYet + ' ' + this.boringString); },",
"};",
"var A = Polymer({",
" is: 'x-element',",
" behaviors: [ SuperCoolBehaviors, BoringBehavior ],",
"});"));
ImmutableList<BehaviorDefinition> defs = extractor.extractBehaviors(behaviorArray);
assertThat(defs).hasSize(3);
// TODO(jlklein): Actually verify the properties of the BehaviorDefinitions.
}
public void testInlineLiteralBehavior() {
parseAndInitializeExtractor(
LINE_JOINER.join(
"/** @polymerBehavior */",
"var FunBehavior = {",
" properties: {",
" isFun: Boolean",
" },",
" /** @param {string} funAmount */",
" doSomethingFun: function(funAmount) { alert('Something ' + funAmount + ' fun!'); },",
" /** @override */",
" created: function() {}",
"};",
"/** @polymerBehavior */",
"var SuperCoolBehaviors = [FunBehavior, {",
" properties: {",
" howRad: Number",
" },",
" /** @param {number} radAmount */",
" doSomethingRad: function(radAmount) { alert('Something ' + radAmount + ' rad!'); },",
" /** @override */",
" ready: function() {}",
"}];",
"var A = Polymer({",
" is: 'x-element',",
" behaviors: [ SuperCoolBehaviors ],",
"});"));
ImmutableList<BehaviorDefinition> defs = extractor.extractBehaviors(behaviorArray);
assertThat(defs).hasSize(2);
// TODO(jlklein): Actually verify the properties of the BehaviorDefinitions.
}
// TODO(jlklein): Test more use cases: names to avoid copying, global vs. non-global, etc.
private void parseAndInitializeExtractor(String code) {
Node root = compiler.parseTestCode(code);
GlobalNamespace globalNamespace = new GlobalNamespace(compiler, root);
extractor = new PolymerBehaviorExtractor(compiler, globalNamespace);
NodeUtil.visitPostOrder(root, new NodeUtil.Visitor() {
@Override
public void visit(Node node) {
if (isBehaviorArrayDeclaration(node)) {
behaviorArray = node;
}
}
}, Predicates.<Node>alwaysTrue());
assertNotNull(behaviorArray);
}
private boolean isBehaviorArrayDeclaration(Node node) {
return node.isArrayLit()
&& node.getParent().isStringKey() && node.getParent().getString().equals("behaviors");
}
}
| selkhateeb/closure-compiler | test/com/google/javascript/jscomp/PolymerBehaviorExtractorTest.java | Java | apache-2.0 | 5,025 |
// legal JS, if nonsensical, which also triggers the issue
const {
date,
} = (inspectedElement: any) => 0;
date.toISOString();
// Working flow code
const {
date2,
} = (inspectedElement: any).props;
date2.toISOString();
// It could also be an async function
const { constructor } = async () => {};
| Microsoft/TypeScript | tests/cases/compiler/destructuringControlFlowNoCrash.ts | TypeScript | apache-2.0 | 323 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl.blockEvaluator;
import com.jetbrains.python.psi.PyExpression;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Ilya.Kazakevich
*/
@SuppressWarnings("PackageVisibleField") // Package-only class
class PyEvaluationResult {
@NotNull
final Map<String, Object> myNamespace = new HashMap<>();
@NotNull
final Map<String, List<PyExpression>> myDeclarations = new HashMap<>();
@NotNull
List<PyExpression> getDeclarations(@NotNull final String name) {
final List<PyExpression> expressions = myDeclarations.get(name);
return (expressions != null) ? expressions : Collections.<PyExpression>emptyList();
}
}
| hurricup/intellij-community | python/src/com/jetbrains/python/psi/impl/blockEvaluator/PyEvaluationResult.java | Java | apache-2.0 | 1,369 |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.reservoirsampling;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
public class ReservoirSamplingMetaTest {
@Test
public void testLoadSaveMeta() throws KettleException {
List<String> attributes = Arrays.asList( "sample_size", "seed" );
Map<String, String> getterMap = new HashMap<String, String>();
getterMap.put( "sample_size", "getSampleSize" );
getterMap.put( "seed", "getSeed" );
Map<String, String> setterMap = new HashMap<String, String>();
setterMap.put( "sample_size", "setSampleSize" );
setterMap.put( "seed", "setSeed" );
LoadSaveTester tester = new LoadSaveTester( ReservoirSamplingMeta.class, attributes, getterMap, setterMap );
tester.testSerialization();
}
}
| aminmkhan/pentaho-kettle | engine/src/test/java/org/pentaho/di/trans/steps/reservoirsampling/ReservoirSamplingMetaTest.java | Java | apache-2.0 | 1,856 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Formatting.Rules;
using Microsoft.CodeAnalysis.Text;
namespace Microsoft.CodeAnalysis.Formatting
{
/// <summary>
/// this collector gathers formatting operations that are based on a node
/// </summary>
internal class NodeOperations
{
public static NodeOperations Empty = new NodeOperations();
public Task<List<IndentBlockOperation>> IndentBlockOperationTask { get; private set; }
public Task<List<SuppressOperation>> SuppressOperationTask { get; private set; }
public Task<List<AlignTokensOperation>> AlignmentOperationTask { get; private set; }
public Task<List<AnchorIndentationOperation>> AnchorIndentationOperationsTask { get; private set; }
public NodeOperations(Task<List<IndentBlockOperation>> indentBlockOperationTask, Task<List<SuppressOperation>> suppressOperationTask, Task<List<AnchorIndentationOperation>> anchorIndentationOperationsTask, Task<List<AlignTokensOperation>> alignmentOperationTask)
{
this.IndentBlockOperationTask = indentBlockOperationTask;
this.SuppressOperationTask = suppressOperationTask;
this.AlignmentOperationTask = alignmentOperationTask;
this.AnchorIndentationOperationsTask = anchorIndentationOperationsTask;
}
private NodeOperations()
{
this.IndentBlockOperationTask = Task.FromResult(new List<IndentBlockOperation>());
this.SuppressOperationTask = Task.FromResult(new List<SuppressOperation>());
this.AlignmentOperationTask = Task.FromResult(new List<AlignTokensOperation>());
this.AnchorIndentationOperationsTask = Task.FromResult(new List<AnchorIndentationOperation>());
}
}
}
| DavidKarlas/roslyn | src/Workspaces/Core/Portable/Formatting/Engine/NodeOperations.cs | C# | apache-2.0 | 2,013 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#nullable disable
namespace Microsoft.CodeAnalysis.Operations
{
/// <summary>
/// Represents a <see cref="IOperation"/> visitor that visits only the single IOperation
/// passed into its Visit method.
/// </summary>
public abstract partial class OperationVisitor
{
// Make public after review: https://github.com/dotnet/roslyn/issues/21281
internal virtual void VisitFixed(IFixedOperation operation) =>
// https://github.com/dotnet/roslyn/issues/21281
//DefaultVisit(operation);
VisitNoneOperation(operation);
}
/// <summary>
/// Represents a <see cref="IOperation"/> visitor that visits only the single IOperation
/// passed into its Visit method with an additional argument of the type specified by the
/// <typeparamref name="TArgument"/> parameter and produces a value of the type specified by
/// the <typeparamref name="TResult"/> parameter.
/// </summary>
/// <typeparam name="TArgument">
/// The type of the additional argument passed to this visitor's Visit method.
/// </typeparam>
/// <typeparam name="TResult">
/// The type of the return value of this visitor's Visit method.
/// </typeparam>
public abstract partial class OperationVisitor<TArgument, TResult>
{
// Make public after review: https://github.com/dotnet/roslyn/issues/21281
internal virtual TResult VisitFixed(IFixedOperation operation, TArgument argument) =>
// https://github.com/dotnet/roslyn/issues/21281
//return DefaultVisit(operation, argument);
VisitNoneOperation(operation, argument);
}
}
| brettfo/roslyn | src/Compilers/Core/Portable/Operations/OperationVisitor.cs | C# | apache-2.0 | 1,874 |
module Clever
module APIOperations
# Represents a list of results for a paged request.
class ResultsList
include Enumerable
# Create a results list from a PageList
# @api private
# @return [ResultsList]
def initialize(pagelist)
@pages = pagelist
end
# Iterate over results list
# @api public
# @return [nil]
# @example
# results = Clever::District.find # returns a ResultsList
# results.each do |district|
# puts district.name
# end
def each
@pages.each do |page|
page.each do |elem|
yield elem
end
end
end
end
end
end
| mchavarriagam/clever-ruby | lib/clever-ruby/api_operations/results_list.rb | Ruby | apache-2.0 | 699 |
/*
* Minio Cloud Storage, (C) 2016, 2017 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmd
import (
"testing"
"github.com/minio/minio/pkg/disk"
)
const invalidToken = "invalidToken"
type testStorageRPCServer struct {
configDir string
token string
diskDirs []string
stServer *storageServer
endpoints EndpointList
}
func createTestStorageServer(t *testing.T) *testStorageRPCServer {
testPath, err := newTestConfig(globalMinioDefaultRegion)
if err != nil {
t.Fatalf("unable initialize config file, %s", err)
}
serverCred := serverConfig.GetCredential()
token, err := authenticateNode(serverCred.AccessKey, serverCred.SecretKey)
if err != nil {
t.Fatalf("unable for JWT to generate token, %s", err)
}
fsDirs, err := getRandomDisks(1)
if err != nil {
t.Fatalf("unable to create FS backend, %s", err)
}
endpoints := mustGetNewEndpointList(fsDirs...)
storageDisks, err := initStorageDisks(endpoints)
if err != nil {
t.Fatalf("unable to initialize storage disks, %s", err)
}
stServer := &storageServer{
storage: storageDisks[0],
path: "/disk1",
timestamp: UTCNow(),
}
return &testStorageRPCServer{
token: token,
configDir: testPath,
diskDirs: fsDirs,
endpoints: endpoints,
stServer: stServer,
}
}
func errorIfInvalidToken(t *testing.T, err error) {
realErr := errorCause(err)
if realErr != errInvalidToken {
t.Errorf("Expected to fail with %s but failed with %s", errInvalidToken, realErr)
}
}
func TestStorageRPCInvalidToken(t *testing.T) {
st := createTestStorageServer(t)
defer removeRoots(st.diskDirs)
defer removeAll(st.configDir)
storageRPC := st.stServer
// Following test cases are meant to exercise the invalid
// token code path of the storage RPC methods.
var err error
badAuthRPCArgs := AuthRPCArgs{AuthToken: "invalidToken"}
badGenericVolArgs := GenericVolArgs{
AuthRPCArgs: badAuthRPCArgs,
Vol: "myvol",
}
// 1. DiskInfoHandler
diskInfoReply := &disk.Info{}
err = storageRPC.DiskInfoHandler(&badAuthRPCArgs, diskInfoReply)
errorIfInvalidToken(t, err)
// 2. MakeVolHandler
makeVolArgs := &badGenericVolArgs
makeVolReply := &AuthRPCReply{}
err = storageRPC.MakeVolHandler(makeVolArgs, makeVolReply)
errorIfInvalidToken(t, err)
// 3. ListVolsHandler
listVolReply := &ListVolsReply{}
err = storageRPC.ListVolsHandler(&badAuthRPCArgs, listVolReply)
errorIfInvalidToken(t, err)
// 4. StatVolHandler
statVolReply := &VolInfo{}
statVolArgs := &badGenericVolArgs
err = storageRPC.StatVolHandler(statVolArgs, statVolReply)
errorIfInvalidToken(t, err)
// 5. DeleteVolHandler
deleteVolArgs := &badGenericVolArgs
deleteVolReply := &AuthRPCReply{}
err = storageRPC.DeleteVolHandler(deleteVolArgs, deleteVolReply)
errorIfInvalidToken(t, err)
// 6. StatFileHandler
statFileArgs := &StatFileArgs{
AuthRPCArgs: badAuthRPCArgs,
}
statReply := &FileInfo{}
err = storageRPC.StatFileHandler(statFileArgs, statReply)
errorIfInvalidToken(t, err)
// 7. ListDirHandler
listDirArgs := &ListDirArgs{
AuthRPCArgs: badAuthRPCArgs,
}
listDirReply := &[]string{}
err = storageRPC.ListDirHandler(listDirArgs, listDirReply)
errorIfInvalidToken(t, err)
// 8. ReadAllHandler
readFileArgs := &ReadFileArgs{
AuthRPCArgs: badAuthRPCArgs,
}
readFileReply := &[]byte{}
err = storageRPC.ReadAllHandler(readFileArgs, readFileReply)
errorIfInvalidToken(t, err)
// 9. ReadFileHandler
err = storageRPC.ReadFileHandler(readFileArgs, readFileReply)
errorIfInvalidToken(t, err)
// 10. PrepareFileHandler
prepFileArgs := &PrepareFileArgs{
AuthRPCArgs: badAuthRPCArgs,
}
prepFileReply := &AuthRPCReply{}
err = storageRPC.PrepareFileHandler(prepFileArgs, prepFileReply)
errorIfInvalidToken(t, err)
// 11. AppendFileHandler
appendArgs := &AppendFileArgs{
AuthRPCArgs: badAuthRPCArgs,
}
appendReply := &AuthRPCReply{}
err = storageRPC.AppendFileHandler(appendArgs, appendReply)
errorIfInvalidToken(t, err)
// 12. DeleteFileHandler
delFileArgs := &DeleteFileArgs{
AuthRPCArgs: badAuthRPCArgs,
}
delFileRely := &AuthRPCReply{}
err = storageRPC.DeleteFileHandler(delFileArgs, delFileRely)
errorIfInvalidToken(t, err)
// 13. RenameFileHandler
renameArgs := &RenameFileArgs{
AuthRPCArgs: badAuthRPCArgs,
}
renameReply := &AuthRPCReply{}
err = storageRPC.RenameFileHandler(renameArgs, renameReply)
errorIfInvalidToken(t, err)
}
| hackintoshrao/minio | cmd/storage-rpc-server_test.go | GO | apache-2.0 | 4,913 |
({
L_MENU_GRID: "Valikkoruudukko",
L_MENU_ITEM_DISABLED: "%1 ei ole k\u00e4ytett\u00e4viss\u00e4",
L_MENU_ITEM_SUBMENU: "%1 (alivalikko)",
L_MENU_SUBMENU: "alivalikko",
L_MENU_CHECK: "valinta"
})
| iharkhukhrakou/XPagesExtensionLibrary | extlib/lwp/product/runtime/eclipse/plugins/com.ibm.xsp.extlib.domino/resources/web/dwa/common/nls/fi/menu.js | JavaScript | apache-2.0 | 196 |
package antlr;
/* ANTLR Translator Generator
* Project led by Terence Parr at http://www.jGuru.com
* Software rights: http://www.antlr.org/RIGHTS.html
*
* $Id: CppCharFormatter.java,v 1.1 2003/06/04 20:54:22 greg Exp $
*/
// C++ code generator by Pete Wells: pete@yamuna.demon.co.uk
class CppCharFormatter implements CharFormatter {
/** Given a character value, return a string representing the character
* that can be embedded inside a string literal or character literal
* This works for Java/C/C++ code-generation and languages with compatible
* special-character-escapment.
* Code-generators for languages should override this method.
* @param c The character of interest.
* @param forCharLiteral true to escape for char literal, false for string literal
*/
public String escapeChar(int c, boolean forCharLiteral) {
switch (c) {
case '\n' : return "\\n";
case '\t' : return "\\t";
case '\r' : return "\\r";
case '\\' : return "\\\\";
case '\'' : return forCharLiteral ? "\\'" : "'";
case '"' : return forCharLiteral ? "\"" : "\\\"";
default :
if ( c<' '||c>126 ) {
if (c > 255) {
return "\\u" + Integer.toString(c,16);
}
else {
return "\\" + Integer.toString(c,8);
}
}
else {
return String.valueOf((char)c);
}
}
}
/** Converts a String into a representation that can be use as a literal
* when surrounded by double-quotes.
* @param s The String to be changed into a literal
*/
public String escapeString(String s)
{
String retval = new String();
for (int i = 0; i < s.length(); i++)
{
retval += escapeChar(s.charAt(i), false);
}
return retval;
}
/** Given a character value, return a string representing the character
* literal that can be recognized by the target language compiler.
* This works for languages that use single-quotes for character literals.
* Code-generators for languages should override this method.
* @param c The character of interest.
*/
public String literalChar(int c) {
return "static_cast<unsigned char>('" + escapeChar(c, true) + "')";
}
/** Converts a String into a string literal
* This works for languages that use double-quotes for string literals.
* Code-generators for languages should override this method.
* @param s The String to be changed into a literal
*/
public String literalString(String s)
{
return "\"" + escapeString(s) + "\"";
}
}
| HebaKhaled/bposs | src/pt_antlr/antlr/CppCharFormatter.java | Java | apache-2.0 | 2,509 |
/**
* @@@ START COPYRIGHT @@@
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* @@@ END COPYRIGHT @@@
**/
package org.trafodion.wms;
public interface SmallTests {
}
| apache/incubator-trafodion | wms/src/test/java/org/trafodion/wms/SmallTests.java | Java | apache-2.0 | 909 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.cache.hibernate;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import javax.cache.Cache;
import javax.cache.expiry.ExpiryPolicy;
import javax.cache.processor.EntryProcessor;
import javax.cache.processor.EntryProcessorResult;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.CacheEntry;
import org.apache.ignite.cache.CacheMetrics;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.affinity.Affinity;
import org.apache.ignite.cluster.ClusterGroup;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.IgniteInternalCache;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.mxbean.CacheMetricsMXBean;
import org.apache.ignite.transactions.Transaction;
import org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.Nullable;
/**
* Hibernate cache proxy used to substitute hibernate keys with ignite keys.
*/
public class HibernateCacheProxy implements IgniteInternalCache<Object, Object> {
/** Delegate is lazily loaded which allows for creation of caches after the SPI is bootstrapped */
private final Supplier<IgniteInternalCache<Object, Object>> delegate;
/** Transformer. */
private final HibernateKeyTransformer keyTransformer;
/** */
private String cacheName;
/**
* @param cacheName Cache name. Should match delegate.get().name(). Needed for lazy loading.
* @param delegate Delegate.
* @param keyTransformer Key keyTransformer.
*/
HibernateCacheProxy(
String cacheName,
Supplier<IgniteInternalCache<Object, Object>> delegate,
HibernateKeyTransformer keyTransformer
) {
assert cacheName != null;
assert delegate != null;
assert keyTransformer != null;
this.cacheName = cacheName;
this.delegate = delegate;
this.keyTransformer = keyTransformer;
}
/**
* @return HibernateKeyTransformer
*/
public HibernateKeyTransformer keyTransformer() {
return keyTransformer;
}
/** {@inheritDoc} */
@Override public String name() {
return cacheName;
}
/** {@inheritDoc} */
@Override public boolean skipStore() {
return delegate.get().skipStore();
}
/** {@inheritDoc} */
@Override public IgniteInternalCache setSkipStore(boolean skipStore) {
return delegate.get().setSkipStore(skipStore);
}
/** {@inheritDoc} */
@Override public boolean isEmpty() {
return delegate.get().isEmpty();
}
/** {@inheritDoc} */
@Override public boolean containsKey(Object key) {
return delegate.get().containsKey(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> containsKeyAsync(Object key) {
return delegate.get().containsKeyAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public boolean containsKeys(Collection keys) {
return delegate.get().containsKey(transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> containsKeysAsync(Collection keys) {
return delegate.get().containsKeysAsync(transform(keys));
}
/** {@inheritDoc} */
@Nullable @Override public Object localPeek(
Object key,
CachePeekMode[] peekModes
) throws IgniteCheckedException {
return delegate.get().localPeek(keyTransformer.transform(key), peekModes);
}
/** {@inheritDoc} */
@Override public Iterable<Cache.Entry<Object, Object>> localEntries(
CachePeekMode[] peekModes
) throws IgniteCheckedException {
return delegate.get().localEntries(peekModes);
}
/** {@inheritDoc} */
@Nullable @Override public Object get(Object key) throws IgniteCheckedException {
return delegate.get().get(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Nullable @Override public CacheEntry getEntry(Object key) throws IgniteCheckedException {
return delegate.get().getEntry(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAsync(Object key) {
return delegate.get().getAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<CacheEntry<Object, Object>> getEntryAsync(Object key) {
return delegate.get().getEntryAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public Map getAll(@Nullable Collection keys) throws IgniteCheckedException {
return delegate.get().getAll(transform(keys));
}
/** {@inheritDoc} */
@Override public Collection<CacheEntry<Object, Object>> getEntries(
@Nullable Collection keys) throws IgniteCheckedException {
return delegate.get().getEntries(transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Map<Object, Object>> getAllAsync(@Nullable Collection keys) {
return delegate.get().getAllAsync(transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Collection<CacheEntry<Object, Object>>> getEntriesAsync(
@Nullable Collection keys
) {
return delegate.get().getEntriesAsync(transform(keys));
}
/** {@inheritDoc} */
@Nullable @Override public Object getAndPut(Object key, Object val) throws IgniteCheckedException {
return delegate.get().getAndPut(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAndPutAsync(Object key, Object val) {
return delegate.get().getAndPutAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public boolean put(Object key, Object val) throws IgniteCheckedException {
return delegate.get().put(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> putAsync(Object key, Object val) {
return delegate.get().putAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Nullable @Override public Object getAndPutIfAbsent(Object key, Object val) throws IgniteCheckedException {
return delegate.get().getAndPutIfAbsent(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAndPutIfAbsentAsync(Object key, Object val) {
return delegate.get().getAndPutIfAbsentAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public boolean putIfAbsent(Object key, Object val) throws IgniteCheckedException {
return delegate.get().putIfAbsent(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> putIfAbsentAsync(Object key, Object val) {
return delegate.get().putIfAbsentAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Nullable @Override public Object getAndReplace(Object key, Object val) throws IgniteCheckedException {
return delegate.get().getAndReplace(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAndReplaceAsync(Object key, Object val) {
return delegate.get().getAndReplaceAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public boolean replace(Object key, Object val) throws IgniteCheckedException {
return delegate.get().replace(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> replaceAsync(Object key, Object val) {
return delegate.get().replaceAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public boolean replace(Object key, Object oldVal, Object newVal) throws IgniteCheckedException {
return delegate.get().replace(keyTransformer.transform(key), oldVal, newVal);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> replaceAsync(Object key, Object oldVal, Object newVal) {
return delegate.get().replaceAsync(keyTransformer.transform(key), oldVal, newVal);
}
/** {@inheritDoc} */
@Override public void putAll(@Nullable Map m) throws IgniteCheckedException {
delegate.get().putAll(transform(m));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> putAllAsync(@Nullable Map m) {
return delegate.get().putAllAsync(transform(m));
}
/** {@inheritDoc} */
@Override public Set keySet() {
return delegate.get().keySet();
}
/** {@inheritDoc} */
@Override public Set<Cache.Entry<Object, Object>> entrySet() {
return delegate.get().entrySet();
}
/** {@inheritDoc} */
@Override public Transaction txStart(
TransactionConcurrency concurrency,
TransactionIsolation isolation
) {
return delegate.get().txStart(concurrency, isolation);
}
/** {@inheritDoc} */
@Override public GridNearTxLocal txStartEx(
TransactionConcurrency concurrency,
TransactionIsolation isolation
) {
return delegate.get().txStartEx(concurrency, isolation);
}
/** {@inheritDoc} */
@Override public Transaction txStart(
TransactionConcurrency concurrency,
TransactionIsolation isolation,
long timeout,
int txSize
) {
return delegate.get().txStart(concurrency, isolation, timeout, txSize);
}
/** {@inheritDoc} */
@Nullable @Override public GridNearTxLocal tx() {
return delegate.get().tx();
}
/** {@inheritDoc} */
@Override public boolean evict(Object key) {
return delegate.get().evict(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public void evictAll(@Nullable Collection keys) {
delegate.get().evictAll(transform(keys));
}
/** {@inheritDoc} */
@Override public void clearLocally(boolean srv, boolean near, boolean readers) {
delegate.get().clearLocally(srv, near, readers);
}
/** {@inheritDoc} */
@Override public boolean clearLocally(Object key) {
return delegate.get().clearLocally(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public void clearLocallyAll(Set keys, boolean srv, boolean near, boolean readers) {
delegate.get().clearLocallyAll((Set<?>)transform(keys), srv, near, readers);
}
/** {@inheritDoc} */
@Override public void clear(Object key) throws IgniteCheckedException {
delegate.get().clear(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public void clearAll(Set keys) throws IgniteCheckedException {
delegate.get().clearAll((Set<?>)transform(keys));
}
/** {@inheritDoc} */
@Override public void clear() throws IgniteCheckedException {
delegate.get().clear();
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> clearAsync() {
return delegate.get().clearAsync();
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> clearAsync(Object key) {
return delegate.get().clearAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> clearAllAsync(Set keys) {
return delegate.get().clearAllAsync((Set<?>)transform(keys));
}
/** {@inheritDoc} */
@Nullable @Override public Object getAndRemove(Object key) throws IgniteCheckedException {
return delegate.get().getAndRemove(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getAndRemoveAsync(Object key) {
return delegate.get().getAndRemoveAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public boolean remove(Object key) throws IgniteCheckedException {
return delegate.get().remove(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> removeAsync(Object key) {
return delegate.get().removeAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public boolean remove(Object key, Object val) throws IgniteCheckedException {
return delegate.get().remove(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> removeAsync(Object key, Object val) {
return delegate.get().removeAsync(keyTransformer.transform(key), val);
}
/** {@inheritDoc} */
@Override public void removeAll(@Nullable Collection keys) throws IgniteCheckedException {
delegate.get().removeAll(transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> removeAllAsync(@Nullable Collection keys) {
return delegate.get().removeAllAsync(transform(keys));
}
/** {@inheritDoc} */
@Override public void removeAll() throws IgniteCheckedException {
delegate.get().removeAll();
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> removeAllAsync() {
return delegate.get().removeAllAsync();
}
/** {@inheritDoc} */
@Override public boolean lock(Object key, long timeout) throws IgniteCheckedException {
return delegate.get().lock(keyTransformer.transform(key), timeout);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> lockAsync(Object key, long timeout) {
return delegate.get().lockAsync(keyTransformer.transform(key), timeout);
}
/** {@inheritDoc} */
@Override public boolean lockAll(@Nullable Collection keys, long timeout) throws IgniteCheckedException {
return delegate.get().lockAll(transform(keys), timeout);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Boolean> lockAllAsync(@Nullable Collection keys, long timeout) {
return delegate.get().lockAllAsync(transform(keys), timeout);
}
/** {@inheritDoc} */
@Override public void unlock(Object key) throws IgniteCheckedException {
delegate.get().unlock(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public void unlockAll(@Nullable Collection keys) throws IgniteCheckedException {
delegate.get().unlockAll(transform(keys));
}
/** {@inheritDoc} */
@Override public boolean isLocked(Object key) {
return delegate.get().isLocked(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public boolean isLockedByThread(Object key) {
return delegate.get().isLockedByThread(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public int size() {
return delegate.get().size();
}
/** {@inheritDoc} */
@Override public long sizeLong() {
return delegate.get().sizeLong();
}
/** {@inheritDoc} */
@Override public int localSize(CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().localSize(peekModes);
}
/** {@inheritDoc} */
@Override public long localSizeLong(CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().localSizeLong(peekModes);
}
/** {@inheritDoc} */
@Override public long localSizeLong(int partition, CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().localSizeLong(partition, peekModes);
}
/** {@inheritDoc} */
@Override public int size(CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().size(peekModes);
}
/** {@inheritDoc} */
@Override public long sizeLong(CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().sizeLong(peekModes);
}
/** {@inheritDoc} */
@Override public long sizeLong(int partition, CachePeekMode[] peekModes) throws IgniteCheckedException {
return delegate.get().sizeLong(partition, peekModes);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Integer> sizeAsync(CachePeekMode[] peekModes) {
return delegate.get().sizeAsync(peekModes);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Long> sizeLongAsync(CachePeekMode[] peekModes) {
return delegate.get().sizeLongAsync(peekModes);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Long> sizeLongAsync(int partition, CachePeekMode[] peekModes) {
return delegate.get().sizeLongAsync(partition, peekModes);
}
/** {@inheritDoc} */
@Override public int nearSize() {
return delegate.get().nearSize();
}
/** {@inheritDoc} */
@Override public int primarySize() {
return delegate.get().primarySize();
}
/** {@inheritDoc} */
@Override public long primarySizeLong() {
return delegate.get().primarySizeLong();
}
/** {@inheritDoc} */
@Override public CacheConfiguration configuration() {
return delegate.get().configuration();
}
/** {@inheritDoc} */
@Override public Affinity affinity() {
return delegate.get().affinity();
}
/** {@inheritDoc} */
@Override public CacheMetrics clusterMetrics() {
return delegate.get().clusterMetrics();
}
/** {@inheritDoc} */
@Override public CacheMetrics clusterMetrics(ClusterGroup grp) {
return delegate.get().clusterMetrics(grp);
}
/** {@inheritDoc} */
@Override public CacheMetrics localMetrics() {
return delegate.get().localMetrics();
}
/** {@inheritDoc} */
@Override public CacheMetricsMXBean clusterMxBean() {
return delegate.get().clusterMxBean();
}
/** {@inheritDoc} */
@Override public CacheMetricsMXBean localMxBean() {
return delegate.get().localMxBean();
}
/** {@inheritDoc} */
@Override public long offHeapEntriesCount() {
return delegate.get().offHeapEntriesCount();
}
/** {@inheritDoc} */
@Override public long offHeapAllocatedSize() {
return delegate.get().offHeapAllocatedSize();
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> rebalance() {
return delegate.get().rebalance();
}
/** {@inheritDoc} */
@Nullable @Override public Object getForcePrimary(Object key) throws IgniteCheckedException {
return delegate.get().getForcePrimary(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture getForcePrimaryAsync(Object key) {
return delegate.get().getForcePrimaryAsync(keyTransformer.transform(key));
}
/** {@inheritDoc} */
@Override public Map getAllOutTx(Set keys) throws IgniteCheckedException {
return delegate.get().getAllOutTx((Set<?>)transform(keys));
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Map<Object, Object>> getAllOutTxAsync(Set keys) {
return delegate.get().getAllOutTxAsync((Set<?>)transform(keys));
}
/** {@inheritDoc} */
@Nullable @Override public ExpiryPolicy expiry() {
return delegate.get().expiry();
}
/** {@inheritDoc} */
@Override public IgniteInternalCache withExpiryPolicy(ExpiryPolicy plc) {
return delegate.get().withExpiryPolicy(plc);
}
/** {@inheritDoc} */
@Override public IgniteInternalCache withNoRetries() {
return delegate.get().withNoRetries();
}
/** {@inheritDoc} */
@Override public <K1, V1> IgniteInternalCache<K1, V1> withAllowAtomicOpsInTx() {
return delegate.get().withAllowAtomicOpsInTx();
}
/** {@inheritDoc} */
@Override public GridCacheContext context() {
return delegate.get().context();
}
/** {@inheritDoc} */
@Override public void localLoadCache(
@Nullable IgniteBiPredicate p,
@Nullable Object... args
) throws IgniteCheckedException {
delegate.get().localLoadCache(p, args);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> localLoadCacheAsync(
@Nullable IgniteBiPredicate p,
@Nullable Object... args
) {
return delegate.get().localLoadCacheAsync(p, args);
}
/** {@inheritDoc} */
@Override public Collection<Integer> lostPartitions() {
return delegate.get().lostPartitions();
}
/** {@inheritDoc} */
@Override public void preloadPartition(int part) throws IgniteCheckedException {
delegate.get().preloadPartition(part);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> preloadPartitionAsync(int part) throws IgniteCheckedException {
return delegate.get().preloadPartitionAsync(part);
}
/** {@inheritDoc} */
@Override public boolean localPreloadPartition(int part) throws IgniteCheckedException {
return delegate.get().localPreloadPartition(part);
}
/** {@inheritDoc} */
@Nullable @Override public EntryProcessorResult invoke(
@Nullable AffinityTopologyVersion topVer,
Object key,
EntryProcessor entryProcessor,
Object... args
) throws IgniteCheckedException {
return delegate.get().invoke(topVer, key, entryProcessor, args);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Map> invokeAllAsync(Map map, Object... args) {
return delegate.get().invokeAllAsync(map, args);
}
/** {@inheritDoc} */
@Override public Map invokeAll(Map map, Object... args) throws IgniteCheckedException {
return delegate.get().invokeAll(map, args);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<Map> invokeAllAsync(Set keys, EntryProcessor entryProcessor, Object... args) {
return delegate.get().invokeAllAsync((Set<?>)transform(keys), entryProcessor, args);
}
/** {@inheritDoc} */
@Override public Map invokeAll(Set keys, EntryProcessor entryProcessor, Object... args) throws IgniteCheckedException {
return delegate.get().invokeAll((Set<?>)transform(keys), entryProcessor, args);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<EntryProcessorResult> invokeAsync(
Object key,
EntryProcessor entryProcessor,
Object... args
) {
return delegate.get().invokeAsync(keyTransformer.transform(key), entryProcessor, args);
}
/** {@inheritDoc} */
@Nullable @Override public EntryProcessorResult invoke(
Object key,
EntryProcessor entryProcessor,
Object... args
) throws IgniteCheckedException {
return delegate.get().invoke(keyTransformer.transform(key), entryProcessor, args);
}
/** {@inheritDoc} */
@Override public Iterator<Cache.Entry<Object, Object>> scanIterator(
boolean keepBinary,
@Nullable IgniteBiPredicate p
) throws IgniteCheckedException {
return delegate.get().scanIterator(keepBinary, p);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> removeAllConflictAsync(Map drMap) throws IgniteCheckedException {
return delegate.get().removeAllConflictAsync(drMap);
}
/** {@inheritDoc} */
@Override public void removeAllConflict(Map drMap) throws IgniteCheckedException {
delegate.get().removeAllConflictAsync(drMap);
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> putAllConflictAsync(Map drMap) throws IgniteCheckedException {
return delegate.get().putAllConflictAsync(drMap);
}
/** {@inheritDoc} */
@Override public void putAllConflict(Map drMap) throws IgniteCheckedException {
delegate.get().putAllConflict(drMap);
}
/** {@inheritDoc} */
@Override public IgniteInternalCache keepBinary() {
return delegate.get().keepBinary();
}
/** {@inheritDoc} */
@Override public IgniteInternalCache cache() {
return delegate.get().cache();
}
/** {@inheritDoc} */
@Override public Iterator iterator() {
return delegate.get().iterator();
}
/**
* @param keys Keys.
*/
private Collection<Object> transform(Collection<Object> keys) {
Collection<Object> res = new LinkedList<>();
for (Object o : keys)
res.add(keyTransformer.transform(o));
return res;
}
/**
* @param map Map.
*/
private Map<Object, Object> transform(Map<Object, Object> map) {
Map<Object, Object> res = new HashMap<>();
Set<Map.Entry<Object, Object>> ents = map.entrySet();
for (Map.Entry<Object, Object> e : ents)
res.put(keyTransformer.transform(e.getKey()), e.getValue());
return res;
}
}
| chandresh-pancholi/ignite | modules/hibernate-core/src/main/java/org/apache/ignite/cache/hibernate/HibernateCacheProxy.java | Java | apache-2.0 | 26,223 |
module td.output
{
/**
* List of states the parser of [[PrettyPrintPlugin]] can be in.
*/
enum PrettyPrintState {
/**
* Default state of the parser. Empty lines will be removed and indention will be adjusted.
*/
Default,
/**
* Comment state, the parser waits for a comment closing tag.
*/
Comment,
/**
* Pre state, the parser waits for the closing tag of the current pre block.
*/
Pre
}
/**
* A plugin that pretty prints the generated html.
*
* This not only aids in making the generated html source code more readable, by removing
* blank lines and unnecessary whitespaces the size of the documentation is reduced without
* visual impact.
*
* At the point writing this the docs of TypeDoc took 97.8 MB without and 66.4 MB with this
* plugin enabled, so it reduced the size to 68% of the original output.
*/
export class PrettyPrintPlugin extends RendererPlugin
{
/**
* Map of all tags that will be ignored.
*/
static IGNORED_TAGS:any = {
area: true,
base: true,
br: true,
wbr: true,
col: true,
command: true,
embed: true,
hr: true,
img: true,
input: true,
link: true,
meta: true,
param: true,
source: true
};
/**
* Map of all tags that prevent this plugin form modifying the following code.
*/
static PRE_TAGS:any = {
pre: true,
code: true,
textarea: true,
script: true,
style: true
};
/**
* Create a new PrettyPrintPlugin instance.
*
* @param renderer The renderer this plugin should be attached to.
*/
constructor(renderer:Renderer) {
super(renderer);
renderer.on(Renderer.EVENT_END_PAGE, this.onRendererEndPage, this, -1024);
}
/**
* Triggered after a document has been rendered, just before it is written to disc.
*
* @param event
*/
onRendererEndPage(event:OutputPageEvent) {
var match, line, lineState, lineDepth, tagName, preName;
var tagExp = /<\s*(\w+)[^>]*>|<\/\s*(\w+)[^>]*>|<!--|-->/g;
var emptyLineExp = /^[\s]*$/;
var minLineDepth = 1;
var state = PrettyPrintState.Default;
var stack = [];
var lines = event.contents.split(/\r\n?|\n/);
var index = 0;
var count = lines.length;
while (index < count) {
line = lines[index];
if (emptyLineExp.test(line)) {
if (state == PrettyPrintState.Default) {
lines.splice(index, 1);
count -= 1;
continue;
}
} else {
lineState = state;
lineDepth = stack.length;
while (match = tagExp.exec(line)) {
if (state == PrettyPrintState.Comment) {
if (match[0] == '-->') {
state = PrettyPrintState.Default;
}
} else if (state == PrettyPrintState.Pre) {
if (match[2] && match[2].toLowerCase() == preName) {
state = PrettyPrintState.Default;
}
} else {
if (match[0] == '<!--') {
state = PrettyPrintState.Comment;
} else if (match[1]) {
tagName = match[1].toLowerCase();
if (tagName in PrettyPrintPlugin.IGNORED_TAGS) continue;
if (tagName in PrettyPrintPlugin.PRE_TAGS) {
state = PrettyPrintState.Pre;
preName = tagName;
} else {
if (tagName == 'body') minLineDepth = 2;
stack.push(tagName);
}
} else if (match[2]) {
tagName = match[2].toLowerCase();
if (tagName in PrettyPrintPlugin.IGNORED_TAGS) continue;
var n = stack.lastIndexOf(tagName);
if (n != -1) {
stack.length = n;
}
}
}
}
if (lineState == PrettyPrintState.Default) {
lineDepth = Math.min(lineDepth, stack.length);
line = line.replace(/^\s+/, '').replace(/\s+$/, '');
if (lineDepth > minLineDepth) {
line = Array(lineDepth - minLineDepth + 1).join('\t') + line;
}
lines[index] = line;
}
}
index++;
}
event.contents = lines.join('\n');
}
}
/**
* Register this plugin.
*/
Renderer.registerPlugin('prettyPrint', PrettyPrintPlugin);
} | innerverse/typedoc | src/td/output/plugins/PrettyPrintPlugin.ts | TypeScript | apache-2.0 | 5,741 |
/**
* @@@ START COPYRIGHT @@@
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* @@@ END COPYRIGHT @@@
*/
package org.trafodion.dcs.master.listener;
import java.sql.SQLException;
import java.io.*;
import java.nio.*;
import java.nio.channels.*;
import java.nio.channels.spi.*;
import java.net.*;
import java.util.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
class ConnectionContext {
private static final Log LOG = LogFactory.getLog(ConnectionContext.class);
String datasource = "";
String catalog = "";
String schema = "";
String location = "";
String userRole = "";
String connectOptions = "";
short accessMode;
short autoCommit;
int queryTimeoutSec;
int idleTimeoutSec;
int loginTimeoutSec;
short txnIsolationLevel;
short rowSetSize;
int diagnosticFlag;
int processId;
String computerName = "";
String windowText = "";
VersionList clientVersionList = null;
UserDesc user = null;
int ctxACP;
int ctxDataLang;
int ctxErrorLang;
short ctxCtrlInferNXHAR;
short cpuToUse;
short cpuToUseEnd;
int srvrType;
short retryCount;
int optionFlags1;
int optionFlags2;
String vproc;
String client;
ConnectionContext(){
clientVersionList = new VersionList();
user = new UserDesc();
}
void extractFromByteBuffer(ByteBuffer buf) throws java.io.UnsupportedEncodingException {
datasource = Util.extractString(buf);
catalog= Util.extractString(buf);
schema= Util.extractString(buf);
location= Util.extractString(buf);
userRole= Util.extractString(buf);
accessMode=buf.getShort();
autoCommit=buf.getShort();
queryTimeoutSec=buf.getInt();
idleTimeoutSec=buf.getInt();
loginTimeoutSec=buf.getInt();
txnIsolationLevel=buf.getShort();
rowSetSize=buf.getShort();
diagnosticFlag=buf.getInt();
processId=buf.getInt();
computerName=Util.extractString(buf);
windowText=Util.extractString(buf);
ctxACP=buf.getInt();
ctxDataLang=buf.getInt();
ctxErrorLang=buf.getInt();
ctxCtrlInferNXHAR=buf.getShort();
cpuToUse=buf.getShort();
cpuToUseEnd=buf.getShort();
connectOptions=Util.extractString(buf);
clientVersionList.extractFromByteBuffer(buf);
user.extractFromByteBuffer(buf);
srvrType = buf.getInt();
retryCount = buf.getShort();
optionFlags1 = buf.getInt();
optionFlags2 = buf.getInt();
vproc= Util.extractString(buf);
client= Util.extractString(buf);
}
}
| apache/incubator-trafodion | dcs/src/main/java/org/trafodion/dcs/master/listener/ConnectionContext.java | Java | apache-2.0 | 3,102 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('event_mapper', '0005_user_is_confirmed'),
]
operations = [
migrations.AlterField(
model_name='event',
name='date_time',
field=models.DateTimeField(help_text=b'Date and time when the event happened.', verbose_name=b'Date and Time'),
preserve_default=True,
),
migrations.AlterField(
model_name='event',
name='victim',
field=models.ForeignKey(default=0, verbose_name=b'Victim', to='event_mapper.Victim', help_text=b'The victim of the event.'),
preserve_default=True,
),
]
| MariaSolovyeva/watchkeeper | django_project/event_mapper/migrations/0006_auto_20150505_0922.py | Python | bsd-2-clause | 789 |
package vektah.rust;
import com.intellij.openapi.fileTypes.LanguageFileType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import vektah.rust.i18n.RustBundle;
import javax.swing.*;
public class RustFileType extends LanguageFileType {
public static final RustFileType INSTANCE = new RustFileType();
private RustFileType() {
super(RustLanguage.INSTANCE);
}
@NotNull
@Override
public String getName() {
return RustBundle.message("file.type.name.rust");
}
@NotNull
@Override
public String getDescription() {
return RustBundle.message("file.type.description.rust");
}
@NotNull
@Override
public String getDefaultExtension() {
return "rs";
}
@Nullable
@Override
public Icon getIcon() {
return RustIcons.ICON_RUST_16;
}
}
| tempbottle/idea-rust | src/java/main/vektah/rust/RustFileType.java | Java | bsd-2-clause | 790 |
cask 'picka' do
version '1.0.0'
sha256 '981209f1bd432d99ce082429cbb182b17194063b6b0eb8ae9fa22a0dbe37bca8'
url 'https://getpicka.com/downloads/Picka.zip'
appcast 'https://getpicka.com/appcast-trial.xml'
name 'Picka'
homepage 'https://getpicka.com/'
app 'Picka.app'
end
| jawshooah/homebrew-cask | Casks/picka.rb | Ruby | bsd-2-clause | 284 |
cask 'watchguard-mobile-vpn-with-ssl' do
version '12.5.3,615421'
sha256 'b8a4f9ce908f19df6122fdf24445fdb233d812f2f6b5f08261ca2e4cca0c3784'
url "http://cdn.watchguard.com/SoftwareCenter/Files/MUVPN_SSL/#{version.before_comma.dots_to_underscores}/WG-MVPN-SSL_#{version.before_comma.dots_to_underscores}.dmg"
name 'WatchGuard Mobile VPN with SSL'
homepage 'https://www.watchguard.com/'
pkg "WatchGuard Mobile VPN with SSL Installer V#{version.after_comma}.mpkg"
uninstall pkgutil: 'com.watchguard.*'
end
| sscotth/homebrew-cask | Casks/watchguard-mobile-vpn-with-ssl.rb | Ruby | bsd-2-clause | 518 |
#include "AppConfig.h"
#include "PathUtils.h"
#define BASE_DATA_PATH (L"McServTest Data Files")
#define CONFIG_FILENAME (L"config.xml")
CAppConfig::CAppConfig()
: CConfig(BuildConfigPath())
{
}
CAppConfig::~CAppConfig()
{
}
Framework::CConfig::PathType CAppConfig::GetBasePath()
{
auto result = Framework::PathUtils::GetPersonalDataPath() / BASE_DATA_PATH;
return result;
}
Framework::CConfig::PathType CAppConfig::BuildConfigPath()
{
auto userPath(GetBasePath());
Framework::PathUtils::EnsurePathExists(userPath);
return userPath / CONFIG_FILENAME;
}
| Alloyed/Play- | tools/McServTest/AppConfig.cpp | C++ | bsd-2-clause | 568 |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import './advanced_settings_dialog.js';
import './print_preview_shared_css.js';
import './settings_section.js';
import {CrButtonElement} from 'chrome://resources/cr_elements/cr_button/cr_button.m.js';
import {PolymerElement} from 'chrome://resources/polymer/v3_0/polymer/polymer_bundled.min.js';
import {Destination} from '../data/destination.js';
import {Settings} from '../data/model.js';
import {getTemplate} from './advanced_options_settings.html.js';
interface PrintPreviewAdvancedOptionsSettingsElement {
$: {
button: CrButtonElement,
}
}
class PrintPreviewAdvancedOptionsSettingsElement extends PolymerElement {
static get is() {
return 'print-preview-advanced-options-settings';
}
static get template() {
return getTemplate();
}
static get properties() {
return {
disabled: Boolean,
destination: Object,
settings: Object,
showAdvancedDialog_: {
type: Boolean,
value: false,
},
};
}
disabled: boolean;
destination: Destination;
settings: Settings;
private showAdvancedDialog_: boolean;
private onButtonClick_() {
this.showAdvancedDialog_ = true;
}
private onDialogClose_() {
this.showAdvancedDialog_ = false;
this.$.button.focus();
}
}
customElements.define(
PrintPreviewAdvancedOptionsSettingsElement.is,
PrintPreviewAdvancedOptionsSettingsElement);
| chromium/chromium | chrome/browser/resources/print_preview/ui/advanced_options_settings.ts | TypeScript | bsd-3-clause | 1,620 |
<?php
/**
* Client test for InternetExplorer 6 (auth).
*
* @package Webdav
* @subpackage Tests
* @version 1.1.4
* @copyright Copyright (C) 2005-2010 eZ Systems AS. All rights reserved.
* @license http://ez.no/licenses/new_bsd New BSD License
*/
require_once 'client_test_suite.php';
require_once 'client_test_continuous_ie_auth_setup.php';
/**
* Client test for InternetExplorer 6 (auth).
*
* @package Webdav
* @subpackage Tests
*/
class ezcWebdavIe6AuthClientTest extends ezcTestCase
{
public static function suite()
{
return new ezcWebdavClientTestSuite(
'InternetExplorer 6 (auth)',
'clients/ie6_auth.php',
new ezcWebdavClientTestContinuousIeAuthSetup()
);
}
}
?>
| faclib/ezcomponents | Webdav/tests/client_ie6_auth_test.php | PHP | bsd-3-clause | 748 |
<?php
/**
* PSR1_Sniffs_Methods_CamelCapsMethodNameSniff.
*
* PHP version 5
*
* @category PHP
* @package PHP_CodeSniffer
* @author Greg Sherwood <gsherwood@squiz.net>
* @copyright 2006-2012 Squiz Pty Ltd (ABN 77 084 670 600)
* @license https://github.com/squizlabs/PHP_CodeSniffer/blob/master/licence.txt BSD Licence
* @link http://pear.php.net/package/PHP_CodeSniffer
*/
if (class_exists('PHP_CodeSniffer_Standards_AbstractScopeSniff', true) === false) {
throw new PHP_CodeSniffer_Exception('Class PHP_CodeSniffer_Standards_AbstractScopeSniff not found');
}
/**
* PSR1_Sniffs_Methods_CamelCapsMethodNameSniff.
*
* Ensures method names are defined using camel case.
*
* @category PHP
* @package PHP_CodeSniffer
* @author Greg Sherwood <gsherwood@squiz.net>
* @copyright 2006-2012 Squiz Pty Ltd (ABN 77 084 670 600)
* @license https://github.com/squizlabs/PHP_CodeSniffer/blob/master/licence.txt BSD Licence
* @version Release: @package_version@
* @link http://pear.php.net/package/PHP_CodeSniffer
*/
class PSR1_Sniffs_Methods_CamelCapsMethodNameSniff extends PHP_CodeSniffer_Standards_AbstractScopeSniff
{
/**
* Constructs a PSR1_Sniffs_Methods_CamelCapsMethodNameSniff.
*/
public function __construct()
{
parent::__construct(array(T_CLASS, T_INTERFACE, T_TRAIT), array(T_FUNCTION), true);
}//end __construct()
/**
* Processes the tokens within the scope.
*
* @param PHP_CodeSniffer_File $phpcsFile The file being processed.
* @param int $stackPtr The position where this token was
* found.
* @param int $currScope The position of the current scope.
*
* @return void
*/
protected function processTokenWithinScope(PHP_CodeSniffer_File $phpcsFile, $stackPtr, $currScope)
{
$methodName = $phpcsFile->getDeclarationName($stackPtr);
if ($methodName === null) {
// Ignore closures.
return;
}
$testName = ltrim($methodName, '_');
if (PHP_CodeSniffer::isCamelCaps($testName, false, true, false) === false) {
$error = 'Method name "%s" is not in camel caps format';
$className = $phpcsFile->getDeclarationName($currScope);
$errorData = array($className.'::'.$methodName);
$phpcsFile->addError($error, $stackPtr, 'NotCamelCaps', $errorData);
}
}//end processTokenWithinScope()
/**
* Processes the tokens outside the scope.
*
* @param PHP_CodeSniffer_File $phpcsFile The file being processed.
* @param int $stackPtr The position where this token was
* found.
*
* @return void
*/
protected function processTokenOutsideScope(PHP_CodeSniffer_File $phpcsFile, $stackPtr)
{
}//end processTokenOutsideScope()
}//end class
?>
| scaryml1000/ZendSkeleton | vendor/squizlabs/php_codesniffer/CodeSniffer/Standards/PSR1/Sniffs/Methods/CamelCapsMethodNameSniff.php | PHP | bsd-3-clause | 2,984 |
<?php declare(strict_types=1);
/*
* This file is part of PHPUnit.
*
* (c) Sebastian Bergmann <sebastian@phpunit.de>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace PHPUnit\TestFixture\Metadata\Attribute;
use PHPUnit\Framework\Attributes\DoesNotPerformAssertions;
use PHPUnit\Framework\TestCase;
#[DoesNotPerformAssertions]
final class DoesNotPerformAssertionsTest extends TestCase
{
#[DoesNotPerformAssertions]
public function testOne(): void
{
}
}
| Firehed/phpunit | tests/_files/Metadata/Attribute/tests/DoesNotPerformAssertionsTest.php | PHP | bsd-3-clause | 566 |
// Copyright 2010, Shuo Chen. All rights reserved.
// http://code.google.com/p/evproto/
//
// Use of this source code is governed by a BSD-style license
// that can be found in the License file.
// Author: Shuo Chen (chenshuo at chenshuo dot com)
//
#include "evproto/evproto.h"
#include <gflags/gflags.h>
#include <glog/logging.h>
#include <google/protobuf/message.h>
#include <event2/event.h>
#include <event2/thread.h>
#if !defined(LIBEVENT_VERSION_NUMBER) || LIBEVENT_VERSION_NUMBER < 0x02000400
#error "This version of Libevent is not supported; Get 2.0.4-alpha or later."
#endif
namespace evproto
{
namespace internal
{
void eventLogToGlog(int severity, const char *msg)
{
switch (severity) {
case _EVENT_LOG_DEBUG:
VLOG(1) << msg;
break;
case _EVENT_LOG_MSG:
LOG(INFO) << msg;
break;
case _EVENT_LOG_WARN:
LOG(WARNING) << msg;
break;
case _EVENT_LOG_ERR:
LOG(ERROR) << msg;
break;
default:
LOG(ERROR) << msg;
break;
}
}
void protobufLogHandler(google::protobuf::LogLevel level, const char* filename, int line,
const std::string& message)
{
google::LogMessage(filename, line, level).stream() << message;
}
void eventFatal(int err)
{
LOG(FATAL) << "libevent2 fatal " << err;
}
} // namespace internal
// TODO: pass back modified argc and argv.
void initialize(int argc, char* argv[])
{
google::InitGoogleLogging(argv[0]);
::event_set_log_callback(internal::eventLogToGlog);
google::protobuf::SetLogHandler(internal::protobufLogHandler);
#if EVTHREAD_USE_WINDOWS_THREADS_IMPLEMENTED
CHECK_EQ(::evthread_use_windows_threads(), 0);
#elif EVTHREAD_USE_PTHREADS_IMPLEMENTED
CHECK_EQ(::evthread_use_pthreads(), 0);
#endif
#ifndef NDEBUG
// ::evthread_enable_lock_debuging();
// ::event_enable_debug_mode();
#endif
CHECK_EQ(LIBEVENT_VERSION_NUMBER, ::event_get_version_number())
<< "libevent2 version number mismatch";
google::ParseCommandLineFlags(&argc, &argv, true);
LOG(INFO) << argv[0] << " initialized";
}
}
| cetium/evproto | evproto/evproto.cc | C++ | bsd-3-clause | 2,066 |
--TEST--
Protocol Buffers setting integer value
--SKIPIF--
<?php require 'skipif.inc' ?>
--FILE--
<?php
require 'test.inc';
$foo = new Foo();
/* from int type */
$foo->setInt32Field(2);
var_dump($foo->getInt32Field());
/* from float type */
$foo->setInt32Field(3.0);
var_dump($foo->getInt32Field());
/* from string type */
$foo->setInt32Field('4');
var_dump($foo->getInt32Field());
?>
--EXPECT--
int(2)
int(3)
int(4)
| nosun/php-protobuf | tests/set_int_field.phpt | PHP | bsd-3-clause | 421 |
// (C) Copyright Joel de Guzman 2003.
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// Modified by Troy D. Straszheim and Jakob van Santen, 2009-03-26
// Pulled in to ecto in 2010-11 by Troy D. Straszheim
// Willow Garage BSD License not applicable
#ifndef ICETRAY_PYTHON_STD_MAP_INDEXING_SUITE_HPP_INCLUDED
# define ICETRAY_PYTHON_STD_MAP_INDEXING_SUITE_HPP_INCLUDED
# include <ecto/python.hpp>
# include <boost/python/suite/indexing/indexing_suite.hpp>
# include <boost/python/iterator.hpp>
# include <boost/python/call_method.hpp>
# include <boost/python/tuple.hpp>
# include <boost/iterator/transform_iterator.hpp>
namespace bp = boost::python;
namespace boost { namespace python {
// Forward declaration
template <class Container, bool NoProxy, class DerivedPolicies>
class std_map_indexing_suite;
namespace detail
{
template <class Container, bool NoProxy>
class final_std_map_derived_policies
: public std_map_indexing_suite<Container,
NoProxy, final_std_map_derived_policies<Container, NoProxy> > {};
}
// The map_indexing_suite class is a predefined indexing_suite derived
// class for wrapping std::vector (and std::vector like) classes. It provides
// all the policies required by the indexing_suite (see indexing_suite).
// Example usage:
//
// class X {...};
//
// ...
//
// class_<std::map<std::string, X> >("XMap")
// .def(map_indexing_suite<std::map<std::string, X> >())
// ;
//
// By default indexed elements are returned by proxy. This can be
// disabled by supplying *true* in the NoProxy template parameter.
//
template <
class Container,
bool NoProxy = false,
class DerivedPolicies
= detail::final_std_map_derived_policies<Container, NoProxy> >
class std_map_indexing_suite
: public indexing_suite<
Container
, DerivedPolicies
, NoProxy
, true
, typename Container::value_type::second_type
, typename Container::key_type
, typename Container::key_type
>
{
public:
typedef typename Container::value_type value_type;
typedef typename Container::value_type::second_type data_type;
typedef typename Container::key_type key_type;
typedef typename Container::key_type index_type;
typedef typename Container::size_type size_type;
typedef typename Container::difference_type difference_type;
typedef typename Container::const_iterator const_iterator;
// __getitem__ for std::pair
// FIXME: horrible (20x) performance regression vs. (pair.key(),pair.data())
static object pair_getitem(value_type const& x, int i) {
if (i==0 || i==-2) return object(x.first);
else if (i==1 || i==-1) return object(x.second);
else {
PyErr_SetString(PyExc_IndexError,"Index out of range.");
throw_error_already_set();
return object(); // None
}
}
// __iter__ for std::pair
// here we cheat by making a tuple and returning its iterator
// FIXME: replace this with a pure C++ iterator
// how to handle the different return types of first and second?
static PyObject* pair_iter(value_type const& x) {
object tuple = bp::make_tuple(x.first,x.second);
return incref(tuple.attr("__iter__")().ptr());
}
// __len__ std::pair = 2
static int pair_len(value_type const& x) { return 2; }
// return a list of keys
static bp::list keys(Container const& x)
{
bp::list t;
for(typename Container::const_iterator it = x.begin(); it != x.end(); it++)
t.append(it->first);
return t;
}
// return a list of values
static bp::list values(Container const& x)
{
bp::list t;
for(typename Container::const_iterator it = x.begin(); it != x.end(); it++)
t.append(it->second);
return t;
}
// return a list of (key,value) tuples
static bp::list items(Container const& x)
{
bp::list t;
for(typename Container::const_iterator it = x.begin(); it != x.end(); it++)
t.append(bp::make_tuple(it->first, it->second));
return t;
}
#if 0
// return a shallow copy of the map
// FIXME: is this actually a shallow copy, or did i duplicate the pairs?
static Container copy(Container const& x)
{
Container newmap;
for(const_iterator it = x.begin();it != x.end();it++) newmap.insert(*it);
return newmap;
}
#endif
// get with default value
static object dict_get(Container const& x, index_type const& k, object const& default_val = object())
{
const_iterator it = x.find(k);
if (it != x.end()) return object(it->second);
else return default_val;
}
// preserve default value info
BOOST_PYTHON_FUNCTION_OVERLOADS(dict_get_overloads, dict_get, 2, 3);
// pop map[key], or throw an error if it doesn't exist
static object dict_pop(Container & x, index_type const& k)
{
const_iterator it = x.find(k);
object result;
if (it != x.end()) {
result = object(it->second);
x.erase(it->first);
return result;
}
else {
PyErr_SetString(PyExc_KeyError,"Key not found.");
throw_error_already_set();
return object(); // None
};
}
// pop map[key], or return default_val if it doesn't exist
static object dict_pop_default(Container & x, index_type const& k, object const& default_val)
{
const_iterator it = x.find(k);
object result;
if (it != x.end()) {
result = object(it->second);
x.erase(it->first);
return result;
}
else return default_val;
}
// pop a tuple, or throw an error if empty
static object dict_pop_item(Container & x)
{
const_iterator it = x.begin();
object result;
if (it != x.end()) {
result = boost::python::make_tuple(it->first,it->second);
x.erase(it->first);
return result;
}
else {
PyErr_SetString(PyExc_KeyError,"No more items to pop");
throw_error_already_set();
return object(); // None
};
}
// create a new map with given keys, initialialized to value
static object dict_fromkeys(object const& keys, object const& value)
{
object newmap = object(typename Container::storage_type());
int numkeys = extract<int>(keys.attr("__len__")());
for(int i=0;i<numkeys;i++) { // 'cuz python is more fun in C++...
newmap.attr("__setitem__")
(keys.attr("__getitem__")(i),value);
}
return newmap;
}
// spice up the constructors a bit
template <typename PyClassT>
struct init_factory {
typedef typename PyClassT::metadata::holder Holder;
typedef bp::objects::instance<Holder> instance_t;
// connect the PyObject to a wrapped C++ instance
// borrowed from boost/python/object/make_holder.hpp
static void make_holder(PyObject *p)
{
void* memory = Holder::allocate(p, offsetof(instance_t, storage), sizeof(Holder));
try {
// this only works for blank () constructors
(new (memory) Holder(p))->install(p);
}
catch(...) {
Holder::deallocate(p, memory);
throw;
}
}
static void from_dict(PyObject *p, bp::dict const& dict)
{
make_holder(p);
object newmap = object(bp::handle<>(borrowed(p)));
newmap.attr("update")(dict);
}
static void from_list(PyObject *p, bp::list const& list)
{
make_holder(p);
object newmap = object(bp::handle<>(borrowed(p)));
newmap.attr("update")(bp::dict(list));
}
};
// copy keys and values from dictlike object (anything with keys())
static void dict_update(object & x, object const& dictlike)
{
object key;
object keys = dictlike.attr("keys")();
int numkeys = extract<int>(keys.attr("__len__")());
for(int i=0;i<numkeys;i++) {
key = keys.attr("__getitem__")(i);
x.attr("__setitem__")(key,dictlike.attr("__getitem__")(key));
}
}
// set up operators to sample the key, value, or a tuple from a std::pair
struct iterkeys
{
typedef key_type result_type;
result_type operator()(value_type const& x) const
{
return x.first;
}
};
struct itervalues
{
typedef data_type result_type;
result_type operator()(value_type const& x) const
{
return x.second;
}
};
struct iteritems {
typedef tuple result_type;
result_type operator()(value_type const& x) const
{
return boost::python::make_tuple(x.first,x.second);
}
};
template <typename Transform>
struct make_transform_impl
{
typedef boost::transform_iterator<Transform, const_iterator> iterator;
static iterator begin(const Container& m)
{
return boost::make_transform_iterator(m.begin(), Transform());
}
static iterator end(const Container& m)
{
return boost::make_transform_iterator(m.end(), Transform());
}
static bp::object range()
{
return bp::range(&begin, &end);
}
};
template <typename Transform>
static bp::object
make_transform()
{
return make_transform_impl<Transform>::range();
}
static object
print_elem(typename Container::value_type const& e)
{
return "(%s, %s)" % python::make_tuple(e.first, e.second);
}
static
typename mpl::if_<
is_class<data_type>
, data_type&
, data_type
>::type
get_data(typename Container::value_type& e)
{
return e.second;
}
static typename Container::key_type
get_key(typename Container::value_type& e)
{
return e.first;
}
static data_type&
get_item(Container& container, index_type i_)
{
typename Container::iterator i = container.find(i_);
if (i == container.end())
{
PyErr_SetString(PyExc_KeyError, "Invalid key");
throw_error_already_set();
}
return i->second;
}
static void
set_item(Container& container, index_type i, data_type const& v)
{
container[i] = v;
}
static void
delete_item(Container& container, index_type i)
{
container.erase(i);
}
static size_t
size(Container& container)
{
return container.size();
}
static bool
contains(Container& container, key_type const& key)
{
return container.find(key) != container.end();
}
static bool
compare_index(Container& container, index_type a, index_type b)
{
return container.key_comp()(a, b);
}
static index_type
convert_index(Container& container, PyObject* i_)
{
extract<key_type const&> i(i_);
if (i.check())
{
return i();
}
else
{
extract<key_type> i(i_);
if (i.check())
return i();
}
PyErr_SetString(PyExc_TypeError, "Invalid index type");
throw_error_already_set();
return index_type();
}
template <class Class>
static void
extension_def(Class& cl)
{
// Wrap the map's element (value_type)
std::string elem_name = "std_map_indexing_suite_";
std::string cl_name;
object class_name(cl.attr("__name__"));
extract<std::string> class_name_extractor(class_name);
cl_name = class_name_extractor();
elem_name += cl_name;
elem_name += "_entry";
typedef typename mpl::if_<
is_class<data_type>
, return_internal_reference<>
, default_call_policies
>::type get_data_return_policy;
class_<value_type>(elem_name.c_str())
.def("__repr__", &DerivedPolicies::print_elem)
.def("data", &DerivedPolicies::get_data, get_data_return_policy(),
"K.data() -> the value associated with this pair.\n")
.def("key", &DerivedPolicies::get_key,
"K.key() -> the key associated with this pair.\n")
.def("__getitem__",&pair_getitem)
.def("__iter__",&pair_iter)
.def("__len__",&pair_len)
.def("first",&DerivedPolicies::get_key,
"K.first() -> the first item in this pair.\n")
.def("second",&DerivedPolicies::get_data, get_data_return_policy(),
"K.second() -> the second item in this pair.\n")
;
// add convenience methods to the map
cl
// declare constructors in descending order of arity
.def("__init__", init_factory<Class>::from_list,
"Initialize with keys and values from a Python dictionary: {'key':'value'}\n")
.def("__init__", init_factory<Class>::from_dict,
"Initialize with keys and values as tuples in a Python list: [('key','value')]\n")
.def(init<>()) // restore default constructor
.def("keys", &keys, "D.keys() -> list of D's keys\n")
.def("has_key", &contains, "D.has_key(k) -> True if D has a key k, else False\n") // don't re-invent the wheel
.def("values", &values, "D.values() -> list of D's values\n")
.def("items", &items, "D.items() -> list of D's (key, value) pairs, as 2-tuples\n")
.def("clear", &Container::clear, "D.clear() -> None. Remove all items from D.\n")
//.def("copy", ©, "D.copy() -> a shallow copy of D\n")
.def("get", dict_get, dict_get_overloads(args("default_val"),
"D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.\n"))
.def("pop", &dict_pop )
.def("pop", &dict_pop_default,
"D.pop(k[,d]) -> v, remove specified key and return the corresponding value\nIf key is not found, d is returned if given, otherwise KeyError is raised\n")
.def("popitem", &dict_pop_item,
"D.popitem() -> (k, v), remove and return some (key, value) pair as a\n2-tuple; but raise KeyError if D is empty\n")
.def("fromkeys", &dict_fromkeys,
(cl_name+".fromkeys(S,v) -> New "+cl_name+" with keys from S and values equal to v.\n").c_str())
.staticmethod("fromkeys")
.def("update", &dict_update,
"D.update(E) -> None. Update D from E: for k in E: D[k] = E[k]\n")
.def("iteritems",
make_transform<iteritems>(),
"D.iteritems() -> an iterator over the (key, value) items of D\n")
.def("iterkeys",
make_transform<iterkeys>(),
"D.iterkeys() -> an iterator over the keys of D\n")
.def("itervalues",
make_transform<itervalues>(),
"D.itervalues() -> an iterator over the values of D\n")
;
}
};
}} // namespace boost::python
#endif // ICETRAY_PYTHON_STD_MAP_INDEXING_SUITE_HPP_INCLUDED
| stonier/ecto | include/ecto/python/std_map_indexing_suite.hpp | C++ | bsd-3-clause | 17,030 |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.autofill_assistant.user_data;
import static org.chromium.components.autofill_assistant.AssistantAccessibilityUtils.setAccessibility;
import android.content.Context;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.DrawableRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.chromium.components.autofill_assistant.R;
import org.chromium.components.autofill_assistant.user_data.AssistantCollectUserDataModel.LoginChoiceModel;
import java.util.List;
/**
* The login details section of the Autofill Assistant payment request.
*/
public class AssistantLoginSection extends AssistantCollectUserDataSection<LoginChoiceModel> {
AssistantLoginSection(Context context, ViewGroup parent) {
super(context, parent, R.layout.autofill_assistant_login, R.layout.autofill_assistant_login,
context.getResources().getDimensionPixelSize(
org.chromium.components.autofill_assistant.R.dimen
.autofill_assistant_payment_request_title_padding),
/*titleAddButton=*/null, /*listAddButton=*/null);
}
@Override
protected void createOrEditItem(@NonNull LoginChoiceModel oldItem) {
assert oldItem != null;
assert oldItem.mOption.getInfoPopup() != null;
oldItem.mOption.getInfoPopup().show(mContext);
}
@Override
protected void updateFullView(View fullView, LoginChoiceModel model) {
updateSummaryView(fullView, model);
}
@Override
protected void updateSummaryView(View summaryView, LoginChoiceModel model) {
AssistantLoginChoice option = model.mOption;
TextView labelView = summaryView.findViewById(R.id.label);
labelView.setText(option.getLabel());
TextView sublabelView = summaryView.findViewById(R.id.sublabel);
if (TextUtils.isEmpty(option.getSublabel())) {
sublabelView.setVisibility(View.GONE);
} else {
sublabelView.setText(option.getSublabel());
setAccessibility(sublabelView, option.getSublabelAccessibilityHint());
}
}
@Override
protected boolean canEditOption(LoginChoiceModel model) {
return model.mOption.getInfoPopup() != null;
}
@Override
protected @DrawableRes int getEditButtonDrawable(LoginChoiceModel model) {
return R.drawable.btn_info;
}
@Override
protected String getEditButtonContentDescription(LoginChoiceModel model) {
if (model.mOption.getEditButtonContentDescription() != null) {
return model.mOption.getEditButtonContentDescription();
} else {
return mContext.getString(R.string.learn_more);
}
}
@Override
protected boolean areEqual(
@Nullable LoginChoiceModel modelA, @Nullable LoginChoiceModel modelB) {
if (modelA == null || modelB == null) {
return modelA == modelB;
}
// Native ensures that each login choice has a unique identifier.
return TextUtils.equals(modelA.mOption.getIdentifier(), modelB.mOption.getIdentifier());
}
/**
* The login options have changed externally. This will rebuild the UI with the new/changed
* set of login options, while keeping the selected item if possible.
*/
void onLoginsChanged(List<LoginChoiceModel> options) {
int indexToSelect = -1;
if (mSelectedOption != null) {
for (int i = 0; i < getItems().size(); i++) {
if (areEqual(mSelectedOption, getItems().get(i))) {
indexToSelect = i;
break;
}
}
}
setItems(options, indexToSelect);
}
}
| chromium/chromium | components/autofill_assistant/android/java/src/org/chromium/components/autofill_assistant/user_data/AssistantLoginSection.java | Java | bsd-3-clause | 4,011 |
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Template support for writing HTML documents.
// Documents that include Template: true in their
// metadata are executed as input to text/template.
//
// This file defines functions for those templates to invoke.
// The template uses the function "code" to inject program
// source into the output by extracting code from files and
// injecting them as HTML-escaped <pre> blocks.
//
// The syntax is simple: 1, 2, or 3 space-separated arguments:
//
// Whole file:
// {{code "foo.go"}}
// One line (here the signature of main):
// {{code "foo.go" `/^func.main/`}}
// Block of text, determined by start and end (here the body of main):
// {{code "foo.go" `/^func.main/` `/^}/`
//
// Patterns can be `/regular expression/`, a decimal number, or "$"
// to signify the end of the file. In multi-line matches,
// lines that end with the four characters
// OMIT
// are omitted from the output, making it easy to provide marker
// lines in the input that will not appear in the output but are easy
// to identify by pattern.
package main
import (
"bytes"
"fmt"
"log"
"regexp"
"strings"
"text/template"
)
// Functions in this file panic on error, but the panic is recovered
// to an error by 'code'.
var templateFuncs = template.FuncMap{
"code": code,
}
// contents reads and returns the content of the named file
// (from the virtual file system, so for example /doc refers to $GOROOT/doc).
func contents(name string) string {
file, err := ReadFile(fs, name)
if err != nil {
log.Panic(err)
}
return string(file)
}
// format returns a textual representation of the arg, formatted according to its nature.
func format(arg interface{}) string {
switch arg := arg.(type) {
case int:
return fmt.Sprintf("%d", arg)
case string:
if len(arg) > 2 && arg[0] == '/' && arg[len(arg)-1] == '/' {
return fmt.Sprintf("%#q", arg)
}
return fmt.Sprintf("%q", arg)
default:
log.Panicf("unrecognized argument: %v type %T", arg, arg)
}
return ""
}
func code(file string, arg ...interface{}) (s string, err error) {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("%v", r)
}
}()
text := contents(file)
var command string
switch len(arg) {
case 0:
// text is already whole file.
command = fmt.Sprintf("code %q", file)
case 1:
command = fmt.Sprintf("code %q %s", file, format(arg[0]))
text = oneLine(file, text, arg[0])
case 2:
command = fmt.Sprintf("code %q %s %s", file, format(arg[0]), format(arg[1]))
text = multipleLines(file, text, arg[0], arg[1])
default:
return "", fmt.Errorf("incorrect code invocation: code %q %q", file, arg)
}
// Trim spaces from output.
text = strings.Trim(text, "\n")
// Replace tabs by spaces, which work better in HTML.
text = strings.Replace(text, "\t", " ", -1)
var buf bytes.Buffer
// HTML-escape text and syntax-color comments like elsewhere.
FormatText(&buf, []byte(text), -1, true, "", nil)
// Include the command as a comment.
text = fmt.Sprintf("<pre><!--{{%s}}\n-->%s</pre>", command, buf.Bytes())
return text, nil
}
// parseArg returns the integer or string value of the argument and tells which it is.
func parseArg(arg interface{}, file string, max int) (ival int, sval string, isInt bool) {
switch n := arg.(type) {
case int:
if n <= 0 || n > max {
log.Panicf("%q:%d is out of range", file, n)
}
return n, "", true
case string:
return 0, n, false
}
log.Panicf("unrecognized argument %v type %T", arg, arg)
return
}
// oneLine returns the single line generated by a two-argument code invocation.
func oneLine(file, text string, arg interface{}) string {
lines := strings.SplitAfter(contents(file), "\n")
line, pattern, isInt := parseArg(arg, file, len(lines))
if isInt {
return lines[line-1]
}
return lines[match(file, 0, lines, pattern)-1]
}
// multipleLines returns the text generated by a three-argument code invocation.
func multipleLines(file, text string, arg1, arg2 interface{}) string {
lines := strings.SplitAfter(contents(file), "\n")
line1, pattern1, isInt1 := parseArg(arg1, file, len(lines))
line2, pattern2, isInt2 := parseArg(arg2, file, len(lines))
if !isInt1 {
line1 = match(file, 0, lines, pattern1)
}
if !isInt2 {
line2 = match(file, line1, lines, pattern2)
} else if line2 < line1 {
log.Panicf("lines out of order for %q: %d %d", text, line1, line2)
}
for k := line1 - 1; k < line2; k++ {
if strings.HasSuffix(lines[k], "OMIT\n") {
lines[k] = ""
}
}
return strings.Join(lines[line1-1:line2], "")
}
// match identifies the input line that matches the pattern in a code invocation.
// If start>0, match lines starting there rather than at the beginning.
// The return value is 1-indexed.
func match(file string, start int, lines []string, pattern string) int {
// $ matches the end of the file.
if pattern == "$" {
if len(lines) == 0 {
log.Panicf("%q: empty file", file)
}
return len(lines)
}
// /regexp/ matches the line that matches the regexp.
if len(pattern) > 2 && pattern[0] == '/' && pattern[len(pattern)-1] == '/' {
re, err := regexp.Compile(pattern[1 : len(pattern)-1])
if err != nil {
log.Panic(err)
}
for i := start; i < len(lines); i++ {
if re.MatchString(lines[i]) {
return i + 1
}
}
log.Panicf("%s: no match for %#q", file, pattern)
}
log.Panicf("unrecognized pattern: %q", pattern)
return 0
}
| oopos/go | src/cmd/godoc/template.go | GO | bsd-3-clause | 5,495 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.android_webview;
import android.annotation.SuppressLint;
import android.content.Context;
import android.media.AudioManager;
import android.net.Uri;
import android.os.Handler;
import android.os.Message;
import android.provider.MediaStore;
import android.text.TextUtils;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.webkit.URLUtil;
import android.widget.FrameLayout;
import org.chromium.android_webview.common.AwFeatures;
import org.chromium.base.Callback;
import org.chromium.base.ContentUriUtils;
import org.chromium.base.ThreadUtils;
import org.chromium.base.task.AsyncTask;
import org.chromium.content_public.browser.InvalidateTypes;
import org.chromium.content_public.common.ContentUrlConstants;
import org.chromium.content_public.common.ResourceRequestBody;
import org.chromium.url.GURL;
/**
* Adapts the AwWebContentsDelegate interface to the AwContentsClient interface.
* This class also serves a secondary function of routing certain callbacks from the content layer
* to specific listener interfaces.
*/
class AwWebContentsDelegateAdapter extends AwWebContentsDelegate {
private static final String TAG = "AwWebContentsDelegateAdapter";
private final AwContents mAwContents;
private final AwContentsClient mContentsClient;
private final AwSettings mAwSettings;
private final Context mContext;
private View mContainerView;
private FrameLayout mCustomView;
private boolean mDidSynthesizePageLoad;
public AwWebContentsDelegateAdapter(AwContents awContents, AwContentsClient contentsClient,
AwSettings settings, Context context, View containerView) {
mAwContents = awContents;
mContentsClient = contentsClient;
mAwSettings = settings;
mContext = context;
mDidSynthesizePageLoad = false;
setContainerView(containerView);
}
public void setContainerView(View containerView) {
mContainerView = containerView;
mContainerView.setClickable(true);
}
@Override
public void handleKeyboardEvent(KeyEvent event) {
if (event.getAction() == KeyEvent.ACTION_DOWN) {
int direction;
switch (event.getKeyCode()) {
case KeyEvent.KEYCODE_DPAD_DOWN:
direction = View.FOCUS_DOWN;
break;
case KeyEvent.KEYCODE_DPAD_UP:
direction = View.FOCUS_UP;
break;
case KeyEvent.KEYCODE_DPAD_LEFT:
direction = View.FOCUS_LEFT;
break;
case KeyEvent.KEYCODE_DPAD_RIGHT:
direction = View.FOCUS_RIGHT;
break;
default:
direction = 0;
break;
}
if (direction != 0 && tryToMoveFocus(direction)) return;
}
handleMediaKey(event);
mContentsClient.onUnhandledKeyEvent(event);
}
/**
* Redispatches unhandled media keys. This allows bluetooth headphones with play/pause or
* other buttons to function correctly.
*/
private void handleMediaKey(KeyEvent e) {
switch (e.getKeyCode()) {
case KeyEvent.KEYCODE_MUTE:
case KeyEvent.KEYCODE_HEADSETHOOK:
case KeyEvent.KEYCODE_MEDIA_PLAY:
case KeyEvent.KEYCODE_MEDIA_PAUSE:
case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE:
case KeyEvent.KEYCODE_MEDIA_STOP:
case KeyEvent.KEYCODE_MEDIA_NEXT:
case KeyEvent.KEYCODE_MEDIA_PREVIOUS:
case KeyEvent.KEYCODE_MEDIA_REWIND:
case KeyEvent.KEYCODE_MEDIA_RECORD:
case KeyEvent.KEYCODE_MEDIA_FAST_FORWARD:
case KeyEvent.KEYCODE_MEDIA_CLOSE:
case KeyEvent.KEYCODE_MEDIA_EJECT:
case KeyEvent.KEYCODE_MEDIA_AUDIO_TRACK:
AudioManager am = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
am.dispatchMediaKeyEvent(e);
break;
default:
break;
}
}
@Override
public boolean takeFocus(boolean reverse) {
int direction =
(reverse == (mContainerView.getLayoutDirection() == View.LAYOUT_DIRECTION_RTL))
? View.FOCUS_RIGHT : View.FOCUS_LEFT;
if (tryToMoveFocus(direction)) return true;
direction = reverse ? View.FOCUS_BACKWARD : View.FOCUS_FORWARD;
return tryToMoveFocus(direction);
}
private boolean tryToMoveFocus(int direction) {
View focus = mContainerView.focusSearch(direction);
return focus != null && focus != mContainerView && focus.requestFocus();
}
@Override
public boolean addMessageToConsole(int level, String message, int lineNumber,
String sourceId) {
@AwConsoleMessage.MessageLevel
int messageLevel = AwConsoleMessage.MESSAGE_LEVEL_DEBUG;
switch(level) {
case LOG_LEVEL_TIP:
messageLevel = AwConsoleMessage.MESSAGE_LEVEL_TIP;
break;
case LOG_LEVEL_LOG:
messageLevel = AwConsoleMessage.MESSAGE_LEVEL_LOG;
break;
case LOG_LEVEL_WARNING:
messageLevel = AwConsoleMessage.MESSAGE_LEVEL_WARNING;
break;
case LOG_LEVEL_ERROR:
messageLevel = AwConsoleMessage.MESSAGE_LEVEL_ERROR;
break;
default:
Log.w(TAG, "Unknown message level, defaulting to DEBUG");
break;
}
boolean result = mContentsClient.onConsoleMessage(
new AwConsoleMessage(message, sourceId, lineNumber, messageLevel));
return result;
}
@Override
public void onUpdateUrl(GURL url) {
// TODO: implement
}
@Override
public void openNewTab(GURL url, String extraHeaders, ResourceRequestBody postData,
int disposition, boolean isRendererInitiated) {
// This is only called in chrome layers.
assert false;
}
@Override
public void closeContents() {
mContentsClient.onCloseWindow();
}
@Override
@SuppressLint("HandlerLeak")
public void showRepostFormWarningDialog() {
// TODO(mkosiba) We should be using something akin to the JsResultReceiver as the
// callback parameter (instead of WebContents) and implement a way of converting
// that to a pair of messages.
final int msgContinuePendingReload = 1;
final int msgCancelPendingReload = 2;
// TODO(sgurun) Remember the URL to cancel the reload behavior
// if it is different than the most recent NavigationController entry.
final Handler handler = new Handler(ThreadUtils.getUiThreadLooper()) {
@Override
public void handleMessage(Message msg) {
if (mAwContents.getNavigationController() == null) return;
switch(msg.what) {
case msgContinuePendingReload: {
mAwContents.getNavigationController().continuePendingReload();
break;
}
case msgCancelPendingReload: {
mAwContents.getNavigationController().cancelPendingReload();
break;
}
default:
throw new IllegalStateException(
"WebContentsDelegateAdapter: unhandled message " + msg.what);
}
}
};
Message resend = handler.obtainMessage(msgContinuePendingReload);
Message dontResend = handler.obtainMessage(msgCancelPendingReload);
mContentsClient.getCallbackHelper().postOnFormResubmission(dontResend, resend);
}
@Override
public void runFileChooser(final int processId, final int renderId, final int modeFlags,
String acceptTypes, String title, String defaultFilename, boolean capture) {
int correctedModeFlags = FileModeConversionHelper.convertFileChooserMode(modeFlags);
AwContentsClient.FileChooserParamsImpl params = new AwContentsClient.FileChooserParamsImpl(
correctedModeFlags, acceptTypes, title, defaultFilename, capture);
mContentsClient.showFileChooser(new Callback<String[]>() {
boolean mCompleted;
@Override
public void onResult(String[] results) {
if (mCompleted) {
throw new IllegalStateException("Duplicate showFileChooser result");
}
mCompleted = true;
if (results == null) {
AwWebContentsDelegateJni.get().filesSelectedInChooser(
processId, renderId, correctedModeFlags, null, null);
return;
}
GetDisplayNameTask task = new GetDisplayNameTask(
mContext, processId, renderId, correctedModeFlags, results);
task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
}
}, params);
}
@Override
public boolean addNewContents(boolean isDialog, boolean isUserGesture) {
return mContentsClient.onCreateWindow(isDialog, isUserGesture);
}
@Override
public void activateContents() {
mContentsClient.onRequestFocus();
}
@Override
public void navigationStateChanged(int flags) {
// If this is a popup whose document has been accessed by script, hint
// the client to show the last committed url through synthesizing a page
// load, as it may be unsafe to show the pending entry.
boolean shouldSynthesizePageLoad = ((flags & InvalidateTypes.URL) != 0)
&& mAwContents.isPopupWindow() && mAwContents.hasAccessedInitialDocument();
if (AwFeatureList.isEnabled(
AwFeatures.WEBVIEW_SYNTHESIZE_PAGE_LOAD_ONLY_ON_INITIAL_MAIN_DOCUMENT_ACCESS)) {
// Since we want to synthesize the page load only once for when the
// NavigationStateChange call is triggered by the first initial main
// document access, the flag must match InvalidateTypes.URL (the flag
// fired by NavigationControllerImpl::DidAccessInitialMainDocument())
// and we must check whether a page load has previously been
// synthesized here.
shouldSynthesizePageLoad &= (flags == InvalidateTypes.URL) && !mDidSynthesizePageLoad;
}
if (shouldSynthesizePageLoad) {
String url = mAwContents.getLastCommittedUrl();
url = TextUtils.isEmpty(url) ? ContentUrlConstants.ABOUT_BLANK_DISPLAY_URL : url;
mContentsClient.getCallbackHelper().postSynthesizedPageLoadingForUrlBarUpdate(url);
mDidSynthesizePageLoad = true;
}
}
@Override
public void enterFullscreenModeForTab(boolean prefersNavigationBar) {
enterFullscreen();
}
@Override
public void exitFullscreenModeForTab() {
exitFullscreen();
}
@Override
public int getDisplayMode() {
return mAwContents.getDisplayMode();
}
@Override
public void loadingStateChanged() {
mContentsClient.updateTitle(mAwContents.getTitle(), false);
}
/**
* Called to show the web contents in fullscreen mode.
*
* <p>If entering fullscreen on a video element the web contents will contain just
* the html5 video controls. {@link #enterFullscreenVideo(View)} will be called later
* once the ContentVideoView, which contains the hardware accelerated fullscreen video,
* is ready to be shown.
*/
private void enterFullscreen() {
if (mAwContents.isFullScreen()) {
return;
}
View fullscreenView = mAwContents.enterFullScreen();
if (fullscreenView == null) {
return;
}
AwContentsClient.CustomViewCallback cb = () -> {
if (mCustomView != null) {
mAwContents.requestExitFullscreen();
}
};
mCustomView = new FrameLayout(mContext);
mCustomView.addView(fullscreenView);
mContentsClient.onShowCustomView(mCustomView, cb);
}
/**
* Called to show the web contents in embedded mode.
*/
private void exitFullscreen() {
if (mCustomView != null) {
mCustomView = null;
mAwContents.exitFullScreen();
mContentsClient.onHideCustomView();
}
}
@Override
public boolean shouldBlockMediaRequest(GURL url) {
return mAwSettings != null
? mAwSettings.getBlockNetworkLoads() && URLUtil.isNetworkUrl(url.getSpec())
: true;
}
private static class GetDisplayNameTask extends AsyncTask<String[]> {
final int mProcessId;
final int mRenderId;
final int mModeFlags;
final String[] mFilePaths;
// The task doesn't run long, so we don't gain anything from a weak ref.
@SuppressLint("StaticFieldLeak")
final Context mContext;
public GetDisplayNameTask(
Context context, int processId, int renderId, int modeFlags, String[] filePaths) {
mProcessId = processId;
mRenderId = renderId;
mModeFlags = modeFlags;
mFilePaths = filePaths;
mContext = context;
}
@Override
protected String[] doInBackground() {
String[] displayNames = new String[mFilePaths.length];
for (int i = 0; i < mFilePaths.length; i++) {
displayNames[i] = resolveFileName(mFilePaths[i]);
}
return displayNames;
}
@Override
protected void onPostExecute(String[] result) {
AwWebContentsDelegateJni.get().filesSelectedInChooser(
mProcessId, mRenderId, mModeFlags, mFilePaths, result);
}
/**
* @return the display name of a path if it is a content URI and is present in the database
* or an empty string otherwise.
*/
private String resolveFileName(String filePath) {
if (filePath == null) return "";
Uri uri = Uri.parse(filePath);
return ContentUriUtils.getDisplayName(
uri, mContext, MediaStore.MediaColumns.DISPLAY_NAME);
}
}
}
| chromium/chromium | android_webview/java/src/org/chromium/android_webview/AwWebContentsDelegateAdapter.java | Java | bsd-3-clause | 14,739 |
require('should');
var option = require('..').sdk.option;
describe('option', function() {
it('can get default values', function() {
option.get('encoding').should.equal('utf8');
});
it('can set values', function() {
option.set('encoding', 'unicode');
option.get('encoding').should.equal('unicode');
option.clean();
option.get('encoding').should.equal('utf8');
option.option('encoding').should.equal('utf8');
option.option('encoding', 'unicode');
option.get('encoding').should.equal('unicode');
option.clean();
});
it('will init with some values', function() {
var o = new option.Option({foo: 'bar'});
o.get('foo').should.equal('bar');
});
it('can clean a key', function() {
var o = new option.Option({foo: 'bar'});
o.clean('foo');
o._cache.should.eql({});
});
it('can set defaults', function() {
option.defaults({
foo: {
foo: 'bar'
}
});
option.set('foo', {bar: 'foo'});
option.get('foo').should.have.ownProperty('foo');
option.get('foo').should.have.ownProperty('bar');
});
});
| thcode/nico | tests/sdk.option.test.js | JavaScript | bsd-3-clause | 1,096 |
import datetime
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.http import Http404
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.translation import gettext as _
from django.views.generic.base import View
from django.views.generic.detail import (
BaseDetailView, SingleObjectTemplateResponseMixin,
)
from django.views.generic.list import (
MultipleObjectMixin, MultipleObjectTemplateResponseMixin,
)
class YearMixin:
"""Mixin for views manipulating year-based data."""
year_format = '%Y'
year = None
def get_year_format(self):
"""
Get a year format string in strptime syntax to be used to parse the
year from url variables.
"""
return self.year_format
def get_year(self):
"""Return the year for which this view should display data."""
year = self.year
if year is None:
try:
year = self.kwargs['year']
except KeyError:
try:
year = self.request.GET['year']
except KeyError:
raise Http404(_("No year specified"))
return year
def get_next_year(self, date):
"""Get the next valid year."""
return _get_next_prev(self, date, is_previous=False, period='year')
def get_previous_year(self, date):
"""Get the previous valid year."""
return _get_next_prev(self, date, is_previous=True, period='year')
def _get_next_year(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
try:
return date.replace(year=date.year + 1, month=1, day=1)
except ValueError:
raise Http404(_("Date out of range"))
def _get_current_year(self, date):
"""Return the start date of the current interval."""
return date.replace(month=1, day=1)
class MonthMixin:
"""Mixin for views manipulating month-based data."""
month_format = '%b'
month = None
def get_month_format(self):
"""
Get a month format string in strptime syntax to be used to parse the
month from url variables.
"""
return self.month_format
def get_month(self):
"""Return the month for which this view should display data."""
month = self.month
if month is None:
try:
month = self.kwargs['month']
except KeyError:
try:
month = self.request.GET['month']
except KeyError:
raise Http404(_("No month specified"))
return month
def get_next_month(self, date):
"""Get the next valid month."""
return _get_next_prev(self, date, is_previous=False, period='month')
def get_previous_month(self, date):
"""Get the previous valid month."""
return _get_next_prev(self, date, is_previous=True, period='month')
def _get_next_month(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
if date.month == 12:
try:
return date.replace(year=date.year + 1, month=1, day=1)
except ValueError:
raise Http404(_("Date out of range"))
else:
return date.replace(month=date.month + 1, day=1)
def _get_current_month(self, date):
"""Return the start date of the previous interval."""
return date.replace(day=1)
class DayMixin:
"""Mixin for views manipulating day-based data."""
day_format = '%d'
day = None
def get_day_format(self):
"""
Get a day format string in strptime syntax to be used to parse the day
from url variables.
"""
return self.day_format
def get_day(self):
"""Return the day for which this view should display data."""
day = self.day
if day is None:
try:
day = self.kwargs['day']
except KeyError:
try:
day = self.request.GET['day']
except KeyError:
raise Http404(_("No day specified"))
return day
def get_next_day(self, date):
"""Get the next valid day."""
return _get_next_prev(self, date, is_previous=False, period='day')
def get_previous_day(self, date):
"""Get the previous valid day."""
return _get_next_prev(self, date, is_previous=True, period='day')
def _get_next_day(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
return date + datetime.timedelta(days=1)
def _get_current_day(self, date):
"""Return the start date of the current interval."""
return date
class WeekMixin:
"""Mixin for views manipulating week-based data."""
week_format = '%U'
week = None
def get_week_format(self):
"""
Get a week format string in strptime syntax to be used to parse the
week from url variables.
"""
return self.week_format
def get_week(self):
"""Return the week for which this view should display data."""
week = self.week
if week is None:
try:
week = self.kwargs['week']
except KeyError:
try:
week = self.request.GET['week']
except KeyError:
raise Http404(_("No week specified"))
return week
def get_next_week(self, date):
"""Get the next valid week."""
return _get_next_prev(self, date, is_previous=False, period='week')
def get_previous_week(self, date):
"""Get the previous valid week."""
return _get_next_prev(self, date, is_previous=True, period='week')
def _get_next_week(self, date):
"""
Return the start date of the next interval.
The interval is defined by start date <= item date < next start date.
"""
try:
return date + datetime.timedelta(days=7 - self._get_weekday(date))
except OverflowError:
raise Http404(_("Date out of range"))
def _get_current_week(self, date):
"""Return the start date of the current interval."""
return date - datetime.timedelta(self._get_weekday(date))
def _get_weekday(self, date):
"""
Return the weekday for a given date.
The first day according to the week format is 0 and the last day is 6.
"""
week_format = self.get_week_format()
if week_format == '%W': # week starts on Monday
return date.weekday()
elif week_format == '%U': # week starts on Sunday
return (date.weekday() + 1) % 7
else:
raise ValueError("unknown week format: %s" % week_format)
class DateMixin:
"""Mixin class for views manipulating date-based data."""
date_field = None
allow_future = False
def get_date_field(self):
"""Get the name of the date field to be used to filter by."""
if self.date_field is None:
raise ImproperlyConfigured("%s.date_field is required." % self.__class__.__name__)
return self.date_field
def get_allow_future(self):
"""
Return `True` if the view should be allowed to display objects from
the future.
"""
return self.allow_future
# Note: the following three methods only work in subclasses that also
# inherit SingleObjectMixin or MultipleObjectMixin.
@cached_property
def uses_datetime_field(self):
"""
Return `True` if the date field is a `DateTimeField` and `False`
if it's a `DateField`.
"""
model = self.get_queryset().model if self.model is None else self.model
field = model._meta.get_field(self.get_date_field())
return isinstance(field, models.DateTimeField)
def _make_date_lookup_arg(self, value):
"""
Convert a date into a datetime when the date field is a DateTimeField.
When time zone support is enabled, `date` is assumed to be in the
current time zone, so that displayed items are consistent with the URL.
"""
if self.uses_datetime_field:
value = datetime.datetime.combine(value, datetime.time.min)
if settings.USE_TZ:
value = timezone.make_aware(value, timezone.get_current_timezone())
return value
def _make_single_date_lookup(self, date):
"""
Get the lookup kwargs for filtering on a single date.
If the date field is a DateTimeField, we can't just filter on
date_field=date because that doesn't take the time into account.
"""
date_field = self.get_date_field()
if self.uses_datetime_field:
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(date + datetime.timedelta(days=1))
return {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
else:
# Skip self._make_date_lookup_arg, it's a no-op in this branch.
return {date_field: date}
class BaseDateListView(MultipleObjectMixin, DateMixin, View):
"""Abstract base class for date-based views displaying a list of objects."""
allow_empty = False
date_list_period = 'year'
def get(self, request, *args, **kwargs):
self.date_list, self.object_list, extra_context = self.get_dated_items()
context = self.get_context_data(
object_list=self.object_list,
date_list=self.date_list,
**extra_context
)
return self.render_to_response(context)
def get_dated_items(self):
"""Obtain the list of dates and items."""
raise NotImplementedError('A DateView must provide an implementation of get_dated_items()')
def get_ordering(self):
"""
Return the field or fields to use for ordering the queryset; use the
date field by default.
"""
return '-%s' % self.get_date_field() if self.ordering is None else self.ordering
def get_dated_queryset(self, **lookup):
"""
Get a queryset properly filtered according to `allow_future` and any
extra lookup kwargs.
"""
qs = self.get_queryset().filter(**lookup)
date_field = self.get_date_field()
allow_future = self.get_allow_future()
allow_empty = self.get_allow_empty()
paginate_by = self.get_paginate_by(qs)
if not allow_future:
now = timezone.now() if self.uses_datetime_field else timezone_today()
qs = qs.filter(**{'%s__lte' % date_field: now})
if not allow_empty:
# When pagination is enabled, it's better to do a cheap query
# than to load the unpaginated queryset in memory.
is_empty = len(qs) == 0 if paginate_by is None else not qs.exists()
if is_empty:
raise Http404(_("No %(verbose_name_plural)s available") % {
'verbose_name_plural': qs.model._meta.verbose_name_plural,
})
return qs
def get_date_list_period(self):
"""
Get the aggregation period for the list of dates: 'year', 'month', or
'day'.
"""
return self.date_list_period
def get_date_list(self, queryset, date_type=None, ordering='ASC'):
"""
Get a date list by calling `queryset.dates/datetimes()`, checking
along the way for empty lists that aren't allowed.
"""
date_field = self.get_date_field()
allow_empty = self.get_allow_empty()
if date_type is None:
date_type = self.get_date_list_period()
if self.uses_datetime_field:
date_list = queryset.datetimes(date_field, date_type, ordering)
else:
date_list = queryset.dates(date_field, date_type, ordering)
if date_list is not None and not date_list and not allow_empty:
raise Http404(
_("No %(verbose_name_plural)s available") % {
'verbose_name_plural': queryset.model._meta.verbose_name_plural,
}
)
return date_list
class BaseArchiveIndexView(BaseDateListView):
"""
Base class for archives of date-based items. Requires a response mixin.
"""
context_object_name = 'latest'
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
qs = self.get_dated_queryset()
date_list = self.get_date_list(qs, ordering='DESC')
if not date_list:
qs = qs.none()
return (date_list, qs, {})
class ArchiveIndexView(MultipleObjectTemplateResponseMixin, BaseArchiveIndexView):
"""Top-level archive of date-based items."""
template_name_suffix = '_archive'
class BaseYearArchiveView(YearMixin, BaseDateListView):
"""List of objects published in a given year."""
date_list_period = 'month'
make_object_list = False
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
date_field = self.get_date_field()
date = _date_from_string(year, self.get_year_format())
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_year(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
date_list = self.get_date_list(qs)
if not self.get_make_object_list():
# We need this to be a queryset since parent classes introspect it
# to find information about the model.
qs = qs.none()
return (date_list, qs, {
'year': date,
'next_year': self.get_next_year(date),
'previous_year': self.get_previous_year(date),
})
def get_make_object_list(self):
"""
Return `True` if this view should contain the full list of objects in
the given year.
"""
return self.make_object_list
class YearArchiveView(MultipleObjectTemplateResponseMixin, BaseYearArchiveView):
"""List of objects published in a given year."""
template_name_suffix = '_archive_year'
class BaseMonthArchiveView(YearMixin, MonthMixin, BaseDateListView):
"""List of objects published in a given month."""
date_list_period = 'day'
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
month = self.get_month()
date_field = self.get_date_field()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format())
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_month(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
date_list = self.get_date_list(qs)
return (date_list, qs, {
'month': date,
'next_month': self.get_next_month(date),
'previous_month': self.get_previous_month(date),
})
class MonthArchiveView(MultipleObjectTemplateResponseMixin, BaseMonthArchiveView):
"""List of objects published in a given month."""
template_name_suffix = '_archive_month'
class BaseWeekArchiveView(YearMixin, WeekMixin, BaseDateListView):
"""List of objects published in a given week."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
week = self.get_week()
date_field = self.get_date_field()
week_format = self.get_week_format()
week_start = {
'%W': '1',
'%U': '0',
}[week_format]
date = _date_from_string(year, self.get_year_format(),
week_start, '%w',
week, week_format)
since = self._make_date_lookup_arg(date)
until = self._make_date_lookup_arg(self._get_next_week(date))
lookup_kwargs = {
'%s__gte' % date_field: since,
'%s__lt' % date_field: until,
}
qs = self.get_dated_queryset(**lookup_kwargs)
return (None, qs, {
'week': date,
'next_week': self.get_next_week(date),
'previous_week': self.get_previous_week(date),
})
class WeekArchiveView(MultipleObjectTemplateResponseMixin, BaseWeekArchiveView):
"""List of objects published in a given week."""
template_name_suffix = '_archive_week'
class BaseDayArchiveView(YearMixin, MonthMixin, DayMixin, BaseDateListView):
"""List of objects published on a given day."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
year = self.get_year()
month = self.get_month()
day = self.get_day()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format(),
day, self.get_day_format())
return self._get_dated_items(date)
def _get_dated_items(self, date):
"""
Do the actual heavy lifting of getting the dated items; this accepts a
date object so that TodayArchiveView can be trivial.
"""
lookup_kwargs = self._make_single_date_lookup(date)
qs = self.get_dated_queryset(**lookup_kwargs)
return (None, qs, {
'day': date,
'previous_day': self.get_previous_day(date),
'next_day': self.get_next_day(date),
'previous_month': self.get_previous_month(date),
'next_month': self.get_next_month(date)
})
class DayArchiveView(MultipleObjectTemplateResponseMixin, BaseDayArchiveView):
"""List of objects published on a given day."""
template_name_suffix = "_archive_day"
class BaseTodayArchiveView(BaseDayArchiveView):
"""List of objects published today."""
def get_dated_items(self):
"""Return (date_list, items, extra_context) for this request."""
return self._get_dated_items(datetime.date.today())
class TodayArchiveView(MultipleObjectTemplateResponseMixin, BaseTodayArchiveView):
"""List of objects published today."""
template_name_suffix = "_archive_day"
class BaseDateDetailView(YearMixin, MonthMixin, DayMixin, DateMixin, BaseDetailView):
"""
Detail view of a single object on a single date; this differs from the
standard DetailView by accepting a year/month/day in the URL.
"""
def get_object(self, queryset=None):
"""Get the object this request displays."""
year = self.get_year()
month = self.get_month()
day = self.get_day()
date = _date_from_string(year, self.get_year_format(),
month, self.get_month_format(),
day, self.get_day_format())
# Use a custom queryset if provided
qs = self.get_queryset() if queryset is None else queryset
if not self.get_allow_future() and date > datetime.date.today():
raise Http404(_(
"Future %(verbose_name_plural)s not available because "
"%(class_name)s.allow_future is False."
) % {
'verbose_name_plural': qs.model._meta.verbose_name_plural,
'class_name': self.__class__.__name__,
})
# Filter down a queryset from self.queryset using the date from the
# URL. This'll get passed as the queryset to DetailView.get_object,
# which'll handle the 404
lookup_kwargs = self._make_single_date_lookup(date)
qs = qs.filter(**lookup_kwargs)
return super().get_object(queryset=qs)
class DateDetailView(SingleObjectTemplateResponseMixin, BaseDateDetailView):
"""
Detail view of a single object on a single date; this differs from the
standard DetailView by accepting a year/month/day in the URL.
"""
template_name_suffix = '_detail'
def _date_from_string(year, year_format, month='', month_format='', day='', day_format='', delim='__'):
"""
Get a datetime.date object given a format string and a year, month, and day
(only year is mandatory). Raise a 404 for an invalid date.
"""
format = year_format + delim + month_format + delim + day_format
datestr = str(year) + delim + str(month) + delim + str(day)
try:
return datetime.datetime.strptime(datestr, format).date()
except ValueError:
raise Http404(_("Invalid date string '%(datestr)s' given format '%(format)s'") % {
'datestr': datestr,
'format': format,
})
def _get_next_prev(generic_view, date, is_previous, period):
"""
Get the next or the previous valid date. The idea is to allow links on
month/day views to never be 404s by never providing a date that'll be
invalid for the given view.
This is a bit complicated since it handles different intervals of time,
hence the coupling to generic_view.
However in essence the logic comes down to:
* If allow_empty and allow_future are both true, this is easy: just
return the naive result (just the next/previous day/week/month,
regardless of object existence.)
* If allow_empty is true, allow_future is false, and the naive result
isn't in the future, then return it; otherwise return None.
* If allow_empty is false and allow_future is true, return the next
date *that contains a valid object*, even if it's in the future. If
there are no next objects, return None.
* If allow_empty is false and allow_future is false, return the next
date that contains a valid object. If that date is in the future, or
if there are no next objects, return None.
"""
date_field = generic_view.get_date_field()
allow_empty = generic_view.get_allow_empty()
allow_future = generic_view.get_allow_future()
get_current = getattr(generic_view, '_get_current_%s' % period)
get_next = getattr(generic_view, '_get_next_%s' % period)
# Bounds of the current interval
start, end = get_current(date), get_next(date)
# If allow_empty is True, the naive result will be valid
if allow_empty:
if is_previous:
result = get_current(start - datetime.timedelta(days=1))
else:
result = end
if allow_future or result <= timezone_today():
return result
else:
return None
# Otherwise, we'll need to go to the database to look for an object
# whose date_field is at least (greater than/less than) the given
# naive result
else:
# Construct a lookup and an ordering depending on whether we're doing
# a previous date or a next date lookup.
if is_previous:
lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)}
ordering = '-%s' % date_field
else:
lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)}
ordering = date_field
# Filter out objects in the future if appropriate.
if not allow_future:
# Fortunately, to match the implementation of allow_future,
# we need __lte, which doesn't conflict with __lt above.
if generic_view.uses_datetime_field:
now = timezone.now()
else:
now = timezone_today()
lookup['%s__lte' % date_field] = now
qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)
# Snag the first object from the queryset; if it doesn't exist that
# means there's no next/previous link available.
try:
result = getattr(qs[0], date_field)
except IndexError:
return None
# Convert datetimes to dates in the current time zone.
if generic_view.uses_datetime_field:
if settings.USE_TZ:
result = timezone.localtime(result)
result = result.date()
# Return the first day of the period.
return get_current(result)
def timezone_today():
"""Return the current date in the current time zone."""
if settings.USE_TZ:
return timezone.localdate()
else:
return datetime.date.today()
| shacker/django | django/views/generic/dates.py | Python | bsd-3-clause | 25,251 |
from __future__ import absolute_import, print_function
import inspect
import logging
import raven
import sentry
from django.conf import settings
from django.db.utils import DatabaseError
from raven.contrib.django.client import DjangoClient
from . import metrics
UNSAFE_FILES = (
'sentry/event_manager.py',
'sentry/tasks/process_buffer.py',
)
def can_record_current_event():
"""
Tests the current stack for unsafe locations that would likely cause
recursion if an attempt to send to Sentry was made.
"""
for _, filename, _, _, _, _ in inspect.stack():
if filename.endswith(UNSAFE_FILES):
return False
return True
class SentryInternalClient(DjangoClient):
def is_enabled(self):
if getattr(settings, 'DISABLE_RAVEN', False):
return False
return settings.SENTRY_PROJECT is not None
def capture(self, *args, **kwargs):
if not can_record_current_event():
metrics.incr('internal.uncaptured.events')
self.error_logger.error('Not capturing event due to unsafe stacktrace:\n%r', kwargs)
return
return super(SentryInternalClient, self).capture(*args, **kwargs)
def send(self, **kwargs):
# TODO(dcramer): this should respect rate limits/etc and use the normal
# pipeline
from sentry.app import tsdb
from sentry.coreapi import ClientApiHelper
from sentry.event_manager import EventManager
from sentry.models import Project
helper = ClientApiHelper(
agent='raven-python/%s (sentry %s)' % (raven.VERSION, sentry.VERSION),
project_id=settings.SENTRY_PROJECT,
version=self.protocol_version,
)
try:
project = Project.objects.get_from_cache(id=settings.SENTRY_PROJECT)
except DatabaseError:
self.error_logger.error('Unable to fetch internal project',
exc_info=True)
except Project.DoesNotExist:
self.error_logger.error('Internal project (id=%s) does not exist',
settings.SENTRY_PROJECT)
return
helper.context.bind_project(project)
metrics.incr('events.total', 1)
kwargs['project'] = project.id
try:
manager = EventManager(kwargs)
data = manager.normalize()
tsdb.incr_multi([
(tsdb.models.project_total_received, project.id),
(tsdb.models.organization_total_received, project.organization_id),
])
helper.insert_data_to_database(data)
except Exception as e:
if self.raise_send_errors:
raise
self.error_logger.error(
'Unable to record event: %s\nEvent was: %r', e,
kwargs['message'], exc_info=True)
class SentryInternalFilter(logging.Filter):
def filter(self, record):
metrics.incr('internal.uncaptured.logs')
return can_record_current_event()
| hongliang5623/sentry | src/sentry/utils/raven.py | Python | bsd-3-clause | 3,051 |
using Shouldly.Tests.TestHelpers;
namespace Shouldly.Tests.Strings.DetailedDifference.CaseInsensitive.LongStrings.MultipleDiffs
{
// Just before the edge case for consolidation. 2 differences are exactly the required length apart to be consolidated into one diff
public class DiffsCloseToEachOtherAreConsolidatedBorderConditionOne: ShouldlyShouldTestScenario
{
protected override void ShouldPass()
{
"1A,1b,1c,1d,1e,1f,1g,1h,1i,1j,1k,1l,1m,1n,1o,1p,1q,1r,1s,1t,1u,1v"
.ShouldBe(
"1a,1b,1c,1d,1e,1f,1g,1h,1i,1j,1k,1l,1m,1n,1o,1p,1q,1r,1s,1t,1u,1v",
Case.Insensitive);
}
protected override void ShouldThrowAWobbly()
{
"1a,1b,1c,1d,1e,1f,1g,1h,1i,1j,1k,1l,1m,1n,1o,1p,1q,1r,1s,1t,1u,1v"
.ShouldBe(
"1a,1b.1c,1d,1e,1f,1g,1h,1j,1j,1k,1l,1m,1n,1o.1p,1q,1r,1s,1t,1u,1w",
Case.Insensitive);
}
protected override string ChuckedAWobblyErrorMessage
{
get
{
return @"""1a,1b,1c,1d,1e,1f,1g,1h,1i,1j,1k,1l,1m,1n,1o,1p,1q,1r,1s,1t,1u,1v""
should be
""1a,1b.1c,1d,1e,1f,1g,1h,1j,1j,1k,1l,1m,1n,1o.1p,1q,1r,1s,1t,1u,1w""
but was
""1a,1b,1c,1d,1e,1f,1g,1h,1i,1j,1k,1l,1m,1n,1o,1p,1q,1r,1s,1t,1u,1v""
difference
Case Insensitive Comparison
Difference | | |
| \|/ \|/
Index | ... 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 ...
Expected Value | ... . 1 c , 1 d , 1 e , 1 f , 1 g , 1 h , 1 j ...
Actual Value | ... , 1 c , 1 d , 1 e , 1 f , 1 g , 1 h , 1 i ...
Expected Code | ... 46 49 99 44 49 100 44 49 101 44 49 102 44 49 103 44 49 104 44 49 106 ...
Actual Code | ... 44 49 99 44 49 100 44 49 101 44 49 102 44 49 103 44 49 104 44 49 105 ...
Difference | | |
| \|/ \|/
Index | ... 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64
Expected Value | ... . 1 p , 1 q , 1 r , 1 s , 1 t , 1 u , 1 w
Actual Value | ... , 1 p , 1 q , 1 r , 1 s , 1 t , 1 u , 1 v
Expected Code | ... 46 49 112 44 49 113 44 49 114 44 49 115 44 49 116 44 49 117 44 49 119
Actual Code | ... 44 49 112 44 49 113 44 49 114 44 49 115 44 49 116 44 49 117 44 49 118 "
;
}
}
}
}
| yannisgu/shouldly | src/Shouldly.Tests/Strings/DetailedDifference/CaseInsensitive/LongStrings/MultipleDiffs/DiffsCloseToEachOtherAreConsolidatedBorderConditionOne.cs | C# | bsd-3-clause | 3,647 |
function setSearchTextField(paramname, field) {
var passed = location.search.substring(1);
var query = getParm(passed,paramname);
var query = getParm(passed,paramname);
query = query.replace(/\+/g," ");
var loc = document.location;
if(/.*search.html/.test(loc)) {
document.title = decodeURIComponent(query) + ' - Wolfram Search';
}
field.value = decodeURIComponent(query);
}
function getParm(string,parm) {
// returns value of parm from string
var startPos = string.indexOf(parm + "=");
if (startPos > -1) {
startPos = startPos + parm.length + 1;
var endPos = string.indexOf("&",startPos);
if (endPos == -1)
endPos = string.length;
return string.substring(startPos,endPos);
}
return '';
}
| mfroeling/DTITools | docs/htmldoc/standard/javascript/search.js | JavaScript | bsd-3-clause | 723 |
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
import os
import shlex
from subprocess import Popen, PIPE
import time
import unittest
import utils
class TestCase(unittest.TestCase):
@classmethod
def setenv(cls, env):
cls.env = env
def assertContains(self, b, a):
self.assertTrue(a in b, "%r not found in %r" % (a, b))
class MultiDict(dict):
def __getattr__(self, name):
v = self[name]
if type(v)==dict:
v=MultiDict(v)
return v
def mget(self, mkey, default=None):
keys = mkey.split(".")
try:
v = self
for key in keys:
v = v[key]
except KeyError:
v = default
if type(v)==dict:
v = MultiDict(v)
return v
class Tailer(object):
def __init__(self, filepath, flush=None, sleep=0, timeout=10.0):
self.filepath = filepath
self.flush = flush
self.sleep = sleep
self.timeout = timeout
self.f = None
self.reset()
def reset(self):
"""Call reset when you want to start using the tailer."""
if self.flush:
self.flush()
else:
time.sleep(self.sleep)
# Re-open the file if open.
if self.f:
self.f.close()
self.f = None
# Wait for file to exist.
timeout = self.timeout
while not os.path.exists(self.filepath):
timeout = utils.wait_step('file exists: ' + self.filepath, timeout)
self.f = open(self.filepath)
self.f.seek(0, os.SEEK_END)
self.pos = self.f.tell()
def read(self):
"""Returns a string which may contain multiple lines."""
if self.flush:
self.flush()
else:
time.sleep(self.sleep)
self.f.seek(0, os.SEEK_END)
newpos = self.f.tell()
if newpos < self.pos:
return ""
self.f.seek(self.pos, os.SEEK_SET)
size = newpos-self.pos
self.pos = newpos
return self.f.read(size)
def readLines(self):
"""Returns a list of read lines."""
return self.read().splitlines()
# FIXME: Hijacked from go/vt/tabletserver/test.py
# Reuse when things come together
def execute(cmd, trap_output=False, verbose=False, **kargs):
args = shlex.split(cmd)
if trap_output:
kargs['stdout'] = PIPE
kargs['stderr'] = PIPE
if verbose:
print "Execute:", cmd, ', '.join('%s=%s' % x for x in kargs.iteritems())
proc = Popen(args, **kargs)
proc.args = args
stdout, stderr = proc.communicate()
if proc.returncode:
raise Exception('FAIL: %s %s %s' % (args, stdout, stderr))
return stdout, stderr
| anusornc/vitess | test/framework.py | Python | bsd-3-clause | 2,552 |
#include "consoletools.h"
#include "log/logger.h"
#include <QTextStream>
#include <Windows.h>
LOGGER(ConsoleTools);
class ConsoleTools::Private
{
public:
Private()
{
hConsole = ::GetStdHandle(STD_INPUT_HANDLE);
if (hConsole == INVALID_HANDLE_VALUE)
{
LOG_ERROR("Unable to get console handle");
}
}
HANDLE hConsole;
};
ConsoleTools::ConsoleTools()
: d(new Private)
{
}
ConsoleTools::~ConsoleTools()
{
enableEcho();
delete d;
}
bool ConsoleTools::enableEcho()
{
DWORD value;
::GetConsoleMode(d->hConsole, &value);
value |= ENABLE_ECHO_INPUT;
::SetConsoleMode(d->hConsole, value);
return true;
}
bool ConsoleTools::disableEcho()
{
DWORD value;
::GetConsoleMode(d->hConsole, &value);
value &= ~ENABLE_ECHO_INPUT;
::SetConsoleMode(d->hConsole, value);
return true;
}
QString ConsoleTools::readLine()
{
QTextStream stream(stdin);
return stream.readLine();
}
QString ConsoleTools::readPassword()
{
disableEcho();
QTextStream stream(stdin);
QString pw = stream.readLine();
enableEcho();
return pw;
}
| MKV21/glimpse_client | src/console/consoletools_win.cpp | C++ | bsd-3-clause | 1,142 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/common/permissions/api_permission_set.h"
#include "base/containers/contains.h"
#include "base/logging.h"
#include "base/ranges/algorithm.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/values.h"
#include "extensions/common/error_utils.h"
#include "extensions/common/manifest_constants.h"
#include "extensions/common/permissions/permissions_info.h"
using extensions::mojom::APIPermissionID;
namespace extensions {
namespace errors = manifest_errors;
namespace {
// Helper object that is implicitly constructible from both a PermissionID and
// from an mojom::APIPermissionID.
struct PermissionIDCompareHelper {
PermissionIDCompareHelper(const PermissionID& id) : id(id.id()) {}
PermissionIDCompareHelper(const APIPermissionID id) : id(id) {}
APIPermissionID id;
};
bool CreateAPIPermission(const std::string& permission_str,
const base::Value* permission_value,
APIPermissionSet::ParseSource source,
APIPermissionSet* api_permissions,
std::u16string* error,
std::vector<std::string>* unhandled_permissions) {
const APIPermissionInfo* permission_info =
PermissionsInfo::GetInstance()->GetByName(permission_str);
if (permission_info) {
std::unique_ptr<APIPermission> permission(
permission_info->CreateAPIPermission());
if (source != APIPermissionSet::kAllowInternalPermissions &&
permission_info->is_internal()) {
// An internal permission specified in permissions list is an error.
if (error) {
*error = ErrorUtils::FormatErrorMessageUTF16(
errors::kPermissionNotAllowedInManifest, permission_str);
}
return false;
}
std::string error_details;
if (!permission->FromValue(permission_value, &error_details,
unhandled_permissions)) {
if (error) {
if (error_details.empty()) {
*error = ErrorUtils::FormatErrorMessageUTF16(
errors::kInvalidPermission,
permission_info->name());
} else {
*error = ErrorUtils::FormatErrorMessageUTF16(
errors::kInvalidPermissionWithDetail,
permission_info->name(),
error_details);
}
return false;
}
VLOG(1) << "Parse permission failed.";
} else {
api_permissions->insert(std::move(permission));
}
return true;
}
if (unhandled_permissions)
unhandled_permissions->push_back(permission_str);
else
VLOG(1) << "Unknown permission[" << permission_str << "].";
return true;
}
bool ParseChildPermissions(const std::string& base_name,
const base::Value* permission_value,
APIPermissionSet::ParseSource source,
APIPermissionSet* api_permissions,
std::u16string* error,
std::vector<std::string>* unhandled_permissions) {
if (permission_value) {
if (!permission_value->is_list()) {
if (error) {
*error = ErrorUtils::FormatErrorMessageUTF16(
errors::kInvalidPermission, base_name);
return false;
}
VLOG(1) << "Permission value is not a list.";
// Failed to parse, but since error is NULL, failures are not fatal so
// return true here anyway.
return true;
}
base::Value::ConstListView list_view =
permission_value->GetListDeprecated();
for (size_t i = 0; i < list_view.size(); ++i) {
std::string permission_str;
if (!list_view[i].is_string()) {
// permission should be a string
if (error) {
*error = ErrorUtils::FormatErrorMessageUTF16(
errors::kInvalidPermission,
base_name + '.' + base::NumberToString(i));
return false;
}
VLOG(1) << "Permission is not a string.";
continue;
}
if (!CreateAPIPermission(base_name + '.' + list_view[i].GetString(),
nullptr, source, api_permissions, error,
unhandled_permissions))
return false;
}
}
return CreateAPIPermission(base_name, nullptr, source, api_permissions, error,
nullptr);
}
} // namespace
void APIPermissionSet::insert(APIPermissionID id) {
const APIPermissionInfo* permission_info =
PermissionsInfo::GetInstance()->GetByID(id);
DCHECK(permission_info);
insert(permission_info->CreateAPIPermission());
}
void APIPermissionSet::insert(std::unique_ptr<APIPermission> permission) {
BaseSetOperators<APIPermissionSet>::insert(std::move(permission));
}
// static
bool APIPermissionSet::ParseFromJSON(
const base::Value* permissions,
APIPermissionSet::ParseSource source,
APIPermissionSet* api_permissions,
std::u16string* error,
std::vector<std::string>* unhandled_permissions) {
if (!permissions->is_list()) {
if (error) {
*error = ErrorUtils::FormatErrorMessageUTF16(errors::kInvalidPermission,
"<root>");
return false;
}
VLOG(1) << "Root Permissions value is not a list.";
// Failed to parse, but since error is NULL, failures are not fatal so
// return true here anyway.
return true;
}
base::Value::ConstListView list_view = permissions->GetListDeprecated();
for (size_t i = 0; i < list_view.size(); ++i) {
std::string permission_str;
const base::Value* permission_value = nullptr;
// permission should be a string or a single key dict.
if (list_view[i].is_string()) {
permission_str = list_view[i].GetString();
} else if (list_view[i].is_dict() && list_view[i].DictSize() == 1) {
auto dict_iter = list_view[i].DictItems().begin();
permission_str = dict_iter->first;
permission_value = &dict_iter->second;
} else {
if (error) {
*error = ErrorUtils::FormatErrorMessageUTF16(errors::kInvalidPermission,
base::NumberToString(i));
return false;
}
VLOG(1) << "Permission is not a string or single key dict.";
continue;
}
// Check if this permission is a special case where its value should
// be treated as a list of child permissions.
if (PermissionsInfo::GetInstance()->HasChildPermissions(permission_str)) {
if (!ParseChildPermissions(permission_str, permission_value, source,
api_permissions, error, unhandled_permissions))
return false;
continue;
}
if (!CreateAPIPermission(permission_str, permission_value, source,
api_permissions, error, unhandled_permissions))
return false;
}
return true;
}
PermissionID::PermissionID(APIPermissionID id)
: std::pair<APIPermissionID, std::u16string>(id, std::u16string()) {}
PermissionID::PermissionID(APIPermissionID id, const std::u16string& parameter)
: std::pair<APIPermissionID, std::u16string>(id, parameter) {}
PermissionID::~PermissionID() {
}
PermissionIDSet::PermissionIDSet() {
}
PermissionIDSet::PermissionIDSet(
std::initializer_list<APIPermissionID> permissions) {
for (auto permission : permissions) {
permissions_.insert(PermissionID(permission));
}
}
PermissionIDSet::PermissionIDSet(const PermissionIDSet& other) = default;
PermissionIDSet::~PermissionIDSet() {
}
void PermissionIDSet::insert(APIPermissionID permission_id) {
insert(permission_id, std::u16string());
}
void PermissionIDSet::insert(APIPermissionID permission_id,
const std::u16string& permission_detail) {
permissions_.insert(PermissionID(permission_id, permission_detail));
}
void PermissionIDSet::InsertAll(const PermissionIDSet& permission_set) {
for (const auto& permission : permission_set.permissions_) {
permissions_.insert(permission);
}
}
void PermissionIDSet::erase(APIPermissionID permission_id) {
auto lower_bound = permissions_.lower_bound(PermissionID(permission_id));
auto upper_bound = lower_bound;
while (upper_bound != permissions_.end() &&
upper_bound->id() == permission_id) {
++upper_bound;
}
permissions_.erase(lower_bound, upper_bound);
}
std::vector<std::u16string> PermissionIDSet::GetAllPermissionParameters()
const {
std::vector<std::u16string> params;
for (const auto& permission : permissions_) {
params.push_back(permission.parameter());
}
return params;
}
bool PermissionIDSet::ContainsID(PermissionID permission_id) const {
auto it = permissions_.lower_bound(permission_id);
return it != permissions_.end() && it->id() == permission_id.id();
}
bool PermissionIDSet::ContainsID(APIPermissionID permission_id) const {
return ContainsID(PermissionID(permission_id));
}
bool PermissionIDSet::ContainsAllIDs(
const std::set<APIPermissionID>& permission_ids) const {
return std::includes(permissions_.begin(), permissions_.end(),
permission_ids.begin(), permission_ids.end(),
[] (const PermissionIDCompareHelper& lhs,
const PermissionIDCompareHelper& rhs) {
return lhs.id < rhs.id;
});
}
bool PermissionIDSet::ContainsAnyID(
const std::set<APIPermissionID>& permission_ids) const {
for (APIPermissionID id : permission_ids) {
if (ContainsID(id))
return true;
}
return false;
}
bool PermissionIDSet::ContainsAnyID(const PermissionIDSet& other) const {
for (const auto& id : other) {
if (ContainsID(id))
return true;
}
return false;
}
PermissionIDSet PermissionIDSet::GetAllPermissionsWithID(
APIPermissionID permission_id) const {
PermissionIDSet subset;
auto it = permissions_.lower_bound(PermissionID(permission_id));
while (it != permissions_.end() && it->id() == permission_id) {
subset.permissions_.insert(*it);
++it;
}
return subset;
}
PermissionIDSet PermissionIDSet::GetAllPermissionsWithIDs(
const std::set<APIPermissionID>& permission_ids) const {
PermissionIDSet subset;
for (const auto& permission : permissions_) {
if (base::Contains(permission_ids, permission.id())) {
subset.permissions_.insert(permission);
}
}
return subset;
}
bool PermissionIDSet::Includes(const PermissionIDSet& subset) const {
return base::ranges::includes(permissions_, subset.permissions_);
}
bool PermissionIDSet::Equals(const PermissionIDSet& set) const {
return permissions_ == set.permissions_;
}
// static
PermissionIDSet PermissionIDSet::Difference(const PermissionIDSet& set_1,
const PermissionIDSet& set_2) {
return PermissionIDSet(base::STLSetDifference<std::set<PermissionID>>(
set_1.permissions_, set_2.permissions_));
}
size_t PermissionIDSet::size() const {
return permissions_.size();
}
bool PermissionIDSet::empty() const {
return permissions_.empty();
}
PermissionIDSet::PermissionIDSet(const std::set<PermissionID>& permissions)
: permissions_(permissions) {
}
} // namespace extensions
| chromium/chromium | extensions/common/permissions/api_permission_set.cc | C++ | bsd-3-clause | 11,420 |
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import json
from command_executor import CommandExecutor
_THIS_DIR = os.path.abspath(os.path.dirname(__file__))
_PARENT_DIR = os.path.join(_THIS_DIR, os.pardir)
sys.path.insert(1, _PARENT_DIR)
import chrome_paths
sys.path.remove(_PARENT_DIR)
sys.path.insert(0,os.path.join(chrome_paths.GetSrc(), 'third_party',
'catapult', 'telemetry', 'third_party',
'websocket-client'))
import websocket
class WebSocketCommands:
CREATE_WEBSOCKET = \
'/session/:sessionId'
SEND_OVER_WEBSOCKET = \
'/session/:sessionId/chromium/send_command_from_websocket'
class WebSocketConnection(object):
def __init__(self, server_url, session_id):
self._server_url = server_url.replace('http', 'ws')
self._session_id = session_id
self._command_id = -1
cmd_params = {'sessionId': session_id}
path = CommandExecutor.CreatePath(
WebSocketCommands.CREATE_WEBSOCKET, cmd_params)
self._websocket = websocket.create_connection(self._server_url + path)
def SendCommand(self, cmd_params):
cmd_params['id'] = self._command_id
self._command_id -= 1
self._websocket.send(json.dumps(cmd_params))
def ReadMessage(self):
return self._websocket.recv()
def Close(self):
self._websocket.close();
| chromium/chromium | chrome/test/chromedriver/client/websocket_connection.py | Python | bsd-3-clause | 1,471 |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/autofill/core/browser/payments/test_authentication_requester.h"
#include <string>
#include "build/build_config.h"
#include "components/autofill/core/browser/data_model/credit_card.h"
namespace autofill {
TestAuthenticationRequester::TestAuthenticationRequester() {}
TestAuthenticationRequester::~TestAuthenticationRequester() {}
base::WeakPtr<TestAuthenticationRequester>
TestAuthenticationRequester::GetWeakPtr() {
return weak_ptr_factory_.GetWeakPtr();
}
void TestAuthenticationRequester::OnCVCAuthenticationComplete(
const CreditCardCVCAuthenticator::CVCAuthenticationResponse& response) {
did_succeed_ = response.did_succeed;
if (*did_succeed_) {
DCHECK(response.card);
number_ = response.card->number();
}
}
#if BUILDFLAG(IS_ANDROID)
bool TestAuthenticationRequester::ShouldOfferFidoAuth() const {
return false;
}
bool TestAuthenticationRequester::UserOptedInToFidoFromSettingsPageOnMobile()
const {
return false;
}
#endif
#if !BUILDFLAG(IS_IOS)
void TestAuthenticationRequester::OnFIDOAuthenticationComplete(
const CreditCardFIDOAuthenticator::FidoAuthenticationResponse& response) {
did_succeed_ = response.did_succeed;
if (*did_succeed_) {
DCHECK(response.card);
number_ = response.card->number();
}
failure_type_ = response.failure_type;
}
void TestAuthenticationRequester::OnFidoAuthorizationComplete(
bool did_succeed) {
did_succeed_ = did_succeed;
}
void TestAuthenticationRequester::IsUserVerifiableCallback(
bool is_user_verifiable) {
is_user_verifiable_ = is_user_verifiable;
}
#endif
void TestAuthenticationRequester::OnOtpAuthenticationComplete(
const CreditCardOtpAuthenticator::OtpAuthenticationResponse& response) {
did_succeed_ =
response.result ==
CreditCardOtpAuthenticator::OtpAuthenticationResponse::Result::kSuccess;
if (*did_succeed_) {
DCHECK(response.card);
number_ = response.card->number();
}
}
} // namespace autofill
| chromium/chromium | components/autofill/core/browser/payments/test_authentication_requester.cc | C++ | bsd-3-clause | 2,142 |
// stdafx.cpp : source file that includes just the standard includes
// StdUtils_UnitTests.pch will be the pre-compiled header
// stdafx.obj will contain the pre-compiled type information
#include "stdafx.h"
// TODO: reference any additional headers you need in STDAFX.H
// and not in this file
| NeuroRoboticTech/AnimatLabPublicSource | Libraries/VortexAnimatSim/Vortex_UnitTests/stdafx.cpp | C++ | bsd-3-clause | 297 |
package ping
import (
"bytes"
"errors"
"io"
"time"
context "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"
host "github.com/ipfs/go-ipfs/p2p/host"
inet "github.com/ipfs/go-ipfs/p2p/net"
peer "github.com/ipfs/go-ipfs/p2p/peer"
logging "github.com/ipfs/go-ipfs/vendor/go-log-v1.0.0"
u "github.com/ipfs/go-ipfs/util"
)
var log = logging.Logger("ping")
const PingSize = 32
const ID = "/ipfs/ping"
type PingService struct {
Host host.Host
}
func NewPingService(h host.Host) *PingService {
ps := &PingService{h}
h.SetStreamHandler(ID, ps.PingHandler)
return ps
}
func (p *PingService) PingHandler(s inet.Stream) {
buf := make([]byte, PingSize)
for {
_, err := io.ReadFull(s, buf)
if err != nil {
log.Debug(err)
return
}
_, err = s.Write(buf)
if err != nil {
log.Debug(err)
return
}
}
}
func (ps *PingService) Ping(ctx context.Context, p peer.ID) (<-chan time.Duration, error) {
s, err := ps.Host.NewStream(ID, p)
if err != nil {
return nil, err
}
out := make(chan time.Duration)
go func() {
defer close(out)
for {
select {
case <-ctx.Done():
return
default:
t, err := ping(s)
if err != nil {
log.Debugf("ping error: %s", err)
return
}
select {
case out <- t:
case <-ctx.Done():
return
}
}
}
}()
return out, nil
}
func ping(s inet.Stream) (time.Duration, error) {
buf := make([]byte, PingSize)
u.NewTimeSeededRand().Read(buf)
before := time.Now()
_, err := s.Write(buf)
if err != nil {
return 0, err
}
rbuf := make([]byte, PingSize)
_, err = io.ReadFull(s, rbuf)
if err != nil {
return 0, err
}
if !bytes.Equal(buf, rbuf) {
return 0, errors.New("ping packet was incorrect!")
}
return time.Now().Sub(before), nil
}
| willglynn/go-ipfs | p2p/protocol/ping/ping.go | GO | mit | 1,786 |
import * as React from 'react'
import { ICommonImageDiffProperties } from './modified-image-diff'
import { ImageContainer } from './image-container'
interface IOnionSkinState {
readonly crossfade: number
}
export class OnionSkin extends React.Component<
ICommonImageDiffProperties,
IOnionSkinState
> {
public constructor(props: ICommonImageDiffProperties) {
super(props)
this.state = { crossfade: 1 }
}
public render() {
const style: React.CSSProperties = {
height: this.props.maxSize.height,
width: this.props.maxSize.width,
}
const maxSize: React.CSSProperties = {
maxHeight: this.props.maxSize.height,
maxWidth: this.props.maxSize.width,
}
return (
<div className="image-diff-onion-skin">
<div className="sizing-container" ref={this.props.onContainerRef}>
<div className="image-container" style={style}>
<div className="image-diff-previous" style={style}>
<ImageContainer
image={this.props.previous}
onElementLoad={this.props.onPreviousImageLoad}
style={maxSize}
/>
</div>
<div
className="image-diff-current"
style={{
...style,
opacity: this.state.crossfade,
}}
>
<ImageContainer
image={this.props.current}
onElementLoad={this.props.onCurrentImageLoad}
style={maxSize}
/>
</div>
</div>
</div>
<input
style={{
width: this.props.maxSize.width / 2,
}}
className="slider"
type="range"
max={1}
min={0}
value={this.state.crossfade}
step={0.001}
onChange={this.onValueChange}
/>
</div>
)
}
private onValueChange = (e: React.ChangeEvent<HTMLInputElement>) => {
this.setState({ crossfade: e.currentTarget.valueAsNumber })
}
}
| j-f1/forked-desktop | app/src/ui/diff/image-diffs/onion-skin.tsx | TypeScript | mit | 2,059 |
'use strict';
const hljs = require('highlight.js');
const languages = hljs.listLanguages();
const fs = require('fs');
const result = {
languages: languages,
aliases: {}
};
languages.forEach(lang => {
result.aliases[lang] = lang;
const def = require('highlight.js/lib/languages/' + lang)(hljs);
const aliases = def.aliases;
if (aliases) {
aliases.forEach(alias => {
result.aliases[alias] = lang;
});
}
});
const stream = fs.createWriteStream('highlight_alias.json');
stream.write(JSON.stringify(result));
stream.on('end', () => {
stream.end();
});
| hexojs/hexo-util | scripts/build_highlight_alias.js | JavaScript | mit | 583 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows.Forms;
namespace ScintillaNET
{
/// <summary>
/// Provides data for the <see cref="Scintilla.DoubleClick" /> event.
/// </summary>
public class DoubleClickEventArgs : EventArgs
{
private readonly Scintilla scintilla;
private readonly int bytePosition;
private int? position;
/// <summary>
/// Gets the line double clicked.
/// </summary>
/// <returns>The zero-based index of the double clicked line.</returns>
public int Line { get; private set; }
/// <summary>
/// Gets the modifier keys (SHIFT, CTRL, ALT) held down when double clicked.
/// </summary>
/// <returns>A bitwise combination of the Keys enumeration indicating the modifier keys.</returns>
public Keys Modifiers { get; private set; }
/// <summary>
/// Gets the zero-based document position of the text double clicked.
/// </summary>
/// <returns>
/// The zero-based character position within the document of the double clicked text;
/// otherwise, -1 if not a document position.
/// </returns>
public int Position
{
get
{
if (position == null)
position = scintilla.Lines.ByteToCharPosition(bytePosition);
return (int)position;
}
}
/// <summary>
/// Initializes a new instance of the <see cref="DoubleClickEventArgs" /> class.
/// </summary>
/// <param name="scintilla">The <see cref="Scintilla" /> control that generated this event.</param>
/// <param name="modifiers">The modifier keys that where held down at the time of the double click.</param>
/// <param name="bytePosition">The zero-based byte position of the double clicked text.</param>
/// <param name="line">The zero-based line index of the double clicked text.</param>
public DoubleClickEventArgs(Scintilla scintilla, Keys modifiers, int bytePosition, int line)
{
this.scintilla = scintilla;
this.bytePosition = bytePosition;
Modifiers = modifiers;
Line = line;
if (bytePosition == -1)
position = -1;
}
}
}
| suvjunmd/ScintillaNET | src/ScintillaNET/DoubleClickEventArgs.cs | C# | mit | 2,400 |
module Gitlab
module SlashCommands
class IssueShow < IssueCommand
def self.match(text)
/\Aissue\s+show\s+#{Issue.reference_prefix}?(?<iid>\d+)/.match(text)
end
def self.help_message
"issue show <id>"
end
def execute(match)
issue = find_by_iid(match[:iid])
if issue
Gitlab::SlashCommands::Presenters::IssueShow.new(issue).present
else
Gitlab::SlashCommands::Presenters::Access.new.not_found
end
end
end
end
end
| t-zuehlsdorff/gitlabhq | lib/gitlab/slash_commands/issue_show.rb | Ruby | mit | 529 |
'use strict';
/*
jQuery UI Sortable plugin wrapper
@param [ui-sortable] {object} Options to pass to $.fn.sortable() merged onto ui.config
*/
angular.module('mpk').value('uiSortableConfig',{}).directive('uiSortable', [
'uiSortableConfig', '$timeout', '$log',
function(uiSortableConfig, $timeout, $log) {
return {
require: '?ngModel',
link: function(scope, element, attrs, ngModel) {
var savedNodes;
function combineCallbacks(first,second){
if(second && (typeof second === 'function')) {
return function(e, ui) {
first(e, ui);
second(e, ui);
};
}
return first;
}
var opts = {};
var callbacks = {
receive: null,
remove:null,
start:null,
stop:null,
update:null
};
angular.extend(opts, uiSortableConfig);
if (ngModel) {
// When we add or remove elements, we need the sortable to 'refresh'
// so it can find the new/removed elements.
scope.$watch(attrs.ngModel+'.length', function() {
// Timeout to let ng-repeat modify the DOM
$timeout(function() {
element.sortable('refresh');
});
});
callbacks.start = function(e, ui) {
// Save the starting position of dragged item
ui.item.sortable = {
index: ui.item.index(),
cancel: function () {
ui.item.sortable._isCanceled = true;
},
isCanceled: function () {
return ui.item.sortable._isCanceled;
},
_isCanceled: false
};
};
callbacks.activate = function(/*e, ui*/) {
// We need to make a copy of the current element's contents so
// we can restore it after sortable has messed it up.
// This is inside activate (instead of start) in order to save
// both lists when dragging between connected lists.
savedNodes = element.contents();
// If this list has a placeholder (the connected lists won't),
// don't inlcude it in saved nodes.
var placeholder = element.sortable('option','placeholder');
// placeholder.element will be a function if the placeholder, has
// been created (placeholder will be an object). If it hasn't
// been created, either placeholder will be false if no
// placeholder class was given or placeholder.element will be
// undefined if a class was given (placeholder will be a string)
if (placeholder && placeholder.element && typeof placeholder.element === 'function') {
var phElement = placeholder.element();
// workaround for jquery ui 1.9.x,
// not returning jquery collection
if (!phElement.jquery) {
phElement = angular.element(phElement);
}
// exact match with the placeholder's class attribute to handle
// the case that multiple connected sortables exist and
// the placehoilder option equals the class of sortable items
var excludes = element.find('[class="' + phElement.attr('class') + '"]');
savedNodes = savedNodes.not(excludes);
}
};
callbacks.update = function(e, ui) {
// Save current drop position but only if this is not a second
// update that happens when moving between lists because then
// the value will be overwritten with the old value
if(!ui.item.sortable.received) {
ui.item.sortable.dropindex = ui.item.index();
ui.item.sortable.droptarget = ui.item.parent();
// Cancel the sort (let ng-repeat do the sort for us)
// Don't cancel if this is the received list because it has
// already been canceled in the other list, and trying to cancel
// here will mess up the DOM.
element.sortable('cancel');
}
// Put the nodes back exactly the way they started (this is very
// important because ng-repeat uses comment elements to delineate
// the start and stop of repeat sections and sortable doesn't
// respect their order (even if we cancel, the order of the
// comments are still messed up).
savedNodes.detach();
if (element.sortable('option','helper') === 'clone') {
// first detach all the savedNodes and then restore all of them
// except .ui-sortable-helper element (which is placed last).
// That way it will be garbage collected.
savedNodes = savedNodes.not(savedNodes.last());
}
savedNodes.appendTo(element);
// If received is true (an item was dropped in from another list)
// then we add the new item to this list otherwise wait until the
// stop event where we will know if it was a sort or item was
// moved here from another list
if(ui.item.sortable.received && !ui.item.sortable.isCanceled()) {
scope.$apply(function () {
ngModel.$modelValue.splice(ui.item.sortable.dropindex, 0,
ui.item.sortable.moved);
});
}
};
callbacks.stop = function(e, ui) {
// If the received flag hasn't be set on the item, this is a
// normal sort, if dropindex is set, the item was moved, so move
// the items in the list.
if(!ui.item.sortable.received &&
('dropindex' in ui.item.sortable) &&
!ui.item.sortable.isCanceled()) {
scope.$apply(function () {
ngModel.$modelValue.splice(
ui.item.sortable.dropindex, 0,
ngModel.$modelValue.splice(ui.item.sortable.index, 1)[0]);
});
} else {
// if the item was not moved, then restore the elements
// so that the ngRepeat's comment are correct.
if((!('dropindex' in ui.item.sortable) || ui.item.sortable.isCanceled()) && element.sortable('option','helper') !== 'clone') {
savedNodes.detach().appendTo(element);
}
}
};
callbacks.receive = function(e, ui) {
// An item was dropped here from another list, set a flag on the
// item.
ui.item.sortable.received = true;
};
callbacks.remove = function(e, ui) {
// Remove the item from this list's model and copy data into item,
// so the next list can retrive it
if (!ui.item.sortable.isCanceled()) {
scope.$apply(function () {
ui.item.sortable.moved = ngModel.$modelValue.splice(
ui.item.sortable.index, 1)[0];
});
}
};
scope.$watch(attrs.uiSortable, function(newVal /*, oldVal*/) {
angular.forEach(newVal, function(value, key) {
if(callbacks[key]) {
if( key === 'stop' ){
// call apply after stop
value = combineCallbacks(
value, function() { scope.$apply(); });
}
// wrap the callback
value = combineCallbacks(callbacks[key], value);
}
element.sortable('option', key, value);
});
}, true);
angular.forEach(callbacks, function(value, key) {
opts[key] = combineCallbacks(value, opts[key]);
});
} else {
$log.info('ui.sortable: ngModel not provided!', element);
}
// Create sortable
element.sortable(opts);
}
};
}
]);
| mabotech/maboss-admin | public/kanban/scripts/directives/sortable.js | JavaScript | mit | 8,434 |
using System;
using ProvisioningLibrary;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Table;
namespace ProvisioningLibrary
{
public class GroupBudgetState : TableEntity
{
private string _ResourceId = string.Empty;
public GroupBudgetState()
{
}
public GroupBudgetState(string groupId) : this()
{
this.RowKey = groupId;
this.PartitionKey = groupId;
}
public string GroupId {
get
{
return this.PartitionKey;
}
}
public long UnitsBudgetted { get; set; }
public long UnitsAllocated { get; set; }
public long UnitsUsed { get; set; }
}
}
| GabrieleCastellani/SCAMP | ProvisioningLibrary/VolatileStorage/GroupBudgetState.cs | C# | mit | 755 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magento.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magento.com for more information.
*
* @category Mage
* @package Mage_GoogleBase
* @copyright Copyright (c) 2006-2016 X.commerce, Inc. and affiliates (http://www.magento.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
* Google Base Item Types Model
*
* @method Mage_GoogleBase_Model_Resource_Item _getResource()
* @method Mage_GoogleBase_Model_Resource_Item getResource()
* @method int getTypeId()
* @method Mage_GoogleBase_Model_Item setTypeId(int $value)
* @method int getProductId()
* @method Mage_GoogleBase_Model_Item setProductId(int $value)
* @method string getGbaseItemId()
* @method Mage_GoogleBase_Model_Item setGbaseItemId(string $value)
* @method int getStoreId()
* @method Mage_GoogleBase_Model_Item setStoreId(int $value)
* @method string getPublished()
* @method Mage_GoogleBase_Model_Item setPublished(string $value)
* @method string getExpires()
* @method Mage_GoogleBase_Model_Item setExpires(string $value)
* @method int getImpr()
* @method Mage_GoogleBase_Model_Item setImpr(int $value)
* @method int getClicks()
* @method Mage_GoogleBase_Model_Item setClicks(int $value)
* @method int getViews()
* @method Mage_GoogleBase_Model_Item setViews(int $value)
* @method int getIsHidden()
* @method Mage_GoogleBase_Model_Item setIsHidden(int $value)
*
* @deprecated after 1.5.1.0
* @category Mage
* @package Mage_GoogleBase
* @author Magento Core Team <core@magentocommerce.com>
*/
class Mage_GoogleBase_Model_Item extends Mage_Core_Model_Abstract
{
const ATTRIBUTES_REGISTRY_KEY = 'gbase_attributes_registry';
const TYPES_REGISTRY_KEY = 'gbase_types_registry';
protected function _construct()
{
parent::_construct();
$this->_init('googlebase/item');
}
/**
* Return Service Item Instance
*
* @return Mage_GoogleBase_Model_Service_Item
*/
public function getServiceItem()
{
return Mage::getModel('googlebase/service_item')->setStoreId($this->getStoreId());
}
/**
* Target Country
*
* @return string Two-letters country ISO code
*/
public function getTargetCountry()
{
return Mage::getSingleton('googlebase/config')->getTargetCountry($this->getStoreId());
}
/**
* Save item to Google Base
*
* @return Mage_GoogleBase_Model_Item
*/
public function insertItem()
{
$this->_checkProduct()
->_prepareProductObject();
$typeModel = $this->_getTypeModel();
$this->getServiceItem()
->setItem($this)
->setObject($this->getProduct())
->setAttributeValues($this->_getAttributeValues())
->setItemType($typeModel->getGbaseItemtype())
->insert();
$this->setTypeId($typeModel->getTypeId());
return $this;
}
/**
* Update Item data
*
* @return Mage_GoogleBase_Model_Item
*/
public function updateItem()
{
$this->_checkProduct()
->_prepareProductObject();
$this->loadByProduct($this->getProduct());
if ($this->getId()) {
$typeModel = $this->_getTypeModel();
$this->getServiceItem()
->setItem($this)
->setObject($this->getProduct())
->setAttributeValues($this->_getAttributeValues())
->setItemType($typeModel->getGbaseItemtype())
->update();
}
return $this;
}
/**
* Delete Item from Google Base
*
* @return Mage_GoogleBase_Model_Item
*/
public function deleteItem()
{
$this->getServiceItem()->setItem($this)->delete();
return $this;
}
/**
* Delete Item from Google Base
*
* @return Mage_GoogleBase_Model_Item
*/
public function hideItem()
{
$this->getServiceItem()->setItem($this)->hide();
$this->setIsHidden(1);
$this->save();
return $this;
}
/**
* Delete Item from Google Base
*
* @return Mage_GoogleBase_Model_Item
*/
public function activateItem()
{
$this->getServiceItem()->setItem($this)->activate();
$this->setIsHidden(0);
$this->save();
return $this;
}
/**
* Load Item Model by Product
*
* @param Mage_Catalog_Model_Product $product
* @return Mage_GoogleBase_Model_Item
*/
public function loadByProduct($product)
{
if (!$this->getProduct()) {
$this->setProduct($product);
}
$this->getResource()->loadByProduct($this);
return $this;
}
/**
* Product Setter
*
* @param Mage_Catalog_Model_Product
* @return Mage_GoogleBase_Model_Item
*/
public function setProduct($product)
{
if (!($product instanceof Mage_Catalog_Model_Product)) {
Mage::throwException(Mage::helper('googlebase')->__('Invalid Product Model for Google Base Item'));
}
$this->setData('product', $product);
$this->setProductId($product->getId());
$this->setStoreId($product->getStoreId());
return $this;
}
/**
* Check product instance
*
* @return Mage_GoogleBase_Model_Item
*/
protected function _checkProduct()
{
if (!($this->getProduct() instanceof Mage_Catalog_Model_Product)) {
Mage::throwException(Mage::helper('googlebase')->__('Invalid Product Model for Google Base Item'));
}
return $this;
}
/**
* Copy Product object and assign additional data to the copy
*
* @return Mage_GoogleBase_Model_Item
*/
protected function _prepareProductObject()
{
$product = clone $this->getProduct();
/* @var $product Mage_Catalog_Model_Product */
$url = $product->getProductUrl(false);
if (!Mage::getStoreConfigFlag('web/url/use_store')) {
$urlInfo = parse_url($url);
$store = $product->getStore()->getCode();
if (isset($urlInfo['query']) && $urlInfo['query'] != '') {
$url .= '&___store=' . $store;
} else {
$url .= '?___store=' . $store;
}
}
$product->setUrl($url)
->setQuantity( $this->getProduct()->getStockItem()->getQty() )
->setImageUrl( Mage::helper('catalog/product')->getImageUrl($product) );
$this->setProduct($product);
return $this;
}
/**
* Return Product attribute values array
*
* @return array Product attribute values
*/
protected function _getAttributeValues()
{
$result = array();
$productAttributes = $this->_getProductAttributes();
foreach ($this->_getAttributesCollection() as $attribute) {
$attributeId = $attribute->getAttributeId();
if (isset($productAttributes[$attributeId])) {
$productAttribute = $productAttributes[$attributeId];
if ($attribute->getGbaseAttribute()) {
$name = $attribute->getGbaseAttribute();
} else {
$name = $this->_getAttributeLabel($productAttribute, $this->getProduct()->getStoreId());
}
$value = $productAttribute->getGbaseValue();
$type = Mage::getSingleton('googlebase/attribute')->getGbaseAttributeType($productAttribute);
if ($name && $value && $type) {
$result[$name] = array(
'value' => $value,
'type' => $type
);
}
}
}
return $result;
}
/**
* Return Product Attribute Store Label
*
* @param Mage_Catalog_Model_Resource_Eav_Attribute $attribute
* @param int $storeId Store View Id
* @return string Attribute Store View Label or Attribute code
*/
protected function _getAttributeLabel($attribute, $storeId)
{
$frontendLabel = $attribute->getFrontend()->getLabel();
if (is_array($frontendLabel)) {
$frontendLabel = array_shift($frontendLabel);
}
if (!$this->_translations) {
$moduleName = Mage_Catalog_Model_Entity_Attribute::MODULE_NAME;
$separator = Mage_Core_Model_Translate::SCOPE_SEPARATOR;
$this->_translations = Mage::getModel('core/translate_string')
->load($moduleName . $separator . $frontendLabel)
->getStoreTranslations();
}
if (isset($this->_translations[$storeId])) {
return $this->_translations[$storeId];
} else {
return $attribute->getAttributeCode();
}
}
/**
* Return Google Base Item Type Model for current Product Attribute Set
*
* @return Mage_GoogleBase_Model_Type
*/
protected function _getTypeModel()
{
$registry = Mage::registry(self::TYPES_REGISTRY_KEY);
$attributeSetId = $this->getProduct()->getAttributeSetId();
if (is_array($registry) && isset($registry[$attributeSetId])) {
return $registry[$attributeSetId];
}
$model = Mage::getModel('googlebase/type')->loadByAttributeSetId($attributeSetId, $this->getTargetCountry());
$registry[$attributeSetId] = $model;
Mage::unregister(self::TYPES_REGISTRY_KEY);
Mage::register(self::TYPES_REGISTRY_KEY, $registry);
return $model;
}
/**
* Return Product attributes array
*
* @return array Product attributes
*/
protected function _getProductAttributes()
{
$product = $this->getProduct();
$attributes = $product->getAttributes();
$result = array();
foreach ($attributes as $attribute) {
$value = $attribute->getFrontend()->getValue($product);
if (is_string($value) && strlen($value) && $product->hasData($attribute->getAttributeCode())) {
$attribute->setGbaseValue($value);
$result[$attribute->getAttributeId()] = $attribute;
}
}
return $result;
}
/**
* Get Product Media files info
*
* @return array Media files info
*/
protected function _getProductImages()
{
$product = $this->getProduct();
$galleryData = $product->getData('media_gallery');
if (!isset($galleryData['images']) || !is_array($galleryData['images'])) {
return array();
}
$result = array();
foreach ($galleryData['images'] as $image) {
$image['url'] = Mage::getSingleton('catalog/product_media_config')
->getMediaUrl($image['file']);
$result[] = $image;
}
return $result;
}
/**
* Return attribute collection for current Product Attribute Set
*
* @return Mage_GoogleBase_Model_Mysql4_Attribute_Collection
*/
protected function _getAttributesCollection()
{
$registry = Mage::registry(self::ATTRIBUTES_REGISTRY_KEY);
$attributeSetId = $this->getProduct()->getAttributeSetId();
if (is_array($registry) && isset($registry[$attributeSetId])) {
return $registry[$attributeSetId];
}
$collection = Mage::getResourceModel('googlebase/attribute_collection')
->addAttributeSetFilter($attributeSetId, $this->getTargetCountry())
->load();
$registry[$attributeSetId] = $collection;
Mage::unregister(self::ATTRIBUTES_REGISTRY_KEY);
Mage::register(self::ATTRIBUTES_REGISTRY_KEY, $registry);
return $collection;
}
}
| hansbonini/cloud9-magento | www/app/code/core/Mage/GoogleBase/Model/Item.php | PHP | mit | 12,442 |
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/snapshot/roots-serializer.h"
#include "src/execution/isolate.h"
#include "src/heap/heap.h"
#include "src/objects/objects-inl.h"
#include "src/objects/slots.h"
namespace v8 {
namespace internal {
RootsSerializer::RootsSerializer(Isolate* isolate,
RootIndex first_root_to_be_serialized)
: Serializer(isolate),
first_root_to_be_serialized_(first_root_to_be_serialized),
can_be_rehashed_(true) {
for (size_t i = 0; i < static_cast<size_t>(first_root_to_be_serialized);
++i) {
root_has_been_serialized_[i] = true;
}
}
int RootsSerializer::SerializeInObjectCache(HeapObject heap_object) {
int index;
if (!object_cache_index_map_.LookupOrInsert(heap_object, &index)) {
// This object is not part of the object cache yet. Add it to the cache so
// we can refer to it via cache index from the delegating snapshot.
SerializeObject(heap_object);
}
return index;
}
void RootsSerializer::Synchronize(VisitorSynchronization::SyncTag tag) {
sink_.Put(kSynchronize, "Synchronize");
}
void RootsSerializer::VisitRootPointers(Root root, const char* description,
FullObjectSlot start,
FullObjectSlot end) {
RootsTable& roots_table = isolate()->roots_table();
if (start ==
roots_table.begin() + static_cast<int>(first_root_to_be_serialized_)) {
// Serializing the root list needs special handling:
// - Only root list elements that have been fully serialized can be
// referenced using kRootArray bytecodes.
for (FullObjectSlot current = start; current < end; ++current) {
SerializeRootObject(*current);
size_t root_index = current - roots_table.begin();
root_has_been_serialized_.set(root_index);
}
} else {
Serializer::VisitRootPointers(root, description, start, end);
}
}
void RootsSerializer::CheckRehashability(HeapObject obj) {
if (!can_be_rehashed_) return;
if (!obj.NeedsRehashing()) return;
if (obj.CanBeRehashed()) return;
can_be_rehashed_ = false;
}
} // namespace internal
} // namespace v8
| enclose-io/compiler | lts/deps/v8/src/snapshot/roots-serializer.cc | C++ | mit | 2,307 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/command_buffer/service/gles2_cmd_copy_texture_chromium.h"
#include <algorithm>
#include "base/basictypes.h"
#include "gpu/command_buffer/service/gl_utils.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#define SHADER(src) \
"#ifdef GL_ES\n" \
"precision mediump float;\n" \
"#define TexCoordPrecision mediump\n" \
"#else\n" \
"#define TexCoordPrecision\n" \
"#endif\n" #src
#define SHADER_2D(src) \
"#define SamplerType sampler2D\n" \
"#define TextureLookup texture2D\n" SHADER(src)
#define SHADER_RECTANGLE_ARB(src) \
"#define SamplerType sampler2DRect\n" \
"#define TextureLookup texture2DRect\n" SHADER(src)
#define SHADER_EXTERNAL_OES(src) \
"#extension GL_OES_EGL_image_external : require\n" \
"#define SamplerType samplerExternalOES\n" \
"#define TextureLookup texture2D\n" SHADER(src)
#define FRAGMENT_SHADERS(src) \
SHADER_2D(src), SHADER_RECTANGLE_ARB(src), SHADER_EXTERNAL_OES(src)
namespace {
const GLfloat kIdentityMatrix[16] = {1.0f, 0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f};
enum VertexShaderId {
VERTEX_SHADER_COPY_TEXTURE,
VERTEX_SHADER_COPY_TEXTURE_FLIP_Y,
NUM_VERTEX_SHADERS,
};
enum FragmentShaderId {
FRAGMENT_SHADER_COPY_TEXTURE_2D,
FRAGMENT_SHADER_COPY_TEXTURE_RECTANGLE_ARB,
FRAGMENT_SHADER_COPY_TEXTURE_EXTERNAL_OES,
FRAGMENT_SHADER_COPY_TEXTURE_PREMULTIPLY_ALPHA_2D,
FRAGMENT_SHADER_COPY_TEXTURE_PREMULTIPLY_ALPHA_RECTANGLE_ARB,
FRAGMENT_SHADER_COPY_TEXTURE_PREMULTIPLY_ALPHA_EXTERNAL_OES,
FRAGMENT_SHADER_COPY_TEXTURE_UNPREMULTIPLY_ALPHA_2D,
FRAGMENT_SHADER_COPY_TEXTURE_UNPREMULTIPLY_ALPHA_RECTANGLE_ARB,
FRAGMENT_SHADER_COPY_TEXTURE_UNPREMULTIPLY_ALPHA_EXTERNAL_OES,
NUM_FRAGMENT_SHADERS,
};
const char* vertex_shader_source[NUM_VERTEX_SHADERS] = {
// VERTEX_SHADER_COPY_TEXTURE
SHADER(
uniform vec2 u_vertex_translate;
uniform vec2 u_half_size;
attribute vec4 a_position;
varying TexCoordPrecision vec2 v_uv;
void main(void) {
gl_Position = a_position + vec4(u_vertex_translate, 0.0, 0.0);
v_uv = a_position.xy * vec2(u_half_size.s, u_half_size.t) +
vec2(u_half_size.s, u_half_size.t);
}),
// VERTEX_SHADER_COPY_TEXTURE_FLIP_Y
SHADER(
uniform vec2 u_vertex_translate;
uniform vec2 u_half_size;
attribute vec4 a_position;
varying TexCoordPrecision vec2 v_uv;
void main(void) {
gl_Position = a_position + vec4(u_vertex_translate, 0.0, 0.0);
v_uv = a_position.xy * vec2(u_half_size.s, -u_half_size.t) +
vec2(u_half_size.s, u_half_size.t);
}),
};
const char* fragment_shader_source[NUM_FRAGMENT_SHADERS] = {
// FRAGMENT_SHADER_COPY_TEXTURE_*
FRAGMENT_SHADERS(
uniform SamplerType u_sampler;
uniform mat4 u_tex_coord_transform;
varying TexCoordPrecision vec2 v_uv;
void main(void) {
TexCoordPrecision vec4 uv = u_tex_coord_transform * vec4(v_uv, 0, 1);
gl_FragColor = TextureLookup(u_sampler, uv.st);
}),
// FRAGMENT_SHADER_COPY_TEXTURE_PREMULTIPLY_ALPHA_*
FRAGMENT_SHADERS(
uniform SamplerType u_sampler;
uniform mat4 u_tex_coord_transform;
varying TexCoordPrecision vec2 v_uv;
void main(void) {
TexCoordPrecision vec4 uv = u_tex_coord_transform * vec4(v_uv, 0, 1);
gl_FragColor = TextureLookup(u_sampler, uv.st);
gl_FragColor.rgb *= gl_FragColor.a;
}),
// FRAGMENT_SHADER_COPY_TEXTURE_UNPREMULTIPLY_ALPHA_*
FRAGMENT_SHADERS(
uniform SamplerType u_sampler;
uniform mat4 u_tex_coord_transform;
varying TexCoordPrecision vec2 v_uv;
void main(void) {
TexCoordPrecision vec4 uv = u_tex_coord_transform * vec4(v_uv, 0, 1);
gl_FragColor = TextureLookup(u_sampler, uv.st);
if (gl_FragColor.a > 0.0)
gl_FragColor.rgb /= gl_FragColor.a;
}),
};
// Returns the correct vertex shader id to evaluate the copy operation for
// the CHROMIUM_flipy setting.
VertexShaderId GetVertexShaderId(bool flip_y) {
// bit 0: flip y
static VertexShaderId shader_ids[] = {
VERTEX_SHADER_COPY_TEXTURE,
VERTEX_SHADER_COPY_TEXTURE_FLIP_Y,
};
unsigned index = flip_y ? 1 : 0;
return shader_ids[index];
}
// Returns the correct fragment shader id to evaluate the copy operation for
// the premultiply alpha pixel store settings and target.
FragmentShaderId GetFragmentShaderId(bool premultiply_alpha,
bool unpremultiply_alpha,
GLenum target) {
enum {
SAMPLER_2D,
SAMPLER_RECTANGLE_ARB,
SAMPLER_EXTERNAL_OES,
NUM_SAMPLERS
};
// bit 0: premultiply alpha
// bit 1: unpremultiply alpha
static FragmentShaderId shader_ids[][NUM_SAMPLERS] = {
{
FRAGMENT_SHADER_COPY_TEXTURE_2D,
FRAGMENT_SHADER_COPY_TEXTURE_RECTANGLE_ARB,
FRAGMENT_SHADER_COPY_TEXTURE_EXTERNAL_OES,
},
{
FRAGMENT_SHADER_COPY_TEXTURE_PREMULTIPLY_ALPHA_2D,
FRAGMENT_SHADER_COPY_TEXTURE_PREMULTIPLY_ALPHA_RECTANGLE_ARB,
FRAGMENT_SHADER_COPY_TEXTURE_PREMULTIPLY_ALPHA_EXTERNAL_OES,
},
{
FRAGMENT_SHADER_COPY_TEXTURE_UNPREMULTIPLY_ALPHA_2D,
FRAGMENT_SHADER_COPY_TEXTURE_UNPREMULTIPLY_ALPHA_RECTANGLE_ARB,
FRAGMENT_SHADER_COPY_TEXTURE_UNPREMULTIPLY_ALPHA_EXTERNAL_OES,
},
{
FRAGMENT_SHADER_COPY_TEXTURE_2D,
FRAGMENT_SHADER_COPY_TEXTURE_RECTANGLE_ARB,
FRAGMENT_SHADER_COPY_TEXTURE_EXTERNAL_OES,
}};
unsigned index = (premultiply_alpha ? (1 << 0) : 0) |
(unpremultiply_alpha ? (1 << 1) : 0);
switch (target) {
case GL_TEXTURE_2D:
return shader_ids[index][SAMPLER_2D];
case GL_TEXTURE_RECTANGLE_ARB:
return shader_ids[index][SAMPLER_RECTANGLE_ARB];
case GL_TEXTURE_EXTERNAL_OES:
return shader_ids[index][SAMPLER_EXTERNAL_OES];
default:
break;
}
NOTREACHED();
return shader_ids[0][SAMPLER_2D];
}
void CompileShader(GLuint shader, const char* shader_source) {
glShaderSource(shader, 1, &shader_source, 0);
glCompileShader(shader);
#ifndef NDEBUG
GLint compile_status;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compile_status);
if (GL_TRUE != compile_status)
DLOG(ERROR) << "CopyTextureCHROMIUM: shader compilation failure.";
#endif
}
void DeleteShader(GLuint shader) {
if (shader)
glDeleteShader(shader);
}
bool BindFramebufferTexture2D(GLenum target,
GLuint texture_id,
GLuint framebuffer) {
DCHECK(target == GL_TEXTURE_2D || target == GL_TEXTURE_RECTANGLE_ARB);
glActiveTexture(GL_TEXTURE0);
glBindTexture(target, texture_id);
// NVidia drivers require texture settings to be a certain way
// or they won't report FRAMEBUFFER_COMPLETE.
glTexParameterf(target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, framebuffer);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, target,
texture_id, 0);
#ifndef NDEBUG
GLenum fb_status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER);
if (GL_FRAMEBUFFER_COMPLETE != fb_status) {
DLOG(ERROR) << "CopyTextureCHROMIUM: Incomplete framebuffer.";
return false;
}
#endif
return true;
}
void DoCopyTexImage2D(const gpu::gles2::GLES2Decoder* decoder,
GLenum source_target,
GLuint source_id,
GLuint dest_id,
GLenum dest_internal_format,
GLsizei width,
GLsizei height,
GLuint framebuffer) {
DCHECK(source_target == GL_TEXTURE_2D ||
source_target == GL_TEXTURE_RECTANGLE_ARB);
if (BindFramebufferTexture2D(source_target, source_id, framebuffer)) {
glBindTexture(GL_TEXTURE_2D, dest_id);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glCopyTexImage2D(GL_TEXTURE_2D, 0 /* level */, dest_internal_format,
0 /* x */, 0 /* y */, width, height, 0 /* border */);
}
decoder->RestoreTextureState(source_id);
decoder->RestoreTextureState(dest_id);
decoder->RestoreTextureUnitBindings(0);
decoder->RestoreActiveTexture();
decoder->RestoreFramebufferBindings();
}
void DoCopyTexSubImage2D(const gpu::gles2::GLES2Decoder* decoder,
GLenum source_target,
GLuint source_id,
GLuint dest_id,
GLint xoffset,
GLint yoffset,
GLint source_x,
GLint source_y,
GLsizei source_width,
GLsizei source_height,
GLuint framebuffer) {
DCHECK(source_target == GL_TEXTURE_2D ||
source_target == GL_TEXTURE_RECTANGLE_ARB);
if (BindFramebufferTexture2D(source_target, source_id, framebuffer)) {
glBindTexture(GL_TEXTURE_2D, dest_id);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glCopyTexSubImage2D(GL_TEXTURE_2D, 0 /* level */, xoffset, yoffset,
source_x, source_y, source_width, source_height);
}
decoder->RestoreTextureState(source_id);
decoder->RestoreTextureState(dest_id);
decoder->RestoreTextureUnitBindings(0);
decoder->RestoreActiveTexture();
decoder->RestoreFramebufferBindings();
}
} // namespace
namespace gpu {
CopyTextureCHROMIUMResourceManager::CopyTextureCHROMIUMResourceManager()
: initialized_(false),
vertex_shaders_(NUM_VERTEX_SHADERS, 0u),
fragment_shaders_(NUM_FRAGMENT_SHADERS, 0u),
buffer_id_(0u),
framebuffer_(0u) {}
CopyTextureCHROMIUMResourceManager::~CopyTextureCHROMIUMResourceManager() {
// |buffer_id_| and |framebuffer_| can be not-null because when GPU context is
// lost, this class can be deleted without releasing resources like
// GLES2DecoderImpl.
}
void CopyTextureCHROMIUMResourceManager::Initialize(
const gles2::GLES2Decoder* decoder) {
static_assert(
kVertexPositionAttrib == 0u,
"kVertexPositionAttrib must be 0");
DCHECK(!buffer_id_);
DCHECK(!framebuffer_);
DCHECK(programs_.empty());
// Initialize all of the GPU resources required to perform the copy.
glGenBuffersARB(1, &buffer_id_);
glBindBuffer(GL_ARRAY_BUFFER, buffer_id_);
const GLfloat kQuadVertices[] = {-1.0f, -1.0f,
1.0f, -1.0f,
1.0f, 1.0f,
-1.0f, 1.0f};
glBufferData(
GL_ARRAY_BUFFER, sizeof(kQuadVertices), kQuadVertices, GL_STATIC_DRAW);
glGenFramebuffersEXT(1, &framebuffer_);
decoder->RestoreBufferBindings();
initialized_ = true;
}
void CopyTextureCHROMIUMResourceManager::Destroy() {
if (!initialized_)
return;
glDeleteFramebuffersEXT(1, &framebuffer_);
framebuffer_ = 0;
std::for_each(vertex_shaders_.begin(), vertex_shaders_.end(), DeleteShader);
std::for_each(
fragment_shaders_.begin(), fragment_shaders_.end(), DeleteShader);
for (ProgramMap::const_iterator it = programs_.begin(); it != programs_.end();
++it) {
const ProgramInfo& info = it->second;
glDeleteProgram(info.program);
}
glDeleteBuffersARB(1, &buffer_id_);
buffer_id_ = 0;
}
void CopyTextureCHROMIUMResourceManager::DoCopyTexture(
const gles2::GLES2Decoder* decoder,
GLenum source_target,
GLuint source_id,
GLenum source_internal_format,
GLuint dest_id,
GLenum dest_internal_format,
GLsizei width,
GLsizei height,
bool flip_y,
bool premultiply_alpha,
bool unpremultiply_alpha) {
bool premultiply_alpha_change = premultiply_alpha ^ unpremultiply_alpha;
// GL_INVALID_OPERATION is generated if the currently bound framebuffer's
// format does not contain a superset of the components required by the base
// format of internalformat.
// https://www.khronos.org/opengles/sdk/docs/man/xhtml/glCopyTexImage2D.xml
bool source_format_contain_superset_of_dest_format =
(source_internal_format == dest_internal_format &&
source_internal_format != GL_BGRA_EXT) ||
(source_internal_format == GL_RGBA && dest_internal_format == GL_RGB);
// GL_TEXTURE_RECTANGLE_ARB on FBO is supported by OpenGL, not GLES2,
// so restrict this to GL_TEXTURE_2D.
if (source_target == GL_TEXTURE_2D && !flip_y && !premultiply_alpha_change &&
source_format_contain_superset_of_dest_format) {
DoCopyTexImage2D(decoder,
source_target,
source_id,
dest_id,
dest_internal_format,
width,
height,
framebuffer_);
return;
}
// Use kIdentityMatrix if no transform passed in.
DoCopyTextureWithTransform(decoder, source_target, source_id, dest_id, width,
height, flip_y, premultiply_alpha,
unpremultiply_alpha, kIdentityMatrix);
}
void CopyTextureCHROMIUMResourceManager::DoCopySubTexture(
const gles2::GLES2Decoder* decoder,
GLenum source_target,
GLuint source_id,
GLenum source_internal_format,
GLuint dest_id,
GLenum dest_internal_format,
GLint xoffset,
GLint yoffset,
GLint x,
GLint y,
GLsizei width,
GLsizei height,
GLsizei dest_width,
GLsizei dest_height,
GLsizei source_width,
GLsizei source_height,
bool flip_y,
bool premultiply_alpha,
bool unpremultiply_alpha) {
bool premultiply_alpha_change = premultiply_alpha ^ unpremultiply_alpha;
// GL_INVALID_OPERATION is generated if the currently bound framebuffer's
// format does not contain a superset of the components required by the base
// format of internalformat.
// https://www.khronos.org/opengles/sdk/docs/man/xhtml/glCopyTexImage2D.xml
bool source_format_contain_superset_of_dest_format =
(source_internal_format == dest_internal_format &&
source_internal_format != GL_BGRA_EXT) ||
(source_internal_format == GL_RGBA && dest_internal_format == GL_RGB);
// GL_TEXTURE_RECTANGLE_ARB on FBO is supported by OpenGL, not GLES2,
// so restrict this to GL_TEXTURE_2D.
if (source_target == GL_TEXTURE_2D && !flip_y && !premultiply_alpha_change &&
source_format_contain_superset_of_dest_format) {
DoCopyTexSubImage2D(decoder, source_target, source_id, dest_id, xoffset,
yoffset, x, y, width, height, framebuffer_);
return;
}
DoCopyTextureInternal(decoder, source_target, source_id, dest_id, xoffset,
yoffset, x, y, width, height, dest_width, dest_height,
source_width, source_height, flip_y, premultiply_alpha,
unpremultiply_alpha, kIdentityMatrix);
}
void CopyTextureCHROMIUMResourceManager::DoCopyTextureWithTransform(
const gles2::GLES2Decoder* decoder,
GLenum source_target,
GLuint source_id,
GLuint dest_id,
GLsizei width,
GLsizei height,
bool flip_y,
bool premultiply_alpha,
bool unpremultiply_alpha,
const GLfloat transform_matrix[16]) {
GLsizei dest_width = width;
GLsizei dest_height = height;
DoCopyTextureInternal(decoder, source_target, source_id, dest_id, 0, 0, 0, 0,
width, height, dest_width, dest_height, width, height,
flip_y, premultiply_alpha, unpremultiply_alpha,
transform_matrix);
}
void CopyTextureCHROMIUMResourceManager::DoCopyTextureInternal(
const gles2::GLES2Decoder* decoder,
GLenum source_target,
GLuint source_id,
GLuint dest_id,
GLint xoffset,
GLint yoffset,
GLint x,
GLint y,
GLsizei width,
GLsizei height,
GLsizei dest_width,
GLsizei dest_height,
GLsizei source_width,
GLsizei source_height,
bool flip_y,
bool premultiply_alpha,
bool unpremultiply_alpha,
const GLfloat transform_matrix[16]) {
DCHECK(source_target == GL_TEXTURE_2D ||
source_target == GL_TEXTURE_RECTANGLE_ARB ||
source_target == GL_TEXTURE_EXTERNAL_OES);
DCHECK_GE(xoffset, 0);
DCHECK_LE(xoffset + width, dest_width);
DCHECK_GE(yoffset, 0);
DCHECK_LE(yoffset + height, dest_height);
if (!initialized_) {
DLOG(ERROR) << "CopyTextureCHROMIUM: Uninitialized manager.";
return;
}
VertexShaderId vertex_shader_id = GetVertexShaderId(flip_y);
DCHECK_LT(static_cast<size_t>(vertex_shader_id), vertex_shaders_.size());
FragmentShaderId fragment_shader_id = GetFragmentShaderId(
premultiply_alpha, unpremultiply_alpha, source_target);
DCHECK_LT(static_cast<size_t>(fragment_shader_id), fragment_shaders_.size());
ProgramMapKey key(vertex_shader_id, fragment_shader_id);
ProgramInfo* info = &programs_[key];
// Create program if necessary.
if (!info->program) {
info->program = glCreateProgram();
GLuint* vertex_shader = &vertex_shaders_[vertex_shader_id];
if (!*vertex_shader) {
*vertex_shader = glCreateShader(GL_VERTEX_SHADER);
CompileShader(*vertex_shader, vertex_shader_source[vertex_shader_id]);
}
glAttachShader(info->program, *vertex_shader);
GLuint* fragment_shader = &fragment_shaders_[fragment_shader_id];
if (!*fragment_shader) {
*fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
CompileShader(*fragment_shader,
fragment_shader_source[fragment_shader_id]);
}
glAttachShader(info->program, *fragment_shader);
glBindAttribLocation(info->program, kVertexPositionAttrib, "a_position");
glLinkProgram(info->program);
#ifndef NDEBUG
GLint linked;
glGetProgramiv(info->program, GL_LINK_STATUS, &linked);
if (!linked)
DLOG(ERROR) << "CopyTextureCHROMIUM: program link failure.";
#endif
info->vertex_translate_handle = glGetUniformLocation(info->program,
"u_vertex_translate");
info->tex_coord_transform_handle =
glGetUniformLocation(info->program, "u_tex_coord_transform");
info->half_size_handle = glGetUniformLocation(info->program, "u_half_size");
info->sampler_handle = glGetUniformLocation(info->program, "u_sampler");
}
glUseProgram(info->program);
glUniformMatrix4fv(info->tex_coord_transform_handle, 1, GL_FALSE,
transform_matrix);
GLint x_translate = xoffset - x;
GLint y_translate = yoffset - y;
if (!x_translate && !y_translate) {
glUniform2f(info->vertex_translate_handle, 0.0f, 0.0f);
} else {
// transform offsets from ([0, dest_width], [0, dest_height]) coord.
// to ([-1, 1], [-1, 1]) coord.
GLfloat x_translate_on_vertex = ((2.f * x_translate) / dest_width);
GLfloat y_translate_on_vertex = ((2.f * y_translate) / dest_height);
// Pass translation to the shader program.
glUniform2f(info->vertex_translate_handle, x_translate_on_vertex,
y_translate_on_vertex);
}
if (source_target == GL_TEXTURE_RECTANGLE_ARB)
glUniform2f(info->half_size_handle, source_width / 2.0f,
source_height / 2.0f);
else
glUniform2f(info->half_size_handle, 0.5f, 0.5f);
if (BindFramebufferTexture2D(GL_TEXTURE_2D, dest_id, framebuffer_)) {
#ifndef NDEBUG
// glValidateProgram of MACOSX validates FBO unlike other platforms, so
// glValidateProgram must be called after FBO binding. crbug.com/463439
glValidateProgram(info->program);
GLint validation_status;
glGetProgramiv(info->program, GL_VALIDATE_STATUS, &validation_status);
if (GL_TRUE != validation_status) {
DLOG(ERROR) << "CopyTextureCHROMIUM: Invalid shader.";
return;
}
#endif
decoder->ClearAllAttributes();
glEnableVertexAttribArray(kVertexPositionAttrib);
glBindBuffer(GL_ARRAY_BUFFER, buffer_id_);
glVertexAttribPointer(kVertexPositionAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
glUniform1i(info->sampler_handle, 0);
glBindTexture(source_target, source_id);
glTexParameterf(source_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(source_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(source_target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(source_target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glDisable(GL_DEPTH_TEST);
glDisable(GL_STENCIL_TEST);
glDisable(GL_CULL_FACE);
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
glDepthMask(GL_FALSE);
glDisable(GL_BLEND);
bool need_scissor =
xoffset || yoffset || width != dest_width || height != dest_height;
if (need_scissor) {
glEnable(GL_SCISSOR_TEST);
glScissor(xoffset, yoffset, width, height);
} else {
glDisable(GL_SCISSOR_TEST);
}
glViewport(0, 0, dest_width, dest_height);
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
}
decoder->RestoreAllAttributes();
decoder->RestoreTextureState(source_id);
decoder->RestoreTextureState(dest_id);
decoder->RestoreTextureUnitBindings(0);
decoder->RestoreActiveTexture();
decoder->RestoreProgramBindings();
decoder->RestoreBufferBindings();
decoder->RestoreFramebufferBindings();
decoder->RestoreGlobalState();
}
} // namespace gpu
| Teamxrtc/webrtc-streaming-node | third_party/webrtc/src/chromium/src/gpu/command_buffer/service/gles2_cmd_copy_texture_chromium.cc | C++ | mit | 22,247 |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.resources.implementation;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.microsoft.azure.Page;
import java.util.List;
/**
* An instance of this class defines a page of Azure resources and a link to
* get the next page of resources, if any.
*
* @param <T> type of Azure resource
*/
public class PageImpl<T> implements Page<T> {
/**
* The link to the next page.
*/
@JsonProperty("nextLink")
private String nextPageLink;
/**
* The list of items.
*/
@JsonProperty("value")
private List<T> items;
/**
* Gets the link to the next page.
*
* @return the link to the next page.
*/
@Override
public String nextPageLink() {
return this.nextPageLink;
}
/**
* Gets the list of items.
*
* @return the list of items in {@link List}.
*/
@Override
public List<T> items() {
return items;
}
/**
* Sets the link to the next page.
*
* @param nextPageLink the link to the next page.
* @return this Page object itself.
*/
public PageImpl<T> setNextPageLink(String nextPageLink) {
this.nextPageLink = nextPageLink;
return this;
}
/**
* Sets the list of items.
*
* @param items the list of items in {@link List}.
* @return this Page object itself.
*/
public PageImpl<T> setItems(List<T> items) {
this.items = items;
return this;
}
}
| jianghaolu/azure-sdk-for-java | azure-mgmt-resources/src/main/java/com/microsoft/azure/management/resources/implementation/PageImpl.java | Java | mit | 1,747 |
@extends($master)
@section('page', trans('ticketit::admin.category-edit-title', ['name' => ucwords($category->name)]))
@section('content')
@include('ticketit::shared.header')
<div class="well bs-component">
{!! CollectiveForm::model($category, [
'route' => [$setting->grab('admin_route').'.category.update', $category->id],
'method' => 'PATCH',
'class' => 'form-horizontal'
]) !!}
<legend>{{ trans('ticketit::admin.category-edit-title', ['name' => ucwords($category->name)]) }}</legend>
@include('ticketit::admin.category.form', ['update', true])
{!! CollectiveForm::close() !!}
</div>
@stop
| thekordy/ticketit | src/Views/bootstrap3/admin/category/edit.blade.php | PHP | mit | 781 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magento.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magento.com for more information.
*
* @category Mage
* @package Mage_Api2
* @copyright Copyright (c) 2006-2016 X.commerce, Inc. and affiliates (http://www.magento.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
* Request content interpreter factory
*
* @category Mage
* @package Mage_Api2
* @author Magento Core Team <core@magentocommerce.com>
*/
abstract class Mage_Api2_Model_Request_Interpreter
{
/**
* Request body interpreters factory
*
* @param string $type
* @return Mage_Api2_Model_Request_Interpreter_Interface
* @throws Exception|Mage_Api2_Exception
*/
public static function factory($type)
{
/** @var $helper Mage_Api2_Helper_Data */
$helper = Mage::helper('api2/data');
$adapters = $helper->getRequestInterpreterAdapters();
if (empty($adapters) || !is_array($adapters)) {
throw new Exception('Request interpreter adapters is not set.');
}
$adapterModel = null;
foreach ($adapters as $item) {
$itemType = $item->type;
if ($itemType == $type) {
$adapterModel = $item->model;
break;
}
}
if ($adapterModel === null) {
throw new Mage_Api2_Exception(
sprintf('Server can not understand Content-Type HTTP header media type "%s"', $type),
Mage_Api2_Model_Server::HTTP_BAD_REQUEST
);
}
$adapter = Mage::getModel($adapterModel);
if (!$adapter) {
throw new Exception(sprintf('Request interpreter adapter "%s" not found.', $type));
}
return $adapter;
}
}
| hansbonini/cloud9-magento | www/app/code/core/Mage/Api2/Model/Request/Interpreter.php | PHP | mit | 2,436 |
$(document).ready(function() {
var a, b, c, d, e, col, otherCol;
module("Backbone.Collection", {
setup: function() {
a = new Backbone.Model({id: 3, label: 'a'});
b = new Backbone.Model({id: 2, label: 'b'});
c = new Backbone.Model({id: 1, label: 'c'});
d = new Backbone.Model({id: 0, label: 'd'});
e = null;
col = new Backbone.Collection([a,b,c,d]);
otherCol = new Backbone.Collection();
}
});
test("new and sort", 9, function() {
var counter = 0;
col.on('sort', function(){ counter++; });
equal(col.first(), a, "a should be first");
equal(col.last(), d, "d should be last");
col.comparator = function(a, b) {
return a.id > b.id ? -1 : 1;
};
col.sort();
equal(counter, 1);
equal(col.first(), a, "a should be first");
equal(col.last(), d, "d should be last");
col.comparator = function(model) { return model.id; };
col.sort();
equal(counter, 2);
equal(col.first(), d, "d should be first");
equal(col.last(), a, "a should be last");
equal(col.length, 4);
});
test("String comparator.", 1, function() {
var collection = new Backbone.Collection([
{id: 3},
{id: 1},
{id: 2}
], {comparator: 'id'});
deepEqual(collection.pluck('id'), [1, 2, 3]);
});
test("new and parse", 3, function() {
var Collection = Backbone.Collection.extend({
parse : function(data) {
return _.filter(data, function(datum) {
return datum.a % 2 === 0;
});
}
});
var models = [{a: 1}, {a: 2}, {a: 3}, {a: 4}];
var collection = new Collection(models, {parse: true});
strictEqual(collection.length, 2);
strictEqual(collection.first().get('a'), 2);
strictEqual(collection.last().get('a'), 4);
});
test("get", 6, function() {
equal(col.get(0), d);
equal(col.get(d.clone()), d);
equal(col.get(2), b);
equal(col.get({id: 1}), c);
equal(col.get(c.clone()), c);
equal(col.get(col.first().cid), col.first());
});
test("get with non-default ids", 5, function() {
var col = new Backbone.Collection();
var MongoModel = Backbone.Model.extend({idAttribute: '_id'});
var model = new MongoModel({_id: 100});
col.add(model);
equal(col.get(100), model);
equal(col.get(model.cid), model);
equal(col.get(model), model);
equal(col.get(101), void 0);
var col2 = new Backbone.Collection();
col2.model = MongoModel;
col2.add(model.attributes);
equal(col2.get(model.clone()), col2.first());
});
test("update index when id changes", 4, function() {
var col = new Backbone.Collection();
col.add([
{id : 0, name : 'one'},
{id : 1, name : 'two'}
]);
var one = col.get(0);
equal(one.get('name'), 'one');
col.on('change:name', function (model) { ok(this.get(model)); });
one.set({name: 'dalmatians', id : 101});
equal(col.get(0), null);
equal(col.get(101).get('name'), 'dalmatians');
});
test("at", 1, function() {
equal(col.at(2), c);
});
test("pluck", 1, function() {
equal(col.pluck('label').join(' '), 'a b c d');
});
test("add", 10, function() {
var added, opts, secondAdded;
added = opts = secondAdded = null;
e = new Backbone.Model({id: 10, label : 'e'});
otherCol.add(e);
otherCol.on('add', function() {
secondAdded = true;
});
col.on('add', function(model, collection, options){
added = model.get('label');
opts = options;
});
col.add(e, {amazing: true});
equal(added, 'e');
equal(col.length, 5);
equal(col.last(), e);
equal(otherCol.length, 1);
equal(secondAdded, null);
ok(opts.amazing);
var f = new Backbone.Model({id: 20, label : 'f'});
var g = new Backbone.Model({id: 21, label : 'g'});
var h = new Backbone.Model({id: 22, label : 'h'});
var atCol = new Backbone.Collection([f, g, h]);
equal(atCol.length, 3);
atCol.add(e, {at: 1});
equal(atCol.length, 4);
equal(atCol.at(1), e);
equal(atCol.last(), h);
});
test("add multiple models", 6, function() {
var col = new Backbone.Collection([{at: 0}, {at: 1}, {at: 9}]);
col.add([{at: 2}, {at: 3}, {at: 4}, {at: 5}, {at: 6}, {at: 7}, {at: 8}], {at: 2});
for (var i = 0; i <= 5; i++) {
equal(col.at(i).get('at'), i);
}
});
test("add; at should have preference over comparator", 1, function() {
var Col = Backbone.Collection.extend({
comparator: function(a,b) {
return a.id > b.id ? -1 : 1;
}
});
var col = new Col([{id: 2}, {id: 3}]);
col.add(new Backbone.Model({id: 1}), {at: 1});
equal(col.pluck('id').join(' '), '3 1 2');
});
test("can't add model to collection twice", function() {
var col = new Backbone.Collection([{id: 1}, {id: 2}, {id: 1}, {id: 2}, {id: 3}]);
equal(col.pluck('id').join(' '), '1 2 3');
});
test("can't add different model with same id to collection twice", 1, function() {
var col = new Backbone.Collection;
col.unshift({id: 101});
col.add({id: 101});
equal(col.length, 1);
});
test("merge in duplicate models with {merge: true}", 3, function() {
var col = new Backbone.Collection;
col.add([{id: 1, name: 'Moe'}, {id: 2, name: 'Curly'}, {id: 3, name: 'Larry'}]);
col.add({id: 1, name: 'Moses'});
equal(col.first().get('name'), 'Moe');
col.add({id: 1, name: 'Moses'}, {merge: true});
equal(col.first().get('name'), 'Moses');
col.add({id: 1, name: 'Tim'}, {merge: true, silent: true});
equal(col.first().get('name'), 'Tim');
});
test("add model to multiple collections", 10, function() {
var counter = 0;
var e = new Backbone.Model({id: 10, label : 'e'});
e.on('add', function(model, collection) {
counter++;
equal(e, model);
if (counter > 1) {
equal(collection, colF);
} else {
equal(collection, colE);
}
});
var colE = new Backbone.Collection([]);
colE.on('add', function(model, collection) {
equal(e, model);
equal(colE, collection);
});
var colF = new Backbone.Collection([]);
colF.on('add', function(model, collection) {
equal(e, model);
equal(colF, collection);
});
colE.add(e);
equal(e.collection, colE);
colF.add(e);
equal(e.collection, colE);
});
test("add model with parse", 1, function() {
var Model = Backbone.Model.extend({
parse: function(obj) {
obj.value += 1;
return obj;
}
});
var Col = Backbone.Collection.extend({model: Model});
var col = new Col;
col.add({value: 1}, {parse: true});
equal(col.at(0).get('value'), 2);
});
test("add with parse and merge", function() {
var Model = Backbone.Model.extend({
parse: function (data) {
return data.model;
}
});
var collection = new Backbone.Collection();
collection.model = Model;
collection.add({id: 1});
collection.add({model: {id: 1, name: 'Alf'}}, {parse: true, merge: true});
equal(collection.first().get('name'), 'Alf');
});
test("add model to collection with sort()-style comparator", 3, function() {
var col = new Backbone.Collection;
col.comparator = function(a, b) {
return a.get('name') < b.get('name') ? -1 : 1;
};
var tom = new Backbone.Model({name: 'Tom'});
var rob = new Backbone.Model({name: 'Rob'});
var tim = new Backbone.Model({name: 'Tim'});
col.add(tom);
col.add(rob);
col.add(tim);
equal(col.indexOf(rob), 0);
equal(col.indexOf(tim), 1);
equal(col.indexOf(tom), 2);
});
test("comparator that depends on `this`", 2, function() {
var col = new Backbone.Collection;
col.negative = function(num) {
return -num;
};
col.comparator = function(a) {
return this.negative(a.id);
};
col.add([{id: 1}, {id: 2}, {id: 3}]);
deepEqual(col.pluck('id'), [3, 2, 1]);
col.comparator = function(a, b) {
return this.negative(b.id) - this.negative(a.id);
};
col.sort();
deepEqual(col.pluck('id'), [1, 2, 3]);
});
test("remove", 5, function() {
var removed = null;
var otherRemoved = null;
col.on('remove', function(model, col, options) {
removed = model.get('label');
equal(options.index, 3);
});
otherCol.on('remove', function(model, col, options) {
otherRemoved = true;
});
col.remove(d);
equal(removed, 'd');
equal(col.length, 3);
equal(col.first(), a);
equal(otherRemoved, null);
});
test("shift and pop", 2, function() {
var col = new Backbone.Collection([{a: 'a'}, {b: 'b'}, {c: 'c'}]);
equal(col.shift().get('a'), 'a');
equal(col.pop().get('c'), 'c');
});
test("slice", 2, function() {
var col = new Backbone.Collection([{a: 'a'}, {b: 'b'}, {c: 'c'}]);
var array = col.slice(1, 3);
equal(array.length, 2);
equal(array[0].get('b'), 'b');
});
test("events are unbound on remove", 3, function() {
var counter = 0;
var dj = new Backbone.Model();
var emcees = new Backbone.Collection([dj]);
emcees.on('change', function(){ counter++; });
dj.set({name : 'Kool'});
equal(counter, 1);
emcees.reset([]);
equal(dj.collection, undefined);
dj.set({name : 'Shadow'});
equal(counter, 1);
});
test("remove in multiple collections", 7, function() {
var modelData = {
id : 5,
title : 'Othello'
};
var passed = false;
var e = new Backbone.Model(modelData);
var f = new Backbone.Model(modelData);
f.on('remove', function() {
passed = true;
});
var colE = new Backbone.Collection([e]);
var colF = new Backbone.Collection([f]);
ok(e != f);
ok(colE.length === 1);
ok(colF.length === 1);
colE.remove(e);
equal(passed, false);
ok(colE.length === 0);
colF.remove(e);
ok(colF.length === 0);
equal(passed, true);
});
test("remove same model in multiple collection", 16, function() {
var counter = 0;
var e = new Backbone.Model({id: 5, title: 'Othello'});
e.on('remove', function(model, collection) {
counter++;
equal(e, model);
if (counter > 1) {
equal(collection, colE);
} else {
equal(collection, colF);
}
});
var colE = new Backbone.Collection([e]);
colE.on('remove', function(model, collection) {
equal(e, model);
equal(colE, collection);
});
var colF = new Backbone.Collection([e]);
colF.on('remove', function(model, collection) {
equal(e, model);
equal(colF, collection);
});
equal(colE, e.collection);
colF.remove(e);
ok(colF.length === 0);
ok(colE.length === 1);
equal(counter, 1);
equal(colE, e.collection);
colE.remove(e);
equal(null, e.collection);
ok(colE.length === 0);
equal(counter, 2);
});
test("model destroy removes from all collections", 3, function() {
var e = new Backbone.Model({id: 5, title: 'Othello'});
e.sync = function(method, model, options) { options.success(); };
var colE = new Backbone.Collection([e]);
var colF = new Backbone.Collection([e]);
e.destroy();
ok(colE.length === 0);
ok(colF.length === 0);
equal(undefined, e.collection);
});
test("Colllection: non-persisted model destroy removes from all collections", 3, function() {
var e = new Backbone.Model({title: 'Othello'});
e.sync = function(method, model, options) { throw "should not be called"; };
var colE = new Backbone.Collection([e]);
var colF = new Backbone.Collection([e]);
e.destroy();
ok(colE.length === 0);
ok(colF.length === 0);
equal(undefined, e.collection);
});
test("fetch", 4, function() {
var collection = new Backbone.Collection;
collection.url = '/test';
collection.fetch();
equal(this.syncArgs.method, 'read');
equal(this.syncArgs.model, collection);
equal(this.syncArgs.options.parse, true);
collection.fetch({parse: false});
equal(this.syncArgs.options.parse, false);
});
test("fetch with an error response triggers an error event", 1, function () {
var collection = new Backbone.Collection();
collection.on('error', function () {
ok(true);
});
collection.sync = function (method, model, options) { options.error(); };
collection.fetch();
});
test("ensure fetch only parses once", 1, function() {
var collection = new Backbone.Collection;
var counter = 0;
collection.parse = function(models) {
counter++;
return models;
};
collection.url = '/test';
collection.fetch();
this.syncArgs.options.success();
equal(counter, 1);
});
test("create", 4, function() {
var collection = new Backbone.Collection;
collection.url = '/test';
var model = collection.create({label: 'f'}, {wait: true});
equal(this.syncArgs.method, 'create');
equal(this.syncArgs.model, model);
equal(model.get('label'), 'f');
equal(model.collection, collection);
});
test("create with validate:true enforces validation", 2, function() {
var ValidatingModel = Backbone.Model.extend({
validate: function(attrs) {
return "fail";
}
});
var ValidatingCollection = Backbone.Collection.extend({
model: ValidatingModel
});
var col = new ValidatingCollection();
col.on('invalid', function (collection, attrs, options) {
equal(options.validationError, 'fail');
});
equal(col.create({"foo":"bar"}, {validate:true}), false);
});
test("a failing create returns model with errors", function() {
var ValidatingModel = Backbone.Model.extend({
validate: function(attrs) {
return "fail";
}
});
var ValidatingCollection = Backbone.Collection.extend({
model: ValidatingModel
});
var col = new ValidatingCollection();
var m = col.create({"foo":"bar"});
equal(m.validationError, 'fail');
equal(col.length, 1);
});
test("initialize", 1, function() {
var Collection = Backbone.Collection.extend({
initialize: function() {
this.one = 1;
}
});
var coll = new Collection;
equal(coll.one, 1);
});
test("toJSON", 1, function() {
equal(JSON.stringify(col), '[{"id":3,"label":"a"},{"id":2,"label":"b"},{"id":1,"label":"c"},{"id":0,"label":"d"}]');
});
test("where and findWhere", 8, function() {
var model = new Backbone.Model({a: 1});
var coll = new Backbone.Collection([
model,
{a: 1},
{a: 1, b: 2},
{a: 2, b: 2},
{a: 3}
]);
equal(coll.where({a: 1}).length, 3);
equal(coll.where({a: 2}).length, 1);
equal(coll.where({a: 3}).length, 1);
equal(coll.where({b: 1}).length, 0);
equal(coll.where({b: 2}).length, 2);
equal(coll.where({a: 1, b: 2}).length, 1);
equal(coll.findWhere({a: 1}), model);
equal(coll.findWhere({a: 4}), void 0);
});
test("Underscore methods", 14, function() {
equal(col.map(function(model){ return model.get('label'); }).join(' '), 'a b c d');
equal(col.any(function(model){ return model.id === 100; }), false);
equal(col.any(function(model){ return model.id === 0; }), true);
equal(col.indexOf(b), 1);
equal(col.size(), 4);
equal(col.rest().length, 3);
ok(!_.include(col.rest(), a));
ok(_.include(col.rest(), d));
ok(!col.isEmpty());
ok(!_.include(col.without(d), d));
equal(col.max(function(model){ return model.id; }).id, 3);
equal(col.min(function(model){ return model.id; }).id, 0);
deepEqual(col.chain()
.filter(function(o){ return o.id % 2 === 0; })
.map(function(o){ return o.id * 2; })
.value(),
[4, 0]);
deepEqual(col.difference([c, d]), [a, b]);
});
test("sortedIndex", function () {
var model = new Backbone.Model({key: 2});
var collection = new (Backbone.Collection.extend({
comparator: 'key'
}))([model, {key: 1}]);
equal(collection.sortedIndex(model), 1);
equal(collection.sortedIndex(model, 'key'), 1);
equal(collection.sortedIndex(model, function (model) {
return model.get('key');
}), 1);
});
test("reset", 12, function() {
var resetCount = 0;
var models = col.models;
col.on('reset', function() { resetCount += 1; });
col.reset([]);
equal(resetCount, 1);
equal(col.length, 0);
equal(col.last(), null);
col.reset(models);
equal(resetCount, 2);
equal(col.length, 4);
equal(col.last(), d);
col.reset(_.map(models, function(m){ return m.attributes; }));
equal(resetCount, 3);
equal(col.length, 4);
ok(col.last() !== d);
ok(_.isEqual(col.last().attributes, d.attributes));
col.reset();
equal(col.length, 0);
equal(resetCount, 4);
});
test ("reset with different values", function(){
var col = new Backbone.Collection({id: 1});
col.reset({id: 1, a: 1});
equal(col.get(1).get('a'), 1);
});
test("same references in reset", function() {
var model = new Backbone.Model({id: 1});
var collection = new Backbone.Collection({id: 1});
collection.reset(model);
equal(collection.get(1), model);
});
test("reset passes caller options", 3, function() {
var Model = Backbone.Model.extend({
initialize: function(attrs, options) {
this.model_parameter = options.model_parameter;
}
});
var col = new (Backbone.Collection.extend({ model: Model }))();
col.reset([{ astring: "green", anumber: 1 }, { astring: "blue", anumber: 2 }], { model_parameter: 'model parameter' });
equal(col.length, 2);
col.each(function(model) {
equal(model.model_parameter, 'model parameter');
});
});
test("trigger custom events on models", 1, function() {
var fired = null;
a.on("custom", function() { fired = true; });
a.trigger("custom");
equal(fired, true);
});
test("add does not alter arguments", 2, function(){
var attrs = {};
var models = [attrs];
new Backbone.Collection().add(models);
equal(models.length, 1);
ok(attrs === models[0]);
});
test("#714: access `model.collection` in a brand new model.", 2, function() {
var collection = new Backbone.Collection;
collection.url = '/test';
var Model = Backbone.Model.extend({
set: function(attrs) {
equal(attrs.prop, 'value');
equal(this.collection, collection);
return this;
}
});
collection.model = Model;
collection.create({prop: 'value'});
});
test("#574, remove its own reference to the .models array.", 2, function() {
var col = new Backbone.Collection([
{id: 1}, {id: 2}, {id: 3}, {id: 4}, {id: 5}, {id: 6}
]);
equal(col.length, 6);
col.remove(col.models);
equal(col.length, 0);
});
test("#861, adding models to a collection which do not pass validation, with validate:true", function() {
var Model = Backbone.Model.extend({
validate: function(attrs) {
if (attrs.id == 3) return "id can't be 3";
}
});
var Collection = Backbone.Collection.extend({
model: Model
});
var collection = new Collection;
collection.on("error", function() { ok(true); });
collection.add([{id: 1}, {id: 2}, {id: 3}, {id: 4}, {id: 5}, {id: 6}], {validate:true});
deepEqual(collection.pluck('id'), [1, 2, 4, 5, 6]);
});
test("Invalid models are discarded with validate:true.", 5, function() {
var collection = new Backbone.Collection;
collection.on('test', function() { ok(true); });
collection.model = Backbone.Model.extend({
validate: function(attrs){ if (!attrs.valid) return 'invalid'; }
});
var model = new collection.model({id: 1, valid: true});
collection.add([model, {id: 2}], {validate:true});
model.trigger('test');
ok(collection.get(model.cid));
ok(collection.get(1));
ok(!collection.get(2));
equal(collection.length, 1);
});
test("multiple copies of the same model", 3, function() {
var col = new Backbone.Collection();
var model = new Backbone.Model();
col.add([model, model]);
equal(col.length, 1);
col.add([{id: 1}, {id: 1}]);
equal(col.length, 2);
equal(col.last().id, 1);
});
test("#964 - collection.get return inconsistent", 2, function() {
var c = new Backbone.Collection();
ok(c.get(null) === undefined);
ok(c.get() === undefined);
});
test("#1112 - passing options.model sets collection.model", 2, function() {
var Model = Backbone.Model.extend({});
var c = new Backbone.Collection([{id: 1}], {model: Model});
ok(c.model === Model);
ok(c.at(0) instanceof Model);
});
test("null and undefined are invalid ids.", 2, function() {
var model = new Backbone.Model({id: 1});
var collection = new Backbone.Collection([model]);
model.set({id: null});
ok(!collection.get('null'));
model.set({id: 1});
model.set({id: undefined});
ok(!collection.get('undefined'));
});
test("falsy comparator", 4, function(){
var Col = Backbone.Collection.extend({
comparator: function(model){ return model.id; }
});
var col = new Col();
var colFalse = new Col(null, {comparator: false});
var colNull = new Col(null, {comparator: null});
var colUndefined = new Col(null, {comparator: undefined});
ok(col.comparator);
ok(!colFalse.comparator);
ok(!colNull.comparator);
ok(colUndefined.comparator);
});
test("#1355 - `options` is passed to success callbacks", 2, function(){
var m = new Backbone.Model({x:1});
var col = new Backbone.Collection();
var opts = {
success: function(collection, resp, options){
ok(options);
}
};
col.sync = m.sync = function( method, collection, options ){
options.success(collection, [], options);
};
col.fetch(opts);
col.create(m, opts);
});
test("#1412 - Trigger 'request' and 'sync' events.", 4, function() {
var collection = new Backbone.Collection;
collection.url = '/test';
Backbone.ajax = function(settings){ settings.success(); };
collection.on('request', function(obj, xhr, options) {
ok(obj === collection, "collection has correct 'request' event after fetching");
});
collection.on('sync', function(obj, response, options) {
ok(obj === collection, "collection has correct 'sync' event after fetching");
});
collection.fetch();
collection.off();
collection.on('request', function(obj, xhr, options) {
ok(obj === collection.get(1), "collection has correct 'request' event after one of its models save");
});
collection.on('sync', function(obj, response, options) {
ok(obj === collection.get(1), "collection has correct 'sync' event after one of its models save");
});
collection.create({id: 1});
collection.off();
});
test("#1447 - create with wait adds model.", 1, function() {
var collection = new Backbone.Collection;
var model = new Backbone.Model;
model.sync = function(method, model, options){ options.success(); };
collection.on('add', function(){ ok(true); });
collection.create(model, {wait: true});
});
test("#1448 - add sorts collection after merge.", 1, function() {
var collection = new Backbone.Collection([
{id: 1, x: 1},
{id: 2, x: 2}
]);
collection.comparator = function(model){ return model.get('x'); };
collection.add({id: 1, x: 3}, {merge: true});
deepEqual(collection.pluck('id'), [2, 1]);
});
test("#1655 - groupBy can be used with a string argument.", 3, function() {
var collection = new Backbone.Collection([{x: 1}, {x: 2}]);
var grouped = collection.groupBy('x');
strictEqual(_.keys(grouped).length, 2);
strictEqual(grouped[1][0].get('x'), 1);
strictEqual(grouped[2][0].get('x'), 2);
});
test("#1655 - sortBy can be used with a string argument.", 1, function() {
var collection = new Backbone.Collection([{x: 3}, {x: 1}, {x: 2}]);
var values = _.map(collection.sortBy('x'), function(model) {
return model.get('x');
});
deepEqual(values, [1, 2, 3]);
});
test("#1604 - Removal during iteration.", 0, function() {
var collection = new Backbone.Collection([{}, {}]);
collection.on('add', function() {
collection.at(0).destroy();
});
collection.add({}, {at: 0});
});
test("#1638 - `sort` during `add` triggers correctly.", function() {
var collection = new Backbone.Collection;
collection.comparator = function(model) { return model.get('x'); };
var added = [];
collection.on('add', function(model) {
model.set({x: 3});
collection.sort();
added.push(model.id);
});
collection.add([{id: 1, x: 1}, {id: 2, x: 2}]);
deepEqual(added, [1, 2]);
});
test("fetch parses models by default", 1, function() {
var model = {};
var Collection = Backbone.Collection.extend({
url: 'test',
model: Backbone.Model.extend({
parse: function(resp) {
strictEqual(resp, model);
}
})
});
new Collection().fetch();
this.ajaxSettings.success([model]);
});
test("`sort` shouldn't always fire on `add`", 1, function() {
var c = new Backbone.Collection([{id: 1}, {id: 2}, {id: 3}], {
comparator: 'id'
});
c.sort = function(){ ok(true); };
c.add([]);
c.add({id: 1});
c.add([{id: 2}, {id: 3}]);
c.add({id: 4});
});
test("#1407 parse option on constructor parses collection and models", 2, function() {
var model = {
namespace : [{id: 1}, {id:2}]
};
var Collection = Backbone.Collection.extend({
model: Backbone.Model.extend({
parse: function(model) {
model.name = 'test';
return model;
}
}),
parse: function(model) {
return model.namespace;
}
});
var c = new Collection(model, {parse:true});
equal(c.length, 2);
equal(c.at(0).get('name'), 'test');
});
test("#1407 parse option on reset parses collection and models", 2, function() {
var model = {
namespace : [{id: 1}, {id:2}]
};
var Collection = Backbone.Collection.extend({
model: Backbone.Model.extend({
parse: function(model) {
model.name = 'test';
return model;
}
}),
parse: function(model) {
return model.namespace;
}
});
var c = new Collection();
c.reset(model, {parse:true});
equal(c.length, 2);
equal(c.at(0).get('name'), 'test');
});
test("Reset includes previous models in triggered event.", 1, function() {
var model = new Backbone.Model();
var collection = new Backbone.Collection([model])
.on('reset', function(collection, options) {
deepEqual(options.previousModels, [model]);
});
collection.reset([]);
});
test("set", function() {
var m1 = new Backbone.Model();
var m2 = new Backbone.Model({id: 2});
var m3 = new Backbone.Model();
var c = new Backbone.Collection([m1, m2]);
// Test add/change/remove events
c.on('add', function(model) {
strictEqual(model, m3);
});
c.on('change', function(model) {
strictEqual(model, m2);
});
c.on('remove', function(model) {
strictEqual(model, m1);
});
// remove: false doesn't remove any models
c.set([], {remove: false});
strictEqual(c.length, 2);
// add: false doesn't add any models
c.set([m1, m2, m3], {add: false});
strictEqual(c.length, 2);
// merge: false doesn't change any models
c.set([m1, {id: 2, a: 1}], {merge: false});
strictEqual(m2.get('a'), void 0);
// add: false, remove: false only merges existing models
c.set([m1, {id: 2, a: 0}, m3, {id: 4}], {add: false, remove: false});
strictEqual(c.length, 2);
strictEqual(m2.get('a'), 0);
// default options add/remove/merge as appropriate
c.set([{id: 2, a: 1}, m3]);
strictEqual(c.length, 2);
strictEqual(m2.get('a'), 1);
// Test removing models not passing an argument
c.off('remove').on('remove', function(model) {
ok(model === m2 || model === m3);
});
c.set([]);
strictEqual(c.length, 0);
});
test("set with only cids", 3, function() {
var m1 = new Backbone.Model;
var m2 = new Backbone.Model;
var c = new Backbone.Collection;
c.set([m1, m2]);
equal(c.length, 2);
c.set([m1]);
equal(c.length, 1);
c.set([m1, m1, m1, m2, m2], {remove: false});
equal(c.length, 2);
});
test("set with only idAttribute", 3, function() {
var m1 = { _id: 1 };
var m2 = { _id: 2 };
var col = Backbone.Collection.extend({
model: Backbone.Model.extend({
idAttribute: '_id'
})
});
var c = new col;
c.set([m1, m2]);
equal(c.length, 2);
c.set([m1]);
equal(c.length, 1);
c.set([m1, m1, m1, m2, m2], {remove: false});
equal(c.length, 2);
});
test("set + merge with default values defined", function() {
var Model = Backbone.Model.extend({
defaults: {
key: 'value'
}
});
var m = new Model({id: 1});
var col = new Backbone.Collection([m], {model: Model});
equal(col.first().get('key'), 'value');
col.set({id: 1, key: 'other'});
equal(col.first().get('key'), 'other');
col.set({id: 1, other: 'value'});
equal(col.first().get('key'), 'other');
equal(col.length, 1);
});
test('merge without mutation', function () {
var Model = Backbone.Model.extend({
initialize: function (attrs, options) {
if (attrs.child) {
this.set('child', new Model(attrs.child, options), options);
}
}
});
var Collection = Backbone.Collection.extend({model: Model});
var data = [{id: 1, child: {id: 2}}];
var collection = new Collection(data);
equal(collection.first().id, 1);
collection.set(data);
equal(collection.first().id, 1);
collection.set([{id: 2, child: {id: 2}}].concat(data));
deepEqual(collection.pluck('id'), [2, 1]);
});
test("`set` and model level `parse`", function() {
var Model = Backbone.Model.extend({
parse: function (res) { return res.model; }
});
var Collection = Backbone.Collection.extend({
model: Model,
parse: function (res) { return res.models; }
});
var model = new Model({id: 1});
var collection = new Collection(model);
collection.set({models: [
{model: {id: 1}},
{model: {id: 2}}
]}, {parse: true});
equal(collection.first(), model);
});
test("`set` data is only parsed once", function() {
var collection = new Backbone.Collection();
collection.model = Backbone.Model.extend({
parse: function (data) {
equal(data.parsed, void 0);
data.parsed = true;
return data;
}
});
collection.set({}, {parse: true});
});
test('`set` matches input order in the absence of a comparator', function () {
var one = new Backbone.Model({id: 1});
var two = new Backbone.Model({id: 2});
var three = new Backbone.Model({id: 3});
var collection = new Backbone.Collection([one, two, three]);
collection.set([{id: 3}, {id: 2}, {id: 1}]);
deepEqual(collection.models, [three, two, one]);
collection.set([{id: 1}, {id: 2}]);
deepEqual(collection.models, [one, two]);
collection.set([two, three, one]);
deepEqual(collection.models, [two, three, one]);
collection.set([{id: 1}, {id: 2}], {remove: false});
deepEqual(collection.models, [two, three, one]);
collection.set([{id: 1}, {id: 2}, {id: 3}], {merge: false});
deepEqual(collection.models, [one, two, three]);
collection.set([three, two, one, {id: 4}], {add: false});
deepEqual(collection.models, [one, two, three]);
});
test("#1894 - Push should not trigger a sort", 0, function() {
var Collection = Backbone.Collection.extend({
comparator: 'id',
sort: function() {
ok(false);
}
});
new Collection().push({id: 1});
});
test("`set` with non-normal id", function() {
var Collection = Backbone.Collection.extend({
model: Backbone.Model.extend({idAttribute: '_id'})
});
var collection = new Collection({_id: 1});
collection.set([{_id: 1, a: 1}], {add: false});
equal(collection.first().get('a'), 1);
});
test("#1894 - `sort` can optionally be turned off", 0, function() {
var Collection = Backbone.Collection.extend({
comparator: 'id',
sort: function() { ok(true); }
});
new Collection().add({id: 1}, {sort: false});
});
test("#1915 - `parse` data in the right order in `set`", function() {
var collection = new (Backbone.Collection.extend({
parse: function (data) {
strictEqual(data.status, 'ok');
return data.data;
}
}));
var res = {status: 'ok', data:[{id: 1}]};
collection.set(res, {parse: true});
});
asyncTest("#1939 - `parse` is passed `options`", 1, function () {
var collection = new (Backbone.Collection.extend({
url: '/',
parse: function (data, options) {
strictEqual(options.xhr.someHeader, 'headerValue');
return data;
}
}));
var ajax = Backbone.ajax;
Backbone.ajax = function (params) {
_.defer(params.success);
return {someHeader: 'headerValue'};
};
collection.fetch({
success: function () { start(); }
});
Backbone.ajax = ajax;
});
test("`add` only `sort`s when necessary", 2, function () {
var collection = new (Backbone.Collection.extend({
comparator: 'a'
}))([{id: 1}, {id: 2}, {id: 3}]);
collection.on('sort', function () { ok(true); });
collection.add({id: 4}); // do sort, new model
collection.add({id: 1, a: 1}, {merge: true}); // do sort, comparator change
collection.add({id: 1, b: 1}, {merge: true}); // don't sort, no comparator change
collection.add({id: 1, a: 1}, {merge: true}); // don't sort, no comparator change
collection.add(collection.models); // don't sort, nothing new
collection.add(collection.models, {merge: true}); // don't sort
});
test("`add` only `sort`s when necessary with comparator function", 3, function () {
var collection = new (Backbone.Collection.extend({
comparator: function(a, b) {
return a.get('a') > b.get('a') ? 1 : (a.get('a') < b.get('a') ? -1 : 0);
}
}))([{id: 1}, {id: 2}, {id: 3}]);
collection.on('sort', function () { ok(true); });
collection.add({id: 4}); // do sort, new model
collection.add({id: 1, a: 1}, {merge: true}); // do sort, model change
collection.add({id: 1, b: 1}, {merge: true}); // do sort, model change
collection.add({id: 1, a: 1}, {merge: true}); // don't sort, no model change
collection.add(collection.models); // don't sort, nothing new
collection.add(collection.models, {merge: true}); // don't sort
});
test("Attach options to collection.", 2, function() {
var model = new Backbone.Model;
var comparator = function(){};
var collection = new Backbone.Collection([], {
model: model,
comparator: comparator
});
ok(collection.model === model);
ok(collection.comparator === comparator);
});
test("`add` overrides `set` flags", function () {
var collection = new Backbone.Collection();
collection.once('add', function (model, collection, options) {
collection.add({id: 2}, options);
});
collection.set({id: 1});
equal(collection.length, 2);
});
test("#2606 - Collection#create, success arguments", 1, function() {
var collection = new Backbone.Collection;
collection.url = 'test';
collection.create({}, {
success: function(model, resp, options) {
strictEqual(resp, 'response');
}
});
this.ajaxSettings.success('response');
});
});
| wakanpaladin/mybackbone | test/collection.js | JavaScript | mit | 35,957 |
/*************************************************************************/
/* control_editor_plugin.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#if 0
#include "control_editor_plugin.h"
#include "print_string.h"
#include "editor_node.h"
#include "os/keyboard.h"
#include "scene/main/viewport.h"
void ControlEditor::_add_control(Control *p_control,const EditInfo& p_info) {
if (controls.has(p_control))
return;
controls.insert(p_control,p_info);
p_control->call_deferred("connect","visibility_changed",this,"_visibility_changed",varray(p_control->get_instance_ID()));
}
void ControlEditor::_remove_control(Control *p_control) {
p_control->call_deferred("disconnect","visibility_changed",this,"_visibility_changed");
controls.erase(p_control);
}
void ControlEditor::_clear_controls(){
while(controls.size())
_remove_control(controls.front()->key());
}
void ControlEditor::_visibility_changed(ObjectID p_control) {
Object *c = ObjectDB::get_instance(p_control);
if (!c)
return;
Control *ct = c->cast_to<Control>();
if (!ct)
return;
_remove_control(ct);
}
void ControlEditor::_node_removed(Node *p_node) {
Control *control = (Control*)p_node; //not a good cast, but safe
if (controls.has(control))
_remove_control(control);
if (current_window==p_node) {
_clear_controls();
}
update();
}
// slow as hell
Control* ControlEditor::_select_control_at_pos(const Point2& p_pos,Node* p_node) {
for (int i=p_node->get_child_count()-1;i>=0;i--) {
Control *r=_select_control_at_pos(p_pos,p_node->get_child(i));
if (r)
return r;
}
Control *c=p_node->cast_to<Control>();
if (c) {
Rect2 rect = c->get_window_rect();
if (c->get_window()==current_window) {
rect.pos=transform.xform(rect.pos).floor();
}
if (rect.has_point(p_pos))
return c;
}
return NULL;
}
void ControlEditor::_key_move(const Vector2& p_dir, bool p_snap) {
if (drag!=DRAG_NONE)
return;
Vector2 motion=p_dir;
if (p_snap)
motion*=snap_val->get_text().to_double();
undo_redo->create_action("Edit Control");
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
Control *control = E->key();
undo_redo->add_do_method(control,"set_pos",control->get_pos()+motion);
undo_redo->add_undo_method(control,"set_pos",control->get_pos());
}
undo_redo->commit_action();
}
void ControlEditor::_input_event(InputEvent p_event) {
if (p_event.type==InputEvent::MOUSE_BUTTON) {
const InputEventMouseButton &b=p_event.mouse_button;
if (b.button_index==BUTTON_RIGHT) {
if (controls.size() && drag!=DRAG_NONE) {
//cancel drag
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
Control *control = E->key();
control->set_pos(E->get().drag_pos);
control->set_size(E->get().drag_size);
}
} else if (b.pressed) {
popup->set_pos(Point2(b.x,b.y));
popup->popup();
}
return;
}
//if (!controls.size())
// return;
if (b.button_index!=BUTTON_LEFT)
return;
if (!b.pressed) {
if (drag!=DRAG_NONE) {
if (undo_redo) {
undo_redo->create_action("Edit Control");
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
Control *control = E->key();
undo_redo->add_do_method(control,"set_pos",control->get_pos());
undo_redo->add_do_method(control,"set_size",control->get_size());
undo_redo->add_undo_method(control,"set_pos",E->get().drag_pos);
undo_redo->add_undo_method(control,"set_size",E->get().drag_size);
}
undo_redo->commit_action();
}
drag=DRAG_NONE;
}
return;
}
if (controls.size()==1) {
//try single control edit
Control *control = controls.front()->key();
ERR_FAIL_COND(!current_window);
Rect2 rect=control->get_window_rect();
Point2 ofs=Point2();//get_global_pos();
Rect2 draw_rect=Rect2(rect.pos-ofs,rect.size);
Point2 click=Point2(b.x,b.y);
click = transform.affine_inverse().xform(click);
Size2 handle_size=Size2(handle_len,handle_len);
drag = DRAG_NONE;
if (Rect2(draw_rect.pos-handle_size,handle_size).has_point(click))
drag=DRAG_TOP_LEFT;
else if (Rect2(draw_rect.pos+draw_rect.size,handle_size).has_point(click))
drag=DRAG_BOTTOM_RIGHT;
else if(Rect2(draw_rect.pos+Point2(draw_rect.size.width,-handle_size.y),handle_size).has_point(click))
drag=DRAG_TOP_RIGHT;
else if (Rect2(draw_rect.pos+Point2(-handle_size.x,draw_rect.size.height),handle_size).has_point(click))
drag=DRAG_BOTTOM_LEFT;
else if (Rect2(draw_rect.pos+Point2(Math::floor((draw_rect.size.width-handle_size.x)/2.0),-handle_size.height),handle_size).has_point(click))
drag=DRAG_TOP;
else if( Rect2(draw_rect.pos+Point2(-handle_size.width,Math::floor((draw_rect.size.height-handle_size.y)/2.0)),handle_size).has_point(click))
drag=DRAG_LEFT;
else if ( Rect2(draw_rect.pos+Point2(Math::floor((draw_rect.size.width-handle_size.x)/2.0),draw_rect.size.height),handle_size).has_point(click))
drag=DRAG_BOTTOM;
else if( Rect2(draw_rect.pos+Point2(draw_rect.size.width,Math::floor((draw_rect.size.height-handle_size.y)/2.0)),handle_size).has_point(click))
drag=DRAG_RIGHT;
if (drag!=DRAG_NONE) {
drag_from=click;
controls[control].drag_pos=control->get_pos();
controls[control].drag_size=control->get_size();
controls[control].drag_limit=drag_from+controls[control].drag_size-control->get_minimum_size();
return;
}
}
//multi control edit
Point2 click=Point2(b.x,b.y);
Node* scene = get_scene()->get_root_node()->cast_to<EditorNode>()->get_edited_scene();
if (!scene)
return;
/*
if (current_window) {
//no window.... ?
click-=current_window->get_scroll();
}*/
Control *c=_select_control_at_pos(click, scene);
Node* n = c;
while ((n && n != scene && n->get_owner() != scene) || (n && !n->is_type("Control"))) {
n = n->get_parent();
};
c = n->cast_to<Control>();
if (b.mod.control) { //additive selection
if (!c)
return; //nothing to add
if (current_window && controls.size() && c->get_window()!=current_window)
return; //cant multiple select from multiple windows
if (!controls.size())
current_window=c->get_window();
if (controls.has(c)) {
//already in here, erase it
_remove_control(c);
update();
return;
}
//check parents!
Control *parent = c->get_parent()->cast_to<Control>();
while(parent) {
if (controls.has(parent))
return; //a parent is already selected, so this is pointless
parent=parent->get_parent()->cast_to<Control>();
}
//check childrens of everything!
List<Control*> to_erase;
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
parent = E->key()->get_parent()->cast_to<Control>();
while(parent) {
if (parent==c) {
to_erase.push_back(E->key());
break;
}
parent=parent->get_parent()->cast_to<Control>();
}
}
while(to_erase.size()) {
_remove_control(to_erase.front()->get());
to_erase.pop_front();
}
_add_control(c,EditInfo());
update();
} else {
//regular selection
if (!c) {
_clear_controls();
update();
return;
}
if (!controls.has(c)) {
_clear_controls();
current_window=c->get_window();
_add_control(c,EditInfo());
//reselect
if (get_scene()->is_editor_hint()) {
get_scene()->get_root_node()->call("edit_node",c);
}
}
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
EditInfo &ei=E->get();
Control *control=E->key();
ei.drag_pos=control->get_pos();
ei.drag_size=control->get_size();
ei.drag_limit=drag_from+ei.drag_size-control->get_minimum_size();
}
drag=DRAG_ALL;
drag_from=click;
update();
}
}
if (p_event.type==InputEvent::MOUSE_MOTION) {
const InputEventMouseMotion &m=p_event.mouse_motion;
if (drag==DRAG_NONE || !current_window)
return;
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
Control *control = E->key();
Point2 control_drag_pos=E->get().drag_pos;
Point2 control_drag_size=E->get().drag_size;
Point2 control_drag_limit=E->get().drag_limit;
Point2 pos=Point2(m.x,m.y);
pos = transform.affine_inverse().xform(pos);
switch(drag) {
case DRAG_ALL: {
control->set_pos( snapify(control_drag_pos+(pos-drag_from)) );
} break;
case DRAG_RIGHT: {
control->set_size( snapify(Size2(control_drag_size.width+(pos-drag_from).x,control_drag_size.height)) );
} break;
case DRAG_BOTTOM: {
control->set_size( snapify(Size2(control_drag_size.width,control_drag_size.height+(pos-drag_from).y)) );
} break;
case DRAG_BOTTOM_RIGHT: {
control->set_size( snapify(control_drag_size+(pos-drag_from)) );
} break;
case DRAG_TOP_LEFT: {
if(pos.x>control_drag_limit.x)
pos.x=control_drag_limit.x;
if(pos.y>control_drag_limit.y)
pos.y=control_drag_limit.y;
Point2 old_size = control->get_size();
Point2 new_pos = snapify(control_drag_pos+(pos-drag_from));
Point2 new_size = old_size + (control->get_pos() - new_pos);
control->set_pos( new_pos );
control->set_size( new_size );
} break;
case DRAG_TOP: {
if(pos.y>control_drag_limit.y)
pos.y=control_drag_limit.y;
Point2 old_size = control->get_size();
Point2 new_pos = snapify(control_drag_pos+Point2(0,pos.y-drag_from.y));
Point2 new_size = old_size + (control->get_pos() - new_pos);
control->set_pos( new_pos );
control->set_size( new_size );
} break;
case DRAG_LEFT: {
if(pos.x>control_drag_limit.x)
pos.x=control_drag_limit.x;
Point2 old_size = control->get_size();
Point2 new_pos = snapify(control_drag_pos+Point2(pos.x-drag_from.x,0));
Point2 new_size = old_size + (control->get_pos() - new_pos);
control->set_pos( new_pos );
control->set_size( new_size );
} break;
case DRAG_TOP_RIGHT: {
if(pos.y>control_drag_limit.y)
pos.y=control_drag_limit.y;
Point2 old_size = control->get_size();
Point2 new_pos = snapify(control_drag_pos+Point2(0,pos.y-drag_from.y));
float new_size_y = Point2( old_size + (control->get_pos() - new_pos)).y;
float new_size_x = snapify(control_drag_size+Point2(pos.x-drag_from.x,0)).x;
control->set_pos( new_pos );
control->set_size( Point2(new_size_x, new_size_y) );
} break;
case DRAG_BOTTOM_LEFT: {
if(pos.x>control_drag_limit.x)
pos.x=control_drag_limit.x;
Point2 old_size = control->get_size();
Point2 new_pos = snapify(control_drag_pos+Point2(pos.x-drag_from.x,0));
float new_size_y = snapify(control_drag_size+Point2(0,pos.y-drag_from.y)).y;
float new_size_x = Point2( old_size + (control->get_pos() - new_pos)).x;
control->set_pos( new_pos );
control->set_size( Point2(new_size_x, new_size_y) );
} break;
default:{}
}
}
}
if (p_event.type==InputEvent::KEY) {
const InputEventKey &k=p_event.key;
if (k.pressed) {
if (k.scancode==KEY_UP)
_key_move(Vector2(0,-1),k.mod.shift);
else if (k.scancode==KEY_DOWN)
_key_move(Vector2(0,1),k.mod.shift);
else if (k.scancode==KEY_LEFT)
_key_move(Vector2(-1,0),k.mod.shift);
else if (k.scancode==KEY_RIGHT)
_key_move(Vector2(1,0),k.mod.shift);
}
}
}
bool ControlEditor::get_remove_list(List<Node*> *p_list) {
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
p_list->push_back(E->key());
}
return !p_list->empty();
}
void ControlEditor::_update_scroll(float) {
if (updating_scroll)
return;
if (!current_window)
return;
Point2 ofs;
ofs.x=h_scroll->get_val();
ofs.y=v_scroll->get_val();
// current_window->set_scroll(-ofs);
transform=Matrix32();
transform.scale_basis(Size2(zoom,zoom));
transform.elements[2]=-ofs*zoom;
RID viewport = editor->get_scene_root()->get_viewport();
VisualServer::get_singleton()->viewport_set_global_canvas_transform(viewport,transform);
update();
}
void ControlEditor::_notification(int p_what) {
if (p_what==NOTIFICATION_PROCESS) {
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
Control *control = E->key();
Rect2 r=control->get_window_rect();
if (r != E->get().last_rect ) {
update();
E->get().last_rect=r;
}
}
}
if (p_what==NOTIFICATION_CHILDREN_CONFIGURED) {
get_scene()->connect("node_removed",this,"_node_removed");
}
if (p_what==NOTIFICATION_DRAW) {
// TODO fetch the viewport?
/*
if (!control) {
h_scroll->hide();
v_scroll->hide();
return;
}
*/
_update_scrollbars();
if (!current_window)
return;
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
Control *control = E->key();
Rect2 rect=control->get_window_rect();
RID ci=get_canvas_item();
VisualServer::get_singleton()->canvas_item_set_clip(ci,true);
Point2 ofs=Point2();//get_global_pos();
Rect2 draw_rect=Rect2(rect.pos-ofs,rect.size);
draw_rect.pos = transform.xform(draw_rect.pos);
Color light_edit_color=Color(1.0,0.8,0.8);
Color dark_edit_color=Color(0.4,0.1,0.1);
Size2 handle_size=Size2(handle_len,handle_len);
#define DRAW_RECT( m_rect, m_color )\
VisualServer::get_singleton()->canvas_item_add_rect(ci,m_rect,m_color);
#define DRAW_EMPTY_RECT( m_rect, m_color )\
DRAW_RECT( Rect2(m_rect.pos,Size2(m_rect.size.width,1)), m_color );\
DRAW_RECT(Rect2(Point2(m_rect.pos.x,m_rect.pos.y+m_rect.size.height-1),Size2(m_rect.size.width,1)), m_color);\
DRAW_RECT(Rect2(m_rect.pos,Size2(1,m_rect.size.height)), m_color);\
DRAW_RECT(Rect2(Point2(m_rect.pos.x+m_rect.size.width-1,m_rect.pos.y),Size2(1,m_rect.size.height)), m_color);
#define DRAW_BORDER_RECT( m_rect, m_border_color,m_color )\
DRAW_RECT( m_rect, m_color );\
DRAW_EMPTY_RECT( m_rect, m_border_color );
DRAW_EMPTY_RECT( draw_rect.grow(2), light_edit_color );
DRAW_EMPTY_RECT( draw_rect.grow(1), dark_edit_color );
if (controls.size()==1) {
DRAW_BORDER_RECT( Rect2(draw_rect.pos-handle_size,handle_size), light_edit_color,dark_edit_color );
DRAW_BORDER_RECT( Rect2(draw_rect.pos+draw_rect.size,handle_size), light_edit_color,dark_edit_color );
DRAW_BORDER_RECT( Rect2(draw_rect.pos+Point2(draw_rect.size.width,-handle_size.y),handle_size), light_edit_color,dark_edit_color );
DRAW_BORDER_RECT( Rect2(draw_rect.pos+Point2(-handle_size.x,draw_rect.size.height),handle_size), light_edit_color,dark_edit_color );
DRAW_BORDER_RECT( Rect2(draw_rect.pos+Point2(Math::floor((draw_rect.size.width-handle_size.x)/2.0),-handle_size.height),handle_size), light_edit_color,dark_edit_color );
DRAW_BORDER_RECT( Rect2(draw_rect.pos+Point2(-handle_size.width,Math::floor((draw_rect.size.height-handle_size.y)/2.0)),handle_size), light_edit_color,dark_edit_color );
DRAW_BORDER_RECT( Rect2(draw_rect.pos+Point2(Math::floor((draw_rect.size.width-handle_size.x)/2.0),draw_rect.size.height),handle_size), light_edit_color,dark_edit_color );
DRAW_BORDER_RECT( Rect2(draw_rect.pos+Point2(draw_rect.size.width,Math::floor((draw_rect.size.height-handle_size.y)/2.0)),handle_size), light_edit_color,dark_edit_color );
}
//DRAW_EMPTY_RECT( Rect2( current_window->get_scroll()-Point2(1,1), get_size()+Size2(2,2)), Color(0.8,0.8,1.0,0.8) );
E->get().last_rect = rect;
}
}
}
void ControlEditor::edit(Control *p_control) {
drag=DRAG_NONE;
_clear_controls();
_add_control(p_control,EditInfo());
current_window=p_control->get_window();
update();
}
void ControlEditor::_find_controls_span(Node *p_node, Rect2& r_rect) {
if (!editor->get_scene())
return;
if (p_node!=editor->get_edited_scene() && p_node->get_owner()!=editor->get_edited_scene())
return;
if (p_node->cast_to<Control>()) {
Control *c = p_node->cast_to<Control>();
if (c->get_viewport() != editor->get_viewport()->get_viewport())
return; //bye, it's in another viewport
if (!c->get_parent_control()) {
Rect2 span = c->get_subtree_span_rect();
r_rect.merge(span);
}
}
for(int i=0;i<p_node->get_child_count();i++) {
_find_controls_span(p_node->get_child(i),r_rect);
}
}
void ControlEditor::_update_scrollbars() {
if (!editor->get_scene()) {
h_scroll->hide();
v_scroll->hide();
return;
}
updating_scroll=true;
Size2 size = get_size();
Size2 hmin = h_scroll->get_minimum_size();
Size2 vmin = v_scroll->get_minimum_size();
v_scroll->set_begin( Point2(size.width - vmin.width, 0) );
v_scroll->set_end( Point2(size.width, size.height) );
h_scroll->set_begin( Point2( 0, size.height - hmin.height) );
h_scroll->set_end( Point2(size.width-vmin.width, size.height) );
Rect2 local_rect = Rect2(Point2(),get_size()-Size2(vmin.width,hmin.height));
Rect2 control_rect=local_rect;
if (editor->get_edited_scene())
_find_controls_span(editor->get_edited_scene(),control_rect);
control_rect.pos*=zoom;
control_rect.size*=zoom;
/*
for(ControlMap::Element *E=controls.front();E;E=E->next()) {
Control *control = E->key();
Rect2 r = control->get_window()->get_subtree_span_rect();
if (E==controls.front()) {
control_rect = r.merge(local_rect);
} else {
control_rect = control_rect.merge(r);
}
}
*/
Point2 ofs;
if (control_rect.size.height <= local_rect.size.height) {
v_scroll->hide();
ofs.y=0;
} else {
v_scroll->show();
v_scroll->set_min(control_rect.pos.y);
v_scroll->set_max(control_rect.pos.y+control_rect.size.y);
v_scroll->set_page(local_rect.size.y);
ofs.y=-v_scroll->get_val();
}
if (control_rect.size.width <= local_rect.size.width) {
h_scroll->hide();
ofs.x=0;
} else {
h_scroll->show();
h_scroll->set_min(control_rect.pos.x);
h_scroll->set_max(control_rect.pos.x+control_rect.size.x);
h_scroll->set_page(local_rect.size.x);
ofs.x=-h_scroll->get_val();
}
// transform=Matrix32();
transform.elements[2]=ofs*zoom;
RID viewport = editor->get_scene_root()->get_viewport();
VisualServer::get_singleton()->viewport_set_global_canvas_transform(viewport,transform);
// transform.scale_basis(Vector2(zoom,zoom));
updating_scroll=false;
}
Point2i ControlEditor::snapify(const Point2i& p_pos) const {
bool active=popup->is_item_checked(0);
int snap = snap_val->get_text().to_int();
if (!active || snap<1)
return p_pos;
Point2i pos=p_pos;
pos.x-=pos.x%snap;
pos.y-=pos.y%snap;
return pos;
}
void ControlEditor::_popup_callback(int p_op) {
switch(p_op) {
case SNAP_USE: {
popup->set_item_checked(0,!popup->is_item_checked(0));
} break;
case SNAP_CONFIGURE: {
snap_dialog->popup_centered(Size2(200,85));
} break;
}
}
void ControlEditor::_bind_methods() {
ObjectTypeDB::bind_method("_input_event",&ControlEditor::_input_event);
ObjectTypeDB::bind_method("_node_removed",&ControlEditor::_node_removed);
ObjectTypeDB::bind_method("_update_scroll",&ControlEditor::_update_scroll);
ObjectTypeDB::bind_method("_popup_callback",&ControlEditor::_popup_callback);
ObjectTypeDB::bind_method("_visibility_changed",&ControlEditor::_visibility_changed);
}
ControlEditor::ControlEditor(EditorNode *p_editor) {
editor=p_editor;
h_scroll = memnew( HScrollBar );
v_scroll = memnew( VScrollBar );
add_child(h_scroll);
add_child(v_scroll);
h_scroll->connect("value_changed", this,"_update_scroll",Vector<Variant>(),true);
v_scroll->connect("value_changed", this,"_update_scroll",Vector<Variant>(),true);
updating_scroll=false;
set_focus_mode(FOCUS_ALL);
handle_len=10;
popup=memnew( PopupMenu );
popup->add_check_item("Use Snap");
popup->add_item("Configure Snap..");
add_child(popup);
snap_dialog = memnew( ConfirmationDialog );
snap_dialog->get_ok()->hide();
snap_dialog->get_cancel()->set_text("Close");
add_child(snap_dialog);
Label *l = memnew(Label);
l->set_text("Snap:");
l->set_pos(Point2(5,5));
snap_dialog->add_child(l);
snap_val=memnew(LineEdit);
snap_val->set_text("5");
snap_val->set_anchor(MARGIN_RIGHT,ANCHOR_END);
snap_val->set_begin(Point2(15,25));
snap_val->set_end(Point2(10,25));
snap_dialog->add_child(snap_val);
popup->connect("item_pressed", this,"_popup_callback");
current_window=NULL;
zoom=0.5;
}
void ControlEditorPlugin::edit(Object *p_object) {
control_editor->set_undo_redo(&get_undo_redo());
control_editor->edit(p_object->cast_to<Control>());
}
bool ControlEditorPlugin::handles(Object *p_object) const {
return p_object->is_type("Control");
}
void ControlEditorPlugin::make_visible(bool p_visible) {
if (p_visible) {
control_editor->show();
control_editor->set_process(true);
} else {
control_editor->hide();
control_editor->set_process(false);
}
}
ControlEditorPlugin::ControlEditorPlugin(EditorNode *p_node) {
editor=p_node;
control_editor = memnew( ControlEditor(editor) );
editor->get_viewport()->add_child(control_editor);
control_editor->set_area_as_parent_rect();
control_editor->hide();
}
ControlEditorPlugin::~ControlEditorPlugin()
{
}
#endif
| tomasy23/evertonkrosnodart | tools/editor/plugins/control_editor_plugin.cpp | C++ | mit | 22,889 |
//
// ConvertLambdaBodyExpressionToStatementAction.cs
//
// Author:
// Mansheng Yang <lightyang0@gmail.com>
//
// Copyright (c) 2012 Mansheng Yang <lightyang0@gmail.com>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
namespace ICSharpCode.NRefactory.CSharp.Refactoring
{
[ContextAction ("Converts expression of lambda body to statement",
Description = "Converts expression of lambda body to statement")]
public class ConvertLambdaBodyExpressionToStatementAction : SpecializedCodeAction<LambdaExpression>
{
protected override CodeAction GetAction (RefactoringContext context, LambdaExpression node)
{
if (!node.ArrowToken.Contains (context.Location))
return null;
var bodyExpr = node.Body as Expression;
if (bodyExpr == null)
return null;
return new CodeAction (context.TranslateString ("Convert to lambda statement"),
script =>
{
var body = new BlockStatement ();
if (RequireReturnStatement (context, node)) {
body.Add (new ReturnStatement (bodyExpr.Clone ()));
} else {
body.Add (new ExpressionStatement (bodyExpr.Clone ()));
}
script.Replace (bodyExpr, body);
});
}
static bool RequireReturnStatement (RefactoringContext context, LambdaExpression lambda)
{
var type = LambdaHelper.GetLambdaReturnType (context, lambda);
return type != null && type.ReflectionName != "System.Void";
}
}
}
| praeclarum/Netjs | Netjs/Dependencies/NRefactory/ICSharpCode.NRefactory.CSharp/Refactoring/CodeActions/ConvertLambdaBodyExpressionToStatementAction.cs | C# | mit | 2,431 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magento.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magento.com for more information.
*
* @category Mage
* @package Mage_Cms
* @copyright Copyright (c) 2006-2016 X.commerce, Inc. and affiliates (http://www.magento.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
/**
* Cms index controller
*
* @category Mage
* @package Mage_Cms
* @author Magento Core Team <core@magentocommerce.com>
*/
class Mage_Cms_IndexController extends Mage_Core_Controller_Front_Action
{
/**
* Renders CMS Home page
*
* @param string $coreRoute
*/
public function indexAction($coreRoute = null)
{
$pageId = Mage::getStoreConfig(Mage_Cms_Helper_Page::XML_PATH_HOME_PAGE);
if (!Mage::helper('cms/page')->renderPage($this, $pageId)) {
$this->_forward('defaultIndex');
}
}
/**
* Default index action (with 404 Not Found headers)
* Used if default page don't configure or available
*
*/
public function defaultIndexAction()
{
$this->getResponse()->setHeader('HTTP/1.1','404 Not Found');
$this->getResponse()->setHeader('Status','404 File not found');
$this->loadLayout();
$this->renderLayout();
}
/**
* Render CMS 404 Not found page
*
* @param string $coreRoute
*/
public function noRouteAction($coreRoute = null)
{
$this->getResponse()->setHeader('HTTP/1.1','404 Not Found');
$this->getResponse()->setHeader('Status','404 File not found');
$pageId = Mage::getStoreConfig(Mage_Cms_Helper_Page::XML_PATH_NO_ROUTE_PAGE);
if (!Mage::helper('cms/page')->renderPage($this, $pageId)) {
$this->_forward('defaultNoRoute');
}
}
/**
* Default no route page action
* Used if no route page don't configure or available
*
*/
public function defaultNoRouteAction()
{
$this->getResponse()->setHeader('HTTP/1.1','404 Not Found');
$this->getResponse()->setHeader('Status','404 File not found');
$this->loadLayout();
$this->renderLayout();
}
/**
* Render Disable cookies page
*
*/
public function noCookiesAction()
{
$pageId = Mage::getStoreConfig(Mage_Cms_Helper_Page::XML_PATH_NO_COOKIES_PAGE);
if (!Mage::helper('cms/page')->renderPage($this, $pageId)) {
$this->_forward('defaultNoCookies');;
}
}
/**
* Default no cookies page action
* Used if no cookies page don't configure or available
*
*/
public function defaultNoCookiesAction()
{
$this->loadLayout();
$this->renderLayout();
}
}
| hansbonini/cloud9-magento | www/app/code/core/Mage/Cms/controllers/IndexController.php | PHP | mit | 3,373 |
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const app_utils_1 = require("../../utilities/app-utils");
const dynamic_path_parser_1 = require("../../utilities/dynamic-path-parser");
const config_1 = require("../../models/config");
const stringUtils = require('ember-cli-string-utils');
const Blueprint = require('../../ember-cli/lib/models/blueprint');
const getFiles = Blueprint.prototype.files;
exports.default = Blueprint.extend({
name: 'class',
description: '',
aliases: ['cl'],
availableOptions: [
{
name: 'spec',
type: Boolean,
description: 'Specifies if a spec file is generated.'
},
{
name: 'app',
type: String,
aliases: ['a'],
description: 'Specifies app name to use.'
}
],
normalizeEntityName: function (entityName) {
const appConfig = app_utils_1.getAppFromConfig(this.options.app);
const parsedPath = dynamic_path_parser_1.dynamicPathParser(this.project, entityName.split('.')[0], appConfig);
this.dynamicPath = parsedPath;
return parsedPath.name;
},
locals: function (options) {
const rawName = options.args[1];
const nameParts = rawName.split('.')
.filter(part => part.length !== 0);
const classType = nameParts[1];
this.fileName = stringUtils.dasherize(options.entity.name);
if (classType) {
this.fileName += '.' + classType.toLowerCase();
}
options.spec = options.spec !== undefined ?
options.spec : config_1.CliConfig.getValue('defaults.class.spec');
return {
dynamicPath: this.dynamicPath.dir,
flat: options.flat,
fileName: this.fileName
};
},
files: function () {
let fileList = getFiles.call(this);
if (this.options && !this.options.spec) {
fileList = fileList.filter(p => p.indexOf('__name__.spec.ts') < 0);
}
return fileList;
},
fileMapTokens: function () {
// Return custom template variables here.
return {
__path__: () => {
this.generatePath = this.dynamicPath.dir;
return this.generatePath;
},
__name__: () => {
return this.fileName;
}
};
}
});
//# sourceMappingURL=/users/hans/sources/angular-cli/blueprints/class/index.js.map | raymonddavis/Angular-SailsJs-SocketIo | web/node_modules/@angular/cli/blueprints/class/index.js | JavaScript | mit | 2,492 |
<?php
/**
* PHPUnit
*
* Copyright (c) 2002-2014, Sebastian Bergmann <sebastian@phpunit.de>.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Sebastian Bergmann nor the names of his
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* @package DbUnit
* @author Mike Lively <m@digitalsandwich.com>
* @copyright 2002-2014 Sebastian Bergmann <sebastian@phpunit.de>
* @license http://www.opensource.org/licenses/BSD-3-Clause The BSD 3-Clause License
* @link http://www.phpunit.de/
* @since File available since Release 1.0.0
*/
/**
* This class facilitates combining database operations. To create a composite
* operation pass an array of classes that implement
* PHPUnit_Extensions_Database_Operation_IDatabaseOperation and they will be
* executed in that order against all data sets.
*
* @package DbUnit
* @author Mike Lively <m@digitalsandwich.com>
* @copyright 2010-2014 Mike Lively <m@digitalsandwich.com>
* @license http://www.opensource.org/licenses/BSD-3-Clause The BSD 3-Clause License
* @version Release: @package_version@
* @link http://www.phpunit.de/
* @since Class available since Release 1.0.0
*/
class PHPUnit_Extensions_Database_Operation_Composite implements PHPUnit_Extensions_Database_Operation_IDatabaseOperation
{
/**
* @var array
*/
protected $operations = array();
/**
* Creates a composite operation.
*
* @param array $operations
*/
public function __construct(Array $operations)
{
foreach ($operations as $operation) {
if ($operation instanceof PHPUnit_Extensions_Database_Operation_IDatabaseOperation) {
$this->operations[] = $operation;
} else {
throw new InvalidArgumentException("Only database operation instances can be passed to a composite database operation.");
}
}
}
public function execute(PHPUnit_Extensions_Database_DB_IDatabaseConnection $connection, PHPUnit_Extensions_Database_DataSet_IDataSet $dataSet)
{
try {
foreach ($this->operations as $operation) {
/* @var $operation PHPUnit_Extensions_Database_Operation_IDatabaseOperation */
$operation->execute($connection, $dataSet);
}
} catch (PHPUnit_Extensions_Database_Operation_Exception $e) {
throw new PHPUnit_Extensions_Database_Operation_Exception("COMPOSITE[{$e->getOperation()}]", $e->getQuery(), $e->getArgs(), $e->getTable(), $e->getError());
}
}
}
| drBenway/siteResearch | vendor/phpunit/dbunit/PHPUnit/Extensions/Database/Operation/Composite_1.php | PHP | mit | 3,983 |
export { default, DimmerProps } from './Dimmer';
| aabustamante/Semantic-UI-React | src/modules/Dimmer/index.d.ts | TypeScript | mit | 49 |
package keyseq
type TernaryTrie struct {
root TernaryNode
}
func NewTernaryTrie() *TernaryTrie {
return &TernaryTrie{}
}
func (t *TernaryTrie) Root() Node {
return &t.root
}
func (t *TernaryTrie) GetList(k KeyList) Node {
return Get(t, k)
}
func (t *TernaryTrie) Get(k Key) Node {
return Get(t, KeyList{k})
}
func (t *TernaryTrie) Put(k KeyList, v interface{}) Node {
return Put(t, k, v)
}
func (t *TernaryTrie) Size() int {
count := 0
EachDepth(t, func(Node) bool {
count++
return true
})
return count
}
func (t *TernaryTrie) Balance() {
EachDepth(t, func(n Node) bool {
n.(*TernaryNode).Balance()
return true
})
t.root.Balance()
}
type TernaryNode struct {
label Key
firstChild *TernaryNode
low, high *TernaryNode
value interface{}
}
func NewTernaryNode(l Key) *TernaryNode {
return &TernaryNode{label: l}
}
func (n *TernaryNode) GetList(k KeyList) Node {
return n.Get(k[0])
}
func (n *TernaryNode) Get(k Key) Node {
curr := n.firstChild
for curr != nil {
switch k.Compare(curr.label) {
case 0: // equal
return curr
case -1: // less
curr = curr.low
default: //more
curr = curr.high
}
}
return nil
}
func (n *TernaryNode) Dig(k Key) (node Node, isnew bool) {
curr := n.firstChild
if curr == nil {
n.firstChild = NewTernaryNode(k)
return n.firstChild, true
}
for {
switch k.Compare(curr.label) {
case 0:
return curr, false
case -1:
if curr.low == nil {
curr.low = NewTernaryNode(k)
return curr.low, true
}
curr = curr.low
default:
if curr.high == nil {
curr.high = NewTernaryNode(k)
return curr.high, true
}
curr = curr.high
}
}
}
func (n *TernaryNode) FirstChild() *TernaryNode {
return n.firstChild
}
func (n *TernaryNode) HasChildren() bool {
return n.firstChild != nil
}
func (n *TernaryNode) Size() int {
if n.firstChild == nil {
return 0
}
count := 0
n.Each(func(Node) bool {
count++
return true
})
return count
}
func (n *TernaryNode) Each(proc func(Node) bool) {
var f func(*TernaryNode) bool
f = func(n *TernaryNode) bool {
if n != nil {
if !f(n.low) || !proc(n) || !f(n.high) {
return false
}
}
return true
}
f(n.firstChild)
}
func (n *TernaryNode) RemoveAll() {
n.firstChild = nil
}
func (n *TernaryNode) Label() Key {
return n.label
}
func (n *TernaryNode) Value() interface{} {
return n.value
}
func (n *TernaryNode) SetValue(v interface{}) {
n.value = v
}
func (n *TernaryNode) children() []*TernaryNode {
children := make([]*TernaryNode, n.Size())
if n.firstChild == nil {
return children
}
idx := 0
n.Each(func(child Node) bool {
children[idx] = child.(*TernaryNode)
idx++
return true
})
return children
}
func (n *TernaryNode) Balance() {
if n.firstChild == nil {
return
}
children := n.children()
for _, child := range children {
child.low = nil
child.high = nil
}
n.firstChild = balance(children, 0, len(children))
}
func balance(nodes []*TernaryNode, s, e int) *TernaryNode {
count := e - s
if count <= 0 {
return nil
} else if count == 1 {
return nodes[s]
} else if count == 2 {
nodes[s].high = nodes[s+1]
return nodes[s]
} else {
mid := (s + e) / 2
n := nodes[mid]
n.low = balance(nodes, s, mid)
n.high = balance(nodes, mid+1, e)
return n
}
}
| dav009/peco | keyseq/ternary.go | GO | mit | 3,295 |
using System;
using System.IO;
using Foundation;
using UIKit;
using CoreGraphics;
using Dropbox.CoreApi.iOS;
namespace DropboxCoreApiSample
{
public partial class TextViewController : UIViewController
{
// A TextField with Placeholder
CustomUITextView textView;
RestClient restClient;
string filename;
public TextViewController ()
{
View.BackgroundColor = UIColor.White;
// Will handle the save to Dropbox process
var btnSave = new UIBarButtonItem ("Save", UIBarButtonItemStyle.Plain, WriteFile);
btnSave.Enabled = false;
// Create the TextField with a Placeholder
textView = new CustomUITextView (CGRect.Empty, "Type something nice!");
textView.TranslatesAutoresizingMaskIntoConstraints = false;
// If the user has written something, you can save the file
textView.Changed += (sender, e) => btnSave.Enabled = textView.Text.Length != 0;
// Rest client that will handle the file upload
restClient = new RestClient (Session.SharedSession);
// Once the file is on Dropbox, notify the user
restClient.FileUploaded += (sender, e) => {
new UIAlertView ("Saved on Dropbox", "The file was uploaded to Dropbox correctly", null, "OK", null).Show ();
#if __UNIFIED__
NavigationController.PopViewController (true);
#else
NavigationController.PopViewControllerAnimated (true);
#endif
};
// Handle if something went wrong with the upload of the file
restClient.LoadFileFailed += (sender, e) => {
// Try to upload the file again
var alertView = new UIAlertView ("Hmm...", "Something went wrong when trying to save the file on Dropbox...", null, "Not now", new [] { "Try Again" });
alertView.Clicked += (avSender, avE) => {
if (avE.ButtonIndex == 1)
restClient.UploadFile (filename, DropboxCredentials.FolderPath, null, Path.GetTempPath () + filename);
};
alertView.Show ();
};
// Add the view with its constraints
View.Add (textView);
NavigationItem.RightBarButtonItem = btnSave;
AddConstraints ();
}
void AddConstraints ()
{
var views = new NSDictionary ("textView", textView);
View.AddConstraints (NSLayoutConstraint.FromVisualFormat ("H:|-0-[textView]-0-|", 0, null, views));
View.AddConstraints (NSLayoutConstraint.FromVisualFormat ("V:|-0-[textView]-0-|", 0, null, views));
}
// Process to save the file on Dropbox
void WriteFile (object sender, EventArgs e)
{
// Notify that the user has ended typing
textView.EndEditing (true);
// Ask for a name to the file
var alertView = new UIAlertView ("Save to Dropbox", "Enter a name for the file", null, "Cancel", new [] { "Save" });
alertView.AlertViewStyle = UIAlertViewStyle.PlainTextInput;
alertView.Clicked += (avSender, avE) => {
// Once we have the name, we need to save the file locally first and then upload it to Dropbox
if (avE.ButtonIndex == 1) {
filename = alertView.GetTextField (0).Text + ".txt";
var fullPath = Path.GetTempPath () + filename;
// Write the file locally
File.WriteAllText (fullPath, textView.Text);
// Now upload it to Dropbox
restClient.UploadFile (filename, DropboxCredentials.FolderPath, null, fullPath);
}
};
alertView.Show ();
}
}
}
| JonDouglas/XamarinComponents | XPlat/DropboxCoreApi/iOS/samples/DropboxCoreApiSample/DropboxCoreApiSample/TextViewController.cs | C# | mit | 3,244 |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Data;
using System.Drawing.Drawing2D;
using System.Text;
using System.Windows.Forms;
using OpenLiveWriter.ApplicationFramework;
using OpenLiveWriter.BlogClient;
using OpenLiveWriter.Extensibility.BlogClient;
using OpenLiveWriter.Localization;
using OpenLiveWriter.Localization.Bidi;
using OpenLiveWriter.PostEditor.PostPropertyEditing.CategoryControl;
using OpenLiveWriter.CoreServices;
namespace OpenLiveWriter.PostEditor.PostPropertyEditing
{
/*
* TODO
* Bugs:
- Visibility is all screwed up!!
- Position of page-relevant labels vs. controls in dialog
* Putting focus in a textbox that has a cue banner, causes dirty flag to be marked
- Space should trigger View All
- F2 during page context shows too many labels
- Label visibility not staying in sync with control
- Unchecking publish date in dialog doesn't set cue banner in band
- Activate main window when dialog is dismissed
- Some labels not localized
- Labels do not have mnemonics
- Clicking on View All should restore a visible but minimized dialog
* Horizontal scrollbar sometimes flickers on
- Trackback detailed label
* Dropdown for Page Parent combo is not the right width
* Each Page Parent combo makes its own delayed request
- Tags control should share leftover space with category control
x Properties dialog should hide and come back
*
* Questions:
* Should Enter dismiss the properties dialog?
* Should properties dialog scroll state be remembered between views?
*/
public partial class PostPropertiesBandControl : UserControl, IBlogPostEditor, IRtlAware, INewCategoryContext
{
private Blog _targetBlog;
private IBlogClientOptions _clientOptions;
private const int COL_CATEGORY = 0;
private const int COL_TAGS = 1;
private const int COL_DATE = 2;
private const int COL_PAGEPARENTLABEL = 3;
private const int COL_PAGEPARENT = 4;
private const int COL_PAGEORDERLABEL = 5;
private const int COL_PAGEORDER = 6;
private const int COL_FILLER = 7;
private const int COL_VIEWALL = 8;
private readonly PostPropertiesForm postPropertiesForm;
private readonly List<PropertyField> fields = new List<PropertyField>();
private readonly SharedPropertiesController controller;
private readonly CategoryContext categoryContext;
public PostPropertiesBandControl(CommandManager commandManager)
{
SetStyle(ControlStyles.UserPaint, true);
SetStyle(ControlStyles.DoubleBuffer, true);
SetStyle(ControlStyles.AllPaintingInWmPaint, true);
SetStyle(ControlStyles.ResizeRedraw, true);
InitializeComponent();
categoryContext = new CategoryContext();
controller = new SharedPropertiesController(this, null, categoryDropDown,
null, textTags, labelPageOrder, textPageOrder, labelPageParent, comboPageParent, null,
datePublishDate, fields, categoryContext);
SimpleTextEditorCommandHelper.UseNativeBehaviors(commandManager,
textTags, textPageOrder);
postPropertiesForm = new PostPropertiesForm(commandManager, categoryContext);
if (components == null)
components = new Container();
components.Add(postPropertiesForm);
postPropertiesForm.Synchronize(controller);
commandManager.Add(CommandId.PostProperties, PostProperties_Execute);
commandManager.Add(CommandId.ShowCategoryPopup, ShowCategoryPopup_Execute);
linkViewAll.KeyDown += (sender, args) =>
{
if (args.KeyValue == ' ')
linkViewAll_LinkClicked(sender, new LinkLabelLinkClickedEventArgs(null));
};
// WinLive 180287: We don't want to show or use mnemonics on labels in the post properties band because
// they can steal focus from the canvas.
linkViewAll.Text = TextHelper.StripAmpersands(Res.Get(StringId.ViewAll));
linkViewAll.UseMnemonic = false;
labelPageParent.Text = TextHelper.StripAmpersands(Res.Get(StringId.PropertiesPageParent));
labelPageParent.UseMnemonic = false;
labelPageOrder.Text = TextHelper.StripAmpersands(Res.Get(StringId.PropertiesPageOrder));
labelPageOrder.UseMnemonic = false;
}
private void ShowCategoryPopup_Execute(object sender, EventArgs e)
{
if (postPropertiesForm.Visible)
postPropertiesForm.DisplayCategoryForm();
else
categoryDropDown.DisplayCategoryForm();
}
protected override void OnLoad(EventArgs args)
{
base.OnLoad(args);
FixCategoryDropDown();
}
private void FixCategoryDropDown()
{
// Exactly align the sizes of the category control and the publish datetime picker control
int nonItemHeight = categoryDropDown.Height - categoryDropDown.ItemHeight;
categoryDropDown.ItemHeight = datePublishDate.Height - nonItemHeight;
categoryDropDown.Height = datePublishDate.Height;
datePublishDate.LocationChanged += delegate
{
// Exactly align the vertical position of the category control and the publish datetime picker control
categoryDropDown.Anchor = categoryDropDown.Anchor | AnchorStyles.Top;
Padding margin = categoryDropDown.Margin;
margin.Top = datePublishDate.Top;
categoryDropDown.Margin = margin;
};
}
private void PostProperties_Execute(object sender, EventArgs e)
{
if (!Visible)
return;
if (postPropertiesForm.Visible)
postPropertiesForm.Hide();
else
postPropertiesForm.Show(FindForm());
}
protected override void OnSizeChanged(EventArgs e)
{
base.OnSizeChanged(e);
Invalidate();
}
protected override void OnPaintBackground(PaintEventArgs e)
{
// Without the height/width checks, minimizing and restoring causes painting to blow up
if (!SystemInformation.HighContrast && table.Height > 0 && table.Width > 0 && panelShadow.Height > 0 && panelShadow.Width > 0)
{
using (
Brush brush = new LinearGradientBrush(table.Bounds, Color.FromArgb(0xDC, 0xE7, 0xF5), Color.White,
LinearGradientMode.Vertical))
e.Graphics.FillRectangle(brush, table.Bounds);
using (
Brush brush = new LinearGradientBrush(panelShadow.Bounds, Color.FromArgb(208, 208, 208), Color.White,
LinearGradientMode.Vertical))
e.Graphics.FillRectangle(brush, panelShadow.Bounds);
}
else
{
e.Graphics.Clear(SystemColors.Window);
}
}
private bool categoryVisible = true;
private bool CategoryVisible
{
set
{
table.ColumnStyles[COL_CATEGORY].SizeType = value ? SizeType.Percent : SizeType.AutoSize;
categoryDropDown.Visible = categoryVisible = value;
ManageFillerVisibility();
}
}
private bool tagsVisible = true;
private bool TagsVisible
{
set
{
table.ColumnStyles[COL_TAGS].SizeType = value ? SizeType.Percent : SizeType.AutoSize;
textTags.Visible = tagsVisible = value;
ManageFillerVisibility();
}
}
private void ManageFillerVisibility()
{
bool shouldShow = !categoryVisible && !tagsVisible;
table.ColumnStyles[COL_FILLER].SizeType = shouldShow ? SizeType.Percent : SizeType.AutoSize;
}
private IBlogPostEditingContext _editorContext;
public void Initialize(IBlogPostEditingContext editorContext, IBlogClientOptions clientOptions)
{
_editorContext = editorContext;
_clientOptions = clientOptions;
controller.Initialize(editorContext, clientOptions);
((IBlogPostEditor)postPropertiesForm).Initialize(editorContext, clientOptions);
ManageLayout();
}
private bool IsPage
{
get { return _editorContext != null && _editorContext.BlogPost != null ? _editorContext.BlogPost.IsPage : false; }
}
public void OnBlogChanged(Blog newBlog)
{
_clientOptions = newBlog.ClientOptions;
_targetBlog = newBlog;
controller.OnBlogChanged(newBlog);
((IBlogPostEditor)postPropertiesForm).OnBlogChanged(newBlog);
ManageLayout();
}
public void OnBlogSettingsChanged(bool templateChanged)
{
controller.OnBlogSettingsChanged(templateChanged);
((IBlogPostEditor)postPropertiesForm).OnBlogSettingsChanged(templateChanged);
ManageLayout();
}
private void ManageLayout()
{
if (IsPage)
{
bool showViewAll = _clientOptions.SupportsCommentPolicy
|| _clientOptions.SupportsPingPolicy
|| _clientOptions.SupportsAuthor
|| _clientOptions.SupportsSlug
|| _clientOptions.SupportsPassword;
linkViewAll.Visible = showViewAll;
CategoryVisible = false;
TagsVisible = false;
Visible = showViewAll || _clientOptions.SupportsPageParent || _clientOptions.SupportsPageOrder;
}
else
{
bool showViewAll = _clientOptions.SupportsCommentPolicy
|| _clientOptions.SupportsPingPolicy
|| _clientOptions.SupportsAuthor
|| _clientOptions.SupportsSlug
|| _clientOptions.SupportsPassword
|| _clientOptions.SupportsExcerpt
|| _clientOptions.SupportsTrackbacks;
bool showTags = (_clientOptions.SupportsKeywords && (_clientOptions.KeywordsAsTags || _clientOptions.SupportsGetKeywords));
Visible = showViewAll
|| _clientOptions.SupportsCustomDate
|| showTags
|| _clientOptions.SupportsCategories;
CategoryVisible = _clientOptions.SupportsCategories;
TagsVisible = showTags;
linkViewAll.Visible = showViewAll;
}
}
public bool IsDirty
{
get { return controller.IsDirty || ((IBlogPostEditor)postPropertiesForm).IsDirty; }
}
public bool HasKeywords
{
get { return postPropertiesForm.HasKeywords; }
}
public void SaveChanges(BlogPost post, BlogPostSaveOptions options)
{
controller.SaveChanges(post, options);
((IBlogPostEditor)postPropertiesForm).SaveChanges(post, options);
}
public bool ValidatePublish()
{
return controller.ValidatePublish();
}
public void OnPublishSucceeded(BlogPost blogPost, PostResult postResult)
{
controller.OnPublishSucceeded(blogPost, postResult);
((IBlogPostEditor)postPropertiesForm).OnPublishSucceeded(blogPost, postResult);
}
public void OnClosing(CancelEventArgs e)
{
controller.OnClosing(e);
((IBlogPostEditor)postPropertiesForm).OnClosing(e);
}
public void OnPostClosing(CancelEventArgs e)
{
controller.OnPostClosing(e);
((IBlogPostEditor)postPropertiesForm).OnPostClosing(e);
}
public void OnClosed()
{
controller.OnClosed();
((IBlogPostEditor)postPropertiesForm).OnClosed();
}
public void OnPostClosed()
{
controller.OnPostClosed();
((IBlogPostEditor)postPropertiesForm).OnPostClosed();
}
private void linkViewAll_LinkClicked(object sender, LinkLabelLinkClickedEventArgs e)
{
if (e.Button != MouseButtons.Left)
return;
if (!postPropertiesForm.Visible)
postPropertiesForm.Show(FindForm());
else
{
if (postPropertiesForm.WindowState == FormWindowState.Minimized)
postPropertiesForm.WindowState = FormWindowState.Normal;
postPropertiesForm.Activate();
}
}
void IRtlAware.Layout()
{
}
#region Implementation of INewCategoryContext
public void NewCategoryAdded(BlogPostCategory category)
{
controller.NewCategoryAdded(category);
}
#endregion
}
}
| adilmughal/OpenLiveWriter | src/managed/OpenLiveWriter.PostEditor/PostPropertyEditing/PostPropertiesBandControl.cs | C# | mit | 13,831 |
<?php
namespace Herrera\Cli\Tests\Provider;
use Herrera\Cli\Provider\ErrorHandlingServiceProvider;
use Herrera\PHPUnit\TestCase;
use Herrera\Service\Container;
class ErrorHandlingServiceProviderTest extends TestCase
{
public function testRegister()
{
$container = new Container();
$container->register(new ErrorHandlingServiceProvider());
$this->setExpectedException(
'ErrorException',
'Test error.'
);
trigger_error('Test error.', E_USER_ERROR);
}
public function testRegisterIgnored()
{
$container = new Container();
$container->register(new ErrorHandlingServiceProvider());
error_reporting(E_ALL ^ E_USER_NOTICE);
trigger_error('Test error.', E_USER_NOTICE);
$this->assertTrue(true);
}
}
| spi-ke/Socialman | src/vendors/herrera-io/cli-app/src/tests/Herrera/Cli/Tests/Provider/ErrorHandlingServiceProviderTest.php | PHP | mit | 828 |
<?php
/**
* PHPExcel
*
* Copyright (c) 2006 - 2007 PHPExcel, Maarten Balliauw
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* @copyright Copyright (c) 2006 - 2007 PHPExcel (http://www.codeplex.com/PHPExcel)
* @license http://www.gnu.org/licenses/gpl.txt GPL
*/
/**
* This file creates a build of PHPExcel
*/
// Starting build
echo date('H:i:s') . " Starting build...\n";
// Specify paths and files to include
$aFilesToInclude = array('../changelog.txt', '../install.txt', '../license.txt');
$aPathsToInclude = array('../Classes', '../Tests', '../Documentation');
// Resulting file
$strResultingFile = 'LatestBuild.zip';
// Create new ZIP file and open it for writing
echo date('H:i:s') . " Creating ZIP archive...\n";
$objZip = new ZipArchive();
// Try opening the ZIP file
if ($objZip->open($strResultingFile, ZIPARCHIVE::OVERWRITE) !== true) {
throw new Exeption("Could not open " . $strResultingFile . " for writing!");
}
// Add files to include
foreach ($aFilesToInclude as $strFile) {
echo date('H:i:s') . " Adding file $strFile\n";
$objZip->addFile($strFile, cleanFileName($strFile));
}
// Add paths to include
foreach ($aPathsToInclude as $strPath) {
addPathToZIP($strPath, $objZip);
}
// Set archive comment...
echo date('H:i:s') . " Set archive comment...\n";
$objZip->setArchiveComment('PHPExcel - http://www.codeplex.com/PHPExcel');
// Close file
echo date('H:i:s') . " Saving ZIP archive...\n";
$objZip->close();
// Finished build
echo date('H:i:s') . " Finished build!\n";
/**
* Add a specific path's files and folders to a ZIP object
*
* @param string $strPath Path to add
* @param ZipArchive $objZip ZipArchive object
*/
function addPathToZIP($strPath, $objZip) {
echo date('H:i:s') . " Adding path $strPath...\n";
$currentDir = opendir($strPath);
while ($strFile = readdir($currentDir)) {
if ($strFile != '.' && $strFile != '..') {
if (is_file($strPath . '/' . $strFile)) {
$objZip->addFile($strPath . '/' . $strFile, cleanFileName($strPath . '/' . $strFile));
} else if (is_dir($strPath . '/' . $strFile)) {
if (!eregi('.svn', $strFile)) {
addPathToZIP( ($strPath . '/' . $strFile), $objZip );
}
}
}
}
}
/**
* Cleanup a filename
*
* @param string $strFile Filename
* @return string Filename
*/
function cleanFileName($strFile) {
$strFile = str_replace('../', '', $strFile);
$strFile = str_replace('WINDOWS', '', $strFile);
while (eregi('//', $strFile)) {
$strFile = str_replace('//', '/', $strFile);
}
return $strFile;
} | ALTELMA/OfficeEquipmentManager | application/libraries/PHPExcel/branches/v1.1.1/Build/build.php | PHP | mit | 3,220 |
// Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Bot.Connector
{
using System;
using System.Linq;
using System.Collections.Generic;
using Newtonsoft.Json;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
/// <summary>
/// Attachment View name and size
/// </summary>
public partial class AttachmentView
{
/// <summary>
/// Initializes a new instance of the AttachmentView class.
/// </summary>
public AttachmentView() { }
/// <summary>
/// Initializes a new instance of the AttachmentView class.
/// </summary>
public AttachmentView(string viewId = default(string), int? size = default(int?))
{
ViewId = viewId;
Size = size;
}
/// <summary>
/// content type of the attachmnet
/// </summary>
[JsonProperty(PropertyName = "viewId")]
public string ViewId { get; set; }
/// <summary>
/// Name of the attachment
/// </summary>
[JsonProperty(PropertyName = "size")]
public int? Size { get; set; }
}
}
| dr-em/BotBuilder | CSharp/Library/Microsoft.Bot.Connector/ConnectorAPI/Models/AttachmentView.cs | C# | mit | 1,269 |
'use strict';
var path = require('path');
var fixtures = require('haraka-test-fixtures');
var Connection = fixtures.connection;
var Plugin = fixtures.plugin;
var _set_up = function(done) {
this.backup = {};
// needed for tests
this.plugin = new Plugin('auth/auth_vpopmaild');
this.plugin.inherits('auth/auth_base');
// reset the config/root_path
this.plugin.config.root_path = path.resolve(__dirname, '../../../config');
this.plugin.cfg = this.plugin.config.get('auth_vpopmaild.ini');
this.connection = Connection.createConnection();
this.connection.capabilities=null;
done();
};
exports.hook_capabilities = {
setUp : _set_up,
'no TLS': function (test) {
var cb = function (rc, msg) {
test.expect(3);
test.equal(undefined, rc);
test.equal(undefined, msg);
test.equal(null, this.connection.capabilities);
test.done();
}.bind(this);
this.plugin.hook_capabilities(cb, this.connection);
},
'with TLS': function (test) {
var cb = function (rc, msg) {
test.expect(3);
test.equal(undefined, rc);
test.equal(undefined, msg);
test.ok(this.connection.capabilities.length);
// console.log(this.connection.capabilities);
test.done();
}.bind(this);
this.connection.using_tls=true;
this.connection.capabilities=[];
this.plugin.hook_capabilities(cb, this.connection);
},
'with TLS, sysadmin': function (test) {
var cb = function (rc, msg) {
test.expect(3);
test.equal(undefined, rc);
test.equal(undefined, msg);
test.ok(this.connection.capabilities.length);
// console.log(this.connection.capabilities);
test.done();
}.bind(this);
this.connection.using_tls=true;
this.connection.capabilities=[];
this.plugin.hook_capabilities(cb, this.connection);
},
};
exports.get_vpopmaild_socket = {
setUp : _set_up,
'any': function (test) {
test.expect(1);
var socket = this.plugin.get_vpopmaild_socket('foo@localhost.com');
// console.log(socket);
test.ok(socket);
socket.end();
test.done();
}
};
exports.get_plain_passwd = {
setUp : _set_up,
'matt@example.com': function (test) {
var cb = function (pass) {
test.expect(1);
test.ok(pass);
test.done();
};
if (this.plugin.cfg['example.com'].sysadmin) {
this.plugin.get_plain_passwd('matt@example.com', cb);
}
else {
test.expect(0);
test.done();
}
}
};
| slattery/Haraka | tests/plugins/auth/auth_vpopmaild.js | JavaScript | mit | 2,773 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#nullable disable
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using Microsoft.CodeAnalysis.CSharp.Emit;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.PooledObjects;
using Microsoft.CodeAnalysis.Symbols;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp
{
/// <summary>
/// The base class for all symbols (namespaces, classes, method, parameters, etc.) that are
/// exposed by the compiler.
/// </summary>
[DebuggerDisplay("{GetDebuggerDisplay(), nq}")]
internal abstract partial class Symbol : ISymbolInternal, IFormattable
{
private ISymbol _lazyISymbol;
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// Changes to the public interface of this class should remain synchronized with the VB version of Symbol.
// Do not make any changes to the public interface without making the corresponding change
// to the VB version.
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
/// <summary>
/// True if this Symbol should be completed by calling ForceComplete.
/// Intuitively, true for source entities (from any compilation).
/// </summary>
internal virtual bool RequiresCompletion
{
get { return false; }
}
internal virtual void ForceComplete(SourceLocation locationOpt, CancellationToken cancellationToken)
{
// must be overridden by source symbols, no-op for other symbols
Debug.Assert(!this.RequiresCompletion);
}
internal virtual bool HasComplete(CompletionPart part)
{
// must be overridden by source symbols, no-op for other symbols
Debug.Assert(!this.RequiresCompletion);
return true;
}
/// <summary>
/// Gets the name of this symbol. Symbols without a name return the empty string; null is
/// never returned.
/// </summary>
public virtual string Name
{
get
{
return string.Empty;
}
}
/// <summary>
/// Gets the name of a symbol as it appears in metadata. Most of the time, this
/// is the same as the Name property, with the following exceptions:
/// 1) The metadata name of generic types includes the "`1", "`2" etc. suffix that
/// indicates the number of type parameters (it does not include, however, names of
/// containing types or namespaces).
/// 2) The metadata name of explicit interface names have spaces removed, compared to
/// the name property.
/// </summary>
public virtual string MetadataName
{
get
{
return this.Name;
}
}
/// <summary>
/// Gets the token for this symbol as it appears in metadata. Most of the time this is 0,
/// as it is when the symbol is not loaded from metadata.
/// </summary>
public virtual int MetadataToken => 0;
/// <summary>
/// Gets the kind of this symbol.
/// </summary>
public abstract SymbolKind Kind { get; }
/// <summary>
/// Get the symbol that logically contains this symbol.
/// </summary>
public abstract Symbol ContainingSymbol { get; }
/// <summary>
/// Returns the nearest lexically enclosing type, or null if there is none.
/// </summary>
public virtual NamedTypeSymbol ContainingType
{
get
{
Symbol container = this.ContainingSymbol;
NamedTypeSymbol containerAsType = container as NamedTypeSymbol;
// NOTE: container could be null, so we do not check
// whether containerAsType is not null, but
// instead check if it did not change after
// the cast.
if ((object)containerAsType == (object)container)
{
// this should be relatively uncommon
// most symbols that may be contained in a type
// know their containing type and can override ContainingType
// with a more precise implementation
return containerAsType;
}
// this is recursive, but recursion should be very short
// before we reach symbol that definitely knows its containing type.
return container.ContainingType;
}
}
/// <summary>
/// Gets the nearest enclosing namespace for this namespace or type. For a nested type,
/// returns the namespace that contains its container.
/// </summary>
public virtual NamespaceSymbol ContainingNamespace
{
get
{
for (var container = this.ContainingSymbol; (object)container != null; container = container.ContainingSymbol)
{
var ns = container as NamespaceSymbol;
if ((object)ns != null)
{
return ns;
}
}
return null;
}
}
/// <summary>
/// Returns the assembly containing this symbol. If this symbol is shared across multiple
/// assemblies, or doesn't belong to an assembly, returns null.
/// </summary>
public virtual AssemblySymbol ContainingAssembly
{
get
{
// Default implementation gets the containers assembly.
var container = this.ContainingSymbol;
return (object)container != null ? container.ContainingAssembly : null;
}
}
/// <summary>
/// For a source assembly, the associated compilation.
/// For any other assembly, null.
/// For a source module, the DeclaringCompilation of the associated source assembly.
/// For any other module, null.
/// For any other symbol, the DeclaringCompilation of the associated module.
/// </summary>
/// <remarks>
/// We're going through the containing module, rather than the containing assembly,
/// because of /addmodule (symbols in such modules should return null).
///
/// Remarks, not "ContainingCompilation" because it isn't transitive.
/// </remarks>
internal virtual CSharpCompilation DeclaringCompilation
{
get
{
switch (this.Kind)
{
case SymbolKind.ErrorType:
return null;
case SymbolKind.Assembly:
Debug.Assert(!(this is SourceAssemblySymbol), "SourceAssemblySymbol must override DeclaringCompilation");
return null;
case SymbolKind.NetModule:
Debug.Assert(!(this is SourceModuleSymbol), "SourceModuleSymbol must override DeclaringCompilation");
return null;
}
var sourceModuleSymbol = this.ContainingModule as SourceModuleSymbol;
return (object)sourceModuleSymbol == null ? null : sourceModuleSymbol.DeclaringCompilation;
}
}
Compilation ISymbolInternal.DeclaringCompilation
=> DeclaringCompilation;
string ISymbolInternal.Name => this.Name;
string ISymbolInternal.MetadataName => this.MetadataName;
ISymbolInternal ISymbolInternal.ContainingSymbol => this.ContainingSymbol;
IModuleSymbolInternal ISymbolInternal.ContainingModule => this.ContainingModule;
IAssemblySymbolInternal ISymbolInternal.ContainingAssembly => this.ContainingAssembly;
ImmutableArray<Location> ISymbolInternal.Locations => this.Locations;
INamespaceSymbolInternal ISymbolInternal.ContainingNamespace => this.ContainingNamespace;
bool ISymbolInternal.IsImplicitlyDeclared => this.IsImplicitlyDeclared;
INamedTypeSymbolInternal ISymbolInternal.ContainingType
{
get
{
return this.ContainingType;
}
}
ISymbol ISymbolInternal.GetISymbol() => this.ISymbol;
/// <summary>
/// Returns the module containing this symbol. If this symbol is shared across multiple
/// modules, or doesn't belong to a module, returns null.
/// </summary>
internal virtual ModuleSymbol ContainingModule
{
get
{
// Default implementation gets the containers module.
var container = this.ContainingSymbol;
return (object)container != null ? container.ContainingModule : null;
}
}
/// <summary>
/// The index of this member in the containing symbol. This is an optional
/// property, implemented by anonymous type properties only, for comparing
/// symbols in flow analysis.
/// </summary>
/// <remarks>
/// Should this be used for tuple fields as well?
/// </remarks>
internal virtual int? MemberIndexOpt => null;
/// <summary>
/// The original definition of this symbol. If this symbol is constructed from another
/// symbol by type substitution then OriginalDefinition gets the original symbol as it was defined in
/// source or metadata.
/// </summary>
public Symbol OriginalDefinition
{
get
{
return OriginalSymbolDefinition;
}
}
protected virtual Symbol OriginalSymbolDefinition
{
get
{
return this;
}
}
/// <summary>
/// Returns true if this is the original definition of this symbol.
/// </summary>
public bool IsDefinition
{
get
{
return (object)this == (object)OriginalDefinition;
}
}
/// <summary>
/// <para>
/// Get a source location key for sorting. For performance, it's important that this
/// be able to be returned from a symbol without doing any additional allocations (even
/// if nothing is cached yet.)
/// </para>
/// <para>
/// Only (original) source symbols and namespaces that can be merged
/// need implement this function if they want to do so for efficiency.
/// </para>
/// </summary>
internal virtual LexicalSortKey GetLexicalSortKey()
{
var locations = this.Locations;
var declaringCompilation = this.DeclaringCompilation;
Debug.Assert(declaringCompilation != null); // require that it is a source symbol
return (locations.Length > 0) ? new LexicalSortKey(locations[0], declaringCompilation) : LexicalSortKey.NotInSource;
}
/// <summary>
/// Gets the locations where this symbol was originally defined, either in source or
/// metadata. Some symbols (for example, partial classes) may be defined in more than one
/// location.
/// </summary>
public abstract ImmutableArray<Location> Locations { get; }
/// <summary>
/// <para>
/// Get the syntax node(s) where this symbol was declared in source. Some symbols (for
/// example, partial classes) may be defined in more than one location. This property should
/// return one or more syntax nodes only if the symbol was declared in source code and also
/// was not implicitly declared (see the <see cref="IsImplicitlyDeclared"/> property).
/// </para>
/// <para>
/// Note that for namespace symbol, the declaring syntax might be declaring a nested
/// namespace. For example, the declaring syntax node for N1 in "namespace N1.N2 {...}" is
/// the entire <see cref="BaseNamespaceDeclarationSyntax"/> for N1.N2. For the global namespace, the declaring
/// syntax will be the <see cref="CompilationUnitSyntax"/>.
/// </para>
/// </summary>
/// <returns>
/// The syntax node(s) that declared the symbol. If the symbol was declared in metadata or
/// was implicitly declared, returns an empty read-only array.
/// </returns>
/// <remarks>
/// To go the opposite direction (from syntax node to symbol), see <see
/// cref="CSharpSemanticModel.GetDeclaredSymbol(MemberDeclarationSyntax, CancellationToken)"/>.
/// </remarks>
public abstract ImmutableArray<SyntaxReference> DeclaringSyntaxReferences { get; }
/// <summary>
/// Helper for implementing <see cref="DeclaringSyntaxReferences"/> for derived classes that store a location but not a
/// <see cref="CSharpSyntaxNode"/> or <see cref="SyntaxReference"/>.
/// </summary>
internal static ImmutableArray<SyntaxReference> GetDeclaringSyntaxReferenceHelper<TNode>(ImmutableArray<Location> locations)
where TNode : CSharpSyntaxNode
{
if (locations.IsEmpty)
{
return ImmutableArray<SyntaxReference>.Empty;
}
ArrayBuilder<SyntaxReference> builder = ArrayBuilder<SyntaxReference>.GetInstance();
foreach (Location location in locations)
{
// Location may be null. See https://github.com/dotnet/roslyn/issues/28862.
if (location == null || !location.IsInSource)
{
continue;
}
if (location.SourceSpan.Length != 0)
{
SyntaxToken token = location.SourceTree.GetRoot().FindToken(location.SourceSpan.Start);
if (token.Kind() != SyntaxKind.None)
{
CSharpSyntaxNode node = token.Parent.FirstAncestorOrSelf<TNode>();
if (node != null)
{
builder.Add(node.GetReference());
}
}
}
else
{
// Since the location we're interested in can't contain a token, we'll inspect the whole tree,
// pruning away branches that don't contain that location. We'll pick the narrowest node of the type
// we're looking for.
// eg: finding the ParameterSyntax from the empty location of a blank identifier
SyntaxNode parent = location.SourceTree.GetRoot();
SyntaxNode found = null;
foreach (var descendant in parent.DescendantNodesAndSelf(c => c.Location.SourceSpan.Contains(location.SourceSpan)))
{
if (descendant is TNode && descendant.Location.SourceSpan.Contains(location.SourceSpan))
{
found = descendant;
}
}
if (found is object)
{
builder.Add(found.GetReference());
}
}
}
return builder.ToImmutableAndFree();
}
/// <summary>
/// Get this accessibility that was declared on this symbol. For symbols that do not have
/// accessibility declared on them, returns <see cref="Accessibility.NotApplicable"/>.
/// </summary>
public abstract Accessibility DeclaredAccessibility { get; }
/// <summary>
/// Returns true if this symbol is "static"; i.e., declared with the <c>static</c> modifier or
/// implicitly static.
/// </summary>
public abstract bool IsStatic { get; }
/// <summary>
/// Returns true if this symbol is "virtual", has an implementation, and does not override a
/// base class member; i.e., declared with the <c>virtual</c> modifier. Does not return true for
/// members declared as abstract or override.
/// </summary>
public abstract bool IsVirtual { get; }
/// <summary>
/// Returns true if this symbol was declared to override a base class member; i.e., declared
/// with the <c>override</c> modifier. Still returns true if member was declared to override
/// something, but (erroneously) no member to override exists.
/// </summary>
/// <remarks>
/// Even for metadata symbols, <see cref="IsOverride"/> = true does not imply that <see cref="IMethodSymbol.OverriddenMethod"/> will
/// be non-null.
/// </remarks>
public abstract bool IsOverride { get; }
/// <summary>
/// Returns true if this symbol was declared as requiring an override; i.e., declared with
/// the <c>abstract</c> modifier. Also returns true on a type declared as "abstract", all
/// interface types, and members of interface types.
/// </summary>
public abstract bool IsAbstract { get; }
/// <summary>
/// Returns true if this symbol was declared to override a base class member and was also
/// sealed from further overriding; i.e., declared with the <c>sealed</c> modifier. Also set for
/// types that do not allow a derived class (declared with <c>sealed</c> or <c>static</c> or <c>struct</c>
/// or <c>enum</c> or <c>delegate</c>).
/// </summary>
public abstract bool IsSealed { get; }
/// <summary>
/// Returns true if this symbol has external implementation; i.e., declared with the
/// <c>extern</c> modifier.
/// </summary>
public abstract bool IsExtern { get; }
/// <summary>
/// Returns true if this symbol was automatically created by the compiler, and does not
/// have an explicit corresponding source code declaration.
///
/// This is intended for symbols that are ordinary symbols in the language sense,
/// and may be used by code, but that are simply declared implicitly rather than
/// with explicit language syntax.
///
/// Examples include (this list is not exhaustive):
/// the default constructor for a class or struct that is created if one is not provided,
/// the BeginInvoke/Invoke/EndInvoke methods for a delegate,
/// the generated backing field for an auto property or a field-like event,
/// the "this" parameter for non-static methods,
/// the "value" parameter for a property setter,
/// the parameters on indexer accessor methods (not on the indexer itself),
/// methods in anonymous types,
/// anonymous functions
/// </summary>
public virtual bool IsImplicitlyDeclared
{
get { return false; }
}
/// <summary>
/// Returns true if this symbol can be referenced by its name in code. Examples of symbols
/// that cannot be referenced by name are:
/// constructors, destructors, operators, explicit interface implementations,
/// accessor methods for properties and events, array types.
/// </summary>
public bool CanBeReferencedByName
{
get
{
switch (this.Kind)
{
case SymbolKind.Local:
case SymbolKind.Label:
case SymbolKind.Alias:
case SymbolKind.RangeVariable:
// never imported, and always references by name.
return true;
case SymbolKind.Namespace:
case SymbolKind.Field:
case SymbolKind.ErrorType:
case SymbolKind.Parameter:
case SymbolKind.TypeParameter:
case SymbolKind.Event:
break;
case SymbolKind.NamedType:
if (((NamedTypeSymbol)this).IsSubmissionClass)
{
return false;
}
break;
case SymbolKind.Property:
var property = (PropertySymbol)this;
if (property.IsIndexer || property.MustCallMethodsDirectly)
{
return false;
}
break;
case SymbolKind.Method:
var method = (MethodSymbol)this;
switch (method.MethodKind)
{
case MethodKind.Ordinary:
case MethodKind.LocalFunction:
case MethodKind.ReducedExtension:
break;
case MethodKind.Destructor:
// You wouldn't think that destructors would be referenceable by name, but
// dev11 only prevents them from being invoked - they can still be assigned
// to delegates.
return true;
case MethodKind.DelegateInvoke:
return true;
case MethodKind.PropertyGet:
case MethodKind.PropertySet:
if (!((PropertySymbol)method.AssociatedSymbol).CanCallMethodsDirectly())
{
return false;
}
break;
default:
return false;
}
break;
case SymbolKind.ArrayType:
case SymbolKind.PointerType:
case SymbolKind.FunctionPointerType:
case SymbolKind.Assembly:
case SymbolKind.DynamicType:
case SymbolKind.NetModule:
case SymbolKind.Discard:
return false;
default:
throw ExceptionUtilities.UnexpectedValue(this.Kind);
}
// This will eliminate backing fields for auto-props, explicit interface implementations,
// indexers, etc.
// See the comment on ContainsDroppedIdentifierCharacters for an explanation of why
// such names are not referenceable (or see DevDiv #14432).
return SyntaxFacts.IsValidIdentifier(this.Name) &&
!SyntaxFacts.ContainsDroppedIdentifierCharacters(this.Name);
}
}
/// <summary>
/// As an optimization, viability checking in the lookup code should use this property instead
/// of <see cref="CanBeReferencedByName"/>. The full name check will then be performed in the <see cref="CSharpSemanticModel"/>.
/// </summary>
/// <remarks>
/// This property exists purely for performance reasons.
/// </remarks>
internal bool CanBeReferencedByNameIgnoringIllegalCharacters
{
get
{
if (this.Kind == SymbolKind.Method)
{
var method = (MethodSymbol)this;
switch (method.MethodKind)
{
case MethodKind.Ordinary:
case MethodKind.LocalFunction:
case MethodKind.DelegateInvoke:
case MethodKind.Destructor: // See comment in CanBeReferencedByName.
return true;
case MethodKind.PropertyGet:
case MethodKind.PropertySet:
return ((PropertySymbol)method.AssociatedSymbol).CanCallMethodsDirectly();
default:
return false;
}
}
return true;
}
}
/// <summary>
/// Perform additional checks after the member has been
/// added to the member list of the containing type.
/// </summary>
internal virtual void AfterAddingTypeMembersChecks(ConversionsBase conversions, BindingDiagnosticBag diagnostics)
{
}
// Note: This is no public "IsNew". This is intentional, because new has no syntactic meaning.
// It serves only to remove a warning. Furthermore, it can not be inferred from
// metadata. For symbols defined in source, the modifiers in the syntax tree
// can be examined.
/// <summary>
/// Compare two symbol objects to see if they refer to the same symbol. You should always
/// use <see cref="operator =="/> and <see cref="operator !="/>, or the <see cref="Equals(object)"/> method, to compare two symbols for equality.
/// </summary>
public static bool operator ==(Symbol left, Symbol right)
{
//PERF: this function is often called with
// 1) left referencing same object as the right
// 2) right being null
// The code attempts to check for these conditions before
// resorting to .Equals
// the condition is expected to be folded when inlining "someSymbol == null"
if (right is null)
{
return left is null;
}
// this part is expected to disappear when inlining "someSymbol == null"
return (object)left == (object)right || right.Equals(left);
}
/// <summary>
/// Compare two symbol objects to see if they refer to the same symbol. You should always
/// use == and !=, or the Equals method, to compare two symbols for equality.
/// </summary>
public static bool operator !=(Symbol left, Symbol right)
{
//PERF: this function is often called with
// 1) left referencing same object as the right
// 2) right being null
// The code attempts to check for these conditions before
// resorting to .Equals
//
//NOTE: we do not implement this as !(left == right)
// since that sometimes results in a worse code
// the condition is expected to be folded when inlining "someSymbol != null"
if (right is null)
{
return left is object;
}
// this part is expected to disappear when inlining "someSymbol != null"
return (object)left != (object)right && !right.Equals(left);
}
public sealed override bool Equals(object obj)
{
return this.Equals(obj as Symbol, SymbolEqualityComparer.Default.CompareKind);
}
public bool Equals(Symbol other)
{
return this.Equals(other, SymbolEqualityComparer.Default.CompareKind);
}
bool ISymbolInternal.Equals(ISymbolInternal other, TypeCompareKind compareKind)
{
return this.Equals(other as Symbol, compareKind);
}
// By default we don't consider the compareKind, and do reference equality. This can be overridden.
public virtual bool Equals(Symbol other, TypeCompareKind compareKind)
{
return (object)this == other;
}
// By default, we do reference equality. This can be overridden.
public override int GetHashCode()
{
return System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(this);
}
public static bool Equals(Symbol first, Symbol second, TypeCompareKind compareKind)
{
if (first is null)
{
return second is null;
}
return first.Equals(second, compareKind);
}
/// <summary>
/// Returns a string representation of this symbol, suitable for debugging purposes, or
/// for placing in an error message.
/// </summary>
/// <remarks>
/// This will provide a useful representation, but it would be clearer to call <see cref="ToDisplayString"/>
/// directly and provide an explicit format.
/// Sealed so that <see cref="ToString"/> and <see cref="ToDisplayString"/> can't get out of sync.
/// </remarks>
public sealed override string ToString()
{
return this.ToDisplayString();
}
// ---- End of Public Definition ---
// Below here can be various useful virtual methods that are useful to the compiler, but we don't
// want to expose publicly.
// ---- End of Public Definition ---
// Must override this in derived classes for visitor pattern.
internal abstract TResult Accept<TArgument, TResult>(CSharpSymbolVisitor<TArgument, TResult> visitor, TArgument a);
// Prevent anyone else from deriving from this class.
internal Symbol()
{
}
/// <summary>
/// Build and add synthesized attributes for this symbol.
/// </summary>
internal virtual void AddSynthesizedAttributes(PEModuleBuilder moduleBuilder, ref ArrayBuilder<SynthesizedAttributeData> attributes)
{
}
/// <summary>
/// Convenience helper called by subclasses to add a synthesized attribute to a collection of attributes.
/// </summary>
internal static void AddSynthesizedAttribute(ref ArrayBuilder<SynthesizedAttributeData> attributes, SynthesizedAttributeData attribute)
{
if (attribute != null)
{
if (attributes == null)
{
attributes = new ArrayBuilder<SynthesizedAttributeData>(1);
}
attributes.Add(attribute);
}
}
/// <summary>
/// <see cref="CharSet"/> effective for this symbol (type or DllImport method).
/// Nothing if <see cref="DefaultCharSetAttribute"/> isn't applied on the containing module or it doesn't apply on this symbol.
/// </summary>
/// <remarks>
/// Determined based upon value specified via <see cref="DefaultCharSetAttribute"/> applied on the containing module.
/// </remarks>
internal CharSet? GetEffectiveDefaultMarshallingCharSet()
{
Debug.Assert(this.Kind == SymbolKind.NamedType || this.Kind == SymbolKind.Method);
return this.ContainingModule.DefaultMarshallingCharSet;
}
internal bool IsFromCompilation(CSharpCompilation compilation)
{
Debug.Assert(compilation != null);
return compilation == this.DeclaringCompilation;
}
/// <summary>
/// Always prefer <see cref="IsFromCompilation"/>.
/// </summary>
/// <remarks>
/// <para>
/// Unfortunately, when determining overriding/hiding/implementation relationships, we don't
/// have the "current" compilation available. We could, but that would clutter up the API
/// without providing much benefit. As a compromise, we consider all compilations "current".
/// </para>
/// <para>
/// Unlike in VB, we are not allowing retargeting symbols. This method is used as an approximation
/// for <see cref="IsFromCompilation"/> when a compilation is not available and that method will never return
/// true for retargeting symbols.
/// </para>
/// </remarks>
internal bool Dangerous_IsFromSomeCompilation
{
get { return this.DeclaringCompilation != null; }
}
internal virtual bool IsDefinedInSourceTree(SyntaxTree tree, TextSpan? definedWithinSpan, CancellationToken cancellationToken = default(CancellationToken))
{
var declaringReferences = this.DeclaringSyntaxReferences;
if (this.IsImplicitlyDeclared && declaringReferences.Length == 0)
{
return this.ContainingSymbol.IsDefinedInSourceTree(tree, definedWithinSpan, cancellationToken);
}
foreach (var syntaxRef in declaringReferences)
{
cancellationToken.ThrowIfCancellationRequested();
if (syntaxRef.SyntaxTree == tree &&
(!definedWithinSpan.HasValue || syntaxRef.Span.IntersectsWith(definedWithinSpan.Value)))
{
return true;
}
}
return false;
}
internal static void ForceCompleteMemberByLocation(SourceLocation locationOpt, Symbol member, CancellationToken cancellationToken)
{
if (locationOpt == null || member.IsDefinedInSourceTree(locationOpt.SourceTree, locationOpt.SourceSpan, cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
member.ForceComplete(locationOpt, cancellationToken);
}
}
/// <summary>
/// Returns the Documentation Comment ID for the symbol, or null if the symbol doesn't
/// support documentation comments.
/// </summary>
public virtual string GetDocumentationCommentId()
{
// NOTE: we're using a try-finally here because there's a test that specifically
// triggers an exception here to confirm that some symbols don't have documentation
// comment IDs. We don't care about "leaks" in such cases, but we don't want spew
// in the test output.
var pool = PooledStringBuilder.GetInstance();
try
{
StringBuilder builder = pool.Builder;
DocumentationCommentIDVisitor.Instance.Visit(this, builder);
return builder.Length == 0 ? null : builder.ToString();
}
finally
{
pool.Free();
}
}
#nullable enable
/// <summary>
/// Fetches the documentation comment for this element with a cancellation token.
/// </summary>
/// <param name="preferredCulture">Optionally, retrieve the comments formatted for a particular culture. No impact on source documentation comments.</param>
/// <param name="expandIncludes">Optionally, expand <![CDATA[<include>]]> elements. No impact on non-source documentation comments.</param>
/// <param name="cancellationToken">Optionally, allow cancellation of documentation comment retrieval.</param>
/// <returns>The XML that would be written to the documentation file for the symbol.</returns>
public virtual string GetDocumentationCommentXml(
CultureInfo? preferredCulture = null,
bool expandIncludes = false,
CancellationToken cancellationToken = default(CancellationToken))
{
return "";
}
#nullable disable
private static readonly SymbolDisplayFormat s_debuggerDisplayFormat =
SymbolDisplayFormat.TestFormat
.AddMiscellaneousOptions(SymbolDisplayMiscellaneousOptions.IncludeNullableReferenceTypeModifier
| SymbolDisplayMiscellaneousOptions.IncludeNotNullableReferenceTypeModifier)
.WithCompilerInternalOptions(SymbolDisplayCompilerInternalOptions.None);
internal virtual string GetDebuggerDisplay()
{
return $"{this.Kind} {this.ToDisplayString(s_debuggerDisplayFormat)}";
}
internal virtual void AddDeclarationDiagnostics(BindingDiagnosticBag diagnostics)
{
#if DEBUG
if (ContainingSymbol is SourceMemberContainerTypeSymbol container)
{
container.AssertMemberExposure(this, forDiagnostics: true);
}
#endif
if (diagnostics.DiagnosticBag?.IsEmptyWithoutResolution == false || diagnostics.DependenciesBag?.Count > 0)
{
CSharpCompilation compilation = this.DeclaringCompilation;
Debug.Assert(compilation != null);
compilation.AddUsedAssemblies(diagnostics.DependenciesBag);
if (diagnostics.DiagnosticBag?.IsEmptyWithoutResolution == false)
{
compilation.DeclarationDiagnostics.AddRange(diagnostics.DiagnosticBag);
}
}
}
#region Use-Site Diagnostics
/// <summary>
/// True if the symbol has a use-site diagnostic with error severity.
/// </summary>
internal bool HasUseSiteError
{
get
{
var info = GetUseSiteInfo();
return info.DiagnosticInfo?.Severity == DiagnosticSeverity.Error;
}
}
/// <summary>
/// Returns diagnostic info that should be reported at the use site of the symbol, or default if there is none.
/// </summary>
internal virtual UseSiteInfo<AssemblySymbol> GetUseSiteInfo()
{
return default;
}
protected AssemblySymbol PrimaryDependency
{
get
{
AssemblySymbol dependency = this.ContainingAssembly;
if (dependency is object && dependency.CorLibrary == dependency)
{
return null;
}
return dependency;
}
}
/// <summary>
/// Return error code that has highest priority while calculating use site error for this symbol.
/// Supposed to be ErrorCode, but it causes inconsistent accessibility error.
/// </summary>
protected virtual int HighestPriorityUseSiteError
{
get
{
return int.MaxValue;
}
}
/// <summary>
/// Indicates that this symbol uses metadata that cannot be supported by the language.
///
/// Examples include:
/// - Pointer types in VB
/// - ByRef return type
/// - Required custom modifiers
///
/// This is distinguished from, for example, references to metadata symbols defined in assemblies that weren't referenced.
/// Symbols where this returns true can never be used successfully, and thus should never appear in any IDE feature.
///
/// This is set for metadata symbols, as follows:
/// Type - if a type is unsupported (e.g., a pointer type, etc.)
/// Method - parameter or return type is unsupported
/// Field - type is unsupported
/// Event - type is unsupported
/// Property - type is unsupported
/// Parameter - type is unsupported
/// </summary>
public virtual bool HasUnsupportedMetadata
{
get
{
return false;
}
}
/// <summary>
/// Merges given diagnostic to the existing result diagnostic.
/// </summary>
internal bool MergeUseSiteDiagnostics(ref DiagnosticInfo result, DiagnosticInfo info)
{
if (info == null)
{
return false;
}
if (info.Severity == DiagnosticSeverity.Error && (info.Code == HighestPriorityUseSiteError || HighestPriorityUseSiteError == Int32.MaxValue))
{
// this error is final, no other error can override it:
result = info;
return true;
}
if (result == null || result.Severity == DiagnosticSeverity.Warning && info.Severity == DiagnosticSeverity.Error)
{
// there could be an error of higher-priority
result = info;
return false;
}
// we have a second low-pri error, continue looking for a higher priority one
return false;
}
/// <summary>
/// Merges given diagnostic and dependencies to the existing result.
/// </summary>
internal bool MergeUseSiteInfo(ref UseSiteInfo<AssemblySymbol> result, UseSiteInfo<AssemblySymbol> info)
{
DiagnosticInfo diagnosticInfo = result.DiagnosticInfo;
bool retVal = MergeUseSiteDiagnostics(ref diagnosticInfo, info.DiagnosticInfo);
if (diagnosticInfo?.Severity == DiagnosticSeverity.Error)
{
result = new UseSiteInfo<AssemblySymbol>(diagnosticInfo);
return retVal;
}
var secondaryDependencies = result.SecondaryDependencies;
var primaryDependency = result.PrimaryDependency;
info.MergeDependencies(ref primaryDependency, ref secondaryDependencies);
result = new UseSiteInfo<AssemblySymbol>(diagnosticInfo, primaryDependency, secondaryDependencies);
Debug.Assert(!retVal);
return retVal;
}
/// <summary>
/// Reports specified use-site diagnostic to given diagnostic bag.
/// </summary>
/// <remarks>
/// This method should be the only method adding use-site diagnostics to a diagnostic bag.
/// It performs additional adjustments of the location for unification related diagnostics and
/// may be the place where to add more use-site location post-processing.
/// </remarks>
/// <returns>True if the diagnostic has error severity.</returns>
internal static bool ReportUseSiteDiagnostic(DiagnosticInfo info, DiagnosticBag diagnostics, Location location)
{
// Unlike VB the C# Dev11 compiler reports only a single unification error/warning.
// By dropping the location we effectively merge all unification use-site errors that have the same error code into a single error.
// The error message clearly explains how to fix the problem and reporting the error for each location wouldn't add much value.
if (info.Code == (int)ErrorCode.WRN_UnifyReferenceBldRev ||
info.Code == (int)ErrorCode.WRN_UnifyReferenceMajMin ||
info.Code == (int)ErrorCode.ERR_AssemblyMatchBadVersion)
{
location = NoLocation.Singleton;
}
diagnostics.Add(info, location);
return info.Severity == DiagnosticSeverity.Error;
}
internal static bool ReportUseSiteDiagnostic(DiagnosticInfo info, BindingDiagnosticBag diagnostics, Location location)
{
return diagnostics.ReportUseSiteDiagnostic(info, location);
}
/// <summary>
/// Derive use-site info from a type symbol.
/// </summary>
internal bool DeriveUseSiteInfoFromType(ref UseSiteInfo<AssemblySymbol> result, TypeSymbol type)
{
UseSiteInfo<AssemblySymbol> info = type.GetUseSiteInfo();
if (info.DiagnosticInfo?.Code == (int)ErrorCode.ERR_BogusType)
{
GetSymbolSpecificUnsupportedMetadataUseSiteErrorInfo(ref info);
}
return MergeUseSiteInfo(ref result, info);
}
private void GetSymbolSpecificUnsupportedMetadataUseSiteErrorInfo(ref UseSiteInfo<AssemblySymbol> info)
{
switch (this.Kind)
{
case SymbolKind.Field:
case SymbolKind.Method:
case SymbolKind.Property:
case SymbolKind.Event:
info = info.AdjustDiagnosticInfo(new CSDiagnosticInfo(ErrorCode.ERR_BindToBogus, this));
break;
}
}
private UseSiteInfo<AssemblySymbol> GetSymbolSpecificUnsupportedMetadataUseSiteErrorInfo()
{
var useSiteInfo = new UseSiteInfo<AssemblySymbol>(new CSDiagnosticInfo(ErrorCode.ERR_BogusType, string.Empty));
GetSymbolSpecificUnsupportedMetadataUseSiteErrorInfo(ref useSiteInfo);
return useSiteInfo;
}
internal bool DeriveUseSiteInfoFromType(ref UseSiteInfo<AssemblySymbol> result, TypeWithAnnotations type, AllowedRequiredModifierType allowedRequiredModifierType)
{
return DeriveUseSiteInfoFromType(ref result, type.Type) ||
DeriveUseSiteInfoFromCustomModifiers(ref result, type.CustomModifiers, allowedRequiredModifierType);
}
internal bool DeriveUseSiteInfoFromParameter(ref UseSiteInfo<AssemblySymbol> result, ParameterSymbol param)
{
return DeriveUseSiteInfoFromType(ref result, param.TypeWithAnnotations, AllowedRequiredModifierType.None) ||
DeriveUseSiteInfoFromCustomModifiers(ref result, param.RefCustomModifiers,
this is MethodSymbol method && method.MethodKind == MethodKind.FunctionPointerSignature ?
AllowedRequiredModifierType.System_Runtime_InteropServices_InAttribute | AllowedRequiredModifierType.System_Runtime_CompilerServices_OutAttribute :
AllowedRequiredModifierType.System_Runtime_InteropServices_InAttribute);
}
internal bool DeriveUseSiteInfoFromParameters(ref UseSiteInfo<AssemblySymbol> result, ImmutableArray<ParameterSymbol> parameters)
{
foreach (ParameterSymbol param in parameters)
{
if (DeriveUseSiteInfoFromParameter(ref result, param))
{
return true;
}
}
return false;
}
[Flags]
internal enum AllowedRequiredModifierType
{
None = 0,
System_Runtime_CompilerServices_Volatile = 1,
System_Runtime_InteropServices_InAttribute = 1 << 1,
System_Runtime_CompilerServices_IsExternalInit = 1 << 2,
System_Runtime_CompilerServices_OutAttribute = 1 << 3,
}
internal bool DeriveUseSiteInfoFromCustomModifiers(ref UseSiteInfo<AssemblySymbol> result, ImmutableArray<CustomModifier> customModifiers, AllowedRequiredModifierType allowedRequiredModifierType)
{
AllowedRequiredModifierType requiredModifiersFound = AllowedRequiredModifierType.None;
bool checkRequiredModifiers = true;
foreach (CustomModifier modifier in customModifiers)
{
NamedTypeSymbol modifierType = ((CSharpCustomModifier)modifier).ModifierSymbol;
if (checkRequiredModifiers && !modifier.IsOptional)
{
AllowedRequiredModifierType current = AllowedRequiredModifierType.None;
if ((allowedRequiredModifierType & AllowedRequiredModifierType.System_Runtime_InteropServices_InAttribute) != 0 &&
modifierType.IsWellKnownTypeInAttribute())
{
current = AllowedRequiredModifierType.System_Runtime_InteropServices_InAttribute;
}
else if ((allowedRequiredModifierType & AllowedRequiredModifierType.System_Runtime_CompilerServices_Volatile) != 0 &&
modifierType.SpecialType == SpecialType.System_Runtime_CompilerServices_IsVolatile)
{
current = AllowedRequiredModifierType.System_Runtime_CompilerServices_Volatile;
}
else if ((allowedRequiredModifierType & AllowedRequiredModifierType.System_Runtime_CompilerServices_IsExternalInit) != 0 &&
modifierType.IsWellKnownTypeIsExternalInit())
{
current = AllowedRequiredModifierType.System_Runtime_CompilerServices_IsExternalInit;
}
else if ((allowedRequiredModifierType & AllowedRequiredModifierType.System_Runtime_CompilerServices_OutAttribute) != 0 &&
modifierType.IsWellKnownTypeOutAttribute())
{
current = AllowedRequiredModifierType.System_Runtime_CompilerServices_OutAttribute;
}
if (current == AllowedRequiredModifierType.None ||
(current != requiredModifiersFound && requiredModifiersFound != AllowedRequiredModifierType.None)) // At the moment we don't support applying different allowed modreqs to the same target.
{
if (MergeUseSiteInfo(ref result, GetSymbolSpecificUnsupportedMetadataUseSiteErrorInfo()))
{
return true;
}
checkRequiredModifiers = false;
}
requiredModifiersFound |= current;
}
// Unbound generic type is valid as a modifier, let's not report any use site diagnostics because of that.
if (modifierType.IsUnboundGenericType)
{
modifierType = modifierType.OriginalDefinition;
}
if (DeriveUseSiteInfoFromType(ref result, modifierType))
{
return true;
}
}
return false;
}
internal static bool GetUnificationUseSiteDiagnosticRecursive<T>(ref DiagnosticInfo result, ImmutableArray<T> types, Symbol owner, ref HashSet<TypeSymbol> checkedTypes) where T : TypeSymbol
{
foreach (var t in types)
{
if (t.GetUnificationUseSiteDiagnosticRecursive(ref result, owner, ref checkedTypes))
{
return true;
}
}
return false;
}
internal static bool GetUnificationUseSiteDiagnosticRecursive(ref DiagnosticInfo result, ImmutableArray<TypeWithAnnotations> types, Symbol owner, ref HashSet<TypeSymbol> checkedTypes)
{
foreach (var t in types)
{
if (t.GetUnificationUseSiteDiagnosticRecursive(ref result, owner, ref checkedTypes))
{
return true;
}
}
return false;
}
internal static bool GetUnificationUseSiteDiagnosticRecursive(ref DiagnosticInfo result, ImmutableArray<CustomModifier> modifiers, Symbol owner, ref HashSet<TypeSymbol> checkedTypes)
{
foreach (var modifier in modifiers)
{
if (((CSharpCustomModifier)modifier).ModifierSymbol.GetUnificationUseSiteDiagnosticRecursive(ref result, owner, ref checkedTypes))
{
return true;
}
}
return false;
}
internal static bool GetUnificationUseSiteDiagnosticRecursive(ref DiagnosticInfo result, ImmutableArray<ParameterSymbol> parameters, Symbol owner, ref HashSet<TypeSymbol> checkedTypes)
{
foreach (var parameter in parameters)
{
if (parameter.TypeWithAnnotations.GetUnificationUseSiteDiagnosticRecursive(ref result, owner, ref checkedTypes) ||
GetUnificationUseSiteDiagnosticRecursive(ref result, parameter.RefCustomModifiers, owner, ref checkedTypes))
{
return true;
}
}
return false;
}
internal static bool GetUnificationUseSiteDiagnosticRecursive(ref DiagnosticInfo result, ImmutableArray<TypeParameterSymbol> typeParameters, Symbol owner, ref HashSet<TypeSymbol> checkedTypes)
{
foreach (var typeParameter in typeParameters)
{
if (GetUnificationUseSiteDiagnosticRecursive(ref result, typeParameter.ConstraintTypesNoUseSiteDiagnostics, owner, ref checkedTypes))
{
return true;
}
}
return false;
}
#endregion
/// <summary>
/// True if this symbol has been marked with the <see cref="ObsoleteAttribute"/> attribute.
/// This property returns <see cref="ThreeState.Unknown"/> if the <see cref="ObsoleteAttribute"/> attribute hasn't been cracked yet.
/// </summary>
internal ThreeState ObsoleteState
{
get
{
switch (ObsoleteKind)
{
case ObsoleteAttributeKind.None:
case ObsoleteAttributeKind.Experimental:
return ThreeState.False;
case ObsoleteAttributeKind.Uninitialized:
return ThreeState.Unknown;
default:
return ThreeState.True;
}
}
}
internal ObsoleteAttributeKind ObsoleteKind
{
get
{
var data = this.ObsoleteAttributeData;
return (data == null) ? ObsoleteAttributeKind.None : data.Kind;
}
}
/// <summary>
/// Returns data decoded from <see cref="ObsoleteAttribute"/> attribute or null if there is no <see cref="ObsoleteAttribute"/> attribute.
/// This property returns <see cref="Microsoft.CodeAnalysis.ObsoleteAttributeData.Uninitialized"/> if attribute arguments haven't been decoded yet.
/// </summary>
internal abstract ObsoleteAttributeData ObsoleteAttributeData { get; }
/// <summary>
/// Returns true and a <see cref="string"/> from the first <see cref="GuidAttribute"/> on the symbol,
/// the string might be null or an invalid guid representation. False,
/// if there is no <see cref="GuidAttribute"/> with string argument.
/// </summary>
internal bool GetGuidStringDefaultImplementation(out string guidString)
{
foreach (var attrData in this.GetAttributes())
{
if (attrData.IsTargetAttribute(this, AttributeDescription.GuidAttribute))
{
if (attrData.TryGetGuidAttributeValue(out guidString))
{
return true;
}
}
}
guidString = null;
return false;
}
public string ToDisplayString(SymbolDisplayFormat format = null)
{
return SymbolDisplay.ToDisplayString(ISymbol, format);
}
public ImmutableArray<SymbolDisplayPart> ToDisplayParts(SymbolDisplayFormat format = null)
{
return SymbolDisplay.ToDisplayParts(ISymbol, format);
}
public string ToMinimalDisplayString(
SemanticModel semanticModel,
int position,
SymbolDisplayFormat format = null)
{
return SymbolDisplay.ToMinimalDisplayString(ISymbol, semanticModel, position, format);
}
public ImmutableArray<SymbolDisplayPart> ToMinimalDisplayParts(
SemanticModel semanticModel,
int position,
SymbolDisplayFormat format = null)
{
return SymbolDisplay.ToMinimalDisplayParts(ISymbol, semanticModel, position, format);
}
internal static void ReportErrorIfHasConstraints(
SyntaxList<TypeParameterConstraintClauseSyntax> constraintClauses, DiagnosticBag diagnostics)
{
if (constraintClauses.Count > 0)
{
diagnostics.Add(
ErrorCode.ERR_ConstraintOnlyAllowedOnGenericDecl,
constraintClauses[0].WhereKeyword.GetLocation());
}
}
internal static void CheckForBlockAndExpressionBody(
CSharpSyntaxNode block,
CSharpSyntaxNode expression,
CSharpSyntaxNode syntax,
BindingDiagnosticBag diagnostics)
{
if (block != null && expression != null)
{
diagnostics.Add(ErrorCode.ERR_BlockBodyAndExpressionBody, syntax.GetLocation());
}
}
[Flags]
internal enum ReservedAttributes
{
DynamicAttribute = 1 << 1,
IsReadOnlyAttribute = 1 << 2,
IsUnmanagedAttribute = 1 << 3,
IsByRefLikeAttribute = 1 << 4,
TupleElementNamesAttribute = 1 << 5,
NullableAttribute = 1 << 6,
NullableContextAttribute = 1 << 7,
NullablePublicOnlyAttribute = 1 << 8,
NativeIntegerAttribute = 1 << 9,
CaseSensitiveExtensionAttribute = 1 << 10,
}
internal bool ReportExplicitUseOfReservedAttributes(in DecodeWellKnownAttributeArguments<AttributeSyntax, CSharpAttributeData, AttributeLocation> arguments, ReservedAttributes reserved)
{
var attribute = arguments.Attribute;
var diagnostics = (BindingDiagnosticBag)arguments.Diagnostics;
if ((reserved & ReservedAttributes.DynamicAttribute) != 0 &&
attribute.IsTargetAttribute(this, AttributeDescription.DynamicAttribute))
{
// DynamicAttribute should not be set explicitly.
diagnostics.Add(ErrorCode.ERR_ExplicitDynamicAttr, arguments.AttributeSyntaxOpt.Location);
}
else if ((reserved & ReservedAttributes.IsReadOnlyAttribute) != 0 &&
reportExplicitUseOfReservedAttribute(attribute, arguments, AttributeDescription.IsReadOnlyAttribute))
{
}
else if ((reserved & ReservedAttributes.IsUnmanagedAttribute) != 0 &&
reportExplicitUseOfReservedAttribute(attribute, arguments, AttributeDescription.IsUnmanagedAttribute))
{
}
else if ((reserved & ReservedAttributes.IsByRefLikeAttribute) != 0 &&
reportExplicitUseOfReservedAttribute(attribute, arguments, AttributeDescription.IsByRefLikeAttribute))
{
}
else if ((reserved & ReservedAttributes.TupleElementNamesAttribute) != 0 &&
attribute.IsTargetAttribute(this, AttributeDescription.TupleElementNamesAttribute))
{
diagnostics.Add(ErrorCode.ERR_ExplicitTupleElementNamesAttribute, arguments.AttributeSyntaxOpt.Location);
}
else if ((reserved & ReservedAttributes.NullableAttribute) != 0 &&
attribute.IsTargetAttribute(this, AttributeDescription.NullableAttribute))
{
// NullableAttribute should not be set explicitly.
diagnostics.Add(ErrorCode.ERR_ExplicitNullableAttribute, arguments.AttributeSyntaxOpt.Location);
}
else if ((reserved & ReservedAttributes.NullableContextAttribute) != 0 &&
reportExplicitUseOfReservedAttribute(attribute, arguments, AttributeDescription.NullableContextAttribute))
{
}
else if ((reserved & ReservedAttributes.NullablePublicOnlyAttribute) != 0 &&
reportExplicitUseOfReservedAttribute(attribute, arguments, AttributeDescription.NullablePublicOnlyAttribute))
{
}
else if ((reserved & ReservedAttributes.NativeIntegerAttribute) != 0 &&
reportExplicitUseOfReservedAttribute(attribute, arguments, AttributeDescription.NativeIntegerAttribute))
{
}
else if ((reserved & ReservedAttributes.CaseSensitiveExtensionAttribute) != 0 &&
attribute.IsTargetAttribute(this, AttributeDescription.CaseSensitiveExtensionAttribute))
{
// ExtensionAttribute should not be set explicitly.
diagnostics.Add(ErrorCode.ERR_ExplicitExtension, arguments.AttributeSyntaxOpt.Location);
}
else
{
return false;
}
return true;
bool reportExplicitUseOfReservedAttribute(CSharpAttributeData attribute, in DecodeWellKnownAttributeArguments<AttributeSyntax, CSharpAttributeData, AttributeLocation> arguments, in AttributeDescription attributeDescription)
{
if (attribute.IsTargetAttribute(this, attributeDescription))
{
// Do not use '{FullName}'. This is reserved for compiler usage.
diagnostics.Add(ErrorCode.ERR_ExplicitReservedAttr, arguments.AttributeSyntaxOpt.Location, attributeDescription.FullName);
return true;
}
return false;
}
}
internal virtual byte? GetNullableContextValue()
{
return GetLocalNullableContextValue() ?? ContainingSymbol?.GetNullableContextValue();
}
internal virtual byte? GetLocalNullableContextValue()
{
return null;
}
internal void GetCommonNullableValues(CSharpCompilation compilation, ref MostCommonNullableValueBuilder builder)
{
switch (this.Kind)
{
case SymbolKind.NamedType:
if (compilation.ShouldEmitNullableAttributes(this))
{
builder.AddValue(this.GetLocalNullableContextValue());
}
break;
case SymbolKind.Event:
if (compilation.ShouldEmitNullableAttributes(this))
{
builder.AddValue(((EventSymbol)this).TypeWithAnnotations);
}
break;
case SymbolKind.Field:
var field = (FieldSymbol)this;
if (field is TupleElementFieldSymbol tupleElement)
{
field = tupleElement.TupleUnderlyingField;
}
if (compilation.ShouldEmitNullableAttributes(field))
{
builder.AddValue(field.TypeWithAnnotations);
}
break;
case SymbolKind.Method:
if (compilation.ShouldEmitNullableAttributes(this))
{
builder.AddValue(this.GetLocalNullableContextValue());
}
break;
case SymbolKind.Property:
if (compilation.ShouldEmitNullableAttributes(this))
{
builder.AddValue(((PropertySymbol)this).TypeWithAnnotations);
// Attributes are not emitted for property parameters.
}
break;
case SymbolKind.Parameter:
builder.AddValue(((ParameterSymbol)this).TypeWithAnnotations);
break;
case SymbolKind.TypeParameter:
if (this is SourceTypeParameterSymbolBase typeParameter)
{
builder.AddValue(typeParameter.GetSynthesizedNullableAttributeValue());
foreach (var constraintType in typeParameter.ConstraintTypesNoUseSiteDiagnostics)
{
builder.AddValue(constraintType);
}
}
break;
}
}
internal bool ShouldEmitNullableContextValue(out byte value)
{
byte? localValue = GetLocalNullableContextValue();
if (localValue == null)
{
value = 0;
return false;
}
value = localValue.GetValueOrDefault();
byte containingValue = ContainingSymbol?.GetNullableContextValue() ?? 0;
return value != containingValue;
}
#nullable enable
/// <summary>
/// True if the symbol is declared outside of the scope of the containing
/// symbol
/// </summary>
internal static bool IsCaptured(Symbol variable, SourceMethodSymbol containingSymbol)
{
switch (variable.Kind)
{
case SymbolKind.Field:
case SymbolKind.Property:
case SymbolKind.Event:
// Range variables are not captured, but their underlying parameters
// may be. If this is a range underlying parameter it will be a
// ParameterSymbol, not a RangeVariableSymbol.
case SymbolKind.RangeVariable:
return false;
case SymbolKind.Local:
if (((LocalSymbol)variable).IsConst)
{
return false;
}
break;
case SymbolKind.Parameter:
break;
case SymbolKind.Method:
if (variable is LocalFunctionSymbol localFunction)
{
// calling a static local function doesn't require capturing state
if (localFunction.IsStatic)
{
return false;
}
break;
}
throw ExceptionUtilities.UnexpectedValue(variable);
default:
throw ExceptionUtilities.UnexpectedValue(variable.Kind);
}
// Walk up the containing symbols until we find the target function, in which
// case the variable is not captured by the target function, or null, in which
// case it is.
for (var currentFunction = variable.ContainingSymbol;
(object)currentFunction != null;
currentFunction = currentFunction.ContainingSymbol)
{
if (ReferenceEquals(currentFunction, containingSymbol))
{
return false;
}
}
return true;
}
#nullable disable
bool ISymbolInternal.IsStatic
{
get { return this.IsStatic; }
}
bool ISymbolInternal.IsVirtual
{
get { return this.IsVirtual; }
}
bool ISymbolInternal.IsOverride
{
get { return this.IsOverride; }
}
bool ISymbolInternal.IsAbstract
{
get
{
return this.IsAbstract;
}
}
Accessibility ISymbolInternal.DeclaredAccessibility
{
get
{
return this.DeclaredAccessibility;
}
}
public abstract void Accept(CSharpSymbolVisitor visitor);
public abstract TResult Accept<TResult>(CSharpSymbolVisitor<TResult> visitor);
string IFormattable.ToString(string format, IFormatProvider formatProvider)
{
return ToString();
}
protected abstract ISymbol CreateISymbol();
internal ISymbol ISymbol
{
get
{
if (_lazyISymbol is null)
{
Interlocked.CompareExchange(ref _lazyISymbol, CreateISymbol(), null);
}
return _lazyISymbol;
}
}
}
}
| mavasani/roslyn | src/Compilers/CSharp/Portable/Symbols/Symbol.cs | C# | mit | 68,726 |
//= require locastyle/templates/_popover.jst.eco
//= require locastyle/templates/_dropdown.jst.eco
| diegoeis/locawebstyle | source/assets/javascripts/templates.js | JavaScript | mit | 99 |
// # Mail API
// API for sending Mail
var Promise = require('bluebird'),
pipeline = require('../utils/pipeline'),
errors = require('../errors'),
mail = require('../mail'),
Models = require('../models'),
utils = require('./utils'),
notifications = require('./notifications'),
i18n = require('../i18n'),
docName = 'mail',
mailer,
apiMail;
/**
* Send mail helper
*/
function sendMail(object) {
if (!(mailer instanceof mail.GhostMailer)) {
mailer = new mail.GhostMailer();
}
return mailer.send(object.mail[0].message).catch(function (err) {
if (mailer.state.usingDirect) {
notifications.add(
{notifications: [{
type: 'warn',
message: [
i18n.t('warnings.index.unableToSendEmail'),
i18n.t('common.seeLinkForInstructions',
{link: '<a href=\'https://docs.ghost.org/v1.0.0/docs/mail-config\' target=\'_blank\'>Checkout our mail configuration docs!</a>'})
].join(' ')
}]},
{context: {internal: true}}
);
}
return Promise.reject(new errors.EmailError({err: err}));
});
}
/**
* ## Mail API Methods
*
* **See:** [API Methods](index.js.html#api%20methods)
* @typedef Mail
* @param mail
*/
apiMail = {
/**
* ### Send
* Send an email
*
* @public
* @param {Mail} object details of the email to send
* @returns {Promise}
*/
send: function (object, options) {
var tasks;
/**
* ### Format Response
* @returns {Mail} mail
*/
function formatResponse(data) {
delete object.mail[0].options;
// Sendmail returns extra details we don't need and that don't convert to JSON
delete object.mail[0].message.transport;
object.mail[0].status = {
message: data.message
};
return object;
}
/**
* ### Send Mail
*/
function send() {
return sendMail(object, options);
}
tasks = [
utils.handlePermissions(docName, 'send'),
send,
formatResponse
];
return pipeline(tasks, options || {});
},
/**
* ### SendTest
* Send a test email
*
* @public
* @param {Object} options required property 'to' which contains the recipient address
* @returns {Promise}
*/
sendTest: function (options) {
var tasks;
/**
* ### Model Query
*/
function modelQuery() {
return Models.User.findOne({id: options.context.user});
}
/**
* ### Generate content
*/
function generateContent(result) {
return mail.utils.generateContent({template: 'test'}).then(function (content) {
var payload = {
mail: [{
message: {
to: result.get('email'),
subject: i18n.t('common.api.mail.testGhostEmail'),
html: content.html,
text: content.text
}
}]
};
return payload;
});
}
/**
* ### Send mail
*/
function send(payload) {
return sendMail(payload, options);
}
tasks = [
modelQuery,
generateContent,
send
];
return pipeline(tasks);
}
};
module.exports = apiMail;
| jordanwalsh23/jordanwalsh23.github.io | core/server/api/mail.js | JavaScript | mit | 3,802 |
/**
* Lo-Dash 2.4.1 (Custom Build) <http://lodash.com/>
* Build: `lodash modularize exports="amd" -o ./compat/`
* Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE>
* Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <http://lodash.com/license>
*/
define(['../internals/baseEach', '../functions/createCallback', '../objects/isArray'], function(baseEach, createCallback, isArray) {
/**
* Creates an array of values by running each element in the collection
* through the callback. The callback is bound to `thisArg` and invoked with
* three arguments; (value, index|key, collection).
*
* If a property name is provided for `callback` the created "_.pluck" style
* callback will return the property value of the given element.
*
* If an object is provided for `callback` the created "_.where" style callback
* will return `true` for elements that have the properties of the given object,
* else `false`.
*
* @static
* @memberOf _
* @alias collect
* @category Collections
* @param {Array|Object|string} collection The collection to iterate over.
* @param {Function|Object|string} [callback=identity] The function called
* per iteration. If a property name or object is provided it will be used
* to create a "_.pluck" or "_.where" style callback, respectively.
* @param {*} [thisArg] The `this` binding of `callback`.
* @returns {Array} Returns a new array of the results of each `callback` execution.
* @example
*
* _.map([1, 2, 3], function(num) { return num * 3; });
* // => [3, 6, 9]
*
* _.map({ 'one': 1, 'two': 2, 'three': 3 }, function(num) { return num * 3; });
* // => [3, 6, 9] (property order is not guaranteed across environments)
*
* var characters = [
* { 'name': 'barney', 'age': 36 },
* { 'name': 'fred', 'age': 40 }
* ];
*
* // using "_.pluck" callback shorthand
* _.map(characters, 'name');
* // => ['barney', 'fred']
*/
function map(collection, callback, thisArg) {
var index = -1,
length = collection ? collection.length : 0,
result = Array(typeof length == 'number' ? length : 0);
callback = createCallback(callback, thisArg, 3);
if (isArray(collection)) {
while (++index < length) {
result[index] = callback(collection[index], index, collection);
}
} else {
baseEach(collection, function(value, key, collection) {
result[++index] = callback(value, key, collection);
});
}
return result;
}
return map;
});
| john-bixly/Morsel | app/vendor/lodash-amd/compat/collections/map.js | JavaScript | mit | 2,696 |
// Copyright 2015-2018 Hans Dembinski
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)
//[ guide_histogram_serialization
#include <boost/archive/text_iarchive.hpp>
#include <boost/archive/text_oarchive.hpp>
#include <boost/histogram.hpp>
#include <boost/histogram/serialization.hpp> // includes serialization code
#include <cassert>
#include <sstream>
int main() {
using namespace boost::histogram;
auto a = make_histogram(axis::regular<>(3, -1.0, 1.0, "axis 0"),
axis::integer<>(0, 2, "axis 1"));
a(0.5, 1);
std::string buf; // to hold persistent representation
// store histogram
{
std::ostringstream os;
boost::archive::text_oarchive oa(os);
oa << a;
buf = os.str();
}
auto b = decltype(a)(); // create a default-constructed second histogram
assert(b != a); // b is empty, a is not
// load histogram
{
std::istringstream is(buf);
boost::archive::text_iarchive ia(is);
ia >> b;
}
assert(b == a); // now b is equal to a
}
//]
| davehorton/drachtio-server | deps/boost_1_77_0/libs/histogram/examples/guide_histogram_serialization.cpp | C++ | mit | 1,128 |
<?php
namespace Spy\TimelineBundle\DependencyInjection;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\Config\FileLocator;
use Symfony\Component\DependencyInjection\Reference;
use Symfony\Component\HttpKernel\DependencyInjection\Extension;
use Symfony\Component\DependencyInjection\Loader;
use Symfony\Component\Config\Definition\Processor;
class SpyTimelineExtension extends Extension
{
/**
* {@inheritdoc}
*/
public function load(array $configs, ContainerBuilder $container)
{
$processor = new Processor();
$configuration = new Configuration();
$config = $processor->processConfiguration($configuration, $configs);
$loader = new Loader\XmlFileLoader($container, new FileLocator(__DIR__.'/../Resources/config/services'));
$loader->load('filter.xml');
$loader->load('notification.xml');
$loader->load('paginator.xml');
$loader->load('resolve_component.xml');
$loader->load('result_builder.xml');
$loader->load('spread.xml');
$loader->load('twig.xml');
$driver = null;
if (isset($config['drivers'])) {
if (isset($config['drivers']['orm'])) {
$this->loadORMDriver($container, $loader, $config['drivers']['orm']);
$driver = 'orm';
} elseif (isset($config['drivers']['odm'])) {
$this->loadODMDriver($container, $loader, $config['drivers']['odm']);
$driver = 'odm';
} elseif (isset($config['drivers']['redis'])) {
$this->loadRedisDriver($container, $loader, $config['drivers']['redis']);
$driver = 'redis';
}
}
if (!$driver) {
$timelineManager = $config['timeline_manager'];
$actionManager = $config['action_manager'];
} else {
$timelineManager = isset($config['timeline_manager']) ? $config['timeline_manager'] : sprintf('spy_timeline.timeline_manager.%s', $driver);
$actionManager = isset($config['action_manager']) ? $config['action_manager'] : sprintf('spy_timeline.action_manager.%s', $driver);
}
$container->setAlias('spy_timeline.timeline_manager', $timelineManager);
$container->setAlias('spy_timeline.action_manager', $actionManager);
// pager
if (isset($config['paginator']) && !empty($config['paginator'])) {
$paginator = $config['paginator'];
} else {
$paginator = sprintf('spy_timeline.pager.%s', $driver);
}
// filters
$filters = isset($config['filters']) ? $config['filters'] : array();
$filterManager = $container->getDefinition('spy_timeline.filter.manager');
if (isset($filters['duplicate_key'])) {
$filter = $filters['duplicate_key'];
$service = $container->getDefinition($filter['service']);
$service->addMethodCall('setPriority', array($filter['priority']));
$filterManager->addMethodCall('add', array($service));
}
if (isset($filters['data_hydrator'])) {
$filter = $filters['data_hydrator'];
$service = $container->getDefinition($filter['service']);
$service->addArgument($filter['filter_unresolved']);
$service->addMethodCall('setPriority', array($filter['priority']));
$container->setParameter('spy_timeline.filter.data_hydrator.locators_config', $filter['locators']);
$filterManager->addMethodCall('add', array($service));
}
// result builder
$definition = $container->getDefinition('spy_timeline.result_builder');
$definition->addArgument($container->getDefinition(sprintf('spy_timeline.query_executor.%s', $driver)));
$definition->addArgument($filterManager);
if ($paginator) {
$definition->addMethodCall('setPager', array($container->getDefinition($paginator)));
}
// spreads
$container->setAlias('spy_timeline.spread.deployer', $config['spread']['deployer']);
$container->setParameter('spy_timeline.spread.deployer.delivery', $config['spread']['delivery']);
$container->setParameter('spy_timeline.spread.on_subject', $config['spread']['on_subject']);
$container->setParameter('spy_timeline.spread.on_global_context', $config['spread']['on_global_context']);
$container->setParameter('spy_timeline.spread.deployer.batch_size', $config['spread']['batch_size']);
// notifiers
$notifiers = $config['notifiers'];
$definition = $container->getDefinition($config['spread']['deployer']);
foreach ($notifiers as $notifier) {
$definition->addMethodCall('addNotifier', array(new Reference($notifier)));
}
//twig
$render = $config['render'];
$container->setParameter('spy_timeline.render.path', $render['path']);
$container->setParameter('spy_timeline.render.fallback', $render['fallback']);
$container->setParameter('spy_timeline.render.i18n.fallback', isset($render['i18n']) && isset($render['i18n']['fallback']) ? $render['i18n']['fallback'] : null);
$container->setParameter('spy_timeline.twig.resources', $render['resources']);
// query_builder
$queryBuilder = $config['query_builder'];
$container->setParameter('spy_timeline.query_builder.factory.class', $queryBuilder['classes']['factory']);
$container->setParameter('spy_timeline.query_builder.asserter.class', $queryBuilder['classes']['asserter']);
$container->setParameter('spy_timeline.query_builder.operator.class', $queryBuilder['classes']['operator']);
// resolve_component
$resolveComponent = $config['resolve_component'];
$container->setAlias('spy_timeline.resolve_component.resolver', $resolveComponent['resolver']);
// sets a parameter which we use in the addRegistryCompilerPass (there should be a cleaner way)
if ($resolveComponent['resolver'] === 'spy_timeline.resolve_component.doctrine') {
$container->setParameter('spy_timeline.resolve_component.doctrine_registries', true);
}
}
private function loadORMDriver($container, $loader, $config)
{
$classes = isset($config['classes']) ? $config['classes'] : array();
$parameters = array(
'timeline', 'action', 'component', 'action_component',
);
foreach ($parameters as $parameter) {
if (isset($classes[$parameter])) {
$container->setParameter(sprintf('spy_timeline.class.%s', $parameter), $classes[$parameter]);
}
}
$container->setAlias('spy_timeline.driver.object_manager', $config['object_manager']);
$loader->load('driver/orm.xml');
if ($config['post_load_listener']) {
$loader->load('driver/doctrine/orm_listener.xml');
}
if (isset($classes['query_builder'])) {
$container->setParameter('spy_timeline.query_builder.class', $classes['query_builder']);
}
$loader->load('query_builder.xml');
$container->setAlias('spy_timeline.query_builder', 'spy_timeline.query_builder.orm');
}
private function loadODMDriver($container, $loader, $config)
{
$classes = isset($config['classes']) ? $config['classes'] : array();
$parameters = array(
'timeline', 'action', 'component', 'action_component',
);
foreach ($parameters as $parameter) {
if (isset($classes[$parameter])) {
$container->setParameter(sprintf('spy_timeline.class.%s', $parameter), $classes[$parameter]);
}
}
$container->setAlias('spy_timeline.driver.object_manager', $config['object_manager']);
$loader->load('driver/odm.xml');
if ($config['post_load_listener']) {
$loader->load('driver/doctrine/odm_listener.xml');
}
}
private function loadRedisDriver($container, $loader, $config)
{
$classes = isset($config['classes']) ? $config['classes'] : array();
$parameters = array(
'action', 'component', 'action_component',
);
foreach ($parameters as $parameter) {
if (isset($classes[$parameter])) {
$container->setParameter(sprintf('spy_timeline.class.%s', $parameter), $classes[$parameter]);
}
}
$container->setParameter('spy_timeline.driver.redis.pipeline', $config['pipeline']);
$container->setParameter('spy_timeline.driver.redis.prefix', $config['prefix']);
$container->setAlias('spy_timeline.driver.redis.client', $config['client']);
$loader->load('driver/redis.xml');
}
}
| saberyounis/Sonata-Project | vendor/stephpy/timeline-bundle/DependencyInjection/SpyTimelineExtension.php | PHP | mit | 8,842 |
window.Reactable = require('../build/reactable.common');
| wemcdonald/reactable | src/reactable.global.js | JavaScript | mit | 57 |
from __future__ import absolute_import
import numpy
import chainer
from chainer import _backend
from chainer.backends import _cpu
from chainer.configuration import config
_ideep_version = None
_error = None
try:
import ideep4py as ideep # NOQA
from ideep4py import mdarray # type: ignore # NOQA
_ideep_version = 2 if hasattr(ideep, '__version__') else 1
except ImportError as e:
_error = e
_ideep_version = None
class mdarray(object): # type: ignore
pass # for type testing
class Intel64Device(_backend.Device):
"""Device for Intel64 (Intel Architecture) backend with iDeep"""
xp = numpy
name = '@intel64'
supported_array_types = (numpy.ndarray, mdarray)
__hash__ = _backend.Device.__hash__
def __init__(self):
check_ideep_available()
super(Intel64Device, self).__init__()
@staticmethod
def from_array(array):
if isinstance(array, mdarray):
return Intel64Device()
return None
def __eq__(self, other):
return isinstance(other, Intel64Device)
def __repr__(self):
return '<{}>'.format(self.__class__.__name__)
def send_array(self, array):
if isinstance(array, ideep.mdarray):
return array
if not isinstance(array, numpy.ndarray):
array = _cpu._to_cpu(array) # to numpy.ndarray
if (isinstance(array, numpy.ndarray) and
array.ndim in (1, 2, 4) and
0 not in array.shape):
# TODO(kmaehashi): Remove ndim validation once iDeep has fixed.
# Currently iDeep only supports (1, 2, 4)-dim arrays.
# Note that array returned from `ideep.array` may not be an
# iDeep mdarray, e.g., when the dtype is not float32.
array = ideep.array(array, itype=ideep.wgt_array)
return array
def is_array_supported(self, array):
return isinstance(array, (numpy.ndarray, mdarray))
# ------------------------------------------------------------------------------
# ideep configuration
# ------------------------------------------------------------------------------
_SHOULD_USE_IDEEP = {
'==always': {'always': True, 'auto': False, 'never': False},
'>=auto': {'always': True, 'auto': True, 'never': False},
}
def is_ideep_available():
"""Returns if iDeep is available.
Returns:
bool: ``True`` if the supported version of iDeep is installed.
"""
return _ideep_version is not None and _ideep_version == 2
def check_ideep_available():
"""Checks if iDeep is available.
When iDeep is correctly set up, nothing happens.
Otherwise it raises ``RuntimeError``.
"""
if _ideep_version is None:
# If the error is missing shared object, append a message to
# redirect to the ideep website.
msg = str(_error)
if 'cannot open shared object file' in msg:
msg += ('\n\nEnsure iDeep requirements are satisfied: '
'https://github.com/intel/ideep')
raise RuntimeError(
'iDeep is not available.\n'
'Reason: {}: {}'.format(type(_error).__name__, msg))
elif _ideep_version != 2:
raise RuntimeError(
'iDeep is not available.\n'
'Reason: Unsupported iDeep version ({})'.format(_ideep_version))
def should_use_ideep(level):
"""Determines if we should use iDeep.
This function checks ``chainer.config.use_ideep`` and availability
of ``ideep4py`` package.
Args:
level (str): iDeep use level. It must be either ``'==always'`` or
``'>=auto'``. ``'==always'`` indicates that the ``use_ideep``
config must be ``'always'`` to use iDeep.
Returns:
bool: ``True`` if the caller should use iDeep.
"""
if not is_ideep_available():
return False
# TODO(niboshi):
# Add lowest_version argument and compare with ideep version.
# Currently ideep does not provide a way to retrieve its version.
if level not in _SHOULD_USE_IDEEP:
raise ValueError('invalid iDeep use level: %s '
'(must be either of "==always" or ">=auto")' %
repr(level))
flags = _SHOULD_USE_IDEEP[level]
use_ideep = config.use_ideep
if use_ideep not in flags:
raise ValueError('invalid use_ideep configuration: %s '
'(must be either of "always", "auto", or "never")' %
repr(use_ideep))
return flags[use_ideep]
def inputs_all_ready(inputs, supported_ndim=(2, 4)):
"""Checks if input arrays are supported for an iDeep primitive.
Before calling an iDeep primitive (e.g., ``ideep4py.linear.Forward``), you
need to make sure that all input arrays are ready for the primitive by
calling this function.
Information to be checked includes array types, dimesions and data types.
The function checks ``inputs`` info and ``supported_ndim``.
Inputs to be tested can be any of ``Variable``, ``numpy.ndarray`` or
``ideep4py.mdarray``. However, all inputs to iDeep primitives must be
``ideep4py.mdarray``. Callers of iDeep primitives are responsible of
converting all inputs to ``ideep4py.mdarray``.
Args:
inputs (sequence of arrays or variables):
Inputs to be checked.
supported_ndim (tuple of ints):
Supported ndim values for the iDeep primitive.
Returns:
bool: ``True`` if all conditions meet.
"""
def _is_supported_array_type(a):
return isinstance(a, ideep.mdarray) or ideep.check_type([a])
if not is_ideep_available():
return False
inputs = [x.data if isinstance(x, chainer.variable.Variable)
else x for x in inputs]
return (ideep.check_ndim(inputs, supported_ndim)
and all([_is_supported_array_type(a) for a in inputs]))
| pfnet/chainer | chainer/backends/intel64.py | Python | mit | 5,920 |
package com.prolificinteractive.materialcalendarview;
import android.animation.Animator;
import android.content.res.Resources;
import android.text.TextUtils;
import android.util.TypedValue;
import android.view.ViewPropertyAnimator;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.widget.TextView;
import com.prolificinteractive.materialcalendarview.format.TitleFormatter;
class TitleChanger {
public static final int DEFAULT_ANIMATION_DELAY = 400;
public static final int DEFAULT_Y_TRANSLATION_DP = 20;
private final TextView title;
private TitleFormatter titleFormatter;
private final int animDelay;
private final int animDuration;
private final int translate;
private final Interpolator interpolator = new DecelerateInterpolator(2f);
private int orientation = MaterialCalendarView.VERTICAL;
private long lastAnimTime = 0;
private CalendarDay previousMonth = null;
public TitleChanger(TextView title) {
this.title = title;
Resources res = title.getResources();
animDelay = DEFAULT_ANIMATION_DELAY;
animDuration = res.getInteger(android.R.integer.config_shortAnimTime) / 2;
translate = (int) TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, DEFAULT_Y_TRANSLATION_DP, res.getDisplayMetrics()
);
}
public void change(final CalendarDay currentMonth) {
long currentTime = System.currentTimeMillis();
if (currentMonth == null) {
return;
}
if (TextUtils.isEmpty(title.getText()) || (currentTime - lastAnimTime) < animDelay) {
doChange(currentTime, currentMonth, false);
}
if (currentMonth.equals(previousMonth) ||
(currentMonth.getMonth() == previousMonth.getMonth()
&& currentMonth.getYear() == previousMonth.getYear())) {
return;
}
doChange(currentTime, currentMonth, true);
}
private void doChange(final long now, final CalendarDay currentMonth, boolean animate) {
title.animate().cancel();
doTranslation(title, 0);
title.setAlpha(1);
lastAnimTime = now;
final CharSequence newTitle = titleFormatter.format(currentMonth);
if (!animate) {
title.setText(newTitle);
} else {
final int translation = translate * (previousMonth.isBefore(currentMonth) ? 1 : -1);
final ViewPropertyAnimator viewPropertyAnimator = title.animate();
if (orientation == MaterialCalendarView.HORIZONTAL) {
viewPropertyAnimator.translationX(translation * -1);
} else {
viewPropertyAnimator.translationY(translation * -1);
}
viewPropertyAnimator
.alpha(0)
.setDuration(animDuration)
.setInterpolator(interpolator)
.setListener(new AnimatorListener() {
@Override
public void onAnimationCancel(Animator animator) {
doTranslation(title, 0);
title.setAlpha(1);
}
@Override
public void onAnimationEnd(Animator animator) {
title.setText(newTitle);
doTranslation(title, translation);
final ViewPropertyAnimator viewPropertyAnimator = title.animate();
if (orientation == MaterialCalendarView.HORIZONTAL) {
viewPropertyAnimator.translationX(0);
} else {
viewPropertyAnimator.translationY(0);
}
viewPropertyAnimator
.alpha(1)
.setDuration(animDuration)
.setInterpolator(interpolator)
.setListener(new AnimatorListener())
.start();
}
}).start();
}
previousMonth = currentMonth;
}
private void doTranslation(final TextView title, final int translate) {
if (orientation == MaterialCalendarView.HORIZONTAL) {
title.setTranslationX(translate);
} else {
title.setTranslationY(translate);
}
}
public TitleFormatter getTitleFormatter() {
return titleFormatter;
}
public void setTitleFormatter(TitleFormatter titleFormatter) {
this.titleFormatter = titleFormatter;
}
public void setOrientation(int orientation) {
this.orientation = orientation;
}
public int getOrientation() {
return orientation;
}
public void setPreviousMonth(CalendarDay previousMonth) {
this.previousMonth = previousMonth;
}
}
| netcosports/material-calendarview | library/src/main/java/com/prolificinteractive/materialcalendarview/TitleChanger.java | Java | mit | 5,089 |
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "CCScriptSupport.h"
#include "CCScheduler.h"
bool CC_DLL cc_assert_script_compatible(const char *msg)
{
cocos2d::CCScriptEngineProtocol* pEngine = cocos2d::CCScriptEngineManager::sharedManager()->getScriptEngine();
if (pEngine && pEngine->handleAssert(msg))
{
return true;
}
return false;
}
NS_CC_BEGIN
// #pragma mark -
// #pragma mark CCScriptHandlerEntry
CCScriptHandlerEntry* CCScriptHandlerEntry::create(int nHandler)
{
CCScriptHandlerEntry* entry = new CCScriptHandlerEntry(nHandler);
entry->autorelease();
return entry;
}
CCScriptHandlerEntry::~CCScriptHandlerEntry(void)
{
if (m_nHandler != 0)
{
CCScriptEngineManager::sharedManager()->getScriptEngine()->removeScriptHandler(m_nHandler);
m_nHandler = 0;
}
}
// #pragma mark -
// #pragma mark CCSchedulerScriptHandlerEntry
CCSchedulerScriptHandlerEntry* CCSchedulerScriptHandlerEntry::create(int nHandler, float fInterval, bool bPaused)
{
CCSchedulerScriptHandlerEntry* pEntry = new CCSchedulerScriptHandlerEntry(nHandler);
pEntry->init(fInterval, bPaused);
pEntry->autorelease();
return pEntry;
}
bool CCSchedulerScriptHandlerEntry::init(float fInterval, bool bPaused)
{
m_pTimer = new CCTimer();
m_pTimer->initWithScriptHandler(m_nHandler, fInterval);
m_pTimer->autorelease();
m_pTimer->retain();
m_bPaused = bPaused;
LUALOG("[LUA] ADD script schedule: %d, entryID: %d", m_nHandler, m_nEntryId);
return true;
}
CCSchedulerScriptHandlerEntry::~CCSchedulerScriptHandlerEntry(void)
{
m_pTimer->release();
LUALOG("[LUA] DEL script schedule %d, entryID: %d", m_nHandler, m_nEntryId);
}
// #pragma mark -
// #pragma mark CCTouchScriptHandlerEntry
CCTouchScriptHandlerEntry* CCTouchScriptHandlerEntry::create(int nHandler,
bool bIsMultiTouches,
int nPriority,
bool bSwallowsTouches)
{
CCTouchScriptHandlerEntry* pEntry = new CCTouchScriptHandlerEntry(nHandler);
pEntry->init(bIsMultiTouches, nPriority, bSwallowsTouches);
pEntry->autorelease();
return pEntry;
}
CCTouchScriptHandlerEntry::~CCTouchScriptHandlerEntry(void)
{
if (m_nHandler != 0)
{
CCScriptEngineManager::sharedManager()->getScriptEngine()->removeScriptHandler(m_nHandler);
LUALOG("[LUA] Remove touch event handler: %d", m_nHandler);
m_nHandler = 0;
}
}
bool CCTouchScriptHandlerEntry::init(bool bIsMultiTouches, int nPriority, bool bSwallowsTouches)
{
m_bIsMultiTouches = bIsMultiTouches;
m_nPriority = nPriority;
m_bSwallowsTouches = bSwallowsTouches;
return true;
}
// #pragma mark -
// #pragma mark CCScriptEngineManager
static CCScriptEngineManager* s_pSharedScriptEngineManager = NULL;
CCScriptEngineManager::~CCScriptEngineManager(void)
{
removeScriptEngine();
}
void CCScriptEngineManager::setScriptEngine(CCScriptEngineProtocol *pScriptEngine)
{
removeScriptEngine();
m_pScriptEngine = pScriptEngine;
}
void CCScriptEngineManager::removeScriptEngine(void)
{
if (m_pScriptEngine)
{
delete m_pScriptEngine;
m_pScriptEngine = NULL;
}
}
CCScriptEngineManager* CCScriptEngineManager::sharedManager(void)
{
if (!s_pSharedScriptEngineManager)
{
s_pSharedScriptEngineManager = new CCScriptEngineManager();
}
return s_pSharedScriptEngineManager;
}
void CCScriptEngineManager::purgeSharedManager(void)
{
if (s_pSharedScriptEngineManager)
{
delete s_pSharedScriptEngineManager;
s_pSharedScriptEngineManager = NULL;
}
}
NS_CC_END
| h-iwata/MultiplayPaint | proj.ios_mac/Photon-iOS_SDK/Demos/etc-bin/cocos2dx/cocos2dx/script_support/CCScriptSupport.cpp | C++ | mit | 5,005 |