code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
odoo.define('website_sale.s_products_searchbar', function (require) {
'use strict';

const concurrency = require('web.concurrency');
const publicWidget = require('web.public.widget');
const { qweb } = require('web.core');

/**
 * Autocomplete search bar for the eCommerce product listing.
 *
 * Fetches matching products from `/shop/products/autocomplete` while the
 * user types (debounced, drop-previous so stale responses are discarded)
 * and renders the suggestions in a dropdown appended to the form.
 *
 * @todo maybe the custom autocomplete logic could be extract to be reusable
 */
publicWidget.registry.productsSearchBar = publicWidget.Widget.extend({
    selector: '.o_wsale_products_searchbar_form',
    xmlDependencies: ['/website_sale/static/src/xml/website_sale_utils.xml'],
    events: {
        'input .search-query': '_onInput',
        'focusout': '_onFocusOut',
        'keydown .search-query': '_onKeydown',
    },
    // Minimum dropdown width in pixels; also bounds the snippet-size used to
    // compute how many characters of description the server should return.
    autocompleteMinWidth: 300,

    /**
     * @constructor
     */
    init: function () {
        this._super.apply(this, arguments);

        // Drop-previous: only the latest in-flight RPC may resolve the UI.
        this._dp = new concurrency.DropPrevious();

        // Debounce per instance: avoid one RPC per keystroke, and give a
        // click on a suggestion time to land before focusout closes the menu.
        this._onInput = _.debounce(this._onInput, 400);
        this._onFocusOut = _.debounce(this._onFocusOut, 100);
    },
    /**
     * @override
     */
    start: function () {
        this.$input = this.$('.search-query');

        this.order = this.$('.o_wsale_search_order_by').val();
        // Fix: always pass an explicit radix to parseInt.
        this.limit = parseInt(this.$input.data('limit'), 10);
        this.displayDescription = !!this.$input.data('displayDescription');
        this.displayPrice = !!this.$input.data('displayPrice');
        this.displayImage = !!this.$input.data('displayImage');

        if (this.limit) {
            // The browser's native autocompletion would cover our dropdown.
            this.$input.attr('autocomplete', 'off');
        }

        return this._super.apply(this, arguments);
    },

    //--------------------------------------------------------------------------
    // Private
    //--------------------------------------------------------------------------

    /**
     * Performs the autocomplete RPC for the current input value.
     *
     * @private
     * @returns {Promise} resolved with the server payload
     */
    _fetch: function () {
        return this._rpc({
            route: '/shop/products/autocomplete',
            params: {
                'term': this.$input.val(),
                'options': {
                    'order': this.order,
                    'limit': this.limit,
                    'display_description': this.displayDescription,
                    'display_price': this.displayPrice,
                    // Truncate descriptions proportionally to the rendered width.
                    'max_nb_chars': Math.round(Math.max(this.autocompleteMinWidth, parseInt(this.$el.width(), 10)) * 0.22),
                },
            },
        });
    },
    /**
     * Renders (or closes, when no result is given) the suggestion dropdown.
     *
     * @private
     * @param {Object} [res] server payload; falsy closes the dropdown
     */
    _render: function (res) {
        var $prevMenu = this.$menu;
        this.$el.toggleClass('dropdown show', !!res);
        if (res) {
            var products = res['products'];
            this.$menu = $(qweb.render('website_sale.productsSearchBar.autocomplete', {
                products: products,
                hasMoreProducts: products.length < res['products_count'],
                currency: res['currency'],
                widget: this,
            }));
            this.$menu.css('min-width', this.autocompleteMinWidth);
            this.$el.append(this.$menu);
        } else {
            // Fix: clear the stale reference so keyboard navigation does not
            // try to focus children of a menu that has been removed.
            this.$menu = null;
        }
        if ($prevMenu) {
            $prevMenu.remove();
        }
    },

    //--------------------------------------------------------------------------
    // Handlers
    //--------------------------------------------------------------------------

    /**
     * @private
     */
    _onInput: function () {
        if (!this.limit) {
            return;
        }
        this._dp.add(this._fetch()).then(this._render.bind(this));
    },
    /**
     * Closes the dropdown once focus has left the whole widget.
     *
     * @private
     */
    _onFocusOut: function () {
        if (!this.$el.has(document.activeElement).length) {
            this._render();
        }
    },
    /**
     * ESCAPE closes the dropdown; UP/DOWN move focus into the suggestions.
     *
     * @private
     * @param {jQuery.Event} ev
     */
    _onKeydown: function (ev) {
        switch (ev.which) {
            case $.ui.keyCode.ESCAPE:
                this._render();
                break;
            case $.ui.keyCode.UP:
            case $.ui.keyCode.DOWN:
                ev.preventDefault();
                if (this.$menu) {
                    let $element = ev.which === $.ui.keyCode.UP ? this.$menu.children().last() : this.$menu.children().first();
                    $element.focus();
                }
                break;
        }
    },
});
});
| ddico/odoo | addons/website_sale/static/src/snippets/s_products_searchbar/000.js | JavaScript | agpl-3.0 | 4,128 |
<?php
/**
* @copyright Copyright (c) 2016, ownCloud, Inc.
*
* @author Joas Schilling <coding@schilljs.com>
* @author Lukas Reschke <lukas@statuscode.ch>
*
* @license AGPL-3.0
*
* This code is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License, version 3,
* along with this program. If not, see <http://www.gnu.org/licenses/>
*
*/
namespace OCA\UpdateNotification\Controller;
use OCA\UpdateNotification\UpdateChecker;
use OCP\AppFramework\Controller;
use OCP\AppFramework\Http\DataResponse;
use OCP\AppFramework\Http\TemplateResponse;
use OCP\AppFramework\Utility\ITimeFactory;
use OCP\BackgroundJob\IJobList;
use OCP\IConfig;
use OCP\IDateTimeFormatter;
use OCP\IL10N;
use OCP\IRequest;
use OCP\Security\ISecureRandom;
use OCP\Settings\ISettings;
/**
 * Admin settings panel for the update-notification app.
 *
 * Renders the updater admin section, lets the admin switch the release
 * channel and generates the shared secret used by the updater.
 */
class AdminController extends Controller implements ISettings {
	/** @var IJobList */
	private $jobList;
	/** @var ISecureRandom */
	private $secureRandom;
	/** @var IConfig */
	private $config;
	/** @var ITimeFactory */
	private $timeFactory;
	/** @var UpdateChecker */
	private $updateChecker;
	/** @var IL10N */
	private $l10n;
	/** @var IDateTimeFormatter */
	private $dateTimeFormatter;

	/**
	 * @param string $appName
	 * @param IRequest $request
	 * @param IJobList $jobList
	 * @param ISecureRandom $secureRandom
	 * @param IConfig $config
	 * @param ITimeFactory $timeFactory
	 * @param IL10N $l10n
	 * @param UpdateChecker $updateChecker
	 * @param IDateTimeFormatter $dateTimeFormatter
	 */
	public function __construct($appName,
								IRequest $request,
								IJobList $jobList,
								ISecureRandom $secureRandom,
								IConfig $config,
								ITimeFactory $timeFactory,
								IL10N $l10n,
								UpdateChecker $updateChecker,
								IDateTimeFormatter $dateTimeFormatter) {
		parent::__construct($appName, $request);
		$this->jobList = $jobList;
		$this->secureRandom = $secureRandom;
		$this->config = $config;
		$this->timeFactory = $timeFactory;
		$this->l10n = $l10n;
		$this->updateChecker = $updateChecker;
		$this->dateTimeFormatter = $dateTimeFormatter;
	}

	/**
	 * Builds the admin template with the update state, the available
	 * channels and the groups to notify.
	 *
	 * @return TemplateResponse
	 */
	public function displayPanel() {
		// getAppValue() returns a string; cast so the formatter receives a
		// proper Unix timestamp.
		$lastUpdateCheck = $this->dateTimeFormatter->formatDateTime(
			(int)$this->config->getAppValue('core', 'lastupdatedat')
		);

		$channels = [
			'daily',
			'beta',
			'stable',
			'production',
		];
		$currentChannel = \OCP\Util::getChannel();

		// Remove the currently used channel from the channels list
		// (strict comparison: all entries are strings).
		if (($key = array_search($currentChannel, $channels, true)) !== false) {
			unset($channels[$key]);
		}

		$updateState = $this->updateChecker->getUpdateState();

		// json_decode() returns null on invalid JSON and implode() requires
		// an array, so guard against a corrupt stored app value.
		$notifyGroups = json_decode($this->config->getAppValue('updatenotification', 'notify_groups', '["admin"]'), true);
		if (!is_array($notifyGroups)) {
			$notifyGroups = [];
		}

		$params = [
			'isNewVersionAvailable' => !empty($updateState['updateAvailable']),
			'lastChecked' => $lastUpdateCheck,
			'currentChannel' => $currentChannel,
			'channels' => $channels,
			'newVersionString' => (empty($updateState['updateVersion'])) ? '' : $updateState['updateVersion'],
			'downloadLink' => (empty($updateState['downloadLink'])) ? '' : $updateState['downloadLink'],
			'updaterEnabled' => (empty($updateState['updaterEnabled'])) ? false : $updateState['updaterEnabled'],
			'notify_groups' => implode('|', $notifyGroups),
		];

		return new TemplateResponse($this->appName, 'admin', $params, '');
	}

	/**
	 * Switches the release channel and forces a re-check on the next cron run
	 * by resetting the last-checked timestamp.
	 *
	 * @UseSession
	 *
	 * @param string $channel
	 * @return DataResponse
	 */
	public function setChannel($channel) {
		\OCP\Util::setChannel($channel);
		$this->config->setAppValue('core', 'lastupdatedat', 0);
		return new DataResponse(['status' => 'success', 'data' => ['message' => $this->l10n->t('Channel updated')]]);
	}

	/**
	 * Generates a fresh updater secret; only its hash is persisted, the
	 * plain token is returned once to the caller.
	 *
	 * @return DataResponse
	 */
	public function createCredentials() {
		// Create a new job and store the creation date
		$this->jobList->add('OCA\UpdateNotification\ResetTokenBackgroundJob');
		$this->config->setAppValue('core', 'updater.secret.created', $this->timeFactory->getTime());

		// Create a new token
		$newToken = $this->secureRandom->generate(64);
		$this->config->setSystemValue('updater.secret', password_hash($newToken, PASSWORD_DEFAULT));

		return new DataResponse($newToken);
	}

	/**
	 * @return TemplateResponse returns the instance with all parameters set, ready to be rendered
	 */
	public function getForm() {
		return $this->displayPanel();
	}

	/**
	 * @return string the section ID, e.g. 'sharing'
	 */
	public function getSection() {
		return 'server';
	}

	/**
	 * @return int whether the form should be rather on the top or bottom of
	 * the admin section. The forms are arranged in ascending order of the
	 * priority values. It is required to return a value between 0 and 100.
	 *
	 * E.g.: 70
	 */
	public function getPriority() {
		return 1;
	}
}
| jbicha/server | apps/updatenotification/lib/Controller/AdminController.php | PHP | agpl-3.0 | 5,211 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012+ BREMSKERL-REIBBELAGWERKE EMMERLING GmbH & Co. KG
# Author Marco Dieckhoff
# Copyright (C) 2013 Agile Business Group sagl (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest for the "Stock Move Backdating" module.
# NOTE: 'installable' is False -- the addon lives under __unported__ and is
# not yet migrated to this server series.
{
    "name": "Stock Move Backdating",
    "version": "1.0",
    'author': ['Marco Dieckhoff, BREMSKERL', 'Agile Business Group'],
    "category": "Stock Logistics",
    'website': 'www.bremskerl.com',
    # Only the core warehouse module is required.
    "depends": ["stock"],
    "summary": "Allows back-dating of stock moves",
    "description": """This module allows to register old stock moves
(with date != now).
On stock moves, user can specify the "Actual Movement Date", that will be
used as movement date""",
    # Views and wizards loaded on installation.
    'data': [
        "view/stock_view.xml",
        "wizard/stock_partial_picking_view.xml",
    ],
    'demo': [],
    'installable': False,
}
| yvaucher/stock-logistics-workflow | __unported__/stock_move_backdating/__openerp__.py | Python | agpl-3.0 | 1,678 |
"""Model managers for Reversion."""
try:
set
except NameError:
from sets import Set as set # Python 2.3 fallback.
from django.contrib.contenttypes.models import ContentType
from django.db import models
class VersionManager(models.Manager):
    """Manager providing query helpers for Version models.

    Kept Python-2 compatible (``unicode``) on purpose: the module header
    explicitly supports Python 2.3+.
    """

    def get_for_object(self, object):
        """Returns all the versions of the given object, ordered by date created.

        pk order is used as a proxy for creation order.
        """
        content_type = ContentType.objects.get_for_model(object)
        # Fix: the original chained .order_by("pk") twice; once is enough.
        return self.filter(content_type=content_type, object_id=unicode(object.pk)).select_related().order_by("pk")

    def get_unique_for_object(self, obj):
        """Returns unique versions associated with the object.

        Consecutive duplicates (identical serialized data) are collapsed,
        keeping the first occurrence of each distinct state.
        """
        versions = self.get_for_object(obj)
        changed_versions = []
        known_serialized_data = set()
        for version in versions:
            serialized_data = version.serialized_data
            if serialized_data in known_serialized_data:
                continue  # state already seen; skip the duplicate
            known_serialized_data.add(serialized_data)
            changed_versions.append(version)
        return changed_versions

    def get_for_date(self, object, date):
        """Returns the latest version of an object for the given date.

        Raises ``model.DoesNotExist`` when no version predates ``date``.
        """
        try:
            return self.get_for_object(object).filter(revision__date_created__lte=date).order_by("-pk")[0]
        except IndexError:
            raise self.model.DoesNotExist

    def get_deleted(self, model_class):
        """Returns all the deleted versions for the given model class."""
        # Versions whose object id is not among the live rows correspond to
        # deleted objects.
        live_ids = [unicode(row[0]) for row in model_class._default_manager.all().values_list("pk")]
        content_type = ContentType.objects.get_for_model(model_class)
        deleted_ids = self.filter(content_type=content_type).exclude(object_id__in=live_ids).order_by().values_list("object_id").distinct()
        deleted = []
        for object_id, in deleted_ids:
            deleted.append(self.get_deleted_object(model_class, object_id))
        return deleted

    def get_deleted_object(self, model_class, object_id):
        """
        Returns the version corresponding to the deletion of the object with
        the given id (i.e. the most recent version on record).
        """
        try:
            content_type = ContentType.objects.get_for_model(model_class)
            return self.filter(content_type=content_type, object_id=unicode(object_id)).order_by("-pk").select_related()[0]
        except IndexError:
            raise self.model.DoesNotExist
/*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.module.tem.document.web.struts;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import org.kuali.kfs.module.tem.document.web.bean.TravelMvcWrapperBean;
/**
 * Observable that dispatches MVC events by name: observers register per
 * event, and notifyObservers routes the update to the observers registered
 * for the wrapper's "method to call".
 */
public class TravelStrutsObservable extends Observable {

    /** Maps event names to the observers interested in that event. */
    public Map<String, List<Observer>> observers;

    /**
     * deprecating this since the best practice is to use Spring
     */
    @Override
    @Deprecated
    public void addObserver(final Observer observer) {
        super.addObserver(observer);
    }

    /**
     * Notifies the observers registered for the event carried by the
     * {@link TravelMvcWrapperBean} argument (either the argument itself or
     * the first element of an Object[] argument).
     *
     * Fix: the original dereferenced the wrapper (and the observer list)
     * without null checks, guaranteeing a NullPointerException for any other
     * argument shape or for an event with no registered observers; the
     * warning was suppressed rather than fixed.
     */
    @Override
    public void notifyObservers(final Object arg) {
        TravelMvcWrapperBean wrapper = null;
        if (arg instanceof TravelMvcWrapperBean) {
            wrapper = (TravelMvcWrapperBean) arg;
        }
        else if (arg instanceof Object[]) {
            final Object[] args = (Object[]) arg;
            if (args.length > 0 && args[0] instanceof TravelMvcWrapperBean) {
                wrapper = (TravelMvcWrapperBean) args[0];
            }
        }
        if (wrapper == null) {
            // Nothing to dispatch on; still reset the changed flag as before.
            clearChanged();
            return;
        }
        final String eventName = wrapper.getMethodToCall();
        final List<Observer> interested = getObservers().get(eventName);
        if (interested != null) {
            for (final Observer observer : interested) {
                observer.update(this, arg);
            }
        }
        clearChanged();
    }

    /**
     * Gets the observers attribute.
     *
     * @return Returns the observers.
     */
    public Map<String, List<Observer>> getObservers() {
        return observers;
    }

    /**
     * Sets the observers attribute value.
     *
     * @param observers The observers to set.
     */
    public void setObservers(final Map<String,List<Observer>> observers) {
        this.observers = observers;
    }
}
| bhutchinson/kfs | kfs-tem/src/main/java/org/kuali/kfs/module/tem/document/web/struts/TravelStrutsObservable.java | Java | agpl-3.0 | 2,543 |
// This file was generated by the Gtk# code generator.
// Any changes made will be lost if regenerated.
namespace GLib {
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
#region Autogenerated code
// Managed binding for GLib's GTimeZone. Auto-generated code: byte-level
// changes here will be lost on regeneration, so only comments are added.
public partial class TimeZone : GLib.Opaque {

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern IntPtr g_time_zone_get_type();

	// Registered GLib GType for GTimeZone.
	public static GLib.GType GType {
		get {
			IntPtr raw_ret = g_time_zone_get_type();
			GLib.GType ret = new GLib.GType(raw_ret);
			return ret;
		}
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern int g_time_zone_adjust_time(IntPtr raw, int type, long time_);

	// Thin wrapper over g_time_zone_adjust_time; 'type' and 'time_' are
	// passed straight through to the native call.
	public int AdjustTime(int type, long time_) {
		int raw_ret = g_time_zone_adjust_time(Handle, type, time_);
		int ret = raw_ret;
		return ret;
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern int g_time_zone_find_interval(IntPtr raw, int type, long time_);

	// Wrapper over g_time_zone_find_interval.
	public int FindInterval(int type, long time_) {
		int raw_ret = g_time_zone_find_interval(Handle, type, time_);
		int ret = raw_ret;
		return ret;
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern IntPtr g_time_zone_get_abbreviation(IntPtr raw, int interval);

	// Returns the abbreviation for the given interval, marshalled from the
	// native UTF-8 string.
	public string GetAbbreviation(int interval) {
		IntPtr raw_ret = g_time_zone_get_abbreviation(Handle, interval);
		string ret = GLib.Marshaller.Utf8PtrToString (raw_ret);
		return ret;
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern int g_time_zone_get_offset(IntPtr raw, int interval);

	// Wrapper over g_time_zone_get_offset.
	public int GetOffset(int interval) {
		int raw_ret = g_time_zone_get_offset(Handle, interval);
		int ret = raw_ret;
		return ret;
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern bool g_time_zone_is_dst(IntPtr raw, int interval);

	// Wrapper over g_time_zone_is_dst.
	public bool IsDst(int interval) {
		bool raw_ret = g_time_zone_is_dst(Handle, interval);
		bool ret = raw_ret;
		return ret;
	}

	// Wraps an existing native GTimeZone pointer without copying it.
	public TimeZone(IntPtr raw) : base(raw) {}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern IntPtr g_time_zone_new(IntPtr identifier);

	// Creates a time zone from an identifier string; the temporary native
	// copy of the string is freed after the call.
	public TimeZone (string identifier)
	{
		IntPtr native_identifier = GLib.Marshaller.StringToPtrGStrdup (identifier);
		Raw = g_time_zone_new(native_identifier);
		GLib.Marshaller.Free (native_identifier);
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern IntPtr g_time_zone_new_local();

	// Creates the local time zone.
	public TimeZone ()
	{
		Raw = g_time_zone_new_local();
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern IntPtr g_time_zone_new_utc();

	// Creates the UTC time zone.
	public static TimeZone NewUtc()
	{
		TimeZone result = new TimeZone (g_time_zone_new_utc());
		return result;
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern IntPtr g_time_zone_ref(IntPtr raw);

	// Takes a native reference the first time ownership is acquired.
	protected override void Ref (IntPtr raw)
	{
		if (!Owned) {
			g_time_zone_ref (raw);
			Owned = true;
		}
	}

	[DllImport (Global.GLibNativeDll, CallingConvention = CallingConvention.Cdecl)]
	static extern void g_time_zone_unref(IntPtr raw);

	// Releases the native reference when this wrapper owns it.
	protected override void Unref (IntPtr raw)
	{
		if (Owned) {
			g_time_zone_unref (raw);
			Owned = false;
		}
	}

	// Captures the handle so the unref can run on the GLib main loop after
	// finalization (unref-ing directly from the finalizer thread is unsafe).
	class FinalizerInfo {
		IntPtr handle;

		public FinalizerInfo (IntPtr handle)
		{
			this.handle = handle;
		}

		public bool Handler ()
		{
			g_time_zone_unref (handle);
			return false;
		}
	}

	~TimeZone ()
	{
		if (!Owned)
			return;
		// Defer the unref to the main loop via a one-shot timeout.
		FinalizerInfo info = new FinalizerInfo (Handle);
		GLib.Timeout.Add (50, new GLib.TimeoutHandler (info.Handler));
	}

#endregion
}
} | antoniusriha/gtk-sharp | glib/TimeZone.cs | C# | lgpl-2.1 | 3,935 |
/****************************************************************/
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* All contents are licensed under LGPL V2.1 */
/* See LICENSE for full restrictions */
/****************************************************************/
#include "NonlinearRZ.h"
#include "SolidModel.h"
#include "Problem.h"
#include "SymmIsotropicElasticityTensor.h"
namespace SolidMechanics
{
// Constructor: caches references to the coupled displacement variables
// (current and previous step) used by the axisymmetric (RZ) formulation.
// Initializer order matches the member declaration order in the header.
NonlinearRZ::NonlinearRZ( SolidModel & solid_model,
                          const std::string & name,
                          InputParameters parameters )
  :Nonlinear( solid_model, name, parameters ),
   _grad_disp_r(coupledGradient("disp_r")),
   _grad_disp_z(coupledGradient("disp_z")),
   _grad_disp_r_old(coupledGradientOld("disp_r")),
   _grad_disp_z_old(coupledGradientOld("disp_z")),
   _disp_r(coupledValue("disp_r")),
   _disp_r_old(coupledValueOld("disp_r"))
{
}
////////////////////////////////////////////////////////////////////////
// Destructor: nothing to release; the coupled-variable references are owned
// by the framework.
NonlinearRZ::~NonlinearRZ()
{
}
////////////////////////////////////////////////////////////////////////
// Computes the incremental deformation gradient Fhat at every quadrature
// point, then applies an element-average volumetric-locking correction so
// that det(Fhat) is uniform across the element. The statement order matters:
// _Fbar[qp] must be stored before Fbar is inverted and reused.
void
NonlinearRZ::computeIncrementalDeformationGradient( std::vector<ColumnMajorMatrix> & Fhat )
{
  // A = grad(u(k+1) - u(k))
  // Fbar = 1 + grad(u(k))
  // Fhat = 1 + A*(Fbar^-1)
  ColumnMajorMatrix A;
  ColumnMajorMatrix Fbar;
  ColumnMajorMatrix Fbar_inverse;
  ColumnMajorMatrix Fhat_average;  // volume-weighted element average of Fhat
  Real volume(0);

  _Fbar.resize(_solid_model.qrule()->n_points());

  for ( unsigned qp= 0; qp < _solid_model.qrule()->n_points(); ++qp )
  {
    // A holds the current-step gradient, Fbar the previous-step gradient;
    // subtracting gives the incremental displacement gradient.
    fillMatrix( qp, _grad_disp_r, _grad_disp_z, _disp_r, A );
    fillMatrix( qp, _grad_disp_r_old, _grad_disp_z_old, _disp_r_old, Fbar);
    A -= Fbar;

    Fbar.addDiag( 1 );

    _Fbar[qp] = Fbar;

    // Get Fbar^(-1)
    // Computing the inverse is generally a bad idea.
    // It's better to compute LU factors.   For now at least, we'll take
    // a direct route.

    invertMatrix( Fbar, Fbar_inverse );

    Fhat[qp] = A * Fbar_inverse;
    Fhat[qp].addDiag( 1 );

    // Now include the contribution for the integration of Fhat over the element
    Fhat_average += Fhat[qp] * _solid_model.JxW(qp);

    volume += _solid_model.JxW(qp);  // Accumulate original configuration volume
  }

  Fhat_average /= volume;
  const Real det_Fhat_average( detMatrix( Fhat_average ) );
  const Real third( 1./3. );

  // Finalize volumetric locking correction: rescale each Fhat so its
  // determinant matches the element-average determinant.
  for ( unsigned qp=0; qp < _solid_model.qrule()->n_points(); ++qp )
  {
    const Real det_Fhat( detMatrix( Fhat[qp] ) );
    const Real factor( std::pow( det_Fhat_average/det_Fhat, third ) );

    Fhat[qp] *= factor;
  }
  //    Moose::out << "Fhat(0,0)" << Fhat[0](0,0) << std::endl;
}
////////////////////////////////////////////////////////////////////////
void
NonlinearRZ::computeDeformationGradient( unsigned int qp, ColumnMajorMatrix & F)
{
mooseAssert(F.n() == 3 && F.m() == 3, "computeDefGrad requires 3x3 matrix");
F(0,0) = _grad_disp_r[qp](0) + 1;
F(0,1) = _grad_disp_r[qp](1);
F(0,2) = 0;
F(1,0) = _grad_disp_z[qp](0);
F(1,1) = _grad_disp_z[qp](1) + 1;
F(1,2) = 0;
F(2,0) = 0;
F(2,1) = 0;
F(2,2) = (_solid_model.q_point(qp)(0) != 0.0 ? _disp_r[qp]/_solid_model.q_point(qp)(0) : 0.0) + 1;
}
////////////////////////////////////////////////////////////////////////
// Fills A with the displacement gradient in cylindrical (RZ) coordinates.
// Rows/columns are ordered (r, z, theta). The hoop term A(2,2) = u_r / r is
// zeroed on the symmetry axis (r == 0) to avoid a division by zero.
void
NonlinearRZ::fillMatrix( unsigned int qp,
                         const VariableGradient & grad_r,
                         const VariableGradient & grad_z,
                         const VariableValue & u,
                         ColumnMajorMatrix & A) const
{
  mooseAssert(A.n() == 3 && A.m() == 3, "computeDefGrad requires 3x3 matrix");

  A(0,0) = grad_r[qp](0);  // du_r/dr
  A(0,1) = grad_r[qp](1);  // du_r/dz
  A(0,2) = 0;
  A(1,0) = grad_z[qp](0);  // du_z/dr
  A(1,1) = grad_z[qp](1);  // du_z/dz
  A(1,2) = 0;
  A(2,0) = 0;
  A(2,1) = 0;
  A(2,2) = (_solid_model.q_point(qp)(0) != 0.0 ? u[qp]/_solid_model.q_point(qp)(0) : 0.0);
}
//////////////////////////////////////////////////////////////////////////
// Returns det(F) of the previous converged step, i.e. the old deformed-to-
// reference volume ratio at quadrature point qp.
Real
NonlinearRZ::volumeRatioOld(unsigned int qp) const
{
  // NOTE(review): despite the trailing "T" in the name, this is filled as
  // F(n-1) itself, not its transpose; the determinant is the same either
  // way -- confirm the naming intent.
  ColumnMajorMatrix Fnm1T;
  fillMatrix( qp, _grad_disp_r_old, _grad_disp_z_old, _disp_r_old, Fnm1T);
  Fnm1T.addDiag( 1 );

  return detMatrix(Fnm1T);
}
//////////////////////////////////////////////////////////////////////////
}
| danielru/moose | modules/solid_mechanics/src/materials/NonlinearRZ.C | C++ | lgpl-2.1 | 4,391 |
// Test fixture: expected result of the "Make Class Static" refactoring
// (IDEADEV-12762). The nested class holds the former outer-instance
// reference explicitly. NOTE(review): this file is IDE test data -- confirm
// that added comments do not affect the fixture comparison.
class Test18 {
    String str;

    static class A {
        private final Test18 anObject;
        boolean flag;

        public A(Test18 anObject, boolean flag) {
            this.flag = flag;
            this.anObject = anObject;
        }

        void foo() {
            System.out.println("str = " + anObject.str);
        }
    }
}
} | siosio/intellij-community | java/java-tests/testData/refactoring/makeClassStatic/IDEADEV12762_after.java | Java | apache-2.0 | 340 |
package edu.harvard.iq.dataverse.authorization.providers.oauth2.impl;
import com.github.scribejava.apis.GoogleApi20;
import com.github.scribejava.core.builder.api.BaseApi;
import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo;
import edu.harvard.iq.dataverse.authorization.providers.oauth2.AbstractOAuth2AuthenticationProvider;
import edu.harvard.iq.dataverse.util.BundleUtil;
import java.io.StringReader;
import java.util.UUID;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;
/**
*
* @author michael
*/
/**
 * OAuth2 authentication provider for Google accounts.
 *
 * Requests the profile/email scopes and derives a Dataverse username from
 * the local part of the Google account's e-mail address, falling back to a
 * name-based or random username.
 *
 * @author michael
 */
public class GoogleOAuth2AP extends AbstractOAuth2AuthenticationProvider {

    public GoogleOAuth2AP(String aClientId, String aClientSecret) {
        id = "google";
        title = BundleUtil.getStringFromBundle("auth.providers.title.google");
        clientId = aClientId;
        clientSecret = aClientSecret;
        scope = "https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email";
        baseUserEndpoint = "https://www.googleapis.com/oauth2/v2/userinfo";
    }

    @Override
    public BaseApi getApiInstance() {
        return GoogleApi20.instance();
    }

    /**
     * Parses Google's userinfo JSON into display info, a persistent id and
     * a username.
     *
     * Fixes: the original called {@code getString("email")} (no default),
     * which throws instead of returning null when the key is absent, making
     * the null check dead code; and the name-based fallback could produce
     * the literal username "." when both name parts were empty.
     */
    @Override
    protected ParsedUserResponse parseUserResponse(String responseBody) {
        try ( StringReader rdr = new StringReader(responseBody);
                JsonReader jrdr = Json.createReader(rdr) ) {
            JsonObject response = jrdr.readObject();
            AuthenticatedUserDisplayInfo displayInfo = new AuthenticatedUserDisplayInfo(
                    response.getString("given_name",""),
                    response.getString("family_name",""),
                    response.getString("email",""),
                    "",
                    ""
            );
            String persistentUserId = response.getString("id");

            // Prefer the local part of the e-mail as username; use the
            // defaulting getString variant so a missing key yields null
            // rather than an exception.
            String username = response.getString("email", null);
            if ( username != null ) {
                username = username.split("@")[0].trim();
            }
            if ( username == null || username.isEmpty() ) {
                // compose a username from given and family names
                username = response.getString("given_name","") + "."
                        + response.getString("family_name","");
                username = username.trim();
                if ( username.isEmpty() || username.equals(".") ) {
                    username = UUID.randomUUID().toString();
                } else {
                    username = username.replaceAll(" ", "-");
                }
            }

            return new ParsedUserResponse(displayInfo, persistentUserId, username);
        }
    }

    @Override
    public boolean isDisplayIdentifier() {
        return false;
    }
}
| ekoi/DANS-DVN-4.6.1 | src/main/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/impl/GoogleOAuth2AP.java | Java | apache-2.0 | 2,685 |
/*
* Copyright (C) 2006 Oliver Hunt <ojh16@student.canterbury.ac.nz>
* Copyright (C) 2006 Apple Inc. All rights reserved.
* Copyright (C) Research In Motion Limited 2010. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "config.h"
#include "core/layout/svg/LayoutSVGInline.h"
#include "core/layout/svg/LayoutSVGText.h"
#include "core/layout/svg/SVGLayoutSupport.h"
#include "core/layout/svg/SVGResourcesCache.h"
#include "core/layout/svg/line/SVGInlineFlowBox.h"
#include "core/svg/SVGAElement.h"
namespace blink {

// Restricts children to layoutable SVG text nodes and SVG inlines; a direct
// <a> inside an <a> is rejected.
bool LayoutSVGInline::isChildAllowed(LayoutObject* child, const ComputedStyle& style) const
{
    if (child->isText())
        return SVGLayoutSupport::isLayoutableTextNode(child);

    if (isSVGAElement(*node())) {
        // Disallow direct descendant 'a'.
        if (isSVGAElement(*child->node()))
            return false;
    }

    if (!child->isSVGInline() && !child->isSVGInlineText())
        return false;

    return LayoutInline::isChildAllowed(child, style);
}

LayoutSVGInline::LayoutSVGInline(Element* element)
    : LayoutInline(element)
{
    // SVG inlines always carry line boxes so text layout can position them.
    setAlwaysCreateLineBoxes();
}

InlineFlowBox* LayoutSVGInline::createInlineFlowBox()
{
    InlineFlowBox* box = new SVGInlineFlowBox(*this);
    box->setHasVirtualLogicalHeight();
    return box;
}

// The three bounding-box queries below all delegate to the nearest
// LayoutSVGText ancestor, falling back to an empty rect when detached from
// any SVG text subtree.
FloatRect LayoutSVGInline::objectBoundingBox() const
{
    if (const LayoutObject* object = LayoutSVGText::locateLayoutSVGTextAncestor(this))
        return object->objectBoundingBox();

    return FloatRect();
}

FloatRect LayoutSVGInline::strokeBoundingBox() const
{
    if (const LayoutObject* object = LayoutSVGText::locateLayoutSVGTextAncestor(this))
        return object->strokeBoundingBox();

    return FloatRect();
}

FloatRect LayoutSVGInline::paintInvalidationRectInLocalCoordinates() const
{
    if (const LayoutObject* object = LayoutSVGText::locateLayoutSVGTextAncestor(this))
        return object->paintInvalidationRectInLocalCoordinates();

    return FloatRect();
}

// Coordinate-space mapping is handled by the shared SVG layout helpers.
LayoutRect LayoutSVGInline::clippedOverflowRectForPaintInvalidation(const LayoutBoxModelObject* paintInvalidationContainer, const PaintInvalidationState* paintInvalidationState) const
{
    return SVGLayoutSupport::clippedOverflowRectForPaintInvalidation(*this, paintInvalidationContainer, paintInvalidationState);
}

void LayoutSVGInline::mapLocalToContainer(const LayoutBoxModelObject* paintInvalidationContainer, TransformState& transformState, MapCoordinatesFlags, bool* wasFixed, const PaintInvalidationState* paintInvalidationState) const
{
    SVGLayoutSupport::mapLocalToContainer(this, paintInvalidationContainer, transformState, wasFixed, paintInvalidationState);
}

const LayoutObject* LayoutSVGInline::pushMappingToContainer(const LayoutBoxModelObject* ancestorToStopAt, LayoutGeometryMap& geometryMap) const
{
    return SVGLayoutSupport::pushMappingToContainer(this, ancestorToStopAt, geometryMap);
}

// Emits one absolute quad per line box, positioned relative to the ancestor
// text's stroke bounding box.
void LayoutSVGInline::absoluteQuads(Vector<FloatQuad>& quads, bool* wasFixed) const
{
    const LayoutObject* object = LayoutSVGText::locateLayoutSVGTextAncestor(this);
    if (!object)
        return;

    FloatRect textBoundingBox = object->strokeBoundingBox();
    for (InlineFlowBox* box = firstLineBox(); box; box = box->nextLineBox())
        quads.append(localToAbsoluteQuad(FloatRect(textBoundingBox.x() + box->x().toFloat(), textBoundingBox.y() + box->y().toFloat(), box->logicalWidth().toFloat(), box->logicalHeight().toFloat()), false, wasFixed));
}

void LayoutSVGInline::willBeDestroyed()
{
    // Release cached SVG resources before the base class tears down.
    SVGResourcesCache::clientDestroyed(this);
    LayoutInline::willBeDestroyed();
}

void LayoutSVGInline::styleDidChange(StyleDifference diff, const ComputedStyle* oldStyle)
{
    if (diff.needsFullLayout())
        setNeedsBoundariesUpdate();

    LayoutInline::styleDidChange(diff, oldStyle);
    SVGResourcesCache::clientStyleChanged(this, diff, styleRef());
}

void LayoutSVGInline::addChild(LayoutObject* child, LayoutObject* beforeChild)
{
    LayoutInline::addChild(child, beforeChild);
    SVGResourcesCache::clientWasAddedToTree(child, child->styleRef());

    // Keep the ancestor text's layout attributes in sync with the new child.
    if (LayoutSVGText* textLayoutObject = LayoutSVGText::locateLayoutSVGTextAncestor(this))
        textLayoutObject->subtreeChildWasAdded(child);
}

void LayoutSVGInline::removeChild(LayoutObject* child)
{
    SVGResourcesCache::clientWillBeRemovedFromTree(child);

    LayoutSVGText* textLayoutObject = LayoutSVGText::locateLayoutSVGTextAncestor(this);
    if (!textLayoutObject) {
        LayoutInline::removeChild(child);
        return;
    }
    // Collect the affected layout attributes before removal, then let the
    // ancestor text repair them afterwards.
    Vector<SVGTextLayoutAttributes*, 2> affectedAttributes;
    textLayoutObject->subtreeChildWillBeRemoved(child, affectedAttributes);
    LayoutInline::removeChild(child);
    textLayoutObject->subtreeChildWasRemoved(affectedAttributes);
}

}
| weolar/miniblink49 | third_party/WebKit/Source/core/layout/svg/LayoutSVGInline.cpp | C++ | apache-2.0 | 5,510 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.ipc;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
import org.apache.hadoop.hbase.util.ByteStringer;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
/**
* Provides clients with an RPC connection to call coprocessor endpoint {@link com.google.protobuf.Service}s
* against the active master. An instance of this class may be obtained
* by calling {@link org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService()},
* but should normally only be used in creating a new {@link com.google.protobuf.Service} stub to call the endpoint
* methods.
* @see org.apache.hadoop.hbase.client.HBaseAdmin#coprocessorService()
*/
@InterfaceAudience.Private
public class MasterCoprocessorRpcChannel extends CoprocessorRpcChannel {

  // Logger references are never reassigned; declare static final per convention.
  private static final Log LOG = LogFactory.getLog(MasterCoprocessorRpcChannel.class);

  /** Connection through which endpoint calls are routed to the active master. */
  private final HConnection connection;

  /**
   * @param conn the connection used to reach the active master
   */
  public MasterCoprocessorRpcChannel(HConnection conn) {
    this.connection = conn;
  }

  /**
   * Marshals {@code request} into a {@link ClientProtos.CoprocessorServiceCall},
   * executes it against the active master, and unmarshals the result using
   * {@code responsePrototype}.
   *
   * @param method descriptor of the coprocessor service method to invoke
   * @param request the request message to send
   * @param responsePrototype prototype used to build the typed response
   * @return the decoded response, or the prototype's default instance when the
   *     master returned no value
   * @throws IOException if the RPC to the master fails
   */
  @Override
  protected Message callExecService(Descriptors.MethodDescriptor method,
      Message request, Message responsePrototype)
      throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Call: " + method.getName() + ", " + request.toString());
    }

    // Master endpoints are not row-scoped, so an empty row key is sent.
    final ClientProtos.CoprocessorServiceCall call =
        ClientProtos.CoprocessorServiceCall.newBuilder()
            .setRow(ByteStringer.wrap(HConstants.EMPTY_BYTE_ARRAY))
            .setServiceName(method.getService().getFullName())
            .setMethodName(method.getName())
            .setRequest(request.toByteString()).build();

    CoprocessorServiceResponse result = ProtobufUtil.execService(connection.getMaster(), call);

    // An absent value means the endpoint produced no payload; fall back to the
    // default instance of the expected response type.
    Message response;
    if (result.getValue().hasValue()) {
      response = responsePrototype.newBuilderForType()
          .mergeFrom(result.getValue().getValue()).build();
    } else {
      response = responsePrototype.getDefaultInstanceForType();
    }
    if (LOG.isTraceEnabled()) {
      LOG.trace("Master Result is value=" + response);
    }
    return response;
  }
}
| Guavus/hbase | hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/MasterCoprocessorRpcChannel.java | Java | apache-2.0 | 3,362 |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.ComponentModel;
#if NETFX_CORE
using Microsoft.VisualStudio.TestPlatform.UnitTestFramework;
using TestFixture = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestClassAttribute;
using Test = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestMethodAttribute;
#elif DNXCORE50
using Xunit;
using Test = Xunit.FactAttribute;
using Assert = Newtonsoft.Json.Tests.XUnitAssert;
#else
using NUnit.Framework;
#endif
using Newtonsoft.Json.Linq;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif
namespace Newtonsoft.Json.Tests.Linq
{
    /// <summary>
    /// Unit tests covering the collection behaviour of <c>JArray</c>:
    /// add/remove/insert semantics, index lookups, copying, parsing, and
    /// load-settings handling.
    /// </summary>
    [TestFixture]
    public class JArrayTests : TestFixtureBase
    {
        [Test]
        public void RemoveSpecificAndRemoveSelf()
        {
            JObject o = new JObject
            {
                { "results", new JArray(1, 2, 3, 4) }
            };
            JArray a = (JArray)o["results"];
            var last = a.Last();
            Assert.IsTrue(a.Remove(last));
            last = a.Last();
            // Removing via the token itself detaches it from its parent array.
            last.Remove();
            Assert.AreEqual(2, a.Count);
        }

        [Test]
        public void Clear()
        {
            JArray a = new JArray { 1 };
            Assert.AreEqual(1, a.Count);
            a.Clear();
            Assert.AreEqual(0, a.Count);
        }

        [Test]
        public void AddToSelf()
        {
            JArray a = new JArray();
            a.Add(a);
            // Adding an array to itself stores a clone, not the same reference.
            Assert.IsFalse(ReferenceEquals(a[0], a));
        }

        [Test]
        public void Contains()
        {
            JValue v = new JValue(1);
            JArray a = new JArray { v };
            // Contains uses reference identity, not value equality: an equal
            // but distinct JValue is not reported as contained.
            Assert.AreEqual(false, a.Contains(new JValue(2)));
            Assert.AreEqual(false, a.Contains(new JValue(1)));
            Assert.AreEqual(false, a.Contains(null));
            Assert.AreEqual(true, a.Contains(v));
        }

        [Test]
        public void GenericCollectionCopyTo()
        {
            JArray j = new JArray();
            j.Add(new JValue(1));
            j.Add(new JValue(2));
            j.Add(new JValue(3));
            Assert.AreEqual(3, j.Count);
            JToken[] a = new JToken[5];
            // Copy starting at index 1; slots outside the copied range stay null.
            ((ICollection<JToken>)j).CopyTo(a, 1);
            Assert.AreEqual(null, a[0]);
            Assert.AreEqual(1, (int)a[1]);
            Assert.AreEqual(2, (int)a[2]);
            Assert.AreEqual(3, (int)a[3]);
            Assert.AreEqual(null, a[4]);
        }

        [Test]
        public void GenericCollectionCopyToNullArrayShouldThrow()
        {
            JArray j = new JArray();
            ExceptionAssert.Throws<ArgumentNullException>(() => { ((ICollection<JToken>)j).CopyTo(null, 0); }, @"Value cannot be null.
Parameter name: array");
        }

        [Test]
        public void GenericCollectionCopyToNegativeArrayIndexShouldThrow()
        {
            JArray j = new JArray();
            ExceptionAssert.Throws<ArgumentOutOfRangeException>(() => { ((ICollection<JToken>)j).CopyTo(new JToken[1], -1); }, @"arrayIndex is less than 0.
Parameter name: arrayIndex");
        }

        [Test]
        public void GenericCollectionCopyToArrayIndexEqualGreaterToArrayLengthShouldThrow()
        {
            JArray j = new JArray();
            ExceptionAssert.Throws<ArgumentException>(() => { ((ICollection<JToken>)j).CopyTo(new JToken[1], 1); }, @"arrayIndex is equal to or greater than the length of array.");
        }

        [Test]
        public void GenericCollectionCopyToInsufficientArrayCapacity()
        {
            JArray j = new JArray();
            j.Add(new JValue(1));
            j.Add(new JValue(2));
            j.Add(new JValue(3));
            ExceptionAssert.Throws<ArgumentException>(() => { ((ICollection<JToken>)j).CopyTo(new JToken[3], 1); }, @"The number of elements in the source JObject is greater than the available space from arrayIndex to the end of the destination array.");
        }

        [Test]
        public void Remove()
        {
            JValue v = new JValue(1);
            JArray j = new JArray();
            j.Add(v);
            Assert.AreEqual(1, j.Count);
            // Remove is reference-based: an equal but distinct JValue is not removed.
            Assert.AreEqual(false, j.Remove(new JValue(1)));
            Assert.AreEqual(false, j.Remove(null));
            Assert.AreEqual(true, j.Remove(v));
            Assert.AreEqual(false, j.Remove(v));
            Assert.AreEqual(0, j.Count);
        }

        [Test]
        public void IndexOf()
        {
            JValue v1 = new JValue(1);
            JValue v2 = new JValue(1);
            JValue v3 = new JValue(1);
            JArray j = new JArray();
            j.Add(v1);
            Assert.AreEqual(0, j.IndexOf(v1));
            j.Add(v2);
            Assert.AreEqual(0, j.IndexOf(v1));
            Assert.AreEqual(1, j.IndexOf(v2));
            j.AddFirst(v3);
            Assert.AreEqual(1, j.IndexOf(v1));
            Assert.AreEqual(2, j.IndexOf(v2));
            Assert.AreEqual(0, j.IndexOf(v3));
            v3.Remove();
            Assert.AreEqual(0, j.IndexOf(v1));
            Assert.AreEqual(1, j.IndexOf(v2));
            // A detached token is no longer found.
            Assert.AreEqual(-1, j.IndexOf(v3));
        }

        [Test]
        public void RemoveAt()
        {
            JValue v1 = new JValue(1);
            JValue v2 = new JValue(1);
            JValue v3 = new JValue(1);
            JArray j = new JArray();
            j.Add(v1);
            j.Add(v2);
            j.Add(v3);
            Assert.AreEqual(true, j.Contains(v1));
            j.RemoveAt(0);
            Assert.AreEqual(false, j.Contains(v1));
            Assert.AreEqual(true, j.Contains(v3));
            j.RemoveAt(1);
            Assert.AreEqual(false, j.Contains(v3));
            Assert.AreEqual(1, j.Count);
        }

        [Test]
        public void RemoveAtOutOfRangeIndexShouldError()
        {
            JArray j = new JArray();
            ExceptionAssert.Throws<ArgumentOutOfRangeException>(() => { j.RemoveAt(0); }, @"Index is equal to or greater than Count.
Parameter name: index");
        }

        [Test]
        public void RemoveAtNegativeIndexShouldError()
        {
            JArray j = new JArray();
            ExceptionAssert.Throws<ArgumentOutOfRangeException>(() => { j.RemoveAt(-1); }, @"Index is less than 0.
Parameter name: index");
        }

        [Test]
        public void Insert()
        {
            JValue v1 = new JValue(1);
            JValue v2 = new JValue(2);
            JValue v3 = new JValue(3);
            JValue v4 = new JValue(4);
            JArray j = new JArray();
            j.Add(v1);
            j.Add(v2);
            j.Add(v3);
            // Inserting shifts subsequent tokens right.
            j.Insert(1, v4);
            Assert.AreEqual(0, j.IndexOf(v1));
            Assert.AreEqual(1, j.IndexOf(v4));
            Assert.AreEqual(2, j.IndexOf(v2));
            Assert.AreEqual(3, j.IndexOf(v3));
        }

        [Test]
        public void AddFirstAddedTokenShouldBeFirst()
        {
            JValue v1 = new JValue(1);
            JValue v2 = new JValue(2);
            JValue v3 = new JValue(3);
            JArray j = new JArray();
            Assert.AreEqual(null, j.First);
            Assert.AreEqual(null, j.Last);
            j.AddFirst(v1);
            Assert.AreEqual(v1, j.First);
            Assert.AreEqual(v1, j.Last);
            j.AddFirst(v2);
            Assert.AreEqual(v2, j.First);
            Assert.AreEqual(v1, j.Last);
            j.AddFirst(v3);
            Assert.AreEqual(v3, j.First);
            Assert.AreEqual(v1, j.Last);
        }

        [Test]
        public void InsertShouldInsertAtZeroIndex()
        {
            JValue v1 = new JValue(1);
            JValue v2 = new JValue(2);
            JArray j = new JArray();
            j.Insert(0, v1);
            Assert.AreEqual(0, j.IndexOf(v1));
            j.Insert(0, v2);
            Assert.AreEqual(1, j.IndexOf(v1));
            Assert.AreEqual(0, j.IndexOf(v2));
        }

        [Test]
        public void InsertNull()
        {
            JArray j = new JArray();
            // Inserting a null content reference produces a JValue with a null value.
            j.Insert(0, null);
            Assert.AreEqual(null, ((JValue)j[0]).Value);
        }

        [Test]
        public void InsertNegativeIndexShouldThrow()
        {
            JArray j = new JArray();
            ExceptionAssert.Throws<ArgumentOutOfRangeException>(() => { j.Insert(-1, new JValue(1)); }, @"Index was out of range. Must be non-negative and less than the size of the collection.
Parameter name: index");
        }

        [Test]
        public void InsertOutOfRangeIndexShouldThrow()
        {
            JArray j = new JArray();
            ExceptionAssert.Throws<ArgumentOutOfRangeException>(() => { j.Insert(2, new JValue(1)); }, @"Index must be within the bounds of the List.
Parameter name: index");
        }

        [Test]
        public void Item()
        {
            JValue v1 = new JValue(1);
            JValue v2 = new JValue(2);
            JValue v3 = new JValue(3);
            JValue v4 = new JValue(4);
            JArray j = new JArray();
            j.Add(v1);
            j.Add(v2);
            j.Add(v3);
            // Indexer assignment replaces the token and re-parents accordingly.
            j[1] = v4;
            Assert.AreEqual(null, v2.Parent);
            Assert.AreEqual(-1, j.IndexOf(v2));
            Assert.AreEqual(j, v4.Parent);
            Assert.AreEqual(1, j.IndexOf(v4));
        }

        [Test]
        public void Parse_ShouldThrowOnUnexpectedToken()
        {
            string json = @"{""prop"":""value""}";
            ExceptionAssert.Throws<JsonReaderException>(() => { JArray.Parse(json); }, "Error reading JArray from JsonReader. Current JsonReader item is not an array: StartObject. Path '', line 1, position 1.");
        }

        // Simple record type used by the ArrayOrder test below.
        public class ListItemFields
        {
            public string ListItemText { get; set; }
            public object ListItemValue { get; set; }
        }

        [Test]
        public void ArrayOrder()
        {
            string itemZeroText = "Zero text";
            IEnumerable<ListItemFields> t = new List<ListItemFields>
            {
                new ListItemFields { ListItemText = "First", ListItemValue = 1 },
                new ListItemFields { ListItemText = "Second", ListItemValue = 2 },
                new ListItemFields { ListItemText = "Third", ListItemValue = 3 }
            };
            // Build a JArray mixing an explicit first element with a LINQ query;
            // element order must follow construction/query order.
            JObject optionValues =
                new JObject(
                    new JProperty("options",
                        new JArray(
                            new JObject(
                                new JProperty("text", itemZeroText),
                                new JProperty("value", "0")),
                            from r in t
                            orderby r.ListItemValue
                            select new JObject(
                                new JProperty("text", r.ListItemText),
                                new JProperty("value", r.ListItemValue.ToString())))));
            string result = "myOptions = " + optionValues.ToString();
            StringAssert.AreEqual(@"myOptions = {
""options"": [
{
""text"": ""Zero text"",
""value"": ""0""
},
{
""text"": ""First"",
""value"": ""1""
},
{
""text"": ""Second"",
""value"": ""2""
},
{
""text"": ""Third"",
""value"": ""3""
}
]
}", result);
        }

        [Test]
        public void Iterate()
        {
            JArray a = new JArray(1, 2, 3, 4, 5);
            int i = 1;
            foreach (JToken token in a)
            {
                Assert.AreEqual(i, (int)token);
                i++;
            }
        }

#if !(NETFX_CORE || PORTABLE || DNXCORE50 || PORTABLE40)
        [Test]
        public void ITypedListGetItemProperties()
        {
            JProperty p1 = new JProperty("Test1", 1);
            JProperty p2 = new JProperty("Test2", "Two");
            ITypedList a = new JArray(new JObject(p1, p2));
            PropertyDescriptorCollection propertyDescriptors = a.GetItemProperties(null);
            Assert.IsNotNull(propertyDescriptors);
            Assert.AreEqual(2, propertyDescriptors.Count);
            Assert.AreEqual("Test1", propertyDescriptors[0].Name);
            Assert.AreEqual("Test2", propertyDescriptors[1].Name);
        }
#endif

        [Test]
        public void AddArrayToSelf()
        {
            JArray a = new JArray(1, 2);
            a.Add(a);
            // The self-add appends a clone of the array, not the array itself.
            Assert.AreEqual(3, a.Count);
            Assert.AreEqual(1, (int)a[0]);
            Assert.AreEqual(2, (int)a[1]);
            Assert.AreNotSame(a, a[2]);
        }

        [Test]
        public void SetValueWithInvalidIndex()
        {
            ExceptionAssert.Throws<ArgumentException>(() =>
            {
                JArray a = new JArray();
                a["badvalue"] = new JValue(3);
            }, @"Set JArray values with invalid key value: ""badvalue"". Int32 array index expected.");
        }

        [Test]
        public void SetValue()
        {
            // A boxed int is accepted as an array index key.
            object key = 0;
            JArray a = new JArray((object)null);
            a[key] = new JValue(3);
            Assert.AreEqual(3, (int)a[key]);
        }

        [Test]
        public void ReplaceAll()
        {
            JArray a = new JArray(new[] { 1, 2, 3 });
            Assert.AreEqual(3, a.Count);
            Assert.AreEqual(1, (int)a[0]);
            Assert.AreEqual(2, (int)a[1]);
            Assert.AreEqual(3, (int)a[2]);
            a.ReplaceAll(1);
            Assert.AreEqual(1, a.Count);
            Assert.AreEqual(1, (int)a[0]);
        }

        [Test]
        public void ParseIncomplete()
        {
            ExceptionAssert.Throws<JsonReaderException>(() => { JArray.Parse("[1"); }, "Unexpected end of content while loading JArray. Path '[0]', line 1, position 2.");
        }

        [Test]
        public void InsertAddEnd()
        {
            JArray array = new JArray();
            // Inserting at Count behaves like Add.
            array.Insert(0, 123);
            array.Insert(1, 456);
            Assert.AreEqual(2, array.Count);
            Assert.AreEqual(123, (int)array[0]);
            Assert.AreEqual(456, (int)array[1]);
        }

        [Test]
        public void ParseAdditionalContent()
        {
            string json = @"[
""Small"",
""Medium"",
""Large""
], 987987";
            ExceptionAssert.Throws<JsonReaderException>(() => { JArray.Parse(json); }, "Additional text encountered after finished reading JSON content: ,. Path '', line 5, position 1.");
        }

        [Test]
        public void ToListOnEmptyArray()
        {
            string json = @"{""decks"":[]}";
            JArray decks = (JArray)JObject.Parse(json)["decks"];
            IList<JToken> l = decks.ToList();
            Assert.AreEqual(0, l.Count);
            json = @"{""decks"":[1]}";
            decks = (JArray)JObject.Parse(json)["decks"];
            l = decks.ToList();
            Assert.AreEqual(1, l.Count);
        }

        [Test]
        public void Parse_NoComments()
        {
            string json = "[1,2/*comment*/,3]";
            JArray a = JArray.Parse(json, new JsonLoadSettings
            {
                CommentHandling = CommentHandling.Ignore
            });
            // With comments ignored, only the three values remain.
            Assert.AreEqual(3, a.Count);
            Assert.AreEqual(1, (int)a[0]);
            Assert.AreEqual(2, (int)a[1]);
            Assert.AreEqual(3, (int)a[2]);
        }

        [Test]
        public void Parse_LineInfo()
        {
            string json = "[1,2,3]";
            JArray a = JArray.Parse(json, new JsonLoadSettings
            {
                LineInfoHandling = LineInfoHandling.Load
            });
            // NOTE(review): asserts that no line info is captured even though
            // LineInfoHandling.Load is requested - presumably pinning the
            // behaviour of this library version; confirm against the
            // LineInfoHandling documentation if this looks inverted.
            Assert.AreEqual(false, ((IJsonLineInfo)a).HasLineInfo());
            Assert.AreEqual(false, ((IJsonLineInfo)a[0]).HasLineInfo());
            Assert.AreEqual(false, ((IJsonLineInfo)a[1]).HasLineInfo());
            Assert.AreEqual(false, ((IJsonLineInfo)a[2]).HasLineInfo());
        }
    }
}
/**
* Copyright 2014 The PlayN Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package playn.robovm;
/**
* Maintains canvas state that is not maintained via CGContext.
*/
/**
 * Holds canvas state that cannot be stored in the underlying CGContext.
 * Currently this is only the active gradient, if any.
 */
public class RoboCanvasState {

  // The gradient currently in effect; null when none is configured.
  RoboGradient gradient;

  /** Creates a state with no gradient configured. */
  public RoboCanvasState() {
    this.gradient = null;
  }

  /** Creates a state sharing the gradient reference of {@code toCopy}. */
  public RoboCanvasState(RoboCanvasState toCopy) {
    this.gradient = toCopy.gradient;
  }

  /** Creates a state using the supplied gradient (may be {@code null}). */
  public RoboCanvasState(RoboGradient gradient) {
    this.gradient = gradient;
  }
}
| tinkerstudent/playn | robovm/src/playn/robovm/RoboCanvasState.java | Java | apache-2.0 | 978 |
// Barrel file: re-exports the number-spinner component so consumers can
// import it via the containing directory path.
export * from './number-spinner.component';
| dlyle65535/metron | metron-interface/metron-config/src/app/shared/number-spinner/index.ts | TypeScript | apache-2.0 | 44 |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import com.google.cloud.dataflow.sdk.coders.BigEndianLongCoder;
import com.google.cloud.dataflow.sdk.coders.StringUtf8Coder;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.transforms.Combine.CombineFn;
import com.google.cloud.dataflow.sdk.transforms.Combine.KeyedCombineFn;
import com.google.cloud.dataflow.sdk.transforms.Sum;
import com.google.cloud.dataflow.sdk.transforms.windowing.BoundedWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.FixedWindows;
import com.google.cloud.dataflow.sdk.transforms.windowing.IntervalWindow;
import com.google.cloud.dataflow.sdk.transforms.windowing.Sessions;
import com.google.cloud.dataflow.sdk.transforms.windowing.SlidingWindows;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.TupleTag;
import org.hamcrest.Matchers;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/** Unit tests for {@link GroupAlsoByWindowsDoFn}. */
@RunWith(JUnit4.class)
@SuppressWarnings({"rawtypes", "unchecked"})
public class GroupAlsoByWindowsDoFnTest {
  // Fresh per-test execution context, counter set and output tag.
  ExecutionContext execContext;
  CounterSet counters;
  TupleTag<KV<String, Iterable<String>>> outputTag;

  @Before public void setUp() {
    execContext = new DirectModeExecutionContext();
    counters = new CounterSet();
    outputTag = new TupleTag<>();
  }

  // An empty bundle must produce no output groups.
  @Test public void testEmpty() throws Exception {
    DoFnRunner<KV<String, Iterable<WindowedValue<String>>>,
        KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(FixedWindows.<String>of(Duration.millis(10))));
    runner.startBundle();
    runner.finishBundle();
    List<KV<String, Iterable<String>>> result = runner.getReceiver(outputTag);
    assertEquals(0, result.size());
  }

  // Elements in distinct fixed windows are grouped separately; each output
  // group carries the earliest element timestamp and its window.
  @Test public void testFixedWindows() throws Exception {
    DoFnRunner<KV<String, Iterable<WindowedValue<String>>>,
        KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(FixedWindows.<String>of(Duration.millis(10))));
    runner.startBundle();
    runner.processElement(WindowedValue.valueInEmptyWindows(
        KV.of("k", (Iterable<WindowedValue<String>>) Arrays.asList(
            WindowedValue.of(
                "v1",
                new Instant(1),
                Arrays.asList(window(0, 10))),
            WindowedValue.of(
                "v2",
                new Instant(2),
                Arrays.asList(window(0, 10))),
            WindowedValue.of(
                "v3",
                new Instant(13),
                Arrays.asList(window(10, 20)))))));
    runner.finishBundle();
    List<WindowedValue<KV<String, Iterable<String>>>> result = runner.getReceiver(outputTag);
    assertEquals(2, result.size());
    WindowedValue<KV<String, Iterable<String>>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertThat(item0.getValue().getValue(), Matchers.containsInAnyOrder("v1", "v2"));
    assertEquals(new Instant(1), item0.getTimestamp());
    assertThat(item0.getWindows(),
        Matchers.contains(window(0, 10)));
    WindowedValue<KV<String, Iterable<String>>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertThat(item1.getValue().getValue(), Matchers.contains("v3"));
    assertEquals(new Instant(13), item1.getTimestamp());
    assertThat(item1.getWindows(),
        Matchers.contains(window(10, 20)));
  }

  // With sliding windows an element belongs to several windows; each window
  // produces its own group containing every overlapping element.
  @Test public void testSlidingWindows() throws Exception {
    DoFnRunner<KV<String, Iterable<WindowedValue<String>>>,
        KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(
            SlidingWindows.<String>of(Duration.millis(20)).every(Duration.millis(10))));
    runner.startBundle();
    runner.processElement(WindowedValue.valueInEmptyWindows(
        KV.of("k", (Iterable<WindowedValue<String>>) Arrays.asList(
            WindowedValue.of(
                "v1",
                new Instant(5),
                Arrays.asList(window(-10, 10), window(0, 20))),
            WindowedValue.of(
                "v2",
                new Instant(15),
                Arrays.asList(window(0, 20), window(10, 30)))))));
    runner.finishBundle();
    List<WindowedValue<KV<String, Iterable<String>>>> result = runner.getReceiver(outputTag);
    assertEquals(3, result.size());
    WindowedValue<KV<String, Iterable<String>>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertThat(item0.getValue().getValue(), Matchers.contains("v1"));
    assertEquals(new Instant(5), item0.getTimestamp());
    assertThat(item0.getWindows(),
        Matchers.contains(window(-10, 10)));
    WindowedValue<KV<String, Iterable<String>>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertThat(item1.getValue().getValue(), Matchers.containsInAnyOrder("v1", "v2"));
    assertEquals(new Instant(5), item1.getTimestamp());
    assertThat(item1.getWindows(),
        Matchers.contains(window(0, 20)));
    WindowedValue<KV<String, Iterable<String>>> item2 = result.get(2);
    assertEquals("k", item2.getValue().getKey());
    assertThat(item2.getValue().getValue(), Matchers.contains("v2"));
    assertEquals(new Instant(15), item2.getTimestamp());
    assertThat(item2.getWindows(),
        Matchers.contains(window(10, 30)));
  }

  // Windows that overlap but are not identical stay separate groups.
  @Test public void testDiscontiguousWindows() throws Exception {
    DoFnRunner<KV<String, Iterable<WindowedValue<String>>>,
        KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(FixedWindows.<String>of(Duration.millis(10))));
    runner.startBundle();
    runner.processElement(WindowedValue.valueInEmptyWindows(
        KV.of("k", (Iterable<WindowedValue<String>>) Arrays.asList(
            WindowedValue.of(
                "v1",
                new Instant(1),
                Arrays.asList(window(0, 5))),
            WindowedValue.of(
                "v2",
                new Instant(4),
                Arrays.asList(window(1, 5))),
            WindowedValue.of(
                "v3",
                new Instant(4),
                Arrays.asList(window(0, 5)))))));
    runner.finishBundle();
    List<WindowedValue<KV<String, Iterable<String>>>> result = runner.getReceiver(outputTag);
    assertEquals(2, result.size());
    WindowedValue<KV<String, Iterable<String>>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertThat(item0.getValue().getValue(), Matchers.containsInAnyOrder("v1", "v3"));
    assertEquals(new Instant(1), item0.getTimestamp());
    assertThat(item0.getWindows(),
        Matchers.contains(window(0, 5)));
    WindowedValue<KV<String, Iterable<String>>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertThat(item1.getValue().getValue(), Matchers.contains("v2"));
    assertEquals(new Instant(4), item1.getTimestamp());
    assertThat(item1.getWindows(),
        Matchers.contains(window(1, 5)));
  }

  // Overlapping session windows merge ([0,10) and [5,15) become [0,15));
  // a session separated by more than the gap stays distinct.
  @Test public void testSessions() throws Exception {
    DoFnRunner<KV<String, Iterable<WindowedValue<String>>>,
        KV<String, Iterable<String>>, List> runner =
        makeRunner(WindowingStrategy.of(Sessions.<String>withGapDuration(Duration.millis(10))));
    runner.startBundle();
    runner.processElement(WindowedValue.valueInEmptyWindows(
        KV.of("k", (Iterable<WindowedValue<String>>) Arrays.asList(
            WindowedValue.of(
                "v1",
                new Instant(0),
                Arrays.asList(window(0, 10))),
            WindowedValue.of(
                "v2",
                new Instant(5),
                Arrays.asList(window(5, 15))),
            WindowedValue.of(
                "v3",
                new Instant(15),
                Arrays.asList(window(15, 25)))))));
    runner.finishBundle();
    List<WindowedValue<KV<String, Iterable<String>>>> result = runner.getReceiver(outputTag);
    assertEquals(2, result.size());
    WindowedValue<KV<String, Iterable<String>>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertThat(item0.getValue().getValue(), Matchers.containsInAnyOrder("v1", "v2"));
    assertEquals(new Instant(0), item0.getTimestamp());
    assertThat(item0.getWindows(),
        Matchers.contains(window(0, 15)));
    WindowedValue<KV<String, Iterable<String>>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertThat(item1.getValue().getValue(), Matchers.contains("v3"));
    assertEquals(new Instant(15), item1.getTimestamp());
    assertThat(item1.getWindows(),
        Matchers.contains(window(15, 25)));
  }

  // Same session-merging scenario, but values are combined (summed) per
  // merged window instead of collected into an iterable.
  @Test public void testSessionsCombine() throws Exception {
    CombineFn<Long, ?, Long> combineFn = new Sum.SumLongFn();
    DoFnRunner<KV<String, Iterable<WindowedValue<Long>>>,
        KV<String, Long>, List> runner =
        makeRunner(WindowingStrategy.of(Sessions.<String>withGapDuration(Duration.millis(10))),
            combineFn.<String>asKeyedFn());
    runner.startBundle();
    runner.processElement(WindowedValue.valueInEmptyWindows(
        KV.of("k", (Iterable<WindowedValue<Long>>) Arrays.asList(
            WindowedValue.of(
                1L,
                new Instant(0),
                Arrays.asList(window(0, 10))),
            WindowedValue.of(
                2L,
                new Instant(5),
                Arrays.asList(window(5, 15))),
            WindowedValue.of(
                4L,
                new Instant(15),
                Arrays.asList(window(15, 25)))))));
    runner.finishBundle();
    List<WindowedValue<KV<String, Long>>> result = runner.getReceiver(outputTag);
    assertEquals(2, result.size());
    WindowedValue<KV<String, Long>> item0 = result.get(0);
    assertEquals("k", item0.getValue().getKey());
    assertEquals(3L, item0.getValue().getValue().longValue());
    assertEquals(new Instant(0), item0.getTimestamp());
    assertThat(item0.getWindows(), Matchers.contains(window(0, 15)));
    WindowedValue<KV<String, Long>> item1 = result.get(1);
    assertEquals("k", item1.getValue().getKey());
    assertEquals(4L, item1.getValue().getValue().longValue());
    assertEquals(new Instant(15), item1.getTimestamp());
    assertThat(item1.getWindows(), Matchers.contains(window(15, 25)));
  }

  // Builds a runner that groups values into iterables (no combine function).
  private DoFnRunner<KV<String, Iterable<WindowedValue<String>>>,
      KV<String, Iterable<String>>, List> makeRunner(
      WindowingStrategy<? super String, IntervalWindow> windowingStrategy) {
    GroupAlsoByWindowsDoFn<String, String, Iterable<String>, IntervalWindow> fn =
        GroupAlsoByWindowsDoFn.createForIterable(windowingStrategy, StringUtf8Coder.of());
    return makeRunner(windowingStrategy, fn);
  }

  // Builds a runner that combines grouped values with the given combine function.
  private DoFnRunner<KV<String, Iterable<WindowedValue<Long>>>,
      KV<String, Long>, List> makeRunner(
      WindowingStrategy<? super String, IntervalWindow> windowingStrategy,
      KeyedCombineFn<String, Long, ?, Long> combineFn) {
    GroupAlsoByWindowsDoFn<String, Long, Long, IntervalWindow> fn =
        GroupAlsoByWindowsDoFn.create(
            windowingStrategy, combineFn, StringUtf8Coder.of(), BigEndianLongCoder.of());
    return makeRunner(windowingStrategy, fn);
  }

  // Common runner construction: wires the fn to list-backed outputs so tests
  // can read results via getReceiver(outputTag).
  private <VI, VO> DoFnRunner<KV<String, Iterable<WindowedValue<VI>>>,
      KV<String, VO>, List> makeRunner(
      WindowingStrategy<? super String, IntervalWindow> windowingStrategy,
      GroupAlsoByWindowsDoFn<String, VI, VO, IntervalWindow> fn) {
    return
        DoFnRunner.createWithListOutputs(
            PipelineOptionsFactory.create(),
            fn,
            PTuple.empty(),
            (TupleTag<KV<String, VO>>) (TupleTag) outputTag,
            new ArrayList<TupleTag<?>>(),
            execContext.createStepContext("merge"),
            counters.getAddCounterMutator(),
            windowingStrategy);
  }

  // Convenience factory for an [start, end) interval window.
  private BoundedWindow window(long start, long end) {
    return new IntervalWindow(new Instant(start), new Instant(end));
  }
}
| haonaturel/DataflowJavaSDK | sdk/src/test/java/com/google/cloud/dataflow/sdk/util/GroupAlsoByWindowsDoFnTest.java | Java | apache-2.0 | 13,007 |
//// [topLevelLambda.ts]
module M {
var f = () => {this.window;}
}
//// [topLevelLambda.js]
var M;
(function (M) {
var _this = this;
var f = function () {
_this.window;
};
})(M || (M = {}));
| RReverser/TSX | tests/baselines/reference/topLevelLambda.js | JavaScript | apache-2.0 | 225 |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.repo.model;
/**
 * Simple data holder backing the repository login dialog: carries the
 * username/password pair entered by the user.
 */
public class LoginModel {

  private String username;
  private String password;

  /** @return the stored username, or {@code null} if none was set */
  public String getUsername() {
    return username;
  }

  /** @param name the username to store */
  public void setUsername( String name ) {
    this.username = name;
  }

  /** @return the stored password, or {@code null} if none was set */
  public String getPassword() {
    return password;
  }

  /** @param secret the password to store */
  public void setPassword( String secret ) {
    this.password = secret;
  }
}
| AliaksandrShuhayeu/pentaho-kettle | plugins/repositories/core/src/main/java/org/pentaho/di/ui/repo/model/LoginModel.java | Java | apache-2.0 | 1,351 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.ide.eclipse.core.internal;
import static org.apache.sling.ide.artifacts.EmbeddedArtifactLocator.SUPPORT_BUNDLE_SYMBOLIC_NAME;
import static org.apache.sling.ide.artifacts.EmbeddedArtifactLocator.SUPPORT_SOURCE_BUNDLE_SYMBOLIC_NAME;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.sling.ide.artifacts.EmbeddedArtifact;
import org.apache.sling.ide.artifacts.EmbeddedArtifactLocator;
import org.apache.sling.ide.eclipse.core.ISlingLaunchpadServer;
import org.apache.sling.ide.eclipse.core.ServerUtil;
import org.apache.sling.ide.log.Logger;
import org.apache.sling.ide.osgi.OsgiClient;
import org.apache.sling.ide.osgi.OsgiClientException;
import org.apache.sling.ide.serialization.SerializationException;
import org.apache.sling.ide.transport.Batcher;
import org.apache.sling.ide.transport.Command;
import org.apache.sling.ide.transport.Repository;
import org.apache.sling.ide.transport.RepositoryInfo;
import org.apache.sling.ide.transport.ResourceProxy;
import org.apache.sling.ide.transport.Result;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.SubMonitor;
import org.eclipse.debug.core.ILaunch;
import org.eclipse.debug.core.ILaunchManager;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.wst.server.core.IModule;
import org.eclipse.wst.server.core.IServer;
import org.eclipse.wst.server.core.model.IModuleResource;
import org.eclipse.wst.server.core.model.IModuleResourceDelta;
import org.eclipse.wst.server.core.model.ServerBehaviourDelegate;
import org.osgi.framework.Version;
/**
 * Server behaviour delegate for a Sling launchpad server.
 *
 * <p>
 * Responsible for starting and stopping the connection to the remote Sling
 * instance, installing the embedded tooling support bundles, and publishing
 * bundle or content modules to the server.
 * </p>
 */
public class SlingLaunchpadBehaviour extends ServerBehaviourDelegateWithModulePublishSupport {

    // created lazily on first publish; translates workspace resource changes into repository commands
    private ResourceChangeCommandFactory commandFactory;

    // the launch this behaviour belongs to, captured in setupLaunch()
    private ILaunch launch;

    // only created when the server was started in debug mode
    private JVMDebuggerConnection debuggerConnection;

    @Override
    public void stop(boolean force) {
        // tear down the JVM debugger connection first, if one was established
        if (debuggerConnection != null) {
            debuggerConnection.stop(force);
        }
        setServerState(IServer.STATE_STOPPED);
        try {
            ServerUtil.stopRepository(getServer(), new NullProgressMonitor());
        } catch (CoreException e) {
            // not fatal - the server state is already STOPPED at this point
            Activator.getDefault().getPluginLogger().warn("Failed to stop repository", e);
        }
    }

    /**
     * Starts the server connection.
     *
     * <p>
     * Connects to the repository, installs the embedded support bundles and -
     * depending on the launch mode - either attaches the debugger or validates
     * the connection by listing the children of the root node.
     * </p>
     *
     * @param monitor the progress monitor; converted to cover 50 work units
     * @throws CoreException if the connection can not be established
     */
    public void start(IProgressMonitor monitor) throws CoreException {

        boolean success = false;
        Result<ResourceProxy> result = null;
        monitor = SubMonitor.convert(monitor, "Starting server", 10).setWorkRemaining(50);

        Repository repository;
        RepositoryInfo repositoryInfo;
        OsgiClient client;
        try {
            repository = ServerUtil.connectRepository(getServer(), monitor);
            repositoryInfo = ServerUtil.getRepositoryInfo(getServer(), monitor);
            client = Activator.getDefault().getOsgiClientFactory().createOsgiClient(repositoryInfo);
        } catch (CoreException e) {
            setServerState(IServer.STATE_STOPPED);
            throw e;
        } catch (URISyntaxException e) {
            setServerState(IServer.STATE_STOPPED);
            throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, e.getMessage(), e));
        }

        monitor.worked(10); // 10/50 done

        try {
            EmbeddedArtifactLocator artifactLocator = Activator.getDefault().getArtifactLocator();

            installBundle(monitor, client, artifactLocator.loadSourceSupportBundle(), SUPPORT_SOURCE_BUNDLE_SYMBOLIC_NAME); // 15/50 done
            installBundle(monitor, client, artifactLocator.loadToolingSupportBundle(), SUPPORT_BUNDLE_SYMBOLIC_NAME); // 20/50 done
        } catch (IOException | OsgiClientException e) {
            // missing support bundles degrade functionality but do not prevent startup
            Activator.getDefault().getPluginLogger()
                    .warn("Failed reading the installation support bundle", e);
        }

        try {
            if (getServer().getMode().equals(ILaunchManager.DEBUG_MODE)) {
                debuggerConnection = new JVMDebuggerConnection(client);
                success = debuggerConnection.connectInDebugMode(launch, getServer(), SubMonitor.convert(monitor, 30));
                // 50/50 done
            } else {
                // sanity-check the connection by listing the root node's children
                Command<ResourceProxy> command = repository.newListChildrenNodeCommand("/");
                result = command.execute();
                success = result.isSuccess();

                monitor.worked(30); // 50/50 done
            }

            if (success) {
                setServerState(IServer.STATE_STARTED);
            } else {
                setServerState(IServer.STATE_STOPPED);
                String message = "Unable to connect to the Server. Please make sure a server instance is running ";
                if (result != null) {
                    message += " (" + result.toString() + ")";
                }
                throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, message));
            }
        } catch (CoreException | RuntimeException e) {
            setServerState(IServer.STATE_STOPPED);
            throw e;
        } finally {
            monitor.done();
        }
    }

    /**
     * Installs an embedded support bundle into the remote OSGi framework when
     * the remote version is missing or outdated, then records the deployed
     * version on the server.
     *
     * @param monitor the progress monitor
     * @param client the OSGi client used to query and install bundles
     * @param bundle the embedded bundle artifact to install
     * @param bundleSymbolicName the bundle's symbolic name
     * @throws OsgiClientException if the remote OSGi interaction fails
     * @throws IOException if the embedded bundle can not be read
     */
    private void installBundle(IProgressMonitor monitor, OsgiClient client, final EmbeddedArtifact bundle,
            String bundleSymbolicName) throws OsgiClientException, IOException {

        Version embeddedVersion = new Version(bundle.getOsgiFriendlyVersion());

        monitor.setTaskName("Installing " + bundleSymbolicName + " " + embeddedVersion);
        Version remoteVersion = client.getBundleVersion(bundleSymbolicName);
        monitor.worked(2);

        ISlingLaunchpadServer launchpadServer = (ISlingLaunchpadServer) getServer().loadAdapter(SlingLaunchpadServer.class,
                monitor);

        // Install when the remote bundle is missing, older, or a SNAPSHOT of the
        // same version (a same-version SNAPSHOT may contain newer code).
        // BUGFIX: the same-version check must be AND-ed with the SNAPSHOT
        // qualifier check; with the previous OR, equal release versions were
        // re-installed on every server start, and a SNAPSHOT embedded bundle
        // would even override a newer remote bundle.
        if (remoteVersion == null || remoteVersion.compareTo(embeddedVersion) < 0
                || (remoteVersion.equals(embeddedVersion) && "SNAPSHOT".equals(embeddedVersion.getQualifier()))) {
            try (InputStream contents = bundle.openInputStream()) {
                client.installBundle(contents, bundle.getName());
            }
            remoteVersion = embeddedVersion;
        }
        launchpadServer.setBundleVersion(bundleSymbolicName, remoteVersion,
                monitor);
        monitor.worked(3);
    }

    // TODO refine signature
    /**
     * Prepares the launch: remembers the launch object and moves the server
     * into the STARTING state with the requested mode.
     */
    public void setupLaunch(ILaunch launch, String launchMode, IProgressMonitor monitor) throws CoreException {
        // TODO check that ports are free

        this.launch = launch;
        setServerRestartState(false);
        setServerState(IServer.STATE_STARTING);
        setMode(launchMode);
    }

    @Override
    protected void publishModule(int kind, int deltaKind, IModule[] module, IProgressMonitor monitor)
            throws CoreException {

        Logger logger = Activator.getDefault().getPluginLogger();

        if (commandFactory == null) {
            commandFactory = new ResourceChangeCommandFactory(Activator.getDefault().getSerializationManager());
        }

        logger.trace(traceOperation(kind, deltaKind, module));

        // nothing can be published while the server is down
        if (getServer().getServerState() == IServer.STATE_STOPPED) {
            logger.trace("Ignoring request to publish module when the server is stopped");
            setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
            return;
        }

        // incremental/auto publishes with no delta mean another module changed
        if ((kind == IServer.PUBLISH_AUTO || kind == IServer.PUBLISH_INCREMENTAL)
                && deltaKind == ServerBehaviourDelegate.NO_CHANGE) {
            logger.trace("Ignoring request to publish the module when no resources have changed; most likely another module has changed");
            setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
            return;
        }

        if (kind == IServer.PUBLISH_FULL && deltaKind == ServerBehaviourDelegate.REMOVED) {
            logger.trace("Ignoring request to unpublish all of the module resources");
            setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
            return;
        }

        if (ProjectHelper.isBundleProject(module[0].getProject())) {
            String serverMode = getServer().getMode();
            if (!serverMode.equals(ILaunchManager.DEBUG_MODE) || kind == IServer.PUBLISH_CLEAN) {
                // in debug mode, we rely on the hotcode replacement feature of eclipse/jvm
                // otherwise, for run and profile modes we explicitly publish the bundle module
                // TODO: make this configurable as part of the server config
                // SLING-3655 : when doing PUBLISH_CLEAN, the bundle deployment mechanism should
                // still be triggered
                publishBundleModule(module, monitor);
            }
        } else if (ProjectHelper.isContentProject(module[0].getProject())) {
            try {
                publishContentModule(kind, deltaKind, module, monitor);
            } catch (SerializationException e) {
                throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Serialization error for "
                        + traceOperation(kind, deltaKind, module), e));
            } catch (IOException e) {
                throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, "IO error for "
                        + traceOperation(kind, deltaKind, module), e));
            }
        }
    }

    /**
     * Builds a human-readable description of a publish operation (publish kind,
     * delta kind, server state and modules) for trace logging.
     */
    private String traceOperation(int kind, int deltaKind, IModule[] module) {

        StringBuilder trace = new StringBuilder();
        trace.append("SlingLaunchpadBehaviour.publishModule(");

        switch (kind) {
            case IServer.PUBLISH_CLEAN:
                trace.append("PUBLISH_CLEAN, ");
                break;
            case IServer.PUBLISH_INCREMENTAL:
                trace.append("PUBLISH_INCREMENTAL, ");
                break;
            case IServer.PUBLISH_AUTO:
                trace.append("PUBLISH_AUTO, ");
                break;
            case IServer.PUBLISH_FULL:
                trace.append("PUBLISH_FULL, ");
                break;
            default:
                trace.append("UNKNOWN - ").append(kind).append(", ");
        }

        switch (deltaKind) {
            case ServerBehaviourDelegate.ADDED:
                trace.append("ADDED, ");
                break;
            case ServerBehaviourDelegate.CHANGED:
                trace.append("CHANGED, ");
                break;
            case ServerBehaviourDelegate.NO_CHANGE:
                trace.append("NO_CHANGE, ");
                break;
            case ServerBehaviourDelegate.REMOVED:
                trace.append("REMOVED, ");
                break;
            default:
                // message typo fixed: was "UNKONWN"
                trace.append("UNKNOWN - ").append(deltaKind).append(", ");
                break;
        }

        switch (getServer().getServerState()) {
            case IServer.STATE_STARTED:
                trace.append("STARTED, ");
                break;
            case IServer.STATE_STARTING:
                trace.append("STARTING, ");
                break;
            case IServer.STATE_STOPPED:
                trace.append("STOPPED, ");
                break;
            case IServer.STATE_STOPPING:
                trace.append("STOPPING, ");
                break;
            default:
                // message typo fixed: was "UNKONWN"
                trace.append("UNKNOWN - ").append(getServer().getServerState()).append(", ");
                break;
        }

        trace.append(Arrays.toString(module)).append(")");

        return trace.toString();
    }

    /**
     * Publishes an OSGi bundle module, either by pointing the remote support
     * bundle at the locally compiled output directory or by building a jar
     * in-memory and uploading it.
     */
    private void publishBundleModule(IModule[] module, IProgressMonitor monitor) throws CoreException {
        final IProject project = module[0].getProject();
        boolean installLocally = getServer().getAttribute(ISlingLaunchpadServer.PROP_INSTALL_LOCALLY, true);
        monitor.beginTask("deploying via local install", 5);

        try {
            OsgiClient osgiClient = Activator.getDefault().getOsgiClientFactory()
                    .createOsgiClient(ServerUtil.getRepositoryInfo(getServer(), monitor));

            // the remote support bundle is a precondition for bundle deployment
            Version supportBundleVersion = osgiClient
                    .getBundleVersion(EmbeddedArtifactLocator.SUPPORT_BUNDLE_SYMBOLIC_NAME);
            monitor.worked(1);
            if (supportBundleVersion == null) {
                throw new CoreException(new Status(Status.ERROR, Activator.PLUGIN_ID,
                        "The support bundle was not found, please install it via the server properties page."));
            }

            IJavaProject javaProject = ProjectHelper.asJavaProject(project);

            IFolder outputFolder = (IFolder) project.getWorkspace().getRoot().findMember(javaProject.getOutputLocation());
            IPath outputLocation = outputFolder.getLocation();
            // ensure the MANIFEST.MF exists - if it doesn't then let the publish fail with a warn (instead of an error)
            IResource manifest = outputFolder.findMember("META-INF/MANIFEST.MF");
            if (manifest == null) {
                Activator.getDefault().getPluginLogger().warn("Project " + project + " does not have a META-INF/MANIFEST.MF (yet) - not publishing this time");
                Activator.getDefault().issueConsoleLog("InstallBundle", outputFolder.getLocation().toOSString(), "Project " + project + " does not have a META-INF/MANIFEST.MF (yet) - not publishing this time");
                monitor.done();
                setModulePublishState(module, IServer.PUBLISH_STATE_FULL);
                return;
            }

            monitor.worked(1);

            // TODO SLING-3767:
            // osgiClient must have a timeout!!!
            if (installLocally) {
                osgiClient.installLocalBundle(outputLocation.toOSString());
                monitor.worked(3);
            } else {
                // build an in-memory jar from the compiled output and upload it;
                // BUGFIX: the stream was previously never closed (resource leak)
                JarBuilder builder = new JarBuilder();
                try (InputStream bundle = builder.buildJar(outputFolder)) {
                    monitor.worked(1);
                    osgiClient.installLocalBundle(bundle, outputFolder.getLocation().toOSString());
                }
                monitor.worked(2);
            }

            setModulePublishState(module, IServer.PUBLISH_STATE_NONE);

        } catch (URISyntaxException e1) {
            throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, e1.getMessage(), e1));
        } catch (OsgiClientException e1) {
            throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, "Failed installing bundle : "
                    + e1.getMessage(), e1));
        } catch (IOException e1) {
            // closing the in-memory jar stream should not fail; surface it if it does
            throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID, e1.getMessage(), e1));
        } finally {
            monitor.done();
        }
    }

    /**
     * Publishes a content module by translating the module's resource deltas
     * into repository commands, batching them, and executing the batch.
     *
     * @throws CoreException if the repository is unavailable or a command fails
     * @throws SerializationException if a resource can not be serialized
     * @throws IOException if reading a resource fails
     */
    private void publishContentModule(int kind, int deltaKind, IModule[] module, IProgressMonitor monitor)
            throws CoreException, SerializationException, IOException {

        Logger logger = Activator.getDefault().getPluginLogger();

        Repository repository = ServerUtil.getConnectedRepository(getServer(), monitor);
        if (repository == null) {
            throw new CoreException(new Status(IStatus.ERROR, Activator.PLUGIN_ID,
                    "Unable to find a repository for server " + getServer()));
        }
        Batcher batcher = Activator.getDefault().getBatcherFactory().createBatcher();
        // TODO it would be more efficient to have a module -> filter mapping
        // it would be simpler to implement this in SlingContentModuleAdapter, but
        // the behaviour for resources being filtered out is deletion, and that
        // would be an incorrect ( or at least surprising ) behaviour at development time

        List<IModuleResource> addedOrUpdatedResources = new ArrayList<>();
        IModuleResource[] allResources = getResources(module);
        Set<IPath> handledPaths = new HashSet<>();

        switch (deltaKind) {
            case ServerBehaviourDelegate.CHANGED:
                for (IModuleResourceDelta resourceDelta : getPublishedResourceDelta(module)) {

                    StringBuilder deltaTrace = new StringBuilder();
                    deltaTrace.append("- processing delta kind ");

                    switch (resourceDelta.getKind()) {
                        case IModuleResourceDelta.ADDED:
                            deltaTrace.append("ADDED ");
                            break;
                        case IModuleResourceDelta.CHANGED:
                            deltaTrace.append("CHANGED ");
                            break;
                        case IModuleResourceDelta.NO_CHANGE:
                            deltaTrace.append("NO_CHANGE ");
                            break;
                        case IModuleResourceDelta.REMOVED:
                            deltaTrace.append("REMOVED ");
                            break;
                        default:
                            deltaTrace.append("UNKNOWN - ").append(resourceDelta.getKind());
                    }

                    deltaTrace.append("for resource ").append(resourceDelta.getModuleResource());

                    logger.trace(deltaTrace.toString());

                    switch (resourceDelta.getKind()) {
                        case IModuleResourceDelta.ADDED:
                        case IModuleResourceDelta.CHANGED:
                        case IModuleResourceDelta.NO_CHANGE: // TODO is this needed?
                            Command<?> command = addFileCommand(repository, resourceDelta.getModuleResource());
                            if (command != null) {
                                // the parent must exist in the repository before the child
                                ensureParentIsPublished(resourceDelta.getModuleResource(), repository, allResources,
                                        handledPaths, batcher);
                                addedOrUpdatedResources.add(resourceDelta.getModuleResource());
                            }
                            enqueue(batcher, command);
                            break;
                        case IModuleResourceDelta.REMOVED:
                            enqueue(batcher, removeFileCommand(repository, resourceDelta.getModuleResource()));
                            break;
                    }
                }
                break;

            case ServerBehaviourDelegate.ADDED:
            case ServerBehaviourDelegate.NO_CHANGE: // TODO is this correct ?
                for (IModuleResource resource : getResources(module)) {
                    Command<?> command = addFileCommand(repository, resource);
                    enqueue(batcher, command);
                    if (command != null) {
                        addedOrUpdatedResources.add(resource);
                    }
                }
                break;
            case ServerBehaviourDelegate.REMOVED:
                for (IModuleResource resource : getResources(module)) {
                    enqueue(batcher, removeFileCommand(repository, resource));
                }
                break;
        }

        // reorder the child nodes at the end, when all create/update/deletes have been processed
        for (IModuleResource resource : addedOrUpdatedResources) {
            enqueue(batcher, reorderChildNodesCommand(repository, resource));
        }

        execute(batcher);

        // set state to published
        super.publishModule(kind, deltaKind, module, monitor);
        setModulePublishState(module, IServer.PUBLISH_STATE_NONE);
        // setServerPublishState(IServer.PUBLISH_STATE_NONE);
    }

    /** Executes all batched commands, failing fast on the first unsuccessful result. */
    private void execute(Batcher batcher) throws CoreException {
        for (Command<?> command : batcher.get()) {
            Result<?> result = command.execute();

            if (!result.isSuccess()) {
                // TODO - proper error logging
                throw new CoreException(new Status(Status.ERROR, Activator.PLUGIN_ID, "Failed publishing path="
                        + command.getPath() + ", result=" + result.toString()));
            }
        }
    }

    /**
     * Ensures that the parent of this resource has been published to the repository
     *
     * <p>
     * Note that the parents explicitly do not have their child nodes reordered, this will happen when they are
     * published due to a resource change
     * </p>
     *
     * @param moduleResource the current resource
     * @param repository the repository to publish to
     * @param allResources all of the module's resources
     * @param handledPaths the paths that have been handled already in this publish operation, but possibly not
     *            registered as published
     * @param batcher the batcher to enqueue commands into
     * @throws IOException
     * @throws SerializationException
     * @throws CoreException
     */
    private void ensureParentIsPublished(IModuleResource moduleResource, Repository repository,
            IModuleResource[] allResources, Set<IPath> handledPaths, Batcher batcher)
            throws CoreException, SerializationException, IOException {

        Logger logger = Activator.getDefault().getPluginLogger();

        IPath currentPath = moduleResource.getModuleRelativePath();

        logger.trace("Ensuring that parent of path {0} is published", currentPath);

        // we assume the root is always published
        if (currentPath.segmentCount() == 0) {
            logger.trace("Path {0} can not have a parent, skipping", currentPath);
            return;
        }

        IPath parentPath = currentPath.removeLastSegments(1);

        // already published by us, a parent of another resource that was published in this execution
        if (handledPaths.contains(parentPath)) {
            logger.trace("Parent path {0} was already handled, skipping", parentPath);
            return;
        }

        for (IModuleResource maybeParent : allResources) {
            if (maybeParent.getModuleRelativePath().equals(parentPath)) {
                // handle the parent's parent first, if needed
                ensureParentIsPublished(maybeParent, repository, allResources, handledPaths, batcher);
                // create this resource
                enqueue(batcher, addFileCommand(repository, maybeParent));
                handledPaths.add(maybeParent.getModuleRelativePath());
                logger.trace("Ensured that resource at path {0} is published", parentPath);
                return;
            }
        }

        throw new IllegalArgumentException("Resource at " + moduleResource.getModuleRelativePath()
                + " has parent path " + parentPath + " but no resource with that path is in the module's resources.");
    }

    /** Adds the command to the batcher, silently ignoring nulls (filtered resources). */
    private void enqueue(Batcher batcher, Command<?> command) {
        if (command == null) {
            return;
        }
        batcher.add(command);
    }

    /** Creates an add/update command for the resource, or null when it has no workspace backing. */
    private Command<?> addFileCommand(Repository repository, IModuleResource resource) throws CoreException,
            SerializationException, IOException {

        IResource res = getResource(resource);

        if (res == null) {
            return null;
        }

        return commandFactory.newCommandForAddedOrUpdated(repository, res);
    }

    /** Creates a child-node reorder command for the resource, or null when it has no workspace backing. */
    private Command<?> reorderChildNodesCommand(Repository repository, IModuleResource resource) throws CoreException,
            SerializationException, IOException {

        IResource res = getResource(resource);

        if (res == null) {
            return null;
        }

        return commandFactory.newReorderChildNodesCommand(repository, res);
    }

    /**
     * Adapts a module resource to the underlying workspace IFile or IFolder,
     * returning null when neither adapter is available.
     */
    private IResource getResource(IModuleResource resource) {

        IResource file = (IFile) resource.getAdapter(IFile.class);
        if (file == null) {
            file = (IFolder) resource.getAdapter(IFolder.class);
        }

        if (file == null) {
            // Usually happens on server startup, it seems to be safe to ignore for now
            Activator.getDefault().getPluginLogger()
                    .trace("Got null {0} and {1} for {2}", IFile.class.getSimpleName(),
                            IFolder.class.getSimpleName(), resource);
            return null;
        }

        return file;
    }

    /** Creates a removal command for the resource, or null when it has no workspace backing. */
    private Command<?> removeFileCommand(Repository repository, IModuleResource resource)
            throws SerializationException, IOException, CoreException {

        IResource deletedResource = getResource(resource);

        if (deletedResource == null) {
            return null;
        }

        return commandFactory.newCommandForRemovedResources(repository, deletedResource);
    }
}
| Nimco/sling | tooling/ide/eclipse-core/src/org/apache/sling/ide/eclipse/core/internal/SlingLaunchpadBehaviour.java | Java | apache-2.0 | 25,793 |
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
//
package org.xtuml.bp.debug.ui.model;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.core.resources.IMarker;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IWorkspaceRunnable;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.debug.core.DebugException;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.model.Breakpoint;
import org.eclipse.debug.core.model.IBreakpoint;
import org.eclipse.jface.resource.ImageDescriptor;
import org.eclipse.ui.model.IWorkbenchAdapter;
import org.xtuml.bp.core.Breakpoint_c;
import org.xtuml.bp.core.Condition_c;
import org.xtuml.bp.core.Gd_c;
import org.xtuml.bp.core.Instance_c;
import org.xtuml.bp.core.ModelClass_c;
import org.xtuml.bp.core.Ooaofooa;
import org.xtuml.bp.core.ProvidedOperation_c;
import org.xtuml.bp.core.ProvidedSignal_c;
import org.xtuml.bp.core.RequiredOperation_c;
import org.xtuml.bp.core.RequiredSignal_c;
import org.xtuml.bp.core.common.NonRootModelElement;
import org.xtuml.bp.debug.ui.IBPDebugUIPluginConstants;
import org.xtuml.bp.debug.ui.ModelElementLocation;
/**
 * Base class for all BridgePoint (xtUML) model breakpoints.
 *
 * <p>
 * Breakpoint state (location, condition, hit count, flags, ...) is persisted
 * as attributes on an {@link IMarker} attached to the model root's persistence
 * file; the attribute keys are defined on {@link IBPBreakpoint}. The class
 * also implements {@link IWorkbenchAdapter} so breakpoints can be displayed in
 * workbench views.
 * </p>
 */
public abstract class BPBreakpoint extends Breakpoint implements IBPBreakpoint, IWorkbenchAdapter {

    /**
     * List of active instance filters for this breakpoint
     * (list of <code>Instance_c</code>).
     */
    // NOTE(review): kept as a raw List for compatibility with existing subclasses
    protected List fInstanceFilters = null;

    /**
     * Empty instance filters array.
     */
    protected static final Instance_c[] fgEmptyInstanceFilters = new Instance_c[0];

    /**
     * Default constructor is required for the breakpoint manager
     * to re-create persisted breakpoints. After instantiating a breakpoint,
     * the <code>setMarker(...)</code> method is called to restore
     * this breakpoint's attributes.
     */
    public BPBreakpoint() {
    }

    /**
     * Creates a new breakpoint attached to the given model element.
     *
     * @param markerType the marker type id to create
     * @param nrme the model element the breakpoint is set on
     * @param flags_all initial value for the FLAGS marker attribute
     * @throws CoreException if unable to create the breakpoint
     */
    public BPBreakpoint(final String markerType, NonRootModelElement nrme, final int flags_all)
            throws CoreException {
        // the marker lives on the file that persists the element's model root
        IResource resource = (IResource) nrme.getModelRoot().getPersistenceFile();
        init(resource, markerType, nrme, flags_all, "", 0); //$NON-NLS-1$
        setHitCount(0);
    }

    /**
     * Creates the underlying marker inside a workspace operation and populates
     * its attributes (model root id, element id, location, condition, flags).
     *
     * @param resource the resource to attach the marker to
     * @param markerType the marker type id
     * @param nrme the model element the breakpoint targets
     * @param flags_all initial FLAGS value
     * @param optionalAttribute extra attribute name, or "" for none
     * @param optionalAttrValue value for the extra attribute
     * @throws DebugException if the workspace operation fails
     */
    protected void init(final IResource resource, final String markerType,
            NonRootModelElement nrme, final int flags_all,
            final String optionalAttribute, final int optionalAttrValue) throws DebugException {
        final String mr_id = nrme.getModelRoot().getId();
        final String location = ModelElementLocation.getModelElementLocation(nrme);
        String ooa_id = String.valueOf(nrme.Get_ooa_id());
        // interface operations/signals have a null ooa_id; fall back to their own id
        if (ooa_id.equals(Gd_c.Null_unique_id().toString())) {
            if (nrme instanceof RequiredOperation_c) {
                ooa_id = ((RequiredOperation_c) nrme).getId().toString();
            } else if (nrme instanceof RequiredSignal_c) {
                ooa_id = ((RequiredSignal_c) nrme).getId().toString();
            } else if (nrme instanceof ProvidedOperation_c) {
                ooa_id = ((ProvidedOperation_c) nrme).getId().toString();
            } else if (nrme instanceof ProvidedSignal_c) {
                ooa_id = ((ProvidedSignal_c) nrme).getId().toString();
            }
        }
        final String final_id = ooa_id;
        //final ModelElementID modelElementID = ModelAdapter.getModelElementAdapter(nrme).createModelElementID(nrme);
        IWorkspaceRunnable runnable = new IWorkspaceRunnable() {
            public void run(IProgressMonitor monitor) throws CoreException {
                IMarker marker = resource.createMarker(markerType);
                setMarker(marker);
                marker.setAttribute(IBreakpoint.ENABLED, true);
                marker.setAttribute(IBreakpoint.ID, getModelIdentifier() + "/" + location);
                marker.setAttribute(MODELROOT_ID, mr_id);
                marker.setAttribute(MODELELEMENT_ID, final_id);
                marker.setAttribute(LOCATION, location);
                marker.setAttribute(CONDITION, "");
                marker.setAttribute(CONDITION_ENABLED, false);
                marker.setAttribute(FLAGS, flags_all);
                if (!optionalAttribute.equals("")) { //$NON-NLS-1$
                    marker.setAttribute(optionalAttribute, optionalAttrValue);
                }
                setHitCount(0);
            }
        };
        run(getMarkerRule(resource), runnable);
    }

    public String getModelIdentifier() {
        return IBPDebugUIPluginConstants.PLUGIN_ID;
    }

    /* (non-Javadoc)
     * @see org.xtuml.bp.debug.ui.model.IBPBreakpoint#getTypeName()
     */
    public String getTypeName() throws CoreException {
        return "Class:";
    }

    /**
     * Updates the LOCATION attribute if the new location differs.
     */
    public void setLocation(String location) throws CoreException {
        // BUGFIX: previously compared against getCondition() (copy-paste from
        // setCondition), which skipped the update whenever the new location
        // happened to equal the breakpoint's condition string.
        if (!location.equals(getLocation())) {
            setAttribute(LOCATION, location);
        }
    }

    public String getLocation() throws CoreException {
        return ensureMarker().getAttribute(LOCATION, "");
    }

    /* (non-Javadoc)
     * @see org.xtuml.bp.debug.ui.model.IBPBreakpoint#setHitCount(int)
     */
    public void setHitCount(int hitCount) throws CoreException {
        if (hitCount != getHitCount()) {
            setAttribute(HIT_COUNT, hitCount);
            // keep the marker's display message in sync with the hit count
            if (hitCount > 0) {
                String message = getLocation() +
                        " [hit count: " + hitCount + "]";
                ensureMarker().setAttribute(IMarker.MESSAGE, message);
            }
            else {
                String message = getLocation();
                ensureMarker().setAttribute(IMarker.MESSAGE, message);
            }
        }
    }

    /* (non-Javadoc)
     * @see org.xtuml.bp.debug.ui.model.IBPBreakpoint#getHitCount()
     */
    public int getHitCount() throws CoreException {
        return ensureMarker().getAttribute(HIT_COUNT, -1);
    }

    /** Updates the CONDITION attribute if the new condition differs. */
    public void setCondition(String condition) throws CoreException {
        if (!condition.equals(getCondition())) {
            setAttribute(CONDITION, condition);
        }
    }

    public String getCondition() throws CoreException {
        return ensureMarker().getAttribute(CONDITION, "");
    }

    public boolean isConditionEnabled() throws CoreException {
        return ensureMarker().getAttribute(CONDITION_ENABLED, false);
    }

    public void setConditionEnabled(boolean enableCondition) throws CoreException {
        if (isConditionEnabled() != enableCondition) {
            setAttribute(CONDITION_ENABLED, enableCondition);
        }
    }

    /** Subtypes that support conditions override this to return true. */
    public boolean supportsCondition() {
        return false;
    }

    public boolean supportsHitCount() {
        return true;
    }

    /**
     * Tests a single bit in the FLAGS marker attribute.
     *
     * @param flag the bit to test
     * @param all_flags default FLAGS value used when the attribute is missing
     * @param defaultValue returned when no marker exists
     */
    public boolean getFlag(int flag, int all_flags, boolean defaultValue) {
        IMarker m = getMarker();
        if (m != null) {
            int flags = m.getAttribute(FLAGS, all_flags);
            return (flags & flag) != 0;
        }
        return defaultValue;
    }

    /**
     * Sets or clears a single bit in the FLAGS marker attribute.
     *
     * @param condition true to set the bit, false to clear it
     * @param flag the bit to modify
     * @param all_flags default FLAGS value used when the attribute is missing
     */
    public void setFlag(boolean condition, int flag, int all_flags) throws CoreException {
        IMarker m = getMarker();
        if (m != null) {
            int flags = m.getAttribute(FLAGS, all_flags);
            if (condition) {
                flags = flags | flag;
            }
            else {
                flags = flags & ~flag;
            }
            m.setAttribute(FLAGS, flags);
        }
    }

    /** Returns the display text: subtype detail followed by the marker message. */
    public String getText() {
        IMarker m = getMarker();
        if (m != null) {
            return getTextDetail() + " " + m.getAttribute(IMarker.MESSAGE, "");
        }
        return "";
    }

    /** Subtypes that support instance filters override this to return true. */
    public boolean supportsInstanceFilters() {
        return false;
    }

    public Instance_c[] getInstanceFilters() {
        if (fInstanceFilters == null || fInstanceFilters.isEmpty()) {
            return fgEmptyInstanceFilters;
        }
        return (Instance_c[]) fInstanceFilters.toArray(new Instance_c[fInstanceFilters.size()]);
    }

    public void clearInstanceFilters() {
        if (fInstanceFilters != null) {
            fInstanceFilters.clear();
            fInstanceFilters = null;
            fireChanged();
        }
    }

    public void addInstanceFilter(Instance_c object) {
        if (fInstanceFilters == null) {
            fInstanceFilters = new ArrayList();
        }
        fInstanceFilters.add(object);
        fireChanged();
    }

    /**
     * Change notification when there are no marker changes. If the marker
     * does not exist, do not fire a change notification (the marker may not
     * exist if the associated project was closed).
     */
    protected void fireChanged() {
        if (markerExists()) {
            DebugPlugin.getDefault().getBreakpointManager().fireBreakpointChanged(this);
        }
    }

    /** Returns all model classes of the model root this breakpoint belongs to. */
    public ModelClass_c[] getAllClasses() {
        IMarker m = getMarker();
        if (m != null) {
            String mr_id = m.getAttribute(MODELROOT_ID, ""); //$NON-NLS-1$
            Ooaofooa x = Ooaofooa.getInstance(mr_id);
            ModelClass_c[] ret_val = ModelClass_c.ModelClassInstances(x);
            return ret_val;
        }
        return new ModelClass_c[0];
    }

    // IWorkbenchAdapter: breakpoints have no children in workbench views
    public Object[] getChildren(Object o) {
        return null;
    }

    public ImageDescriptor getImageDescriptor(Object object) {
        return null;
    }

    public String getLabel(Object o) {
        // more specific labels will be supplied by overrides in subtypes
        return "Breakpoint";
    }

    public Object getParent(Object o) {
        return null;
    }

    /** @return whether the global breakpoint manager is currently enabled */
    protected boolean managerEnabled() {
        return DebugPlugin.getDefault().getBreakpointManager().isEnabled();
    }

    /**
     * Pushes a marker-attribute change through to the in-model breakpoint
     * instance (enabled state, hit count, condition enablement, condition
     * expression).
     *
     * @param bp the model-level breakpoint to update
     * @param string the marker attribute name that changed
     * @param newAttr the new attribute value
     */
    public void modifyTargetBreakpoint(Breakpoint_c bp, String string, Object newAttr) {
        if (string.equals(ENABLED)) {
            boolean newValue = ((Boolean) newAttr).booleanValue();
            // a disabled breakpoint manager overrides the individual enablement
            if (!managerEnabled()) {
                newValue = false;
            }
            bp.setEnabled(newValue);
        }
        else if (string.equals(HIT_COUNT)) {
            int newValue = ((Integer) newAttr).intValue();
            bp.setTarget_hit_count(newValue);
        }
        else if (string.equals(CONDITION_ENABLED)) {
            boolean newValue = ((Boolean) newAttr).booleanValue();
            bp.setCondition_enabled(newValue);
        }
        else if (string.equals(CONDITION)) {
            String newValue = (String) newAttr;
            // create the condition instance lazily on first use
            Condition_c cond = Condition_c.getOneBP_CONOnR3100(bp);
            if (cond == null) {
                cond = new Condition_c(bp.getModelRoot());
                cond.relateAcrossR3100To(bp);
            }
            cond.setExpression(newValue);
        }
    }

    protected String getTextDetail() {
        // Subtypes should override this to provide more
        // detail than just the data stored in the Marker
        return "";
    }

    /**
     * Deletes this breakpoint's underlying marker, and removes
     * this breakpoint from the breakpoint manager.
     *
     * @override
     * @exception CoreException if unable to delete this breakpoint's
     *  underlying marker
     */
    public void delete() throws CoreException {
        deleteTargetBreakpoint();
        super.delete();
    }
}
| rmulvey/bridgepoint | src/org.xtuml.bp.debug.ui/src/org/xtuml/bp/debug/ui/model/BPBreakpoint.java | Java | apache-2.0 | 10,881 |
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Core;
using UnityEditor;
using UnityEngine;
using Event = UnityEngine.Event;
namespace UnityTest
{
    /// <summary>
    /// Base class for a single selectable line in the unit test runner's tree
    /// view. Wraps an NUnit <c>Test</c>, orders lines by their unique test
    /// name, manages the shared selection, and delegates actual drawing to
    /// subclasses via <c>DrawLine</c>.
    /// </summary>
    public abstract class UnitTestRendererLine : IComparable<UnitTestRendererLine>
    {
        // Callback installed by the test runner window; invoked to execute a filtered run.
        public static Action<TestFilter> RunTest;
        // Selection shared by all lines of the view.
        public static List<UnitTestRendererLine> SelectedLines;

        // Set during rendering when user interaction requires a view refresh.
        protected static bool s_Refresh;

        protected static GUIContent s_GUIRunSelected = new GUIContent("Run Selected");
        protected static GUIContent s_GUIRun = new GUIContent("Run");
        protected static GUIContent s_GUITimeoutIcon = new GUIContent(Icons.StopwatchImg, "Timeout");

        // Unique test name; serves as the comparison key in CompareTo.
        protected string m_UniqueId;
        // Fully qualified test name.
        protected internal string m_FullName;
        // Short name displayed in the view (also the default result text).
        protected string m_RenderedName;
        // The wrapped NUnit test.
        protected internal Test m_Test;

        /// <summary>Captures the names and reference of the wrapped test.</summary>
        protected UnitTestRendererLine(Test test)
        {
            m_FullName = test.TestName.FullName;
            m_RenderedName = test.TestName.Name;
            m_UniqueId = test.TestName.UniqueName;

            m_Test = test;
        }

        /// <summary>Orders lines by their unique test name.</summary>
        public int CompareTo(UnitTestRendererLine other)
        {
            return m_UniqueId.CompareTo(other.m_UniqueId);
        }

        /// <summary>
        /// Renders this line at indentation level 0 with a fixed icon size.
        /// </summary>
        /// <returns>true when the view should repaint due to user interaction</returns>
        public bool Render(RenderingOptions options)
        {
            s_Refresh = false;
            EditorGUIUtility.SetIconSize(new Vector2(15, 15));
            Render(0, options);
            EditorGUIUtility.SetIconSize(Vector2.zero);
            return s_Refresh;
        }

        /// <summary>Renders the line at the given indentation; subclasses may add children.</summary>
        protected internal virtual void Render(int indend, RenderingOptions options)
        {
            EditorGUILayout.BeginHorizontal();
            GUILayout.Space(indend * 10);
            DrawLine(SelectedLines.Contains(this), options);
            EditorGUILayout.EndHorizontal();
        }

        /// <summary>
        /// Updates the shared selection: a plain click replaces the selection,
        /// a ctrl/cmd click toggles this line's membership.
        /// </summary>
        protected void OnSelect()
        {
            if (!Event.current.control && !Event.current.command) SelectedLines.Clear();

            if ((Event.current.control || Event.current.command) && SelectedLines.Contains(this))
                SelectedLines.Remove(this);
            else
                SelectedLines.Add(this);
            s_Refresh = true;
        }

        // Subclass hooks: draw the line, report its result state, decide visibility.
        protected abstract void DrawLine(bool isSelected, RenderingOptions options);
        protected internal abstract TestResultState ? GetResult();
        protected internal abstract bool IsVisible(RenderingOptions options);

        /// <summary>Runs the tests identified by the given test-name objects.</summary>
        public void RunTests(object[] testObjectsList)
        {
            RunTest(new TestFilter { objects = testObjectsList });
        }

        /// <summary>Runs the tests identified by the given full names.</summary>
        public void RunTests(string[] testList)
        {
            RunTest(new TestFilter {names = testList});
        }

        /// <summary>Runs every test currently in the shared selection.</summary>
        public void RunSelectedTests()
        {
            RunTest(new TestFilter { objects = SelectedLines.Select(line => line.m_Test.TestName).ToArray() });
        }

        /// <summary>True when at least one line is selected in the view.</summary>
        public bool IsAnySelected
        {
            get
            {
                return SelectedLines.Count > 0;
            }
        }

        /// <summary>Text shown for this line's result; defaults to the rendered name.</summary>
        public virtual string GetResultText()
        {
            return m_RenderedName;
        }
    }
}
| thurn/swapdrop | client/Assets/UnityTestTools/UnitTesting/Editor/NUnit/Renderer/UnitTestRendererLine.cs | C# | apache-2.0 | 3,115 |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.query.processor;
import io.siddhi.core.util.Scheduler;
/**
* Parent interface for Processors which need access to Siddhi {@link Scheduler}
*/
public interface SchedulingProcessor extends Processor {

    /**
     * @return the {@link Scheduler} previously injected via
     *         {@link #setScheduler(Scheduler)}, or {@code null} if none was set
     */
    Scheduler getScheduler();

    /**
     * Injects the {@link Scheduler} this processor should use.
     *
     * @param scheduler the scheduler instance to associate with this processor
     */
    void setScheduler(Scheduler scheduler);
}
| wso2/siddhi | modules/siddhi-core/src/main/java/io/siddhi/core/query/processor/SchedulingProcessor.java | Java | apache-2.0 | 980 |
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.ruleunit;
/**
 * Minimal immutable fact used by rule-unit tests: wraps a single String value
 * that is fixed at construction time.
 */
public class SimpleFact {

    // backing storage for the wrapped value; assigned exactly once
    private final String value;

    /**
     * Creates a fact carrying the given value.
     *
     * @param stringValue the value to wrap; may be {@code null}
     */
    public SimpleFact(final String stringValue) {
        value = stringValue;
    }

    /**
     * Returns the wrapped value exactly as supplied to the constructor.
     *
     * @return the wrapped value (possibly {@code null})
     */
    public String getStringValue() {
        return value;
    }
}
| droolsjbpm/drools | drools-ruleunit/src/test/java/org/drools/ruleunit/SimpleFact.java | Java | apache-2.0 | 887 |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.steps.mapping;
import org.apache.commons.vfs2.FileObject;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabFolder2Adapter;
import org.eclipse.swt.custom.CTabFolderEvent;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.ShellAdapter;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.ObjectLocationSpecificationMethod;
import org.pentaho.di.core.Props;
import org.pentaho.di.core.SourceToTargetMapping;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.gui.SpoonFactory;
import org.pentaho.di.core.gui.SpoonInterface;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.repository.RepositoryElementMetaInterface;
import org.pentaho.di.repository.RepositoryObject;
import org.pentaho.di.repository.RepositoryObjectType;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.mapping.MappingIODefinition;
import org.pentaho.di.trans.steps.mapping.MappingMeta;
import org.pentaho.di.trans.steps.mapping.MappingParameters;
import org.pentaho.di.trans.steps.mapping.MappingValueRename;
import org.pentaho.di.trans.steps.mappinginput.MappingInputMeta;
import org.pentaho.di.trans.steps.mappingoutput.MappingOutputMeta;
import org.pentaho.di.ui.core.dialog.EnterMappingDialog;
import org.pentaho.di.ui.core.dialog.EnterSelectionDialog;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.core.widget.TextVar;
import org.pentaho.di.ui.repository.dialog.SelectObjectDialog;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.di.ui.trans.dialog.TransDialog;
import org.pentaho.di.ui.trans.step.BaseStepDialog;
import org.pentaho.vfs.ui.VfsFileChooserDialog;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class MappingDialog extends BaseStepDialog implements StepDialogInterface {
private static Class<?> PKG = MappingMeta.class; // for i18n purposes, needed by Translator2!!
private MappingMeta mappingMeta;
// File
//
private Button radioFilename;
private Button wbbFilename;
private TextVar wFilename;
// Repository by name
//
private Button radioByName;
private TextVar wTransname, wDirectory;
private Button wbTrans;
// Repository by reference
//
private Button radioByReference;
private Button wbByReference;
private TextVar wByReference;
private CTabFolder wTabFolder;
private TransMeta mappingTransMeta = null;
protected boolean transModified;
private ModifyListener lsMod;
private int middle;
private int margin;
private MappingParameters mappingParameters;
private List<MappingIODefinition> inputMappings;
private List<MappingIODefinition> outputMappings;
private Button wAddInput;
private Button wAddOutput;
private ObjectId referenceObjectId;
private ObjectLocationSpecificationMethod specificationMethod;
private Button wMultiInput, wMultiOutput;
/**
 * Callback implemented by the per-tab helper classes: copies the tab's
 * current widget state back into its backing metadata object.
 */
private interface ApplyChanges {
  public void applyChanges();
}
/**
 * Binds the widgets of the "Parameters" tab to a {@link MappingParameters}
 * object and pushes the UI state back into it on {@link #applyChanges()}.
 */
private class MappingParametersTab implements ApplyChanges {
  private TableView wMappingParameters;
  private MappingParameters parameters;
  private Button wInheritAll;

  public MappingParametersTab( TableView wMappingParameters, Button wInheritAll, MappingParameters parameters ) {
    this.wMappingParameters = wMappingParameters;
    this.wInheritAll = wInheritAll;
    this.parameters = parameters;
  }

  public void applyChanges() {
    // Collect every non-empty grid row into parallel variable/input-field arrays.
    int rowCount = wMappingParameters.nrNonEmpty();
    String[] variableNames = new String[ rowCount ];
    String[] fieldNames = new String[ rowCount ];
    //CHECKSTYLE:Indentation:OFF
    for ( int row = 0; row < rowCount; row++ ) {
      TableItem item = wMappingParameters.getNonEmpty( row );
      variableNames[ row ] = item.getText( 1 );
      fieldNames[ row ] = item.getText( 2 );
    }
    parameters.setVariable( variableNames );
    parameters.setInputField( fieldNames );

    // Checkbox: hand all parent variables down to the mapping transformation.
    parameters.setInheritingAllVariables( wInheritAll.getSelection() );
  }
}
/**
 * Binds the widgets of a single input or output tab to one
 * {@link MappingIODefinition} and pushes the UI state back into it
 * on {@link #applyChanges()}.
 */
private class MappingDefinitionTab implements ApplyChanges {
  private MappingIODefinition definition;
  private Text wInputStep;
  private Text wOutputStep;
  private Button wMainPath;
  private Text wDescription;
  private TableView wFieldMappings;

  public MappingDefinitionTab( MappingIODefinition definition, Text inputStep, Text outputStep, Button mainPath,
    Text description, TableView fieldMappings ) {
    super();
    this.definition = definition;
    wInputStep = inputStep;
    wOutputStep = outputStep;
    wMainPath = mainPath;
    wDescription = description;
    wFieldMappings = fieldMappings;
  }

  public void applyChanges() {
    // Simple widgets map one-to-one onto definition properties.
    definition.setInputStepname( wInputStep.getText() );
    definition.setOutputStepname( wOutputStep.getText() );
    definition.setDescription( wDescription.getText() );
    definition.setMainDataPath( wMainPath.getSelection() );

    // Rebuild the rename list from the non-empty rows of the field grid.
    List<MappingValueRename> renames = definition.getValueRenames();
    renames.clear();
    int rowCount = wFieldMappings.nrNonEmpty();
    for ( int row = 0; row < rowCount; row++ ) {
      TableItem item = wFieldMappings.getNonEmpty( row );
      renames.add( new MappingValueRename( item.getText( 1 ), item.getText( 2 ) ) );
    }
  }
}
private List<ApplyChanges> changeList;
/**
 * Creates the dialog for a Mapping step.
 *
 * @param parent the parent shell
 * @param in     the step metadata to edit (a {@link MappingMeta})
 * @param tr     the transformation this step belongs to
 * @param sname  the step name
 */
public MappingDialog( Shell parent, Object in, TransMeta tr, String sname ) {
  super( parent, (BaseStepMeta) in, tr, sname );
  mappingMeta = (MappingMeta) in;
  transModified = false;

  // Work on deep copies so the widget listeners can freely mutate state;
  // the results are only written back into the step metadata on ok().
  mappingParameters = (MappingParameters) mappingMeta.getMappingParameters().clone();
  inputMappings = new ArrayList<MappingIODefinition>();
  outputMappings = new ArrayList<MappingIODefinition>();
  for ( MappingIODefinition inputDefinition : mappingMeta.getInputMappings() ) {
    inputMappings.add( (MappingIODefinition) inputDefinition.clone() );
  }
  for ( MappingIODefinition outputDefinition : mappingMeta.getOutputMappings() ) {
    outputMappings.add( (MappingIODefinition) outputDefinition.clone() );
  }
  changeList = new ArrayList<ApplyChanges>();
}
/**
 * Builds the dialog widgets, shows the dialog, runs the SWT event loop until
 * it is disposed, and returns the (possibly edited) step name.
 *
 * @return the step name
 */
public String open() {
  Shell parent = getParent();
  Display display = parent.getDisplay();

  shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX );
  props.setLook( shell );
  setShellImage( shell, mappingMeta );

  // Any edit in a registered widget flags the step metadata as changed.
  lsMod = new ModifyListener() {
    public void modifyText( ModifyEvent e ) {
      mappingMeta.setChanged();
    }
  };
  changed = mappingMeta.hasChanged();

  FormLayout formLayout = new FormLayout();
  formLayout.marginWidth = Const.FORM_MARGIN;
  formLayout.marginHeight = Const.FORM_MARGIN;
  shell.setLayout( formLayout );
  shell.setText( BaseMessages.getString( PKG, "MappingDialog.Shell.Title" ) );

  middle = props.getMiddlePct();
  margin = Const.MARGIN;

  // Stepname line
  wlStepname = new Label( shell, SWT.RIGHT );
  wlStepname.setText( BaseMessages.getString( PKG, "MappingDialog.Stepname.Label" ) );
  props.setLook( wlStepname );
  fdlStepname = new FormData();
  fdlStepname.left = new FormAttachment( 0, 0 );
  fdlStepname.right = new FormAttachment( middle, -margin );
  fdlStepname.top = new FormAttachment( 0, margin );
  wlStepname.setLayoutData( fdlStepname );
  wStepname = new Text( shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  wStepname.setText( stepname );
  props.setLook( wStepname );
  wStepname.addModifyListener( lsMod );
  fdStepname = new FormData();
  fdStepname.left = new FormAttachment( middle, 0 );
  fdStepname.top = new FormAttachment( 0, margin );
  fdStepname.right = new FormAttachment( 100, 0 );
  wStepname.setLayoutData( fdStepname );

  // Show a group with the 3 ways to point at the mapping transformation:
  // by file name, by repository name, or by repository reference.
  //
  // //////////////////////////////////////////////////
  // The key creation box
  // //////////////////////////////////////////////////
  //
  Group gTransGroup = new Group( shell, SWT.SHADOW_ETCHED_IN );
  gTransGroup.setText( BaseMessages.getString( PKG, "MappingDialog.TransGroup.Label" ) );
  gTransGroup.setBackground( shell.getBackground() ); // the default looks ugly
  FormLayout transGroupLayout = new FormLayout();
  transGroupLayout.marginLeft = margin * 2;
  transGroupLayout.marginTop = margin * 2;
  transGroupLayout.marginRight = margin * 2;
  transGroupLayout.marginBottom = margin * 2;
  gTransGroup.setLayout( transGroupLayout );

  // Radio button: The mapping is in a file
  //
  radioFilename = new Button( gTransGroup, SWT.RADIO );
  props.setLook( radioFilename );
  radioFilename.setSelection( false );
  radioFilename.setText( BaseMessages.getString( PKG, "MappingDialog.RadioFile.Label" ) );
  radioFilename.setToolTipText( BaseMessages.getString( PKG, "MappingDialog.RadioFile.Tooltip", Const.CR ) );
  FormData fdFileRadio = new FormData();
  fdFileRadio.left = new FormAttachment( 0, 0 );
  fdFileRadio.right = new FormAttachment( 100, 0 );
  fdFileRadio.top = new FormAttachment( 0, 0 );
  radioFilename.setLayoutData( fdFileRadio );
  radioFilename.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );
      setRadioButtons();
    }
  } );

  wbbFilename = new Button( gTransGroup, SWT.PUSH | SWT.CENTER ); // Browse
  props.setLook( wbbFilename );
  wbbFilename.setText( BaseMessages.getString( PKG, "System.Button.Browse" ) );
  wbbFilename.setToolTipText( BaseMessages.getString( PKG, "System.Tooltip.BrowseForFileOrDirAndAdd" ) );
  FormData fdbFilename = new FormData();
  fdbFilename.right = new FormAttachment( 100, 0 );
  fdbFilename.top = new FormAttachment( radioFilename, margin );
  wbbFilename.setLayoutData( fdbFilename );
  wbbFilename.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      selectFileTrans();
    }
  } );

  wFilename = new TextVar( transMeta, gTransGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wFilename );
  wFilename.addModifyListener( lsMod );
  FormData fdFilename = new FormData();
  fdFilename.left = new FormAttachment( 0, 25 );
  fdFilename.right = new FormAttachment( wbbFilename, -margin );
  fdFilename.top = new FormAttachment( wbbFilename, 0, SWT.CENTER );
  wFilename.setLayoutData( fdFilename );
  wFilename.addModifyListener( new ModifyListener() {
    public void modifyText( ModifyEvent e ) {
      // Typing a file name implicitly switches to "file" mode.
      setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );
      setRadioButtons();
    }
  } );

  // Radio button: The mapping is in the repository, addressed by name
  //
  radioByName = new Button( gTransGroup, SWT.RADIO );
  props.setLook( radioByName );
  radioByName.setSelection( false );
  radioByName.setText( BaseMessages.getString( PKG, "MappingDialog.RadioRep.Label" ) );
  radioByName.setToolTipText( BaseMessages.getString( PKG, "MappingDialog.RadioRep.Tooltip", Const.CR ) );
  FormData fdRepRadio = new FormData();
  fdRepRadio.left = new FormAttachment( 0, 0 );
  fdRepRadio.right = new FormAttachment( 100, 0 );
  fdRepRadio.top = new FormAttachment( wbbFilename, 2 * margin );
  radioByName.setLayoutData( fdRepRadio );
  radioByName.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME );
      setRadioButtons();
    }
  } );
  wbTrans = new Button( gTransGroup, SWT.PUSH | SWT.CENTER ); // Browse
  props.setLook( wbTrans );
  wbTrans.setText( BaseMessages.getString( PKG, "MappingDialog.Select.Button" ) );
  wbTrans.setToolTipText( BaseMessages.getString( PKG, "System.Tooltip.BrowseForFileOrDirAndAdd" ) );
  FormData fdbTrans = new FormData();
  fdbTrans.right = new FormAttachment( 100, 0 );
  fdbTrans.top = new FormAttachment( radioByName, 2 * margin );
  wbTrans.setLayoutData( fdbTrans );
  wbTrans.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      selectRepositoryTrans();
    }
  } );

  wDirectory = new TextVar( transMeta, gTransGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wDirectory );
  wDirectory.addModifyListener( lsMod );
  FormData fdTransDir = new FormData();
  fdTransDir.left = new FormAttachment( middle + ( 100 - middle ) / 2, 0 );
  fdTransDir.right = new FormAttachment( wbTrans, -margin );
  fdTransDir.top = new FormAttachment( wbTrans, 0, SWT.CENTER );
  wDirectory.setLayoutData( fdTransDir );
  wDirectory.addModifyListener( new ModifyListener() {
    public void modifyText( ModifyEvent e ) {
      setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME );
      setRadioButtons();
    }
  } );

  wTransname = new TextVar( transMeta, gTransGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
  props.setLook( wTransname );
  wTransname.addModifyListener( lsMod );
  FormData fdTransName = new FormData();
  fdTransName.left = new FormAttachment( 0, 25 );
  fdTransName.right = new FormAttachment( wDirectory, -margin );
  fdTransName.top = new FormAttachment( wbTrans, 0, SWT.CENTER );
  wTransname.setLayoutData( fdTransName );
  wTransname.addModifyListener( new ModifyListener() {
    public void modifyText( ModifyEvent e ) {
      setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME );
      setRadioButtons();
    }
  } );

  // Radio button: The mapping is in the repository, addressed by object reference
  //
  radioByReference = new Button( gTransGroup, SWT.RADIO );
  props.setLook( radioByReference );
  radioByReference.setSelection( false );
  radioByReference.setText( BaseMessages.getString( PKG, "MappingDialog.RadioRepByReference.Label" ) );
  radioByReference.setToolTipText( BaseMessages.getString( PKG, "MappingDialog.RadioRepByReference.Tooltip",
    Const.CR ) );
  FormData fdRadioByReference = new FormData();
  fdRadioByReference.left = new FormAttachment( 0, 0 );
  fdRadioByReference.right = new FormAttachment( 100, 0 );
  fdRadioByReference.top = new FormAttachment( wTransname, 2 * margin );
  radioByReference.setLayoutData( fdRadioByReference );
  radioByReference.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE );
      setRadioButtons();
    }
  } );

  wbByReference = new Button( gTransGroup, SWT.PUSH | SWT.CENTER );
  props.setLook( wbByReference );
  wbByReference.setImage( GUIResource.getInstance().getImageTransGraph() );
  wbByReference.setToolTipText( BaseMessages.getString( PKG, "MappingDialog.SelectTrans.Tooltip" ) );
  FormData fdbByReference = new FormData();
  fdbByReference.top = new FormAttachment( radioByReference, margin );
  fdbByReference.right = new FormAttachment( 100, 0 );
  wbByReference.setLayoutData( fdbByReference );
  wbByReference.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      selectTransformationByReference();
    }
  } );

  // Read-only: shows the resolved repository path of the referenced transformation.
  wByReference = new TextVar( transMeta, gTransGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER | SWT.READ_ONLY );
  props.setLook( wByReference );
  wByReference.addModifyListener( lsMod );
  FormData fdByReference = new FormData();
  fdByReference.top = new FormAttachment( radioByReference, margin );
  fdByReference.left = new FormAttachment( 0, 25 );
  fdByReference.right = new FormAttachment( wbByReference, -margin );
  wByReference.setLayoutData( fdByReference );
  wByReference.addModifyListener( new ModifyListener() {
    public void modifyText( ModifyEvent e ) {
      setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE );
      setRadioButtons();
    }
  } );

  // Buttons to create a brand-new mapping or edit the selected one in Spoon.
  Button wNewTrans = new Button( gTransGroup, SWT.PUSH | SWT.CENTER );
  props.setLook( wNewTrans );
  wNewTrans.setText( BaseMessages.getString( PKG, "MappingDialog.New.Button" ) );
  FormData fdNewTrans = new FormData();
  fdNewTrans.left = new FormAttachment( 0, 0 );
  fdNewTrans.top = new FormAttachment( wByReference, 3 * margin );
  wNewTrans.setLayoutData( fdNewTrans );
  wNewTrans.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      newTransformation();
    }
  } );

  Button wEditTrans = new Button( gTransGroup, SWT.PUSH | SWT.CENTER );
  props.setLook( wEditTrans );
  wEditTrans.setText( BaseMessages.getString( PKG, "MappingDialog.Edit.Button" ) );
  wEditTrans.setToolTipText( BaseMessages.getString( PKG, "System.Tooltip.BrowseForFileOrDirAndAdd" ) );
  FormData fdEditTrans = new FormData();
  fdEditTrans.left = new FormAttachment( wNewTrans, 2 * margin );
  fdEditTrans.top = new FormAttachment( wByReference, 3 * margin );
  wEditTrans.setLayoutData( fdEditTrans );
  wEditTrans.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent e ) {
      editTrans();
    }
  } );

  FormData fdTransGroup = new FormData();
  fdTransGroup.left = new FormAttachment( 0, 0 );
  fdTransGroup.top = new FormAttachment( wStepname, 2 * margin );
  fdTransGroup.right = new FormAttachment( 100, 0 );
  gTransGroup.setLayoutData( fdTransGroup );

  Control lastControl = gTransGroup;

  wMultiInput = new Button( shell, SWT.CHECK );
  props.setLook( wMultiInput );
  wMultiInput.setText( BaseMessages.getString( PKG, "MappingDialog.AllowMultipleInputs.Label" ) );
  FormData fdMultiInput = new FormData();
  fdMultiInput.left = new FormAttachment( 0, 0 );
  fdMultiInput.right = new FormAttachment( 100, 0 );
  fdMultiInput.top = new FormAttachment( lastControl, margin * 2 );
  wMultiInput.setLayoutData( fdMultiInput );
  wMultiInput.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent event ) {
      setFlags();
    }
  } );
  lastControl = wMultiInput;

  wMultiOutput = new Button( shell, SWT.CHECK );
  props.setLook( wMultiOutput );
  wMultiOutput.setText( BaseMessages.getString( PKG, "MappingDialog.AllowMultipleOutputs.Label" ) );
  FormData fdMultiOutput = new FormData();
  fdMultiOutput.left = new FormAttachment( 0, 0 );
  fdMultiOutput.right = new FormAttachment( 100, 0 );
  fdMultiOutput.top = new FormAttachment( lastControl, margin );
  wMultiOutput.setLayoutData( fdMultiOutput );
  wMultiOutput.addSelectionListener( new SelectionAdapter() {
    public void widgetSelected( SelectionEvent event ) {
      setFlags();
    }
  } );
  lastControl = wMultiOutput;

  //
  // Add a tab folder for the parameters and various input and output
  // streams
  //
  wTabFolder = new CTabFolder( shell, SWT.BORDER );
  props.setLook( wTabFolder, Props.WIDGET_STYLE_TAB );
  wTabFolder.setSimple( false );
  wTabFolder.setUnselectedCloseVisible( true );
  FormData fdTabFolder = new FormData();
  fdTabFolder.left = new FormAttachment( 0, 0 );
  fdTabFolder.right = new FormAttachment( 100, 0 );
  fdTabFolder.top = new FormAttachment( lastControl, margin * 2 );
  fdTabFolder.bottom = new FormAttachment( 100, -75 );
  wTabFolder.setLayoutData( fdTabFolder );

  // Now add buttons that will allow us to add or remove input or output
  // tabs...
  wAddInput = new Button( shell, SWT.PUSH );
  props.setLook( wAddInput );
  wAddInput.setText( BaseMessages.getString( PKG, "MappingDialog.button.AddInput" ) );
  wAddInput.addSelectionListener( new SelectionAdapter() {
    @Override
    public void widgetSelected( SelectionEvent event ) {
      // Simply add a new MappingIODefinition object to the
      // inputMappings
      MappingIODefinition definition = new MappingIODefinition();
      definition.setRenamingOnOutput( true );
      inputMappings.add( definition );
      int index = inputMappings.size() - 1;
      addInputMappingDefinitionTab( definition, index );
      setFlags();
    }
  } );

  wAddOutput = new Button( shell, SWT.PUSH );
  props.setLook( wAddOutput );
  wAddOutput.setText( BaseMessages.getString( PKG, "MappingDialog.button.AddOutput" ) );
  wAddOutput.addSelectionListener( new SelectionAdapter() {
    @Override
    public void widgetSelected( SelectionEvent event ) {
      // Simply add a new MappingIODefinition object to the
      // outputMappings
      MappingIODefinition definition = new MappingIODefinition();
      outputMappings.add( definition );
      int index = outputMappings.size() - 1;
      addOutputMappingDefinitionTab( definition, index );
      setFlags();
    }
  } );

  setButtonPositions( new Button[] { wAddInput, wAddOutput }, margin, wTabFolder );

  // Some buttons
  wOK = new Button( shell, SWT.PUSH );
  wOK.setText( BaseMessages.getString( PKG, "System.Button.OK" ) );
  wCancel = new Button( shell, SWT.PUSH );
  wCancel.setText( BaseMessages.getString( PKG, "System.Button.Cancel" ) );
  setButtonPositions( new Button[] { wOK, wCancel }, margin, null );

  // Add listeners
  lsCancel = new Listener() {
    public void handleEvent( Event e ) {
      cancel();
    }
  };
  lsOK = new Listener() {
    public void handleEvent( Event e ) {
      ok();
    }
  };

  wCancel.addListener( SWT.Selection, lsCancel );
  wOK.addListener( SWT.Selection, lsOK );

  // Enter in any of the name fields acts like OK.
  lsDef = new SelectionAdapter() {
    public void widgetDefaultSelected( SelectionEvent e ) {
      ok();
    }
  };

  wStepname.addSelectionListener( lsDef );
  wFilename.addSelectionListener( lsDef );
  wTransname.addSelectionListener( lsDef );

  // Detect X or ALT-F4 or something that kills this window...
  shell.addShellListener( new ShellAdapter() {
    public void shellClosed( ShellEvent e ) {
      cancel();
    }
  } );

  // Set the shell size, based upon previous time...
  setSize();

  // Populate the widgets from the metadata, then restore the changed flag
  // (getData() triggers the modify listeners, which would set it).
  getData();
  mappingMeta.setChanged( changed );
  wTabFolder.setSelection( 0 );

  shell.open();
  while ( !shell.isDisposed() ) {
    if ( !display.readAndDispatch() ) {
      display.sleep();
    }
  }
  return stepname;
}
/**
 * Lets the user pick a transformation from the repository and stores the
 * selection as an object reference. Does nothing when no repository is
 * connected or the user cancels.
 */
protected void selectTransformationByReference() {
  if ( repository == null ) {
    return;
  }
  SelectObjectDialog sod = new SelectObjectDialog( shell, repository, true, false );
  sod.open();
  RepositoryElementMetaInterface repositoryObject = sod.getRepositoryObject();
  if ( repositoryObject == null ) {
    return;
  }
  setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE );
  updateByReferenceField( repositoryObject );
  setReferenceObjectId( repositoryObject.getObjectId() );
  setRadioButtons();
}
/**
 * Lets the user pick a mapping transformation from the repository by name,
 * loads it and fills the name/directory fields from the selection.
 */
private void selectRepositoryTrans() {
  try {
    SelectObjectDialog sod = new SelectObjectDialog( shell, repository );
    String transName = sod.open();
    RepositoryDirectoryInterface repdir = sod.getDirectory();
    if ( transName == null || repdir == null ) {
      return; // user cancelled the selection
    }
    loadRepositoryTrans( transName, repdir );
    wTransname.setText( mappingTransMeta.getName() );
    wDirectory.setText( mappingTransMeta.getRepositoryDirectory().getPath() );
    wFilename.setText( "" );
    radioByName.setSelection( true );
    radioFilename.setSelection( false );
    setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME );
    setRadioButtons();
  } catch ( KettleException ke ) {
    new ErrorDialog( shell, BaseMessages.getString( PKG, "MappingDialog.ErrorSelectingObject.DialogTitle" ),
      BaseMessages.getString( PKG, "MappingDialog.ErrorSelectingObject.DialogMessage" ), ke );
  }
}
/**
 * Loads the named mapping transformation from the given repository directory
 * into {@link #mappingTransMeta} and clears its changed flag.
 *
 * @param transName the transformation name (may contain variables)
 * @param repdir    the repository directory to read from
 * @throws KettleException when the transformation cannot be loaded
 */
private void loadRepositoryTrans( String transName, RepositoryDirectoryInterface repdir ) throws KettleException {
  String realTransName = transMeta.environmentSubstitute( transName );
  mappingTransMeta = repository.loadTransformation( realTransName, repdir, null, true, null );
  mappingTransMeta.clearChanged();
}
/**
 * Opens a VFS file chooser so the user can pick a mapping transformation
 * file, then loads it and switches the dialog to "file" mode.
 */
private void selectFileTrans() {
  String currentFile = wFilename.getText();
  FileObject root;

  try {
    // Start browsing at the current file, or at the user's home directory.
    root = KettleVFS.getFileObject( currentFile != null ? currentFile : Const.getUserHomeDirectory() );

    VfsFileChooserDialog vfsFileChooser = Spoon.getInstance().getVfsFileChooserDialog( root.getParent(), root );
    FileObject file =
      vfsFileChooser.open( shell, null, Const.STRING_TRANS_FILTER_EXT, Const.getTransformationFilterNames(),
        VfsFileChooserDialog.VFS_DIALOG_OPEN_FILE );
    if ( file == null ) {
      return; // user cancelled
    }

    String fname = file.getURL().getFile();
    if ( fname != null ) {
      loadFileTrans( fname );
      wFilename.setText( mappingTransMeta.getFilename() );
      wTransname.setText( Const.NVL( mappingTransMeta.getName(), "" ) );
      wDirectory.setText( "" );
      setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );
      setRadioButtons();
    }
  } catch ( IOException e ) {
    new ErrorDialog( shell, BaseMessages.getString( PKG, "MappingDialog.ErrorLoadingTransformation.DialogTitle" ),
      BaseMessages.getString( PKG, "MappingDialog.ErrorLoadingTransformation.DialogMessage" ), e );
  } catch ( KettleException e ) {
    new ErrorDialog( shell, BaseMessages.getString( PKG, "MappingDialog.ErrorLoadingTransformation.DialogTitle" ),
      BaseMessages.getString( PKG, "MappingDialog.ErrorLoadingTransformation.DialogMessage" ), e );
  }
}
/**
 * Loads the mapping transformation from an XML file (after variable
 * substitution of the file name) and clears its changed flag.
 *
 * @param fname the file name, possibly containing variables
 * @throws KettleException when the file cannot be read or parsed
 */
private void loadFileTrans( String fname ) throws KettleException {
  String realFilename = transMeta.environmentSubstitute( fname );
  mappingTransMeta = new TransMeta( realFilename );
  mappingTransMeta.clearChanged();
}
/**
 * Re-loads the selected mapping transformation (as a sanity check that it
 * still exists and is valid) and opens it for editing in Spoon.
 */
private void editTrans() {
  try {
    // Load again to make sure the transformation is still there and refreshed.
    loadTransformation();

    // If we get here without an exception, mappingTransMeta is valid.
    SpoonInterface spoon = SpoonFactory.getInstance();
    if ( spoon != null ) {
      spoon.addTransGraph( mappingTransMeta );
    }
  } catch ( KettleException e ) {
    new ErrorDialog( shell, BaseMessages.getString( PKG, "MappingDialog.ErrorShowingTransformation.Title" ),
      BaseMessages.getString( PKG, "MappingDialog.ErrorShowingTransformation.Message" ), e );
  }
}
/**
 * Loads the mapping transformation selected in the dialog into
 * {@code mappingTransMeta}, using the current specification method
 * (file name, repository directory + name, or repository object id).
 * Method is defined as package-protected in order to be accessible by unit tests.
 *
 * @throws KettleException when the selection is incomplete or loading fails
 */
void loadTransformation() throws KettleException {
  switch( getSpecificationMethod() ) {
    case FILENAME:
      loadFileTrans( wFilename.getText() );
      break;
    case REPOSITORY_BY_NAME:
      String realDirectory = transMeta.environmentSubstitute( wDirectory.getText() );
      String realTransname = transMeta.environmentSubstitute( wTransname.getText() );
      if ( Const.isEmpty( realDirectory ) || Const.isEmpty( realTransname ) ) {
        throw new KettleException( BaseMessages.getString(
          PKG, "MappingDialog.Exception.NoValidMappingDetailsFound" ) );
      }
      RepositoryDirectoryInterface repdir = repository.findDirectory( realDirectory );
      if ( repdir == null ) {
        // NOTE(review): the i18n key below ends with a stray ')'. If the key in the
        // messages bundle lacks that ')', the lookup falls back to the raw key text --
        // verify against the messages.properties file before changing either side.
        throw new KettleException( BaseMessages.getString(
          PKG, "MappingDialog.Exception.UnableToFindRepositoryDirectory)" ) );
      }
      loadRepositoryTrans( realTransname, repdir );
      break;
    case REPOSITORY_BY_REFERENCE:
      if ( getReferenceObjectId() == null ) {
        throw new KettleException( BaseMessages.getString( PKG,
          "MappingDialog.Exception.ReferencedTransformationIdIsNull" ) );
      }
      mappingTransMeta = repository.loadTransformation( getReferenceObjectId(), null ); // load the last version
      mappingTransMeta.clearChanged();
      break;
    default:
      break;
  }
}
/**
 * Enables/disables the location widgets so that only the controls belonging
 * to the selected specification method (and supported by the current
 * repository connection) are active.
 */
public void setActive() {
  boolean connected = repository != null;
  boolean supportsReferences =
    connected && repository.getRepositoryMeta().getRepositoryCapabilities().supportsReferences();
  boolean byFile = radioFilename.getSelection();
  boolean byName = connected && radioByName.getSelection();
  boolean byReference = connected && radioByReference.getSelection() && supportsReferences;

  // Repository-based modes are only offered while connected.
  radioByName.setEnabled( connected );
  radioByReference.setEnabled( connected && supportsReferences );

  wFilename.setEnabled( byFile );
  wbbFilename.setEnabled( byFile );

  wTransname.setEnabled( byName );
  wDirectory.setEnabled( byName );
  wbTrans.setEnabled( byName );

  wByReference.setEnabled( byReference );
  wbByReference.setEnabled( byReference );
}
/**
 * Synchronizes the three radio buttons with the current specification
 * method, then refreshes the enabled state of the location widgets.
 */
protected void setRadioButtons() {
  ObjectLocationSpecificationMethod method = getSpecificationMethod();
  radioFilename.setSelection( method == ObjectLocationSpecificationMethod.FILENAME );
  radioByName.setSelection( method == ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME );
  radioByReference.setSelection( method == ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE );
  setActive();
}
/**
 * Asks the user to fill in the details for a brand-new mapping
 * transformation: creates it pre-populated with a Mapping Input step
 * (carrying the previous step's fields) and a Mapping Output step, opens it
 * in Spoon, saves it, and points this dialog at the result.
 */
protected void newTransformation() {

  // Get input fields for this step so we can put this metadata in the mapping
  //
  RowMetaInterface inFields = new RowMeta();
  try {
    inFields = transMeta.getPrevStepFields( stepname );
  } catch ( Exception e ) {
    // Just show the error but continue operations.
    //
    new ErrorDialog( shell, "Error", "Unable to get input fields from previous step", e );
  }

  TransMeta newTransMeta = new TransMeta();

  newTransMeta.getDatabases().addAll( transMeta.getDatabases() );
  newTransMeta.setRepository( transMeta.getRepository() );
  newTransMeta.setRepositoryDirectory( transMeta.getRepositoryDirectory() );

  // Pass some interesting settings from the parent transformations...
  //
  newTransMeta.setUsingUniqueConnections( transMeta.isUsingUniqueConnections() );

  // Add MappingInput and MappingOutput steps
  //
  String INPUT_STEP_NAME = "Mapping Input";
  MappingInputMeta inputMeta = new MappingInputMeta();
  inputMeta.allocate( inFields.size() );
  //CHECKSTYLE:Indentation:OFF
  // Mirror the incoming fields into the Mapping Input step definition.
  for ( int i = 0; i < inFields.size(); i++ ) {
    ValueMetaInterface valueMeta = inFields.getValueMeta( i );
    inputMeta.getFieldName()[ i ] = valueMeta.getName();
    inputMeta.getFieldType()[ i ] = valueMeta.getType();
    inputMeta.getFieldLength()[ i ] = valueMeta.getLength();
    inputMeta.getFieldPrecision()[ i ] = valueMeta.getPrecision();
  }

  StepMeta inputStep = new StepMeta( INPUT_STEP_NAME, inputMeta );
  inputStep.setLocation( 50, 50 );
  inputStep.setDraw( true );
  newTransMeta.addStep( inputStep );

  String OUTPUT_STEP_NAME = "Mapping Output";
  MappingOutputMeta outputMeta = new MappingOutputMeta();
  outputMeta.allocate( 0 );
  StepMeta outputStep = new StepMeta( OUTPUT_STEP_NAME, outputMeta );
  outputStep.setLocation( 500, 50 );
  outputStep.setDraw( true );
  newTransMeta.addStep( outputStep );
  newTransMeta.addTransHop( new TransHopMeta( inputStep, outputStep ) );

  TransDialog transDialog = new TransDialog( shell, SWT.NONE, newTransMeta, repository );
  if ( transDialog.open() != null ) {
    Spoon spoon = Spoon.getInstance();
    spoon.addTransGraph( newTransMeta );
    boolean saved = false;
    try {
      if ( repository != null ) {
        if ( !Const.isEmpty( newTransMeta.getName() ) ) {
          wStepname.setText( newTransMeta.getName() );
        }
        saved = spoon.saveToRepository( newTransMeta, false );
        // Prefer an object reference when the repository supports it.
        if ( repository.getRepositoryMeta().getRepositoryCapabilities().supportsReferences() ) {
          setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE );
        } else {
          setSpecificationMethod( ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME );
        }
      } else {
        saved = spoon.saveToFile( newTransMeta );
        setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );
      }
    } catch ( Exception e ) {
      new ErrorDialog( shell, "Error", "Error saving new transformation", e );
    }
    if ( saved ) {
      // Reflect where the new transformation ended up in the dialog fields.
      setRadioButtons();
      switch( getSpecificationMethod() ) {
        case FILENAME:
          wFilename.setText( Const.NVL( newTransMeta.getFilename(), "" ) );
          break;
        case REPOSITORY_BY_NAME:
          wTransname.setText( Const.NVL( newTransMeta.getName(), "" ) );
          wDirectory.setText( newTransMeta.getRepositoryDirectory().getPath() );
          break;
        case REPOSITORY_BY_REFERENCE:
          getByReferenceData( newTransMeta.getObjectId() );
          break;
        default:
          break;
      }
    }
  }
}
/**
 * Looks up the transformation behind the given repository object id and
 * shows its path in the "by reference" field. Shows an error dialog when
 * no repository is connected or the lookup fails.
 *
 * @param transObjectId the repository object id of the referenced transformation
 */
private void getByReferenceData( ObjectId transObjectId ) {
  try {
    if ( repository == null ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "MappingDialog.Exception.NotConnectedToRepository.Message" ) );
    }
    RepositoryObject transInf = repository.getObjectInformation( transObjectId, RepositoryObjectType.TRANSFORMATION );
    updateByReferenceField( transInf );
  } catch ( KettleException e ) {
    new ErrorDialog( shell, BaseMessages.getString(
      PKG, "MappingDialog.Exception.UnableToReferenceObjectId.Title" ), BaseMessages.getString(
      PKG, "MappingDialog.Exception.UnableToReferenceObjectId.Message" ), e );
  }
}
/**
 * Displays the repository path of the given element in the "by reference"
 * text field, falling back to an empty string when no path is available.
 *
 * @param element repository element whose path should be shown
 */
private void updateByReferenceField( RepositoryElementMetaInterface element ) {
  // Const.NVL keeps the null handling consistent with the rest of this dialog
  // (identical behavior to the previous explicit null check).
  wByReference.setText( Const.NVL( getPathOf( element ), "" ) );
}
/**
 * Copy information from the meta-data input to the dialog fields.
 *
 * Populates the location widgets for the active specification method
 * (file name, repository directory/name, or repository object reference),
 * rebuilds the parameters tab and one tab per input/output mapping
 * definition, then attempts to load the referenced mapping transformation.
 */
public void getData() {
setSpecificationMethod( mappingMeta.getSpecificationMethod() );
// Fill in the location widgets that match the specification method.
switch( getSpecificationMethod() ) {
case FILENAME:
wFilename.setText( Const.NVL( mappingMeta.getFileName(), "" ) );
break;
case REPOSITORY_BY_NAME:
wDirectory.setText( Const.NVL( mappingMeta.getDirectoryPath(), "" ) );
wTransname.setText( Const.NVL( mappingMeta.getTransName(), "" ) );
break;
case REPOSITORY_BY_REFERENCE:
// Start empty; getByReferenceData() fills the field from the repository.
wByReference.setText( "" );
if ( mappingMeta.getTransObjectId() != null ) {
setReferenceObjectId( mappingMeta.getTransObjectId() );
getByReferenceData( getReferenceObjectId() );
}
break;
default:
break;
}
setRadioButtons();
// Add the parameters tab
addParametersTab( mappingParameters );
wTabFolder.setSelection( 0 );
wMultiInput.setSelection( mappingMeta.isAllowingMultipleInputs() );
wMultiOutput.setSelection( mappingMeta.isAllowingMultipleOutputs() );
// Now add the input stream tabs: where is our data coming from?
for ( int i = 0; i < inputMappings.size(); i++ ) {
addInputMappingDefinitionTab( inputMappings.get( i ), i );
}
// Now add the output stream tabs: where is our data going to?
for ( int i = 0; i < outputMappings.size(); i++ ) {
addOutputMappingDefinitionTab( outputMappings.get( i ), i );
}
try {
loadTransformation();
} catch ( Throwable t ) {
// Ignore errors
// (the user may simply not have specified a mapping yet; ok() reports
// load problems when the dialog is confirmed)
}
setFlags();
wStepname.selectAll();
wStepname.setFocus();
}
/**
 * Adds a tab for the given output mapping definition. Output tabs are placed
 * after all input tabs (and the step-name header) in the tab folder.
 *
 * @param definition the output mapping definition to show
 * @param index      zero-based position among the output mappings
 */
private void addOutputMappingDefinitionTab( MappingIODefinition definition, int index ) {
  // Use the passed-in definition directly instead of re-resolving it via
  // outputMappings.get( index ) — consistent with addInputMappingDefinitionTab,
  // which already honors its parameter.
  addMappingDefinitionTab( definition, index + 1 + inputMappings.size(), BaseMessages
    .getString( PKG, "MappingDialog.OutputTab.Title" ), BaseMessages.getString(
    PKG, "MappingDialog.OutputTab.Tooltip" ), BaseMessages.getString(
    PKG, "MappingDialog.OutputTab.label.InputSourceStepName" ), BaseMessages.getString(
    PKG, "MappingDialog.OutputTab.label.OutputTargetStepName" ), BaseMessages.getString(
    PKG, "MappingDialog.OutputTab.label.Description" ), BaseMessages.getString(
    PKG, "MappingDialog.OutputTab.column.SourceField" ), BaseMessages.getString(
    PKG, "MappingDialog.OutputTab.column.TargetField" ), false );
}
/**
 * Adds a tab for the given input mapping definition, right after the
 * step-name header tab.
 *
 * @param definition the input mapping definition to show
 * @param index      zero-based position among the input mappings
 */
private void addInputMappingDefinitionTab( MappingIODefinition definition, int index ) {
  String tabTitle = BaseMessages.getString( PKG, "MappingDialog.InputTab.Title" );
  String tabTooltip = BaseMessages.getString( PKG, "MappingDialog.InputTab.Tooltip" );
  String sourceStepLabel = BaseMessages.getString( PKG, "MappingDialog.InputTab.label.InputSourceStepName" );
  String targetStepLabel = BaseMessages.getString( PKG, "MappingDialog.InputTab.label.OutputTargetStepName" );
  String descriptionLabel = BaseMessages.getString( PKG, "MappingDialog.InputTab.label.Description" );
  String sourceColumnLabel = BaseMessages.getString( PKG, "MappingDialog.InputTab.column.SourceField" );
  String targetColumnLabel = BaseMessages.getString( PKG, "MappingDialog.InputTab.column.TargetField" );
  addMappingDefinitionTab( definition, index + 1, tabTitle, tabTooltip, sourceStepLabel, targetStepLabel,
    descriptionLabel, sourceColumnLabel, targetColumnLabel, true );
}
/**
 * Builds the "Parameters" tab: a checkbox to inherit all variables from the
 * parent transformation plus a two-column table mapping mapping-parameter
 * names to values or input fields. The tab is registered in {@code changeList}
 * so its contents are written back to {@code parameters} on OK.
 *
 * @param parameters the mapping parameters to show and edit
 */
private void addParametersTab( final MappingParameters parameters ) {
CTabItem wParametersTab = new CTabItem( wTabFolder, SWT.NONE );
wParametersTab.setText( BaseMessages.getString( PKG, "MappingDialog.Parameters.Title" ) );
wParametersTab.setToolTipText( BaseMessages.getString( PKG, "MappingDialog.Parameters.Tooltip" ) );
Composite wParametersComposite = new Composite( wTabFolder, SWT.NONE );
props.setLook( wParametersComposite );
FormLayout parameterTabLayout = new FormLayout();
parameterTabLayout.marginWidth = Const.FORM_MARGIN;
parameterTabLayout.marginHeight = Const.FORM_MARGIN;
wParametersComposite.setLayout( parameterTabLayout );
// Add a checkbox: inherit all variables...
//
Button wInheritAll = new Button( wParametersComposite, SWT.CHECK );
wInheritAll.setText( BaseMessages.getString( PKG, "MappingDialog.Parameters.InheritAll" ) );
props.setLook( wInheritAll );
FormData fdInheritAll = new FormData();
fdInheritAll.bottom = new FormAttachment( 100, 0 );
fdInheritAll.left = new FormAttachment( 0, 0 );
fdInheritAll.right = new FormAttachment( 100, -30 );
wInheritAll.setLayoutData( fdInheritAll );
wInheritAll.setSelection( parameters.isInheritingAllVariables() );
// Now add a tableview with the 2 columns to specify: input and output
// fields for the source and target steps.
//
ColumnInfo[] colinfo =
new ColumnInfo[] {
new ColumnInfo(
BaseMessages.getString( PKG, "MappingDialog.Parameters.column.Variable" ),
ColumnInfo.COLUMN_TYPE_TEXT, false, false ),
new ColumnInfo(
BaseMessages.getString( PKG, "MappingDialog.Parameters.column.ValueOrField" ),
ColumnInfo.COLUMN_TYPE_TEXT, false, false ), };
// The value column may contain ${VARIABLE} expressions.
colinfo[ 1 ].setUsingVariables( true );
final TableView wMappingParameters =
new TableView(
transMeta, wParametersComposite, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER, colinfo, parameters
.getVariable().length, lsMod, props
);
props.setLook( wMappingParameters );
FormData fdMappings = new FormData();
fdMappings.left = new FormAttachment( 0, 0 );
fdMappings.right = new FormAttachment( 100, 0 );
fdMappings.top = new FormAttachment( 0, 0 );
fdMappings.bottom = new FormAttachment( wInheritAll, -margin * 2 );
wMappingParameters.setLayoutData( fdMappings );
// Pre-fill the table with the existing variable -> input-field pairs.
for ( int i = 0; i < parameters.getVariable().length; i++ ) {
TableItem tableItem = wMappingParameters.table.getItem( i );
tableItem.setText( 1, parameters.getVariable()[ i ] );
tableItem.setText( 2, parameters.getInputField()[ i ] );
}
wMappingParameters.setRowNums();
wMappingParameters.optWidth( true );
FormData fdParametersComposite = new FormData();
fdParametersComposite.left = new FormAttachment( 0, 0 );
fdParametersComposite.top = new FormAttachment( 0, 0 );
fdParametersComposite.right = new FormAttachment( 100, 0 );
fdParametersComposite.bottom = new FormAttachment( 100, 0 );
wParametersComposite.setLayoutData( fdParametersComposite );
wParametersComposite.layout();
wParametersTab.setControl( wParametersComposite );
// Register the tab so ok()/collectInformation() can apply its edits.
changeList.add( new MappingParametersTab( wMappingParameters, wInheritAll, parameters ) );
}
/**
 * Asks the user to pick a step name, either from the parent transformation or
 * from the mapping (sub-)transformation.
 *
 * @param getTransformationStep true to pick from the parent transformation,
 *                              false to pick from the mapping transformation
 * @param mappingInput          true when selecting for an input mapping,
 *                              false for an output mapping
 * @return the selected step name, or null when the user cancelled
 */
protected String selectTransformationStepname( boolean getTransformationStep, boolean mappingInput ) {
  String dialogTitle;
  String dialogMessage;
  String[] stepnames;
  if ( getTransformationStep ) {
    dialogTitle = BaseMessages.getString( PKG, "MappingDialog.SelectTransStep.Title" );
    dialogMessage = BaseMessages.getString( PKG, "MappingDialog.SelectTransStep.Message" );
    // Input mappings read from the steps feeding this step; output mappings
    // target the steps that follow it.
    stepnames = mappingInput ? transMeta.getPrevStepNames( stepMeta ) : transMeta.getNextStepNames( stepMeta );
  } else {
    dialogTitle = BaseMessages.getString( PKG, "MappingDialog.SelectMappingStep.Title" );
    dialogMessage = BaseMessages.getString( PKG, "MappingDialog.SelectMappingStep.Message" );
    stepnames = getMappingSteps( mappingTransMeta, mappingInput );
  }
  return new EnterSelectionDialog( shell, stepnames, dialogTitle, dialogMessage ).open();
}
/**
 * Collects the names of all MappingInput (or MappingOutput) steps in the
 * given mapping transformation.
 *
 * @param mappingTransMeta the mapping transformation to scan
 * @param mappingInput     true to collect "MappingInput" steps,
 *                         false for "MappingOutput" steps
 * @return the matching step names, in transformation order
 */
public static String[] getMappingSteps( TransMeta mappingTransMeta, boolean mappingInput ) {
  String wantedStepId = mappingInput ? "MappingInput" : "MappingOutput";
  List<String> names = new ArrayList<String>();
  for ( StepMeta candidate : mappingTransMeta.getSteps() ) {
    if ( wantedStepId.equals( candidate.getStepID() ) ) {
      names.add( candidate.getName() );
    }
  }
  return names.toArray( new String[ names.size() ] );
}
/**
 * Returns the row layout produced by a step, resolved either against the
 * parent transformation or against the mapping transformation.
 *
 * The XOR dispatch below selects the parent transformation exactly when
 * {@code mappingInput == getTransformationStep}; otherwise the fields are
 * looked up in the mapping transformation.
 *
 * @param stepname              name of the step to look up; empty selects the
 *                              default (previous-step fields, or the single
 *                              mapping input/output step)
 * @param getTransformationStep true when asking for a parent-transformation step
 * @param mappingInput          true when resolving for an input mapping
 * @return the step's output row metadata
 * @throws KettleException when the step cannot be found, no mapping is loaded,
 *                         or the mapping does not contain exactly one
 *                         mapping input/output step
 */
public RowMetaInterface getFieldsFromStep( String stepname, boolean getTransformationStep, boolean mappingInput ) throws KettleException {
if ( !( mappingInput ^ getTransformationStep ) ) {
if ( Const.isEmpty( stepname ) ) {
// If we don't have a specified stepname we return the input row
// metadata
//
return transMeta.getPrevStepFields( this.stepname );
} else {
// OK, a fieldname is specified...
// See if we can find it...
StepMeta stepMeta = transMeta.findStep( stepname );
if ( stepMeta == null ) {
throw new KettleException( BaseMessages.getString(
PKG, "MappingDialog.Exception.SpecifiedStepWasNotFound", stepname ) );
}
return transMeta.getStepFields( stepMeta );
}
} else {
if ( mappingTransMeta == null ) {
throw new KettleException( BaseMessages.getString( PKG, "MappingDialog.Exception.NoMappingSpecified" ) );
}
if ( Const.isEmpty( stepname ) ) {
// If we don't have a specified stepname we select the one and
// only "mapping input" step.
//
String[] stepnames = getMappingSteps( mappingTransMeta, mappingInput );
if ( stepnames.length > 1 ) {
throw new KettleException( BaseMessages.getString(
PKG, "MappingDialog.Exception.OnlyOneMappingInputStepAllowed", "" + stepnames.length ) );
}
if ( stepnames.length == 0 ) {
throw new KettleException( BaseMessages.getString(
PKG, "MappingDialog.Exception.OneMappingInputStepRequired", "" + stepnames.length ) );
}
return mappingTransMeta.getStepFields( stepnames[ 0 ] );
} else {
// OK, a fieldname is specified...
// See if we can find it...
StepMeta stepMeta = mappingTransMeta.findStep( stepname );
if ( stepMeta == null ) {
throw new KettleException( BaseMessages.getString(
PKG, "MappingDialog.Exception.SpecifiedStepWasNotFound", stepname ) );
}
return mappingTransMeta.getStepFields( stepMeta );
}
}
}
/**
 * Builds one closable tab describing a single input or output mapping
 * definition: source/target step pickers, a "main data path" checkbox, a
 * description field, and a source-to-target field-rename table. The widgets
 * write their state back into {@code definition}, and the tab registers a
 * {@link MappingDefinitionTab} in {@code changeList} so remaining edits are
 * applied on OK. Closing the tab (after confirmation) removes the definition.
 *
 * @param definition        the mapping definition edited on this tab
 * @param index             position at which the tab is inserted
 * @param tabTitle          base title of the tab
 * @param tabTooltip        base tooltip of the tab
 * @param inputStepLabel    label for the source-step field
 * @param outputStepLabel   label for the target-step field
 * @param descriptionLabel  label for the description field
 * @param sourceColumnLabel header of the source column in the rename table
 * @param targetColumnLabel header of the target column in the rename table
 * @param input             true for an input mapping tab, false for output
 */
private void addMappingDefinitionTab( final MappingIODefinition definition, int index, final String tabTitle,
final String tabTooltip, String inputStepLabel, String outputStepLabel,
String descriptionLabel,
String sourceColumnLabel, String targetColumnLabel, final boolean input ) {
final CTabItem wTab;
// Insert at the requested position, or append when the index is past the end.
if ( index >= wTabFolder.getItemCount() ) {
wTab = new CTabItem( wTabFolder, SWT.CLOSE );
} else {
wTab = new CTabItem( wTabFolder, SWT.CLOSE, index );
}
setMappingDefinitionTabNameAndToolTip( wTab, tabTitle, tabTooltip, definition, input );
Composite wInputComposite = new Composite( wTabFolder, SWT.NONE );
props.setLook( wInputComposite );
FormLayout tabLayout = new FormLayout();
tabLayout.marginWidth = Const.FORM_MARGIN;
tabLayout.marginHeight = Const.FORM_MARGIN;
wInputComposite.setLayout( tabLayout );
// What's the stepname to read from? (empty is OK too)
//
final Button wbInputStep = new Button( wInputComposite, SWT.PUSH );
props.setLook( wbInputStep );
wbInputStep.setText( BaseMessages.getString( PKG, "MappingDialog.button.SourceStepName" ) );
FormData fdbInputStep = new FormData();
fdbInputStep.top = new FormAttachment( 0, 0 );
fdbInputStep.right = new FormAttachment( 100, 0 ); // First one in the
// left top corner
wbInputStep.setLayoutData( fdbInputStep );
final Label wlInputStep = new Label( wInputComposite, SWT.RIGHT );
props.setLook( wlInputStep );
wlInputStep.setText( inputStepLabel );
FormData fdlInputStep = new FormData();
fdlInputStep.top = new FormAttachment( wbInputStep, 0, SWT.CENTER );
fdlInputStep.left = new FormAttachment( 0, 0 ); // First one in the left
// top corner
fdlInputStep.right = new FormAttachment( middle, -margin );
wlInputStep.setLayoutData( fdlInputStep );
final Text wInputStep = new Text( wInputComposite, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
props.setLook( wInputStep );
wInputStep.setText( Const.NVL( definition.getInputStepname(), "" ) );
wInputStep.addModifyListener( lsMod );
FormData fdInputStep = new FormData();
fdInputStep.top = new FormAttachment( wbInputStep, 0, SWT.CENTER );
fdInputStep.left = new FormAttachment( middle, 0 ); // To the right of
// the label
fdInputStep.right = new FormAttachment( wbInputStep, -margin );
wInputStep.setLayoutData( fdInputStep );
// Persist a manually typed source step name when the field loses focus.
wInputStep.addFocusListener( new FocusAdapter() {
@Override
public void focusLost( FocusEvent event ) {
definition.setInputStepname( wInputStep.getText() );
setMappingDefinitionTabNameAndToolTip( wTab, tabTitle, tabTooltip, definition, input );
}
} );
// The browse button opens the step-picker dialog for the source step.
wbInputStep.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
String stepName = selectTransformationStepname( input, input );
if ( stepName != null ) {
wInputStep.setText( stepName );
definition.setInputStepname( stepName );
setMappingDefinitionTabNameAndToolTip( wTab, tabTitle, tabTooltip, definition, input );
}
}
} );
// What's the step name to read from? (empty is OK too)
//
final Button wbOutputStep = new Button( wInputComposite, SWT.PUSH );
props.setLook( wbOutputStep );
wbOutputStep.setText( BaseMessages.getString( PKG, "MappingDialog.button.SourceStepName" ) );
FormData fdbOutputStep = new FormData();
fdbOutputStep.top = new FormAttachment( wbInputStep, margin );
fdbOutputStep.right = new FormAttachment( 100, 0 );
wbOutputStep.setLayoutData( fdbOutputStep );
final Label wlOutputStep = new Label( wInputComposite, SWT.RIGHT );
props.setLook( wlOutputStep );
wlOutputStep.setText( outputStepLabel );
FormData fdlOutputStep = new FormData();
fdlOutputStep.top = new FormAttachment( wbOutputStep, 0, SWT.CENTER );
fdlOutputStep.left = new FormAttachment( 0, 0 );
fdlOutputStep.right = new FormAttachment( middle, -margin );
wlOutputStep.setLayoutData( fdlOutputStep );
final Text wOutputStep = new Text( wInputComposite, SWT.SINGLE | SWT.LEFT | SWT.BORDER );
props.setLook( wOutputStep );
wOutputStep.setText( Const.NVL( definition.getOutputStepname(), "" ) );
wOutputStep.addModifyListener( lsMod );
FormData fdOutputStep = new FormData();
fdOutputStep.top = new FormAttachment( wbOutputStep, 0, SWT.CENTER );
fdOutputStep.left = new FormAttachment( middle, 0 ); // To the right of
// the label
fdOutputStep.right = new FormAttachment( wbOutputStep, -margin );
wOutputStep.setLayoutData( fdOutputStep );
// Add a checkbox to indicate the main step to read from, the main data
// path...
//
final Label wlMainPath = new Label( wInputComposite, SWT.RIGHT );
props.setLook( wlMainPath );
wlMainPath.setText( BaseMessages.getString( PKG, "MappingDialog.input.MainDataPath" ) );
FormData fdlMainPath = new FormData();
fdlMainPath.top = new FormAttachment( wbOutputStep, margin );
fdlMainPath.left = new FormAttachment( 0, 0 );
fdlMainPath.right = new FormAttachment( middle, -margin );
wlMainPath.setLayoutData( fdlMainPath );
final Button wMainPath = new Button( wInputComposite, SWT.CHECK );
props.setLook( wMainPath );
FormData fdMainPath = new FormData();
fdMainPath.top = new FormAttachment( wbOutputStep, margin );
fdMainPath.left = new FormAttachment( middle, 0 );
// fdMainPath.right = new FormAttachment(100, 0); // who cares, it's a
// checkbox
wMainPath.setLayoutData( fdMainPath );
wMainPath.setSelection( definition.isMainDataPath() );
wMainPath.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
definition.setMainDataPath( !definition.isMainDataPath() ); // flip
// the
// switch
}
} );
Control lastControl = wMainPath;
// Allow for a small description
//
Label wlDescription = new Label( wInputComposite, SWT.RIGHT );
props.setLook( wlDescription );
wlDescription.setText( descriptionLabel );
FormData fdlDescription = new FormData();
fdlDescription.top = new FormAttachment( lastControl, margin );
fdlDescription.left = new FormAttachment( 0, 0 ); // First one in the left
// top corner
fdlDescription.right = new FormAttachment( middle, -margin );
wlDescription.setLayoutData( fdlDescription );
final Text wDescription =
new Text( wInputComposite, SWT.MULTI | SWT.LEFT | SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL );
props.setLook( wDescription );
wDescription.setText( Const.NVL( definition.getDescription(), "" ) );
wDescription.addModifyListener( lsMod );
FormData fdDescription = new FormData();
fdDescription.top = new FormAttachment( lastControl, margin );
fdDescription.bottom = new FormAttachment( lastControl, 100 + margin );
fdDescription.left = new FormAttachment( middle, 0 ); // To the right of
// the label
fdDescription.right = new FormAttachment( wbOutputStep, -margin );
wDescription.setLayoutData( fdDescription );
wDescription.addFocusListener( new FocusAdapter() {
@Override
public void focusLost( FocusEvent event ) {
definition.setDescription( wDescription.getText() );
}
} );
lastControl = wDescription;
// Now add a table view with the 2 columns to specify: input and output
// fields for the source and target steps.
//
final Button wbEnterMapping = new Button( wInputComposite, SWT.PUSH );
props.setLook( wbEnterMapping );
wbEnterMapping.setText( BaseMessages.getString( PKG, "MappingDialog.button.EnterMapping" ) );
FormData fdbEnterMapping = new FormData();
fdbEnterMapping.top = new FormAttachment( lastControl, margin * 2 );
fdbEnterMapping.right = new FormAttachment( 100, 0 ); // First one in the
// left top corner
wbEnterMapping.setLayoutData( fdbEnterMapping );
wbEnterMapping.setEnabled( input );
ColumnInfo[] colinfo =
new ColumnInfo[] {
new ColumnInfo( sourceColumnLabel, ColumnInfo.COLUMN_TYPE_TEXT, false, false ),
new ColumnInfo( targetColumnLabel, ColumnInfo.COLUMN_TYPE_TEXT, false, false ), };
final TableView wFieldMappings =
new TableView(
transMeta, wInputComposite, SWT.FULL_SELECTION | SWT.SINGLE | SWT.BORDER, colinfo, 1, lsMod, props );
props.setLook( wFieldMappings );
FormData fdMappings = new FormData();
fdMappings.left = new FormAttachment( 0, 0 );
fdMappings.right = new FormAttachment( wbEnterMapping, -margin );
fdMappings.top = new FormAttachment( lastControl, margin * 2 );
fdMappings.bottom = new FormAttachment( 100, -50 );
wFieldMappings.setLayoutData( fdMappings );
lastControl = wFieldMappings;
// Pre-fill the rename table with the definition's existing renames.
for ( MappingValueRename valueRename : definition.getValueRenames() ) {
TableItem tableItem = new TableItem( wFieldMappings.table, SWT.NONE );
tableItem.setText( 1, Const.NVL( valueRename.getSourceValueName(), "" ) );
tableItem.setText( 2, Const.NVL( valueRename.getTargetValueName(), "" ) );
}
wFieldMappings.removeEmptyRows();
wFieldMappings.setRowNums();
wFieldMappings.optWidth( true );
// "Enter mapping": let the user match source fields to target fields in a
// dedicated dialog, then rebuild the rename table from the result.
wbEnterMapping.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent arg0 ) {
try {
RowMetaInterface sourceRowMeta = getFieldsFromStep( wInputStep.getText(), true, input );
RowMetaInterface targetRowMeta = getFieldsFromStep( wOutputStep.getText(), false, input );
String[] sourceFields = sourceRowMeta.getFieldNames();
String[] targetFields = targetRowMeta.getFieldNames();
EnterMappingDialog dialog = new EnterMappingDialog( shell, sourceFields, targetFields );
List<SourceToTargetMapping> mappings = dialog.open();
if ( mappings != null ) {
// first clear the dialog...
wFieldMappings.clearAll( false );
//
definition.getValueRenames().clear();
// Now add the new values...
for ( SourceToTargetMapping mapping : mappings ) {
TableItem item = new TableItem( wFieldMappings.table, SWT.NONE );
item.setText( 1, mapping.getSourceString( sourceFields ) );
item.setText( 2, mapping.getTargetString( targetFields ) );
// Rename direction depends on the tab kind: input tabs rename
// source->target, output tabs the reverse.
String source = input ? item.getText( 1 ) : item.getText( 2 );
String target = input ? item.getText( 2 ) : item.getText( 1 );
definition.getValueRenames().add( new MappingValueRename( source, target ) );
}
wFieldMappings.removeEmptyRows();
wFieldMappings.setRowNums();
wFieldMappings.optWidth( true );
}
} catch ( KettleException e ) {
new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.Error.Title" ), BaseMessages.getString(
PKG, "MappingDialog.Exception.ErrorGettingMappingSourceAndTargetFields", e.toString() ), e );
}
}
} );
// Persist a manually typed target step name and re-evaluate whether the
// "Enter mapping" button can be enabled.
wOutputStep.addFocusListener( new FocusAdapter() {
@Override
public void focusLost( FocusEvent event ) {
definition.setOutputStepname( wOutputStep.getText() );
try {
enableMappingButton( wbEnterMapping, input, wInputStep.getText(), wOutputStep.getText() );
} catch ( KettleException e ) {
// Show the missing/wrong step name error
//
new ErrorDialog( shell, "Error", "Unexpected error", e );
}
}
} );
wbOutputStep.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
String stepName = selectTransformationStepname( !input, input );
if ( stepName != null ) {
wOutputStep.setText( stepName );
definition.setOutputStepname( stepName );
try {
enableMappingButton( wbEnterMapping, input, wInputStep.getText(), wOutputStep.getText() );
} catch ( KettleException e ) {
// Show the missing/wrong stepname error
new ErrorDialog( shell, "Error", "Unexpected error", e );
}
}
}
} );
if ( input ) {
// Add a checkbox to indicate that all output mappings need to rename
// the values back...
//
Label wlRenameOutput = new Label( wInputComposite, SWT.RIGHT );
props.setLook( wlRenameOutput );
wlRenameOutput.setText( BaseMessages.getString( PKG, "MappingDialog.input.RenamingOnOutput" ) );
FormData fdlRenameOutput = new FormData();
fdlRenameOutput.top = new FormAttachment( lastControl, margin );
fdlRenameOutput.left = new FormAttachment( 0, 0 );
fdlRenameOutput.right = new FormAttachment( middle, -margin );
wlRenameOutput.setLayoutData( fdlRenameOutput );
Button wRenameOutput = new Button( wInputComposite, SWT.CHECK );
props.setLook( wRenameOutput );
FormData fdRenameOutput = new FormData();
fdRenameOutput.top = new FormAttachment( lastControl, margin );
fdRenameOutput.left = new FormAttachment( middle, 0 );
// fdRenameOutput.right = new FormAttachment(100, 0); // who cares, it's
// a check box
wRenameOutput.setLayoutData( fdRenameOutput );
wRenameOutput.setSelection( definition.isRenamingOnOutput() );
wRenameOutput.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent event ) {
definition.setRenamingOnOutput( !definition.isRenamingOnOutput() ); // flip
// the
// switch
}
} );
}
FormData fdParametersComposite = new FormData();
fdParametersComposite.left = new FormAttachment( 0, 0 );
fdParametersComposite.top = new FormAttachment( 0, 0 );
fdParametersComposite.right = new FormAttachment( 100, 0 );
fdParametersComposite.bottom = new FormAttachment( 100, 0 );
wInputComposite.setLayoutData( fdParametersComposite );
wInputComposite.layout();
wTab.setControl( wInputComposite );
// Register the tab so ok()/collectInformation() can apply remaining edits.
final ApplyChanges applyChanges =
new MappingDefinitionTab( definition, wInputStep, wOutputStep, wMainPath, wDescription, wFieldMappings );
changeList.add( applyChanges );
// OK, suppose for some weird reason the user wants to remove an input
// or output tab...
wTabFolder.addCTabFolder2Listener( new CTabFolder2Adapter() {
@Override
public void close( CTabFolderEvent event ) {
if ( event.item.equals( wTab ) ) {
// The user has the audacity to try and close this mapping
// definition tab.
// We really should warn him that this is a bad idea...
MessageBox box = new MessageBox( shell, SWT.YES | SWT.NO );
box.setText( BaseMessages.getString( PKG, "MappingDialog.CloseDefinitionTabAreYouSure.Title" ) );
box.setMessage( BaseMessages.getString( PKG, "MappingDialog.CloseDefinitionTabAreYouSure.Message" ) );
int answer = box.open();
if ( answer != SWT.YES ) {
event.doit = false;
} else {
// Remove it from our list to make sure it's gone...
if ( input ) {
inputMappings.remove( definition );
} else {
outputMappings.remove( definition );
}
// remove it from the changeList too...
// Otherwise the dialog leaks memory.
//
changeList.remove( applyChanges );
}
setFlags();
}
}
} );
// Re-evaluate widget enablement whenever the multi-input/multi-output or
// main-path selections change.
wMultiInput.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent arg0 ) {
setTabFlags( input, wlMainPath, wMainPath, wlInputStep, wInputStep, wbInputStep, wlOutputStep, wOutputStep,
wbOutputStep );
}
} );
wMultiOutput.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent arg0 ) {
setTabFlags( input, wlMainPath, wMainPath, wlInputStep, wInputStep, wbInputStep, wlOutputStep, wOutputStep,
wbOutputStep );
}
} );
wMainPath.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent arg0 ) {
setTabFlags( input, wlMainPath, wMainPath, wlInputStep, wInputStep, wbInputStep, wlOutputStep, wOutputStep,
wbOutputStep );
}
} );
setTabFlags( input, wlMainPath, wMainPath, wlInputStep, wInputStep, wbInputStep, wlOutputStep, wOutputStep,
wbOutputStep );
wTabFolder.setSelection( wTab );
}
/**
 * Enables/disables the widgets of one mapping-definition tab according to the
 * dialog-wide multi-input/multi-output settings and the tab's own "main data
 * path" selection.
 */
private void setTabFlags( boolean input, Label wlMainPath, Button wMainPath, Label wlInputStep, Text wInputStep,
                          Button wbInputStep, Label wlOutputStep, Text wOutputStep, Button wbOutputStep ) {
  boolean multiInput = wMultiInput.getSelection();
  boolean multiOutput = wMultiOutput.getSelection();

  // With a single input allowed, the one definition is forced to be the main
  // data path and the checkbox is locked.
  if ( !multiInput ) {
    wMainPath.setSelection( true );
  }
  wMainPath.setEnabled( multiInput );
  wlMainPath.setEnabled( multiInput );

  boolean mainPath = wMainPath.getSelection();
  // On input tabs the source-step widgets follow the main-path flag and the
  // target-step widgets follow multi-input; on output tabs it is the mirror image.
  boolean sourceEnabled = input ? !mainPath : multiOutput;
  boolean targetEnabled = input ? multiInput : !mainPath;
  wlInputStep.setEnabled( sourceEnabled );
  wInputStep.setEnabled( sourceEnabled );
  wbInputStep.setEnabled( sourceEnabled );
  wlOutputStep.setEnabled( targetEnabled );
  wOutputStep.setEnabled( targetEnabled );
  wbOutputStep.setEnabled( targetEnabled );
}
/**
 * Enables the "add input"/"add output" buttons: a new mapping definition may
 * be added when multiple mappings are allowed, or when none exists yet.
 */
private void setFlags() {
  boolean allowMultiInput = wMultiInput.getSelection();
  boolean allowMultiOutput = wMultiOutput.getSelection();
  // isEmpty() instead of size() == 0 — same behavior, idiomatic.
  wAddInput.setEnabled( allowMultiInput || inputMappings.isEmpty() );
  wAddOutput.setEnabled( allowMultiOutput || outputMappings.isEmpty() );
}
/**
 * Enables or disables the mapping button. We can only enable it if the target steps allows a mapping to be made
 * against it.
 *
 * @param button The button to disable or enable
 * @param input input or output. If it's true, we keep the button enabled all the time.
 * @param sourceStepname The mapping output step
 * @param targetStepname The target step to verify
 * @throws KettleException
 */
private void enableMappingButton( final Button button, boolean input, String sourceStepname, String targetStepname ) throws KettleException {
if ( input ) {
return; // nothing to do
}
boolean enabled = false;
if ( mappingTransMeta != null ) {
StepMeta mappingInputStep = mappingTransMeta.findMappingInputStep( sourceStepname );
if ( mappingInputStep != null ) {
// NOTE(review): the output step is looked up on the parent transMeta while
// the input step came from mappingTransMeta — confirm this asymmetry is
// intended.
StepMeta mappingOutputStep = transMeta.findMappingOutputStep( targetStepname );
// NOTE(review): mappingOutputStep is dereferenced without a null check —
// verify that findMappingOutputStep cannot return null (it may throw instead).
RowMetaInterface requiredFields = mappingOutputStep.getStepMetaInterface().getRequiredFields( transMeta );
if ( requiredFields != null && requiredFields.size() > 0 ) {
enabled = true;
}
}
}
button.setEnabled( enabled );
}
/**
 * Refreshes a mapping-definition tab's title and tooltip. The relevant step
 * name (source for input tabs, target for output tabs) is appended to the
 * title; the tooltip additionally shows the definition's description.
 */
private void setMappingDefinitionTabNameAndToolTip( CTabItem wTab, String tabTitle, String tabTooltip,
                                                    MappingIODefinition definition, boolean input ) {
  String stepname = input ? definition.getInputStepname() : definition.getOutputStepname();
  String description = definition.getDescription();

  // Tab label: append the step name when one has been entered.
  if ( Const.isEmpty( stepname ) ) {
    wTab.setText( tabTitle );
  } else {
    wTab.setText( tabTitle + " : " + stepname );
  }

  // Tooltip: base text plus step name and description, each in its own paragraph.
  StringBuilder tooltip = new StringBuilder( tabTooltip );
  if ( !Const.isEmpty( stepname ) ) {
    tooltip.append( Const.CR ).append( Const.CR ).append( stepname );
  }
  if ( !Const.isEmpty( description ) ) {
    tooltip.append( Const.CR ).append( Const.CR ).append( description );
  }
  wTab.setToolTipText( tooltip.toString() );
}
/**
 * Discards the dialog: clears the returned step name so the caller knows the
 * dialog was cancelled, restores the original changed flag and closes.
 */
private void cancel() {
stepname = null;
mappingMeta.setChanged( changed );
dispose();
}
/**
 * Confirms the dialog: verifies the mapping transformation can be loaded,
 * writes the location widgets and all tab contents back into
 * {@code mappingMeta}, and closes the dialog. Does nothing when the step
 * name is empty; stays open when the mapping fails to load.
 */
private void ok() {
if ( Const.isEmpty( wStepname.getText() ) ) {
return;
}
stepname = wStepname.getText(); // return value
// Make sure the specified mapping can actually be loaded before saving.
try {
loadTransformation();
} catch ( KettleException e ) {
new ErrorDialog( shell, BaseMessages.getString(
PKG, "MappingDialog.ErrorLoadingSpecifiedTransformation.Title" ), BaseMessages.getString(
PKG, "MappingDialog.ErrorLoadingSpecifiedTransformation.Message" ), e );
return;
}
// Persist only the fields of the active specification method; null out the
// others so stale locations are not kept around.
mappingMeta.setSpecificationMethod( getSpecificationMethod() );
switch( getSpecificationMethod() ) {
case FILENAME:
mappingMeta.setFileName( wFilename.getText() );
mappingMeta.setDirectoryPath( null );
mappingMeta.setTransName( null );
mappingMeta.setTransObjectId( null );
break;
case REPOSITORY_BY_NAME:
mappingMeta.setDirectoryPath( wDirectory.getText() );
mappingMeta.setTransName( wTransname.getText() );
mappingMeta.setFileName( null );
mappingMeta.setTransObjectId( null );
break;
case REPOSITORY_BY_REFERENCE:
mappingMeta.setFileName( null );
mappingMeta.setDirectoryPath( null );
mappingMeta.setTransName( null );
mappingMeta.setTransObjectId( getReferenceObjectId() );
break;
default:
break;
}
// Load the information on the tabs, optionally do some
// verifications...
//
collectInformation();
mappingMeta.setMappingParameters( mappingParameters );
mappingMeta.setInputMappings( inputMappings );
// Set the input steps for input mappings
mappingMeta.searchInfoAndTargetSteps( transMeta.getSteps() );
mappingMeta.setOutputMappings( outputMappings );
mappingMeta.setAllowingMultipleInputs( wMultiInput.getSelection() );
mappingMeta.setAllowingMultipleOutputs( wMultiOutput.getSelection() );
mappingMeta.setChanged( true );
dispose();
}
/**
 * Pushes the current widget contents of every registered tab back into the
 * underlying mapping definitions/parameters before they are saved.
 */
private void collectInformation() {
for ( ApplyChanges applyChanges : changeList ) {
applyChanges.applyChanges(); // collect information from all
// tabs...
}
}
// Method is defined as package-protected in order to be accessible by unit tests
/** @return the repository object id of the referenced mapping transformation (may be null). */
ObjectId getReferenceObjectId() {
return referenceObjectId;
}
/** Remembers the repository object id of the referenced mapping transformation. */
private void setReferenceObjectId( ObjectId referenceObjectId ) {
this.referenceObjectId = referenceObjectId;
}
// Method is defined as package-protected in order to be accessible by unit tests
/** @return how the mapping transformation is specified (file, repo name, or repo reference). */
ObjectLocationSpecificationMethod getSpecificationMethod() {
return specificationMethod;
}
/** Records how the mapping transformation is specified (file, repo name, or repo reference). */
private void setSpecificationMethod( ObjectLocationSpecificationMethod specificationMethod ) {
this.specificationMethod = specificationMethod;
}
}
| gretchiemoran/pentaho-kettle | ui/src/org/pentaho/di/ui/trans/steps/mapping/MappingDialog.java | Java | apache-2.0 | 70,861 |
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2008, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/
#include "AssimpPCH.h"
#ifndef ASSIMP_BUILD_NO_Q3BSP_IMPORTER
#include "Q3BSPZipArchive.h"
#include <algorithm>
#include <cassert>
namespace Assimp {
namespace Q3BSP {
// Translates a zlib open request into a call on the assimp IOSystem stored in
// 'opaque'. The zlib ZLIB_FILEFUNC_MODE_* bit mask is mapped onto the
// equivalent fopen() mode string before the file is opened.
voidpf IOSystem2Unzip::open(voidpf opaque, const char* filename, int mode) {
    IOSystem* io_system = (IOSystem*) opaque;

    const char* mode_fopen = NULL;
    if((mode & ZLIB_FILEFUNC_MODE_READWRITEFILTER)==ZLIB_FILEFUNC_MODE_READ) {
        mode_fopen = "rb";
    } else {
        if(mode & ZLIB_FILEFUNC_MODE_EXISTING) {
            mode_fopen = "r+b";
        } else {
            if(mode & ZLIB_FILEFUNC_MODE_CREATE) {
                mode_fopen = "wb";
            }
        }
    }

    // An unsupported mode combination previously forwarded a NULL mode string
    // to IOSystem::Open(); fail the open instead.
    if(mode_fopen == NULL) {
        return NULL;
    }

    return (voidpf) io_system->Open(filename, mode_fopen);
}
// unzip "read" callback: forwards to IOStream::Read() with a one-byte
// element size, so the return value is the number of bytes read.
uLong IOSystem2Unzip::read(voidpf /*opaque*/, voidpf stream, void* buf, uLong size) {
    IOStream* file = reinterpret_cast<IOStream*>(stream);
    return file->Read(buf, 1, size);
}
// unzip "write" callback: forwards to IOStream::Write() with a one-byte
// element size, so the return value is the number of bytes written.
uLong IOSystem2Unzip::write(voidpf /*opaque*/, voidpf stream, const void* buf, uLong size) {
    IOStream* file = reinterpret_cast<IOStream*>(stream);
    return file->Write(buf, 1, size);
}
// unzip "tell" callback: reports the current position of the wrapped stream.
long IOSystem2Unzip::tell(voidpf /*opaque*/, voidpf stream) {
    return reinterpret_cast<IOStream*>(stream)->Tell();
}
// unzip "seek" callback: maps the zlib seek origin onto the corresponding
// assimp aiOrigin and performs the seek. Returns 0 on success, -1 on failure.
long IOSystem2Unzip::seek(voidpf /*opaque*/, voidpf stream, uLong offset, int origin) {
    IOStream* file = reinterpret_cast<IOStream*>(stream);

    // Unknown origins fall back to a relative seek, matching the original
    // switch whose default case shared the SEEK_CUR branch.
    aiOrigin whence = aiOrigin_CUR;
    if(origin == ZLIB_FILEFUNC_SEEK_END) {
        whence = aiOrigin_END;
    } else if(origin == ZLIB_FILEFUNC_SEEK_SET) {
        whence = aiOrigin_SET;
    }

    return (file->Seek(offset, whence) == aiReturn_SUCCESS) ? 0 : -1;
}
// unzip "close" callback: hands the stream back to the owning IOSystem for
// disposal. Always reports success (0) to minizip.
int IOSystem2Unzip::close(voidpf opaque, voidpf stream) {
    IOSystem* owner = reinterpret_cast<IOSystem*>(opaque);
    owner->Close(reinterpret_cast<IOStream*>(stream));
    return 0;
}
// unzip "testerror" callback: this adapter keeps no error state of its own,
// so it always reports "no error" (0) to minizip.
int IOSystem2Unzip::testerror(voidpf opaque, voidpf stream) {
    return 0;
}
// Builds the zlib_filefunc_def callback table that routes all of minizip's
// file I/O through the given assimp IOSystem (carried in the opaque pointer).
zlib_filefunc_def IOSystem2Unzip::get(IOSystem* pIOHandler) {
    zlib_filefunc_def callbacks;

    callbacks.zopen_file = open;
    callbacks.zread_file = read;
    callbacks.zwrite_file = write;
    callbacks.ztell_file = tell;
    callbacks.zseek_file = seek;
    callbacks.zclose_file = close;
    callbacks.zerror_file = testerror;
    callbacks.opaque = (voidpf) pIOHandler;

    return callbacks;
}
// ------------------------------------------------------------------------------------------------
// Allocates an in-memory buffer of the given (uncompressed) size. The buffer is
// filled later by Q3BSPZipArchive::mapArchive(). A zero size is rejected only by
// the debug assertion.
ZipFile::ZipFile(size_t size) : m_Size(size) {
    ai_assert(m_Size != 0);
    // NOTE(review): the std::malloc() result is not checked for NULL — TODO
    // confirm callers tolerate an allocation failure here.
    m_Buffer = std::malloc(m_Size);
}
// Releases the in-memory copy of the archived file. std::free(NULL) is a
// harmless no-op, so a failed allocation is safe here.
ZipFile::~ZipFile() {
    std::free(m_Buffer);
    m_Buffer = NULL;
}
// Copies pSize*pCount bytes from the in-memory buffer into pvBuffer.
// NOTE(review): there is no read cursor — every call copies from offset 0,
// and an over-sized request only trips the assert in debug builds. Confirm
// callers always read the whole file in one go.
size_t ZipFile::Read(void* pvBuffer, size_t pSize, size_t pCount) {
    const size_t size = pSize * pCount;
    assert(size <= m_Size);
    std::memcpy(pvBuffer, m_Buffer, size);
    return size;
}
// Archive members are read-only; writing is unsupported and reports zero
// bytes written.
size_t ZipFile::Write(const void* /*pvBuffer*/, size_t /*pSize*/, size_t /*pCount*/) {
    return 0;
}
// Returns the uncompressed size of the archived member.
size_t ZipFile::FileSize() const {
    return m_Size;
}
// Seeking inside the pre-loaded buffer is not supported; always fails
// (consistent with Read(), which has no cursor).
aiReturn ZipFile::Seek(size_t /*pOffset*/, aiOrigin /*pOrigin*/) {
    return aiReturn_FAILURE;
}
// The read position is always reported as 0, matching Read()'s lack of a cursor.
size_t ZipFile::Tell() const {
    return 0;
}
// Nothing to flush for a read-only in-memory file.
void ZipFile::Flush() {
    // empty
}
// ------------------------------------------------------------------------------------------------
// Constructor.
// Opens the zip archive at rFile through the supplied IOSystem (all minizip
// I/O is routed via IOSystem2Unzip) and, on success, eagerly loads every
// member into memory via mapArchive(). An empty file name leaves the archive
// closed (isOpen() == false).
Q3BSPZipArchive::Q3BSPZipArchive(IOSystem* pIOHandler, const std::string& rFile) : m_ZipFileHandle(NULL), m_ArchiveMap() {
    if (! rFile.empty()) {
        zlib_filefunc_def mapping = IOSystem2Unzip::get(pIOHandler);

        m_ZipFileHandle = unzOpen2(rFile.c_str(), &mapping);
        if(m_ZipFileHandle != NULL) {
            mapArchive();
        }
    }
}
// ------------------------------------------------------------------------------------------------
// Destructor.
// Deletes the ZipFile objects owned by the map (they are never freed by
// Close(), which is a deliberate no-op), then closes the unzip handle.
Q3BSPZipArchive::~Q3BSPZipArchive() {
    for( std::map<std::string, ZipFile*>::iterator it(m_ArchiveMap.begin()), end(m_ArchiveMap.end()); it != end; ++it ) {
        delete it->second;
    }
    m_ArchiveMap.clear();

    if(m_ZipFileHandle != NULL) {
        unzClose(m_ZipFileHandle);
        m_ZipFileHandle = NULL;
    }
}
// ------------------------------------------------------------------------------------------------
// Returns true, if the archive is already open.
// The handle is only non-NULL after a successful unzOpen2() in the constructor.
bool Q3BSPZipArchive::isOpen() const {
    return (m_ZipFileHandle != NULL);
}
// ------------------------------------------------------------------------------------------------
// Returns true, if the filename is part of the archive.
// Membership is decided against the eagerly-built member map; no disk access.
bool Q3BSPZipArchive::Exists(const char* pFile) const {
    ai_assert(pFile != NULL);

    if (pFile == NULL) {
        return false;
    }

    return m_ArchiveMap.find(std::string(pFile)) != m_ArchiveMap.end();
}
// ------------------------------------------------------------------------------------------------
// Returns the separator delimiter.
// Resolved at compile time: backslash on native Windows builds, slash elsewhere.
char Q3BSPZipArchive::getOsSeparator() const {
#ifndef _WIN32
    return '/';
#else
    return '\\';
#endif
}
// ------------------------------------------------------------------------------------------------
// Opens a file, which is part of the archive.
// Returns the cached in-memory ZipFile, or NULL if the member is unknown.
// Ownership stays with the archive; the mode argument is ignored (read-only).
IOStream *Q3BSPZipArchive::Open(const char* pFile, const char* /*pMode*/) {
    ai_assert(pFile != NULL);

    std::map<std::string, ZipFile*>::iterator found = m_ArchiveMap.find(pFile);
    if(found == m_ArchiveMap.end()) {
        return NULL;
    }
    return (IOStream*) found->second;
}
// ------------------------------------------------------------------------------------------------
// Close a filestream.
// Intentionally a no-op: the streams are cached in m_ArchiveMap so the same
// member can be opened again; they are deleted in the destructor instead.
void Q3BSPZipArchive::Close(IOStream *pFile) {
    ai_assert(pFile != NULL);

    // We don't do anything in case the file would be opened again in the future
}
// ------------------------------------------------------------------------------------------------
// Returns the file-list of the archive.
// Any previous contents of rFileList are discarded; names come back in the
// map's (lexicographic) iteration order.
void Q3BSPZipArchive::getFileList(std::vector<std::string> &rFileList) {
    rFileList.clear();

    std::map<std::string, ZipFile*>::iterator it = m_ArchiveMap.begin();
    for(; it != m_ArchiveMap.end(); ++it) {
        rFileList.push_back(it->first);
    }
}
// ------------------------------------------------------------------------------------------------
// Maps the archive content.
// Eagerly decompresses every member of the already-open archive into an
// in-memory ZipFile keyed by member name. Does nothing if the map was already
// populated (safe to call twice).
bool Q3BSPZipArchive::mapArchive() {
    bool success = false;

    if(m_ZipFileHandle != NULL) {
        if(m_ArchiveMap.empty()) {
            //  At first ensure file is already open
            if(unzGoToFirstFile(m_ZipFileHandle) == UNZ_OK) {
                // Loop over all files
                do {
                    char filename[FileNameSize];
                    unz_file_info fileInfo;

                    if(unzGetCurrentFileInfo(m_ZipFileHandle, &fileInfo, filename, FileNameSize, NULL, 0, NULL, 0) == UNZ_OK) {
                        // The file has EXACTLY the size of uncompressed_size. In C
                        // you need to mark the last character with '\0', so add
                        // another character
                        if(unzOpenCurrentFile(m_ZipFileHandle) == UNZ_OK) {
                            std::pair<std::map<std::string, ZipFile*>::iterator, bool> result = m_ArchiveMap.insert(std::make_pair(filename, new ZipFile(fileInfo.uncompressed_size)));

                            // NOTE(review): if this read comes up short, the partially
                            // filled ZipFile is still left in the map — confirm intended.
                            if(unzReadCurrentFile(m_ZipFileHandle, result.first->second->m_Buffer, fileInfo.uncompressed_size) == (long int) fileInfo.uncompressed_size) {
                                if(unzCloseCurrentFile(m_ZipFileHandle) == UNZ_OK) {
                                    // Nothing to do anymore...
                                }
                            }
                        }
                    }
                } while(unzGoToNextFile(m_ZipFileHandle) != UNZ_END_OF_LIST_OF_FILE);
            }
        }

        // NOTE(review): success is reported whenever the handle is valid, even if
        // unzGoToFirstFile() failed above — verify callers only require "open".
        success = true;
    }

    return success;
}
// ------------------------------------------------------------------------------------------------
} // Namespace Q3BSP
} // Namespace Assimp
#endif // ASSIMP_BUILD_NO_Q3BSP_IMPORTER
| gsi-upm/SmartSim | smartbody/src/assimp-3.1.1/code/Q3BSPZipArchive.cpp | C++ | apache-2.0 | 9,518 |
// Macedonian ("mk") translation bundle for the dojox editor TableDialog plugin.
// Keys must match the default (root) TableDialog bundle; only the string values
// are localized. Do not edit the values unless updating the translation itself.
define("dojox/editor/plugins/nls/mk/TableDialog", {
//begin v1.x content
insertTableTitle: "Вметни табела",
modifyTableTitle: "Модифицирај табела",
rows: "Редови",
columns: "Колони",
align: "Порамни:",
cellPadding: "Дополнување на ќелија:",
cellSpacing: "Растојание меѓу ќелии:",
tableWidth: "Ширина на табела:",
backgroundColor: "Боја на заднина:",
borderColor: "Боја на раб:",
borderThickness: "Дебелина на раб:",
percent: "процент",
pixels: "пиксели",
"default": "стандардно",
left: "лево",
center: "центар",
right: "десно",
buttonSet: "Постави", // translated elsewhere?
buttonInsert: "Вметни",
buttonCancel: "Откажи",
selectTableLabel: "Избери табела",
insertTableRowBeforeLabel: "Додај ред пред",
insertTableRowAfterLabel: "Додај ред после",
insertTableColumnBeforeLabel: "Додај колона пред",
insertTableColumnAfterLabel: "Додај колона после",
deleteTableRowLabel: "Избриши ред",
deleteTableColumnLabel: "Избриши колона",
colorTableCellTitle: "Боја на заднина на ќелија на табела",
tableContextMenuTitle: "Контекстуално мени на табела"
//end v1.x content
});
| Caspar12/zh.sw | zh.web.site.admin/src/main/resources/static/js/dojo/dojox/editor/plugins/nls/mk/TableDialog.js.uncompressed.js | JavaScript | apache-2.0 | 1,440 |
// dojo/robot: DOH robot helpers that let tests address DOM nodes directly
// (scrollIntoView / mouseMoveAt) instead of hand-computing absolute screen
// coordinates. Nodes nested in same-origin iframes are supported by walking
// the chain of parent windows and accumulating offsets per frame.
define("dojo/robot", ["dojo", "doh/robot", "dojo/window"], function(dojo) {

dojo.experimental("dojo.robot");

(function(){
    // users who use doh+dojo get the added convenience of dojo.mouseMoveAt,
    // instead of computing the absolute coordinates of their elements themselves
    dojo.mixin(doh.robot,{
        // Normalizes the node argument: a function is called to produce the
        // node, and strings/nodes are resolved through dojo.byId.
        _resolveNode: function(/*String||DOMNode||Function*/ n){
            if(typeof n == "function"){
                // if the user passed a function returning a node, evaluate it
                n = n();
            }
            return n? dojo.byId(n) : null;
        },

        _scrollIntoView: function(/*Node*/ n){
            // scrolls the passed node into view, scrolling all ancester frames/windows as well.
            // Assumes parent iframes can be made fully visible given the current browser window size
            var d = dojo,
                dr = doh.robot,
                p = null;
            // Walk from the node's own window outward to dojo's window,
            // scrolling each enclosing frame and re-reading positions after
            // every scroll (scrolling changes the offsets).
            d.forEach(dr._getWindowChain(n), function(w){
                d.withGlobal(w, function(){
                    // get the position of the node wrt its parent window
                    // if it is a parent frame, its padding and border extents will get added in
                    var p2 = d.position(n, false),
                        b = d._getPadBorderExtents(n),
                        oldp = null;
                    // if p2 is the position of the original passed node, store the position away as p
                    // otherwise, node is actually an iframe. in this case, add the iframe's position wrt its parent window and also the iframe's padding and border extents
                    if(!p){
                        p = p2;
                    }else{
                        oldp = p;
                        p = {x: p.x+p2.x+b.l,
                            y: p.y+p2.y+b.t,
                            w: p.w,
                            h: p.h};

                    }
                    // scroll the parent window so that the node translated into the parent window's coordinate space is in view
                    dojo.window.scrollIntoView(n,p);
                    // adjust position for the new scroll offsets
                    p2 = d.position(n, false);
                    if(!oldp){
                        p = p2;
                    }else{
                        p = {x: oldp.x+p2.x+b.l,
                            y: oldp.y+p2.y+b.t,
                            w: p.w,
                            h: p.h};
                    }
                    // get the parent iframe so it can be scrolled too
                    n = w.frameElement;
                });
            });
        },

        _position: function(/*Node*/ n){
            // Returns the dojo.position of the passed node wrt the passed window's viewport,
            // following any parent iframes containing the node and clipping the node to each iframe.
            // precondition: _scrollIntoView already called
            var d = dojo, p = null, M = Math.max, m = Math.min;
            // p: the returned position of the node
            d.forEach(doh.robot._getWindowChain(n), function(w){
                d.withGlobal(w, function(){
                    // get the position of the node wrt its parent window
                    // if it is a parent frame, its padding and border extents will get added in
                    var p2 = d.position(n, false), b = d._getPadBorderExtents(n);
                    // if p2 is the position of the original passed node, store the position away as p
                    // otherwise, node is actually an iframe. in this case, add the iframe's position wrt its parent window and also the iframe's padding and border extents
                    if(!p){
                        p = p2;
                    }else{
                        var view;
                        d.withGlobal(n.contentWindow,function(){
                            view=dojo.window.getBox();
                        });
                        p2.r = p2.x+view.w;
                        p2.b = p2.y+view.h;
                        p = {x: M(p.x+p2.x,p2.x)+b.l,	// clip left edge of node wrt the iframe
                            y: M(p.y+p2.y,p2.y)+b.t,	// top edge
                            r: m(p.x+p2.x+p.w,p2.r)+b.l,	// right edge (to compute width)
                            b: m(p.y+p2.y+p.h,p2.b)+b.t};	// bottom edge (to compute height)
                        // save a few bytes by computing width and height from r and b
                        p.w = p.r-p.x;
                        p.h = p.b-p.y;
                    }
                    // the new node is now the old node's parent iframe
                    n=w.frameElement;
                });
            });
            return p;
        },

        _getWindowChain : function(/*Node*/ n){
            // Returns an array of windows starting from the passed node's parent window and ending at dojo's window
            var cW = dojo.window.get(n.ownerDocument);
            var arr=[cW];
            var f = cW.frameElement;
            // Recurse through frameElement until we reach dojo's own window
            // (or a cross-origin boundary where frameElement is null).
            return (cW == dojo.global || f == null)? arr : arr.concat(doh.robot._getWindowChain(f));
        },

        scrollIntoView : function(/*String||DOMNode||Function*/ node, /*Number, optional*/ delay){
            // summary:
            //		Scroll the passed node into view, if it is not.
            //
            // node:
            //		The id of the node, or the node itself, to move the mouse to.
            //		If you pass an id or a function that returns a node, the node will not be evaluated until the movement executes.
            //		This is useful if you need to move the mouse to an node that is not yet present.
            //
            // delay:
            //		Delay, in milliseconds, to wait before firing.
            //		The delay is a delta with respect to the previous automation call.
            //
            doh.robot.sequence(function(){
                doh.robot._scrollIntoView(doh.robot._resolveNode(node));
            }, delay);
        },

        mouseMoveAt : function(/*String||DOMNode||Function*/ node, /*Integer, optional*/ delay, /*Integer, optional*/ duration, /*Number, optional*/ offsetX, /*Number, optional*/ offsetY){
            // summary:
            //		Moves the mouse over the specified node at the specified relative x,y offset.
            //
            // description:
            //		Moves the mouse over the specified node at the specified relative x,y offset.
            //		If you do not specify an offset, mouseMove will default to move to the middle of the node.
            //		Example: to move the mouse over a ComboBox's down arrow node, call doh.mouseMoveAt(dijit.byId('setvaluetest').downArrowNode);
            //
            // node:
            //		The id of the node, or the node itself, to move the mouse to.
            //		If you pass an id or a function that returns a node, the node will not be evaluated until the movement executes.
            //		This is useful if you need to move the mouse to an node that is not yet present.
            //
            // delay:
            //		Delay, in milliseconds, to wait before firing.
            //		The delay is a delta with respect to the previous automation call.
            //		For example, the following code ends after 600ms:
            //			doh.robot.mouseClick({left:true}, 100) // first call; wait 100ms
            //			doh.robot.typeKeys("dij", 500) // 500ms AFTER previous call; 600ms in all
            //
            // duration:
            //		Approximate time Robot will spend moving the mouse
            //		The default is 100ms.
            //
            // offsetX:
            //		x offset relative to the node, in pixels, to move the mouse. The default is half the node's width.
            //
            // offsetY:
            //		y offset relative to the node, in pixels, to move the mouse. The default is half the node's height.
            //
            doh.robot._assertRobot();
            duration = duration||100;
            this.sequence(function(){
                node=doh.robot._resolveNode(node);
                doh.robot._scrollIntoView(node);
                var pos = doh.robot._position(node);
                // Only offsetY is tested: callers are expected to pass both
                // offsets or neither.
                if(offsetY === undefined){
                    offsetX=pos.w/2;
                    offsetY=pos.h/2;
                }
                var x = pos.x+offsetX;
                var y = pos.y+offsetY;
                doh.robot._mouseMove(x, y, false, duration);
            }, delay, duration);
        }
    });
})();

return doh.robot;
});
| sulistionoadi/belajar-springmvc-dojo | training-web/src/main/webapp/js/dojotoolkit/dojo/robot.js | JavaScript | apache-2.0 | 6,577 |
(function($) {

  /**
   * jQuery plugin: builds a table of contents from the headings found in
   * `opts.container`, prepends a quick-search box, highlights the entry whose
   * heading is currently scrolled into view, and (optionally) smooth-scrolls
   * when an entry is clicked.
   *
   * Usage: $('#toc').toc({ selectors: 'h2,h3' });
   */
  $.fn.toc = function(options) {
    var self = this;
    var opts = $.extend({}, jQuery.fn.toc.defaults, options);

    var container = $(opts.container);
    var headings = $(opts.selectors, container);
    var activeClassName = opts.prefix + '-active';

    // Click handler: optionally smooth-scroll to the target anchor, then move
    // the "active" marker onto the clicked entry.
    var scrollTo = function(e) {
      if (opts.smoothScrolling) {
        e.preventDefault();
        var elScrollTo = $(e.target).attr('href');
        var $el = $(elScrollTo.replace(":", "\\:"));
        var callbackCalled = false;
        $('body,html').animate({ scrollTop: $el.offset().top - opts.scrollOffset }, 400, 'swing', function(e) {
          location.hash = elScrollTo;
          // animate() runs this once per animated element (body AND html);
          // only invoke the user callback the first time.
          if (!callbackCalled) {
            opts.onScrollFinish.call(self);
            callbackCalled = true;
          }
        });
      }
      $('li', self).removeClass(activeClassName);
      $(e.target).parent().addClass(activeClassName);
    };

    // Highlight the entry for the first heading at/below the viewport top,
    // debounced to one update per 50ms of scrolling.
    var timeout;
    var highlightOnScroll = function(e) {
      if (timeout) {
        clearTimeout(timeout);
      }
      timeout = setTimeout(function() {
        var top = $(window).scrollTop(),
          highlighted;
        headings.each(function(i, heading) {
          var $h = $(heading);
          var htop = $h.offset().top - opts.highlightOffset;
          if (htop >= top) {
            $('li', self).removeClass(activeClassName);
            highlighted = $('li:eq(' + (i) + ')', self).addClass(activeClassName);
            opts.onHighlight(highlighted);
            return false;
          }
        });
      }, 50);
    };
    if (opts.highlightOnScroll) {
      $(window).bind('scroll', highlightOnScroll);
      highlightOnScroll();
    }

    // Quick-search state: flat list of <li> entries plus a tree (by index)
    // recording each entry's text, level and parent entry.
    var tocList;
    var treeObject = {};

    // Build treeObject from the generated <ul>.
    var createTree = function(ul) {
      var prevLevel = { level: -1, index: -1, parent: -1, val: '' };
      var levelParent = { 0: -1 };
      var currentParent; // BUGFIX: was assigned without declaration (implicit global)
      tocList = ul.children("li");
      tocList.each(function(i) {
        var me = $(this).removeClass("toc-active");
        // itemClass() ends the class with the heading level digit, e.g. "toc-h2".
        var currentLevel = parseInt(me.attr('class').trim().slice(-1), 10);
        if (currentLevel > prevLevel.level) {
          currentParent = prevLevel.index;
        } else if (currentLevel == prevLevel.level) {
          currentParent = prevLevel.parent;
        } else if (currentLevel < prevLevel.level) {
          currentParent = levelParent[currentLevel] || prevLevel.parent;
        }
        levelParent[currentLevel] = currentParent;
        var currentVal = $('a', this).text().trim().toLowerCase();
        treeObject[i] = {
          val: currentVal,
          level: currentLevel,
          parent: currentParent
        };
        prevLevel = { index: i, val: currentVal, level: currentLevel, parent: currentParent };
      });
    };

    // Show the parents of a matched entry recursively.
    var showParents = function(key) {
      var me = treeObject[key];
      if (me.parent > -1) {
        $(tocList[me.parent]).show();
        showParents(me.parent);
      }
    };

    // Show entries containing searchVal (plus their ancestors); hide the rest.
    var search = function(searchVal) {
      searchVal = searchVal.trim().toLowerCase();
      for (var key in treeObject) {
        var me = treeObject[key];
        if (me.val.indexOf(searchVal) !== -1 || searchVal.length == 0) {
          $(tocList[key]).show();
          if ($(tocList[me.parent]).is(":hidden")) {
            showParents(key);
          }
        } else {
          $(tocList[key]).hide();
        }
      }
    };

    return this.each(function() {
      // Build the TOC list and the quick-search form.
      var el = $(this);
      var searchVal = '';
      var searchForm = $("<form/>", { "class": "form-search quick-search" })
        .append($("<input/>", { type: "text", "class": "input-medium search-query", placeholder: "Quick Search" }))
        .append($("<i/>", { "class": "icon icon-search search-icon" }));
      searchForm.css({ 'position': 'fixed', 'top': '45px', 'padding-right': '20px' });
      $(".search-icon", searchForm).css({ 'marginLeft': '-20px', 'marginTop': '3px' });

      var ul = $('<ul/>');
      headings.each(function(i, heading) {
        var $h = $(heading);
        // Insert a named anchor just before the heading as the scroll target.
        var anchor = $('<span/>').attr('id', opts.anchorName(i, heading, opts.prefix)).insertBefore($h);
        // Build the TOC item linking to that anchor.
        var a = $('<a/>')
          .text(opts.headerText(i, heading, $h))
          .attr('href', '#' + opts.anchorName(i, heading, opts.prefix))
          .bind('click', function(e) {
            scrollTo(e);
            el.trigger('selected', $(this).attr('href'));
          });
        var li = $('<li/>')
          .addClass(opts.itemClass(i, heading, $h, opts.prefix))
          .append(a);
        ul.append(li);
      });
      el.html(ul);
      el.parent().prepend(searchForm);
      el.css({ 'top': '80px' });

      // Index the freshly built list for quick-search.
      createTree(ul);

      // Debounced "typing intent" search: the faster/longer the user types,
      // the shorter the wait before searching.
      var intentTimer;
      var accumulatedTime = 0;
      el.siblings('.quick-search').children('.search-query').bind('keyup', function(e) {
        if (accumulatedTime < 1000) {
          window.clearTimeout(intentTimer);
        }
        var me = $(this);
        // Swap the magnifier for a clear icon while a query is present.
        if (me.val().length > 0) {
          $(".search-icon").removeClass("icon-search").addClass("icon-remove-circle").css('cursor', 'pointer');
        } else {
          $(".search-icon").removeClass("icon-remove-circle").addClass("icon-search").css('cursor', 'auto');
        }
        var intentTime = 500 - (me.val().length * 10);
        accumulatedTime += intentTime;
        intentTimer = window.setTimeout(function() {
          if (searchVal == me.val()) {
            return false;
          }
          searchVal = me.val();
          search(me.val());
          accumulatedTime = 0;
        }, intentTime);
      });

      // Make the clear icon work.
      $(".search-icon").click(function(e) {
        if ($(this).hasClass('icon-remove-circle')) {
          $('.search-query').val('').trigger('keyup');
        } else {
          $('.search-query').focus();
        }
      });

      // Pin the search box under the navbar, and the TOC under both.
      var navHeight = $(".navbar").height();
      var searchHeight = $(".quick-search").height();
      $(".quick-search").css({ 'top': navHeight + 10 + 'px', 'position': 'fixed' });
      el.css('top', navHeight + searchHeight + 15 + 'px');
    });
  };

  jQuery.fn.toc.defaults = {
    container: 'body',
    selectors: 'h1,h2,h3',
    smoothScrolling: true,
    prefix: 'toc',
    scrollOffset: 0,
    onHighlight: function() {},
    highlightOnScroll: true,
    highlightOffset: 10,
    anchorName: function(i, heading, prefix) {
      return prefix + i;
    },
    headerText: function(i, heading, $heading) {
      return $heading.text();
    },
    itemClass: function(i, heading, $heading, prefix) {
      return prefix + '-' + $heading[0].tagName.toLowerCase();
    }
  };

})(jQuery);
| sconway/pulley-scroll | js/scroll-magic/docs/scripts/toc.js | JavaScript | apache-2.0 | 6,511 |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.internal.io.fs.FSRepository;
import org.tmatesoft.svn.core.io.ISVNConnectionListener;
import org.tmatesoft.svn.core.io.ISVNSession;
/**
 * Test double over SVNKit's {@link FSRepository} that records whether the
 * session was closed, whether a connection listener is currently attached,
 * and when the instance was created. It also re-exposes the protected
 * connection-fired hooks as public so tests can trigger them directly.
 */
public class MockSvnRepository extends FSRepository {
  private final long myCreationTime;
  private boolean mySessionWasClosed = false;
  private boolean myHaveConnectionListener = false;

  public MockSvnRepository(SVNURL location, ISVNSession options) {
    super(location, options);
    myCreationTime = System.currentTimeMillis();
  }

  @Override
  public void addConnectionListener(ISVNConnectionListener listener) {
    super.addConnectionListener(listener);
    myHaveConnectionListener = true;
  }

  // todo count? (a single flag is kept, not a listener count)
  @Override
  public void removeConnectionListener(ISVNConnectionListener listener) {
    super.removeConnectionListener(listener);
    myHaveConnectionListener = false;
  }

  @Override
  public void closeSession() {
    super.closeSession();
    mySessionWasClosed = true;
  }

  public boolean isSessionWasClosed() {
    return mySessionWasClosed;
  }

  public long getCreationTime() {
    return myCreationTime;
  }

  public boolean isHaveConnectionListener() {
    return myHaveConnectionListener;
  }

  @Override
  public void fireConnectionClosed() {
    super.fireConnectionClosed();
  }

  @Override
  public void fireConnectionOpened() {
    super.fireConnectionOpened();
  }
}
| asedunov/intellij-community | plugins/svn4idea/testSource/org/jetbrains/idea/svn/MockSvnRepository.java | Java | apache-2.0 | 2,100 |
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1_test
import (
"encoding/json"
"reflect"
"testing"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/intstr"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/v1"
versioned "k8s.io/kubernetes/pkg/api/v1"
sccutil "k8s.io/kubernetes/pkg/securitycontextconstraints/util"
)
// roundTrip encodes obj with the v1 legacy codec, decodes it back (which runs
// the registered defaulting functions), and converts the result into a fresh
// object of obj's concrete type. On any failure the test is marked failed and
// nil is returned.
func roundTrip(t *testing.T, obj runtime.Object) runtime.Object {
	codec := api.Codecs.LegacyCodec(v1.SchemeGroupVersion)

	encoded, err := runtime.Encode(codec, obj)
	if err != nil {
		t.Errorf("%v\n %#v", err, obj)
		return nil
	}

	decoded, err := runtime.Decode(codec, encoded)
	if err != nil {
		t.Errorf("%v\nData: %s\nSource: %#v", err, string(encoded), obj)
		return nil
	}

	converted := reflect.New(reflect.TypeOf(obj).Elem()).Interface().(runtime.Object)
	if err = api.Scheme.Convert(decoded, converted, nil); err != nil {
		t.Errorf("%v\nSource: %#v", err, decoded)
		return nil
	}
	return converted
}
// TestSetDefaultReplicationController verifies that defaulting copies the pod
// template's labels onto the RC's own labels and onto its selector — but only
// when the user left those fields empty.
func TestSetDefaultReplicationController(t *testing.T) {
	tests := []struct {
		rc             *v1.ReplicationController
		expectLabels   bool // RC labels should equal the template labels after defaulting
		expectSelector bool // selector should equal the template labels after defaulting
	}{
		{
			// Neither RC labels nor selector set: both are defaulted from the template.
			rc: &v1.ReplicationController{
				Spec: v1.ReplicationControllerSpec{
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Labels: map[string]string{
								"foo": "bar",
							},
						},
					},
				},
			},
			expectLabels:   true,
			expectSelector: true,
		},
		{
			// RC already has its own labels: they must be preserved; only the
			// selector is defaulted.
			rc: &v1.ReplicationController{
				ObjectMeta: metav1.ObjectMeta{
					Labels: map[string]string{
						"bar": "foo",
					},
				},
				Spec: v1.ReplicationControllerSpec{
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Labels: map[string]string{
								"foo": "bar",
							},
						},
					},
				},
			},
			expectLabels:   false,
			expectSelector: true,
		},
		{
			// Both labels and selector set by the user: nothing is overwritten.
			rc: &v1.ReplicationController{
				ObjectMeta: metav1.ObjectMeta{
					Labels: map[string]string{
						"bar": "foo",
					},
				},
				Spec: v1.ReplicationControllerSpec{
					Selector: map[string]string{
						"some": "other",
					},
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Labels: map[string]string{
								"foo": "bar",
							},
						},
					},
				},
			},
			expectLabels:   false,
			expectSelector: false,
		},
		{
			// Only the selector set: RC labels are defaulted, selector preserved.
			rc: &v1.ReplicationController{
				Spec: v1.ReplicationControllerSpec{
					Selector: map[string]string{
						"some": "other",
					},
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Labels: map[string]string{
								"foo": "bar",
							},
						},
					},
				},
			},
			expectLabels:   true,
			expectSelector: false,
		},
	}

	for _, test := range tests {
		rc := test.rc
		obj2 := roundTrip(t, runtime.Object(rc))
		rc2, ok := obj2.(*v1.ReplicationController)
		if !ok {
			t.Errorf("unexpected object: %v", rc2)
			t.FailNow()
		}
		// expectSelector/expectLabels assert equality (or deliberate inequality)
		// between the defaulted field and the template labels.
		if test.expectSelector != reflect.DeepEqual(rc2.Spec.Selector, rc2.Spec.Template.Labels) {
			if test.expectSelector {
				t.Errorf("expected: %v, got: %v", rc2.Spec.Template.Labels, rc2.Spec.Selector)
			} else {
				t.Errorf("unexpected equality: %v", rc.Spec.Selector)
			}
		}
		if test.expectLabels != reflect.DeepEqual(rc2.Labels, rc2.Spec.Template.Labels) {
			if test.expectLabels {
				t.Errorf("expected: %v, got: %v", rc2.Spec.Template.Labels, rc2.Labels)
			} else {
				t.Errorf("unexpected equality: %v", rc.Labels)
			}
		}
	}
}
// newInt returns a pointer to a fresh int32 holding val. Since Go function
// arguments are addressable copies, taking &val is the idiomatic replacement
// for new(int32) + assignment.
func newInt(val int32) *int32 {
	return &val
}
// TestSetDefaultReplicationControllerReplicas verifies replica defaulting:
// a nil Replicas field is defaulted to 1, while an explicit value — including
// an explicit 0 — is preserved.
func TestSetDefaultReplicationControllerReplicas(t *testing.T) {
	tests := []struct {
		rc             v1.ReplicationController
		expectReplicas int32
	}{
		{
			// Replicas unset (nil): defaults to 1.
			rc: v1.ReplicationController{
				Spec: v1.ReplicationControllerSpec{
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Labels: map[string]string{
								"foo": "bar",
							},
						},
					},
				},
			},
			expectReplicas: 1,
		},
		{
			// Explicit zero must NOT be bumped to the default.
			rc: v1.ReplicationController{
				Spec: v1.ReplicationControllerSpec{
					Replicas: newInt(0),
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Labels: map[string]string{
								"foo": "bar",
							},
						},
					},
				},
			},
			expectReplicas: 0,
		},
		{
			// Any other explicit value is preserved as-is.
			rc: v1.ReplicationController{
				Spec: v1.ReplicationControllerSpec{
					Replicas: newInt(3),
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Labels: map[string]string{
								"foo": "bar",
							},
						},
					},
				},
			},
			expectReplicas: 3,
		},
	}

	for _, test := range tests {
		rc := &test.rc
		obj2 := roundTrip(t, runtime.Object(rc))
		rc2, ok := obj2.(*v1.ReplicationController)
		if !ok {
			t.Errorf("unexpected object: %v", rc2)
			t.FailNow()
		}
		if rc2.Spec.Replicas == nil {
			t.Errorf("unexpected nil Replicas")
		} else if test.expectReplicas != *rc2.Spec.Replicas {
			t.Errorf("expected: %d replicas, got: %d", test.expectReplicas, *rc2.Spec.Replicas)
		}
	}
}
// TestSetDefaultReplicationControllerImagePullPolicy verifies pull-policy
// defaulting for init containers declared through the legacy
// "pod.beta.kubernetes.io/init-containers" annotation: a container without an
// explicit policy gets PullAlways (its image tag is ":latest"), while an
// explicit policy is preserved.
func TestSetDefaultReplicationControllerImagePullPolicy(t *testing.T) {
	containersWithoutPullPolicy, _ := json.Marshal([]map[string]interface{}{
		{
			"name":  "install",
			"image": "busybox:latest",
		},
	})

	containersWithPullPolicy, _ := json.Marshal([]map[string]interface{}{
		{
			"name":            "install",
			"imagePullPolicy": "IfNotPresent",
		},
	})

	tests := []struct {
		rc               v1.ReplicationController
		expectPullPolicy v1.PullPolicy
	}{
		{
			// No policy given: ":latest" image defaults to PullAlways.
			rc: v1.ReplicationController{
				Spec: v1.ReplicationControllerSpec{
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Annotations: map[string]string{
								"pod.beta.kubernetes.io/init-containers": string(containersWithoutPullPolicy),
							},
						},
					},
				},
			},
			expectPullPolicy: v1.PullAlways,
		},
		{
			// Explicit policy in the annotation is preserved.
			rc: v1.ReplicationController{
				Spec: v1.ReplicationControllerSpec{
					Template: &v1.PodTemplateSpec{
						ObjectMeta: metav1.ObjectMeta{
							Annotations: map[string]string{
								"pod.beta.kubernetes.io/init-containers": string(containersWithPullPolicy),
							},
						},
					},
				},
			},
			expectPullPolicy: v1.PullIfNotPresent,
		},
	}

	for _, test := range tests {
		rc := &test.rc
		obj2 := roundTrip(t, runtime.Object(rc))
		rc2, ok := obj2.(*v1.ReplicationController)
		if !ok {
			t.Errorf("unexpected object: %v", rc2)
			t.FailNow()
		}
		if test.expectPullPolicy != rc2.Spec.Template.Spec.InitContainers[0].ImagePullPolicy {
			t.Errorf("expected ImagePullPolicy: %s, got: %s",
				test.expectPullPolicy,
				rc2.Spec.Template.Spec.InitContainers[0].ImagePullPolicy,
			)
		}
	}
}
// TestSetDefaultService checks that an entirely empty Service is defaulted to
// SessionAffinity "None" and type ClusterIP.
func TestSetDefaultService(t *testing.T) {
	defaulted := roundTrip(t, runtime.Object(&v1.Service{})).(*v1.Service)
	if defaulted.Spec.SessionAffinity != v1.ServiceAffinityNone {
		t.Errorf("Expected default session affinity type:%s, got: %s", v1.ServiceAffinityNone, defaulted.Spec.SessionAffinity)
	}
	if defaulted.Spec.Type != v1.ServiceTypeClusterIP {
		t.Errorf("Expected default type:%s, got: %s", v1.ServiceTypeClusterIP, defaulted.Spec.Type)
	}
}
// TestSetDefaultSecretVolumeSource checks that a secret volume without an
// explicit DefaultMode receives v1.SecretVolumeSourceDefaultMode.
func TestSetDefaultSecretVolumeSource(t *testing.T) {
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			Volumes: []v1.Volume{
				{VolumeSource: v1.VolumeSource{Secret: &v1.SecretVolumeSource{}}},
			},
		},
	}

	defaulted := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
	mode := defaulted.Spec.Volumes[0].VolumeSource.Secret.DefaultMode
	if mode == nil || *mode != v1.SecretVolumeSourceDefaultMode {
		t.Errorf("Expected secret DefaultMode %v, got %v", v1.SecretVolumeSourceDefaultMode, mode)
	}
}
// TestSetDefaultConfigMapVolumeSource checks that a configMap volume without
// an explicit DefaultMode receives v1.ConfigMapVolumeSourceDefaultMode.
func TestSetDefaultConfigMapVolumeSource(t *testing.T) {
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			Volumes: []v1.Volume{
				{VolumeSource: v1.VolumeSource{ConfigMap: &v1.ConfigMapVolumeSource{}}},
			},
		},
	}

	defaulted := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
	mode := defaulted.Spec.Volumes[0].VolumeSource.ConfigMap.DefaultMode
	if mode == nil || *mode != v1.ConfigMapVolumeSourceDefaultMode {
		t.Errorf("Expected ConfigMap DefaultMode %v, got %v", v1.ConfigMapVolumeSourceDefaultMode, mode)
	}
}
// TestSetDefaultDownwardAPIVolumeSource checks that a downwardAPI volume
// without an explicit DefaultMode receives v1.DownwardAPIVolumeSourceDefaultMode.
func TestSetDefaultDownwardAPIVolumeSource(t *testing.T) {
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			Volumes: []v1.Volume{
				{VolumeSource: v1.VolumeSource{DownwardAPI: &v1.DownwardAPIVolumeSource{}}},
			},
		},
	}

	defaulted := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
	mode := defaulted.Spec.Volumes[0].VolumeSource.DownwardAPI.DefaultMode
	if mode == nil || *mode != v1.DownwardAPIVolumeSourceDefaultMode {
		t.Errorf("Expected DownwardAPI DefaultMode %v, got %v", v1.DownwardAPIVolumeSourceDefaultMode, mode)
	}
}
// TestSetDefaultProjectedVolumeSource checks that a projected volume without
// an explicit DefaultMode receives v1.ProjectedVolumeSourceDefaultMode.
func TestSetDefaultProjectedVolumeSource(t *testing.T) {
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			Volumes: []v1.Volume{
				{VolumeSource: v1.VolumeSource{Projected: &v1.ProjectedVolumeSource{}}},
			},
		},
	}

	defaulted := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
	mode := defaulted.Spec.Volumes[0].VolumeSource.Projected.DefaultMode
	if mode == nil || *mode != v1.ProjectedVolumeSourceDefaultMode {
		t.Errorf("Expected ProjectedVolumeSource DefaultMode %v, got %v", v1.ProjectedVolumeSourceDefaultMode, mode)
	}
}
// TestSetDefaultSecret checks that a Secret with no explicit type defaults to Opaque.
func TestSetDefaultSecret(t *testing.T) {
	defaulted := roundTrip(t, runtime.Object(&v1.Secret{})).(*v1.Secret)
	if defaulted.Type != v1.SecretTypeOpaque {
		t.Errorf("Expected secret type %v, got %v", v1.SecretTypeOpaque, defaulted.Type)
	}
}
// TestSetDefaultPersistentVolume checks that an empty PV defaults to phase
// Pending and reclaim policy Retain.
func TestSetDefaultPersistentVolume(t *testing.T) {
	defaulted := roundTrip(t, runtime.Object(&v1.PersistentVolume{})).(*v1.PersistentVolume)
	if defaulted.Status.Phase != v1.VolumePending {
		t.Errorf("Expected volume phase %v, got %v", v1.VolumePending, defaulted.Status.Phase)
	}
	if defaulted.Spec.PersistentVolumeReclaimPolicy != v1.PersistentVolumeReclaimRetain {
		t.Errorf("Expected pv reclaim policy %v, got %v", v1.PersistentVolumeReclaimRetain, defaulted.Spec.PersistentVolumeReclaimPolicy)
	}
}
// TestSetDefaultPersistentVolumeClaim checks that an empty PVC defaults to
// phase Pending.
func TestSetDefaultPersistentVolumeClaim(t *testing.T) {
	defaulted := roundTrip(t, runtime.Object(&v1.PersistentVolumeClaim{})).(*v1.PersistentVolumeClaim)
	if defaulted.Status.Phase != v1.ClaimPending {
		t.Errorf("Expected claim phase %v, got %v", v1.ClaimPending, defaulted.Status.Phase)
	}
}
// TestSetDefaulEndpointsProtocol verifies that endpoint ports with no
// protocol default to TCP, while explicitly set protocols are preserved.
// NOTE(review): the function name is missing a "t" ("Defaul"); kept as-is
// so existing `go test -run` patterns keep matching.
func TestSetDefaulEndpointsProtocol(t *testing.T) {
	in := &v1.Endpoints{Subsets: []v1.EndpointSubset{
		{Ports: []v1.EndpointPort{{}, {Protocol: "UDP"}, {}}},
	}}
	out := roundTrip(t, runtime.Object(in)).(*v1.Endpoints)
	for i, subset := range out.Subsets {
		for j, port := range subset.Ports {
			// An empty input protocol must have been defaulted to TCP;
			// anything else must round-trip unchanged.
			want := in.Subsets[i].Ports[j].Protocol
			if want == "" {
				want = v1.ProtocolTCP
			}
			if port.Protocol != want {
				t.Errorf("Expected protocol %s, got %s", want, port.Protocol)
			}
		}
	}
}
// TestSetDefaulServiceTargetPort verifies that a service port's
// TargetPort defaults to the Port value when unset and is preserved when
// explicitly set.
// NOTE(review): the function name is missing a "t" ("Defaul"); kept as-is
// so existing `go test -run` patterns keep matching.
func TestSetDefaulServiceTargetPort(t *testing.T) {
	svc := &v1.Service{Spec: v1.ServiceSpec{Ports: []v1.ServicePort{{Port: 1234}}}}
	defaulted := roundTrip(t, runtime.Object(svc)).(*v1.Service)
	if defaulted.Spec.Ports[0].TargetPort != intstr.FromInt(1234) {
		t.Errorf("Expected TargetPort to be defaulted, got %v", defaulted.Spec.Ports[0].TargetPort)
	}

	svc = &v1.Service{Spec: v1.ServiceSpec{Ports: []v1.ServicePort{{Port: 1234, TargetPort: intstr.FromInt(5678)}}}}
	defaulted = roundTrip(t, runtime.Object(svc)).(*v1.Service)
	if defaulted.Spec.Ports[0].TargetPort != intstr.FromInt(5678) {
		t.Errorf("Expected TargetPort to be unchanged, got %v", defaulted.Spec.Ports[0].TargetPort)
	}
}
// TestSetDefaultServicePort verifies service port defaulting: explicitly
// set Protocol and TargetPort values survive a round trip unchanged,
// while empty values default to TCP and to the numeric Port respectively.
func TestSetDefaultServicePort(t *testing.T) {
	// Unchanged if set.
	in := &v1.Service{Spec: v1.ServiceSpec{
		Ports: []v1.ServicePort{
			{Protocol: "UDP", Port: 9376, TargetPort: intstr.FromString("p")},
			{Protocol: "UDP", Port: 8675, TargetPort: intstr.FromInt(309)},
		},
	}}
	out := roundTrip(t, runtime.Object(in)).(*v1.Service)
	if out.Spec.Ports[0].Protocol != v1.ProtocolUDP {
		t.Errorf("Expected protocol %s, got %s", v1.ProtocolUDP, out.Spec.Ports[0].Protocol)
	}
	// Fix: the next two messages previously printed the Port while
	// comparing the TargetPort, producing misleading failure output.
	if out.Spec.Ports[0].TargetPort != intstr.FromString("p") {
		t.Errorf("Expected target port %v, got %v", in.Spec.Ports[0].TargetPort, out.Spec.Ports[0].TargetPort)
	}
	if out.Spec.Ports[1].Protocol != v1.ProtocolUDP {
		t.Errorf("Expected protocol %s, got %s", v1.ProtocolUDP, out.Spec.Ports[1].Protocol)
	}
	if out.Spec.Ports[1].TargetPort != intstr.FromInt(309) {
		t.Errorf("Expected target port %v, got %v", in.Spec.Ports[1].TargetPort, out.Spec.Ports[1].TargetPort)
	}
	// Defaulted.
	in = &v1.Service{Spec: v1.ServiceSpec{
		Ports: []v1.ServicePort{
			{Protocol: "", Port: 9376, TargetPort: intstr.FromString("")},
			{Protocol: "", Port: 8675, TargetPort: intstr.FromInt(0)},
		},
	}}
	out = roundTrip(t, runtime.Object(in)).(*v1.Service)
	if out.Spec.Ports[0].Protocol != v1.ProtocolTCP {
		t.Errorf("Expected protocol %s, got %s", v1.ProtocolTCP, out.Spec.Ports[0].Protocol)
	}
	if out.Spec.Ports[0].TargetPort != intstr.FromInt(int(in.Spec.Ports[0].Port)) {
		t.Errorf("Expected port %v, got %v", in.Spec.Ports[0].Port, out.Spec.Ports[0].TargetPort)
	}
	if out.Spec.Ports[1].Protocol != v1.ProtocolTCP {
		t.Errorf("Expected protocol %s, got %s", v1.ProtocolTCP, out.Spec.Ports[1].Protocol)
	}
	if out.Spec.Ports[1].TargetPort != intstr.FromInt(int(in.Spec.Ports[1].Port)) {
		t.Errorf("Expected port %v, got %v", in.Spec.Ports[1].Port, out.Spec.Ports[1].TargetPort)
	}
}
func TestSetDefaultNamespace(t *testing.T) {
s := &v1.Namespace{}
obj2 := roundTrip(t, runtime.Object(s))
s2 := obj2.(*v1.Namespace)
if s2.Status.Phase != v1.NamespaceActive {
t.Errorf("Expected phase %v, got %v", v1.NamespaceActive, s2.Status.Phase)
}
}
// TestSetDefaultPodSpecHostNetwork verifies that when HostNetwork is
// true, each container port's HostPort is defaulted to its
// ContainerPort, for both regular and init containers.
func TestSetDefaultPodSpecHostNetwork(t *testing.T) {
	const portNum int32 = 8080
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			HostNetwork: true,
			Containers: []v1.Container{
				{Ports: []v1.ContainerPort{{ContainerPort: portNum}}},
			},
			InitContainers: []v1.Container{
				{Ports: []v1.ContainerPort{{ContainerPort: portNum}}},
			},
		},
	}
	spec := roundTrip(t, runtime.Object(pod)).(*v1.Pod).Spec
	for _, ports := range [][]v1.ContainerPort{
		spec.Containers[0].Ports,
		spec.InitContainers[0].Ports,
	} {
		if got := ports[0].HostPort; got != portNum {
			t.Errorf("Expected container port to be defaulted, was made %d instead of %d", got, portNum)
		}
	}
}
// TestSetDefaultNodeExternalID verifies that a node's ExternalID defaults
// to its name while ProviderID stays empty.
func TestSetDefaultNodeExternalID(t *testing.T) {
	const name = "node0"
	node := &v1.Node{}
	node.Name = name
	defaulted := roundTrip(t, runtime.Object(node)).(*v1.Node)
	if defaulted.Spec.ExternalID != name {
		t.Errorf("Expected default External ID: %s, got: %s", name, defaulted.Spec.ExternalID)
	}
	if defaulted.Spec.ProviderID != "" {
		t.Errorf("Expected empty default Cloud Provider ID, got: %s", defaulted.Spec.ProviderID)
	}
}
// TestSetDefaultNodeStatusAllocatable verifies that
// NodeStatus.Allocatable is defaulted from NodeStatus.Capacity only when
// Allocatable is unset, and is left untouched otherwise.
func TestSetDefaultNodeStatusAllocatable(t *testing.T) {
	capacity := v1.ResourceList{
		v1.ResourceCPU:    resource.MustParse("1000m"),
		v1.ResourceMemory: resource.MustParse("10G"),
	}
	allocatable := v1.ResourceList{
		v1.ResourceCPU:    resource.MustParse("500m"),
		v1.ResourceMemory: resource.MustParse("5G"),
	}
	tests := []struct {
		capacity            v1.ResourceList
		allocatable         v1.ResourceList
		expectedAllocatable v1.ResourceList
	}{{ // Everything set, no defaulting.
		capacity:            capacity,
		allocatable:         allocatable,
		expectedAllocatable: allocatable,
	}, { // Allocatable set, no defaulting.
		capacity:            nil,
		allocatable:         allocatable,
		expectedAllocatable: allocatable,
	}, { // Capacity set, allocatable defaults to capacity.
		capacity:            capacity,
		allocatable:         nil,
		expectedAllocatable: capacity,
	}, { // Nothing set, allocatable "defaults" to capacity.
		capacity:            nil,
		allocatable:         nil,
		expectedAllocatable: nil,
	}}
	// copyResourceList deep-copies a ResourceList so defaulting cannot
	// mutate the shared fixtures above. Fix: the result variable was
	// previously named "copy", shadowing the builtin copy().
	copyResourceList := func(rl v1.ResourceList) v1.ResourceList {
		if rl == nil {
			return nil
		}
		out := make(v1.ResourceList, len(rl))
		for k, v := range rl {
			out[k] = *v.Copy()
		}
		return out
	}
	// resourceListsEqual compares two ResourceLists by key set and by
	// quantity value (Cmp handles differing representations like 1000m/1).
	resourceListsEqual := func(a v1.ResourceList, b v1.ResourceList) bool {
		if len(a) != len(b) {
			return false
		}
		for k, v := range a {
			vb, found := b[k]
			if !found {
				return false
			}
			if v.Cmp(vb) != 0 {
				return false
			}
		}
		return true
	}
	for i, testcase := range tests {
		node := v1.Node{
			Status: v1.NodeStatus{
				Capacity:    copyResourceList(testcase.capacity),
				Allocatable: copyResourceList(testcase.allocatable),
			},
		}
		node2 := roundTrip(t, runtime.Object(&node)).(*v1.Node)
		actual := node2.Status.Allocatable
		expected := testcase.expectedAllocatable
		if !resourceListsEqual(expected, actual) {
			t.Errorf("[%d] Expected NodeStatus.Allocatable: %+v; Got: %+v", i, expected, actual)
		}
	}
}
// TestSetDefaultObjectFieldSelectorAPIVersion verifies that an env var's
// FieldRef defaults its APIVersion to "v1".
func TestSetDefaultObjectFieldSelectorAPIVersion(t *testing.T) {
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			Containers: []v1.Container{{
				Env: []v1.EnvVar{{
					ValueFrom: &v1.EnvVarSource{
						FieldRef: &v1.ObjectFieldSelector{},
					},
				}},
			}},
		},
	}
	defaulted := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
	if got := defaulted.Spec.Containers[0].Env[0].ValueFrom.FieldRef.APIVersion; got != "v1" {
		t.Errorf("Expected default APIVersion v1, got: %v", got)
	}
}
// TestSetMinimumScalePod verifies that sub-milli resource quantities
// (e.g. "1n") are rounded up to the minimum representable scale ("1m")
// by SetObjectDefaults_Pod, for both regular and init containers.
// (The previous leading comment was copy-pasted from
// TestSetDefaultRequestsPod and described the wrong behavior.)
func TestSetMinimumScalePod(t *testing.T) {
	makeResources := func() v1.ResourceRequirements {
		return v1.ResourceRequirements{
			Requests: v1.ResourceList{
				v1.ResourceMemory: resource.MustParse("1n"),
			},
			Limits: v1.ResourceList{
				v1.ResourceCPU: resource.MustParse("2n"),
			},
		}
	}
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			Containers:     []v1.Container{{Resources: makeResources()}},
			InitContainers: []v1.Container{{Resources: makeResources()}},
		},
	}
	v1.SetObjectDefaults_Pod(pod)
	if expect := resource.MustParse("1m"); expect.Cmp(pod.Spec.Containers[0].Resources.Requests[v1.ResourceMemory]) != 0 {
		t.Errorf("did not round resources: %#v", pod.Spec.Containers[0].Resources)
	}
	if expect := resource.MustParse("1m"); expect.Cmp(pod.Spec.InitContainers[0].Resources.Requests[v1.ResourceMemory]) != 0 {
		t.Errorf("did not round resources: %#v", pod.Spec.InitContainers[0].Resources)
	}
}
// TestSetDefaultRequestsPod verifies that container resource requests are
// defaulted from limits when limits are set (while an explicit request of
// 0 is preserved), and that nothing is defaulted when no limits are set.
// Both regular and init containers are covered.
func TestSetDefaultRequestsPod(t *testing.T) {
	makeResources := func() v1.ResourceRequirements {
		return v1.ResourceRequirements{
			Requests: v1.ResourceList{
				v1.ResourceMemory: resource.MustParse("0"),
			},
			Limits: v1.ResourceList{
				v1.ResourceCPU:    resource.MustParse("100m"),
				v1.ResourceMemory: resource.MustParse("1Gi"),
			},
		}
	}
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			Containers:     []v1.Container{{Resources: makeResources()}},
			InitContainers: []v1.Container{{Resources: makeResources()}},
		},
	}
	defaulted := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
	for _, requests := range []v1.ResourceList{
		defaulted.Spec.Containers[0].Resources.Requests,
		defaulted.Spec.InitContainers[0].Resources.Requests,
	} {
		// CPU request must be copied from the limit; the explicit 0
		// memory request must be preserved.
		if cpu := requests[v1.ResourceCPU]; cpu.String() != "100m" {
			t.Errorf("Expected request cpu: %s, got: %s", "100m", cpu.String())
		}
		if mem := requests[v1.ResourceMemory]; mem.String() != "0" {
			t.Errorf("Expected request memory: %s, got: %s", "0", mem.String())
		}
	}
	// verify we do nothing if no limits are specified
	pod = &v1.Pod{
		Spec: v1.PodSpec{
			Containers:     []v1.Container{{}},
			InitContainers: []v1.Container{{}},
		},
	}
	defaulted = roundTrip(t, runtime.Object(pod)).(*v1.Pod)
	for _, requests := range []v1.ResourceList{
		defaulted.Spec.Containers[0].Resources.Requests,
		defaulted.Spec.InitContainers[0].Resources.Requests,
	} {
		if cpu := requests[v1.ResourceCPU]; cpu.String() != "0" {
			t.Errorf("Expected 0 request value, got: %s", cpu.String())
		}
	}
}
// TestDefaultRequestIsNotSetForReplicationController verifies that
// request-from-limit defaulting is NOT applied to pod templates embedded
// in a ReplicationController.
func TestDefaultRequestIsNotSetForReplicationController(t *testing.T) {
	podSpec := v1.PodSpec{
		Containers: []v1.Container{{
			Resources: v1.ResourceRequirements{
				Limits: v1.ResourceList{
					v1.ResourceCPU: resource.MustParse("100m"),
				},
			},
		}},
	}
	rc := &v1.ReplicationController{
		Spec: v1.ReplicationControllerSpec{
			Replicas: newInt(3),
			Template: &v1.PodTemplateSpec{
				ObjectMeta: metav1.ObjectMeta{
					Labels: map[string]string{"foo": "bar"},
				},
				Spec: podSpec,
			},
		},
	}
	defaulted := roundTrip(t, runtime.Object(rc)).(*v1.ReplicationController)
	requests := defaulted.Spec.Template.Spec.Containers[0].Resources.Requests
	if cpu := requests[v1.ResourceCPU]; cpu.String() != "0" {
		t.Errorf("Expected 0 request value, got: %s", cpu.String())
	}
}
// TestSetDefaultLimitRangeItem verifies LimitRange item defaulting:
// Default is filled from Max, DefaultRequest from the (defaulted) limit,
// and from Min when no limit is available for a resource.
func TestSetDefaultLimitRangeItem(t *testing.T) {
	limitRange := &v1.LimitRange{
		ObjectMeta: metav1.ObjectMeta{Name: "test-defaults"},
		Spec: v1.LimitRangeSpec{
			Limits: []v1.LimitRangeItem{{
				Type: v1.LimitTypeContainer,
				Max: v1.ResourceList{
					v1.ResourceCPU: resource.MustParse("100m"),
				},
				Min: v1.ResourceList{
					v1.ResourceMemory: resource.MustParse("100Mi"),
				},
				Default:        v1.ResourceList{},
				DefaultRequest: v1.ResourceList{},
			}},
		},
	}
	defaulted := roundTrip(t, runtime.Object(limitRange)).(*v1.LimitRange)
	limits := defaulted.Spec.Limits[0]
	// verify that default cpu was set to the max
	if cpuDefault := limits.Default[v1.ResourceCPU]; cpuDefault.String() != "100m" {
		t.Errorf("Expected default cpu: %s, got: %s", "100m", cpuDefault.String())
	}
	// verify that default request was set to the limit
	if cpuRequest := limits.DefaultRequest[v1.ResourceCPU]; cpuRequest.String() != "100m" {
		t.Errorf("Expected request cpu: %s, got: %s", "100m", cpuRequest.String())
	}
	// verify that if a min is provided, it will be the default if no limit is specified
	if memRequest := limits.DefaultRequest[v1.ResourceMemory]; memRequest.String() != "100Mi" {
		t.Errorf("Expected request memory: %s, got: %s", "100Mi", memRequest.String())
	}
}
// TestSetDefaultProbe verifies that an empty liveness probe picks up the
// probe defaults (timeout 1s, period 10s, success 1, failure 3) after a
// defaulting round trip.
func TestSetDefaultProbe(t *testing.T) {
	pod := &v1.Pod{
		Spec: v1.PodSpec{
			Containers: []v1.Container{{LivenessProbe: &v1.Probe{}}},
		},
	}
	want := v1.Probe{
		InitialDelaySeconds: 0,
		TimeoutSeconds:      1,
		PeriodSeconds:       10,
		SuccessThreshold:    1,
		FailureThreshold:    3,
	}
	got := *roundTrip(t, runtime.Object(pod)).(*v1.Pod).Spec.Containers[0].LivenessProbe
	if got != want {
		t.Errorf("Expected probe: %+v\ngot: %+v\n", want, got)
	}
}
func TestSetDefaultSchedulerName(t *testing.T) {
pod := &v1.Pod{}
output := roundTrip(t, runtime.Object(pod)).(*v1.Pod)
if output.Spec.SchedulerName != v1.DefaultSchedulerName {
t.Errorf("Expected scheduler name: %+v\ngot: %+v\n", v1.DefaultSchedulerName, output.Spec.SchedulerName)
}
}
// TestDefaultSecurityContextConstraints verifies FSGroup and
// SupplementalGroups strategy defaulting on SecurityContextConstraints:
// explicitly set strategy types round-trip unchanged, and unset strategy
// types are defaulted (to RunAsAny in every case covered by this table).
// The fixture table is kept byte-identical; each case name documents the
// expected defaulting.
func TestDefaultSecurityContextConstraints(t *testing.T) {
	tests := map[string]struct {
		scc              *versioned.SecurityContextConstraints
		expectedFSGroup  versioned.FSGroupStrategyType
		expectedSupGroup versioned.SupplementalGroupsStrategyType
	}{
		"shouldn't default": {
			scc: &versioned.SecurityContextConstraints{
				FSGroup: versioned.FSGroupStrategyOptions{
					Type: versioned.FSGroupStrategyMustRunAs,
				},
				SupplementalGroups: versioned.SupplementalGroupsStrategyOptions{
					Type: versioned.SupplementalGroupsStrategyMustRunAs,
				},
			},
			expectedFSGroup:  versioned.FSGroupStrategyMustRunAs,
			expectedSupGroup: versioned.SupplementalGroupsStrategyMustRunAs,
		},
		"default fsgroup runAsAny": {
			scc: &versioned.SecurityContextConstraints{
				RunAsUser: versioned.RunAsUserStrategyOptions{
					Type: versioned.RunAsUserStrategyRunAsAny,
				},
				SupplementalGroups: versioned.SupplementalGroupsStrategyOptions{
					Type: versioned.SupplementalGroupsStrategyMustRunAs,
				},
			},
			expectedFSGroup:  versioned.FSGroupStrategyRunAsAny,
			expectedSupGroup: versioned.SupplementalGroupsStrategyMustRunAs,
		},
		"default sup group runAsAny": {
			scc: &versioned.SecurityContextConstraints{
				RunAsUser: versioned.RunAsUserStrategyOptions{
					Type: versioned.RunAsUserStrategyRunAsAny,
				},
				FSGroup: versioned.FSGroupStrategyOptions{
					Type: versioned.FSGroupStrategyMustRunAs,
				},
			},
			expectedFSGroup:  versioned.FSGroupStrategyMustRunAs,
			expectedSupGroup: versioned.SupplementalGroupsStrategyRunAsAny,
		},
		"default fsgroup runAsAny with mustRunAs UID strat": {
			scc: &versioned.SecurityContextConstraints{
				RunAsUser: versioned.RunAsUserStrategyOptions{
					Type: versioned.RunAsUserStrategyMustRunAsRange,
				},
				SupplementalGroups: versioned.SupplementalGroupsStrategyOptions{
					Type: versioned.SupplementalGroupsStrategyMustRunAs,
				},
			},
			expectedFSGroup:  versioned.FSGroupStrategyRunAsAny,
			expectedSupGroup: versioned.SupplementalGroupsStrategyMustRunAs,
		},
		"default sup group runAsAny with mustRunAs UID strat": {
			scc: &versioned.SecurityContextConstraints{
				RunAsUser: versioned.RunAsUserStrategyOptions{
					Type: versioned.RunAsUserStrategyMustRunAsRange,
				},
				FSGroup: versioned.FSGroupStrategyOptions{
					Type: versioned.FSGroupStrategyMustRunAs,
				},
			},
			expectedFSGroup:  versioned.FSGroupStrategyMustRunAs,
			expectedSupGroup: versioned.SupplementalGroupsStrategyRunAsAny,
		},
	}
	// Round-trip each fixture and check the resulting strategy types.
	for k, v := range tests {
		output := roundTrip(t, runtime.Object(v.scc))
		scc := output.(*versioned.SecurityContextConstraints)
		if scc.FSGroup.Type != v.expectedFSGroup {
			t.Errorf("%s has invalid fsgroup. Expected: %v got: %v", k, v.expectedFSGroup, scc.FSGroup.Type)
		}
		if scc.SupplementalGroups.Type != v.expectedSupGroup {
			t.Errorf("%s has invalid supplemental group. Expected: %v got: %v", k, v.expectedSupGroup, scc.SupplementalGroups.Type)
		}
	}
}
// TestDefaultSCCVolumes verifies defaulting of the SCC Volumes list and
// its reconciliation with the deprecated AllowHostDirVolumePlugin field,
// for both "old" clients (no volumes slice) and "new" clients (explicit
// volume slices). The fixture table is kept byte-identical; each case
// name documents the expected reconciliation.
func TestDefaultSCCVolumes(t *testing.T) {
	tests := map[string]struct {
		scc             *versioned.SecurityContextConstraints
		expectedVolumes []versioned.FSType
		expectedHostDir bool
	}{
		// this expects the volumes to default to all for an empty volume slice
		// but since the host dir setting is false it should be all - host dir
		"old client - default allow* fields, no volumes slice": {
			scc:             &versioned.SecurityContextConstraints{},
			expectedVolumes: versioned.StringSetToFSType(sccutil.GetAllFSTypesExcept(string(versioned.FSTypeHostPath))),
			expectedHostDir: false,
		},
		// this expects the volumes to default to all for an empty volume slice
		"old client - set allowHostDir true fields, no volumes slice": {
			scc: &versioned.SecurityContextConstraints{
				AllowHostDirVolumePlugin: true,
			},
			expectedVolumes: []versioned.FSType{versioned.FSTypeAll},
			expectedHostDir: true,
		},
		"new client - allow* fields set with matching volume slice": {
			scc: &versioned.SecurityContextConstraints{
				Volumes:                  []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath},
				AllowHostDirVolumePlugin: true,
			},
			expectedVolumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath},
			expectedHostDir: true,
		},
		"new client - allow* fields set with mismatch host dir volume slice": {
			scc: &versioned.SecurityContextConstraints{
				Volumes:                  []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath},
				AllowHostDirVolumePlugin: false,
			},
			expectedVolumes: []versioned.FSType{versioned.FSTypeEmptyDir},
			expectedHostDir: false,
		},
		"new client - allow* fields set with mismatch FSTypeAll volume slice": {
			scc: &versioned.SecurityContextConstraints{
				Volumes:                  []versioned.FSType{versioned.FSTypeAll},
				AllowHostDirVolumePlugin: false,
			},
			expectedVolumes: versioned.StringSetToFSType(sccutil.GetAllFSTypesExcept(string(versioned.FSTypeHostPath))),
			expectedHostDir: false,
		},
		"new client - allow* fields unset with volume slice": {
			scc: &versioned.SecurityContextConstraints{
				Volumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath},
			},
			expectedVolumes: []versioned.FSType{versioned.FSTypeEmptyDir},
			expectedHostDir: false,
		},
		"new client - extra volume params retained": {
			scc: &versioned.SecurityContextConstraints{
				Volumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeHostPath, versioned.FSTypeGitRepo},
			},
			expectedVolumes: []versioned.FSType{versioned.FSTypeEmptyDir, versioned.FSTypeGitRepo},
			expectedHostDir: false,
		},
		"new client - empty volume slice, host dir true": {
			scc: &versioned.SecurityContextConstraints{
				Volumes:                  []versioned.FSType{},
				AllowHostDirVolumePlugin: true,
			},
			expectedVolumes: []versioned.FSType{versioned.FSTypeHostPath},
			expectedHostDir: true,
		},
		"new client - empty volume slice, host dir false": {
			scc: &versioned.SecurityContextConstraints{
				Volumes:                  []versioned.FSType{},
				AllowHostDirVolumePlugin: false,
			},
			expectedVolumes: []versioned.FSType{},
			expectedHostDir: false,
		},
	}
	// Round-trip each fixture and compare the reconciled volume list and
	// host-dir flag against expectations.
	for k, v := range tests {
		output := roundTrip(t, runtime.Object(v.scc))
		scc := output.(*versioned.SecurityContextConstraints)
		if !reflect.DeepEqual(scc.Volumes, v.expectedVolumes) {
			t.Errorf("%s has invalid volumes. Expected: %v got: %v", k, v.expectedVolumes, scc.Volumes)
		}
		if scc.AllowHostDirVolumePlugin != v.expectedHostDir {
			t.Errorf("%s has invalid host dir. Expected: %v got: %v", k, v.expectedHostDir, scc.AllowHostDirVolumePlugin)
		}
	}
}
| thrasher-redhat/origin | vendor/k8s.io/kubernetes/pkg/api/v1/defaults_test.go | GO | apache-2.0 | 31,680 |
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.portlet.container.services;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.portlet.PortletRequest;
import javax.servlet.http.HttpServletRequest;
import org.apache.pluto.container.PortletContainerException;
import org.apache.pluto.container.PortletWindow;
import org.apache.pluto.container.UserInfoService;
import org.apache.pluto.container.om.portlet.PortletApplicationDefinition;
import org.apache.pluto.container.om.portlet.UserAttribute;
import org.jasig.portal.portlet.om.IPortletDefinition;
import org.jasig.portal.portlet.om.IPortletEntity;
import org.jasig.portal.portlet.om.IPortletWindow;
import org.jasig.portal.portlet.registry.IPortletDefinitionRegistry;
import org.jasig.portal.portlet.registry.IPortletEntityRegistry;
import org.jasig.portal.portlet.registry.IPortletWindowRegistry;
import org.jasig.portal.url.IPortalRequestUtils;
import org.jasig.services.persondir.IPersonAttributeDao;
import org.jasig.services.persondir.IPersonAttributes;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Ties the IPersonAttributeDao to the Pluto UserInfoService: the USER_INFO
 * map handed to a portlet is built from the portal's person-directory
 * attributes, filtered down to the user attributes the portlet declares in
 * its deployment descriptor.
 *
 * @author Eric Dalquist
 * @version $Revision$
 */
public class PersonDirectoryUserInfoService implements UserInfoService {
    private IPersonAttributeDao personAttributeDao;
    private IPortletWindowRegistry portletWindowRegistry;
    private IPortletEntityRegistry portletEntityRegistry;
    private IPortletDefinitionRegistry portletDefinitionRegistry;
    private IPortalRequestUtils portalRequestUtils;
    /**
     * @return the portalRequestUtils
     */
    public IPortalRequestUtils getPortalRequestUtils() {
        return portalRequestUtils;
    }
    /**
     * @param portalRequestUtils the portalRequestUtils to set
     */
    @Autowired
    public void setPortalRequestUtils(IPortalRequestUtils portalRequestUtils) {
        this.portalRequestUtils = portalRequestUtils;
    }
    /**
     * @return the portletEntityRegistry
     */
    public IPortletEntityRegistry getPortletEntityRegistry() {
        return this.portletEntityRegistry;
    }
    /**
     * @param portletEntityRegistry the portletEntityRegistry to set
     */
    @Autowired
    public void setPortletEntityRegistry(IPortletEntityRegistry portletEntityRegistry) {
        this.portletEntityRegistry = portletEntityRegistry;
    }
    /**
     * @return the personAttributeDao
     */
    public IPersonAttributeDao getPersonAttributeDao() {
        return this.personAttributeDao;
    }
    /**
     * @param personAttributeDao the personAttributeDao to set
     */
    @Autowired
    public void setPersonAttributeDao(IPersonAttributeDao personAttributeDao) {
        this.personAttributeDao = personAttributeDao;
    }
    /**
     * @return the portletWindowRegistry
     */
    public IPortletWindowRegistry getPortletWindowRegistry() {
        return this.portletWindowRegistry;
    }
    /**
     * @param portletWindowRegistry the portletWindowRegistry to set
     */
    @Autowired
    public void setPortletWindowRegistry(IPortletWindowRegistry portletWindowRegistry) {
        this.portletWindowRegistry = portletWindowRegistry;
    }
    /**
     * @return the portletDefinitionRegistry
     */
    public IPortletDefinitionRegistry getPortletDefinitionRegistry() {
        return this.portletDefinitionRegistry;
    }
    /**
     * @param portletDefinitionRegistry the portletDefinitionRegistry to set
     */
    @Autowired
    public void setPortletDefinitionRegistry(IPortletDefinitionRegistry portletDefinitionRegistry) {
        this.portletDefinitionRegistry = portletDefinitionRegistry;
    }
    /* (non-Javadoc)
     * @see org.apache.pluto.spi.optional.UserInfoService#getUserInfo(javax.portlet.PortletRequest, org.apache.pluto.PortletWindow)
     */
    public Map<String, String> getUserInfo(PortletRequest request, PortletWindow plutoPortletWindow) throws PortletContainerException {
        //Get the remote user. When there is no remote user this returns null
        //(not an empty map) to the container.
        final String remoteUser = request.getRemoteUser();
        if (remoteUser == null) {
            return null;
        }
        final HttpServletRequest httpServletRequest = this.portalRequestUtils.getPortletHttpRequest(request);
        //Resolve the portal's window object for the Pluto window before delegating.
        final IPortletWindow portletWindow = this.portletWindowRegistry.convertPortletWindow(httpServletRequest, plutoPortletWindow);
        return this.getUserInfo(remoteUser, httpServletRequest, portletWindow);
    }
    /**
     * Common logic to get a subset of the user's attributes for the specified portlet window.
     *
     * @param remoteUser The user to get attributes for.
     * @param httpServletRequest The current, underlying httpServletRequest
     * @param portletWindow The window to filter attributes for
     * @return A Map of user attributes for the user and window; empty when the person is unknown.
     * @throws PortletContainerException If the portlet's expected attributes cannot be determined
     */
    protected Map<String, String> getUserInfo(String remoteUser, HttpServletRequest httpServletRequest, IPortletWindow portletWindow) throws PortletContainerException {
        //Get the list of user attributes the portal knows about the user
        final IPersonAttributes personAttributes = this.personAttributeDao.getPerson(remoteUser);
        if (personAttributes == null) {
            return Collections.emptyMap();
        }
        final List<? extends UserAttribute> expectedUserAttributes = this.getExpectedUserAttributes(httpServletRequest, portletWindow);
        final Map<String, String> portletUserAttributes = this.generateUserInfo(personAttributes, expectedUserAttributes, httpServletRequest);
        return portletUserAttributes;
    }
    /**
     * Using the portal's person attributes and the List of attributes the portlet
     * expects, generate the USER_INFO map for the portlet.
     *
     * @param personAttributes All the attributes the portal knows about the user
     * @param expectedUserAttributes The attributes the portlet expects to get
     * @param httpServletRequest The current, underlying portal request (currently unused here)
     * @return The Map to use for the USER_INFO attribute
     */
    protected Map<String, String> generateUserInfo(final IPersonAttributes personAttributes, final List<? extends UserAttribute> expectedUserAttributes,HttpServletRequest httpServletRequest) {
        final Map<String, String> portletUserAttributes = new HashMap<String, String>(expectedUserAttributes.size());
        //Copy expected attributes to the USER_INFO Map
        final Map<String, List<Object>> attributes = personAttributes.getAttributes();
        for (final UserAttribute userAttributeDD : expectedUserAttributes) {
            final String attributeName = userAttributeDD.getName();
            //TODO a personAttributes.hasAttribute(String) API is needed here, if hasAttribute and null then put the key with no value in the returned map
            if (attributes.containsKey(attributeName)) {
                //Only the first value of a multi-valued attribute is exposed to the portlet.
                final Object valueObj = personAttributes.getAttributeValue(attributeName);
                final String value = valueObj == null ? null : String.valueOf(valueObj);
                portletUserAttributes.put(attributeName, value);
            }
        }
        return portletUserAttributes;
    }
    /**
     * Converts the full portal user attribute Map to a USER_INFO map for the portlet.
     * NOTE(review): not referenced within this class; presumably retained for
     * subclasses -- confirm before removing.
     *
     * @param portalUserAttributes All the attributes the portal knows about the user
     * @return The Map to use for the USER_INFO attribute
     */
    protected Map<String, String> generateUserInfo(final Map<String, Object> portalUserAttributes) {
        final Map<String, String> portletUserAttributes = new HashMap<String, String>(portalUserAttributes.size());
        //Copy expected attributes to the USER_INFO Map
        for (final Map.Entry<String, Object> portalUserAttributeEntry : portalUserAttributes.entrySet()) {
            final String attributeName = portalUserAttributeEntry.getKey();
            final Object valueObj = portalUserAttributeEntry.getValue();
            final String value = String.valueOf(valueObj);
            portletUserAttributes.put(attributeName, value);
        }
        return portletUserAttributes;
    }
    /**
     * Get the list of user attributes the portlet expects, from the portlet
     * application's deployment descriptor.
     *
     * @param request The current request.
     * @param portletWindow The window to get the expected user attributes for.
     * @return The List of expected user attributes for the portlet
     * @throws PortletContainerException If expected attributes cannot be determined
     */
    protected List<? extends UserAttribute> getExpectedUserAttributes(HttpServletRequest request, final IPortletWindow portletWindow) throws PortletContainerException {
        final IPortletEntity portletEntity = portletWindow.getPortletEntity();
        final IPortletDefinition portletDefinition = portletEntity.getPortletDefinition();
        final PortletApplicationDefinition portletApplicationDescriptor = this.portletDefinitionRegistry.getParentPortletApplicationDescriptor(portletDefinition.getPortletDefinitionId());
        return portletApplicationDescriptor.getUserAttributes();
    }
}
| pspaude/uPortal | uportal-war/src/main/java/org/jasig/portal/portlet/container/services/PersonDirectoryUserInfoService.java | Java | apache-2.0 | 9,940 |
<?php
final class PhabricatorConfigDatabaseStatusController
extends PhabricatorConfigDatabaseController {
private $database;
private $table;
private $column;
private $key;
private $ref;
public function handleRequest(AphrontRequest $request) {
$viewer = $request->getViewer();
$this->database = $request->getURIData('database');
$this->table = $request->getURIData('table');
$this->column = $request->getURIData('column');
$this->key = $request->getURIData('key');
$this->ref = $request->getURIData('ref');
$query = new PhabricatorConfigSchemaQuery();
$actual = $query->loadActualSchemata();
$expect = $query->loadExpectedSchemata();
$comp = $query->buildComparisonSchemata($expect, $actual);
if ($this->ref !== null) {
$server_actual = idx($actual, $this->ref);
if (!$server_actual) {
return new Aphront404Response();
}
$server_comparison = $comp[$this->ref];
$server_expect = $expect[$this->ref];
if ($this->column) {
return $this->renderColumn(
$server_comparison,
$server_expect,
$server_actual,
$this->database,
$this->table,
$this->column);
} else if ($this->key) {
return $this->renderKey(
$server_comparison,
$server_expect,
$server_actual,
$this->database,
$this->table,
$this->key);
} else if ($this->table) {
return $this->renderTable(
$server_comparison,
$server_expect,
$server_actual,
$this->database,
$this->table);
} else if ($this->database) {
return $this->renderDatabase(
$server_comparison,
$server_expect,
$server_actual,
$this->database);
}
}
return $this->renderServers(
$comp,
$expect,
$actual);
}
  /**
   * Wrap page content in the standard database-status chrome: side nav,
   * a breadcrumb for each drill-down level that is set (database, table,
   * column, key), a "Documentation" button, and a fixed two-column layout.
   *
   * @param string|null $title Page title; falls back to "Database Status".
   * @param wild $body Renderable main-column content.
   * @return wild The fully assembled page.
   */
  private function buildResponse($title, $body) {
    $nav = $this->buildSideNavView();
    $nav->selectFilter('database/');

    if (!$title) {
      $title = pht('Database Status');
    }

    $ref = $this->ref;
    $database = $this->database;
    $table = $this->table;
    $column = $this->column;
    $key = $this->key;

    // Build one crumb per drill-down level, from least to most specific.
    $links = array();
    $links[] = array(
      pht('Database Status'),
      'database/',
    );

    if ($database) {
      $links[] = array(
        $database,
        "database/{$ref}/{$database}/",
      );
    }

    if ($table) {
      $links[] = array(
        $table,
        "database/{$ref}/{$database}/{$table}/",
      );
    }

    if ($column) {
      $links[] = array(
        $column,
        "database/{$ref}/{$database}/{$table}/col/{$column}/",
      );
    }

    if ($key) {
      $links[] = array(
        $key,
        "database/{$ref}/{$database}/{$table}/key/{$key}/",
      );
    }

    $crumbs = $this->buildApplicationCrumbs();
    $crumbs->setBorder(true);

    // The deepest crumb is plain text; every other crumb links to its level.
    $last_key = last_key($links);
    foreach ($links as $link_key => $link) {
      list($name, $href) = $link;
      if ($link_key == $last_key) {
        $crumbs->addTextCrumb($name);
      } else {
        $crumbs->addTextCrumb($name, $this->getApplicationURI($href));
      }
    }

    $doc_link = PhabricatorEnv::getDoclink('Managing Storage Adjustments');
    $button = id(new PHUIButtonView())
      ->setTag('a')
      ->setIcon('fa-book')
      ->setHref($doc_link)
      ->setText(pht('Documentation'));

    $header = $this->buildHeaderView($title, $button);

    $content = id(new PHUITwoColumnView())
      ->setHeader($header)
      ->setNavigation($nav)
      ->setFixed(true)
      ->setMainColumn($body);

    return $this->newPage()
      ->setTitle($title)
      ->setCrumbs($crumbs)
      ->appendChild($content);
  }
/**
 * Render the top-level status view: one table row per database on each
 * configured server, highlighting charset and collation issues.
 *
 * @param array $comp_servers   Comparison schemata, keyed by server ref.
 * @param array $expect_servers Expected schemata, keyed by server ref.
 * @param array $actual_servers Actual schemata, keyed by server ref.
 * @return wild Page response object.
 */
private function renderServers(
  array $comp_servers,
  array $expect_servers,
  array $actual_servers) {

  $charset_issue = PhabricatorConfigStorageSchema::ISSUE_CHARSET;
  $collation_issue = PhabricatorConfigStorageSchema::ISSUE_COLLATION;

  $rows = array();
  foreach ($comp_servers as $ref_key => $comp) {
    $actual = $actual_servers[$ref_key];
    $expect = $expect_servers[$ref_key];

    foreach ($comp->getDatabases() as $database_name => $database) {
      $actual_database = $actual->getDatabase($database_name);
      if ($actual_database) {
        $charset = $actual_database->getCharacterSet();
        $collation = $actual_database->getCollation();
      } else {
        // The database is expected but absent on the server, so no actual
        // attributes are available.
        $charset = null;
        $collation = null;
      }

      $status = $database->getStatus();

      $uri = $this->getURI(
        array(
          'ref' => $ref_key,
          'database' => $database_name,
        ));

      $rows[] = array(
        $this->renderIcon($status),
        $ref_key,
        phutil_tag(
          'a',
          array(
            'href' => $uri,
          ),
          $database_name),
        $this->renderAttr($charset, $database->hasIssue($charset_issue)),
        $this->renderAttr($collation, $database->hasIssue($collation_issue)),
      );
    }
  }

  $table = id(new AphrontTableView($rows))
    ->setHeaders(
      array(
        null,
        pht('Server'),
        pht('Database'),
        pht('Charset'),
        pht('Collation'),
      ))
    ->setColumnClasses(
      array(
        null,
        null,
        'wide pri',
        null,
        null,
      ));

  $title = pht('Database Status');

  // NOTE(review): $comp here is the loop variable left over from the
  // foreach above, so only the *last* server's issues are shown (and this
  // is undefined if there are no servers). Confirm whether issues should
  // instead be aggregated across all servers.
  $properties = $this->buildProperties(
    array(
    ),
    $comp->getIssues());

  $properties = $this->buildConfigBoxView(pht('Properties'), $properties);
  $table = $this->buildConfigBoxView(pht('Database'), $table);

  return $this->buildResponse($title, array($properties, $table));
}
/**
 * Render the detail page for a single database: a table-of-tables plus
 * actual-vs-expected charset and collation properties.
 *
 * @param PhabricatorConfigServerSchema $comp   Comparison schema.
 * @param PhabricatorConfigServerSchema $expect Expected schema.
 * @param PhabricatorConfigServerSchema $actual Actual schema.
 * @param string $database_name Database to render.
 * @return wild Page response, or 404 if the database is unknown.
 */
private function renderDatabase(
  PhabricatorConfigServerSchema $comp,
  PhabricatorConfigServerSchema $expect,
  PhabricatorConfigServerSchema $actual,
  $database_name) {

  $collation_issue = PhabricatorConfigStorageSchema::ISSUE_COLLATION;

  $database = $comp->getDatabase($database_name);
  if (!$database) {
    return new Aphront404Response();
  }

  // One row per table: status icon, link to the table detail page,
  // collation (flagged when it has a collation issue), and persistence
  // type.
  $rows = array();
  foreach ($database->getTables() as $table_name => $table) {
    $table_uri = $this->getURI(
      array(
        'table' => $table_name,
      ));

    $rows[] = array(
      $this->renderIcon($table->getStatus()),
      phutil_tag(
        'a',
        array(
          'href' => $table_uri,
        ),
        $table_name),
      $this->renderAttr(
        $table->getCollation(),
        $table->hasIssue($collation_issue)),
      $table->getPersistenceTypeDisplayName(),
    );
  }

  $table_view = id(new AphrontTableView($rows))
    ->setHeaders(
      array(
        null,
        pht('Table'),
        pht('Collation'),
        pht('Persistence'),
      ))
    ->setColumnClasses(
      array(
        null,
        'wide pri',
        null,
        null,
      ));

  // Pull the actual and expected attributes of this database; either side
  // may be missing.
  $actual_database = $actual->getDatabase($database_name);
  $actual_charset = $actual_database
    ? $actual_database->getCharacterSet()
    : null;
  $actual_collation = $actual_database
    ? $actual_database->getCollation()
    : null;

  $expect_database = $expect->getDatabase($database_name);
  $expect_charset = $expect_database
    ? $expect_database->getCharacterSet()
    : null;
  $expect_collation = $expect_database
    ? $expect_database->getCollation()
    : null;

  $properties = $this->buildProperties(
    array(
      array(
        pht('Server'),
        $this->ref,
      ),
      array(
        pht('Character Set'),
        $actual_charset,
      ),
      array(
        pht('Expected Character Set'),
        $expect_charset,
      ),
      array(
        pht('Collation'),
        $actual_collation,
      ),
      array(
        pht('Expected Collation'),
        $expect_collation,
      ),
    ),
    $database->getIssues());

  $properties = $this->buildConfigBoxView(pht('Properties'), $properties);
  $table_box = $this->buildConfigBoxView(pht('Database'), $table_view);

  return $this->buildResponse($database_name, array($properties, $table_box));
}
/**
 * Render the detail page for a single table: a list of its columns, a list
 * of its keys, and actual-vs-expected collation properties.
 *
 * Removes two dead locals from the previous version: an unused
 * $expect_key lookup in the key loop and an unused $box_header string.
 *
 * @param PhabricatorConfigServerSchema $comp   Comparison schema.
 * @param PhabricatorConfigServerSchema $expect Expected schema.
 * @param PhabricatorConfigServerSchema $actual Actual schema.
 * @param string $database_name Parent database.
 * @param string $table_name    Table to render.
 * @return wild Page response, or 404 if the database or table is unknown.
 */
private function renderTable(
  PhabricatorConfigServerSchema $comp,
  PhabricatorConfigServerSchema $expect,
  PhabricatorConfigServerSchema $actual,
  $database_name,
  $table_name) {

  // Issue constants which decide when an attribute is highlighted.
  $type_issue = PhabricatorConfigStorageSchema::ISSUE_COLUMNTYPE;
  $charset_issue = PhabricatorConfigStorageSchema::ISSUE_CHARSET;
  $collation_issue = PhabricatorConfigStorageSchema::ISSUE_COLLATION;
  $nullable_issue = PhabricatorConfigStorageSchema::ISSUE_NULLABLE;
  $unique_issue = PhabricatorConfigStorageSchema::ISSUE_UNIQUE;
  $columns_issue = PhabricatorConfigStorageSchema::ISSUE_KEYCOLUMNS;
  $longkey_issue = PhabricatorConfigStorageSchema::ISSUE_LONGKEY;
  $auto_issue = PhabricatorConfigStorageSchema::ISSUE_AUTOINCREMENT;

  $database = $comp->getDatabase($database_name);
  if (!$database) {
    return new Aphront404Response();
  }

  $table = $database->getTable($table_name);
  if (!$table) {
    return new Aphront404Response();
  }

  // Locate the same table in the actual and expected schemata; either may
  // be absent.
  $actual_database = $actual->getDatabase($database_name);
  $actual_table = null;
  if ($actual_database) {
    $actual_table = $actual_database->getTable($table_name);
  }

  $expect_database = $expect->getDatabase($database_name);
  $expect_table = null;
  if ($expect_database) {
    $expect_table = $expect_database->getTable($table_name);
  }

  // One row per column.
  $rows = array();
  foreach ($table->getColumns() as $column_name => $column) {
    $expect_column = null;
    if ($expect_table) {
      $expect_column = $expect_table->getColumn($column_name);
    }

    $status = $column->getStatus();

    // The abstract data type is only defined by the expected schema.
    $data_type = null;
    if ($expect_column) {
      $data_type = $expect_column->getDataType();
    }

    $uri = $this->getURI(
      array(
        'column' => $column_name,
      ));

    $rows[] = array(
      $this->renderIcon($status),
      phutil_tag(
        'a',
        array(
          'href' => $uri,
        ),
        $column_name),
      $data_type,
      $this->renderAttr(
        $column->getColumnType(),
        $column->hasIssue($type_issue)),
      $this->renderAttr(
        $this->renderBoolean($column->getNullable()),
        $column->hasIssue($nullable_issue)),
      $this->renderAttr(
        $this->renderBoolean($column->getAutoIncrement()),
        $column->hasIssue($auto_issue)),
      $this->renderAttr(
        $column->getCharacterSet(),
        $column->hasIssue($charset_issue)),
      $this->renderAttr(
        $column->getCollation(),
        $column->hasIssue($collation_issue)),
    );
  }

  $table_view = id(new AphrontTableView($rows))
    ->setHeaders(
      array(
        null,
        pht('Column'),
        pht('Data Type'),
        pht('Column Type'),
        pht('Nullable'),
        pht('Autoincrement'),
        pht('Character Set'),
        pht('Collation'),
      ))
    ->setColumnClasses(
      array(
        null,
        'wide pri',
        null,
        null,
        null,
        null,
        null,
      ));

  // One row per key.
  $key_rows = array();
  foreach ($table->getKeys() as $key_name => $key) {
    $status = $key->getStatus();

    // Total indexed byte length of the key. A result of 0 means some keyed
    // column does not exist, so the size can not be computed.
    $size = 0;
    foreach ($key->getColumnNames() as $column_spec) {
      list($column_name, $prefix) = $key->getKeyColumnAndPrefix($column_spec);
      $column = $table->getColumn($column_name);
      if (!$column) {
        $size = 0;
        break;
      }
      $size += $column->getKeyByteLength($prefix);
    }

    $size_formatted = null;
    if ($size) {
      $size_formatted = $this->renderAttr(
        $size,
        $key->hasIssue($longkey_issue));
    }

    $uri = $this->getURI(
      array(
        'key' => $key_name,
      ));

    $key_rows[] = array(
      $this->renderIcon($status),
      phutil_tag(
        'a',
        array(
          'href' => $uri,
        ),
        $key_name),
      $this->renderAttr(
        implode(', ', $key->getColumnNames()),
        $key->hasIssue($columns_issue)),
      $this->renderAttr(
        $this->renderBoolean($key->getUnique()),
        $key->hasIssue($unique_issue)),
      $size_formatted,
    );
  }

  $keys_view = id(new AphrontTableView($key_rows))
    ->setHeaders(
      array(
        null,
        pht('Key'),
        pht('Columns'),
        pht('Unique'),
        pht('Size'),
      ))
    ->setColumnClasses(
      array(
        null,
        'wide pri',
        null,
        null,
        null,
      ));

  $title = pht('%s.%s', $database_name, $table_name);

  if ($actual_table) {
    $actual_collation = $actual_table->getCollation();
  } else {
    $actual_collation = null;
  }

  if ($expect_table) {
    $expect_collation = $expect_table->getCollation();
  } else {
    $expect_collation = null;
  }

  $properties = $this->buildProperties(
    array(
      array(
        pht('Server'),
        $this->ref,
      ),
      array(
        pht('Collation'),
        $actual_collation,
      ),
      array(
        pht('Expected Collation'),
        $expect_collation,
      ),
    ),
    $table->getIssues());

  $properties = $this->buildConfigBoxView(pht('Properties'), $properties);
  $table = $this->buildConfigBoxView(pht('Database'), $table_view);
  $keys = $this->buildConfigBoxView(pht('Keys'), $keys_view);

  return $this->buildResponse(
    $title, array($properties, $table, $keys));
}
/**
 * Render the detail page for a single column: a property list comparing
 * every actual attribute against its expected value.
 *
 * @param PhabricatorConfigServerSchema $comp   Comparison schema.
 * @param PhabricatorConfigServerSchema $expect Expected schema.
 * @param PhabricatorConfigServerSchema $actual Actual schema.
 * @param string $database_name Parent database.
 * @param string $table_name    Parent table.
 * @param string $column_name   Column to render.
 * @return wild Page response, or 404 if any level is unknown.
 */
private function renderColumn(
  PhabricatorConfigServerSchema $comp,
  PhabricatorConfigServerSchema $expect,
  PhabricatorConfigServerSchema $actual,
  $database_name,
  $table_name,
  $column_name) {

  // Resolve the column in the comparison schema, 404ing if any level of
  // the hierarchy does not exist.
  $database = $comp->getDatabase($database_name);
  if (!$database) {
    return new Aphront404Response();
  }

  $table = $database->getTable($table_name);
  if (!$table) {
    return new Aphront404Response();
  }

  $column = $table->getColumn($column_name);
  if (!$column) {
    return new Aphront404Response();
  }

  // Locate the same column in the actual and expected schemata; any level
  // may be missing on either side.
  $actual_column = null;
  $actual_database = $actual->getDatabase($database_name);
  if ($actual_database) {
    $actual_table = $actual_database->getTable($table_name);
    if ($actual_table) {
      $actual_column = $actual_table->getColumn($column_name);
    }
  }

  $expect_column = null;
  $expect_database = $expect->getDatabase($database_name);
  if ($expect_database) {
    $expect_table = $expect_database->getTable($table_name);
    if ($expect_table) {
      $expect_column = $expect_table->getColumn($column_name);
    }
  }

  $actual_coltype = $actual_column ? $actual_column->getColumnType() : null;
  $actual_charset = $actual_column ? $actual_column->getCharacterSet() : null;
  $actual_collation = $actual_column ? $actual_column->getCollation() : null;
  $actual_nullable = $actual_column ? $actual_column->getNullable() : null;
  $actual_auto = $actual_column ? $actual_column->getAutoIncrement() : null;

  $data_type = $expect_column ? $expect_column->getDataType() : null;
  $expect_coltype = $expect_column ? $expect_column->getColumnType() : null;
  $expect_charset = $expect_column ? $expect_column->getCharacterSet() : null;
  $expect_collation = $expect_column ? $expect_column->getCollation() : null;
  $expect_nullable = $expect_column ? $expect_column->getNullable() : null;
  $expect_auto = $expect_column ? $expect_column->getAutoIncrement() : null;

  $title = pht(
    '%s.%s.%s',
    $database_name,
    $table_name,
    $column_name);

  // Side-by-side actual/expected attributes for this column.
  $properties = $this->buildProperties(
    array(
      array(pht('Server'), $this->ref),
      array(pht('Data Type'), $data_type),
      array(pht('Column Type'), $actual_coltype),
      array(pht('Expected Column Type'), $expect_coltype),
      array(pht('Character Set'), $actual_charset),
      array(pht('Expected Character Set'), $expect_charset),
      array(pht('Collation'), $actual_collation),
      array(pht('Expected Collation'), $expect_collation),
      array(pht('Nullable'), $this->renderBoolean($actual_nullable)),
      array(
        pht('Expected Nullable'),
        $this->renderBoolean($expect_nullable),
      ),
      array(pht('Autoincrement'), $this->renderBoolean($actual_auto)),
      array(
        pht('Expected Autoincrement'),
        $this->renderBoolean($expect_auto),
      ),
    ),
    $column->getIssues());

  $properties = $this->buildConfigBoxView(pht('Properties'), $properties);

  return $this->buildResponse($title, $properties);
}
/**
 * Render the detail page for a single key: a property list comparing its
 * actual uniqueness and column list against the expected values.
 *
 * @param PhabricatorConfigServerSchema $comp   Comparison schema.
 * @param PhabricatorConfigServerSchema $expect Expected schema.
 * @param PhabricatorConfigServerSchema $actual Actual schema.
 * @param string $database_name Parent database.
 * @param string $table_name    Parent table.
 * @param string $key_name      Key to render.
 * @return wild Page response, or 404 if any level is unknown.
 */
private function renderKey(
  PhabricatorConfigServerSchema $comp,
  PhabricatorConfigServerSchema $expect,
  PhabricatorConfigServerSchema $actual,
  $database_name,
  $table_name,
  $key_name) {

  // Resolve the key in the comparison schema, 404ing if any level of the
  // hierarchy does not exist.
  $database = $comp->getDatabase($database_name);
  if (!$database) {
    return new Aphront404Response();
  }

  $table = $database->getTable($table_name);
  if (!$table) {
    return new Aphront404Response();
  }

  $key = $table->getKey($key_name);
  if (!$key) {
    return new Aphront404Response();
  }

  // Locate the same key in the actual and expected schemata; any level may
  // be missing on either side.
  $actual_key = null;
  $actual_database = $actual->getDatabase($database_name);
  if ($actual_database) {
    $actual_table = $actual_database->getTable($table_name);
    if ($actual_table) {
      $actual_key = $actual_table->getKey($key_name);
    }
  }

  $expect_key = null;
  $expect_database = $expect->getDatabase($database_name);
  if ($expect_database) {
    $expect_table = $expect_database->getTable($table_name);
    if ($expect_table) {
      $expect_key = $expect_table->getKey($key_name);
    }
  }

  $actual_columns = $actual_key ? $actual_key->getColumnNames() : array();
  $actual_unique = $actual_key ? $actual_key->getUnique() : null;

  $expect_columns = $expect_key ? $expect_key->getColumnNames() : array();
  $expect_unique = $expect_key ? $expect_key->getUnique() : null;

  $title = pht(
    '%s.%s (%s)',
    $database_name,
    $table_name,
    $key_name);

  $properties = $this->buildProperties(
    array(
      array(pht('Server'), $this->ref),
      array(pht('Unique'), $this->renderBoolean($actual_unique)),
      array(
        pht('Expected Unique'),
        $this->renderBoolean($expect_unique),
      ),
      array(pht('Columns'), implode(', ', $actual_columns)),
      array(
        pht('Expected Columns'),
        implode(', ', $expect_columns),
      ),
    ),
    $key->getIssues());

  $properties = $this->buildConfigBoxView(pht('Properties'), $properties);

  return $this->buildResponse($title, $properties);
}
/**
 * Build a property list view from label/value pairs, followed by a
 * "Schema Status" section listing schema issues (or a green check when
 * there are none).
 *
 * @param array $properties List of array($label, $value) pairs.
 * @param array $issues     Schema issue constants for this object.
 * @return wild Rendered property panel.
 */
private function buildProperties(array $properties, array $issues) {
  $property_view = id(new PHUIPropertyListView())
    ->setUser($this->getRequest()->getUser());

  foreach ($properties as $property) {
    list($label, $value) = $property;
    $property_view->addProperty($label, $value);
  }

  $status_view = new PHUIStatusListView();
  if (!$issues) {
    // No issues: show a single green "all clear" row.
    $status_view->addItem(
      id(new PHUIStatusItemView())
        ->setIcon(PHUIStatusItemView::ICON_ACCEPT, 'green')
        ->setTarget(pht('No Schema Issues')));
  } else {
    foreach ($issues as $issue) {
      $status = PhabricatorConfigStorageSchema::getIssueStatus($issue);
      if ($status == PhabricatorConfigStorageSchema::STATUS_WARN) {
        $icon = PHUIStatusItemView::ICON_WARNING;
        $color = 'yellow';
      } else {
        // STATUS_FAIL, and anything unrecognized, renders as a failure.
        $icon = PHUIStatusItemView::ICON_REJECT;
        $color = 'red';
      }

      $note = PhabricatorConfigStorageSchema::getIssueDescription($issue);

      $status_view->addItem(
        id(new PHUIStatusItemView())
          ->setTarget(PhabricatorConfigStorageSchema::getIssueName($issue))
          ->setIcon($icon, $color)
          ->setNote($note));
    }
  }

  $property_view->addProperty(pht('Schema Status'), $status_view);

  return phutil_tag_div('config-page-property', $property_view);
}
/**
 * Build an application URI for a schema location, filling in any
 * components the caller does not override from the current request state.
 *
 * @param array $properties Map of overrides: 'ref', 'database', 'table',
 *                          'column', 'key'.
 * @return string Application URI for the location.
 */
private function getURI(array $properties) {
  $defaults = array(
    'ref' => $this->ref,
    'database' => $this->database,
    'table' => $this->table,
    'column' => $this->column,
    'key' => $this->key,
  );

  // Overlay the overrides on the defaults, then force component order.
  $properties = array_select_keys(
    $properties + $defaults,
    array_keys($defaults));

  $parts = array();
  foreach ($properties as $property_key => $property) {
    // Skip unset components.
    if (!strlen($property)) {
      continue;
    }
    // Columns and keys live at the same URI depth, so they carry a literal
    // disambiguating path component.
    if ($property_key == 'column') {
      $parts[] = 'col';
    } else if ($property_key == 'key') {
      $parts[] = 'key';
    }
    $parts[] = $property;
  }

  if ($parts) {
    $suffix = implode('/', $parts).'/';
  } else {
    $suffix = null;
  }

  return $this->getApplicationURI('/database/'.$suffix);
}
}
| devurandom/phabricator | src/applications/config/controller/PhabricatorConfigDatabaseStatusController.php | PHP | apache-2.0 | 22,570 |
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/kernels/cpu_backend_threadpool.h"
#include <vector>
#include <gtest/gtest.h>
#include "tensorflow/lite/kernels/cpu_backend_context.h"
namespace tflite {
namespace {
// Task that fills buffer_[start_ .. end_) with the values start_ .. end_ - 1.
// Each task instance owns a disjoint slice of a buffer shared between tasks.
class TestGenerateArrayOfIncrementingIntsTask
    : public cpu_backend_threadpool::Task {
 public:
  TestGenerateArrayOfIncrementingIntsTask(int* buffer, int start, int end)
      : buffer_(buffer), start_(start), end_(end) {}

  // Writes each index value into its own slot of this task's slice.
  void Run() override {
    for (int position = start_; position < end_; ++position) {
      buffer_[position] = position;
    }
  }

 private:
  int* buffer_;  // Shared output buffer; tasks write disjoint ranges.
  int start_;    // First index (inclusive) owned by this task.
  int end_;      // One past the last index owned by this task.
};
// Runs `num_threads` tasks on the threadpool which together must fill a
// buffer of `size` ints with the values 0 .. size - 1, then verifies the
// result.
void TestGenerateArrayOfIncrementingInts(int num_threads, int size) {
  // The buffer that the tasks will write to.
  std::vector<int> buffer(size);

  // Split [0, size) into num_threads contiguous chunks; the final chunk
  // absorbs the remainder of the integer division.
  std::vector<TestGenerateArrayOfIncrementingIntsTask> tasks;
  const int chunk_size = size / num_threads;
  int begin = 0;
  for (int thread = 0; thread < num_threads; thread++) {
    const bool is_last_thread = (thread == num_threads - 1);
    const int chunk_end = is_last_thread ? size : begin + chunk_size;
    tasks.emplace_back(buffer.data(), begin, chunk_end);
    begin = chunk_end;
  }
  ASSERT_EQ(num_threads, tasks.size());

  CpuBackendContext context;
  // SetMaxNumThreads only satisfies an assertion inside Execute. The actual
  // parallelism is the task count passed to Execute, which maps tasks to
  // threads 1:1.
  context.SetMaxNumThreads(num_threads);

  // Execute the tasks on the threadpool.
  cpu_backend_threadpool::Execute(tasks.size(), tasks.data(), &context);

  // Every slot must hold its own index.
  for (int i = 0; i < size; i++) {
    ASSERT_EQ(buffer[i], i);
  }
}
// Smoke tests over a few thread-count / buffer-size combinations, including
// sizes that do not divide evenly by the thread count.
TEST(CpuBackendThreadpoolTest, OneThreadSize100) {
  TestGenerateArrayOfIncrementingInts(1, 100);
}
TEST(CpuBackendThreadpoolTest, ThreeThreadsSize1000000) {
  TestGenerateArrayOfIncrementingInts(3, 1000000);
}
TEST(CpuBackendThreadpoolTest, TenThreadsSize1234567) {
  TestGenerateArrayOfIncrementingInts(10, 1234567);
}
} // namespace
} // namespace tflite
// Standard googletest entry point.
int main(int argc, char** argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
| karllessard/tensorflow | tensorflow/lite/kernels/cpu_backend_threadpool_test.cc | C++ | apache-2.0 | 2,864 |
package com.intellij.openapi.vfs;
// Inspection test fixture for the DevKit "'VirtualFile' instances should be
// compared by 'equals()', not '=='" inspection. The inline <warning> tags are
// expected-highlighting markers consumed by the inspection test framework,
// not real Java syntax.
// NOTE(review): adding comments to this fixture assumes the test framework
// matches warnings by inline markers only, not by absolute offsets — confirm.
public abstract class VirtualFile {
  void test() {
    // Same-reference, null, and `this` comparisons carry no markers, so the
    // inspection must NOT flag them.
    assert this == this;
    assert this != this;
    VirtualFileImpl first = new VirtualFileImpl();
    VirtualFile second = new VirtualFileImpl();
    assert first == null;
    assert first != null;
    assert this == second;
    assert second != this;
    // Comparing two distinct VirtualFile instances must be flagged, in
    // either operand order and for both == and !=.
    assert <warning descr="'VirtualFile' instances should be compared by 'equals()', not '=='">first == second</warning>;
    assert <warning descr="'VirtualFile' instances should be compared by 'equals()', not '=='">first != second</warning>;
    assert <warning descr="'VirtualFile' instances should be compared by 'equals()', not '=='">second == first</warning>;
    assert <warning descr="'VirtualFile' instances should be compared by 'equals()', not '=='">second != first</warning>;
  }
  static final class VirtualFileImpl extends VirtualFile {}
} | siosio/intellij-community | plugins/devkit/devkit-java-tests/testData/inspections/internal/com/intellij/openapi/vfs/VirtualFile.java | Java | apache-2.0 | 902 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.suggest;
import org.apache.lucene.analysis.core.SimpleAnalyzer;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MockFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.lucene.BytesRefs.toBytesRef;
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Base test case for {@link SuggestionBuilder} implementations. Provides
 * shared tests for serialization, equality/hashCode, xContent round-tripping
 * and {@code build()}; concrete suggestion builder tests plug in via the
 * abstract factory and assertion hooks.
 */
public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBuilder<SB>> extends ESTestCase {

    // Number of random builders exercised by each repeated test.
    private static final int NUMBER_OF_TESTBUILDERS = 20;
    protected static NamedWriteableRegistry namedWriteableRegistry;
    protected static NamedXContentRegistry xContentRegistry;

    /**
     * setup for the whole base test class
     */
    @BeforeClass
    public static void init() {
        SearchModule searchModule = new SearchModule(Settings.EMPTY, emptyList());
        namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables());
        xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());
    }

    /** Release the static registries so they do not leak across test classes. */
    @AfterClass
    public static void afterClass() {
        namedWriteableRegistry = null;
        xContentRegistry = null;
    }

    /**
     * Test serialization and deserialization of the suggestion builder
     */
    public void testSerialization() throws IOException {
        for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
            SB original = randomTestBuilder();
            SB deserialized = copy(original);
            assertEquals(deserialized, original);
            assertEquals(deserialized.hashCode(), original.hashCode());
            assertNotSame(deserialized, original);
        }
    }

    /**
     * returns a random suggestion builder, setting the common options randomly
     */
    protected SB randomTestBuilder() {
        return randomSuggestionBuilder();
    }

    /**
     * Randomly populates the options shared by all suggestion builders
     * (text, prefix, regex, analyzer, size, shard size) on the given builder.
     */
    public static void setCommonPropertiesOnRandomBuilder(SuggestionBuilder<?> randomSuggestion) {
        randomSuggestion.text(randomAlphaOfLengthBetween(2, 20)); // have to set the text because we don't know if the global text was set
        maybeSet(randomSuggestion::prefix, randomAlphaOfLengthBetween(2, 20));
        maybeSet(randomSuggestion::regex, randomAlphaOfLengthBetween(2, 20));
        maybeSet(randomSuggestion::analyzer, randomAlphaOfLengthBetween(2, 20));
        maybeSet(randomSuggestion::size, randomIntBetween(1, 20));
        maybeSet(randomSuggestion::shardSize, randomIntBetween(1, 20));
    }

    /**
     * create a randomized {@link SuggestBuilder} that is used in further tests
     */
    protected abstract SB randomSuggestionBuilder();

    /**
     * Test equality and hashCode properties
     */
    public void testEqualsAndHashcode() {
        for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
            checkEqualsAndHashCode(randomTestBuilder(), this::copy, this::mutate);
        }
    }

    /**
     * creates random suggestion builder, renders it to xContent and back to new
     * instance that should be equal to original
     */
    public void testFromXContent() throws IOException {
        for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
            SB suggestionBuilder = randomTestBuilder();
            XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
            if (randomBoolean()) {
                xContentBuilder.prettyPrint();
            }
            xContentBuilder.startObject();
            suggestionBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
            xContentBuilder.endObject();
            XContentBuilder shuffled = shuffleXContent(xContentBuilder, shuffleProtectedFields());
            try (XContentParser parser = createParser(shuffled)) {
                // we need to skip the start object and the name, those will be parsed by outer SuggestBuilder
                parser.nextToken();
                SuggestionBuilder<?> secondSuggestionBuilder = SuggestionBuilder.fromXContent(parser);
                assertNotSame(suggestionBuilder, secondSuggestionBuilder);
                assertEquals(suggestionBuilder, secondSuggestionBuilder);
                assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode());
            }
        }
    }

    /**
     * Builds a {@link SuggestionContext} from random builders against a mocked
     * {@link SearchExecutionContext} and verifies that all common options
     * (text/prefix/regex, field, size, shard size, analyzer) are carried over,
     * including their documented defaults when unset.
     */
    public void testBuild() throws IOException {
        for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
            SB suggestionBuilder = randomTestBuilder();
            Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
            IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index(randomAlphaOfLengthBetween(1, 10), "_na_"),
                    indexSettings);
            ScriptService scriptService = mock(ScriptService.class);
            MappedFieldType fieldType = mockFieldType(suggestionBuilder.field());
            // Analyzer registry that fabricates a SimpleAnalyzer for any name,
            // so randomly-chosen analyzer names always resolve.
            IndexAnalyzers indexAnalyzers = new IndexAnalyzers(
                new HashMap<>() {
                    @Override
                    public NamedAnalyzer get(Object key) {
                        return new NamedAnalyzer(key.toString(), AnalyzerScope.INDEX, new SimpleAnalyzer());
                    }
                },
                Collections.emptyMap(),
                Collections.emptyMap());
            MapperService mapperService = mock(MapperService.class);
            when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers);
            // Script "compilation" just echoes the script source back.
            when(scriptService.compile(any(Script.class), any())).then(invocation -> new TestTemplateService.MockTemplateScript.Factory(
                    ((Script) invocation.getArguments()[0]).getIdOrCode()));
            List<FieldMapper> mappers = Collections.singletonList(new MockFieldMapper(fieldType));
            MappingLookup lookup = MappingLookup.fromMappers(Mapping.EMPTY, mappers, emptyList(), emptyList());
            SearchExecutionContext mockContext = new SearchExecutionContext(0, 0, idxSettings, null,
                null, mapperService, lookup, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, null,
                System::currentTimeMillis, null, null, () -> true, null, emptyMap());
            SuggestionContext suggestionContext = suggestionBuilder.build(mockContext);
            assertEquals(toBytesRef(suggestionBuilder.text()), suggestionContext.getText());
            if (suggestionBuilder.text() != null && suggestionBuilder.prefix() == null) {
                // text doubles as the prefix when no explicit prefix is set
                assertEquals(toBytesRef(suggestionBuilder.text()), suggestionContext.getPrefix());
            } else {
                assertEquals(toBytesRef(suggestionBuilder.prefix()), suggestionContext.getPrefix());
            }
            assertEquals(toBytesRef(suggestionBuilder.regex()), suggestionContext.getRegex());
            assertEquals(suggestionBuilder.field(), suggestionContext.getField());
            int expectedSize = suggestionBuilder.size() != null ? suggestionBuilder.size : 5;
            assertEquals(expectedSize, suggestionContext.getSize());
            Integer expectedShardSize = suggestionBuilder.shardSize != null ? suggestionBuilder.shardSize : Math.max(expectedSize, 5);
            assertEquals(expectedShardSize, suggestionContext.getShardSize());
            assertSame(mockContext, suggestionContext.getSearchExecutionContext());
            if (suggestionBuilder.analyzer() != null) {
                assertEquals(suggestionBuilder.analyzer(), ((NamedAnalyzer) suggestionContext.getAnalyzer()).name());
            } else {
                // falls back to the field's search analyzer (see mockFieldType)
                assertEquals("fieldSearchAnalyzer", ((NamedAnalyzer) suggestionContext.getAnalyzer()).name());
            }
            assertSuggestionContext(suggestionBuilder, suggestionContext);
        }
    }

    /**
     * Building a suggestion against a field with no mapping must fail with a
     * descriptive {@link IllegalArgumentException}, regardless of the
     * allow-unmapped settings.
     */
    public void testBuildWithUnmappedField() {
        Settings.Builder builder = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT);
        if (randomBoolean()) {
            builder.put(IndexSettings.ALLOW_UNMAPPED.getKey(), randomBoolean());
        }
        Settings indexSettings = builder.build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index(randomAlphaOfLengthBetween(1, 10), "_na_"),
            indexSettings);
        SearchExecutionContext mockContext = new SearchExecutionContext(0, 0, idxSettings, null,
            null, mock(MapperService.class), MappingLookup.EMPTY, null, null, xContentRegistry(), namedWriteableRegistry, null, null,
            System::currentTimeMillis, null, null, () -> true, null, emptyMap());
        if (randomBoolean()) {
            mockContext.setAllowUnmappedFields(randomBoolean());
        }

        SB suggestionBuilder = randomTestBuilder();
        IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> suggestionBuilder.build(mockContext));
        assertEquals("no mapping found for field [" + suggestionBuilder.field + "]", iae.getMessage());
    }

    /**
     * put implementation dependent assertions in the sub-type test
     */
    protected abstract void assertSuggestionContext(SB builder, SuggestionContext context) throws IOException;

    /**
     * Mocks a {@link MappedFieldType} for the given field name whose search
     * analyzer is named "fieldSearchAnalyzer" (asserted on in testBuild).
     */
    protected MappedFieldType mockFieldType(String fieldName) {
        MappedFieldType fieldType = mock(MappedFieldType.class);
        when(fieldType.name()).thenReturn(fieldName);
        NamedAnalyzer searchAnalyzer = new NamedAnalyzer("fieldSearchAnalyzer", AnalyzerScope.INDEX, new SimpleAnalyzer());
        TextSearchInfo tsi = new TextSearchInfo(TextFieldMapper.Defaults.FIELD_TYPE, null, searchAnalyzer, searchAnalyzer);
        when(fieldType.getTextSearchInfo()).thenReturn(tsi);
        return fieldType;
    }

    /**
     * Subclasses can override this method and return a set of fields which should be protected from
     * recursive random shuffling in the {@link #testFromXContent()} test case
     */
    protected String[] shuffleProtectedFields() {
        return new String[0];
    }

    /**
     * Returns a copy of the given builder with exactly one property changed,
     * for use by the equals/hashCode checker.
     */
    private SB mutate(SB firstBuilder) throws IOException {
        SB mutation = copy(firstBuilder);
        assertNotSame(mutation, firstBuilder);
        // change either one of the shared SuggestionBuilder parameters, or delegate to the specific tests mutate method
        if (randomBoolean()) {
            switch (randomIntBetween(0, 5)) {
            case 0:
                mutation.text(randomValueOtherThan(mutation.text(), () -> randomAlphaOfLengthBetween(2, 20)));
                break;
            case 1:
                mutation.prefix(randomValueOtherThan(mutation.prefix(), () -> randomAlphaOfLengthBetween(2, 20)));
                break;
            case 2:
                mutation.regex(randomValueOtherThan(mutation.regex(), () -> randomAlphaOfLengthBetween(2, 20)));
                break;
            case 3:
                mutation.analyzer(randomValueOtherThan(mutation.analyzer(), () -> randomAlphaOfLengthBetween(2, 20)));
                break;
            case 4:
                mutation.size(randomValueOtherThan(mutation.size(), () -> randomIntBetween(1, 20)));
                break;
            case 5:
                mutation.shardSize(randomValueOtherThan(mutation.shardSize(), () -> randomIntBetween(1, 20)));
                break;
            }
        } else {
            mutateSpecificParameters(firstBuilder);
        }
        return mutation;
    }

    /**
     * take and input {@link SuggestBuilder} and return another one that is
     * different in one aspect (to test non-equality)
     */
    protected abstract void mutateSpecificParameters(SB firstBuilder) throws IOException;

    /** Round-trips the builder through the transport serialization layer. */
    @SuppressWarnings("unchecked")
    protected SB copy(SB original) throws IOException {
        return copyWriteable(original, namedWriteableRegistry,
                (Writeable.Reader<SB>) namedWriteableRegistry.getReader(SuggestionBuilder.class, original.getWriteableName()));
    }

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        return xContentRegistry;
    }
}
| robin13/elasticsearch | server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java | Java | apache-2.0 | 14,348 |
from math import exp
from collections import defaultdict
@outputSchema("scaled: double")
def logistic_scale(val, logistic_param):
    # Logistic squashing rescaled to the open interval (-1, 1); mathematically
    # equivalent to tanh(logistic_param * val / 2). logistic_param controls
    # how steeply values saturate toward +/-1.
    return -1.0 + 2.0 / (1.0 + exp(-logistic_param * val))
@outputSchema("t: (item_A, item_B, dist: double, raw_weight: double)")
def best_path(paths):
    """Return the path tuple with the smallest distance (field 2).

    Uses min() instead of sorting the whole bag: O(n) rather than
    O(n log n), with the same result on ties (the first minimal element
    wins, matching the stable sort the previous version relied on).
    Note: raises ValueError instead of IndexError on an empty bag.
    """
    return min(paths, key=lambda t: t[2])
@outputSchema("t: (item_A, item_B, dist: double, raw_weight: double, link_data: map[], linking_item: chararray)")
def best_path_detailed(paths):
    """Return the detailed path tuple with the smallest distance (field 2).

    Same selection rule as best_path, but for tuples carrying the extra
    link_data / linking_item fields. min() avoids sorting the whole bag;
    ties resolve to the first minimal element, as before.
    Note: raises ValueError instead of IndexError on an empty bag.
    """
    return min(paths, key=lambda t: t[2])
@outputSchema("signal_map:map[]")
def aggregate_signal_types(signal_list):
    """Count how many rows carry each signal type.

    Each row's fourth field (row[3]) is the signal type; rows whose type is
    null/empty are skipped. Returns a dict mapping signal type -> count.
    """
    signal_counts = {}
    for row in signal_list:
        signal_type = row[3]
        if signal_type:
            # dict.get with a default replaces the old truthiness check,
            # which conflated "missing" with a zero count.
            signal_counts[signal_type] = signal_counts.get(signal_type, 0) + 1
    return signal_counts
@outputSchema("signal_map:map[]")
def combine_signals(signal_list):
    """Sum per-type signal counts across rows.

    Each row's fourth field (row[3]) is a map of signal type -> count (or
    null). Returns a single map with the per-type totals.

    Bug fix: the previous version keyed the accumulator by row[3] (the map
    itself) instead of by each signal type, which raised TypeError
    (unhashable dict on lookup) and then KeyError on the += for any
    non-empty input map.
    """
    totals = {}
    for row in signal_list:
        signals = row[3]
        if signals:
            for signal_type in signals.keys():
                totals[signal_type] = totals.get(signal_type, 0) + signals[signal_type]
    return totals
| ShadySQL/mortar-recsys | udfs/jython/recsys.py | Python | apache-2.0 | 1,151 |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2015 the original author or authors.
*/
package org.assertj.core.api.fail;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.util.StackTraceUtils.hasStackTraceElementRelatedToAssertJ;
import org.assertj.core.api.Fail;
import org.junit.Test;
/**
* Tests for <code>{@link Fail#setRemoveAssertJRelatedElementsFromStackTrace(boolean)}</code>.
*
* @author Joel Costigliola
*/
public class Fail_fest_elements_stack_trace_filtering_Test {
@Test
public void fest_elements_should_be_removed_from_assertion_error_stack_trace() {
Fail.setRemoveAssertJRelatedElementsFromStackTrace(true);
try {
assertThat(5).isLessThan(0);
} catch (AssertionError assertionError) {
assertThat(hasStackTraceElementRelatedToAssertJ(assertionError)).isFalse();
}
}
@Test
public void fest_elements_should_be_kept_in_assertion_error_stack_trace() {
Fail.setRemoveAssertJRelatedElementsFromStackTrace(false);
try {
assertThat(5).isLessThan(0);
} catch (AssertionError assertionError) {
assertThat(hasStackTraceElementRelatedToAssertJ(assertionError)).isTrue();
}
}
}
| mdecourci/assertj-core | src/test/java/org/assertj/core/api/fail/Fail_fest_elements_stack_trace_filtering_Test.java | Java | apache-2.0 | 1,721 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.readcommitter;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.annotations.InterfaceStability;
import org.apache.carbondata.core.statusmanager.SegmentRefreshInfo;
/**
 * Stores the snapshot of index files captured through the
 * read-committed scope interface.
 */
@InterfaceAudience.Internal
@InterfaceStability.Evolving
public class ReadCommittedIndexFileSnapShot implements Serializable {

  /** Maps each segment number to the list of index files captured for it. */
  private final Map<String, List<String>> segmentIndexFileMap;

  /** Maps each segment number to its refresh/update-timestamp information. */
  private final Map<String, SegmentRefreshInfo> segmentTimestampUpdaterMap;

  public ReadCommittedIndexFileSnapShot(Map<String, List<String>> segmentIndexFileMap,
      Map<String, SegmentRefreshInfo> segmentTimestampUpdaterMap) {
    this.segmentIndexFileMap = segmentIndexFileMap;
    this.segmentTimestampUpdaterMap = segmentTimestampUpdaterMap;
  }

  public Map<String, List<String>> getSegmentIndexFileMap() {
    return this.segmentIndexFileMap;
  }

  public Map<String, SegmentRefreshInfo> getSegmentTimestampUpdaterMap() {
    return this.segmentTimestampUpdaterMap;
  }
}
| ravipesala/incubator-carbondata | core/src/main/java/org/apache/carbondata/core/readcommitter/ReadCommittedIndexFileSnapShot.java | Java | apache-2.0 | 2,076 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.validation;
import com.intellij.codeInspection.util.InspectionMessage;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.util.PsiTreeUtil;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PyPsiBundle;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyPsiUtils;
import com.jetbrains.python.sdk.PythonSdkUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.jetbrains.python.PyPsiBundle.message;
/**
 * Reports error annotations for invalid assignment targets in the Python
 * constructs that bind names: assignment and augmented assignment statements,
 * {@code del}, {@code except} clauses, {@code for} loops, {@code with} items,
 * and assignment expressions (PEP 572).
 */
public class AssignTargetAnnotator extends PyAnnotator {
  // Which binding construct is being checked; several error messages differ
  // depending on the operation.
  private enum Operation {
    Assign, AugAssign, Delete, Except, For, With
  }

  @Override
  public void visitPyAssignmentStatement(final @NotNull PyAssignmentStatement node) {
    for (PyExpression expression : node.getRawTargets()) {
      expression.accept(new ExprVisitor(Operation.Assign));
    }
    PyExpression expression = node.getAssignedValue();
    // An unparenthesized assignment expression may not be the value of an
    // assignment statement (PEP 572).
    if (expression instanceof PyAssignmentExpression) {
      getHolder()
        .newAnnotation(HighlightSeverity.ERROR, PyPsiBundle.message("ANN.unparenthesized.assignment.expression.value"))
        .range(expression)
        .create();
    }
  }

  @Override
  public void visitPyAugAssignmentStatement(final @NotNull PyAugAssignmentStatement node) {
    node.getTarget().accept(new ExprVisitor(Operation.AugAssign));
  }

  @Override
  public void visitPyDelStatement(final @NotNull PyDelStatement node) {
    ExprVisitor visitor = new ExprVisitor(Operation.Delete);
    for (PyExpression expr : node.getTargets()) {
      expr.accept(visitor);
    }
  }

  @Override
  public void visitPyExceptBlock(final @NotNull PyExceptPart node) {
    PyExpression target = node.getTarget();
    if (target != null) {
      target.accept(new ExprVisitor(Operation.Except));
    }
  }

  @Override
  public void visitPyForStatement(final @NotNull PyForStatement node) {
    PyExpression target = node.getForPart().getTarget();
    if (target != null) {
      target.accept(new ExprVisitor(Operation.For));
      // A for-loop target may not be an assignment expression.
      checkNotAssignmentExpression(target, PyPsiBundle.message("ANN.assignment.expression.as.a.target"));
    }
  }

  @Override
  public void visitPyWithItem(@NotNull PyWithItem node) {
    PyExpression target = node.getTarget();
    if (target != null) {
      target.accept(new ExprVisitor(Operation.With));
    }
  }

  @Override
  public void visitPyExpressionStatement(@NotNull PyExpressionStatement node) {
    PyExpression expression = node.getExpression();
    // A bare assignment expression used as a statement must be parenthesized.
    if (expression instanceof PyAssignmentExpression) {
      getHolder()
        .newAnnotation(HighlightSeverity.ERROR, PyPsiBundle.message("ANN.unparenthesized.assignment.expression.statement"))
        .range(expression)
        .create();
    }
  }

  @Override
  public void visitPyAssignmentExpression(@NotNull PyAssignmentExpression node) {
    // PEP 572: assignment expressions inside a comprehension may not bind
    // names when the comprehension's scope owner is a class body.
    final PyComprehensionElement comprehensionElement = PsiTreeUtil.getParentOfType(node, PyComprehensionElement.class, true, ScopeOwner.class);
    if (ScopeUtil.getScopeOwner(comprehensionElement) instanceof PyClass) {
      getHolder().newAnnotation(HighlightSeverity.ERROR,
                                PyPsiBundle.message("ANN.assignment.expressions.within.a.comprehension.cannot.be.used.in.a.class.body")).create();
    }
  }

  @Override
  public void visitPyComprehensionElement(@NotNull PyComprehensionElement node) {
    // Assignment expressions are forbidden both as comprehension loop targets
    // and anywhere inside the iterable of a comprehension's "for" clause.
    final String targetMessage = PyPsiBundle.message("ANN.assignment.expression.as.a.target");
    final String iterableMessage = PyPsiBundle.message("ANN.assignment.expression.in.an.iterable");
    node.getForComponents().forEach(
      it -> {
        checkNotAssignmentExpression(it.getIteratorVariable(), targetMessage);
        checkNoAssignmentExpressionAsChild(it.getIteratedList(), iterableMessage);
      }
    );
  }

  // Reports every assignment expression found anywhere below 'expression'.
  private void checkNoAssignmentExpressionAsChild(@Nullable PyExpression expression, @NotNull @InspectionMessage String message) {
    PsiTreeUtil
      .findChildrenOfType(expression, PyAssignmentExpression.class)
      .forEach(it -> checkNotAssignmentExpression(it, message));
  }

  // Reports 'expression' itself (ignoring wrapping parentheses) when it is an
  // assignment expression.
  private void checkNotAssignmentExpression(@Nullable PyExpression expression, @NotNull @InspectionMessage String message) {
    if (PyPsiUtils.flattenParens(expression) instanceof PyAssignmentExpression) {
      getHolder()
        .newAnnotation(HighlightSeverity.ERROR, message)
        .range(expression)
        .create();
    }
  }

  // Visitor applied to each target expression; reports targets that can never
  // be assigned to (or deleted), with messages specific to the operation.
  private class ExprVisitor extends PyElementVisitor {
    private final Operation myOp;
    private final @Nls String DELETING_NONE = message("ANN.deleting.none");
    private final @Nls String ASSIGNMENT_TO_NONE = message("ANN.assign.to.none");
    private final @Nls String CANT_ASSIGN_TO_FUNCTION_CALL = message("ANN.cant.assign.to.call");
    private final @Nls String CANT_DELETE_FUNCTION_CALL = message("ANN.cant.delete.call");

    ExprVisitor(Operation op) {
      myOp = op;
    }

    @Override
    public void visitPyReferenceExpression(final @NotNull PyReferenceExpression node) {
      String referencedName = node.getReferencedName();
      if (PyNames.NONE.equals(referencedName)) {
        //noinspection DialogTitleCapitalization
        getHolder().newAnnotation(HighlightSeverity.ERROR, (myOp == Operation.Delete) ? DELETING_NONE : ASSIGNMENT_TO_NONE).range(node).create();
      }
    }

    @Override
    public void visitPyTargetExpression(final @NotNull PyTargetExpression node) {
      String targetName = node.getName();
      if (PyNames.NONE.equals(targetName)) {
        // Generated SDK skeleton stubs are allowed to assign to None; only
        // report for files outside the skeleton directory.
        final VirtualFile vfile = node.getContainingFile().getVirtualFile();
        if (vfile != null && !vfile.getUrl().contains("/" + PythonSdkUtil.SKELETON_DIR_NAME + "/")){
          //noinspection DialogTitleCapitalization
          getHolder().newAnnotation(HighlightSeverity.ERROR, (myOp == Operation.Delete) ? DELETING_NONE : ASSIGNMENT_TO_NONE).range(node).create();
        }
      }
      if (PyNames.DEBUG.equals(targetName)) {
        // __debug__ is a keyword-like name in Python 3 and merely
        // unassignable in Python 2; the message differs accordingly.
        if (LanguageLevel.forElement(node).isPy3K()) {
          getHolder().newAnnotation(HighlightSeverity.ERROR, PyPsiBundle.message("ANN.assignment.to.keyword")).range(node).create();
        }
        else {
          getHolder().newAnnotation(HighlightSeverity.ERROR, PyPsiBundle.message("ANN.cannot.assign.to.debug")).range(node).create();
        }
      }
    }

    @Override
    public void visitPyCallExpression(final @NotNull PyCallExpression node) {
      getHolder().newAnnotation(HighlightSeverity.ERROR, (myOp == Operation.Delete) ? CANT_DELETE_FUNCTION_CALL : CANT_ASSIGN_TO_FUNCTION_CALL).range(node).create();
    }

    @Override
    public void visitPyGeneratorExpression(final @NotNull PyGeneratorExpression node) {
      getHolder().newAnnotation(HighlightSeverity.ERROR, message(
        myOp == Operation.AugAssign ? "ANN.cant.aug.assign.to.generator" : "ANN.cant.assign.to.generator")).range(node).create();
    }

    @Override
    public void visitPyBinaryExpression(final @NotNull PyBinaryExpression node) {
      getHolder().newAnnotation(HighlightSeverity.ERROR, message("ANN.cant.assign.to.operator")).range(node).create();
    }

    @Override
    public void visitPyTupleExpression(final @NotNull PyTupleExpression node) {
      // Empty-tuple targets are invalid in Python 2 only; otherwise recurse
      // into the tuple's elements (except for augmented assignment, which
      // cannot target a tuple at all).
      if (node.isEmpty() && LanguageLevel.forElement(node).isPython2()) {
        getHolder().newAnnotation(HighlightSeverity.ERROR, message("ANN.cant.assign.to.parens")).range(node).create();
      }
      else if (myOp == Operation.AugAssign) {
        getHolder().newAnnotation(HighlightSeverity.ERROR, message("ANN.cant.aug.assign.to.tuple.or.generator")).range(node).create();
      }
      else {
        node.acceptChildren(this);
      }
    }

    @Override
    public void visitPyParenthesizedExpression(final @NotNull PyParenthesizedExpression node) {
      if (myOp == Operation.AugAssign) {
        getHolder().newAnnotation(HighlightSeverity.ERROR, message("ANN.cant.aug.assign.to.tuple.or.generator")).range(node).create();
      }
      else {
        // Look through the parentheses at the wrapped expression.
        node.acceptChildren(this);
      }
    }

    @Override
    public void visitPyListLiteralExpression(final @NotNull PyListLiteralExpression node) {
      if (myOp == Operation.AugAssign) {
        getHolder().newAnnotation(HighlightSeverity.ERROR, message("ANN.cant.aug.assign.to.list.or.comprh")).range(node).create();
      }
      else {
        node.acceptChildren(this);
      }
    }

    @Override
    public void visitPyListCompExpression(final @NotNull PyListCompExpression node) {
      markError(node, message(myOp == Operation.AugAssign ? "ANN.cant.aug.assign.to.comprh" : "ANN.cant.assign.to.comprh"));
    }

    @Override
    public void visitPyDictCompExpression(@NotNull PyDictCompExpression node) {
      markError(node, message(myOp == Operation.AugAssign ? "ANN.cant.aug.assign.to.dict.comprh" : "ANN.cant.assign.to.dict.comprh"));
    }

    @Override
    public void visitPySetCompExpression(@NotNull PySetCompExpression node) {
      markError(node, message(myOp == Operation.AugAssign ? "ANN.cant.aug.assign.to.set.comprh" : "ANN.cant.assign.to.set.comprh"));
    }

    @Override
    public void visitPyStarExpression(@NotNull PyStarExpression node) {
      super.visitPyStarExpression(node);
      // A starred target is only valid directly inside a list or tuple.
      if (!(node.getParent() instanceof PySequenceExpression)) {
        markError(node, message("ANN.cant.aug.assign.starred.assignment.target.must.be.in.list.or.tuple"));
      }
    }

    @Override
    public void visitPyDictLiteralExpression(@NotNull PyDictLiteralExpression node) {
      checkLiteral(node);
    }

    @Override
    public void visitPySetLiteralExpression(@NotNull PySetLiteralExpression node) {
      checkLiteral(node);
    }

    @Override
    public void visitPyNumericLiteralExpression(final @NotNull PyNumericLiteralExpression node) {
      checkLiteral(node);
    }

    @Override
    public void visitPyStringLiteralExpression(final @NotNull PyStringLiteralExpression node) {
      checkLiteral(node);
    }

    // Shared reporting for all literal targets; message depends on whether we
    // are deleting or assigning.
    private void checkLiteral(@NotNull PsiElement node) {
      getHolder().newAnnotation(HighlightSeverity.ERROR, message(myOp == Operation.Delete ? "ANN.cant.delete.literal" : "ANN.cant.assign.to.literal")).range(node).create();
    }

    @Override
    public void visitPyLambdaExpression(final @NotNull PyLambdaExpression node) {
      getHolder().newAnnotation(HighlightSeverity.ERROR, message("ANN.cant.assign.to.lambda")).range(node).create();
    }

    @Override
    public void visitPyNoneLiteralExpression(@NotNull PyNoneLiteralExpression node) {
      getHolder().newAnnotation(HighlightSeverity.ERROR, PyPsiBundle.message("ANN.assignment.to.keyword")).range(node).create();
    }

    @Override
    public void visitPyBoolLiteralExpression(@NotNull PyBoolLiteralExpression node) {
      getHolder().newAnnotation(HighlightSeverity.ERROR, PyPsiBundle.message("ANN.assignment.to.keyword")).range(node).create();
    }
  }
}
| smmribeiro/intellij-community | python/python-psi-impl/src/com/jetbrains/python/validation/AssignTargetAnnotator.java | Java | apache-2.0 | 11,767 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model;
import java.nio.charset.Charset;
import java.nio.charset.UnsupportedCharsetException;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.Processor;
import org.apache.camel.processor.ConvertBodyProcessor;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.Required;
import org.apache.camel.spi.RouteContext;
/**
 * Converts the message body to another type
 */
@Metadata(label = "eip,transformation")
@XmlRootElement(name = "convertBodyTo")
@XmlAccessorType(XmlAccessType.FIELD)
public class ConvertBodyDefinition extends NoOutputDefinition<ConvertBodyDefinition> {
    @XmlAttribute(required = true)
    private String type;
    @XmlAttribute
    private String charset;
    @XmlTransient
    private Class<?> typeClass;

    public ConvertBodyDefinition() {
    }

    public ConvertBodyDefinition(String type) {
        setType(type);
    }

    public ConvertBodyDefinition(Class<?> typeClass) {
        setTypeClass(typeClass);
        setType(typeClass.getName());
    }

    public ConvertBodyDefinition(Class<?> typeClass, String charset) {
        // Delegate to the single-argument constructor, then record the charset.
        this(typeClass);
        setCharset(charset);
    }

    @Override
    public String toString() {
        return "ConvertBodyTo[" + getType() + "]";
    }

    @Override
    public String getLabel() {
        return "convertBodyTo[" + getType() + "]";
    }

    /**
     * Validates that the given charset name is supported by the JVM.
     *
     * @throws UnsupportedCharsetException if the charset is null or unknown
     */
    public static void validateCharset(String charset) throws UnsupportedCharsetException {
        if (charset == null || !Charset.isSupported(charset)) {
            throw new UnsupportedCharsetException(charset);
        }
        Charset.forName(charset);
    }

    @Override
    public Processor createProcessor(RouteContext routeContext) throws Exception {
        // Resolve the target class lazily from its name if it was not given directly.
        if (typeClass == null && type != null) {
            typeClass = routeContext.getCamelContext().getClassResolver().resolveMandatoryClass(type);
        }
        // Fail fast on an unsupported charset before creating the processor.
        if (charset != null) {
            validateCharset(charset);
        }
        return new ConvertBodyProcessor(getTypeClass(), getCharset());
    }

    public String getType() {
        return type;
    }

    /**
     * The java type to convert to
     */
    @Required
    public void setType(String type) {
        this.type = type;
    }

    public Class<?> getTypeClass() {
        return typeClass;
    }

    public void setTypeClass(Class<?> typeClass) {
        this.typeClass = typeClass;
    }

    public String getCharset() {
        return charset;
    }

    /**
     * To use a specific charset when converting
     */
    public void setCharset(String charset) {
        this.charset = charset;
    }
}
| skinzer/camel | camel-core/src/main/java/org/apache/camel/model/ConvertBodyDefinition.java | Java | apache-2.0 | 3,784 |
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/kernels/lookup_util.h"
#include "tensorflow/core/framework/tensor.h"
#include "tensorflow/core/framework/tensor_shape.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/io/inputbuffer.h"
namespace tensorflow {
namespace lookup {
namespace {
static const int kInputBufferSize = 1 * 1024 * 1024; /* bytes */
static const int kLineNumber = -1;
static const int kWholeLine = -2;
// Counts the number of lines in 'vocab_file' and stores the result in
// '*num_lines'. Returns a non-OK status on any read error other than
// reaching end-of-file.
Status GetNumLinesInTextFile(Env* env, const string& vocab_file,
                             int64* num_lines) {
  std::unique_ptr<RandomAccessFile> file;
  TF_RETURN_IF_ERROR(env->NewRandomAccessFile(vocab_file, &file));

  io::InputBuffer input_buffer(file.get(), kInputBufferSize);
  int64 line_count = 0;
  string line;
  Status s;
  while ((s = input_buffer.ReadLine(&line)).ok()) {
    ++line_count;
  }
  // ReadLine reports OutOfRange at end-of-file; anything else is a real error.
  if (!errors::IsOutOfRange(s)) {
    return s;
  }
  *num_lines = line_count;
  return Status::OK();
}
// Iterator that reads a text file. Each iteration process one line, it parses
// the line and populates the keys and values tensors used for initialization
// with a single key and corresponding value.
//
// What information of the line to populate the key or values is specified by
// providing key_index and value_index.
class TextFileLineIterator
    : public InitializableLookupTable::InitTableIterator {
 public:
  TextFileLineIterator()
      : valid_(false),
        vocab_size_(-1),
        status_(errors::FailedPrecondition("Not initialized")) {}

  // Initialize iterator.
  //
  // Prepares the file 'filename' and sets the data types to return the keys and
  // values tensors. It requires the indices of the tokens in the line given a
  // delimiter to specify where to pick the data from.
  //
  // - Index -2 means the entire line as string.
  // - Index -1 means the line number stored in int64.
  // - Index >= 0 represent index (starting at zero) of the split line based on
  //   delimiter.
  Status Init(const string& filename, int64 vocab_size, char delimiter,
              DataType key_dtype, int64 key_index, DataType value_dtype,
              int64 value_index, Env* env) {
    if (vocab_size == -1) {
      // Unknown vocab size: count the lines in the file up front.
      TF_RETURN_IF_ERROR(GetNumLinesInTextFile(env, filename, &vocab_size));
    }
    filename_ = filename;
    vocab_size_ = vocab_size;
    delimiter_ = delimiter;
    key_ = Tensor(key_dtype, TensorShape({}));
    value_ = Tensor(value_dtype, TensorShape({}));
    key_index_ = key_index;
    value_index_ = value_index;
    status_ = env->NewRandomAccessFile(filename_, &file_);
    if (!status_.ok()) return status_;
    input_buffer_.reset(new io::InputBuffer(file_.get(), kInputBufferSize));
    valid_ = true;
    next_id_ = 0;
    // No need to tokenize lines when both indices refer to the whole line or
    // the line number.
    ignore_split_ = std::max(key_index_, value_index_) < 0;
    // Load the first entry so keys()/values() are valid immediately.
    Next();
    return status_;
  }

  // Advances to the next line, refreshing key_/value_; on any problem sets
  // status_ and marks the iterator invalid.
  void Next() override {
    if (!valid_) return;

    string line;
    status_ = input_buffer_->ReadLine(&line);
    if (!status_.ok()) {
      // Reaching EOF before producing 'vocab_size_' entries is an error.
      if (errors::IsOutOfRange(status_) && next_id_ != vocab_size_) {
        status_ = errors::InvalidArgument("Invalid vocab_size in ", filename_,
                                          ": expected ", vocab_size_,
                                          " but got ", next_id_);
      }
      valid_ = false;
      return;
    }
    // Stop once 'vocab_size_' entries were produced, even if the file has
    // more lines.
    if (next_id_ >= vocab_size_) {
      LOG(WARNING) << "Truncated " << filename_ << " before its end at "
                   << vocab_size_ << " records.";
      LOG(WARNING) << "next_id_ : " << next_id_;
      status_ = errors::OutOfRange("Finished reading ", vocab_size_,
                                   " of lines from ", filename_);
      valid_ = false;
      return;
    }
    if (line.empty()) {
      status_ = errors::InvalidArgument("Invalid content in ", filename_,
                                        ": empty line found at position ",
                                        input_buffer_->Tell(), ".");
      valid_ = false;
      return;
    }

    std::vector<string> tokens;
    if (!ignore_split_) {
      tokens = str_util::Split(line, delimiter_);
      if (std::max(key_index_, value_index_) >= tokens.size()) {
        status_ = errors::InvalidArgument(
            "Invalid number of columns in ", filename_, " line ", next_id_,
            " (", line, ") : expected ", std::max(key_index_, value_index_),
            " got ", tokens.size());
        valid_ = false;
        return;
      }
    }
    // Populate the key and value scalar tensors from the parsed line.
    status_ = SetValue(line, tokens, key_index_, &key_);
    if (!status_.ok()) {
      valid_ = false;
      return;
    }
    status_ = SetValue(line, tokens, value_index_, &value_);
    if (!status_.ok()) {
      valid_ = false;
      return;
    }

    next_id_++;
  }

  bool Valid() const override { return valid_; }

  const Tensor& keys() const override { return key_; }

  const Tensor& values() const override { return value_; }

  Status status() const override { return status_; }

  int64 total_size() const override { return vocab_size_; }

 private:
  Tensor key_;
  Tensor value_;
  bool valid_;  // true if the iterator points to an existing range.
  int64 key_index_;
  int64 value_index_;
  int64 next_id_;
  int64 vocab_size_;
  string filename_;
  char delimiter_;
  Status status_;
  bool ignore_split_;
  std::unique_ptr<RandomAccessFile> file_;  // must outlive input_buffer_
  std::unique_ptr<io::InputBuffer> input_buffer_;

  // Set the corresponding value from line or tokens based on 'index' into the
  // tensor 't'. The value is transformed to the given data type 'dtype'.
  Status SetValue(const string& line, const std::vector<string>& tokens,
                  int64 index, Tensor* tensor) {
    if (index == kLineNumber) {
      tensor->flat<int64>()(0) = next_id_;
      return Status::OK();
    }
    const string& token = (index == kWholeLine) ? line : tokens[index];
    const DataType& dtype = tensor->dtype();
    // Parse the token according to the tensor's dtype; parse failures
    // invalidate the iterator and surface as InvalidArgument.
    switch (dtype) {
      case DT_INT32: {
        int32 value;
        if (!strings::safe_strto32(token.c_str(), &value)) {
          valid_ = false;
          return errors::InvalidArgument("Field ", token, " in line ", next_id_,
                                         " is not a valid int32.");
        }
        tensor->flat<int32>()(0) = value;
      } break;
      case DT_INT64: {
        int64 value;
        if (!strings::safe_strto64(token.c_str(), &value)) {
          valid_ = false;
          return errors::InvalidArgument("Field ", token, " in line ", next_id_,
                                         " is not a valid int64.");
        }
        tensor->flat<int64>()(0) = value;
      } break;
      case DT_FLOAT: {
        float value;
        if (!strings::safe_strtof(token.c_str(), &value)) {
          valid_ = false;
          return errors::InvalidArgument("Field ", token, " in line ", next_id_,
                                         " is not a valid float.");
        }
        tensor->flat<float>()(0) = value;
      } break;
      case DT_DOUBLE: {
        double value;
        if (!strings::safe_strtod(token.c_str(), &value)) {
          valid_ = false;
          return errors::InvalidArgument("Field ", token, " in line ", next_id_,
                                         " is not a valid double.");
        }
        tensor->flat<double>()(0) = value;
      } break;
      case DT_STRING:
        tensor->flat<string>()(0) = token;
        break;
      default:
        valid_ = false;
        return errors::InvalidArgument("Data type ", dtype, " not supported.");
    }
    return Status::OK();
  }

  TF_DISALLOW_COPY_AND_ASSIGN(TextFileLineIterator);
};
// Reads the 2-element string handle tensor named 'input_name' from 'ctx' and
// splits it into its container and table-name components.
Status GetTableHandle(const string& input_name, OpKernelContext* ctx,
                      string* container, string* table_handle) {
  mutex* mu;
  TF_RETURN_IF_ERROR(ctx->input_ref_mutex(input_name, &mu));
  mutex_lock lock(*mu);
  Tensor handle_tensor;
  TF_RETURN_IF_ERROR(ctx->mutable_input(input_name, &handle_tensor, true));
  if (handle_tensor.NumElements() != 2) {
    return errors::InvalidArgument(
        "Lookup table handle must be scalar, but had shape: ",
        handle_tensor.shape().DebugString());
  }
  const auto& handle = handle_tensor.flat<string>();
  *container = handle(0);
  *table_handle = handle(1);
  return Status::OK();
}
} // namespace
// Retrieves the lookup table referenced by the input named 'input_name',
// supporting both the resource-handle and the legacy string-handle forms.
Status GetLookupTable(const string& input_name, OpKernelContext* ctx,
                      LookupInterface** table) {
  DataType handle_dtype;
  TF_RETURN_IF_ERROR(ctx->input_dtype(input_name, &handle_dtype));
  if (handle_dtype == DT_RESOURCE) {
    // Resource-handle based lookup.
    ResourceHandle handle;
    TF_RETURN_IF_ERROR(HandleFromInput(ctx, input_name, &handle));
    return LookupResource(ctx, handle, table);
  }
  // Legacy string-handle based lookup via the resource manager.
  string container;
  string table_handle;
  TF_RETURN_IF_ERROR(
      GetTableHandle(input_name, ctx, &container, &table_handle));
  return ctx->resource_manager()->Lookup(container, table_handle, table);
}
// Like GetLookupTable, but additionally requires the table to be
// initializable; returns InvalidArgument (and releases the reference)
// otherwise.
Status GetInitializableLookupTable(const string& input_name,
                                   OpKernelContext* ctx,
                                   InitializableLookupTable** table) {
  LookupInterface* lookup_table;
  DataType handle_dtype;
  TF_RETURN_IF_ERROR(ctx->input_dtype(input_name, &handle_dtype));
  if (handle_dtype == DT_RESOURCE) {
    // Resource-handle based lookup.
    ResourceHandle handle;
    TF_RETURN_IF_ERROR(HandleFromInput(ctx, input_name, &handle));
    TF_RETURN_IF_ERROR(LookupResource(ctx, handle, &lookup_table));
    *table = lookup_table->GetInitializableLookupTable();
    if (*table == nullptr) {
      // Drop the reference acquired by LookupResource before failing.
      lookup_table->Unref();
      return errors::InvalidArgument("Table ", handle.container(), " ",
                                     handle.name(), " is not initializable");
    }
  } else {
    // Legacy string-handle based lookup via the resource manager.
    string container;
    string table_handle;
    TF_RETURN_IF_ERROR(
        GetTableHandle(input_name, ctx, &container, &table_handle));
    TF_RETURN_IF_ERROR(ctx->resource_manager()->Lookup(container, table_handle,
                                                       &lookup_table));
    *table = lookup_table->GetInitializableLookupTable();
    if (*table == nullptr) {
      // Drop the reference acquired by Lookup before failing.
      lookup_table->Unref();
      return errors::InvalidArgument("Table ", container, " ", table_handle,
                                     " is not initializable");
    }
  }
  return Status::OK();
}
// Returns InvalidArgument when the table's key/value dtypes do not match the
// requested ones; OK otherwise.
//
// Fix: the error message used "->" for the requested pair but "-" for the
// table's pair; both now use the same "->" separator.
Status CheckTableDataTypes(const LookupInterface& table, DataType key_dtype,
                           DataType value_dtype, const string& table_name) {
  if (table.key_dtype() != key_dtype || table.value_dtype() != value_dtype) {
    return errors::InvalidArgument(
        "Conflicting key/value dtypes ", key_dtype, "->", value_dtype, " with ",
        table.key_dtype(), "->", table.value_dtype(), " for table ",
        table_name);
  }
  return Status::OK();
}
// Helper function to initialize an InitializableLookupTable from a text file.
Status InitializeTableFromTextFile(const string& filename, int64 vocab_size,
                                   char delimiter, int32 key_index,
                                   int32 value_index, Env* env,
                                   InitializableLookupTable* table) {
  // Line-number keys can only be stored in int64 tensors.
  if (key_index == kLineNumber && table->key_dtype() != DT_INT64) {
    return errors::InvalidArgument(
        "Key index for line number requires table key dtype of int64, got ",
        table->key_dtype());
  }
  const DataType& key_dtype = table->key_dtype();
  const DataType& value_dtype = table->value_dtype();
  // Whole-line keys must be strings or integers.
  if (key_index == kWholeLine && !DataTypeIsInteger(key_dtype) &&
      key_dtype != DT_STRING) {
    return errors::InvalidArgument(
        "Key index for whole line requires string or integer table key, got ",
        table->key_dtype());
  }
  // Line-number values can only be stored in int64 tensors.
  if (value_index == kLineNumber && value_dtype != DT_INT64) {
    return errors::InvalidArgument(
        "Value index for line number requires table value dtype of int64, got ",
        table->value_dtype());
  }
  // Whole-line values must be strings.
  if (value_index == kWholeLine && value_dtype != DT_STRING) {
    return errors::InvalidArgument(
        "Value index for whole line requires table value dtype of string, got ",
        table->value_dtype());
  }
  TextFileLineIterator iter;
  TF_RETURN_IF_ERROR(iter.Init(filename, vocab_size, delimiter, key_dtype,
                               key_index, value_dtype, value_index, env));
  // For initialization from files, ignore if the table is already
  // initialized. The table shared name should contain the filename to
  // avoid trying to initialize the same table from the same file at the same
  // time.
  Status s = table->Initialize(iter);
  if (errors::IsFailedPrecondition(s) && table->is_initialized()) {
    LOG(INFO) << "Table trying to initialize from file " << filename
              << " is already initialized.";
    return Status::OK();
  }
  return s;
}
} // namespace lookup
} // namespace tensorflow
| Mistobaan/tensorflow | tensorflow/core/kernels/lookup_util.cc | C++ | apache-2.0 | 13,450 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.Formatting.Rules;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.SmartIndent
{
internal abstract partial class AbstractIndentationService : ISynchronousIndentationService
{
    protected abstract IFormattingRule GetSpecializedIndentationFormattingRule();

    // Builds the rule chain for the position: host base-indentation rule,
    // then this language's specialized rule, then the default rules.
    private IEnumerable<IFormattingRule> GetFormattingRules(Document document, int position)
    {
        var workspace = document.Project.Solution.Workspace;
        var ruleFactory = workspace.Services.GetService<IHostDependentFormattingRuleFactoryService>();
        var baseIndentationRule = ruleFactory.CreateRule(document, position);
        return new[] { baseIndentationRule, this.GetSpecializedIndentationFormattingRule() }
            .Concat(Formatter.GetDefaultFormattingRules(document));
    }

    public IndentationResult? GetDesiredIndentation(Document document, int lineNumber, CancellationToken cancellationToken)
    {
        var root = document.GetSyntaxRootSynchronously(cancellationToken);
        var text = root.SyntaxTree.GetText(cancellationToken);
        var lineToBeIndented = text.Lines[lineNumber];
        var formattingRules = GetFormattingRules(document, lineToBeIndented.Start);

        // enter on a token case: defer to the smart token formatter rather
        // than computing an indentation here.
        if (ShouldUseSmartTokenFormatterInsteadOfIndenter(formattingRules, root, lineToBeIndented, document.Options, cancellationToken))
        {
            return null;
        }

        var indenter = GetIndenter(
            document.GetLanguageService<ISyntaxFactsService>(),
            root.SyntaxTree, lineToBeIndented, formattingRules,
            document.Options, cancellationToken);
        return indenter.GetDesiredIndentation();
    }

    protected abstract AbstractIndenter GetIndenter(
        ISyntaxFactsService syntaxFacts, SyntaxTree syntaxTree, TextLine lineToBeIndented, IEnumerable<IFormattingRule> formattingRules, OptionSet optionSet, CancellationToken cancellationToken);

    protected abstract bool ShouldUseSmartTokenFormatterInsteadOfIndenter(
        IEnumerable<IFormattingRule> formattingRules, SyntaxNode root, TextLine line, OptionSet optionSet, CancellationToken cancellationToken);
}
}
| MatthieuMEZIL/roslyn | src/EditorFeatures/Core/Implementation/SmartIndent/AbstractIndentationService.cs | C# | apache-2.0 | 2,846 |
cask "sonarr" do
version "3.0.6.1196"
sha256 "41055ce7e24f46f1ea62745251d245dd9ed827a2fbfdf63732ff4a65470ce707"
url "https://github.com/Sonarr/Sonarr/archive/refs/tags/v#{version}.zip",
verified: "github.com/Sonarr/Sonarr/"
name "Sonarr"
homepage "https://sonarr.tv/"
livecheck do
url :url
strategy :github_latest
end
depends_on cask: "mono-mdk"
app "Sonarr-#{version}/distribution/osx/Sonarr.app"
preflight do
set_permissions "#{staged_path}/Sonarr.app", "0755"
end
zap trash: [
"~/Library/Application Support/Sonarr",
"~/Library/Preferences/tv.sonarr.Sonarr.plist",
"~/.config/NzbDrone",
]
end
| joshka/homebrew-cask | Casks/sonarr.rb | Ruby | bsd-2-clause | 658 |
# Homebrew formula for LHa, the classic .lzh archiver.
class Lha < Formula
  desc "Utility for creating and opening lzh archives"
  homepage "https://lha.osdn.jp/"
  # Canonical: https://osdn.net/dl/lha/lha-1.14i-ac20050924p1.tar.gz
  url "https://dotsrc.dl.osdn.net/osdn/lha/22231/lha-1.14i-ac20050924p1.tar.gz"
  version "1.14i-ac20050924p1"
  sha256 "b5261e9f98538816aa9e64791f23cb83f1632ecda61f02e54b6749e9ca5e9ee4"

  bottle do
    cellar :any_skip_relocation
    sha256 "27d0090517f08c929e062ea580515f38297ac00ff403830bc78c2b85caea0447" => :catalina
    sha256 "2b5e8d256e2d232014ee9b4dc08a52188dc8e5369f61290f5cdb7381e78b3561" => :mojave
    sha256 "f1dac02888773ade3d6c35eeb69c6cb25e08bf91584ae66fec7a362f80583e78" => :high_sierra
    sha256 "450fa8188af44eef619302c402860dfd2debab864487424211fbbfa7ff065955" => :sierra
    sha256 "35f3e193c1bf0d26c62ea6897721c559191fea64f27d71781a90f670d9a23557" => :el_capitan
    sha256 "9cb516a73d1d117c39f63d16b3211df626783c9bb1a7038f524dd9c36045b1ac" => :yosemite
    sha256 "bd26a5a48396d06019f7998f4c9bf511a74ef237814fee5f5c8ba9df31b30a37" => :mavericks
  end

  # HEAD builds come straight from git and need autotools to regenerate configure.
  head do
    url "https://github.com/jca02266/lha.git"
    depends_on "autoconf" => :build
    depends_on "automake" => :build
  end

  conflicts_with "lhasa", :because => "both install a `lha` binary"

  def install
    # The release tarball already ships `configure`; only HEAD must regenerate it.
    system "autoreconf", "-is" if build.head?
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--mandir=#{man}"
    system "make", "install"
  end

  # Round-trip check: archive a small file and print it back out.
  test do
    (testpath/"foo").write "test"
    system "#{bin}/lha", "c", "foo.lzh", "foo"
    assert_equal "::::::::\nfoo\n::::::::\ntest",
      shell_output("#{bin}/lha p foo.lzh")
  end
end
| BrewTestBot/homebrew-core | Formula/lha.rb | Ruby | bsd-2-clause | 1,746 |
# definitions which are not being deprecated from wagtail.admin.forms
from .models import ( # NOQA
DIRECT_FORM_FIELD_OVERRIDES, FORM_FIELD_OVERRIDES, WagtailAdminModelForm,
WagtailAdminModelFormMetaclass, formfield_for_dbfield)
from .pages import WagtailAdminPageForm # NOQA
| zerolab/wagtail | wagtail/admin/forms/__init__.py | Python | bsd-3-clause | 285 |
/******************************************************************************
* Copyright (c) 2014, Hobu Inc. (hobu@hobu.co)
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Hobu, Inc. nor the names of its contributors
* may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
****************************************************************************/
#pragma once
#include <pdal/Writer.hpp>
#include <pdal/XMLSchema.hpp>
#include <string>
#include <unordered_map>
namespace pdal
{
class SQLiteWriter;
class PgWriter;
class OciWriter;
// Base class for writers that persist point data to a database backend
// (subclassed by the SQLite/Postgres/Oracle writers forward-declared above).
// Maps PointTable dimensions to database dimensions and packs per-point data
// into an output buffer.
class PDAL_DLL DbWriter : public Writer
{
protected:
    DbWriter()
    {}

    virtual void setAutoXForm(const PointViewPtr view);

    // Dimensions as they will be stored in the database.
    XMLDimList dbDimTypes() const
        { return m_dbDims; }
    // Reads dimension `id` of point `idx` into `pos`; returns bytes written.
    size_t readField(const PointView& view, char *pos, Dimension::Id::Enum id,
        PointId idx);
    // Packs the whole point `idx` into `outbuf`; returns its packed size.
    size_t readPoint(const PointView& view, PointId idx, char *outbuf);
    size_t packedPointSize() const
        { return m_packedPointSize; }

    // Allows subclass access to ready() without the mess of friends.
    void doReady(PointTableRef table)
        { DbWriter::ready(table); }

private:
    virtual void prepared(PointTableRef table);
    virtual void ready(PointTableRef table);

    DimTypeList m_dimTypes;
    XMLDimList m_dbDims;
    std::unordered_map<int, DimType> m_dimMap;
    // NOTE(review): exact pair semantics are not visible in this header —
    // confirm against the implementation before relying on them.
    std::pair<int, int> m_xOffsets;
    std::pair<int, int> m_yOffsets;
    std::pair<int, int> m_zOffsets;

    // Size of point data as read from PointTable.
    size_t m_packedPointSize;
    // Size of point data as written to DB.
    size_t m_dbPointSize;
    bool m_locationScaling;

    // Non-copyable. Use C++11 `= delete` (the file already requires C++11 via
    // <unordered_map>) instead of the old declared-but-not-implemented idiom,
    // so accidental copies fail at compile time rather than link time.
    DbWriter& operator=(const DbWriter&) = delete;
    DbWriter(const DbWriter&) = delete;
};
} // namespace pdal
| DougFirErickson/PDAL | include/pdal/DbWriter.hpp | C++ | bsd-3-clause | 3,146 |
<?php
/**
 * Store module backend: attribute "view" page.
 *
 * Expects $model — the attribute being displayed. Sets up breadcrumbs, the
 * page title and the admin side menu, then renders the attribute details.
 */
$this->breadcrumbs = [
    Yii::t('StoreModule.store', 'Attributes') => ['index'],
    $model->name,
];
$this->pageTitle = Yii::t('StoreModule.store', 'Attributes - view');
$this->menu = [
    ['icon' => 'fa fa-fw fa-list-alt', 'label' => Yii::t('StoreModule.store', 'Manage attributes'), 'url' => ['/store/attributeBackend/index']],
    ['icon' => 'fa fa-fw fa-plus-square', 'label' => Yii::t('StoreModule.store', 'Create attribute'), 'url' => ['/store/attributeBackend/create']],
    ['label' => Yii::t('StoreModule.store', 'Attribute') . ' «' . mb_substr($model->name, 0, 32) . '»'],
    [
        'icon' => 'fa fa-fw fa-pencil',
        'label' => Yii::t('StoreModule.store', 'Update attribute'),
        'url' => [
            '/store/attributeBackend/update',
            'id' => $model->id
        ]
    ],
    [
        'icon' => 'fa fa-fw fa-eye',
        'label' => Yii::t('StoreModule.store', 'View attribute'),
        'url' => [
            '/store/attributeBackend/view',
            'id' => $model->id
        ]
    ],
    [
        // Delete posts back with CSRF token and a confirmation dialog.
        'icon' => 'fa fa-fw fa-trash-o',
        'label' => Yii::t('StoreModule.store', 'Delete attribute'),
        'url' => '#',
        'linkOptions' => [
            'submit' => ['/store/attributeBackend/delete', 'id' => $model->id],
            'params' => [Yii::app()->getRequest()->csrfTokenName => Yii::app()->getRequest()->csrfToken],
            'confirm' => Yii::t('StoreModule.store', 'Do you really want to remove attribute?'),
            'csrf' => true,
        ]
    ],
];
?>
<div class="page-header">
    <h1>
        <?= Yii::t('StoreModule.store', 'Viewing attribute'); ?><br/>
        <small>«<?= $model->name; ?>»</small>
    </h1>
</div>
<?php // Read-only details grid for the attribute. ?>
<?php $this->widget(
    'bootstrap.widgets.TbDetailView',
    [
        'data' => $model,
        'attributes' => [
            'id',
            'name',
            'title',
            [
                'name' => 'type',
                'type' => 'text',
                'value' => $model->getTypeTitle($model->type),
            ],
            [
                'name' => 'required',
                'value' => $model->required ? Yii::t("StoreModule.store", 'Yes') : Yii::t("StoreModule.store", 'No'),
            ],
        ],
    ]
); ?>
| elorian/crm.inreserve.kz | protected/modules/store/views/attributeBackend/view.php | PHP | bsd-3-clause | 2,267 |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/autofill_assistant/browser/chip.h"
#include "components/autofill_assistant/browser/user_action.h"
namespace autofill_assistant {
Chip::Chip() = default;
Chip::~Chip() = default;
Chip::Chip(const Chip& other) = default;

// Builds a Chip from its proto representation; `is_content_description_set`
// distinguishes an explicitly-empty description from an unset one.
Chip::Chip(const ChipProto& proto)
    : type(proto.type()),
      icon(proto.icon()),
      text(proto.text()),
      sticky(proto.sticky()),
      content_description(proto.content_description()),
      is_content_description_set(proto.has_content_description()) {}
bool Chip::empty() const {
return type == UNKNOWN_CHIP_TYPE && text.empty() && icon == NO_ICON;
}
// Assigns a default type to chips that were left untyped: every non-empty
// chip with UNKNOWN_CHIP_TYPE is assumed to be a normal action.
void SetDefaultChipType(std::vector<UserAction>* user_actions) {
  for (UserAction& action : *user_actions) {
    Chip& chip = action.chip();
    if (!chip.empty() && chip.type == UNKNOWN_CHIP_TYPE)
      chip.type = NORMAL_ACTION;
  }
}
} // namespace autofill_assistant
| scheib/chromium | components/autofill_assistant/browser/chip.cc | C++ | bsd-3-clause | 1,157 |
<?php
/**
* LoginForm
* @copyright Copyright (c) 2011 - 2014 Aleksandr Torosh (http://wezoom.com.ua)
* @author Aleksandr Torosh <webtorua@gmail.com>
*/
namespace Admin\Form;
use Phalcon\Forms\Element\Text;
use Phalcon\Forms\Element\Password;
use Phalcon\Validation\Validator\PresenceOf;
class LoginForm extends \Phalcon\Forms\Form
{
    /**
     * Builds the login form: a required login text field with a placeholder
     * and a required password field; both get a PresenceOf validator.
     */
    public function initialize()
    {
        $loginField = new Text('login', array(
            'required' => true,
            'placeholder' => 'Enter login',
        ));
        $loginField->addValidator(
            new PresenceOf(array('message' => 'Login is required'))
        );
        $this->add($loginField);

        $passwordField = new Password('password', array(
            'required' => true,
        ));
        $passwordField->addValidator(
            new PresenceOf(array('message' => 'Password is required'))
        );
        $this->add($passwordField);
    }
}
| giappv/gcide | app/modules/Admin/Form/LoginForm.php | PHP | bsd-3-clause | 857 |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/paint_preview/browser/paint_preview_base_service.h"
#include <memory>
#include <utility>
#include "base/bind.h"
#include "base/callback.h"
#include "base/files/file_path.h"
#include "base/logging.h"
#include "base/metrics/histogram_functions.h"
#include "build/build_config.h"
#include "components/paint_preview/browser/compositor_utils.h"
#include "components/paint_preview/browser/paint_preview_client.h"
#include "components/paint_preview/common/mojom/paint_preview_recorder.mojom.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/web_contents.h"
#include "ui/gfx/geometry/rect.h"
namespace paint_preview {
// `file_mixin` owns on-disk persistence of captures; `policy` (may be null)
// decides which WebContents are eligible for capture.
PaintPreviewBaseService::PaintPreviewBaseService(
    std::unique_ptr<PaintPreviewFileMixin> file_mixin,
    std::unique_ptr<PaintPreviewPolicy> policy,
    bool is_off_the_record)
    : file_mixin_(std::move(file_mixin)),
      policy_(std::move(policy)),
      is_off_the_record_(is_off_the_record) {}

PaintPreviewBaseService::~PaintPreviewBaseService() = default;
// Starts an asynchronous paint-preview capture of `capture_params.web_contents`
// (or the specific `render_frame_host` when one was provided) and reports the
// outcome through `callback`.
void PaintPreviewBaseService::CapturePaintPreview(CaptureParams capture_params,
                                                  OnCapturedCallback callback) {
  DCHECK_CURRENTLY_ON(content::BrowserThread::UI);
  content::WebContents* web_contents = capture_params.web_contents;
  // Default to the main frame when no specific frame was requested.
  content::RenderFrameHost* render_frame_host =
      capture_params.render_frame_host ? capture_params.render_frame_host
                                       : web_contents->GetMainFrame();
  // The (optional) policy can veto capture for this contents entirely.
  if (policy_ && !policy_->SupportedForContents(web_contents)) {
    std::move(callback).Run(CaptureStatus::kContentUnsupported, {});
    return;
  }
  PaintPreviewClient::CreateForWebContents(web_contents);  // Is a singleton.
  auto* client = PaintPreviewClient::FromWebContents(web_contents);
  if (!client) {
    std::move(callback).Run(CaptureStatus::kClientCreationFailed, {});
    return;
  }
  // Translate the service-level capture options into client parameters.
  PaintPreviewClient::PaintPreviewParams params(capture_params.persistence);
  if (capture_params.root_dir) {
    params.root_dir = *capture_params.root_dir;
  }
  params.inner.clip_rect = capture_params.clip_rect;
  params.inner.is_main_frame =
      (render_frame_host == web_contents->GetMainFrame());
  params.inner.capture_links = capture_params.capture_links;
  params.inner.max_capture_size = capture_params.max_per_capture_size;
  params.inner.max_decoded_image_size_bytes =
      capture_params.max_decoded_image_size_bytes;
  params.inner.skip_accelerated_content =
      capture_params.skip_accelerated_content;
  // TODO(crbug/1064253): Consider moving to client so that this always happens.
  // Although, it is harder to get this right in the client due to its
  // lifecycle.
  // Mark the contents as being captured (hidden but kept awake) for the
  // duration of the capture; the handle is released in OnCaptured().
  auto capture_handle =
      web_contents->IncrementCapturerCount(gfx::Size(), /*stay_hidden=*/true,
                                           /*stay_awake=*/true);
  auto start_time = base::TimeTicks::Now();
  client->CapturePaintPreview(
      params, render_frame_host,
      base::BindOnce(&PaintPreviewBaseService::OnCaptured,
                     weak_ptr_factory_.GetWeakPtr(), std::move(capture_handle),
                     start_time, std::move(callback)));
}
// Completion handler for PaintPreviewClient::CapturePaintPreview. Releases the
// capturer handle, records capture latency on success, and forwards the result
// to the original caller.
void PaintPreviewBaseService::OnCaptured(
    base::ScopedClosureRunner capture_handle,
    base::TimeTicks start_time,
    OnCapturedCallback callback,
    base::UnguessableToken guid,
    mojom::PaintPreviewStatus status,
    std::unique_ptr<CaptureResult> result) {
  capture_handle.RunAndReset();

  // kPartialSuccess still counts as a usable capture; anything else — or a
  // result flagged unsuccessful — is reported as a failure.
  if (!(status == mojom::PaintPreviewStatus::kOk ||
        status == mojom::PaintPreviewStatus::kPartialSuccess) ||
      !result->capture_success) {
    DVLOG(1) << "ERROR: Paint Preview failed to capture for document "
             << guid.ToString() << " with error " << status;
    std::move(callback).Run(CaptureStatus::kCaptureFailed, {});
    return;
  }
  base::UmaHistogramTimes("Browser.PaintPreview.Capture.TotalCaptureDuration",
                          base::TimeTicks::Now() - start_time);
  std::move(callback).Run(CaptureStatus::kOk, std::move(result));
}
} // namespace paint_preview
| chromium/chromium | components/paint_preview/browser/paint_preview_base_service.cc | C++ | bsd-3-clause | 4,255 |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/** @fileoverview Handles interprocess communication for the system page. */
// clang-format off
import {loadTimeData} from 'chrome://resources/js/load_time_data.m.js';
// clang-format on
/** Browser-side calls made by the system settings page. */
export interface SystemPageBrowserProxy {
  /** Shows the native system proxy settings. */
  showProxySettings(): void;

  /**
   * @return Whether hardware acceleration was enabled when the user
   *     started Chrome.
   */
  wasHardwareAccelerationEnabledAtStartup(): boolean;
}
/** Default implementation backed by chrome.send / loadTimeData. */
export class SystemPageBrowserProxyImpl implements SystemPageBrowserProxy {
  showProxySettings() {
    chrome.send('showProxySettings');
  }

  wasHardwareAccelerationEnabledAtStartup() {
    return loadTimeData.getBoolean('hardwareAccelerationEnabledAtStartup');
  }

  /** Lazily creates the shared singleton on first access. */
  static getInstance(): SystemPageBrowserProxy {
    return instance || (instance = new SystemPageBrowserProxyImpl());
  }

  /** Replaces the singleton, e.g. with a test double. */
  static setInstance(obj: SystemPageBrowserProxy) {
    instance = obj;
  }
}

// Singleton storage for getInstance()/setInstance().
let instance: SystemPageBrowserProxy|null = null;
| ric2b/Vivaldi-browser | chromium/chrome/browser/resources/settings/system_page/system_page_browser_proxy.ts | TypeScript | bsd-3-clause | 1,163 |
/*
* Copyright (c) 2013, Ford Motor Company
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of the Ford Motor Company nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "application_manager/commands/hmi/on_vr_language_change_notification.h"
#include "application_manager/application_impl.h"
#include "application_manager/state_controller.h"
#include "application_manager/message_helper.h"
#include "interfaces/MOBILE_API.h"
namespace application_manager {
namespace commands {
// Notification arriving from HMI when the active VR language changes.
OnVRLanguageChangeNotification::OnVRLanguageChangeNotification(
    const MessageSharedPtr& message, ApplicationManager& application_manager)
    : NotificationFromHMI(message, application_manager) {}

OnVRLanguageChangeNotification::~OnVRLanguageChangeNotification() {}
// Records the new VR language in the HMI capabilities, rebroadcasts the event
// to every mobile application as OnLanguageChange, and unregisters any app
// whose registered language no longer matches.
void OnVRLanguageChangeNotification::Run() {
  LOG4CXX_AUTO_TRACE(logger_);

  // Remember the newly active VR language reported by HMI.
  HMICapabilities& hmi_capabilities = application_manager_.hmi_capabilities();
  hmi_capabilities.set_active_vr_language(
      static_cast<hmi_apis::Common_Language::eType>(
          (*message_)[strings::msg_params][strings::language].asInt()));

  // Repackage the HMI message as a mobile OnLanguageChange notification.
  (*message_)[strings::msg_params][strings::hmi_display_language] =
      hmi_capabilities.active_ui_language();
  (*message_)[strings::params][strings::function_id] =
      static_cast<int32_t>(mobile_apis::FunctionID::OnLanguageChangeID);

  const ApplicationSet& accessor =
      application_manager_.applications().GetData();
  ApplicationSetConstIt it = accessor.begin();
  // The iterator is advanced *before* the current app may be unregistered
  // below — presumably UnregisterApplication removes it from this set, making
  // this the classic erase-safe iteration pattern. TODO(review): confirm.
  for (; accessor.end() != it;) {
    ApplicationSharedPtr app = *it++;
    (*message_)[strings::params][strings::connection_key] = app->app_id();
    SendNotificationToMobile(message_);

    // Apps registered with a different language must re-register: force
    // HMI_NONE, notify LANGUAGE_CHANGE, then unregister.
    if (static_cast<int32_t>(app->language()) !=
        (*message_)[strings::msg_params][strings::language].asInt()) {
      application_manager_.state_controller().SetRegularState(
          app, mobile_api::HMILevel::HMI_NONE, false);
      application_manager_.ManageMobileCommand(
          MessageHelper::GetOnAppInterfaceUnregisteredNotificationToMobile(
              app->app_id(),
              mobile_api::AppInterfaceUnregisteredReason::LANGUAGE_CHANGE),
          commands::Command::ORIGIN_SDL);
      application_manager_.UnregisterApplication(
          app->app_id(), mobile_apis::Result::SUCCESS, false);
    }
  }
}
} // namespace commands
} // namespace application_manager
| BrandonHe/sdl_core | src/components/application_manager/src/commands/hmi/on_vr_language_change_notification.cc | C++ | bsd-3-clause | 3,756 |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.explore_sites;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.eq;
import android.content.Context;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
import org.mockito.invocation.InvocationOnMock;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.shadows.multidex.ShadowMultiDex;
import org.chromium.base.Callback;
import org.chromium.base.metrics.test.ShadowRecordHistogram;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.chrome.browser.device.DeviceConditions;
import org.chromium.chrome.browser.device.ShadowDeviceConditions;
import org.chromium.chrome.browser.init.BrowserParts;
import org.chromium.chrome.browser.init.ChromeBrowserInitializer;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.components.background_task_scheduler.BackgroundTaskScheduler;
import org.chromium.components.background_task_scheduler.BackgroundTaskSchedulerFactory;
import org.chromium.components.background_task_scheduler.NativeBackgroundTask;
import org.chromium.components.background_task_scheduler.TaskIds;
import org.chromium.components.background_task_scheduler.TaskInfo;
import org.chromium.components.background_task_scheduler.TaskParameters;
import org.chromium.net.ConnectionType;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.TimeUnit;
/** Unit tests for {@link ExploreSitesBackgroundTask}. */
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE,
        shadows = {ShadowMultiDex.class, ShadowDeviceConditions.class, ShadowRecordHistogram.class,
                ExploreSitesBackgroundTaskUnitTest.ShadowExploreSitesBridge.class})
public class ExploreSitesBackgroundTaskUnitTest {
    /** Implementation of ExploreSitesBridge which does not rely on native. */
    @Implements(ExploreSitesBridge.class)
    public static class ShadowExploreSitesBridge {
        // Callback handed to updateCatalogFromNetwork(); a test may invoke it
        // to simulate the network fetch completing.
        public static Callback<Void> mUpdateCatalogFinishedCallback;
        // Controls what getVariation() reports; ENABLED unless a test disables it.
        public static int mVariation = ExploreSitesVariation.ENABLED;
        @Implementation
        public static void getEspCatalog(
                Profile profile, Callback<List<ExploreSitesCategory>> callback) {}
        @Implementation
        public static void updateCatalogFromNetwork(
                Profile profile, boolean isImmediateFetch, Callback<Void> finishedCallback) {
            mUpdateCatalogFinishedCallback = finishedCallback;
        }
        @Implementation
        @ExploreSitesVariation
        public static int getVariation() {
            return mVariation;
        }
    }
    /**
     * Fake of BackgroundTaskScheduler system service.
     */
    public static class FakeBackgroundTaskScheduler implements BackgroundTaskScheduler {
        // Scheduled tasks keyed by task id; lets tests inspect what was scheduled.
        private HashMap<Integer, TaskInfo> mTaskInfos = new HashMap<>();
        @Override
        public boolean schedule(Context context, TaskInfo taskInfo) {
            mTaskInfos.put(taskInfo.getTaskId(), taskInfo);
            return true;
        }
        @Override
        public void cancel(Context context, int taskId) {
            mTaskInfos.remove(taskId);
        }
        @Override
        public boolean isScheduled(Context context, int taskId) {
            return (mTaskInfos.get(taskId) != null);
        }
        @Override
        public void checkForOSUpgrade(Context context) {}
        @Override
        public void reschedule(Context context) {}
        public TaskInfo getTaskInfo(int taskId) {
            return mTaskInfos.get(taskId);
        }
    }
    // Installs device conditions with the given connection type into
    // ShadowDeviceConditions (battery high, on battery power, power-save off,
    // unmetered, screen on).
    void initDeviceConditions(@ConnectionType int connectionType) {
        boolean powerConnected = true;
        boolean powerSaveModeOn = true;
        int highBatteryLevel = 75;
        boolean metered = true;
        boolean screenOnAndUnlocked = true;
        DeviceConditions deviceConditions = new DeviceConditions(!powerConnected, highBatteryLevel,
                connectionType, !powerSaveModeOn, !metered, screenOnAndUnlocked);
        ShadowDeviceConditions.setCurrentConditions(deviceConditions);
    }
    @Spy
    private ExploreSitesBackgroundTask mExploreSitesBackgroundTask =
            new ExploreSitesBackgroundTask();
    @Mock
    private ChromeBrowserInitializer mChromeBrowserInitializer;
    @Captor
    ArgumentCaptor<BrowserParts> mBrowserParts;
    private FakeBackgroundTaskScheduler mFakeTaskScheduler;
    public void disableExploreSites() {
        ShadowExploreSitesBridge.mVariation = ExploreSitesVariation.DISABLED;
    }
    @Before
    public void setUp() {
        ShadowRecordHistogram.reset();
        MockitoAnnotations.initMocks(this);
        doNothing()
                .when(mChromeBrowserInitializer)
                .handlePreNativeStartupAndLoadLibraries(any(BrowserParts.class));
        // Complete native initialization immediately so tasks run synchronously.
        doAnswer((InvocationOnMock invocation) -> {
            mBrowserParts.getValue().finishNativeInitialization();
            return null;
        })
                .when(mChromeBrowserInitializer)
                .handlePostNativeStartup(eq(true), mBrowserParts.capture());
        ChromeBrowserInitializer.setForTesting(mChromeBrowserInitializer);
        mFakeTaskScheduler = new FakeBackgroundTaskScheduler();
        BackgroundTaskSchedulerFactory.setSchedulerForTesting(mFakeTaskScheduler);
        doReturn(null).when(mExploreSitesBackgroundTask).getProfile();
        ShadowExploreSitesBridge.mVariation = ExploreSitesVariation.ENABLED;
    }
    @Test
    public void scheduleTask() {
        ExploreSitesBackgroundTask.schedule(false /* updateCurrent */);
        TaskInfo scheduledTask =
                mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNotNull(scheduledTask);
        assertEquals(TimeUnit.HOURS.toMillis(ExploreSitesBackgroundTask.DEFAULT_DELAY_HOURS),
                scheduledTask.getPeriodicInfo().getIntervalMs());
        assertEquals(true, scheduledTask.isPersisted());
        assertEquals(TaskInfo.NetworkType.ANY, scheduledTask.getRequiredNetworkType());
    }
    @Test
    public void cancelTask() {
        TaskInfo scheduledTask =
                mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNull(scheduledTask);
        ExploreSitesBackgroundTask.schedule(false /* updateCurrent */);
        scheduledTask = mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNotNull(scheduledTask);
        assertEquals(TimeUnit.HOURS.toMillis(ExploreSitesBackgroundTask.DEFAULT_DELAY_HOURS),
                scheduledTask.getPeriodicInfo().getIntervalMs());
        ExploreSitesBackgroundTask.cancelTask();
        scheduledTask = mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNull(scheduledTask);
    }
    @Test
    public void testNoNetwork() {
        initDeviceConditions(ConnectionType.CONNECTION_NONE);
        TaskParameters params = TaskParameters.create(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID).build();
        int result = mExploreSitesBackgroundTask.onStartTaskBeforeNativeLoaded(
                RuntimeEnvironment.application, params, (boolean needsReschedule) -> {
                    fail("Finished callback should not be run, network conditions not met.");
                });
        assertEquals(NativeBackgroundTask.StartBeforeNativeResult.RESCHEDULE, result);
    }
    @Test
    public void testRemovesDeprecatedJobId() {
        TaskInfo.Builder deprecatedTaskInfoBuilder =
                TaskInfo.createPeriodicTask(TaskIds.DEPRECATED_EXPLORE_SITES_REFRESH_JOB_ID,
                                TimeUnit.HOURS.toMillis(4), TimeUnit.HOURS.toMillis(1))
                        .setRequiredNetworkType(TaskInfo.NetworkType.ANY)
                        .setIsPersisted(true)
                        .setUpdateCurrent(false);
        mFakeTaskScheduler.schedule(
                RuntimeEnvironment.application, deprecatedTaskInfoBuilder.build());
        TaskInfo deprecatedTask =
                mFakeTaskScheduler.getTaskInfo(TaskIds.DEPRECATED_EXPLORE_SITES_REFRESH_JOB_ID);
        assertNotNull(deprecatedTask);
        // Scheduling under the new id must drop the deprecated one.
        ExploreSitesBackgroundTask.schedule(false /* updateCurrent */);
        TaskInfo scheduledTask =
                mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNotNull(scheduledTask);
        deprecatedTask =
                mFakeTaskScheduler.getTaskInfo(TaskIds.DEPRECATED_EXPLORE_SITES_REFRESH_JOB_ID);
        assertNull(deprecatedTask);
    }
    @Test
    public void testRemovesTaskIfFeatureIsDisabled() {
        disableExploreSites();
        TaskInfo.Builder taskInfoBuilder =
                TaskInfo.createPeriodicTask(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID,
                                TimeUnit.HOURS.toMillis(4), TimeUnit.HOURS.toMillis(1))
                        .setRequiredNetworkType(TaskInfo.NetworkType.ANY)
                        .setIsPersisted(true)
                        .setUpdateCurrent(false);
        mFakeTaskScheduler.schedule(RuntimeEnvironment.application, taskInfoBuilder.build());
        TaskInfo task = mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNotNull(task);
        TaskParameters params = TaskParameters.create(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID).build();
        mExploreSitesBackgroundTask.onStartTaskWithNative(
                RuntimeEnvironment.application, params, (boolean needsReschedule) -> {
                    fail("Finished callback should not be run, the task should be cancelled.");
                });
        TaskInfo scheduledTask =
                mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNull(scheduledTask);
    }
    @Test
    public void testDoesNotRemoveTaskIfFeatureIsEnabled() {
        TaskInfo.Builder taskInfoBuilder =
                TaskInfo.createPeriodicTask(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID,
                                TimeUnit.HOURS.toMillis(4), TimeUnit.HOURS.toMillis(1))
                        .setRequiredNetworkType(TaskInfo.NetworkType.ANY)
                        .setIsPersisted(true)
                        .setUpdateCurrent(false);
        mFakeTaskScheduler.schedule(RuntimeEnvironment.application, taskInfoBuilder.build());
        TaskInfo task = mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNotNull(task);
        TaskParameters params = TaskParameters.create(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID).build();
        mExploreSitesBackgroundTask.onStartTaskWithNative(RuntimeEnvironment.application, params,
                (boolean needsReschedule) -> { fail("Finished callback should not be run"); });
        TaskInfo scheduledTask =
                mFakeTaskScheduler.getTaskInfo(TaskIds.EXPLORE_SITES_REFRESH_JOB_ID);
        assertNotNull(scheduledTask);
    }
}
| ric2b/Vivaldi-browser | chromium/chrome/android/junit/src/org/chromium/chrome/browser/explore_sites/ExploreSitesBackgroundTaskUnitTest.java | Java | bsd-3-clause | 11,676 |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.ntp;
import android.view.View;
import android.widget.CompoundButton;
/**
 * This interface includes methods that are shared in LegacyIncognitoDescriptionView and
 * RevampedIncognitoDescriptionView.
 */
public interface IncognitoDescriptionView {
    /**
     * Sets the "Learn more" link's click listener.
     * @param listener The given listener.
     */
    void setLearnMoreOnclickListener(View.OnClickListener listener);
    /**
     * Sets the cookie controls toggle's checked value.
     * @param enabled The value to set the toggle to.
     */
    void setCookieControlsToggle(boolean enabled);
    /**
     * Sets the cookie controls toggle's on-checked-change listener.
     * @param listener The given listener.
     */
    void setCookieControlsToggleOnCheckedChangeListener(
            CompoundButton.OnCheckedChangeListener listener);
    /**
     * Sets the cookie controls enforced state.
     * @param enforcement A CookieControlsEnforcement enum type indicating the type of
     *         enforcement policy being applied to Cookie Controls.
     */
    void setCookieControlsEnforcement(int enforcement);
    /**
     * Adds a click listener that redirects the user to the Cookie Control Settings.
     * @param listener The given listener.
     */
    void setCookieControlsIconOnclickListener(View.OnClickListener listener);
}
| ric2b/Vivaldi-browser | chromium/chrome/android/java/src/org/chromium/chrome/browser/ntp/IncognitoDescriptionView.java | Java | bsd-3-clause | 1,529 |
/*
Copyright (c) 2003-2012, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
/**
 * @fileOverview Contains the third and last part of the {@link CKEDITOR} object
 * definition.
 */

// Remove the CKEDITOR.loadFullCore reference defined on ckeditor_basic;
// at this point the full core is already loaded, so the stub is obsolete.
delete CKEDITOR.loadFullCore;

/**
 * Holds references to all editor instances created. The name of the properties
 * in this object correspond to instance names, and their values contains the
 * {@link CKEDITOR.editor} object representing them.
 * @type {Object}
 * @example
 * alert( <b>CKEDITOR.instances</b>.editor1.name ); // "editor1"
 */
CKEDITOR.instances = {};

/**
 * The document of the window holding the CKEDITOR object.
 * @type {CKEDITOR.dom.document}
 * @example
 * alert( <b>CKEDITOR.document</b>.getBody().getName() ); // "body"
 */
CKEDITOR.document = new CKEDITOR.dom.document( document );
/**
 * Adds an editor instance to the global {@link CKEDITOR} object. This function
 * is available for internal use mainly. It also wires focus/blur listeners
 * that keep {@link CKEDITOR.currentInstance} pointing at the focused editor.
 * @param {CKEDITOR.editor} editor The editor instance to be added.
 * @example
 */
CKEDITOR.add = function( editor )
{
	CKEDITOR.instances[ editor.name ] = editor;

	editor.on( 'focus', function()
		{
			// Fire 'currentInstance' only on actual changes.
			if ( CKEDITOR.currentInstance != editor )
			{
				CKEDITOR.currentInstance = editor;
				CKEDITOR.fire( 'currentInstance' );
			}
		});

	editor.on( 'blur', function()
		{
			if ( CKEDITOR.currentInstance == editor )
			{
				CKEDITOR.currentInstance = null;
				CKEDITOR.fire( 'currentInstance' );
			}
		});
};
/**
 * Removes an editor instance from the global {@link CKEDITOR} object. This function
 * is available for internal use only. External code must use {@link CKEDITOR.editor.prototype.destroy}
 * to avoid memory leaks.
 * @param {CKEDITOR.editor} editor The editor instance to be removed.
 * @example
 */
CKEDITOR.remove = function( editor )
{
	delete CKEDITOR.instances[ editor.name ];
};
/**
 * Perform global clean up to free as much memory as possible
 * when there are no instances left: firing 'reset' lets subsystems
 * drop their caches.
 */
CKEDITOR.on( 'instanceDestroyed', function ()
	{
		if ( CKEDITOR.tools.isEmpty( this.instances ) )
			CKEDITOR.fire( 'reset' );
	});

// Load the bootstrap script.
CKEDITOR.loader.load( 'core/_bootstrap' );		// @Packager.RemoveLine
// Tri-state constants used for button/command states across the UI.

/**
 * Used to indicate the ON or ACTIVE state.
 * @constant
 * @example
 */
CKEDITOR.TRISTATE_ON = 1;

/**
 * Used to indicate the OFF or NON ACTIVE state.
 * @constant
 * @example
 */
CKEDITOR.TRISTATE_OFF = 2;

/**
 * Used to indicate the DISABLED state.
 * @constant
 * @example
 */
CKEDITOR.TRISTATE_DISABLED = 0;
/**
* The editor which is currently active (have user focus).
* @name CKEDITOR.currentInstance
* @type CKEDITOR.editor
* @see CKEDITOR#currentInstance
* @example
* function showCurrentEditorName()
* {
* if ( CKEDITOR.currentInstance )
* alert( CKEDITOR.currentInstance.name );
* else
* alert( 'Please focus an editor first.' );
* }
*/
/**
* Fired when the CKEDITOR.currentInstance object reference changes. This may
* happen when setting the focus on different editor instances in the page.
* @name CKEDITOR#currentInstance
 * @event
 * @example
 * var editor; // Variable to hold a reference to the current editor.
* CKEDITOR.on( 'currentInstance' , function( e )
* {
* editor = CKEDITOR.currentInstance;
* });
*/
/**
* Fired when the last instance has been destroyed. This event is used to perform
* global memory clean up.
* @name CKEDITOR#reset
* @event
*/
| Videl/ALK | web/ckeditor/_source/core/ckeditor.js | JavaScript | mit | 3,716 |
/**
@module breeze
**/

var EntityAction = (function () {
	/**
	EntityAction is an 'Enum' containing all of the valid actions that can occur to an 'Entity'.

	@class EntityAction
	@static
	**/

	// Predicate methods shared by every EntityAction symbol.
	// NOTE(review): each predicate returns `!!this.isAttach` (etc.) — the same
	// name as the method itself. Whether that resolves to the boolean flag
	// passed to addSymbol (an own property shadowing the prototype method) or
	// to the method (always truthy) depends on how Enum.addSymbol stores
	// symbol properties; confirm against the Enum implementation that e.g.
	// EntityAction.Detach.isAttach() really yields false.
	var entityActionMethods = {
		isAttach: function () {
			return !!this.isAttach;
		},
		isDetach: function () {
			return !!this.isDetach;
		},
		isModification: function () {
			return !!this.isModification;
		}
	};

	var EntityAction = new Enum("EntityAction", entityActionMethods);

	/**
	Attach - Entity was attached via an AttachEntity call.

	@property Attach {EntityAction}
	@final
	@static
	**/
	EntityAction.Attach = EntityAction.addSymbol({ isAttach: true});

	/**
	AttachOnQuery - Entity was attached as a result of a query.

	@property AttachOnQuery {EntityAction}
	@final
	@static
	**/
	EntityAction.AttachOnQuery = EntityAction.addSymbol({ isAttach: true});

	/**
	AttachOnImport - Entity was attached as a result of an import.

	@property AttachOnImport {EntityAction}
	@final
	@static
	**/
	EntityAction.AttachOnImport = EntityAction.addSymbol({ isAttach: true});

	/**
	Detach - Entity was detached.

	@property Detach {EntityAction}
	@final
	@static
	**/
	EntityAction.Detach = EntityAction.addSymbol({ isDetach: true });

	/**
	MergeOnQuery - Properties on the entity were merged as a result of a query.

	@property MergeOnQuery {EntityAction}
	@final
	@static
	**/
	EntityAction.MergeOnQuery = EntityAction.addSymbol({ isModification: true });

	/**
	MergeOnImport - Properties on the entity were merged as a result of an import.

	@property MergeOnImport {EntityAction}
	@final
	@static
	**/
	EntityAction.MergeOnImport = EntityAction.addSymbol({ isModification: true });

	/**
	MergeOnSave - Properties on the entity were merged as a result of a save.

	@property MergeOnSave {EntityAction}
	@final
	@static
	**/
	EntityAction.MergeOnSave = EntityAction.addSymbol({ isModification: true });

	/**
	PropertyChange - A property on the entity was changed.

	@property PropertyChange {EntityAction}
	@final
	@static
	**/
	EntityAction.PropertyChange = EntityAction.addSymbol({ isModification: true});

	/**
	EntityStateChange - The EntityState of the entity was changed.

	@property EntityStateChange {EntityAction}
	@final
	@static
	**/
	EntityAction.EntityStateChange = EntityAction.addSymbol();

	/**
	AcceptChanges - AcceptChanges was called on the entity, or its entityState was set to Unmodified.

	@property AcceptChanges {EntityAction}
	@final
	@static
	**/
	EntityAction.AcceptChanges = EntityAction.addSymbol();

	/**
	RejectChanges - RejectChanges was called on the entity.

	@property RejectChanges {EntityAction}
	@final
	@static
	**/
	EntityAction.RejectChanges = EntityAction.addSymbol({ isModification: true});

	/**
	Clear - The EntityManager was cleared. All entities detached.

	@property Clear {EntityAction}
	@final
	@static
	**/
	EntityAction.Clear = EntityAction.addSymbol({ isDetach: true});

	EntityAction.resolveSymbols();

	return EntityAction;
})();

breeze.EntityAction = EntityAction;
| edigitalresearch/breeze.js | src/a30_entityAction.js | JavaScript | mit | 3,148 |
/**
* Globalize v1.4.0-alpha.2
*
* http://github.com/jquery/globalize
*
* Copyright jQuery Foundation and other contributors
* Released under the MIT license
* http://jquery.org/license
*
* Date: 2018-03-09T13:51Z
*/
/*!
* Globalize v1.4.0-alpha.2 2018-03-09T13:51Z Released under the MIT license
* http://git.io/TrdQbw
*/
(function( root, factory ) {
// UMD returnExports
if ( typeof define === "function" && define.amd ) {
// AMD
define([
"cldr",
"../globalize",
"./number",
"cldr/event",
"cldr/supplemental"
], factory );
} else if ( typeof exports === "object" ) {
// Node, CommonJS
module.exports = factory( require( "cldrjs" ), require( "../globalize" ) );
} else {
// Extend global
factory( root.Cldr, root.Globalize );
}
}(this, function( Cldr, Globalize ) {
// Local aliases for Globalize's private, shared helpers. These are exposed as
// underscore-prefixed statics on the Globalize object so that each module
// bundle can reuse them without redeclaring the implementations.
var createError = Globalize._createError,
	createErrorUnsupportedFeature = Globalize._createErrorUnsupportedFeature,
	formatMessage = Globalize._formatMessage,
	isPlainObject = Globalize._isPlainObject,
	looseMatching = Globalize._looseMatching,
	numberNumberingSystemDigitsMap = Globalize._numberNumberingSystemDigitsMap,
	numberSymbol = Globalize._numberSymbol,
	regexpEscape = Globalize._regexpEscape,
	removeLiteralQuotes = Globalize._removeLiteralQuotes,
	runtimeBind = Globalize._runtimeBind,
	stringPad = Globalize._stringPad,
	validate = Globalize._validate,
	validateCldr = Globalize._validateCldr,
	validateDefaultLocale = Globalize._validateDefaultLocale,
	validateParameterPresence = Globalize._validateParameterPresence,
	validateParameterType = Globalize._validateParameterType,
	validateParameterTypePlainObject = Globalize._validateParameterTypePlainObject,
	validateParameterTypeString = Globalize._validateParameterTypeString;
// Validates that `value` is either undefined or a Date instance; reports the
// parameter under `name` otherwise.
var validateParameterTypeDate = function( value, name ) {
	validateParameterType( value, name, value === undefined || value instanceof Date, "Date" );
};

// Builds an E_INVALID_PAR_VALUE error. Note the positional ( name, value )
// signature: callers must pass two arguments, not one options object —
// passing an object here would interpolate "[object Object]" into the message.
var createErrorInvalidParameterValue = function( name, value ) {
	return createError( "E_INVALID_PAR_VALUE", "Invalid `{name}` value ({value}).", {
		name: name,
		value: value
	});
};
/**
 * Maps each valid skeleton field character to its canonical position, e.g.,
 * {
 *   G: 0,
 *   y: 1,
 *   ...
 * }
 * Used to validate the field ordering of a requested skeleton.
 */
var validateSkeletonFieldsPosMap = (function() {
	var positions = {};
	"GyYuUrQqMLlwWEecdDFghHKkmsSAzZOvVXx".split( "" ).forEach(function( field, index ) {
		positions[ field ] = index;
	});
	return positions;
}());
/**
 * validateSkeleton( skeleton )
 *
 * Throws E_INVALID_OPTIONS when the skeleton contains an invalid field
 * character, or when its fields appear out of canonical order.
 *
 * skeleton: Assume `j` has already been converted into a localized hour field.
 */
var validateSkeleton = function validateSkeleton( skeleton ) {
	var previousPos,

		// Using easier to read variable.
		posMap = validateSkeletonFieldsPosMap;

	// "The fields are from the Date Field Symbol Table in Date Format Patterns"
	// Ref: http://www.unicode.org/reports/tr35/tr35-dates.html#availableFormats_appendItems
	// I.e., reject any character that is not a valid field symbol.
	skeleton.replace( /[^GyYuUrQqMLlwWEecdDFghHKkmsSAzZOvVXx]/, function( field ) {
		throw createError(
			"E_INVALID_OPTIONS", "Invalid field `{invalidField}` of skeleton `{value}`",
			{
				invalidField: field,
				type: "skeleton",
				value: skeleton
			}
		);
	});

	// "The canonical order is from top to bottom in that table; that is, yM not My".
	// http://www.unicode.org/reports/tr35/tr35-dates.html#availableFormats_appendItems
	// I.e., reject fields that appear before a field of a later position.
	skeleton.split( "" ).forEach(function( field ) {
		if ( posMap[ field ] < previousPos ) {
			throw createError(
				"E_INVALID_OPTIONS", "Invalid order `{invalidField}` of skeleton `{value}`",
				{
					invalidField: field,
					type: "skeleton",
					value: skeleton
				}
			);
		}
		previousPos = posMap[ field ];
	});
};
/**
 * Returns a new object created by using `object`'s values as keys, and the
 * keys as values. An optional `fn( accumulator, key, value )` callback may
 * customize how each pair is folded into the result.
 */
var objectInvert = function( object, fn ) {
	var accumulator = {};
	if ( !fn ) {
		fn = function( accumulator, key, value ) {
			accumulator[ value ] = key;
			return accumulator;
		};
	}
	Object.keys( object ).forEach(function( key ) {
		accumulator = fn( accumulator, key, object[ key ] );
	});
	return accumulator;
};
// Invert key and values, e.g., {"e": "eEc"} ==> {"e": "e", "E": "e", "c": "e"}.
// Every character of each value string becomes a key pointing back at the
// canonical field character, so similar fields normalize to one symbol.
var dateExpandPatternSimilarFieldsMap = objectInvert({
	"e": "eEc",
	"L": "ML"
}, function( object, key, value ) {
	var i;
	for ( i = 0; i < value.length; i++ ) {
		object[ value.charAt( i ) ] = key;
	}
	return object;
});
// Collapses similar pattern characters to their canonical symbol (e.g., "E"
// and "c" become "e"); characters without an entry are returned unchanged.
var dateExpandPatternNormalizePatternType = function( character ) {
	var canonical = dateExpandPatternSimilarFieldsMap[ character ];
	return canonical === undefined ? character : canonical;
};
// Tokenizer for date patterns/skeletons: matches runs of the same letter
// (e.g., "MMM"), 'quoted literals' (with '' as an escaped quote), a lone '',
// or any other single character.
var datePatternRe = ( /([a-z])\1*|'([^']|'')+'|''|./ig );
// Returns `str` concatenated with itself `count` times ("" when count <= 0).
var stringRepeat = function( str, count ) {
	var pieces = [];
	while ( count-- > 0 ) {
		pieces.push( str );
	}
	return pieces.join( "" );
};
// Widens fields of `bestMatchFormat` so they are at least as long as the
// corresponding (same normalized type) fields of `requestedSkeleton`, e.g.,
// requesting "yMMMM" against best match "y MMM" yields "y MMMM".
var dateExpandPatternAugmentFormat = function( requestedSkeleton, bestMatchFormat ) {

	// Using an easier to read variable.
	var normalize = dateExpandPatternNormalizePatternType;

	var requestedFields = requestedSkeleton.match( datePatternRe );
	var matchedFields = bestMatchFormat.match( datePatternRe );

	matchedFields.forEach(function( matchedField, index ) {
		var matchedType = matchedField.charAt( 0 );
		var matchedLength = matchedField.length;
		requestedFields.forEach(function( requestedField ) {
			var requestedType = requestedField.charAt( 0 );
			var requestedLength = requestedField.length;
			if ( normalize( matchedType ) === normalize( requestedType ) &&
					matchedLength < requestedLength ) {
				matchedFields[ index ] = stringRepeat( matchedType, requestedLength );
			}
		});
	});

	return matchedFields.join( "" );
};
// Rates how closely two skeletons/formats match: 0 for identical inputs, -1
// for incompatible ones (different field counts, or a field type present in
// A with no counterpart in B), otherwise a positive distance — smaller is a
// better match.
var dateExpandPatternCompareFormats = function( formatA, formatB ) {
	var a, b, distance, lenA, lenB, typeA, typeB, i, j,

		// Using easier to read variables.
		normalizePatternType = dateExpandPatternNormalizePatternType;

	// Identical inputs are a perfect match.
	if ( formatA === formatB ) {
		return 0;
	}

	formatA = formatA.match( datePatternRe );
	formatB = formatB.match( datePatternRe );

	// Different number of fields: incompatible.
	if ( formatA.length !== formatB.length ) {
		return -1;
	}

	distance = 1;
	for ( i = 0; i < formatA.length; i++ ) {
		a = formatA[ i ].charAt( 0 );
		typeA = normalizePatternType( a );
		typeB = null;

		// Find the field in B whose normalized type matches A's field; note
		// `j` is intentionally read after this loop to index formatB below.
		for ( j = 0; j < formatB.length; j++ ) {
			b = formatB[ j ].charAt( 0 );
			typeB = normalizePatternType( b );
			if ( typeA === typeB ) {
				break;
			} else {
				typeB = null;
			}
		}

		// No field of this type in B: incompatible.
		if ( typeB === null ) {
			return -1;
		}
		lenA = formatA[ i ].length;
		lenB = formatB[ j ].length;
		distance = distance + Math.abs( lenA - lenB );

		// Most symbols have a small distance from each other, e.g., M ≅ L; E ≅ c; a ≅ b ≅ B;
		// H ≅ k ≅ h ≅ K; ...
		if ( a !== b ) {
			distance += 1;
		}

		// Numeric (l<3) and text fields (l>=3) are given a larger distance from each other.
		if ( ( lenA < 3 && lenB >= 3 ) || ( lenA >= 3 && lenB < 3 ) ) {
			distance += 20;
		}
	}
	return distance;
};
// Resolves `askedSkeleton` against CLDR availableFormats: returns the direct
// match when present, otherwise rates every available skeleton, picks the
// closest compatible one, and widens its fields to the requested lengths.
var dateExpandPatternGetBestMatchPattern = function( cldr, askedSkeleton ) {
	var availableFormats, candidates, skeleton;
	var path = "dates/calendars/gregorian/dateTimeFormats/availableFormats";
	var pattern = cldr.main([ path, askedSkeleton ]);

	if ( askedSkeleton && !pattern ) {
		availableFormats = cldr.main([ path ]);
		candidates = [];
		for ( skeleton in availableFormats ) {
			candidates.push({
				skeleton: skeleton,
				pattern: availableFormats[ skeleton ],
				rate: dateExpandPatternCompareFormats( askedSkeleton, skeleton )
			});
		}

		// Keep compatible candidates only, best (lowest) rate first.
		candidates = candidates.filter(function( candidate ) {
			return candidate.rate > -1;
		});
		candidates.sort(function( candidateA, candidateB ) {
			return candidateA.rate - candidateB.rate;
		});

		if ( candidates.length ) {
			pattern = dateExpandPatternAugmentFormat( askedSkeleton, candidates[ 0 ].pattern );
		}
	}

	return pattern;
};
/**
 * expandPattern( options, cldr )
 *
 * @options [Object] if String, it's considered a skeleton. Object accepts:
 * - skeleton: [String] lookup availableFormat;
 * - date: [String] ( "full" | "long" | "medium" | "short" );
 * - time: [String] ( "full" | "long" | "medium" | "short" );
 * - datetime: [String] ( "full" | "long" | "medium" | "short" );
 * - raw: [String] For more info see datetime/format.js.
 *
 * @cldr [Cldr instance].
 *
 * Return the corresponding pattern.
 * Eg for "en":
 * - "GyMMMd" returns "MMM d, y G";
 * - { skeleton: "GyMMMd" } returns "MMM d, y G";
 * - { date: "full" } returns "EEEE, MMMM d, y";
 * - { time: "full" } returns "h:mm:ss a zzzz";
 * - { datetime: "full" } returns "EEEE, MMMM d, y 'at' h:mm:ss a zzzz";
 * - { raw: "dd/mm" } returns "dd/mm";
 *
 * Throws E_INVALID_PAR_VALUE when none of the recognized option keys is
 * present.
 */
var dateExpandPattern = function( options, cldr ) {
	var dateSkeleton, result, skeleton, timeSkeleton, type,

		// Using easier to read variables.
		getBestMatchPattern = dateExpandPatternGetBestMatchPattern;

	// Combines a date and a time pattern using the locale's dateTimeFormat
	// glue pattern of the given width ("full" | "long" | "medium" | "short").
	function combineDateTime( type, datePattern, timePattern ) {
		return formatMessage(
			cldr.main([
				"dates/calendars/gregorian/dateTimeFormats",
				type
			]),
			[ timePattern, datePattern ]
		);
	}

	switch ( true ) {
		case "skeleton" in options:
			skeleton = options.skeleton;

			// Preferred hour (j).
			skeleton = skeleton.replace( /j/g, function() {
				return cldr.supplemental.timeData.preferred();
			});
			validateSkeleton( skeleton );

			// Try direct map (note that getBestMatchPattern handles it).
			// ... or, try to "best match" the whole skeleton.
			result = getBestMatchPattern(
				cldr,
				skeleton
			);
			if ( result ) {
				break;
			}

			// ... or, try to "best match" the date and time parts individually.
			timeSkeleton = skeleton.split( /[^hHKkmsSAzZOvVXx]/ ).slice( -1 )[ 0 ];
			dateSkeleton = skeleton.split( /[^GyYuUrQqMLlwWdDFgEec]/ )[ 0 ];
			dateSkeleton = getBestMatchPattern(
				cldr,
				dateSkeleton
			);
			timeSkeleton = getBestMatchPattern(
				cldr,
				timeSkeleton
			);

			// Pick the dateTimeFormat glue width from the month/weekday widths.
			if ( /(MMMM|LLLL).*[Ec]/.test( dateSkeleton ) ) {
				type = "full";
			} else if ( /MMMM|LLLL/.test( dateSkeleton ) ) {
				type = "long";
			} else if ( /MMM|LLL/.test( dateSkeleton ) ) {
				type = "medium";
			} else {
				type = "short";
			}

			if ( dateSkeleton && timeSkeleton ) {
				result = combineDateTime( type, dateSkeleton, timeSkeleton );
			} else {
				result = dateSkeleton || timeSkeleton;
			}
			break;

		case "date" in options:
		case "time" in options:
			result = cldr.main([
				"dates/calendars/gregorian",
				"date" in options ? "dateFormats" : "timeFormats",
				( options.date || options.time )
			]);
			break;

		case "datetime" in options:
			result = combineDateTime( options.datetime,
				cldr.main([ "dates/calendars/gregorian/dateFormats", options.datetime ]),
				cldr.main([ "dates/calendars/gregorian/timeFormats", options.datetime ])
			);
			break;

		case "raw" in options:
			result = options.raw;
			break;

		default:

			// Fixed: createErrorInvalidParameterValue takes positional
			// ( name, value ) arguments; passing a single object produced a
			// garbled "Invalid `[object Object]` value (undefined)." message.
			throw createErrorInvalidParameterValue( "options", options );
	}

	return result;
};
// Week day keys in the order used by CLDR supplemental week data (index 0 is
// sunday).
var dateWeekDays = [ "sun", "mon", "tue", "wed", "thu", "fri", "sat" ];

/**
 * firstDayOfWeek
 *
 * Returns the locale's first day of week as a 0-6 index into dateWeekDays,
 * or -1 when CLDR reports an unrecognized day key.
 */
var dateFirstDayOfWeek = function( cldr ) {
	var firstDay = cldr.supplemental.weekData.firstDay();
	return dateWeekDays.indexOf( firstDay );
};
/**
 * getTimeZoneName( length, type )
 *
 * Resolves a localized time zone display name of the given `type`
 * ("standard" | "daylight" | "generic"), using the short variant for
 * length < 4 and the long one otherwise. Looks at the zone's own names
 * first, then falls back to its metazone names. Returns undefined when no
 * timeZone is given or nothing is found.
 */
var dateGetTimeZoneName = function( length, type, timeZone, cldr ) {
	var metaZone,
		width = length < 4 ? "short" : "long";

	if ( !timeZone ) {
		return;
	}

	var zoneName = cldr.main([
		"dates/timeZoneNames/zone",
		timeZone,
		width,
		type
	]);
	if ( zoneName ) {
		return zoneName;
	}

	// The latest metazone data of the metazone array.
	// TODO expand to support the historic metazones based on the given date.
	metaZone = cldr.supplemental([
		"metaZones/metazoneInfo/timezone", timeZone, 0,
		"usesMetazone/_mzone"
	]);

	return cldr.main([
		"dates/timeZoneNames/metazone",
		metaZone,
		width,
		type
	]);
};
/**
 * timezoneHourFormatShortH( hourFormat )
 *
 * @hourFormat [String]
 *
 * Unofficial deduction of the short hourFormat given time zone `hourFormat`
 * element: each ";"-separated half is truncated right after its first "H".
 * Official spec is pending resolution: http://unicode.org/cldr/trac/ticket/8293
 *
 * Example:
 * - "+HH.mm;-HH.mm" => "+H;-H"
 * - "+HH:mm;-HH:mm" => "+H;-H"
 * - "+HH:mm;−HH:mm" => "+H;−H" (Note MINUS SIGN \u2212)
 * - "+HHmm;-HHmm" => "+H:-H"
 */
var dateTimezoneHourFormatH = function( hourFormat ) {
	var halves = hourFormat.split( ";" );
	var truncated = halves.map(function( format ) {
		var hPos = format.indexOf( "H" );
		return format.slice( 0, hPos + 1 );
	});
	return truncated.join( ";" );
};
/**
 * timezoneHourFormatLongHm( hourFormat )
 *
 * @hourFormat [String]
 *
 * Unofficial deduction of the Hm hourFormat given time zone `hourFormat`
 * element: in each ";"-separated half, the run(s) of "H" are replaced by
 * `hFormat` ("H" for short Hm, "HH" for long Hm).
 * Official spec is pending resolution: http://unicode.org/cldr/trac/ticket/8293
 *
 * Example (hFormat === "H"): (used for short Hm)
 * - "+HH.mm;-HH.mm" => "+H.mm;-H.mm"
 * - "+HH:mm;-HH:mm" => "+H:mm;-H:mm"
 * - "+HH:mm;−HH:mm" => "+H:mm;−H:mm" (Note MINUS SIGN \u2212)
 * - "+HHmm;-HHmm" => "+Hmm:-Hmm"
 *
 * Example (hFormat === "HH": (used for long Hm)
 * - "+HH.mm;-HH.mm" => "+HH.mm;-HH.mm"
 * - "+HH:mm;-HH:mm" => "+HH:mm;-HH:mm"
 * - "+H:mm;-H:mm" => "+HH:mm;-HH:mm"
 * - "+HH:mm;−HH:mm" => "+HH:mm;−HH:mm" (Note MINUS SIGN \u2212)
 * - "+HHmm;-HHmm" => "+HHmm:-HHmm"
 */
var dateTimezoneHourFormatHm = function( hourFormat, hFormat ) {
	return hourFormat.split( ";" ).map(function( format ) {
		var segments = format.split( /H+/ );
		return segments[ 0 ] + hFormat + segments.slice( 1 ).join( "" );
	}).join( ";" );
};
// Wraps `data` in a zero-argument accessor function tagged with a
// `dataCacheKey`, the shape runtime formatters use for cached lookups.
var runtimeCacheDataBind = function( key, data ) {
	function boundData() {
		return data;
	}
	boundData.dataCacheKey = key;
	return boundData;
};
/**
 * properties( pattern, cldr )
 *
 * @pattern [String] raw pattern.
 * ref: http://www.unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
 *
 * @cldr [Cldr instance].
 *
 * Return the properties given the pattern and cldr.
 *
 * TODO Support other calendar types.
 */
var dateFormatProperties = function( pattern, cldr, timeZone ) {
	var properties = {
			numberFormatters: {},
			pattern: pattern,
			timeSeparator: numberSymbol( "timeSeparator", cldr )
		},
		widths = [ "abbreviated", "wide", "narrow" ];

	// Registers a zero-padded number formatter of `pad` digits.
	function setNumberFormatterPattern( pad ) {
		properties.numberFormatters[ pad ] = stringPad( "", pad );
	}

	if ( timeZone ) {
		properties.timeZoneData = runtimeCacheDataBind( "iana/" + timeZone, {
			offsets: cldr.get([ "globalize-iana/zoneData", timeZone, "offsets" ]),
			untils: cldr.get([ "globalize-iana/zoneData", timeZone, "untils" ]),
			isdsts: cldr.get([ "globalize-iana/zoneData", timeZone, "isdsts" ])
		});
	}

	// .replace() is used purely to iterate the pattern's tokens; its return
	// value is discarded.
	pattern.replace( datePatternRe, function( current ) {
		var aux, chr, daylightTzName, formatNumber, genericTzName, length, standardTzName;

		chr = current.charAt( 0 );
		length = current.length;

		if ( chr === "j" ) {

			// Locale preferred hHKk.
			// http://www.unicode.org/reports/tr35/tr35-dates.html#Time_Data
			properties.preferredTime = chr = cldr.supplemental.timeData.preferred();
		}

		// ZZZZ: same as "OOOO".
		if ( chr === "Z" && length === 4 ) {
			chr = "O";
			length = 4;
		}

		// z...zzz: "{shortRegion}", eg. "PST" or "PDT".
		// zzzz: "{regionName} {Standard Time}" or "{regionName} {Daylight Time}",
		// e.g., "Pacific Standard Time" or "Pacific Daylight Time".
		// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
		if ( chr === "z" ) {
			standardTzName = dateGetTimeZoneName( length, "standard", timeZone, cldr );
			daylightTzName = dateGetTimeZoneName( length, "daylight", timeZone, cldr );
			if ( standardTzName ) {
				properties.standardTzName = standardTzName;
			}
			if ( daylightTzName ) {
				properties.daylightTzName = daylightTzName;
			}

			// Fall through the "O" format in case one name is missing.
			if ( !standardTzName || !daylightTzName ) {
				chr = "O";
				if ( length < 4 ) {
					length = 1;
				}
			}
		}

		// v...vvv: "{shortRegion}", eg. "PT".
		// vvvv: "{regionName} {Time}" or "{regionName} {Time}",
		// e.g., "Pacific Time"
		// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
		if ( chr === "v" ) {
			genericTzName = dateGetTimeZoneName( length, "generic", timeZone, cldr );

			// Fall back to "V" format.
			if ( !genericTzName ) {
				chr = "V";
				length = 4;
			}
		}

		switch ( chr ) {

			// Era
			case "G":
				properties.eras = cldr.main([
					"dates/calendars/gregorian/eras",
					length <= 3 ? "eraAbbr" : ( length === 4 ? "eraNames" : "eraNarrow" )
				]);
				break;

			// Year
			case "y":

				// Plain year.
				formatNumber = true;
				break;

			case "Y":

				// Year in "Week of Year"
				properties.firstDay = dateFirstDayOfWeek( cldr );
				properties.minDays = cldr.supplemental.weekData.minDays();
				formatNumber = true;
				break;

			case "u": // Extended year. Need to be implemented.
			case "U": // Cyclic year name. Need to be implemented.
				throw createErrorUnsupportedFeature({
					feature: "year pattern `" + chr + "`"
				});

			// Quarter
			case "Q":
			case "q":
				if ( length > 2 ) {
					if ( !properties.quarters ) {
						properties.quarters = {};
					}
					if ( !properties.quarters[ chr ] ) {
						properties.quarters[ chr ] = {};
					}
					properties.quarters[ chr ][ length ] = cldr.main([
						"dates/calendars/gregorian/quarters",
						chr === "Q" ? "format" : "stand-alone",
						widths[ length - 3 ]
					]);
				} else {
					formatNumber = true;
				}
				break;

			// Month
			case "M":
			case "L":
				if ( length > 2 ) {
					if ( !properties.months ) {
						properties.months = {};
					}
					if ( !properties.months[ chr ] ) {
						properties.months[ chr ] = {};
					}
					properties.months[ chr ][ length ] = cldr.main([
						"dates/calendars/gregorian/months",
						chr === "M" ? "format" : "stand-alone",
						widths[ length - 3 ]
					]);
				} else {
					formatNumber = true;
				}
				break;

			// Week - Week of Year (w) or Week of Month (W).
			case "w":
			case "W":
				properties.firstDay = dateFirstDayOfWeek( cldr );
				properties.minDays = cldr.supplemental.weekData.minDays();
				formatNumber = true;
				break;

			// Day
			case "d":
			case "D":
			case "F":
				formatNumber = true;
				break;

			case "g":

				// Modified Julian day. Need to be implemented.
				throw createErrorUnsupportedFeature({
					feature: "Julian day pattern `g`"
				});

			// Week day
			case "e":
			case "c":
				if ( length <= 2 ) {
					properties.firstDay = dateFirstDayOfWeek( cldr );
					formatNumber = true;
					break;
				}

			/* falls through */
			case "E":
				if ( !properties.days ) {
					properties.days = {};
				}
				if ( !properties.days[ chr ] ) {
					properties.days[ chr ] = {};
				}
				if ( length === 6 ) {

					// If short day names are not explicitly specified, abbreviated day names are
					// used instead.
					// http://www.unicode.org/reports/tr35/tr35-dates.html#months_days_quarters_eras
					// http://unicode.org/cldr/trac/ticket/6790
					properties.days[ chr ][ length ] = cldr.main([
						"dates/calendars/gregorian/days",
						chr === "c" ? "stand-alone" : "format",
						"short"
					]) || cldr.main([
						"dates/calendars/gregorian/days",
						chr === "c" ? "stand-alone" : "format",
						"abbreviated"
					]);
				} else {
					properties.days[ chr ][ length ] = cldr.main([
						"dates/calendars/gregorian/days",
						chr === "c" ? "stand-alone" : "format",
						widths[ length < 3 ? 0 : length - 3 ]
					]);
				}
				break;

			// Period (AM or PM)
			case "a":
				properties.dayPeriods = {
					am: cldr.main(
						"dates/calendars/gregorian/dayPeriods/format/wide/am"
					),
					pm: cldr.main(
						"dates/calendars/gregorian/dayPeriods/format/wide/pm"
					)
				};
				break;

			// Hour
			case "h": // 1-12
			case "H": // 0-23
			case "K": // 0-11
			case "k": // 1-24

			// Minute
			case "m":

			// Second
			case "s":
			case "S":
			case "A":
				formatNumber = true;
				break;

			// Zone
			case "v":
				if ( length !== 1 && length !== 4 ) {
					throw createErrorUnsupportedFeature({
						feature: "timezone pattern `" + pattern + "`"
					});
				}
				properties.genericTzName = genericTzName;
				break;

			case "V":
				if ( length === 1 ) {
					throw createErrorUnsupportedFeature({
						feature: "timezone pattern `" + pattern + "`"
					});
				}

				if ( timeZone ) {
					if ( length === 2 ) {
						properties.timeZoneName = timeZone;
						break;
					}

					var timeZoneName,
						exemplarCity = cldr.main([
							"dates/timeZoneNames/zone", timeZone, "exemplarCity"
						]);

					if ( length === 3 ) {
						if ( !exemplarCity ) {
							exemplarCity = cldr.main([
								"dates/timeZoneNames/zone/Etc/Unknown/exemplarCity"
							]);
						}
						timeZoneName = exemplarCity;
					}

					if ( exemplarCity && length === 4 ) {
						timeZoneName = formatMessage(
							cldr.main(
								"dates/timeZoneNames/regionFormat"
							),
							[ exemplarCity ]
						);
					}

					if ( timeZoneName ) {
						properties.timeZoneName = timeZoneName;
						break;
					}
				}

				// NOTE(review): reached when a "v" token fell back to "V"
				// handling above — `current` is the raw token text; confirm
				// the intended length here is 1 for the "O" fallback.
				if ( current === "v" ) {
					length = 1;
				}

			/* falls through */
			case "O":

				// O: "{gmtFormat}+H;{gmtFormat}-H" or "{gmtZeroFormat}", eg. "GMT-8" or "GMT".
				// OOOO: "{gmtFormat}{hourFormat}" or "{gmtZeroFormat}", eg. "GMT-08:00" or "GMT".
				properties.gmtFormat = cldr.main( "dates/timeZoneNames/gmtFormat" );
				properties.gmtZeroFormat = cldr.main( "dates/timeZoneNames/gmtZeroFormat" );

				// Unofficial deduction of the hourFormat variations.
				// Official spec is pending resolution: http://unicode.org/cldr/trac/ticket/8293
				aux = cldr.main( "dates/timeZoneNames/hourFormat" );
				properties.hourFormat = length < 4 ?
					[ dateTimezoneHourFormatH( aux ), dateTimezoneHourFormatHm( aux, "H" ) ] :
					dateTimezoneHourFormatHm( aux, "HH" );

			/* falls through */
			case "Z":
			case "X":
			case "x":
				setNumberFormatterPattern( 1 );
				setNumberFormatterPattern( 2 );
				break;
		}

		if ( formatNumber ) {
			setNumberFormatterPattern( length );
		}
	});

	return properties;
};
// Builds a plain string formatter on top of a to-parts formatter by
// concatenating each part's `value`.
var dateFormatterFn = function( dateToPartsFormatter ) {
	return function dateFormatter( value ) {
		var parts = dateToPartsFormatter( value );
		var result = "";
		parts.forEach(function( part ) {
			result += part.value;
		});
		return result;
	};
};
/**
 * parseProperties( cldr )
 *
 * @cldr [Cldr instance].
 *
 * @timeZone [String] optional IANA time zone; when given, the corresponding
 * zone data is bundled under `timeZoneData`.
 *
 * Return parser properties.
 */
var dateParseProperties = function( cldr, timeZone ) {
	var ianaKeys = [ "offsets", "untils", "isdsts" ],
		properties = {
			preferredTimeData: cldr.supplemental.timeData.preferred()
		};

	if ( timeZone ) {
		var zoneData = {};
		ianaKeys.forEach(function( key ) {
			zoneData[ key ] = cldr.get([ "globalize-iana/zoneData", timeZone, key ]);
		});
		properties.timeZoneData = runtimeCacheDataBind( "iana/" + timeZone, zoneData );
	}

	return properties;
};
// A Date-like object fixed to a specific IANA time zone. `original` holds the
// actual UTC instant; `local` holds the same instant shifted so that its UTC
// getters read as wall-clock time in the target zone.
var ZonedDateTime = (function() {

	// Defines a non-enumerable, non-writable property.
	function definePrivateProperty(object, property, value) {
		Object.defineProperty(object, property, {
			value: value
		});
	}

	// Index of the IANA `untils` transition interval containing `original`.
	function getUntilsIndex(original, untils) {
		var index = 0;
		var originalTime = original.getTime();

		// TODO Should we do binary search for improved performance?
		while (index < untils.length - 1 && originalTime >= untils[index]) {
			index++;
		}
		return index;
	}

	// Runs a mutating setter and re-applies the zone offset in case the
	// mutation moved the instant across a DST transition.
	function setWrap(fn) {
		var offset1 = this.getTimezoneOffset();
		var ret = fn();
		this.original.setTime(new Date(this.getTime()));
		var offset2 = this.getTimezoneOffset();
		if (offset2 - offset1) {
			this.original.setMinutes(this.original.getMinutes() + offset2 - offset1);
		}
		return ret;
	}

	var ZonedDateTime = function(date, timeZoneData) {
		definePrivateProperty(this, "original", new Date(date.getTime()));
		definePrivateProperty(this, "local", new Date(date.getTime()));
		definePrivateProperty(this, "timeZoneData", timeZoneData);
		definePrivateProperty(this, "setWrap", setWrap);
		if (!(timeZoneData.untils && timeZoneData.offsets && timeZoneData.isdsts)) {
			throw new Error("Invalid IANA data");
		}
		// Shift `local` by the zone offset so its UTC getters act as local getters.
		this.setTime(this.local.getTime() - this.getTimezoneOffset() * 60 * 1000);
	};

	ZonedDateTime.prototype.clone = function() {
		return new ZonedDateTime(this.original, this.timeZoneData);
	};

	// Date field getters.
	["getFullYear", "getMonth", "getDate", "getDay", "getHours", "getMinutes",
		"getSeconds", "getMilliseconds"].forEach(function(method) {

		// Corresponding UTC method, e.g., "getUTCFullYear" if method === "getFullYear".
		var utcMethod = "getUTC" + method.substr(3);
		ZonedDateTime.prototype[method] = function() {
			return this.local[utcMethod]();
		};
	});

	// Note: Define .valueOf = .getTime for arithmetic operations like date1 - date2.
	ZonedDateTime.prototype.valueOf =
	ZonedDateTime.prototype.getTime = function() {
		return this.local.getTime() + this.getTimezoneOffset() * 60 * 1000;
	};

	// Offset in minutes (positive west of UTC, matching Date's convention).
	ZonedDateTime.prototype.getTimezoneOffset = function() {
		var index = getUntilsIndex(this.original, this.timeZoneData.untils);
		return this.timeZoneData.offsets[index];
	};

	// Date field setters.
	["setFullYear", "setMonth", "setDate", "setHours", "setMinutes", "setSeconds", "setMilliseconds"].forEach(function(method) {

		// Corresponding UTC method, e.g., "setUTCFullYear" if method === "setFullYear".
		var utcMethod = "setUTC" + method.substr(3);
		ZonedDateTime.prototype[method] = function(value) {
			var local = this.local;

			// Note setWrap is needed for seconds and milliseconds just because
			// abs(value) could be >= a minute.
			return this.setWrap(function() {
				return local[utcMethod](value);
			});
		};
	});

	ZonedDateTime.prototype.setTime = function(time) {
		return this.local.setTime(time);
	};

	// Whether the instant falls in a daylight-saving interval of the zone.
	ZonedDateTime.prototype.isDST = function() {
		var index = getUntilsIndex(this.original, this.timeZoneData.untils);
		return Boolean(this.timeZoneData.isdsts[index]);
	};

	// Debug-friendly representation (abbreviation or offset, plus DST flag).
	ZonedDateTime.prototype.inspect = function() {
		var index = getUntilsIndex(this.original, this.timeZoneData.untils);
		var abbrs = this.timeZoneData.abbrs;
		return this.local.toISOString().replace(/Z$/, "") + " " +
			(abbrs && abbrs[index] + " " || (this.getTimezoneOffset() * -1) + " ") +
			(this.isDST() ? "(daylight savings)" : "");
	};

	ZonedDateTime.prototype.toDate = function() {
		return new Date(this.getTime());
	};

	// Type cast getters.
	["toISOString", "toJSON", "toUTCString"].forEach(function(method) {
		ZonedDateTime.prototype[method] = function() {
			return this.toDate()[method]();
		};
	});

	return ZonedDateTime;
}());
/**
 * isLeapYear( year )
 *
 * @year [Number]
 *
 * Returns an indication whether the specified year is a leap year. Uses Date
 * rollover (Feb 29 of a non-leap year rolls into March) rather than
 * divisibility arithmetic, which also preserves Date's mapping of two-digit
 * years onto 19xx.
 */
var dateIsLeapYear = function( year ) {
	return new Date( year, 1, 29 ).getDate() === 29;
};
/**
 * lastDayOfMonth( date )
 *
 * @date [Date]
 *
 * Return the last day of the given date's month (day 0 of the following
 * month rolls back to the final day of this one).
 */
var dateLastDayOfMonth = function( date ) {
	var rollover = new Date( date.getFullYear(), date.getMonth() + 1, 0 );
	return rollover.getDate();
};
/**
 * startOf changes the input to the beginning of the given unit.
 *
 * For example, starting at the start of a day, resets hours, minutes
 * seconds and milliseconds to 0. Starting at the month does the same, but
 * also sets the date to 1. Unrecognized units leave the copy untouched.
 *
 * Returns the modified copy (the input is never mutated).
 */
var dateStartOf = function( date, unit ) {
	var copy = date instanceof ZonedDateTime ? date.clone() : new Date( date.getTime() );
	var resetOrder = [ "year", "month", "day", "hour", "minute", "second" ];
	var from = resetOrder.indexOf( unit );

	// Reset every field at or below the requested unit, coarsest first.
	if ( from > -1 ) {
		if ( from <= 0 ) {
			copy.setMonth( 0 );
		}
		if ( from <= 1 ) {
			copy.setDate( 1 );
		}
		if ( from <= 2 ) {
			copy.setHours( 0 );
		}
		if ( from <= 3 ) {
			copy.setMinutes( 0 );
		}
		if ( from <= 4 ) {
			copy.setSeconds( 0 );
		}
		if ( from <= 5 ) {
			copy.setMilliseconds( 0 );
		}
	}
	return copy;
};
/**
 * Differently from native date.setDate(), this function clamps the day so it
 * remains inside the month boundaries. For example:
 *
 * setDate( FebDate, 31 ): a "Feb 28" date.
 * setDate( SepDate, 31 ): a "Sep 30" date.
 */
var dateSetDate = function( date, day ) {
	var lastDay = new Date( date.getFullYear(), date.getMonth() + 1, 0 ).getDate();
	if ( day < 1 ) {
		date.setDate( 1 );
	} else if ( day < lastDay ) {
		date.setDate( day );
	} else {
		date.setDate( lastDay );
	}
};

/**
 * Differently from native date.setMonth(), this function adjusts the day if
 * needed, so the final month is always the one set. For example:
 *
 * setMonth( Jan31Date, 1 ): a "Feb 28" date.
 * setMonth( Jan31Date, 8 ): a "Sep 30" date.
 */
var dateSetMonth = function( date, month ) {
	var requestedDay = date.getDate();
	date.setDate( 1 );
	date.setMonth( month );
	dateSetDate( date, requestedDay );
};
// Whether `value` lies outside the inclusive [low, high] range. NaN compares
// false on both checks and is therefore treated as in-range.
var outOfRange = function( value, low, high ) {
	if ( value < low ) {
		return true;
	}
	return value > high;
};
/**
 * parse( value, tokens, properties )
 *
 * @value [String] string date.
 *
 * @tokens [Object] tokens returned by date/tokenizer.
 *
 * @properties [Object] output returned by date/tokenizer-properties.
 *
 * Returns a Date on success, or null when tokens are empty, any staged field
 * is out of range, or a 12-hour time lacks its AM/PM marker (and vice versa).
 *
 * ref: http://www.unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
 */
var dateParse = function( value, tokens, properties ) {
var amPm, day, daysOfYear, month, era, hour, hour12, timezoneOffset, valid,
YEAR = 0,
MONTH = 1,
DAY = 2,
HOUR = 3,
MINUTE = 4,
SECOND = 5,
MILLISECONDS = 6,
date = new Date(),
truncateAt = [],
units = [ "year", "month", "day", "hour", "minute", "second", "milliseconds" ];
// Create globalize date with given timezone data.
if ( properties.timeZoneData ) {
date = new ZonedDateTime( date, properties.timeZoneData() );
}
if ( !tokens.length ) {
return null;
}
// Walk the tokens, staging date fields; any out-of-range value aborts the
// whole parse (every() short-circuits on the first `false`).
valid = tokens.every(function( token ) {
var century, chr, value, length;
if ( token.type === "literal" ) {
// continue
return true;
}
chr = token.type.charAt( 0 );
length = token.type.length;
if ( chr === "j" ) {
// Locale preferred hHKk.
// http://www.unicode.org/reports/tr35/tr35-dates.html#Time_Data
chr = properties.preferredTimeData;
}
switch ( chr ) {
// Era
case "G":
truncateAt.push( YEAR );
era = +token.value;
break;
// Year
case "y":
value = token.value;
if ( length === 2 ) {
if ( outOfRange( value, 0, 99 ) ) {
return false;
}
// mimic dojo/date/locale: choose century to apply, according to a sliding
// window of 80 years before and 20 years after present year.
century = Math.floor( date.getFullYear() / 100 ) * 100;
value += century;
if ( value > date.getFullYear() + 20 ) {
value -= 100;
}
}
date.setFullYear( value );
truncateAt.push( YEAR );
break;
case "Y": // Year in "Week of Year"
throw createErrorUnsupportedFeature({
feature: "year pattern `" + chr + "`"
});
// Quarter (skip)
case "Q":
case "q":
break;
// Month
case "M":
case "L":
if ( length <= 2 ) {
value = token.value;
} else {
value = +token.value;
}
if ( outOfRange( value, 1, 12 ) ) {
return false;
}
// Setting the month later so that we have the correct year and can determine
// the correct last day of February in case of leap year.
month = value;
truncateAt.push( MONTH );
break;
// Week (skip)
case "w": // Week of Year.
case "W": // Week of Month.
break;
// Day
case "d":
// Day-of-month is staged; it is validated and applied after the month is
// set below, so the last-day-of-month check uses the right month/year.
day = token.value;
truncateAt.push( DAY );
break;
case "D":
daysOfYear = token.value;
truncateAt.push( DAY );
break;
case "F":
// Day of Week in month. eg. 2nd Wed in July.
// Skip
break;
// Week day
case "e":
case "c":
case "E":
// Skip.
// value = arrayIndexOf( dateWeekDays, token.value );
break;
// Period (AM or PM)
case "a":
amPm = token.value;
break;
// Hour
case "h": // 1-12
value = token.value;
if ( outOfRange( value, 1, 12 ) ) {
return false;
}
hour = hour12 = true;
// 12 is stored as 0 here; the PM adjustment below adds 12 when needed.
date.setHours( value === 12 ? 0 : value );
truncateAt.push( HOUR );
break;
case "K": // 0-11
value = token.value;
if ( outOfRange( value, 0, 11 ) ) {
return false;
}
hour = hour12 = true;
date.setHours( value );
truncateAt.push( HOUR );
break;
case "k": // 1-24
value = token.value;
if ( outOfRange( value, 1, 24 ) ) {
return false;
}
hour = true;
date.setHours( value === 24 ? 0 : value );
truncateAt.push( HOUR );
break;
case "H": // 0-23
value = token.value;
if ( outOfRange( value, 0, 23 ) ) {
return false;
}
hour = true;
date.setHours( value );
truncateAt.push( HOUR );
break;
// Minute
case "m":
value = token.value;
if ( outOfRange( value, 0, 59 ) ) {
return false;
}
date.setMinutes( value );
truncateAt.push( MINUTE );
break;
// Second
case "s":
value = token.value;
if ( outOfRange( value, 0, 59 ) ) {
return false;
}
date.setSeconds( value );
truncateAt.push( SECOND );
break;
case "A":
// Milliseconds-in-day: zero out h/m/s, then fall through to apply the
// fractional part the same way "S" does.
date.setHours( 0 );
date.setMinutes( 0 );
date.setSeconds( 0 );
/* falls through */
case "S":
value = Math.round( token.value * Math.pow( 10, 3 - length ) );
date.setMilliseconds( value );
truncateAt.push( MILLISECONDS );
break;
// Zone
case "z":
case "Z":
case "O":
case "v":
case "V":
case "X":
case "x":
// Numeric token.value means an offset in minutes was parsed; name-only
// zone tokens (value null) are ignored here.
if ( typeof token.value === "number" ) {
timezoneOffset = token.value;
}
break;
}
return true;
});
if ( !valid ) {
return null;
}
// 12-hour format needs AM or PM, 24-hour format doesn't, ie. return null
// if amPm && !hour12 || !amPm && hour12.
if ( hour && !( !amPm ^ hour12 ) ) {
return null;
}
if ( era === 0 ) {
// 1 BC = year 0
date.setFullYear( date.getFullYear() * -1 + 1 );
}
if ( month !== undefined ) {
dateSetMonth( date, month - 1 );
}
if ( day !== undefined ) {
if ( outOfRange( day, 1, dateLastDayOfMonth( date ) ) ) {
return null;
}
date.setDate( day );
} else if ( daysOfYear !== undefined ) {
if ( outOfRange( daysOfYear, 1, dateIsLeapYear( date.getFullYear() ) ? 366 : 365 ) ) {
return null;
}
date.setMonth( 0 );
date.setDate( daysOfYear );
}
if ( hour12 && amPm === "pm" ) {
date.setHours( date.getHours() + 12 );
}
if ( timezoneOffset !== undefined ) {
// Shift from the parsed zone offset to the environment's local offset.
date.setMinutes( date.getMinutes() + timezoneOffset - date.getTimezoneOffset() );
}
// Truncate date at the most precise unit defined. Eg.
// If value is "12/31", and pattern is "MM/dd":
// => new Date( <current Year>, 12, 31, 0, 0, 0, 0 );
truncateAt = Math.max.apply( null, truncateAt );
date = dateStartOf( date, units[ truncateAt ] );
// Get date back from globalize date.
if ( date instanceof ZonedDateTime ) {
date = date.toDate();
}
return date;
};
/**
 * tokenizer( value, numberParser, properties )
 *
 * @value [String] string date.
 *
 * @numberParser [Function]
 *
 * @properties [Object] output returned by date/tokenizer-properties.
 *
 * Returns an Array of tokens, eg. value "5 o'clock PM", pattern "h 'o''clock' a":
 * [{
 * type: "h",
 * lexeme: "5"
 * }, {
 * type: "literal",
 * lexeme: " "
 * }, {
 * type: "literal",
 * lexeme: "o'clock"
 * }, {
 * type: "literal",
 * lexeme: " "
 * }, {
 * type: "a",
 * lexeme: "PM",
 * value: "pm"
 * }]
 *
 * OBS: lexeme's are always String and may return invalid ranges depending of the token type.
 * Eg. "99" for month number.
 *
 * Return an empty Array when not successfully parsed.
 */
var dateTokenizer = function( value, numberParser, properties ) {
var digitsRe, valid,
tokens = [],
widths = [ "abbreviated", "wide", "narrow" ];
digitsRe = properties.digitsRe;
value = looseMatching( value );
// For each pattern field, build a regexp (tokenRe), match it against the head
// of `value`, and consume the matched lexeme. every() aborts on first failure.
valid = properties.pattern.match( datePatternRe ).every(function( current ) {
var aux, chr, length, numeric, tokenRe,
token = {};
// Parses a GMT hour-format match into token.value (minutes). The sign is
// inverted on "+" matches, mirroring Date#getTimezoneOffset's convention.
function hourFormatParse( tokenRe, numberParser ) {
var aux, isPositive,
match = value.match( tokenRe );
numberParser = numberParser || function( value ) {
return +value;
};
if ( !match ) {
return false;
}
isPositive = match[ 1 ];
// hourFormat containing H only, e.g., `+H;-H`
if ( match.length < 6 ) {
aux = isPositive ? 1 : 3;
token.value = numberParser( match[ aux ] ) * 60;
// hourFormat containing H and m, e.g., `+HHmm;-HHmm`
} else if ( match.length < 10 ) {
aux = isPositive ? [ 1, 3 ] : [ 5, 7 ];
token.value = numberParser( match[ aux[ 0 ] ] ) * 60 +
numberParser( match[ aux[ 1 ] ] );
// hourFormat containing H, m, and s e.g., `+HHmmss;-HHmmss`
} else {
aux = isPositive ? [ 1, 3, 5 ] : [ 7, 9, 11 ];
token.value = numberParser( match[ aux[ 0 ] ] ) * 60 +
numberParser( match[ aux[ 1 ] ] ) +
numberParser( match[ aux[ 2 ] ] ) / 60;
}
if ( isPositive ) {
token.value *= -1;
}
return true;
}
// The next four helpers set `numeric` and `tokenRe` as a side effect when the
// field length matches; they return undefined otherwise so `||` chains work.
function oneDigitIfLengthOne() {
if ( length === 1 ) {
// Unicode equivalent to /\d/
numeric = true;
return tokenRe = digitsRe;
}
}
function oneOrTwoDigitsIfLengthOne() {
if ( length === 1 ) {
// Unicode equivalent to /\d\d?/
numeric = true;
return tokenRe = new RegExp( "^(" + digitsRe.source + "){1,2}" );
}
}
function oneOrTwoDigitsIfLengthOneOrTwo() {
if ( length === 1 || length === 2 ) {
// Unicode equivalent to /\d\d?/
numeric = true;
return tokenRe = new RegExp( "^(" + digitsRe.source + "){1,2}" );
}
}
function twoDigitsIfLengthTwo() {
if ( length === 2 ) {
// Unicode equivalent to /\d\d/
numeric = true;
return tokenRe = new RegExp( "^(" + digitsRe.source + "){2}" );
}
}
// Brute-force test every locale entry in an attempt to match the given value.
// Return the first found one (and set token accordingly), or null.
function lookup( path ) {
var array = properties[ path.join( "/" ) ];
if ( !array ) {
return null;
}
// array of pairs [key, value] sorted by desc value length.
array.some(function( item ) {
var valueRe = item[ 1 ];
if ( valueRe.test( value ) ) {
token.value = item[ 0 ];
tokenRe = item[ 1 ];
return true;
}
});
return null;
}
token.type = current;
chr = current.charAt( 0 );
length = current.length;
if ( chr === "Z" ) {
// Z..ZZZ: same as "xxxx".
if ( length < 4 ) {
chr = "x";
length = 4;
// ZZZZ: same as "OOOO".
} else if ( length < 5 ) {
chr = "O";
length = 4;
// ZZZZZ: same as "XXXXX"
} else {
chr = "X";
length = 5;
}
}
if ( chr === "z" ) {
if ( properties.standardOrDaylightTzName ) {
token.value = null;
tokenRe = properties.standardOrDaylightTzName;
}
}
// v...vvv: "{shortRegion}", eg. "PT".
// vvvv: "{regionName} {Time}" or "{regionName} {Time}",
// e.g., "Pacific Time"
// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
if ( chr === "v" ) {
if ( properties.genericTzName ) {
token.value = null;
tokenRe = properties.genericTzName;
// Fall back to "V" format.
} else {
chr = "V";
length = 4;
}
}
if ( chr === "V" && properties.timeZoneName ) {
token.value = length === 2 ? properties.timeZoneName : null;
tokenRe = properties.timeZoneNameRe;
}
switch ( chr ) {
// Era
case "G":
lookup([
"gregorian/eras",
length <= 3 ? "eraAbbr" : ( length === 4 ? "eraNames" : "eraNarrow" )
]);
break;
// Year
case "y":
case "Y":
numeric = true;
// number l=1:+, l=2:{2}, l=3:{3,}, l=4:{4,}, ...
if ( length === 1 ) {
// Unicode equivalent to /\d+/.
tokenRe = new RegExp( "^(" + digitsRe.source + ")+" );
} else if ( length === 2 ) {
// Lenient parsing: there's no year pattern to indicate non-zero-padded 2-digits
// year, so parser accepts both zero-padded and non-zero-padded for `yy`.
//
// Unicode equivalent to /\d\d?/
tokenRe = new RegExp( "^(" + digitsRe.source + "){1,2}" );
} else {
// Unicode equivalent to /\d{length,}/
tokenRe = new RegExp( "^(" + digitsRe.source + "){" + length + ",}" );
}
break;
// Quarter
case "Q":
case "q":
// number l=1:{1}, l=2:{2}.
// lookup l=3...
oneDigitIfLengthOne() || twoDigitsIfLengthTwo() ||
lookup([
"gregorian/quarters",
chr === "Q" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
break;
// Month
case "M":
case "L":
// number l=1:{1,2}, l=2:{2}.
// lookup l=3...
//
// Lenient parsing: skeleton "yMd" (i.e., one M) may include MM for the pattern,
// therefore parser accepts both zero-padded and non-zero-padded for M and MM.
// Similar for L.
oneOrTwoDigitsIfLengthOneOrTwo() || lookup([
"gregorian/months",
chr === "M" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
break;
// Day
case "D":
// number {l,3}.
if ( length <= 3 ) {
// Equivalent to /\d{length,3}/
numeric = true;
tokenRe = new RegExp( "^(" + digitsRe.source + "){" + length + ",3}" );
}
break;
case "W":
case "F":
// number l=1:{1}.
oneDigitIfLengthOne();
break;
// Week day
case "e":
case "c":
// number l=1:{1}, l=2:{2}.
// lookup for length >=3.
if ( length <= 2 ) {
oneDigitIfLengthOne() || twoDigitsIfLengthTwo();
break;
}
/* falls through */
case "E":
if ( length === 6 ) {
// Note: if short day names are not explicitly specified, abbreviated day
// names are used instead http://www.unicode.org/reports/tr35/tr35-dates.html#months_days_quarters_eras
lookup([
"gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
"short"
]) || lookup([
"gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
"abbreviated"
]);
} else {
lookup([
"gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
widths[ length < 3 ? 0 : length - 3 ]
]);
}
break;
// Period (AM or PM)
case "a":
lookup([
"gregorian/dayPeriods/format/wide"
]);
break;
// Week
case "w":
// number l1:{1,2}, l2:{2}.
oneOrTwoDigitsIfLengthOne() || twoDigitsIfLengthTwo();
break;
// Day, Hour, Minute, or Second
case "d":
case "h":
case "H":
case "K":
case "k":
case "j":
case "m":
case "s":
// number l1:{1,2}, l2:{2}.
//
// Lenient parsing:
// - skeleton "hms" (i.e., one m) always includes mm for the pattern, i.e., it's
// impossible to use a different skeleton to parse non-zero-padded minutes,
// therefore parser accepts both zero-padded and non-zero-padded for m. Similar
// for seconds s.
// - skeleton "hms" (i.e., one h) may include h or hh for the pattern, i.e., it's
// impossible to use a different skeleton to parser non-zero-padded hours for some
// locales, therefore parser accepts both zero-padded and non-zero-padded for h.
// Similar for d (in skeleton yMd).
oneOrTwoDigitsIfLengthOneOrTwo();
break;
case "S":
// number {l}.
// Unicode equivalent to /\d{length}/
numeric = true;
tokenRe = new RegExp( "^(" + digitsRe.source + "){" + length + "}" );
break;
case "A":
// number {l+5}.
// Unicode equivalent to /\d{length+5}/
numeric = true;
tokenRe = new RegExp( "^(" + digitsRe.source + "){" + ( length + 5 ) + "}" );
break;
// Zone
case "v":
case "V":
case "z":
// If a named-zone regexp was set above and matches, use it as-is.
if ( tokenRe && tokenRe.test( value ) ) {
break;
}
if ( chr === "V" && length === 2 ) {
break;
}
/* falls through */
case "O":
// O: "{gmtFormat}+H;{gmtFormat}-H" or "{gmtZeroFormat}", eg. "GMT-8" or "GMT".
// OOOO: "{gmtFormat}{hourFormat}" or "{gmtZeroFormat}", eg. "GMT-08:00" or "GMT".
if ( value === properties[ "timeZoneNames/gmtZeroFormat" ] ) {
token.value = 0;
tokenRe = properties[ "timeZoneNames/gmtZeroFormatRe" ];
} else {
aux = properties[ "timeZoneNames/hourFormat" ].some(function( hourFormatRe ) {
if ( hourFormatParse( hourFormatRe, numberParser ) ) {
tokenRe = hourFormatRe;
return true;
}
});
if ( !aux ) {
return null;
}
}
break;
case "X":
// Same as x*, except it uses "Z" for zero offset.
if ( value === "Z" ) {
token.value = 0;
tokenRe = /^Z/;
break;
}
/* falls through */
case "x":
// x: hourFormat("+HH[mm];-HH[mm]")
// xx: hourFormat("+HHmm;-HHmm")
// xxx: hourFormat("+HH:mm;-HH:mm")
// xxxx: hourFormat("+HHmm[ss];-HHmm[ss]")
// xxxxx: hourFormat("+HH:mm[:ss];-HH:mm[:ss]")
aux = properties.x.some(function( hourFormatRe ) {
if ( hourFormatParse( hourFormatRe ) ) {
tokenRe = hourFormatRe;
return true;
}
});
if ( !aux ) {
return null;
}
break;
case "'":
token.type = "literal";
tokenRe = new RegExp( "^" + regexpEscape( removeLiteralQuotes( current ) ) );
break;
default:
token.type = "literal";
tokenRe = new RegExp( "^" + regexpEscape( current ) );
}
if ( !tokenRe ) {
return false;
}
// Get lexeme and consume it.
value = value.replace( tokenRe, function( lexeme ) {
token.lexeme = lexeme;
if ( numeric ) {
token.value = numberParser( lexeme );
}
return "";
});
if ( !token.lexeme ) {
return false;
}
if ( numeric && isNaN( token.value ) ) {
return false;
}
tokens.push( token );
return true;
});
// Any unconsumed input means the value did not fully match the pattern.
if ( value !== "" ) {
valid = false;
}
return valid ? tokens : [];
};
/**
 * Builds a date parser closure bound to the prepared number parser and
 * tokenizer/parse properties. The returned parser validates its input,
 * tokenizes it, and returns the parsed Date or null on failure.
 */
var dateParserFn = function( numberParser, parseProperties, tokenizerProperties ) {
	return function dateParser( value ) {
		validateParameterPresence( value, "value" );
		validateParameterTypeString( value, "value" );
		var tokens = dateTokenizer( value, numberParser, tokenizerProperties );
		return dateParse( value, tokens, parseProperties ) || null;
	};
};
/**
 * Returns a shallow copy of `object` keeping only the (enumerable) keys that
 * match `testRe`. Uses for-in deliberately so inherited enumerable keys are
 * considered, exactly like the original implementation.
 */
var objectFilter = function( object, testRe ) {
	var filtered = {};
	var key;
	for ( key in object ) {
		if ( !testRe.test( key ) ) {
			continue;
		}
		filtered[ key ] = object[ key ];
	}
	return filtered;
};
/**
 * tokenizerProperties( pattern, cldr )
 *
 * @pattern [String] raw pattern.
 *
 * @cldr [Cldr instance].
 *
 * Return Object with data that will be used by tokenizer.
 *
 * Works by walking the pattern fields and triggering cldr.main() reads; a
 * temporary "get" listener copies every value read into `properties`.
 */
var dateTokenizerProperties = function( pattern, cldr, timeZone ) {
var digitsReSource,
properties = {
pattern: looseMatching( pattern )
},
timeSeparator = numberSymbol( "timeSeparator", cldr ),
widths = [ "abbreviated", "wide", "narrow" ];
digitsReSource = numberNumberingSystemDigitsMap( cldr );
digitsReSource = digitsReSource ? "[" + digitsReSource + "]" : "\\d";
properties.digitsRe = new RegExp( digitsReSource );
// Transform:
// - "+H;-H" -> /\+(\d\d?)|-(\d\d?)/
// - "+HH;-HH" -> /\+(\d\d)|-(\d\d)/
// - "+HHmm;-HHmm" -> /\+(\d\d)(\d\d)|-(\d\d)(\d\d)/
// - "+HH:mm;-HH:mm" -> /\+(\d\d):(\d\d)|-(\d\d):(\d\d)/
//
// If gmtFormat is GMT{0}, the regexp must fill {0} in each side, e.g.:
// - "+H;-H" -> /GMT\+(\d\d?)|GMT-(\d\d?)/
function hourFormatRe( hourFormat, gmtFormat, digitsReSource, timeSeparator ) {
var re;
if ( !digitsReSource ) {
digitsReSource = "\\d";
}
if ( !gmtFormat ) {
gmtFormat = "{0}";
}
re = hourFormat
.replace( "+", "\\+" )
// Unicode equivalent to (\\d\\d)
.replace( /HH|mm|ss/g, "((" + digitsReSource + "){2})" )
// Unicode equivalent to (\\d\\d?)
.replace( /H|m/g, "((" + digitsReSource + "){1,2})" );
if ( timeSeparator ) {
re = re.replace( /:/g, timeSeparator );
}
re = re.split( ";" ).map(function( part ) {
return gmtFormat.replace( "{0}", part );
}).join( "|" );
return new RegExp( "^" + re );
}
// "get" listener: records each CLDR value read below under a shortened path
// key, pre-compiling lookup tables into regexps sorted for longest-match.
function populateProperties( path, value ) {
// Skip
var skipRe = /(timeZoneNames\/zone|supplemental\/metaZones|timeZoneNames\/metazone|timeZoneNames\/regionFormat|timeZoneNames\/gmtFormat)/;
if ( skipRe.test( path ) ) {
return;
}
if ( !value ) {
return;
}
// The `dates` and `calendars` trim's purpose is to reduce properties' key size only.
path = path.replace( /^.*\/dates\//, "" ).replace( /calendars\//, "" );
// Specific filter for "gregorian/dayPeriods/format/wide".
if ( path === "gregorian/dayPeriods/format/wide" ) {
value = objectFilter( value, /^am|^pm/ );
}
// Transform object into array of pairs [key, /value/], sort by desc value length.
if ( isPlainObject( value ) ) {
value = Object.keys( value ).map(function( key ) {
return [ key, new RegExp( "^" + regexpEscape( looseMatching( value[ key ] ) ) ) ];
}).sort(function( a, b ) {
return b[ 1 ].source.length - a[ 1 ].source.length;
});
// If typeof value === "string".
} else {
value = looseMatching( value );
}
properties[ path ] = value;
}
// Builds "(a|b|...)" from the non-empty terms.
function regexpSourceSomeTerm( terms ) {
return "(" + terms.filter(function( item ) {
return item;
}).reduce(function( memo, item ) {
return memo + "|" + item;
}) + ")";
}
cldr.on( "get", populateProperties );
pattern.match( datePatternRe ).forEach(function( current ) {
var aux, chr, daylightTzName, gmtFormat, length, standardTzName;
chr = current.charAt( 0 );
length = current.length;
if ( chr === "Z" ) {
if ( length < 5 ) {
chr = "O";
length = 4;
} else {
chr = "X";
length = 5;
}
}
// z...zzz: "{shortRegion}", eg. "PST" or "PDT".
// zzzz: "{regionName} {Standard Time}" or "{regionName} {Daylight Time}",
// e.g., "Pacific Standard Time" or "Pacific Daylight Time".
// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
if ( chr === "z" ) {
standardTzName = dateGetTimeZoneName( length, "standard", timeZone, cldr );
daylightTzName = dateGetTimeZoneName( length, "daylight", timeZone, cldr );
if ( standardTzName ) {
standardTzName = regexpEscape( looseMatching( standardTzName ) );
}
if ( daylightTzName ) {
daylightTzName = regexpEscape( looseMatching( daylightTzName ) );
}
if ( standardTzName || daylightTzName ) {
properties.standardOrDaylightTzName = new RegExp(
"^" + regexpSourceSomeTerm([ standardTzName, daylightTzName ])
);
}
// Fall through the "O" format in case one name is missing.
if ( !standardTzName || !daylightTzName ) {
chr = "O";
if ( length < 4 ) {
length = 1;
}
}
}
// v...vvv: "{shortRegion}", eg. "PT".
// vvvv: "{regionName} {Time}" or "{regionName} {Time}",
// e.g., "Pacific Time"
// http://unicode.org/reports/tr35/tr35-dates.html#Date_Format_Patterns
if ( chr === "v" ) {
if ( length !== 1 && length !== 4 ) {
throw createErrorUnsupportedFeature({
feature: "timezone pattern `" + pattern + "`"
});
}
var genericTzName = dateGetTimeZoneName( length, "generic", timeZone, cldr );
if ( genericTzName ) {
properties.genericTzName = new RegExp(
"^" + regexpEscape( looseMatching( genericTzName ) )
);
chr = "O";
// Fall back to "V" format.
} else {
chr = "V";
length = 4;
}
}
switch ( chr ) {
// Era
case "G":
cldr.main([
"dates/calendars/gregorian/eras",
length <= 3 ? "eraAbbr" : ( length === 4 ? "eraNames" : "eraNarrow" )
]);
break;
// Year
case "u": // Extended year. Need to be implemented.
case "U": // Cyclic year name. Need to be implemented.
throw createErrorUnsupportedFeature({
feature: "year pattern `" + chr + "`"
});
// Quarter
case "Q":
case "q":
if ( length > 2 ) {
cldr.main([
"dates/calendars/gregorian/quarters",
chr === "Q" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
}
break;
// Month
case "M":
case "L":
// number l=1:{1,2}, l=2:{2}.
// lookup l=3...
if ( length > 2 ) {
cldr.main([
"dates/calendars/gregorian/months",
chr === "M" ? "format" : "stand-alone",
widths[ length - 3 ]
]);
}
break;
// Day
case "g":
// Modified Julian day. Need to be implemented.
throw createErrorUnsupportedFeature({
feature: "Julian day pattern `g`"
});
// Week day
case "e":
case "c":
// lookup for length >=3.
if ( length <= 2 ) {
break;
}
/* falls through */
case "E":
if ( length === 6 ) {
// Note: if short day names are not explicitly specified, abbreviated day
// names are used instead http://www.unicode.org/reports/tr35/tr35-dates.html#months_days_quarters_eras
cldr.main([
"dates/calendars/gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
"short"
]) || cldr.main([
"dates/calendars/gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
"abbreviated"
]);
} else {
cldr.main([
"dates/calendars/gregorian/days",
[ chr === "c" ? "stand-alone" : "format" ],
widths[ length < 3 ? 0 : length - 3 ]
]);
}
break;
// Period (AM or PM)
case "a":
cldr.main(
"dates/calendars/gregorian/dayPeriods/format/wide"
);
break;
// Zone
case "V":
if ( length === 1 ) {
throw createErrorUnsupportedFeature({
feature: "timezone pattern `" + pattern + "`"
});
}
if ( timeZone ) {
if ( length === 2 ) {
// Skip looseMatching processing since timeZone is a canonical posix value.
properties.timeZoneName = timeZone;
properties.timeZoneNameRe = new RegExp( "^" + regexpEscape( timeZone ) );
break;
}
var timeZoneName,
exemplarCity = cldr.main([
"dates/timeZoneNames/zone", timeZone, "exemplarCity"
]);
if ( length === 3 ) {
if ( !exemplarCity ) {
exemplarCity = cldr.main([
"dates/timeZoneNames/zone/Etc/Unknown/exemplarCity"
]);
}
timeZoneName = exemplarCity;
}
if ( exemplarCity && length === 4 ) {
timeZoneName = formatMessage(
cldr.main(
"dates/timeZoneNames/regionFormat"
),
[ exemplarCity ]
);
}
if ( timeZoneName ) {
timeZoneName = looseMatching( timeZoneName );
properties.timeZoneName = timeZoneName;
properties.timeZoneNameRe = new RegExp(
"^" + regexpEscape( timeZoneName )
);
}
}
if ( current === "v" ) {
length = 1;
}
/* falls through */
case "z":
case "O":
gmtFormat = cldr.main( "dates/timeZoneNames/gmtFormat" );
cldr.main( "dates/timeZoneNames/gmtZeroFormat" );
cldr.main( "dates/timeZoneNames/hourFormat" );
properties[ "timeZoneNames/gmtZeroFormatRe" ] =
new RegExp( "^" + regexpEscape( properties[ "timeZoneNames/gmtZeroFormat" ] ) );
// Compile the CLDR hourFormat(s) into anchored regexps; short lengths also
// accept the H-only variant for lenient parsing.
aux = properties[ "timeZoneNames/hourFormat" ];
properties[ "timeZoneNames/hourFormat" ] = (
length < 4 ?
[ dateTimezoneHourFormatHm( aux, "H" ), dateTimezoneHourFormatH( aux ) ] :
[ dateTimezoneHourFormatHm( aux, "HH" ) ]
).map(function( hourFormat ) {
return hourFormatRe(
hourFormat,
gmtFormat,
digitsReSource,
timeSeparator
);
});
/* falls through */
case "X":
case "x":
// x: hourFormat("+HH[mm];-HH[mm]")
// xx: hourFormat("+HHmm;-HHmm")
// xxx: hourFormat("+HH:mm;-HH:mm")
// xxxx: hourFormat("+HHmm[ss];-HHmm[ss]")
// xxxxx: hourFormat("+HH:mm[:ss];-HH:mm[:ss]")
properties.x = [
[ "+HHmm;-HHmm", "+HH;-HH" ],
[ "+HHmm;-HHmm" ],
[ "+HH:mm;-HH:mm" ],
[ "+HHmmss;-HHmmss", "+HHmm;-HHmm" ],
[ "+HH:mm:ss;-HH:mm:ss", "+HH:mm;-HH:mm" ]
][ length - 1 ].map(function( hourFormat ) {
return hourFormatRe( hourFormat );
});
}
});
// Stop recording CLDR reads.
cldr.off( "get", populateProperties );
return properties;
};
/**
 * dayOfWeek( date, firstDay )
 *
 * @date
 *
 * @firstDay the result of `dateFirstDayOfWeek( cldr )`
 *
 * Return the day of the week normalized by the territory's firstDay [0-6].
 * Eg for "mon":
 * - return 0 if territory is GB, or BR, or DE, or FR (week starts on "mon");
 * - return 1 if territory is US (week starts on "sun");
 * - return 2 if territory is EG (week starts on "sat");
 */
var dateDayOfWeek = function( date, firstDay ) {
	// Shift the native day index so `firstDay` maps to 0; the +7 keeps the
	// modulo result non-negative.
	var offsetFromFirstDay = date.getDay() - firstDay;
	return ( offsetFromFirstDay + 7 ) % 7;
};
/**
 * distanceInDays( from, to )
 *
 * Return the (possibly fractional) distance in days between the `from` and
 * `to` Dates, computed from their epoch milliseconds.
 */
var dateDistanceInDays = function( from, to ) {
	var msPerDay = 864e5;
	var deltaMs = to.getTime() - from.getTime();
	return deltaMs / msPerDay;
};
/**
 * dayOfYear
 *
 * Return the distance in days of the date to the begin of the year [0-d].
 */
var dateDayOfYear = function( date ) {
	// Whole days elapsed since the start of `date`'s year (Jan 1 => 0).
	var yearStart = dateStartOf( date, "year" );
	return Math.floor( dateDistanceInDays( yearStart, date ) );
};
// Invert key and values, e.g., {"year": "yY"} ==> {"y": "year", "Y": "year"}
// Maps each CLDR pattern letter to the part-type name that dateFormat emits
// in its { type, value } parts output.
var dateFieldsMap = objectInvert({
"era": "G",
"year": "yY",
"quarter": "qQ",
"month": "ML",
"week": "wW",
"day": "dDF",
"weekday": "ecE",
"dayperiod": "a",
"hour": "hHkK",
"minute": "m",
"second": "sSA",
"zone": "zvVOxX"
}, function( object, key, value ) {
// Fan out each pattern letter in `value` to point back at the field `key`.
value.split( "" ).forEach(function( symbol ) {
object[ symbol ] = key;
});
return object;
});
/**
 * millisecondsInDay
 *
 * Return the milliseconds elapsed between the start of the date's day and the
 * date itself (numeric subtraction coerces both Dates to epoch ms).
 */
var dateMillisecondsInDay = function( date ) {
	// TODO Handle daylight savings discontinuities
	var dayStart = dateStartOf( date, "day" );
	return date - dayStart;
};
/**
 * hourFormat( date, format, timeSeparator, formatNumber )
 *
 * Return date's timezone offset according to the format passed.
 * Eg for format when timezone offset is 180:
 * - "+H;-H": -3
 * - "+HHmm;-HHmm": -0300
 * - "+HH:mm;-HH:mm": -03:00
 * - "+HH:mm:ss;-HH:mm:ss": -03:00:00
 */
var dateTimezoneHourFormat = function( date, format, timeSeparator, formatNumber ) {
	var offsetInMinutes = date.getTimezoneOffset();
	var magnitude = Math.abs( offsetInMinutes );
	// Default padders; only built when no formatters were supplied.
	var pad = formatNumber || {
		1: function( value ) {
			return stringPad( value, 1 );
		},
		2: function( value ) {
			return stringPad( value, 2 );
		}
	};
	// getTimezoneOffset() is positive west of GMT, so a positive offset picks
	// the "-" half of the format.
	var result = format.split( ";" )[ offsetInMinutes > 0 ? 1 : 0 ];
	// Localize the (first) time separator.
	result = result.replace( ":", timeSeparator );
	// Fill in the hours field, padded per the H/HH width.
	result = result.replace( /HH?/, function( match ) {
		return pad[ match.length ]( Math.floor( magnitude / 60 ) );
	});
	// Fill in the minutes field.
	result = result.replace( /mm/, function() {
		return pad[ 2 ]( Math.floor( magnitude % 60 ) );
	});
	// Fill in the seconds field (fractional minutes scaled to seconds).
	return result.replace( /ss/, function() {
		return pad[ 2 ]( Math.floor( magnitude % 1 * 60 ) );
	});
};
/**
 * format( date, properties )
 *
 * @date [Date instance].
 *
 * @properties
 *
 * Returns an Array of { type, value } parts, where `type` is a date-field
 * name from dateFieldsMap or "literal"; consecutive literals are merged.
 *
 * TODO Support other calendar types.
 *
 * Disclosure: this function borrows excerpts of dojo/date/locale.
 */
var dateFormat = function( date, numberFormatters, properties ) {
var parts = [];
var timeSeparator = properties.timeSeparator;
// create globalize date with given timezone data
if ( properties.timeZoneData ) {
date = new ZonedDateTime( date, properties.timeZoneData() );
}
// Walk the pattern fields; replace() is used for iteration only — each field
// appends to `parts` and the replacement result is discarded.
properties.pattern.replace( datePatternRe, function( current ) {
var aux, dateField, type, value,
chr = current.charAt( 0 ),
length = current.length;
if ( chr === "j" ) {
// Locale preferred hHKk.
// http://www.unicode.org/reports/tr35/tr35-dates.html#Time_Data
chr = properties.preferredTime;
}
if ( chr === "Z" ) {
// Z..ZZZ: same as "xxxx".
if ( length < 4 ) {
chr = "x";
length = 4;
// ZZZZ: same as "OOOO".
} else if ( length < 5 ) {
chr = "O";
length = 4;
// ZZZZZ: same as "XXXXX"
} else {
chr = "X";
length = 5;
}
}
// z...zzz: "{shortRegion}", e.g., "PST" or "PDT".
// zzzz: "{regionName} {Standard Time}" or "{regionName} {Daylight Time}",
// e.g., "Pacific Standard Time" or "Pacific Daylight Time".
if ( chr === "z" ) {
if ( date.isDST ) {
value = date.isDST() ? properties.daylightTzName : properties.standardTzName;
}
// Fall back to "O" format.
if ( !value ) {
chr = "O";
if ( length < 4 ) {
length = 1;
}
}
}
switch ( chr ) {
// Era
case "G":
value = properties.eras[ date.getFullYear() < 0 ? 0 : 1 ];
break;
// Year
case "y":
// Plain year.
// The length specifies the padding, but for two letters it also specifies the
// maximum length.
value = date.getFullYear();
if ( length === 2 ) {
value = String( value );
value = +value.substr( value.length - 2 );
}
break;
case "Y":
// Year in "Week of Year"
// The length specifies the padding, but for two letters it also specifies the
// maximum length.
// yearInWeekofYear = date + DaysInAWeek - (dayOfWeek - firstDay) - minDays
value = new Date( date.getTime() );
value.setDate(
value.getDate() + 7 -
dateDayOfWeek( date, properties.firstDay ) -
properties.firstDay -
properties.minDays
);
value = value.getFullYear();
if ( length === 2 ) {
value = String( value );
value = +value.substr( value.length - 2 );
}
break;
// Quarter
case "Q":
case "q":
value = Math.ceil( ( date.getMonth() + 1 ) / 3 );
if ( length > 2 ) {
value = properties.quarters[ chr ][ length ][ value ];
}
break;
// Month
case "M":
case "L":
value = date.getMonth() + 1;
if ( length > 2 ) {
value = properties.months[ chr ][ length ][ value ];
}
break;
// Week
case "w":
// Week of Year.
// woy = ceil( ( doy + dow of 1/1 ) / 7 ) - minDaysStuff ? 1 : 0.
// TODO should pad on ww? Not documented, but I guess so.
value = dateDayOfWeek( dateStartOf( date, "year" ), properties.firstDay );
value = Math.ceil( ( dateDayOfYear( date ) + value ) / 7 ) -
( 7 - value >= properties.minDays ? 0 : 1 );
break;
case "W":
// Week of Month.
// wom = ceil( ( dom + dow of `1/month` ) / 7 ) - minDaysStuff ? 1 : 0.
value = dateDayOfWeek( dateStartOf( date, "month" ), properties.firstDay );
value = Math.ceil( ( date.getDate() + value ) / 7 ) -
( 7 - value >= properties.minDays ? 0 : 1 );
break;
// Day
case "d":
value = date.getDate();
break;
case "D":
value = dateDayOfYear( date ) + 1;
break;
case "F":
// Day of Week in month. eg. 2nd Wed in July.
value = Math.floor( date.getDate() / 7 ) + 1;
break;
// Week day
case "e":
case "c":
if ( length <= 2 ) {
// Range is [1-7] (deduced by example provided on documentation)
// TODO Should pad with zeros (not specified in the docs)?
value = dateDayOfWeek( date, properties.firstDay ) + 1;
break;
}
/* falls through */
case "E":
value = dateWeekDays[ date.getDay() ];
value = properties.days[ chr ][ length ][ value ];
break;
// Period (AM or PM)
case "a":
value = properties.dayPeriods[ date.getHours() < 12 ? "am" : "pm" ];
break;
// Hour
case "h": // 1-12
value = ( date.getHours() % 12 ) || 12;
break;
case "H": // 0-23
value = date.getHours();
break;
case "K": // 0-11
value = date.getHours() % 12;
break;
case "k": // 1-24
value = date.getHours() || 24;
break;
// Minute
case "m":
value = date.getMinutes();
break;
// Second
case "s":
value = date.getSeconds();
break;
case "S":
value = Math.round( date.getMilliseconds() * Math.pow( 10, length - 3 ) );
break;
case "A":
value = Math.round( dateMillisecondsInDay( date ) * Math.pow( 10, length - 3 ) );
break;
// Zone
case "z":
// Handled above; reaching here means a named zone value was already set.
break;
case "v":
// v...vvv: "{shortRegion}", eg. "PT".
// vvvv: "{regionName} {Time}",
// e.g., "Pacific Time".
if ( properties.genericTzName ) {
value = properties.genericTzName;
break;
}
/* falls through */
case "V":
//VVVV: "{explarCity} {Time}", e.g., "Los Angeles Time"
if ( properties.timeZoneName ) {
value = properties.timeZoneName;
break;
}
if ( current === "v" ) {
length = 1;
}
/* falls through */
case "O":
// O: "{gmtFormat}+H;{gmtFormat}-H" or "{gmtZeroFormat}", eg. "GMT-8" or "GMT".
// OOOO: "{gmtFormat}{hourFormat}" or "{gmtZeroFormat}", eg. "GMT-08:00" or "GMT".
if ( date.getTimezoneOffset() === 0 ) {
value = properties.gmtZeroFormat;
} else {
// If O..OOO and timezone offset has non-zero minutes, show minutes.
if ( length < 4 ) {
aux = date.getTimezoneOffset();
aux = properties.hourFormat[ aux % 60 - aux % 1 === 0 ? 0 : 1 ];
} else {
aux = properties.hourFormat;
}
value = dateTimezoneHourFormat(
date,
aux,
timeSeparator,
numberFormatters
);
value = properties.gmtFormat.replace( /\{0\}/, value );
}
break;
case "X":
// Same as x*, except it uses "Z" for zero offset.
if ( date.getTimezoneOffset() === 0 ) {
value = "Z";
break;
}
/* falls through */
case "x":
// x: hourFormat("+HH[mm];-HH[mm]")
// xx: hourFormat("+HHmm;-HHmm")
// xxx: hourFormat("+HH:mm;-HH:mm")
// xxxx: hourFormat("+HHmm[ss];-HHmm[ss]")
// xxxxx: hourFormat("+HH:mm[:ss];-HH:mm[:ss]")
aux = date.getTimezoneOffset();
// If x and timezone offset has non-zero minutes, use xx (i.e., show minutes).
if ( length === 1 && aux % 60 - aux % 1 !== 0 ) {
length += 1;
}
// If (xxxx or xxxxx) and timezone offset has zero seconds, use xx or xxx
// respectively (i.e., don't show optional seconds).
if ( ( length === 4 || length === 5 ) && aux % 1 === 0 ) {
length -= 2;
}
value = [
"+HH;-HH",
"+HHmm;-HHmm",
"+HH:mm;-HH:mm",
"+HHmmss;-HHmmss",
"+HH:mm:ss;-HH:mm:ss"
][ length - 1 ];
value = dateTimezoneHourFormat( date, value, ":" );
break;
// timeSeparator
case ":":
value = timeSeparator;
break;
// ' literals.
case "'":
value = removeLiteralQuotes( current );
break;
// Anything else is considered a literal, including [ ,:/.@#], chinese, japonese, and
// arabic characters.
default:
value = current;
}
// Numeric fields are rendered through the length-specific number formatter
// (which applies locale digits/padding).
if ( typeof value === "number" ) {
value = numberFormatters[ length ]( value );
}
dateField = dateFieldsMap[ chr ];
type = dateField ? dateField : "literal";
// Concat two consecutive literals
if ( type === "literal" && parts.length && parts[ parts.length - 1 ].type === "literal" ) {
parts[ parts.length - 1 ].value += value;
return;
}
parts.push( { type: type, value: value } );
});
return parts;
};
// Builds the runtime date-to-parts formatter: validates its `value`
// argument, then delegates to dateFormat() with the precomputed
// numberFormatters and properties captured in this closure.
var dateToPartsFormatterFn = function( numberFormatters, properties ) {
	function dateToPartsFormatter( value ) {
		validateParameterPresence( value, "value" );
		validateParameterTypeDate( value, "value" );
		return dateFormat( value, numberFormatters, properties );
	}
	return dateToPartsFormatter;
};
// Whether `options` carries any of the mutually exclusive format
// selectors (a "style"): a skeleton, a date/time/datetime preset, or a
// raw pattern.
function optionsHasStyle( options ) {
	return [ "skeleton", "date", "time", "datetime", "raw" ].some(function( key ) {
		return options[ key ] !== undefined;
	});
}
// cldr "get" listener: asserts that required CLDR content is present,
// ignoring paths that are legitimately optional for date support.
function validateRequiredCldr( path, value ) {
	var optionalContent = [
		/dates\/calendars\/gregorian\/dateTimeFormats\/availableFormats/,
		/dates\/calendars\/gregorian\/days\/.*\/short/,
		/dates\/timeZoneNames\/zone/,
		/dates\/timeZoneNames\/metazone/,
		/globalize-iana/,
		/supplemental\/metaZones/,
		/supplemental\/timeData\/(?!001)/,
		/supplemental\/weekData\/(?!001)/
	];
	validateCldr( path, value, { skip: optionalContent } );
}
// Validates each of the three preset-style options, if present.
function validateOptionsPreset( options ) {
	[ "date", "time", "datetime" ].forEach(function( type ) {
		validateOptionsPresetEach( type, options );
	});
}
// A preset option, when given, must be one of the four CLDR widths.
function validateOptionsPresetEach( type, options ) {
	var presets = [ "short", "medium", "long", "full" ];
	var value = options[ type ];
	var isValid = value === undefined || presets.indexOf( value ) !== -1;
	validate(
		"E_INVALID_OPTIONS",
		"Invalid `{{type}: \"{value}\"}`.",
		isValid,
		{ type: type, value: value }
	);
}
// A requested skeleton is only valid when pattern expansion produced a
// non-empty pattern string out of the loaded CLDR data.
function validateOptionsSkeleton( pattern, skeleton ) {
	var expanded = typeof pattern === "string" && pattern.length > 0;
	validate(
		"E_INVALID_OPTIONS",
		"Invalid `{skeleton: \"{value}\"}` based on provided CLDR.",
		skeleton === undefined || expanded,
		{ type: "skeleton", value: skeleton }
	);
}
// Factory for a cldr "get" listener that asserts IANA timezone content
// (loaded via Globalize.loadTimeZone) exists for the given `timeZone`.
function validateRequiredIana( timeZone ) {
	return function( path, value ) {
		if ( !/globalize-iana/.test( path ) ) {
			return;
		}
		validate(
			"E_MISSING_IANA_TZ",
			"Missing required IANA timezone content for `{timeZone}`: `{path}`.",
			value,
			{
				timeZone: timeZone,
				path: path.replace( /globalize-iana\//, "" )
			}
		);
	};
}
/**
 * .loadTimeZone( json )
 *
 * @json [JSON]
 *
 * Load IANA timezone data into the Cldr store under the "globalize-iana"
 * key, where the timezone listeners above expect to find it.
 */
Globalize.loadTimeZone = function( json ) {
	validateParameterPresence( json, "json" );
	validateParameterTypePlainObject( json, "json" );
	Cldr.load({
		"globalize-iana": json
	});
};
/**
 * .dateFormatter( options )
 *
 * @options [Object] see date/expand_pattern for more info.
 *
 * Return a date formatter function (of the form below) according to the given options and the
 * default/instance locale.
 *
 * fn( value )
 *
 * @value [Date]
 *
 * Return a function that formats a date according to the given `format` and the default/instance
 * locale.
 */
Globalize.dateFormatter =
Globalize.prototype.dateFormatter = function( options ) {
	var args, dateToPartsFormatter, returnFn;
	validateParameterTypePlainObject( options, "options" );
	options = options || {};
	// Default to a year/month/day skeleton when no style option was given.
	// NOTE(review): this writes `skeleton` into the caller-supplied options
	// object — callers reusing the same object will see the mutation.
	if ( !optionsHasStyle( options ) ) {
		options.skeleton = "yMd";
	}
	args = [ options ];
	// The string formatter is a thin layer over the parts formatter: it only
	// joins the produced parts into the final string.
	dateToPartsFormatter = this.dateToPartsFormatter( options );
	returnFn = dateFormatterFn( dateToPartsFormatter );
	// Register args/dependencies so the Globalize compiler can precompile
	// this formatter into a runtime module.
	runtimeBind( args, this.cldr, returnFn, [ dateToPartsFormatter ] );
	return returnFn;
};
/**
 * .dateToPartsFormatter( options )
 *
 * @options [Object] see date/expand_pattern for more info.
 *
 * Return a date formatter function (of the form below) according to the given options and the
 * default/instance locale.
 *
 * fn( value )
 *
 * @value [Date]
 *
 * Return a function that formats a date to parts according to the given `format`
 * and the default/instance
 * locale.
 */
Globalize.dateToPartsFormatter =
Globalize.prototype.dateToPartsFormatter = function( options ) {
	var args, cldr, numberFormatters, pad, pattern, properties, returnFn,
		timeZone, ianaListener;
	validateParameterTypePlainObject( options, "options" );
	cldr = this.cldr;
	options = options || {};
	// Default to a year/month/day skeleton when no style option was given.
	// NOTE(review): mutates the caller-supplied options object.
	if ( !optionsHasStyle( options ) ) {
		options.skeleton = "yMd";
	}
	validateOptionsPreset( options );
	validateDefaultLocale( cldr );
	timeZone = options.timeZone;
	validateParameterTypeString( timeZone, "options.timeZone" );
	args = [ options ];
	// While deriving pattern/properties below, listen on cldr "get" to assert
	// that all required CLDR (and, for a timeZone, IANA) content is loaded.
	cldr.on( "get", validateRequiredCldr );
	if ( timeZone ) {
		// Keep a reference to the listener so it can be removed by identity.
		ianaListener = validateRequiredIana( timeZone );
		cldr.on( "get", ianaListener );
	}
	pattern = dateExpandPattern( options, cldr );
	validateOptionsSkeleton( pattern, options.skeleton );
	properties = dateFormatProperties( pattern, cldr, timeZone );
	cldr.off( "get", validateRequiredCldr );
	if ( ianaListener ) {
		cldr.off( "get", ianaListener );
	}
	// Create needed number formatters.
	// The properties carry raw number patterns keyed by padding width;
	// replace each with an actual formatter function.
	numberFormatters = properties.numberFormatters;
	delete properties.numberFormatters;
	for ( pad in numberFormatters ) {
		numberFormatters[ pad ] = this.numberFormatter({
			raw: numberFormatters[ pad ]
		});
	}
	returnFn = dateToPartsFormatterFn( numberFormatters, properties );
	// Register args/dependencies for the Globalize runtime compiler.
	runtimeBind( args, cldr, returnFn, [ numberFormatters, properties ] );
	return returnFn;
};
/**
 * .dateParser( options )
 *
 * @options [Object] see date/expand_pattern for more info.
 *
 * Return a function that parses a string date according to the given `formats` and the
 * default/instance locale.
 */
Globalize.dateParser =
Globalize.prototype.dateParser = function( options ) {
	var args, cldr, ianaListener, numberParser, parseProperties, pattern, returnFn,
		timeZone, tokenizerProperties;
	validateParameterTypePlainObject( options, "options" );
	cldr = this.cldr;
	options = options || {};
	// Default to a year/month/day skeleton when no style option was given.
	if ( !optionsHasStyle( options ) ) {
		options.skeleton = "yMd";
	}
	validateOptionsPreset( options );
	validateDefaultLocale( cldr );
	timeZone = options.timeZone;
	validateParameterTypeString( timeZone, "options.timeZone" );
	args = [ options ];
	cldr.on( "get", validateRequiredCldr );
	if ( timeZone ) {
		// Fix: keep a reference to the listener. `cldr.off` removes handlers by
		// identity, so the previous code — which called validateRequiredIana()
		// again on removal — created a *new* function and left the original
		// listener attached forever. Mirrors .dateToPartsFormatter().
		ianaListener = validateRequiredIana( timeZone );
		cldr.on( "get", ianaListener );
	}
	pattern = dateExpandPattern( options, cldr );
	validateOptionsSkeleton( pattern, options.skeleton );
	tokenizerProperties = dateTokenizerProperties( pattern, cldr, timeZone );
	parseProperties = dateParseProperties( cldr, timeZone );
	cldr.off( "get", validateRequiredCldr );
	if ( ianaListener ) {
		cldr.off( "get", ianaListener );
	}
	numberParser = this.numberParser({ raw: "0" });
	returnFn = dateParserFn( numberParser, parseProperties, tokenizerProperties );
	// Register args/dependencies for the Globalize runtime compiler.
	runtimeBind( args, cldr, returnFn, [ numberParser, parseProperties, tokenizerProperties ] );
	return returnFn;
};
/**
 * .formatDate( value, options )
 *
 * @value [Date]
 *
 * @options [Object] see date/expand_pattern for more info.
 *
 * Validates `value`, then formats it with a formatter built on the fly from
 * `options` and the default/instance locale.
 */
Globalize.formatDate =
Globalize.prototype.formatDate = function( value, options ) {
	validateParameterPresence( value, "value" );
	validateParameterTypeDate( value, "value" );
	var format = this.dateFormatter( options );
	return format( value );
};
/**
 * .formatDateToParts( value, options )
 *
 * @value [Date]
 *
 * @options [Object] see date/expand_pattern for more info.
 *
 * Validates `value`, then formats it to an array of parts with a
 * parts-formatter built on the fly from `options` and the default/instance
 * locale.
 */
Globalize.formatDateToParts =
Globalize.prototype.formatDateToParts = function( value, options ) {
	validateParameterPresence( value, "value" );
	validateParameterTypeDate( value, "value" );
	var formatToParts = this.dateToPartsFormatter( options );
	return formatToParts( value );
};
/**
 * .parseDate( value, options )
 *
 * @value [String]
 *
 * @options [Object] see date/expand_pattern for more info.
 *
 * Validates `value`, then parses it with a parser built on the fly from
 * `options` and the default/instance locale. Returns a Date instance or null.
 */
Globalize.parseDate =
Globalize.prototype.parseDate = function( value, options ) {
	validateParameterPresence( value, "value" );
	validateParameterTypeString( value, "value" );
	var parse = this.dateParser( options );
	return parse( value );
};
return Globalize;
}));
| ahocevar/cdnjs | ajax/libs/globalize/1.4.0-alpha.2/globalize/date.js | JavaScript | mit | 74,659 |
import util from "../utils";
/**
 * Service layer for the persona-bar SEO module: a thin wrapper around the
 * DNN services framework, targeting the "SEO" Web API controller. Every
 * method forwards the raw service response to the supplied callback(s).
 */
class ApplicationService {
    /**
     * Returns the shared services-framework instance configured for the
     * given Web API controller.
     * @param {string} controller - controller name (always "SEO" here).
     */
    getServiceFramework(controller) {
        let sf = util.utilities.sf;
        sf.moduleRoot = "PersonaBar";
        sf.controller = controller;
        return sf;
    }
    getGeneralSettings(callback) {
        const sf = this.getServiceFramework("SEO");
        sf.get("GetGeneralSettings", {}, callback);
    }
    updateGeneralSettings(payload, callback, failureCallback) {
        const sf = this.getServiceFramework("SEO");
        sf.post("UpdateGeneralSettings", payload, callback, failureCallback);
    }
    getRegexSettings(callback) {
        const sf = this.getServiceFramework("SEO");
        sf.get("GetRegexSettings", {}, callback);
    }
    updateRegexSettings(payload, callback, failureCallback) {
        const sf = this.getServiceFramework("SEO");
        sf.post("UpdateRegexSettings", payload, callback, failureCallback);
    }
    testUrl(pageId, queryString, customPageName, callback) {
        const sf = this.getServiceFramework("SEO");
        sf.get("TestUrl?pageId=" + pageId + "&queryString=" + encodeURIComponent(queryString) + "&customPageName=" + encodeURIComponent(customPageName), {}, callback);
    }
    testUrlRewrite(uri, callback) {
        const sf = this.getServiceFramework("SEO");
        // Fix: encode the URI so reserved characters (&, ?, #, spaces, …)
        // survive the query string — consistent with testUrl() above, which
        // already encodes its parameters.
        sf.get("TestUrlRewrite?uri=" + encodeURIComponent(uri), {}, callback);
    }
    getSitemapSettings(callback) {
        const sf = this.getServiceFramework("SEO");
        sf.get("GetSitemapSettings", {}, callback);
    }
    updateSitemapSettings(payload, callback, failureCallback) {
        const sf = this.getServiceFramework("SEO");
        sf.post("UpdateSitemapSettings", payload, callback, failureCallback);
    }
    getSitemapProviders(callback) {
        const sf = this.getServiceFramework("SEO");
        sf.get("GetSitemapProviders", {}, callback);
    }
    updateSitemapProvider(payload, callback, failureCallback) {
        const sf = this.getServiceFramework("SEO");
        sf.post("UpdateSitemapProvider", payload, callback, failureCallback);
    }
    createVerification(verification, callback, failureCallback) {
        const sf = this.getServiceFramework("SEO");
        // NOTE(review): `verification` is passed through unencoded here;
        // presumably it is a plain token — confirm against callers before
        // tightening.
        sf.post("CreateVerification?verification=" + verification, {}, callback, failureCallback);
    }
    clearCache(callback, failureCallback) {
        const sf = this.getServiceFramework("SEO");
        sf.post("ResetCache", {}, callback, failureCallback);
    }
    getExtensionUrlProviders(callback) {
        const sf = this.getServiceFramework("SEO");
        sf.get("GetExtensionUrlProviders", {}, callback);
    }
    updateExtensionUrlProviderStatus(payload, callback, failureCallback) {
        const sf = this.getServiceFramework("SEO");
        sf.post("UpdateExtensionUrlProviderStatus", payload, callback, failureCallback);
    }
}
const applicationService = new ApplicationService();
export default applicationService; | dnnsoftware/Dnn.AdminExperience.Extensions | src/Modules/Settings/Dnn.PersonaBar.Seo/Seo.Web/src/services/applicationService.js | JavaScript | mit | 2,914 |
<?php
namespace Neos\Flow\Http\Helper;
/*
* This file is part of the Neos.Flow package.
*
* (c) Contributors of the Neos Project - www.neos.io
*
* This package is Open Source Software. For the full copyright and license
* information, please view the LICENSE file which was distributed with this
* source code.
*/
use Neos\Utility\MediaTypes;
use Psr\Http\Message\RequestInterface;
/**
 * Helper for dealing with HTTP media type resolution.
 */
abstract class MediaTypeHelper
{
    /**
     * Get accepted media types for the given request.
     * If no "Accept" header was found all media types are acceptable.
     *
     * @param RequestInterface $request
     * @return array list of accepted media types, ordered by preference
     */
    public static function determineAcceptedMediaTypes(RequestInterface $request): array
    {
        // getHeaderLine() yields the comma-joined header value ('' if absent).
        $rawValues = $request->getHeaderLine('Accept');
        if (empty($rawValues) || !is_string($rawValues)) {
            // No usable Accept header: the client accepts anything.
            return ['*/*'];
        }
        $acceptedMediaTypes = self::parseContentNegotiationQualityValues($rawValues);
        return $acceptedMediaTypes;
    }
    /**
     * Returns the best fitting IANA media type after applying the content negotiation
     * rules on the accepted media types.
     *
     * @param array $acceptedMediaTypes A list of accepted media types according to a request, ordered by preference.
     * @param array $supportedMediaTypes A list of media types which are supported by the application / controller
     * @param bool $trim If TRUE, only the type/subtype of the media type is returned. If FALSE, the full original media type string is returned.
     * @return string|null The media type and sub type which matched, NULL if none matched
     */
    public static function negotiateMediaType(array $acceptedMediaTypes, array $supportedMediaTypes, bool $trim = true): ?string
    {
        $negotiatedMediaType = null;
        // Accepted types come first so earlier (more preferred) entries win;
        // the first supported type matching an accepted range is chosen.
        foreach ($acceptedMediaTypes as $acceptedMediaType) {
            foreach ($supportedMediaTypes as $supportedMediaType) {
                if (MediaTypes::mediaRangeMatches($acceptedMediaType, $supportedMediaType)) {
                    $negotiatedMediaType = $supportedMediaType;
                    break 2;
                }
            }
        }
        return ($trim && $negotiatedMediaType !== null ? MediaTypes::trimMediaType($negotiatedMediaType) : $negotiatedMediaType);
    }
    /**
     * Parses a RFC 2616 content negotiation header field by evaluating the Quality
     * Values and splitting the options into an array list, ordered by user preference.
     *
     * @param string $rawValues The raw Accept* Header field value
     * @return array The parsed list of field values, ordered by user preference
     */
    public static function parseContentNegotiationQualityValues(string $rawValues): array
    {
        // Split "type;q=0.8, other" into pairs of [mediaType, quality];
        // quality stays '' when no q= parameter was given.
        $acceptedTypes = array_map(
            function ($acceptType) {
                $typeAndQuality = preg_split('/;\s*q=/', $acceptType);
                return [$typeAndQuality[0], (isset($typeAndQuality[1]) ? (float)$typeAndQuality[1] : '')];
            },
            preg_split('/,\s*/', $rawValues)
        );
        $flattenedAcceptedTypes = [];
        // Entries without an explicit q-value are bucketed by specificity:
        // [0] type/subtype with parameters, [1] plain type/subtype,
        // [2] subtype wildcard (type/*), [3] full wildcard (*/*).
        $valuesWithoutQualityValue = [[], [], [], []];
        foreach ($acceptedTypes as $typeAndQuality) {
            if ($typeAndQuality[1] === '') {
                $parsedType = MediaTypes::parseMediaType($typeAndQuality[0]);
                if ($parsedType['type'] === '*') {
                    $valuesWithoutQualityValue[3][$typeAndQuality[0]] = true;
                } elseif ($parsedType['subtype'] === '*') {
                    $valuesWithoutQualityValue[2][$typeAndQuality[0]] = true;
                } elseif ($parsedType['parameters'] === []) {
                    $valuesWithoutQualityValue[1][$typeAndQuality[0]] = true;
                } else {
                    $valuesWithoutQualityValue[0][$typeAndQuality[0]] = true;
                }
            } else {
                $flattenedAcceptedTypes[$typeAndQuality[0]] = $typeAndQuality[1];
            }
        }
        // Flatten the buckets most-specific-first, then append the explicit
        // q-valued entries sorted by descending quality.
        // NOTE(review): q-less entries are ranked ahead of all q-valued ones —
        // presumably intentional (q defaults to 1); confirm against RFC 7231.
        $valuesWithoutQualityValue = array_merge(array_keys($valuesWithoutQualityValue[0]), array_keys($valuesWithoutQualityValue[1]), array_keys($valuesWithoutQualityValue[2]), array_keys($valuesWithoutQualityValue[3]));
        arsort($flattenedAcceptedTypes);
        $parsedValues = array_merge($valuesWithoutQualityValue, array_keys($flattenedAcceptedTypes));
        return $parsedValues;
    }
}
| daniellienert/flow-development-collection | Neos.Flow/Classes/Http/Helper/MediaTypeHelper.php | PHP | mit | 4,432 |
package net.sf.jabref.logic.importer;
import net.sf.jabref.logic.help.HelpFile;
/**
 * Searches web resources for bibliographic information.
 */
public interface WebFetcher {

    /**
     * Returns the localized name of this fetcher.
     * The title can be used to display the fetcher in the menu and in the side pane.
     *
     * @return the localized name
     */
    String getName();

    /**
     * Returns the help page for this fetcher.
     *
     * @return the {@link HelpFile} enum constant for the help page, or {@code null} if this
     *         fetcher does not provide one (the default)
     */
    default HelpFile getHelpPage() {
        return null; // no help page by default
    }
}
| Mr-DLib/jabref | src/main/java/net/sf/jabref/logic/importer/WebFetcher.java | Java | mit | 626 |
/*
* Copyright (C) 2013 The Libphonenumber Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.i18n.phonenumbers;
import com.google.i18n.phonenumbers.Phonemetadata.PhoneMetadata;
import com.google.i18n.phonenumbers.Phonemetadata.PhoneNumberDesc;
import com.google.i18n.phonenumbers.Phonenumber.PhoneNumber;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
/**
 * Methods for getting information about short phone numbers, such as short codes and emergency
 * numbers. Note that most commercial short numbers are not handled here, but by the
 * {@link PhoneNumberUtil}.
 *
 * @author Shaopeng Jia
 * @author David Yonge-Mallo
 */
public class ShortNumberInfo {
  private static final Logger logger = Logger.getLogger(ShortNumberInfo.class.getName());

  private static final ShortNumberInfo INSTANCE =
      new ShortNumberInfo(PhoneNumberUtil.getInstance());

  // In these countries, if extra digits are added to an emergency number, it no longer connects
  // to the emergency service.
  private static final Set<String> REGIONS_WHERE_EMERGENCY_NUMBERS_MUST_BE_EXACT =
      new HashSet<String>();
  static {
    REGIONS_WHERE_EMERGENCY_NUMBERS_MUST_BE_EXACT.add("BR");
    REGIONS_WHERE_EMERGENCY_NUMBERS_MUST_BE_EXACT.add("CL");
    REGIONS_WHERE_EMERGENCY_NUMBERS_MUST_BE_EXACT.add("NI");
  }

  /** Cost categories of short numbers. */
  public enum ShortNumberCost {
    TOLL_FREE,
    STANDARD_RATE,
    PREMIUM_RATE,
    UNKNOWN_COST
  }

  /** Returns the singleton instance of the ShortNumberInfo. */
  public static ShortNumberInfo getInstance() {
    return INSTANCE;
  }

  private final PhoneNumberUtil phoneUtil;

  // @VisibleForTesting
  ShortNumberInfo(PhoneNumberUtil util) {
    phoneUtil = util;
  }

  /**
   * Check whether a short number is a possible number when dialled from a region, given the number
   * in the form of a string, and the region where the number is dialed from. This provides a more
   * lenient check than {@link #isValidShortNumberForRegion}.
   *
   * @param shortNumber the short number to check as a string
   * @param regionDialingFrom the region from which the number is dialed
   * @return whether the number is a possible short number
   */
  public boolean isPossibleShortNumberForRegion(String shortNumber, String regionDialingFrom) {
    PhoneMetadata phoneMetadata =
        MetadataManager.getShortNumberMetadataForRegion(regionDialingFrom);
    if (phoneMetadata == null) {
      return false;
    }
    PhoneNumberDesc generalDesc = phoneMetadata.getGeneralDesc();
    return phoneUtil.isNumberPossibleForDesc(shortNumber, generalDesc);
  }

  /**
   * Check whether a short number is a possible number. If a country calling code is shared by
   * multiple regions, this returns true if it's possible in any of them. This provides a more
   * lenient check than {@link #isValidShortNumber}. See {@link
   * #isPossibleShortNumberForRegion(String, String)} for details.
   *
   * @param number the short number to check
   * @return whether the number is a possible short number
   */
  public boolean isPossibleShortNumber(PhoneNumber number) {
    List<String> regionCodes = phoneUtil.getRegionCodesForCountryCode(number.getCountryCode());
    String shortNumber = phoneUtil.getNationalSignificantNumber(number);
    for (String region : regionCodes) {
      PhoneMetadata phoneMetadata = MetadataManager.getShortNumberMetadataForRegion(region);
      // Fix: short number metadata may be missing for some of the regions sharing this country
      // calling code; skip them instead of dereferencing null. Every other use of
      // getShortNumberMetadataForRegion in this class already guards against null.
      if (phoneMetadata == null) {
        continue;
      }
      if (phoneUtil.isNumberPossibleForDesc(shortNumber, phoneMetadata.getGeneralDesc())) {
        return true;
      }
    }
    return false;
  }

  /**
   * Tests whether a short number matches a valid pattern in a region. Note that this doesn't verify
   * the number is actually in use, which is impossible to tell by just looking at the number
   * itself.
   *
   * @param shortNumber the short number to check as a string
   * @param regionDialingFrom the region from which the number is dialed
   * @return whether the short number matches a valid pattern
   */
  public boolean isValidShortNumberForRegion(String shortNumber, String regionDialingFrom) {
    PhoneMetadata phoneMetadata =
        MetadataManager.getShortNumberMetadataForRegion(regionDialingFrom);
    if (phoneMetadata == null) {
      return false;
    }
    PhoneNumberDesc generalDesc = phoneMetadata.getGeneralDesc();
    if (!generalDesc.hasNationalNumberPattern() ||
        !phoneUtil.isNumberMatchingDesc(shortNumber, generalDesc)) {
      return false;
    }
    PhoneNumberDesc shortNumberDesc = phoneMetadata.getShortCode();
    if (!shortNumberDesc.hasNationalNumberPattern()) {
      logger.log(Level.WARNING, "No short code national number pattern found for region: " +
          regionDialingFrom);
      return false;
    }
    return phoneUtil.isNumberMatchingDesc(shortNumber, shortNumberDesc);
  }

  /**
   * Tests whether a short number matches a valid pattern. If a country calling code is shared by
   * multiple regions, this returns true if it's valid in any of them. Note that this doesn't verify
   * the number is actually in use, which is impossible to tell by just looking at the number
   * itself. See {@link #isValidShortNumberForRegion(String, String)} for details.
   *
   * @param number the short number for which we want to test the validity
   * @return whether the short number matches a valid pattern
   */
  public boolean isValidShortNumber(PhoneNumber number) {
    List<String> regionCodes = phoneUtil.getRegionCodesForCountryCode(number.getCountryCode());
    String shortNumber = phoneUtil.getNationalSignificantNumber(number);
    String regionCode = getRegionCodeForShortNumberFromRegionList(number, regionCodes);
    if (regionCodes.size() > 1 && regionCode != null) {
      // If a matching region had been found for the phone number from among two or more regions,
      // then we have already implicitly verified its validity for that region.
      return true;
    }
    return isValidShortNumberForRegion(shortNumber, regionCode);
  }

  /**
   * Gets the expected cost category of a short number when dialled from a region (however, nothing
   * is implied about its validity). If it is important that the number is valid, then its validity
   * must first be checked using {@link isValidShortNumberForRegion}. Note that emergency numbers
   * are always considered toll-free. Example usage:
   * <pre>{@code
   * ShortNumberInfo shortInfo = ShortNumberInfo.getInstance();
   * String shortNumber = "110";
   * String regionCode = "FR";
   * if (shortInfo.isValidShortNumberForRegion(shortNumber, regionCode)) {
   *   ShortNumberInfo.ShortNumberCost cost = shortInfo.getExpectedCostForRegion(shortNumber,
   *       regionCode);
   *   // Do something with the cost information here.
   * }}</pre>
   *
   * @param shortNumber the short number for which we want to know the expected cost category,
   *     as a string
   * @param regionDialingFrom the region from which the number is dialed
   * @return the expected cost category for that region of the short number. Returns UNKNOWN_COST if
   *     the number does not match a cost category. Note that an invalid number may match any cost
   *     category.
   */
  public ShortNumberCost getExpectedCostForRegion(String shortNumber, String regionDialingFrom) {
    // Note that regionDialingFrom may be null, in which case phoneMetadata will also be null.
    PhoneMetadata phoneMetadata = MetadataManager.getShortNumberMetadataForRegion(
        regionDialingFrom);
    if (phoneMetadata == null) {
      return ShortNumberCost.UNKNOWN_COST;
    }

    // The cost categories are tested in order of decreasing expense, since if for some reason the
    // patterns overlap the most expensive matching cost category should be returned.
    if (phoneUtil.isNumberMatchingDesc(shortNumber, phoneMetadata.getPremiumRate())) {
      return ShortNumberCost.PREMIUM_RATE;
    }
    if (phoneUtil.isNumberMatchingDesc(shortNumber, phoneMetadata.getStandardRate())) {
      return ShortNumberCost.STANDARD_RATE;
    }
    if (phoneUtil.isNumberMatchingDesc(shortNumber, phoneMetadata.getTollFree())) {
      return ShortNumberCost.TOLL_FREE;
    }
    if (isEmergencyNumber(shortNumber, regionDialingFrom)) {
      // Emergency numbers are implicitly toll-free.
      return ShortNumberCost.TOLL_FREE;
    }
    return ShortNumberCost.UNKNOWN_COST;
  }

  /**
   * Gets the expected cost category of a short number (however, nothing is implied about its
   * validity). If the country calling code is unique to a region, this method behaves exactly the
   * same as {@link #getExpectedCostForRegion(String, String)}. However, if the country calling
   * code is shared by multiple regions, then it returns the highest cost in the sequence
   * PREMIUM_RATE, UNKNOWN_COST, STANDARD_RATE, TOLL_FREE. The reason for the position of
   * UNKNOWN_COST in this order is that if a number is UNKNOWN_COST in one region but STANDARD_RATE
   * or TOLL_FREE in another, its expected cost cannot be estimated as one of the latter since it
   * might be a PREMIUM_RATE number.
   *
   * For example, if a number is STANDARD_RATE in the US, but TOLL_FREE in Canada, the expected cost
   * returned by this method will be STANDARD_RATE, since the NANPA countries share the same country
   * calling code.
   *
   * Note: If the region from which the number is dialed is known, it is highly preferable to call
   * {@link #getExpectedCostForRegion(String, String)} instead.
   *
   * @param number the short number for which we want to know the expected cost category
   * @return the highest expected cost category of the short number in the region(s) with the given
   *     country calling code
   */
  public ShortNumberCost getExpectedCost(PhoneNumber number) {
    List<String> regionCodes = phoneUtil.getRegionCodesForCountryCode(number.getCountryCode());
    if (regionCodes.size() == 0) {
      return ShortNumberCost.UNKNOWN_COST;
    }
    String shortNumber = phoneUtil.getNationalSignificantNumber(number);
    if (regionCodes.size() == 1) {
      return getExpectedCostForRegion(shortNumber, regionCodes.get(0));
    }
    // Start from the cheapest category and upgrade it as more expensive matches are found.
    ShortNumberCost cost = ShortNumberCost.TOLL_FREE;
    for (String regionCode : regionCodes) {
      ShortNumberCost costForRegion = getExpectedCostForRegion(shortNumber, regionCode);
      switch (costForRegion) {
        case PREMIUM_RATE:
          return ShortNumberCost.PREMIUM_RATE;
        case UNKNOWN_COST:
          cost = ShortNumberCost.UNKNOWN_COST;
          break;
        case STANDARD_RATE:
          if (cost != ShortNumberCost.UNKNOWN_COST) {
            cost = ShortNumberCost.STANDARD_RATE;
          }
          break;
        case TOLL_FREE:
          // Do nothing.
          break;
        default:
          logger.log(Level.SEVERE, "Unrecognised cost for region: " + costForRegion);
      }
    }
    return cost;
  }

  // Helper method to get the region code for a given phone number, from a list of possible region
  // codes. If the list contains more than one region, the first region for which the number is
  // valid is returned.
  private String getRegionCodeForShortNumberFromRegionList(PhoneNumber number,
                                                           List<String> regionCodes) {
    if (regionCodes.size() == 0) {
      return null;
    } else if (regionCodes.size() == 1) {
      return regionCodes.get(0);
    }
    String nationalNumber = phoneUtil.getNationalSignificantNumber(number);
    for (String regionCode : regionCodes) {
      PhoneMetadata phoneMetadata = MetadataManager.getShortNumberMetadataForRegion(regionCode);
      if (phoneMetadata != null &&
          phoneUtil.isNumberMatchingDesc(nationalNumber, phoneMetadata.getShortCode())) {
        // The number is valid for this region.
        return regionCode;
      }
    }
    return null;
  }

  /**
   * Convenience method to get a list of what regions the library has metadata for.
   */
  Set<String> getSupportedRegions() {
    return Collections.unmodifiableSet(MetadataManager.getShortNumberMetadataSupportedRegions());
  }

  /**
   * Gets a valid short number for the specified region.
   *
   * @param regionCode the region for which an example short number is needed
   * @return a valid short number for the specified region. Returns an empty string when the
   *     metadata does not contain such information.
   */
  // @VisibleForTesting
  String getExampleShortNumber(String regionCode) {
    PhoneMetadata phoneMetadata = MetadataManager.getShortNumberMetadataForRegion(regionCode);
    if (phoneMetadata == null) {
      return "";
    }
    PhoneNumberDesc desc = phoneMetadata.getShortCode();
    if (desc.hasExampleNumber()) {
      return desc.getExampleNumber();
    }
    return "";
  }

  /**
   * Gets a valid short number for the specified cost category.
   *
   * @param regionCode the region for which an example short number is needed
   * @param cost the cost category of number that is needed
   * @return a valid short number for the specified region and cost category. Returns an empty
   *     string when the metadata does not contain such information, or the cost is UNKNOWN_COST.
   */
  // @VisibleForTesting
  String getExampleShortNumberForCost(String regionCode, ShortNumberCost cost) {
    PhoneMetadata phoneMetadata = MetadataManager.getShortNumberMetadataForRegion(regionCode);
    if (phoneMetadata == null) {
      return "";
    }
    PhoneNumberDesc desc = null;
    switch (cost) {
      case TOLL_FREE:
        desc = phoneMetadata.getTollFree();
        break;
      case STANDARD_RATE:
        desc = phoneMetadata.getStandardRate();
        break;
      case PREMIUM_RATE:
        desc = phoneMetadata.getPremiumRate();
        break;
      default:
        // UNKNOWN_COST numbers are computed by the process of elimination from the other cost
        // categories.
    }
    if (desc != null && desc.hasExampleNumber()) {
      return desc.getExampleNumber();
    }
    return "";
  }

  /**
   * Returns true if the number might be used to connect to an emergency service in the given
   * region.
   *
   * This method takes into account cases where the number might contain formatting, or might have
   * additional digits appended (when it is okay to do that in the region specified).
   *
   * @param number the phone number to test
   * @param regionCode the region where the phone number is being dialed
   * @return whether the number might be used to connect to an emergency service in the given region
   */
  public boolean connectsToEmergencyNumber(String number, String regionCode) {
    return matchesEmergencyNumberHelper(number, regionCode, true /* allows prefix match */);
  }

  /**
   * Returns true if the number exactly matches an emergency service number in the given region.
   *
   * This method takes into account cases where the number might contain formatting, but doesn't
   * allow additional digits to be appended.
   *
   * @param number the phone number to test
   * @param regionCode the region where the phone number is being dialed
   * @return whether the number exactly matches an emergency services number in the given region
   */
  public boolean isEmergencyNumber(String number, String regionCode) {
    return matchesEmergencyNumberHelper(number, regionCode, false /* doesn't allow prefix match */);
  }

  private boolean matchesEmergencyNumberHelper(String number, String regionCode,
                                               boolean allowPrefixMatch) {
    number = PhoneNumberUtil.extractPossibleNumber(number);
    if (PhoneNumberUtil.PLUS_CHARS_PATTERN.matcher(number).lookingAt()) {
      // Returns false if the number starts with a plus sign. We don't believe dialing the country
      // code before emergency numbers (e.g. +1911) works, but later, if that proves to work, we can
      // add additional logic here to handle it.
      return false;
    }
    PhoneMetadata metadata = MetadataManager.getShortNumberMetadataForRegion(regionCode);
    if (metadata == null || !metadata.hasEmergency()) {
      return false;
    }
    Pattern emergencyNumberPattern =
        Pattern.compile(metadata.getEmergency().getNationalNumberPattern());
    String normalizedNumber = PhoneNumberUtil.normalizeDigitsOnly(number);
    // Exact-match-only regions must match the whole number; elsewhere a prefix match is enough
    // when allowed by the caller.
    return (!allowPrefixMatch || REGIONS_WHERE_EMERGENCY_NUMBERS_MUST_BE_EXACT.contains(regionCode))
        ? emergencyNumberPattern.matcher(normalizedNumber).matches()
        : emergencyNumberPattern.matcher(normalizedNumber).lookingAt();
  }

  /**
   * Given a valid short number, determines whether it is carrier-specific (however, nothing is
   * implied about its validity). If it is important that the number is valid, then its validity
   * must first be checked using {@link #isValidShortNumber} or
   * {@link #isValidShortNumberForRegion}.
   *
   * @param number the valid short number to check
   * @return whether the short number is carrier-specific (assuming the input was a valid short
   *     number).
   */
  public boolean isCarrierSpecific(PhoneNumber number) {
    List<String> regionCodes = phoneUtil.getRegionCodesForCountryCode(number.getCountryCode());
    String regionCode = getRegionCodeForShortNumberFromRegionList(number, regionCodes);
    String nationalNumber = phoneUtil.getNationalSignificantNumber(number);
    PhoneMetadata phoneMetadata = MetadataManager.getShortNumberMetadataForRegion(regionCode);
    return (phoneMetadata != null) &&
        (phoneUtil.isNumberMatchingDesc(nationalNumber, phoneMetadata.getCarrierSpecific()));
  }
}
| yoolk/fakie | lib/fakie/js/java/libphonenumber/src/com/google/i18n/phonenumbers/ShortNumberInfo.java | Java | mit | 18,190 |
// Copyright (c) 2014-2018 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <chain.h>
#include <util.h>
#include <test/test_bitcoin.h>
#include <vector>
#include <boost/test/unit_test.hpp>
#define SKIPLIST_LENGTH 300000
BOOST_FIXTURE_TEST_SUITE(skiplist_tests, BasicTestingSetup)
// Builds a 300k-entry chain and checks that every pskip pointer lands on an
// earlier block of the advertised height, then spot-checks GetAncestor()
// with random (from, to) pairs.
BOOST_AUTO_TEST_CASE(skiplist_test)
{
    std::vector<CBlockIndex> vIndex(SKIPLIST_LENGTH);
    for (int i=0; i<SKIPLIST_LENGTH; i++) {
        vIndex[i].nHeight = i;
        vIndex[i].pprev = (i == 0) ? nullptr : &vIndex[i - 1];
        vIndex[i].BuildSkip();
    }
    for (int i=0; i<SKIPLIST_LENGTH; i++) {
        if (i > 0) {
            // pskip must point at the block whose height it claims, strictly below i.
            BOOST_CHECK(vIndex[i].pskip == &vIndex[vIndex[i].pskip->nHeight]);
            BOOST_CHECK(vIndex[i].pskip->nHeight < i);
        } else {
            BOOST_CHECK(vIndex[i].pskip == nullptr);
        }
    }
    for (int i=0; i < 1000; i++) {
        // to <= from by construction, so both ancestor walks are valid.
        int from = InsecureRandRange(SKIPLIST_LENGTH - 1);
        int to = InsecureRandRange(from + 1);
        BOOST_CHECK(vIndex[SKIPLIST_LENGTH - 1].GetAncestor(from) == &vIndex[from]);
        BOOST_CHECK(vIndex[from].GetAncestor(to) == &vIndex[to]);
        BOOST_CHECK(vIndex[from].GetAncestor(0) == vIndex.data());
    }
}
// Exercises CChain::GetLocator(): first entry is the tip itself, last is
// genesis, the next eleven step back one block each, then steps double.
// Block hashes are chosen so GetLow64() equals the height, which makes the
// distance checks below cheap.
BOOST_AUTO_TEST_CASE(getlocator_test)
{
    // Build a main chain 100000 blocks long.
    std::vector<uint256> vHashMain(100000);
    std::vector<CBlockIndex> vBlocksMain(100000);
    for (unsigned int i=0; i<vBlocksMain.size(); i++) {
        vHashMain[i] = ArithToUint256(i); // Set the hash equal to the height, so we can quickly check the distances.
        vBlocksMain[i].nHeight = i;
        vBlocksMain[i].pprev = i ? &vBlocksMain[i - 1] : nullptr;
        vBlocksMain[i].phashBlock = &vHashMain[i];
        vBlocksMain[i].BuildSkip();
        BOOST_CHECK_EQUAL((int)UintToArith256(vBlocksMain[i].GetBlockHash()).GetLow64(), vBlocksMain[i].nHeight);
        BOOST_CHECK(vBlocksMain[i].pprev == nullptr || vBlocksMain[i].nHeight == vBlocksMain[i].pprev->nHeight + 1);
    }
    // Build a branch that splits off at block 49999, 50000 blocks long.
    std::vector<uint256> vHashSide(50000);
    std::vector<CBlockIndex> vBlocksSide(50000);
    for (unsigned int i=0; i<vBlocksSide.size(); i++) {
        vHashSide[i] = ArithToUint256(i + 50000 + (arith_uint256(1) << 128)); // Add 1<<128 to the hashes, so GetLow64() still returns the height.
        vBlocksSide[i].nHeight = i + 50000;
        vBlocksSide[i].pprev = i ? &vBlocksSide[i - 1] : (vBlocksMain.data()+49999);
        vBlocksSide[i].phashBlock = &vHashSide[i];
        vBlocksSide[i].BuildSkip();
        BOOST_CHECK_EQUAL((int)UintToArith256(vBlocksSide[i].GetBlockHash()).GetLow64(), vBlocksSide[i].nHeight);
        BOOST_CHECK(vBlocksSide[i].pprev == nullptr || vBlocksSide[i].nHeight == vBlocksSide[i].pprev->nHeight + 1);
    }
    // Build a CChain for the main branch.
    CChain chain;
    chain.SetTip(&vBlocksMain.back());
    // Test 100 random starting points for locators.
    for (int n=0; n<100; n++) {
        // Tips come from both the main chain and the side branch.
        int r = InsecureRandRange(150000);
        CBlockIndex* tip = (r < 100000) ? &vBlocksMain[r] : &vBlocksSide[r - 100000];
        CBlockLocator locator = chain.GetLocator(tip);
        // The first result must be the block itself, the last one must be genesis.
        BOOST_CHECK(locator.vHave.front() == tip->GetBlockHash());
        BOOST_CHECK(locator.vHave.back() == vBlocksMain[0].GetBlockHash());
        // Entries 1 through 11 (inclusive) go back one step each.
        for (unsigned int i = 1; i < 12 && i < locator.vHave.size() - 1; i++) {
            BOOST_CHECK_EQUAL(UintToArith256(locator.vHave[i]).GetLow64(), tip->nHeight - i);
        }
        // The further ones (excluding the last one) go back with exponential steps.
        unsigned int dist = 2;
        for (unsigned int i = 12; i < locator.vHave.size() - 1; i++) {
            BOOST_CHECK_EQUAL(UintToArith256(locator.vHave[i - 1]).GetLow64() - UintToArith256(locator.vHave[i]).GetLow64(), dist);
            dist *= 2;
        }
    }
}
// Checks that FindEarliestAtLeast() returns the lowest block whose nTimeMax
// reaches the requested time, on a chain with randomized (monotone-max) times.
BOOST_AUTO_TEST_CASE(findearliestatleast_test)
{
    std::vector<uint256> vHashMain(100000);
    std::vector<CBlockIndex> vBlocksMain(100000);
    for (unsigned int i=0; i<vBlocksMain.size(); i++) {
        vHashMain[i] = ArithToUint256(i); // Set the hash equal to the height
        vBlocksMain[i].nHeight = i;
        vBlocksMain[i].pprev = i ? &vBlocksMain[i - 1] : nullptr;
        vBlocksMain[i].phashBlock = &vHashMain[i];
        vBlocksMain[i].BuildSkip();
        if (i < 10) {
            vBlocksMain[i].nTime = i;
            vBlocksMain[i].nTimeMax = i;
        } else {
            // randomly choose something in the range [MTP, MTP*2]
            int64_t medianTimePast = vBlocksMain[i].GetMedianTimePast();
            int r = InsecureRandRange(medianTimePast);
            vBlocksMain[i].nTime = r + medianTimePast;
            vBlocksMain[i].nTimeMax = std::max(vBlocksMain[i].nTime, vBlocksMain[i-1].nTimeMax);
        }
    }
    // Check that we set nTimeMax up correctly.
    unsigned int curTimeMax = 0;
    for (unsigned int i=0; i<vBlocksMain.size(); ++i) {
        curTimeMax = std::max(curTimeMax, vBlocksMain[i].nTime);
        BOOST_CHECK(curTimeMax == vBlocksMain[i].nTimeMax);
    }
    // Build a CChain for the main branch.
    CChain chain;
    chain.SetTip(&vBlocksMain.back());
    // Verify that FindEarliestAtLeast is correct.
    for (unsigned int i=0; i<10000; ++i) {
        // Pick a random element in vBlocksMain.
        int r = InsecureRandRange(vBlocksMain.size());
        int64_t test_time = vBlocksMain[r].nTime;
        CBlockIndex *ret = chain.FindEarliestAtLeast(test_time);
        // The result must reach the time, and its predecessor must not --
        // i.e. it is the earliest qualifying block on the chain.
        BOOST_CHECK(ret->nTimeMax >= test_time);
        BOOST_CHECK((ret->pprev==nullptr) || ret->pprev->nTimeMax < test_time);
        BOOST_CHECK(vBlocksMain[r].GetAncestor(ret->nHeight) == ret);
    }
}
// Boundary cases: ties in nTimeMax must resolve to the earliest block, and
// out-of-range requests (too large, or extreme negatives) behave sanely.
BOOST_AUTO_TEST_CASE(findearliestatleast_edge_test)
{
    std::list<CBlockIndex> blocks;
    for (unsigned int timeMax : {100, 100, 100, 200, 200, 200, 300, 300, 300}) {
        CBlockIndex* prev = blocks.empty() ? nullptr : &blocks.back();
        blocks.emplace_back();
        blocks.back().nHeight = prev ? prev->nHeight + 1 : 0;
        blocks.back().pprev = prev;
        blocks.back().BuildSkip();
        blocks.back().nTimeMax = timeMax;
    }
    CChain chain;
    chain.SetTip(&blocks.back());
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(50)->nHeight, 0);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(100)->nHeight, 0);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(150)->nHeight, 3);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(200)->nHeight, 3);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(250)->nHeight, 6);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(300)->nHeight, 6);
    BOOST_CHECK(!chain.FindEarliestAtLeast(350));
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(0)->nHeight, 0);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(-1)->nHeight, 0);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(std::numeric_limits<int64_t>::min())->nHeight, 0);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(std::numeric_limits<unsigned int>::min())->nHeight, 0);
    BOOST_CHECK_EQUAL(chain.FindEarliestAtLeast(-int64_t(std::numeric_limits<unsigned int>::max()) - 1)->nHeight, 0);
    BOOST_CHECK(!chain.FindEarliestAtLeast(std::numeric_limits<int64_t>::max()));
    BOOST_CHECK(!chain.FindEarliestAtLeast(std::numeric_limits<unsigned int>::max()));
    BOOST_CHECK(!chain.FindEarliestAtLeast(int64_t(std::numeric_limits<unsigned int>::max()) + 1));
}
BOOST_AUTO_TEST_SUITE_END()
| h4x3rotab/BTCGPU | src/test/skiplist_tests.cpp | C++ | mit | 7,701 |
#include "AnimationGraphFactory.h"
namespace animation
{
	// Default construction; mNodeFactory is default-initialized.
	AnimationGraphFactory::AnimationGraphFactory()
	{
	}
	// Builds an AnimationGraph from the XML description in _filename.
	// Expected layout: a root element with a "name" attribute, containing
	// <Node type=... name=...> elements (with nested <Property key=... value=...>)
	// and <Connection from=... to=...> elements (with nested <Point from=... to=...>).
	// Returns the new graph, or null when the document cannot be opened or has
	// no root; the caller owns the returned pointer.
	// NOTE(review): nodes referenced by a Connection are looked up by name with
	// no null check -- presumably the XML is trusted to list Nodes before
	// Connections that use them; confirm against the editor that emits these files.
	AnimationGraph* AnimationGraphFactory::createGraph(const std::string& _filename)
	{
		AnimationGraph* result = 0;
		MyGUI::xml::Document doc;
		if (doc.open(_filename))
		{
			MyGUI::xml::Element* root = doc.getRoot();
			if (root)
			{
				result = new AnimationGraph(root->findAttribute("name"));
				MyGUI::xml::ElementEnumerator data = root->getElementEnumerator();
				while (data.next())
				{
					if (data->getName() == "Node")
					{
						// Nodes are created through the factory and registered on the graph.
						IAnimationNode* node = mNodeFactory.createNode(data->findAttribute("type"), data->findAttribute("name"), result);
						result->addNode(node);
						MyGUI::xml::ElementEnumerator prop = data->getElementEnumerator();
						while (prop.next("Property"))
						{
							std::string key = prop->findAttribute("key");
							std::string value = prop->findAttribute("value");
							node->setProperty(key, value);
						}
					}
					else if (data->getName() == "Connection")
					{
						// Each <Point> wires one output of from_node to one input of to_node.
						IAnimationNode* from_node = result->getNodeByName(data->findAttribute("from"));
						IAnimationNode* to_node = result->getNodeByName(data->findAttribute("to"));
						MyGUI::xml::ElementEnumerator point = data->getElementEnumerator();
						while (point.next("Point"))
						{
							from_node->addConnection(
								point->findAttribute("from"),
								to_node,
								point->findAttribute("to"));
						}
					}
				}
			}
		}
		return result;
	}
} // namespace animation
| Anomalous-Software/mygui | UnitTests/UnitTest_GraphView/AnimationGraphFactory.cpp | C++ | mit | 1,532 |
module Elasticsearch
module API
module Actions
# Return information and statistics about terms in the fields of a particular document
#
# @example Get statistics for a specific document
#
# client.indices.create index: 'my_index',
# body: {
# mappings: {
# my_type: {
# properties: {
# text: {
# type: 'string',
# term_vector: 'with_positions_offsets_payloads'
# }
# }
# }
# }
# }
#
# client.index index: 'my_index', type: 'my_type', id: '1', body: { text: 'Foo Bar Fox' }
#
# client.termvector index: 'my_index', type: 'my_type', id: '1'
# # => { ..., "term_vectors" => { "text" => { "field_statistics" => { ... }, "terms" => { "bar" => ... } } }
#
# @option arguments [String] :index The name of the index (*Required*)
# @option arguments [String] :type The type of the document (*Required*)
# @option arguments [String] :id The document ID (*Required*)
# @option arguments [Hash] :body The request definition
# @option arguments [Boolean] :term_statistics Whether total term frequency and
# document frequency should be returned
# @option arguments [Boolean] :field_statistics Whether document count, sum of document frequencies
# and sum of total term frequencies should be returned
# @option arguments [List] :fields A comma-separated list of fields to return
# @option arguments [Boolean] :offsets Whether term offsets should be returned
# @option arguments [Boolean] :positions Whether term positions should be returned
# @option arguments [Boolean] :payloads Whether term payloads should be returned
# @option arguments [String] :preference Specify the node or shard the operation should be performed on
# (default: random)
# @option arguments [String] :routing Specific routing value
# @option arguments [String] :parent Parent ID of the documents
#
# @see http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/docs-termvectors.html
#
def termvector(arguments={})
raise ArgumentError, "Required argument 'index' missing" unless arguments[:index]
raise ArgumentError, "Required argument 'type' missing" unless arguments[:type]
raise ArgumentError, "Required argument 'id' missing" unless arguments[:id]
valid_params = [
:term_statistics,
:field_statistics,
:fields,
:offsets,
:positions,
:payloads,
:preference,
:routing,
:parent ]
method = 'GET'
path = Utils.__pathify Utils.__escape(arguments[:index]),
Utils.__escape(arguments[:type]),
arguments[:id],
'_termvector'
params = Utils.__validate_and_extract_params arguments, valid_params
body = arguments[:body]
perform_request(method, path, params, body).body
end
end
end
end
| HelainSchoonjans/LogstashRabbitMQExample | logstash-producer/vendor/bundle/jruby/2.1/gems/elasticsearch-api-1.0.1/lib/elasticsearch/api/actions/termvector.rb | Ruby | mit | 3,615 |
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
'use strict';
// THIS CHECK SHOULD BE THE FIRST THING IN THIS FILE
// This is to ensure that we catch env issues before we error while requiring other dependencies.
// The required node/npm/yarn versions come from package.json's `engines` field.
const engines = require('./package.json').engines;
require('./tools/check-environment')({
  requiredNodeVersion: engines.node,
  requiredNpmVersion: engines.npm,
  requiredYarnVersion: engines.yarn
});
const gulp = require('gulp');
// See `tools/gulp-tasks/README.md` for information about task loading.
// Each task module either exports a factory directly, or an object of named
// factories; either way the chosen factory is invoked with the gulp instance.
function loadTask(fileName, taskName) {
  const taskModule = require('./tools/gulp-tasks/' + fileName);
  const factory = taskName ? taskModule[taskName] : taskModule;
  return factory(gulp);
}
// Formatting: check-only vs. rewrite-in-place variants of the same task module.
gulp.task('format:enforce', loadTask('format', 'enforce'));
gulp.task('format', loadTask('format', 'format'));
// Build variants.
gulp.task('build.sh', loadTask('build', 'all'));
gulp.task('build.sh:no-bundle', loadTask('build', 'no-bundle'));
// Public API golden files: verify against, or regenerate after a full build.
gulp.task('public-api:enforce', loadTask('public-api', 'enforce'));
gulp.task('public-api:update', ['build.sh'], loadTask('public-api', 'update'));
// Aggregate lint = formatting check + commit messages + tslint.
gulp.task('lint', ['format:enforce', 'validate-commit-messages', 'tslint']);
gulp.task('tslint', ['tools:build'], loadTask('lint'));
gulp.task('validate-commit-messages', loadTask('validate-commit-message'));
gulp.task('tools:build', loadTask('tools-build'));
gulp.task('check-cycle', loadTask('check-cycle'));
// Dev servers and miscellaneous utilities.
gulp.task('serve', loadTask('serve', 'default'));
gulp.task('serve-examples', loadTask('serve', 'examples'));
gulp.task('changelog', loadTask('changelog'));
gulp.task('check-env', () => {/* this is a noop because the env test ran already above */});
| chalin/angular | gulpfile.js | JavaScript | mit | 1,800 |
require File.dirname(__FILE__) + '/../spec_helper'
require File.dirname(__FILE__) + '/../fixtures/class_variables'
# Class variables (@@var) live on the class hierarchy, so a write made
# through a subclass is visible through the superclass as well.
describe "A class variable" do
  it "can be accessed from a subclass" do
    ClassVariablesSpec::ClassB.new.cvar_a.should == :cvar_a
  end
  it "is set in the superclass" do
    a = ClassVariablesSpec::ClassA.new
    b = ClassVariablesSpec::ClassB.new
    b.cvar_a = :new_val
    a.cvar_a.should == :new_val
  end
end
# A class variable defined in a module stays owned by the module: methods
# defined in the module read/write the module's copy, while methods defined
# in an extending class create a separate variable on that class.
describe "A class variable defined in a module" do
  it "can be accessed from classes that extend the module" do
    ClassVariablesSpec::ClassC.cvar_m.should == :value
  end
  it "is not defined in these classes" do
    ClassVariablesSpec::ClassC.cvar_defined?.should be_false
  end
  it "is only updated in the module a method defined in the module is used" do
    ClassVariablesSpec::ClassC.cvar_m = "new value"
    ClassVariablesSpec::ClassC.cvar_m.should == "new value"
    ClassVariablesSpec::ClassC.cvar_defined?.should be_false
  end
  it "is updated in the class when a Method defined in the class is used" do
    ClassVariablesSpec::ClassC.cvar_c = "new value"
    ClassVariablesSpec::ClassC.cvar_defined?.should be_true
  end
  it "can be accessed inside the class using the module methods" do
    ClassVariablesSpec::ClassC.cvar_c = "new value"
    ClassVariablesSpec::ClassC.cvar_m.should == "new value"
  end
  it "can be accessed from modules that extend the module" do
    ClassVariablesSpec::ModuleO.cvar_n.should == :value
  end
  it "is defined in the extended module" do
    ClassVariablesSpec::ModuleN.class_variable_defined?(:@@cvar_n).should be_true
  end
  it "is not defined in the extending module" do
    ClassVariablesSpec::ModuleO.class_variable_defined?(:@@cvar_n).should be_false
  end
end
| bradediger/rubyspec | language/class_variable_spec.rb | Ruby | mit | 1,778 |
using System.Collections.Generic;
using WebService.Models;
namespace WebService.Service
{
    /// <summary>
    /// Service contract for reading and maintaining <see cref="User"/> records.
    /// </summary>
    public interface IUserService
    {
        /// <summary>Returns every known user.</summary>
        ICollection<User> GetAllUsers();
        /// <summary>Returns the user with the given id.</summary>
        User GetById(int userId);
        /// <summary>Applies changes from <paramref name="user"/> and returns the updated entity.</summary>
        User UpdateUser(User user);
        /// <summary>Creates a new user and returns the created entity.</summary>
        User CreateNewUser(User user);
        /// <summary>Removes the user with the given id.</summary>
        void RemoveUserById(int userId);
    }
}
from __future__ import unicode_literals
from .. import Provider as PhoneNumberProvider
class Provider(PhoneNumberProvider):
    """Polish (``pl_PL``) phone number provider.

    ``formats`` lists mobile and landline patterns, each with and without
    the ``+48`` country prefix.  The ``#`` placeholders are presumably
    filled with random digits by the base ``PhoneNumberProvider`` --
    confirm against its implementation.
    """
    formats = (
        # Mobile
        # Government website: http://www.uke.gov.pl/numeracja-843
        '50# ### ###',
        '51# ### ###',
        '53# ### ###',
        '57# ### ###',
        '60# ### ###',
        '66# ### ###',
        '69# ### ###',
        '72# ### ###',
        '73# ### ###',
        '78# ### ###',
        '79# ### ###',
        '88# ### ###',
        '+48 50# ### ###',
        '+48 51# ### ###',
        '+48 53# ### ###',
        '+48 57# ### ###',
        '+48 60# ### ###',
        '+48 66# ### ###',
        '+48 69# ### ###',
        '+48 72# ### ###',
        '+48 73# ### ###',
        '+48 78# ### ###',
        '+48 79# ### ###',
        '+48 88# ### ###',
        # Landline numbers for Katowice (32) and Warsaw (22) area codes.
        '32 ### ## ##',
        '+48 32 ### ## ##',
        '22 ### ## ##',
        '+48 22 ### ## ##',
    )
| deanishe/alfred-fakeum | src/libs/faker/providers/phone_number/pl_PL/__init__.py | Python | mit | 937 |
<?php
namespace Oro\Bundle\EmailBundle\Provider;
class VariablesProvider
{
    /** @var SystemVariablesProviderInterface[] */
    protected $systemVariablesProviders = [];
    /** @var EntityVariablesProviderInterface[] */
    protected $entityVariablesProviders = [];
    /**
     * @param SystemVariablesProviderInterface $provider
     */
    public function addSystemVariablesProvider(SystemVariablesProviderInterface $provider)
    {
        $this->systemVariablesProviders[] = $provider;
    }
    /**
     * @param EntityVariablesProviderInterface $provider
     */
    public function addEntityVariablesProvider(EntityVariablesProviderInterface $provider)
    {
        $this->entityVariablesProviders[] = $provider;
    }
    /**
     * Gets system variables available in a template
     * Returned variables are sorted be name.
     *
     * @return array The list of variables in the following format:
     *                  {variable name} => array
     *                      'type' => {variable data type}
     *                      'name' => {translated variable name}
     */
    public function getSystemVariableDefinitions()
    {
        $result = [];
        foreach ($this->systemVariablesProviders as $provider) {
            $result = array_merge(
                $result,
                $provider->getVariableDefinitions()
            );
        }
        ksort($result);
        return $result;
    }
    /**
     * Gets entity related variables available in a template
     * Returned variables are sorted by name.
     *
     * @param string $entityClass The entity class name. If it is not specified the definitions for all
     *                            entities are returned.
     *
     * @return array The list of variables in the following format:
     *                  {variable name} => array
     *                      'type' => {variable data type}
     *                      'name' => {translated variable name}
     *               If a field represents a relation the following attributes are added:
     *                      'related_entity_name' => {related entity full class name}
     *               If $entityClass is NULL variables are grouped by entity class:
     *                  {entity class} => array
     *                      {variable name} => array of attributes described above
     */
    public function getEntityVariableDefinitions($entityClass = null)
    {
        $result = [];
        foreach ($this->entityVariablesProviders as $provider) {
            $result = array_merge_recursive(
                $result,
                $provider->getVariableDefinitions($entityClass)
            );
        }
        if ($entityClass) {
            ksort($result);
        } else {
            foreach ($result as &$variables) {
                ksort($variables);
            }
            // Break the reference left behind by the foreach above; without
            // this, a later write to $variables would silently overwrite the
            // last element of $result.
            unset($variables);
        }
        return $result;
    }
    /**
     * Gets values of system variables available in a template
     *
     * @return array The list of values
     *                  key   = {variable name}
     *                  value = {variable value}
     */
    public function getSystemVariableValues()
    {
        $result = [];
        foreach ($this->systemVariablesProviders as $provider) {
            $result = array_merge(
                $result,
                $provider->getVariableValues()
            );
        }
        return $result;
    }
    /**
     * Gets getters of entity related variables available in a template
     *
     * @param string $entityClass The entity class name. If it is not specified the definitions for all
     *                            entities are returned.
     *
     * @return string[] The list of getter names
     *                      key   = {variable name}
     *                      value = {method name} // can be NULL if entity field is public
     */
    public function getEntityVariableGetters($entityClass = null)
    {
        $result = [];
        foreach ($this->entityVariablesProviders as $provider) {
            $result = array_merge_recursive(
                $result,
                $provider->getVariableGetters($entityClass)
            );
        }
        return $result;
    }
}
| Djamy/platform | src/Oro/Bundle/EmailBundle/Provider/VariablesProvider.php | PHP | mit | 4,230 |
// Copyright 2015 Reborndb Org. All Rights Reserved.
// Licensed under the MIT (MIT-LICENSE.txt) license.
package resp
import (
"bufio"
"bytes"
"strconv"
"github.com/juju/errors"
"github.com/ngaut/log"
)
// encoder wraps a buffered writer with RESP serialization helpers.
type encoder struct {
	w *bufio.Writer
}
// imap caches the decimal representations of the integers in
// [-1024, 1024*512), so hot-path integer encoding avoids repeated
// strconv allocations.
var (
	imap []string
)

func init() {
	imap = make([]string, 1024*512+1024)
	for i := 0; i < len(imap); i++ {
		imap[i] = strconv.Itoa(i - 1024)
	}
}

// itos formats i in base 10, serving cached values when possible and
// falling back to strconv for anything outside the cached range.
func itos(i int64) string {
	if n := i + 1024; n >= 0 && n < int64(len(imap)) {
		return imap[n]
	}
	return strconv.FormatInt(i, 10)
}
// Encode writes r to w in RESP wire format without flushing the writer.
func Encode(w *bufio.Writer, r Resp) error {
	return encode(w, r, false)
}
// encode serializes r into w, flushing the buffered writer when
// needFlush is true.
func encode(w *bufio.Writer, r Resp, needFlush bool) error {
	e := &encoder{w}
	if err := e.encodeResp(r); err != nil {
		return err
	}
	if needFlush {
		return w.Flush()
	}
	return nil
}
// MustEncode is like Encode but terminates the process on failure.
func MustEncode(w *bufio.Writer, r Resp) {
	if err := Encode(w, r); err != nil {
		log.Fatalf("encode redis resp failed - %s", err)
	}
}
// defaultEncodeBufSize sizes the scratch bufio.Writer used by the
// EncodeTo* convenience helpers below.
const defaultEncodeBufSize = 16
// EncodeToBytes serializes r into a freshly allocated byte slice.
func EncodeToBytes(r Resp) ([]byte, error) {
	var b bytes.Buffer
	err := encode(bufio.NewWriterSize(&b, defaultEncodeBufSize), r, true)
	return b.Bytes(), err
}
// EncodeToString serializes r into a string.
func EncodeToString(r Resp) (string, error) {
	var b bytes.Buffer
	err := encode(bufio.NewWriterSize(&b, defaultEncodeBufSize), r, true)
	return b.String(), err
}
// MustEncodeToBytes is like EncodeToBytes but terminates the process on failure.
func MustEncodeToBytes(r Resp) []byte {
	b, err := EncodeToBytes(r)
	if err != nil {
		log.Fatalf("encode redis resp to bytes failed - %s", err)
	}
	return b
}
// encodeResp dispatches on the dynamic type of r and writes the matching
// RESP representation; unknown types yield ErrBadRespType.
func (e *encoder) encodeResp(r Resp) error {
	switch x := r.(type) {
	default:
		return errors.Trace(ErrBadRespType)
	case *String:
		if err := e.encodeType(TypeString); err != nil {
			return err
		}
		return e.encodeText(x.Value)
	case *Error:
		if err := e.encodeType(TypeError); err != nil {
			return err
		}
		return e.encodeText(x.Value)
	case *Int:
		if err := e.encodeType(TypeInt); err != nil {
			return err
		}
		return e.encodeInt(x.Value)
	case *BulkBytes:
		if err := e.encodeType(TypeBulkBytes); err != nil {
			return err
		}
		return e.encodeBulkBytes(x.Value)
	case *Array:
		if err := e.encodeType(TypeArray); err != nil {
			return err
		}
		return e.encodeArray(x.Value)
	case Ping:
		// A Ping is written as a bare newline, with no type marker.
		return errors.Trace(e.w.WriteByte('\n'))
	}
}
// encodeType writes the single-byte RESP type marker.
func (e *encoder) encodeType(t RespType) error {
	return errors.Trace(e.w.WriteByte(byte(t)))
}
// encodeText writes s followed by the CRLF line terminator.
func (e *encoder) encodeText(s string) error {
	if _, err := e.w.WriteString(s); err != nil {
		return errors.Trace(err)
	}
	if _, err := e.w.WriteString("\r\n"); err != nil {
		return errors.Trace(err)
	}
	return nil
}
// encodeInt writes v in decimal (served from the itos cache) plus CRLF.
func (e *encoder) encodeInt(v int64) error {
	return e.encodeText(itos(v))
}
// encodeBulkBytes writes a RESP bulk string: length line, payload, CRLF.
// A nil slice encodes as the RESP null bulk string (length -1, no payload).
func (e *encoder) encodeBulkBytes(b []byte) error {
	if b == nil {
		return e.encodeInt(-1)
	}
	if err := e.encodeInt(int64(len(b))); err != nil {
		return err
	}
	if _, err := e.w.Write(b); err != nil {
		return errors.Trace(err)
	}
	if _, err := e.w.WriteString("\r\n"); err != nil {
		return errors.Trace(err)
	}
	return nil
}
// encodeArray writes a RESP array header followed by each element in order.
// A nil slice encodes as the RESP null array (length -1, no elements).
func (e *encoder) encodeArray(a []Resp) error {
	if a == nil {
		return e.encodeInt(-1)
	}
	if err := e.encodeInt(int64(len(a))); err != nil {
		return err
	}
	for _, item := range a {
		if err := e.encodeResp(item); err != nil {
			return err
		}
	}
	return nil
}
| reborndb/go | redis/resp/encoder.go | GO | mit | 3,278 |
# frozen_string_literal: true
# Specs for Performance/Caller: the cop flags `caller.first` / `caller[n]`
# (and the caller_locations equivalents) in favor of passing an explicit
# range so the whole backtrace is never materialized.  The `expect(caller...)`
# lines inside the examples verify, at spec runtime, that the suggested
# replacement really is equivalent to the flagged expression.
RSpec.describe RuboCop::Cop::Performance::Caller do
  subject(:cop) { described_class.new }
  it 'accepts `caller` without argument and method chain' do
    expect_no_offenses('caller')
  end
  it 'accepts `caller` with arguments' do
    expect_no_offenses('caller(1, 1).first')
  end
  it 'accepts `caller_locations` without argument and method chain' do
    expect_no_offenses('caller_locations')
  end
  it 'registers an offense when :first is called on caller' do
    expect(caller.first).to eq(caller(1..1).first)
    expect_offense(<<-RUBY.strip_indent)
      caller.first
      ^^^^^^^^^^^^ Use `caller(1..1).first` instead of `caller.first`.
    RUBY
  end
  it 'registers an offense when :first is called on caller with 1' do
    expect(caller(1).first).to eq(caller(1..1).first)
    expect_offense(<<-RUBY.strip_indent)
      caller(1).first
      ^^^^^^^^^^^^^^^ Use `caller(1..1).first` instead of `caller.first`.
    RUBY
  end
  it 'registers an offense when :first is called on caller with 2' do
    expect(caller(2).first).to eq(caller(2..2).first)
    expect_offense(<<-RUBY.strip_indent)
      caller(2).first
      ^^^^^^^^^^^^^^^ Use `caller(2..2).first` instead of `caller.first`.
    RUBY
  end
  it 'registers an offense when :[] is called on caller' do
    expect(caller[1]).to eq(caller(2..2).first)
    expect_offense(<<-RUBY.strip_indent)
      caller[1]
      ^^^^^^^^^ Use `caller(2..2).first` instead of `caller[1]`.
    RUBY
  end
  it 'registers an offense when :[] is called on caller with 1' do
    expect(caller(1)[1]).to eq(caller(2..2).first)
    expect_offense(<<-RUBY.strip_indent)
      caller(1)[1]
      ^^^^^^^^^^^^ Use `caller(2..2).first` instead of `caller[1]`.
    RUBY
  end
  it 'registers an offense when :[] is called on caller with 2' do
    expect(caller(2)[1]).to eq(caller(3..3).first)
    expect_offense(<<-RUBY.strip_indent)
      caller(2)[1]
      ^^^^^^^^^^^^ Use `caller(3..3).first` instead of `caller[1]`.
    RUBY
  end
  it 'registers an offense when :first is called on caller_locations also' do
    expect(caller_locations.first.to_s).to eq(caller_locations(1..1).first.to_s)
    expect_offense(<<-RUBY.strip_indent)
      caller_locations.first
      ^^^^^^^^^^^^^^^^^^^^^^ Use `caller_locations(1..1).first` instead of `caller_locations.first`.
    RUBY
  end
  it 'registers an offense when :[] is called on caller_locations also' do
    expect(caller_locations[1].to_s).to eq(caller_locations(2..2).first.to_s)
    expect_offense(<<-RUBY.strip_indent)
      caller_locations[1]
      ^^^^^^^^^^^^^^^^^^^ Use `caller_locations(2..2).first` instead of `caller_locations[1]`.
    RUBY
  end
end
| palkan/rubocop | spec/rubocop/cop/performance/caller_spec.rb | Ruby | mit | 2,705 |
import { installation as ActionTypes, extension as ExtensionActionTypes } from "constants/actionTypes";
import { InstallationService } from "services";
const installationActions = {
    /**
     * Uploads a package file for server-side parsing and stores the parsed
     * metadata in the installation store.
     * @param {object} file - package file to parse
     * @param {function} [callback] - invoked with the raw response on success
     * @param {function} [errorCallback] - invoked when parsing fails
     */
    parsePackage(file, callback, errorCallback) {
        return (dispatch) => {
            InstallationService.parsePackage(file, (data) => {
                dispatch({
                    type: ActionTypes.PARSED_INSTALLATION_PACKAGE,
                    payload: JSON.parse(data)
                });
                if (callback) {
                    callback(data);
                }
            }, errorCallback);
        };
    },
    /**
     * Moves the installation wizard to the given step. The callback is
     * deferred via setTimeout so it runs after the dispatch has been handled.
     */
    navigateWizard(wizardStep, callback) {
        return (dispatch) => {
            dispatch({
                type: ActionTypes.GO_TO_WIZARD_STEP,
                payload: {
                    wizardStep
                }
            });
            if (callback) {
                setTimeout(() => {
                    callback();
                }, 0);
            }
        };
    },
    /**
     * Flags the available package identified by file name/type as the one
     * currently being installed.
     */
    setInstallingAvailablePackage(FileName, PackageType, callback) {
        return (dispatch) => {
            dispatch({
                type: ActionTypes.INSTALLING_AVAILABLE_PACKAGE,
                payload: {
                    PackageType,
                    FileName
                }
            });
            if (callback) {
                setTimeout(() => {
                    callback();
                }, 0);
            }
        };
    },
    /**
     * Clears the "installing available package" flag.
     */
    notInstallingAvailablePackage(callback) {
        return (dispatch) => {
            dispatch({
                type: ActionTypes.NOT_INSTALLING_AVAILABLE_PACKAGE
            });
            if (callback) {
                setTimeout(() => {
                    callback();
                }, 0);
            }
        };
    },
    /**
     * Installs an uploaded package and records the returned logs; when
     * addToList is truthy the freshly installed extension is also appended
     * to the extensions list with the package id from the response.
     */
    installExtension(file, newExtension, legacyType, isPortalPackage, callback, addToList) {
        // Deep-copy so the caller's object is not mutated when the new
        // package id is attached below. The JSON round-trip assumes
        // newExtension is plain serializable data -- TODO confirm.
        const _newExtension = JSON.parse(JSON.stringify(newExtension));
        return (dispatch) => {
            InstallationService.installPackage(file, legacyType, isPortalPackage, (data) => {
                dispatch({
                    type: ActionTypes.INSTALLED_EXTENSION_LOGS,
                    payload: JSON.parse(data)
                });
                if (addToList) {
                    _newExtension.packageId = JSON.parse(data).newPackageId;
                    _newExtension.inUse = "No";
                    dispatch({
                        type: ExtensionActionTypes.INSTALLED_EXTENSION,
                        payload: {
                            PackageInfo: _newExtension,
                            logs: JSON.parse(data).logs
                        }
                    });
                }
                if (callback) {
                    callback(data);
                }
            });
        };
    },
    /**
     * Discards any previously parsed package metadata.
     */
    clearParsedInstallationPackage(callback) {
        return (dispatch) => {
            dispatch({
                type: ActionTypes.CLEAR_PARSED_INSTALLATION_PACKAGE
            });
            if (callback) {
                callback();
            }
        };
    },
    /**
     * Toggles acceptance of the package license.
     */
    toggleAcceptLicense(value, callback) {
        return (dispatch) => {
            dispatch({
                type: ActionTypes.TOGGLE_ACCEPT_LICENSE,
                payload: value
            });
            if (callback) {
                callback();
            }
        };
    },
    /**
     * Shows or hides the installation log view.
     */
    setViewingLog(value, callback) {
        return (dispatch) => {
            dispatch({
                type: ActionTypes.TOGGLE_VIEWING_LOG,
                payload: value
            });
            if (callback) {
                callback();
            }
        };
    },
    /**
     * Marks whether the current package is a portal package.
     */
    setIsPortalPackage(value, callback) {
        return (dispatch) => {
            dispatch({
                type: ActionTypes.SET_IS_PORTAL_PACKAGE,
                payload: value
            });
            if (callback) {
                callback();
            }
        };
    }
};
export default installationActions;
| dnnsoftware/Dnn.AdminExperience.Extensions | src/Modules/Settings/Dnn.PersonaBar.Extensions/Extensions.Web/src/actions/installation.js | JavaScript | mit | 4,016 |
<?php
namespace PSU\Rave;
class Phone extends \PSU\Phone {
	// Alternate representations of this number. No writer is visible in this
	// class -- presumably populated by callers; confirm usage before removing.
	public $aliases = array();
	/**
	 * constructor
	 *
	 * Applies defaults (phone_type 'CE'; source 'USER:<session wp_id>' or
	 * 'USER:script' outside a web session) before delegating to the parent,
	 * then normalizes the number into digits-only form and area/number parts.
	 *
	 * @param mixed $data optional attribute array merged over the defaults
	 */
	public function __construct( $data = null ) {
		// in the event that 'source' wasn't passed in,
		// prepare a default source user
		$default_user = \PSU::nvl( $_SESSION['wp_id'], 'script' );
		$defaults = array(
			'phone_type' => 'CE',
			'source' => 'USER:' . $default_user,
		);
		$data = \PSU::params( $data, $defaults );
		parent::__construct( $data );
		if( $this->phone ) {
			// remove any formatting on the passed in phone number
			$this->phone = self::unformat( $this->phone );
			// split the number out into parts (for compatibility with \PSU\Phone)
			$data = $this->parse( $this->phone );
			// default area code 603 -- presumably the institution's local
			// area code; confirm before changing
			$this->area = $data['area'] ?: '603';
			$this->number = $data['number'];
		}//end if
	}//end constructor
	/**
	 * confirm the phone
	 *
	 * Forwards the confirmation code to Rave; on success, stamps the
	 * active person_phone row with the confirmation time.
	 *
	 * @param $pin \b Confirmation code
	 * @return mixed database execute result on success, false otherwise
	 */
	public function confirm( $pin ) {
		if( $rave_user = \PSU\Rave\User::get( $this->wp_id ) ) {
			if( $ok = $rave_user->confirmPhone( $pin ) ) {
				$sql = "
					UPDATE person_phone
					SET last_confirmed_date = NOW()
					WHERE wp_id = ?
					AND phone_type = ?
					AND end_date is null
				";
				$args = $this->sanitize();
				$args = array(
					$args['wp_id'],
					$args['phone_type'],
				);
				// NOTE(review): uses connection 'emergency_notification' while
				// other methods in this class use 'emergency_notificationt' --
				// confirm which pool name is actually registered.
				return \PSU::db('emergency_notification')->Execute( $sql, $args );
			} // end if
		} // end if
		return false;
	} //end confirm
	/**
	 * expire all active numbers matching the provided sanitized data
	 *
	 * @param string $wp_id user id whose numbers should be expired
	 * @param string $phone_type phone type code to expire
	 * @return mixed database execute result
	 */
	public static function expire_all_active( $wp_id, $phone_type ) {
		// make sure this insert has the correct order and values
		$args = array(
			$wp_id,
			$phone_type,
		);
		// TODO: sanitize the args
		// add an end date because we have a new number or they are resubmitting an opt out
		$sql = "
			UPDATE person_phone
			SET end_date = NOW()
			WHERE wp_id = ?
			AND phone_type = ?
			AND end_date is null
		";
		// NOTE(review): connection name 'emergency_notificationt' (trailing "t")
		// differs from 'emergency_notification' used by confirm()/unconfirm();
		// looks like a typo -- confirm which name is registered before fixing.
		return \PSU::db('emergency_notificationt')->Execute($sql, $args );
	}//end expire_all_active
/**
* returns a friendly string for the phone's Rave status
*/
public function friendly_status() {
if( $this->end_date ) {
return 'Phone record ended: '.$this->end_date;
} elseif( $this->opt_notnow ) {
return 'Has chosen deferred on: '.$this->opt_notnow;
} elseif( $this->opt_nocell ) {
return 'Has opted out on: '.$this->opt_nocell;
} elseif( ! $this->last_confirmed_date ) {
return 'Not confirmed, will prompt on next login';
} else {
return 'Confirmed: '.$this->last_confirmed_date;
} // end else
}//end friendly_status
	/**
	 * returns whether or not the given user has the
	 * given phone as a confirmed number in Rave
	 *
	 * @param mixed $rave_user \PSU\Rave\User instance, or a wp_id to look up
	 * @param mixed $phone phone object exposing area/number, or a phone string
	 * @return bool Rave's confirmation flag when the numbers match, false otherwise
	 */
	public static function is_confirmed( $rave_user, $phone ) {
		if( ! is_object( $rave_user ) ) {
			/**
			 * if $rave_user is not an object, assume it is a
			 * wp_id and instantiate \PSU\Rave\User
			 */
			$rave_user = \PSU\Rave\User::get( $rave_user );
		}//end if
		// TODO: throw error if object is not a \PSU\Rave\User
		if( $rave_user ) {
			if( is_object( $phone ) ) {
				/**
				 * if $phone is an object, assume \PSU\Phone,
				 * \PSU\Person\Phone, \PSU\Rave\Phone, etc and build
				 * a string accordingly
				 */
				$phone = $phone->area . $phone->number;
			}//end if
			$phone = self::unformat( $phone );
			// does the phone number in Rave match the passed phone?
			if( $rave_user->mobilePhone1 == $phone ) {
				// yes! return the confirmation status for that phone
				return $rave_user->mobile1Confirmed;
			} // end if
		} // end if
		return false;
	}//end is_confirmed
/**
* sanitize the object properties so we don't get h4xx0r3d
*/
public function sanitize() {
// fields are ordered the same as in gb_telephone package
$fields = array(
'wp_id' => FILTER_SANITIZE_STRING,
'phone_type' => FILTER_SANITIZE_STRING,
'source' => FILTER_SANITIZE_STRING,
);
$data = array();
// build the data array for inserts/updates
foreach( $fields as $field => $filter ) {
$data[ $field ] = filter_var( $this->$field, $filter ) ?: null;
}//end foreach
$data['phone'] = self::unformat( $this->phone );
return $data;
}//end sanitize
	/**
	 * save the Rave phone
	 *
	 * Decides whether the submission represents a new/changed number, a
	 * deferral, an opt-out, a resend of the confirmation code, or a no-op;
	 * expires superseded rows, inserts the new row, and pushes the change
	 * to Rave inside a database transaction.
	 *
	 * @param $option \b Rave save option (false, opted_out, deferred, resend, no_confirm)
	 * @return bool|mixed true for a no-op, the Rave save result, or false on failure
	 */
	public function save( $option = false ) {
		if( ! $this->wp_id ) {
			throw new UnexpectedValueException('A user id (wp_id) is required before saving. No user id was provided.');
		}//end if
		$args = $this->sanitize();
		// we need to get the current phone for the user.
		// load the phones
		$phones = new Phones( $this->wp_id );
		$phones->load();
		// grab the most recent, active number whose type matches this phone's type
		$current = $phones->current( $phones->type( $args['phone_type'] ) );
		if($current || $option == 'deferred' || $option == 'opted_out' ) {
			/* it will enter the following if (not the ifesles) when:
			 * 1) they have a current phone that doesn't match the phone they are saving, OR
			 * 2) they are deferring, OR
			 * 3) they are opting out, OR
			 * 4) they have a current phone that matches the phone they are saving AND they
			 *    are either continuing/not confirming AND the current phone is set to optout or deferred
			 */
			if(
				($current && $current->phone != $args['phone'])
				|| $option == 'deferred'
				|| $option == 'opted_out'
				|| (
					/**
					 * if the phone being entered matches the current active phone
					 * and the save option is 'continue' AND the current active
					 * phone is set to either nocell or notnow, then we need to
					 * enter a new row in the table and inactivate all the active
					 * numbers.
					 */
					$current
					&& $current->phone == $args['phone']
					&& ( $option == 'continue' || $option == 'no_confirm' )
					&& ( $current->opt_nocell || $current->opt_notnow )
				)
			) {
				// NOTE(review): connection 'emergency_notificationt' (trailing
				// "t") differs from 'emergency_notification' used elsewhere in
				// this class -- confirm which pool name is registered.
				\PSU::db('emergency_notificationt')->StartTrans();
				if( $current && ! ( $current->phone == $args['phone'] && $option == 'continue' ) ) {
					$current->unconfirm( $current->phone );
				}//end if
				// if we get here, we're going to be adding a new phone number. Expire
				// all of the active ones that match this number's type
				self::expire_all_active( $args['wp_id'], $args['phone_type'] );
				// flags the extra CompleteTrans() below to balance the nested
				// StartTrans() started in this branch
				$do_final_commit = true;
			} elseif( $option === 'resend' ) {
				$rave_user = \PSU\Rave\User::get( $this->wp_id );
				return $rave_user->save( true );
			} else {
				// phone has not changed, do nothing
				return true;
			} // end else
		} //end if
		// assume we want to actually commit...we'll cancel
		// the commit later if there was an error
		$commit = true;
		// NOTE(review): $do_final_commit is only defined when the branch above
		// ran; reading it below raises a PHP notice otherwise (it evaluates
		// falsy, so behavior is unchanged). Also, when _insert() fails the
		// transaction opened above is never completed -- verify intent.
		if( $ok = $this->_insert( $args, $option ) ) {
			// if we get in here, the insert worked swimmingly
			if( ! ( $ok = $this->_rave_save( $args['phone'], $option ) ) ) {
				// if we get in here, Rave had an error
				// explicitly fail the transaction
				$commit = false;
			} // end rollback
			\PSU::db('emergency_notificationt')->CompleteTrans( $commit );
			if( $do_final_commit ) {
				\PSU::db('emergency_notificationt')->CompleteTrans( $commit );
			}
			return $ok;
		} // end if
		return false;
	}//end save
/**
 * returns the phone's current status
 *
 * @return mixed 'opt_nocell', 'opt_notnow' or 'not_verified' status strings;
 *               FALSE when a deferred ("not now") choice has expired and the
 *               user should be re-prompted; TRUE when the phone is fully
 *               confirmed and nothing needs to happen
 */
public function status() {
	if( $this->opt_nocell ) {
		return 'opt_nocell';
	}//end if

	if( $this->opt_notnow ) {
		// the "not now" deferral only lasts five days; once it has
		// expired, return false so the caller re-prompts the user
		$deferral_expires = strtotime( $this->opt_notnow ) + ( 5 * 86400 );
		return ( $deferral_expires < time() ) ? false : 'opt_notnow';
	}//end if

	if( ! $this->last_confirmed_date ) {
		return 'not_verified';
	}//end if

	// everything is setup and good
	return true;
}//end status
/**
 * function to end text services on a phone
 *
 * End-dates the user's active person_phone rows -- optionally limited to a
 * single number -- and then asks Rave to unconfirm the phone.
 *
 * @param $phone \b optional specific phone number to unconfirm; when null,
 *               every active row for this user is end-dated
 * @return mixed the Rave unconfirm result when a Rave user exists, otherwise
 *               the (truthy/falsy) database execute result
 */
public function unconfirm( $phone = null ) {
	$sql = "
	UPDATE person_phone
	SET end_date = NOW(),
	source = ?
	WHERE wp_id = ?
	AND end_date is null
	";
	$args = $this->sanitize();
	// bind order must match the placeholder order above: source, then wp_id
	$args = array(
	$args['source'],
	$args['wp_id'],
	);
	// if we only want to unconfirm a specific phone, specify it here
	if( $phone ) {
	$args[] = $phone;
	$sql .= " AND phone = ?";
	}//end if
	// NOTE(review): this uses the 'emergency_notification' connection name,
	// while save()/_insert() use 'emergency_notificationt' -- confirm which
	// one is correct; they look inconsistent.
	if( $ok = \PSU::db('emergency_notification')->Execute($sql, $args ) ) {
	if( $rave_user = \PSU\Rave\User::get( $this->wp_id ) ) {
	return $rave_user->unconfirmPhone();
	}//end if
	} // end if
	return $ok;
} // end unconfirm
/**
 * insert a phone number into the MySQL table
 *
 * @param $args \b sanitized arguments used for insertion
 * @param $option \b the type of Rave manipulation that is being done: (false, opt_out, deferred, resend)
 * @return mixed ADOdb execute result (falsy on failure)
 */
private function _insert( $args, $option = false ) {
	// start the transaction in case it wasn't in the update
	// ADOdb smart transactions smartly support nesting, so if a StartTrans has already occurred, the following line will be ignored
	\PSU::db('emergency_notificationt')->StartTrans();

	$now = date('Y-m-d H:i:s');

	// at most one of the opt columns receives a timestamp, driven by $option
	$opt_notnow = ( $option === 'deferred' ) ? $now : null;
	$opt_nocell = ( $option === 'opted_out' ) ? $now : null;

	$sql = "
	INSERT INTO person_phone (
	wp_id,
	phone_type,
	source,
	phone,
	opt_notnow,
	opt_nocell
	) VALUES (
	?,
	?,
	?,
	?,
	?,
	?
	)
	";

	// bind values in the same order as the column list above
	$bind = array(
		$args['wp_id'],
		$args['phone_type'],
		$args['source'],
		$args['phone'],
		$opt_notnow,
		$opt_nocell,
	);

	return \PSU::db('emergency_notificationt')->Execute( $sql, $bind );
}//end _insert
/**
 * save the number in Rave
 *
 * @param $phone \b phone number
 * @param $option \b the type of Rave manipulation that is being done: (false, opt_out, deferred, resend)
 * @return mixed result of the Rave user save
 * @throws UnexpectedValueException when the Rave user can neither be loaded nor created
 */
protected function _rave_save( $phone, $option = false ) {
	// fetch the Rave user, creating one on the fly if none exists yet
	$user = \PSU\Rave\User::get( $this->wp_id );
	if( ! $user ) {
		$user = \PSU\Rave\User::create( $this->wp_id );
	}//end if
	if( ! $user ) {
		throw new UnexpectedValueException('Could not load user information for '.$this->wp_id);
	}//end if

	// a missing number, a deferral, or an opt-out all clear the mobile slot
	$opting_out = ( ! $phone || $option == 'deferred' || $option == 'opted_out' );
	if( $opting_out ) {
		unset( $user->mobileNumber1 );
	} else {
		$user->rave_set_phone( $phone );
	}//end else

	// only trigger Rave's confirmation flow on a normal (confirmable) save
	$confirm = ( ! $opting_out && $option != 'no_confirm' );
	return $user->save( $confirm );
}//end _rave_save
}//end class \PSU\Rave\Phone
| bscoleman-psu/plymouth-webapp | lib/PSU/Rave/Phone.php | PHP | mit | 10,486 |
require 'spec_helper'
# View specs for the Atom feed produced by articles/index_atom_feed.atom.builder.
describe "articles/index_atom_feed.atom.builder" do
  before do
    stub_default_blog
  end

  describe "with no items" do
    before do
      assign(:articles, [])
      render
    end

    it "renders the atom header partial" do
      view.should render_template(:partial => "shared/_atom_header")
    end
  end

  # Bodies deliberately contain non-ASCII and markup-significant characters
  # to exercise encoding and escaping in the generated feed.
  describe "rendering articles (with some funny characters)" do
    before do
      article1 = stub_full_article(1.minute.ago)
      article1.body = 'écoute!'
      article2 = stub_full_article(2.minutes.ago)
      article2.body = 'is 4 < 2? no!'
      assign(:articles, [article1, article2])
      render
    end

    it "creates a valid feed" do
      assert_feedvalidator rendered
    end

    it "creates an atom feed with two items" do
      assert_atom10 rendered, 2
    end

    it "renders the article atom partial twice" do
      view.should render_template(:partial => "shared/_atom_item_article",
                                  :count => 2)
    end
  end

  describe "rendering a single article" do
    before do
      @article = stub_full_article
      @article.body = "public info"
      @article.extended = "and more"
      assign(:articles, [@article])
    end

    it "has the correct id" do
      render
      rendered_entry.css("id").first.content.should == "urn:uuid:#{@article.guid}"
    end

    describe "on a blog that shows extended content in feeds" do
      before do
        Blog.default.hide_extended_on_rss = false
        render
      end

      it "shows the body and extended content in the feed" do
        rendered_entry.css("content").first.content.should =~ /public info.*and more/m
      end

      it "does not have a summary element in addition to the content element" do
        rendered_entry.css("summary").should be_empty
      end
    end

    describe "on a blog that hides extended content in feeds" do
      before do
        Blog.default.hide_extended_on_rss = true
        render
      end

      it "shows only the body content in the feed" do
        entry = rendered_entry
        entry.css("content").first.content.should =~ /public info/
        entry.css("content").first.content.should_not =~ /public info.*and more/m
      end

      it "does not have a summary element in addition to the content element" do
        rendered_entry.css("summary").should be_empty
      end
    end

    describe "on a blog that has an RSS description set" do
      before do
        Blog.default.rss_description = true
        Blog.default.rss_description_text = "rss description"
        render
      end

      it "shows the body content in the feed" do
        rendered_entry.css("content").first.content.should =~ /public info/
      end

      it "shows the RSS description in the feed" do
        rendered_entry.css("content").first.content.should =~ /rss description/
      end
    end
  end

  # Password-protected articles must never leak their body/extended text
  # into the feed, regardless of the hide_extended_on_rss setting.
  describe "rendering a password protected article" do
    before do
      @article = stub_full_article
      @article.body = "shh .. it's a secret!"
      @article.extended = "even more secret!"
      @article.stub(:password) { "password" }
      assign(:articles, [@article])
    end

    describe "on a blog that shows extended content in feeds" do
      before do
        Blog.default.hide_extended_on_rss = false
        render
      end

      it "shows only a link to the article" do
        rendered_entry.css("content").first.content.should ==
          "<p>This article is password protected. Please <a href='#{@article.permalink_url}'>fill in your password</a> to read it</p>"
      end

      it "does not have a summary element in addition to the content element" do
        rendered_entry.css("summary").should be_empty
      end

      it "does not show any secret bits anywhere" do
        rendered.should_not =~ /secret/
      end
    end

    describe "on a blog that hides extended content in feeds" do
      before do
        Blog.default.hide_extended_on_rss = true
        render
      end

      it "shows only a link to the article" do
        rendered_entry.css("content").first.content.should ==
          "<p>This article is password protected. Please <a href='#{@article.permalink_url}'>fill in your password</a> to read it</p>"
      end

      it "does not have a summary element in addition to the content element" do
        rendered_entry.css("summary").should be_empty
      end

      it "does not show any secret bits anywhere" do
        rendered.should_not =~ /secret/
      end
    end
  end

  # Helper: parse the rendered feed and return its first <entry> node.
  def rendered_entry
    parsed = Nokogiri::XML.parse(rendered)
    parsed.css("entry").first
  end
end
| faraazkhan/myblog | spec/views/articles/index_atom_feed_spec.rb | Ruby | mit | 4,627 |
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AotCompilerHost} from '@angular/compiler';
import {dirname, resolve} from 'path';
import * as ts from 'typescript';
import {Import, getImportOfIdentifier} from '../../../utils/typescript/imports';
import {getValueSymbolOfDeclaration} from '../../../utils/typescript/symbol';
import {ImportManager} from '../import_manager';
import {getPosixPath} from './path_format';
import {ResolvedExport, getExportSymbolsOfFile} from './source_file_exports';
/**
 * Factory that creates a TypeScript transformer which ensures that
 * referenced identifiers are available at the target file location.
 *
 * Imports cannot be just added as sometimes identifiers collide in the
 * target source file and the identifier needs to be aliased.
 */
export class ImportRewriteTransformerFactory {
  /** Cache of resolved exports per source file so they are only computed once. */
  private sourceFileExports = new Map<ts.SourceFile, ResolvedExport[]>();

  /**
   * @param importManager Manager used to add (and, on collision, alias) imports.
   * @param typeChecker Type checker used to resolve identifier symbols.
   * @param compilerHost Compiler host used to map between file names and module names.
   */
  constructor(
      private importManager: ImportManager, private typeChecker: ts.TypeChecker,
      private compilerHost: AotCompilerHost) {}

  /**
   * Creates a transformer that rewrites every identifier in the visited tree
   * so that it resolves from within `newSourceFile`.
   *
   * @param ctx Transformation context supplied by the TypeScript emit pipeline.
   * @param newSourceFile Source file the transformed nodes will live in.
   */
  create<T extends ts.Node>(ctx: ts.TransformationContext, newSourceFile: ts.SourceFile):
      ts.Transformer<T> {
    const visitNode: ts.Visitor = (node: ts.Node) => {
      if (ts.isIdentifier(node)) {
        // Record the identifier reference and return the new identifier. The identifier
        // name can change if the generated import uses an namespaced import or aliased
        // import identifier (to avoid collisions).
        return this._recordIdentifierReference(node, newSourceFile);
      }
      return ts.visitEachChild(node, visitNode, ctx);
    };
    return (node: T) => ts.visitNode(node, visitNode);
  }

  /**
   * Resolves a single identifier and returns the node that should replace it in
   * the target source file (possibly an aliased/namespaced import identifier).
   * Throws `UnresolvedIdentifierError` when the reference cannot be satisfied.
   */
  private _recordIdentifierReference(node: ts.Identifier, targetSourceFile: ts.SourceFile):
      ts.Node {
    // For object literal elements we don't want to check identifiers that describe the
    // property name. These identifiers do not refer to a value but rather to a property
    // name and therefore don't need to be imported. The exception is that for shorthand
    // property assignments the "name" identifier is both used as value and property name.
    if (ts.isObjectLiteralElementLike(node.parent) &&
        !ts.isShorthandPropertyAssignment(node.parent) && node.parent.name === node) {
      return node;
    }

    const resolvedImport = getImportOfIdentifier(this.typeChecker, node);
    const sourceFile = node.getSourceFile();

    if (resolvedImport) {
      const symbolName = resolvedImport.name;
      const moduleFileName =
          this.compilerHost.moduleNameToFileName(resolvedImport.importModule, sourceFile.fileName);

      // In case the identifier refers to an export in the target source file, we need to use
      // the local identifier in the scope of the target source file. This is necessary because
      // the export could be aliased and the alias is not available to the target source file.
      if (moduleFileName && resolve(moduleFileName) === resolve(targetSourceFile.fileName)) {
        const resolvedExport =
            this._getSourceFileExports(targetSourceFile).find(e => e.exportName === symbolName);
        if (resolvedExport) {
          return resolvedExport.identifier;
        }
      }

      return this.importManager.addImportToSourceFile(
          targetSourceFile, symbolName,
          this._rewriteModuleImport(resolvedImport, targetSourceFile));
    } else {
      let symbol = getValueSymbolOfDeclaration(node, this.typeChecker);

      if (symbol) {
        // If the symbol refers to a shorthand property assignment, we want to resolve the
        // value symbol of the shorthand property assignment. This is necessary because the
        // value symbol is ambiguous for shorthand property assignment identifiers as the
        // identifier resolves to both property name and property value.
        if (symbol.valueDeclaration && ts.isShorthandPropertyAssignment(symbol.valueDeclaration)) {
          symbol = this.typeChecker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration);
        }

        const resolvedExport =
            this._getSourceFileExports(sourceFile).find(e => e.symbol === symbol);
        if (resolvedExport) {
          return this.importManager.addImportToSourceFile(
              targetSourceFile, resolvedExport.exportName,
              getPosixPath(this.compilerHost.fileNameToModuleName(
                  sourceFile.fileName, targetSourceFile.fileName)));
        }
      }
      // The referenced identifier cannot be imported. In that case we throw an exception
      // which can be handled outside of the transformer.
      throw new UnresolvedIdentifierError();
    }
  }

  /**
   * Gets the resolved exports of a given source file. Exports are cached
   * for subsequent calls.
   */
  private _getSourceFileExports(sourceFile: ts.SourceFile): ResolvedExport[] {
    if (this.sourceFileExports.has(sourceFile)) {
      return this.sourceFileExports.get(sourceFile) !;
    }

    const sourceFileExports = getExportSymbolsOfFile(sourceFile, this.typeChecker);
    this.sourceFileExports.set(sourceFile, sourceFileExports);
    return sourceFileExports;
  }

  /** Rewrites a module import to be relative to the target file location. */
  private _rewriteModuleImport(resolvedImport: Import, newSourceFile: ts.SourceFile): string {
    // Absolute/package specifiers can be kept verbatim; only relative ones move.
    if (!resolvedImport.importModule.startsWith('.')) {
      return resolvedImport.importModule;
    }

    const importFilePath = resolvedImport.node.getSourceFile().fileName;
    const resolvedModulePath = resolve(dirname(importFilePath), resolvedImport.importModule);
    const relativeModuleName =
        this.compilerHost.fileNameToModuleName(resolvedModulePath, newSourceFile.fileName);
    return getPosixPath(relativeModuleName);
  }
}
/**
 * Error that will be thrown if a given identifier cannot be resolved.
 * Thrown by the transformer above; intended to be handled outside of the
 * transformer (see `_recordIdentifierReference`).
 */
export class UnresolvedIdentifierError extends Error {}
| Toxicable/angular | packages/core/schematics/migrations/undecorated-classes-with-di/decorator_rewrite/import_rewrite_visitor.ts | TypeScript | mit | 6,085 |
import { ElementRef, EventEmitter, NgZone } from '@angular/core';
import { Content, ScrollEvent } from '../content/content';
import { DomController } from '../../platform/dom-controller';
/**
* @name InfiniteScroll
* @description
* The Infinite Scroll allows you to perform an action when the user
* scrolls a specified distance from the bottom of the page.
*
* The expression assigned to the `infinite` event is called when
* the user scrolls to the specified distance. When this expression
* has finished its tasks, it should call the `complete()` method
* on the infinite scroll instance.
*
* @usage
* ```html
* <ion-content>
*
* <ion-list>
* <ion-item *ngFor="let i of items">{% raw %}{{i}}{% endraw %}</ion-item>
* </ion-list>
*
* <ion-infinite-scroll (ionInfinite)="doInfinite($event)">
* <ion-infinite-scroll-content></ion-infinite-scroll-content>
* </ion-infinite-scroll>
*
* </ion-content>
* ```
*
* ```ts
* @Component({...})
* export class NewsFeedPage {
* items = [];
*
* constructor() {
* for (let i = 0; i < 30; i++) {
* this.items.push( this.items.length );
* }
* }
*
* doInfinite(infiniteScroll) {
* console.log('Begin async operation');
*
* setTimeout(() => {
* for (let i = 0; i < 30; i++) {
* this.items.push( this.items.length );
* }
*
* console.log('Async operation has ended');
* infiniteScroll.complete();
* }, 500);
* }
*
* }
* ```
*
*
* ## Infinite Scroll Content
*
* By default, Ionic uses the infinite scroll spinner that looks
* best for the platform the user is on. However, you can change the
* default spinner or add text by adding properties to the
* `ion-infinite-scroll-content` component.
*
* ```html
* <ion-content>
*
* <ion-infinite-scroll (ionInfinite)="doInfinite($event)">
* <ion-infinite-scroll-content
* loadingSpinner="bubbles"
* loadingText="Loading more data...">
* </ion-infinite-scroll-content>
* </ion-infinite-scroll>
*
* </ion-content>
* ```
*
*
* ## Further Customizing Infinite Scroll Content
*
* The `ion-infinite-scroll` component holds the infinite scroll logic.
* It requires a child component in order to display the content.
* Ionic uses `ion-infinite-scroll-content` by default. This component
* displays the infinite scroll and changes the look depending
* on the infinite scroll's state. Separating these components allows
* developers to create their own infinite scroll content components.
* You could replace our default content with custom SVG or CSS animations.
*
* @demo /docs/v2/demos/src/infinite-scroll/
*
*/
/**
 * Declaration of the InfiniteScroll component's public and internal surface.
 * (Generated typings; usage examples are in the comment block above.)
 */
export declare class InfiniteScroll {
    private _content;
    private _zone;
    private _elementRef;
    private _dom;
    /** @internal bookkeeping for scroll checks — presumably a timestamp; confirm in _onScroll. */
    _lastCheck: number;
    /** @internal highest observed scroll position — presumably in pixels. */
    _highestY: number;
    /** @internal scroll listener handle, cleared by _setListeners(false). */
    _scLsn: any;
    /** @internal threshold as authored, e.g. `'15%'` or `'100px'`. */
    _thr: string;
    /** @internal threshold resolved to pixels (when given in px). */
    _thrPx: number;
    /** @internal threshold resolved to percent (when given as a percentage). */
    _thrPc: number;
    /** @internal true once ngAfterContentInit has run. */
    _init: boolean;
    /**
     * @internal
     */
    state: string;
    /**
     * @input {string} The threshold distance from the bottom
     * of the content to call the `infinite` output event when scrolled.
     * The threshold value can be either a percent, or
     * in pixels. For example, use the value of `10%` for the `infinite`
     * output event to get called when the user has scrolled 10%
     * from the bottom of the page. Use the value `100px` when the
     * scroll is within 100 pixels from the bottom of the page.
     * Default is `15%`.
     */
    threshold: string;
    /**
     * @input {boolean} Whether or not the infinite scroll should be
     * enabled. Setting this to `false` will remove scroll event listeners
     * and hide the display.
     */
    enabled: boolean;
    /**
     * @output {event} Emitted when the scroll reaches
     * the threshold distance. From within your infinite handler,
     * you must call the infinite scroll's `complete()` method when
     * your async operation has completed.
     */
    ionInfinite: EventEmitter<InfiniteScroll>;
    constructor(_content: Content, _zone: NgZone, _elementRef: ElementRef, _dom: DomController);
    /** @internal scroll handler wired up by _setListeners. */
    _onScroll(ev: ScrollEvent): number;
    /**
     * Call `complete()` within the `infinite` output event handler when
     * your async operation has completed. For example, the `loading`
     * state is while the app is performing an asynchronous operation,
     * such as receiving more data from an AJAX request to add more items
     * to a data list. Once the data has been received and UI updated, you
     * then call this method to signify that the loading has completed.
     * This method will change the infinite scroll's state from `loading`
     * to `enabled`.
     */
    complete(): void;
    /**
     * Call `enable(false)` to disable the infinite scroll from actively
     * trying to receive new data while scrolling. This method is useful
     * when it is known that there is no more data that can be added, and
     * the infinite scroll is no longer needed.
     * @param {boolean} shouldEnable If the infinite scroll should be
     * enabled or not. Setting to `false` will remove scroll event listeners
     * and hide the display.
     */
    enable(shouldEnable: boolean): void;
    /**
     * @private
     */
    _setListeners(shouldListen: boolean): void;
    /**
     * @private
     */
    ngAfterContentInit(): void;
    /**
     * @private
     */
    ngOnDestroy(): void;
}
| Spect-AR/Spect-AR | node_modules/node_modules/ionic-angular/umd/components/infinite-scroll/infinite-scroll.d.ts | TypeScript | mit | 5,443 |
'use strict';
/* Controllers */
app
  // Flot Chart controller: exposes static demo series plus a random-walk
  // generator on $scope for the various flot chart directives.
  .controller('FlotChartDemoCtrl', ['$scope', function ($scope) {
    // Static [x, y] demo series used by the line/area charts.
    $scope.d = [ [1,6.5],[2,6.5],[3,7],[4,8],[5,7.5],[6,7],[7,6.8],[8,7],[9,7.2],[10,7],[11,6.8],[12,7] ];
    $scope.d0_1 = [ [0,7],[1,6.5],[2,12.5],[3,7],[4,9],[5,6],[6,11],[7,6.5],[8,8],[9,7] ];
    $scope.d0_2 = [ [0,4],[1,4.5],[2,7],[3,4.5],[4,3],[5,3.5],[6,6],[7,3],[8,4],[9,3] ];
    $scope.d1_1 = [ [10, 120], [20, 70], [30, 70], [40, 60] ];
    $scope.d1_2 = [ [10, 50], [20, 60], [30, 90], [40, 35] ];
    $scope.d1_3 = [ [10, 80], [20, 40], [30, 30], [40, 20] ];

    // One period-ish of a sine wave, rounded to two decimals.
    $scope.d2 = [];
    for (var i = 0; i < 20; ++i) {
      $scope.d2.push([i, Math.round(Math.sin(i) * 100) / 100]);
    }

    // Labeled slices for the pie/donut chart.
    $scope.d3 = [
      { label: "iPhone5S", data: 40 },
      { label: "iPad Mini", data: 10 },
      { label: "iPad Mini Retina", data: 20 },
      { label: "iPhone4S", data: 12 },
      { label: "iPad Air", data: 18 }
    ];

    // Swap in the second series so bound charts redraw with new data.
    $scope.refreshData = function () {
      $scope.d0_1 = $scope.d0_2;
    };

    /**
     * Build a 150-point pseudo-random walk clamped to [0, 100], starting at 50.
     * @returns {Array} array of [x, y] pairs with x = 0..149
     */
    $scope.getRandomData = function () {
      var data = [];
      var totalPoints = 150;
      // NOTE: the original also sliced the first element off `data`, but
      // `data` is always empty at that point, so that dead code was removed.
      while (data.length < totalPoints) {
        var prev = data.length > 0 ? data[data.length - 1] : 50;
        var y = prev + Math.random() * 10 - 5;
        if (y < 0) {
          y = 0;
        } else if (y > 100) {
          y = 100;
        }
        data.push(Math.round(y * 100) / 100);
      }

      // Zip the generated y values with the x values
      var res = [];
      for (var i = 0; i < data.length; ++i) {
        res.push([i, data[i]]);
      }
      return res;
    };

    $scope.d4 = $scope.getRandomData();
  }]);
| tahirakhan/bilal-cattle-farm | public/src/js/controllers/chart.js | JavaScript | mit | 1,725 |
<?php
/**
* CodeIgniter
*
* An open source application development framework for PHP
*
* This content is released under the MIT License (MIT)
*
* Copyright (c) 2014 - 2015, British Columbia Institute of Technology
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @package CodeIgniter
* @author EllisLab Dev Team
* @copyright Copyright (c) 2008 - 2014, EllisLab, Inc. (http://ellislab.com/)
* @copyright Copyright (c) 2014 - 2015, British Columbia Institute of Technology (http://bcit.ca/)
* @license http://opensource.org/licenses/MIT MIT License
* @link http://codeigniter.com
* @since Version 3.0.0
* @filesource
*/
defined('BASEPATH') OR exit('No direct script access allowed');
/**
 * CodeIgniter Session Memcached Driver
 *
 * @package	CodeIgniter
 * @subpackage	Libraries
 * @category	Sessions
 * @author	Andrey Andreev
 * @link	http://codeigniter.com/user_guide/libraries/sessions.html
 */
class CI_Session_memcached_driver extends CI_Session_driver implements SessionHandlerInterface {

	/**
	 * Memcached instance
	 *
	 * @var	Memcached
	 */
	protected $_memcached;

	/**
	 * Key prefix
	 *
	 * @var	string
	 */
	protected $_key_prefix = 'ci_session:';

	/**
	 * Lock key
	 *
	 * @var	string
	 */
	protected $_lock_key;

	// ------------------------------------------------------------------------

	/**
	 * Class constructor
	 *
	 * @param	array	$params	Configuration parameters
	 * @return	void
	 */
	public function __construct(&$params)
	{
		parent::__construct($params);

		if (empty($this->_config['save_path']))
		{
			log_message('error', 'Session: No Memcached save path configured.');
		}

		// Scope session keys per client IP when IP matching is enabled
		if ($this->_config['match_ip'] === TRUE)
		{
			$this->_key_prefix .= $_SERVER['REMOTE_ADDR'].':';
		}
	}

	// ------------------------------------------------------------------------

	/**
	 * Open
	 *
	 * Sanitizes save_path and initializes connections.
	 *
	 * @param	string	$save_path	Server path(s)
	 * @param	string	$name		Session cookie name, unused
	 * @return	bool
	 */
	public function open($save_path, $name)
	{
		$this->_memcached = new Memcached();
		$this->_memcached->setOption(Memcached::OPT_BINARY_PROTOCOL, TRUE); // required for touch() usage

		$server_list = array();
		foreach ($this->_memcached->getServerList() as $server)
		{
			$server_list[] = $server['host'].':'.$server['port'];
		}

		// save_path format: host:port[:weight][,host:port[:weight],...]
		if ( ! preg_match_all('#,?([^,:]+)\:(\d{1,5})(?:\:(\d+))?#', $this->_config['save_path'], $matches, PREG_SET_ORDER))
		{
			$this->_memcached = NULL;
			log_message('error', 'Session: Invalid Memcached save path format: '.$this->_config['save_path']);
			return FALSE;
		}

		foreach ($matches as $match)
		{
			// If Memcached already has this server (or if the port is invalid), skip it
			if (in_array($match[1].':'.$match[2], $server_list, TRUE))
			{
				log_message('debug', 'Session: Memcached server pool already has '.$match[1].':'.$match[2]);
				continue;
			}

			if ( ! $this->_memcached->addServer($match[1], $match[2], isset($match[3]) ? $match[3] : 0))
			{
				log_message('error', 'Could not add '.$match[1].':'.$match[2].' to Memcached server pool.');
			}
			else
			{
				$server_list[] = $match[1].':'.$match[2];
			}
		}

		if (empty($server_list))
		{
			log_message('error', 'Session: Memcached server pool is empty.');
			return FALSE;
		}

		return TRUE;
	}

	// ------------------------------------------------------------------------

	/**
	 * Read
	 *
	 * Reads session data and acquires a lock
	 *
	 * @param	string	$session_id	Session ID
	 * @return	string	Serialized session data
	 */
	public function read($session_id)
	{
		if (isset($this->_memcached) && $this->_get_lock($session_id))
		{
			// Needed by write() to detect session_regenerate_id() calls
			$this->_session_id = $session_id;

			$session_data = (string) $this->_memcached->get($this->_key_prefix.$session_id);
			// Fingerprint lets write() skip the store when data is unchanged
			$this->_fingerprint = md5($session_data);
			return $session_data;
		}

		return FALSE;
	}

	// ------------------------------------------------------------------------

	/**
	 * Write
	 *
	 * Writes (create / update) session data
	 *
	 * @param	string	$session_id	Session ID
	 * @param	string	$session_data	Serialized session data
	 * @return	bool
	 */
	public function write($session_id, $session_data)
	{
		if ( ! isset($this->_memcached))
		{
			return FALSE;
		}
		// Was the ID regenerated?
		elseif ($session_id !== $this->_session_id)
		{
			if ( ! $this->_release_lock() OR ! $this->_get_lock($session_id))
			{
				return FALSE;
			}

			$this->_fingerprint = md5('');
			$this->_session_id = $session_id;
		}

		if (isset($this->_lock_key))
		{
			// Refresh the lock so it doesn't expire mid-write
			$this->_memcached->replace($this->_lock_key, time(), 300);

			if ($this->_fingerprint !== ($fingerprint = md5($session_data)))
			{
				if ($this->_memcached->set($this->_key_prefix.$session_id, $session_data, $this->_config['expiration']))
				{
					$this->_fingerprint = $fingerprint;
					return TRUE;
				}

				return FALSE;
			}

			// Data unchanged: just slide the expiration window
			return $this->_memcached->touch($this->_key_prefix.$session_id, $this->_config['expiration']);
		}

		return FALSE;
	}

	// ------------------------------------------------------------------------

	/**
	 * Close
	 *
	 * Releases locks and closes connection.
	 *
	 * @return	bool
	 */
	public function close()
	{
		if (isset($this->_memcached))
		{
			isset($this->_lock_key) && $this->_memcached->delete($this->_lock_key);
			if ( ! $this->_memcached->quit())
			{
				return FALSE;
			}

			$this->_memcached = NULL;
			return TRUE;
		}

		return FALSE;
	}

	// ------------------------------------------------------------------------

	/**
	 * Destroy
	 *
	 * Destroys the current session.
	 *
	 * @param	string	$session_id	Session ID
	 * @return	bool
	 */
	public function destroy($session_id)
	{
		if (isset($this->_memcached, $this->_lock_key))
		{
			$this->_memcached->delete($this->_key_prefix.$session_id);
			return $this->_cookie_destroy();
		}

		return FALSE;
	}

	// ------------------------------------------------------------------------

	/**
	 * Garbage Collector
	 *
	 * Deletes expired sessions
	 *
	 * @param	int 	$maxlifetime	Maximum lifetime of sessions
	 * @return	bool
	 */
	public function gc($maxlifetime)
	{
		// Not necessary, Memcached takes care of that.
		return TRUE;
	}

	// ------------------------------------------------------------------------

	/**
	 * Get lock
	 *
	 * Acquires an (emulated) lock.
	 *
	 * @param	string	$session_id	Session ID
	 * @return	bool
	 */
	protected function _get_lock($session_id)
	{
		if (isset($this->_lock_key))
		{
			return $this->_memcached->replace($this->_lock_key, time(), 300);
		}

		// 30 attempts to obtain a lock, in case another request already has it
		//
		// BUGFIX: the previous do/while with a post-incremented counter left
		// $attempt at 31 after exhausting all retries, so the failure check
		// below never fired and the driver proceeded as if it held the lock.
		// A counted for-loop keeps $attempt === 30 exactly when we gave up.
		$lock_key = $this->_key_prefix.$session_id.':lock';
		for ($attempt = 0; $attempt < 30; $attempt++)
		{
			if ($this->_memcached->get($lock_key))
			{
				// Another request holds the lock; wait a second and retry
				sleep(1);
				continue;
			}

			if ( ! $this->_memcached->set($lock_key, time(), 300))
			{
				log_message('error', 'Session: Error while trying to obtain lock for '.$this->_key_prefix.$session_id);
				return FALSE;
			}

			$this->_lock_key = $lock_key;
			break;
		}

		if ($attempt === 30)
		{
			log_message('error', 'Session: Unable to obtain lock for '.$this->_key_prefix.$session_id.' after 30 attempts, aborting.');
			return FALSE;
		}

		$this->_lock = TRUE;
		return TRUE;
	}

	// ------------------------------------------------------------------------

	/**
	 * Release lock
	 *
	 * Releases a previously acquired lock
	 *
	 * @return	bool
	 */
	protected function _release_lock()
	{
		if (isset($this->_memcached, $this->_lock_key) && $this->_lock)
		{
			if ( ! $this->_memcached->delete($this->_lock_key) && $this->_memcached->getResultCode() !== Memcached::RES_NOTFOUND)
			{
				log_message('error', 'Session: Error while trying to free lock for '.$this->_lock_key);
				return FALSE;
			}

			$this->_lock_key = NULL;
			$this->_lock = FALSE;
		}

		return TRUE;
	}

}
| binshen/oa | system/libraries/Session/drivers/Session_memcached_driver.php | PHP | mit | 9,332 |
/*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.jaxp.validation;
import com.sun.org.apache.xerces.internal.xni.parser.XMLErrorHandler;
import com.sun.org.apache.xerces.internal.xni.parser.XMLParseException;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
/**
 * Receives errors through the Xerces {@link XMLErrorHandler} callback
 * interface and forwards them to a SAX {@link ErrorHandler}.
 *
 * @author
 *     Kohsuke Kawaguchi (kohsuke.kawaguchi@sun.com)
 */
public abstract class ErrorHandlerAdaptor implements XMLErrorHandler
{
    /** Set to true once any error or fatal error has been reported. */
    private boolean hadError = false;

    /**
     * Returns whether an error was reported since the last invocation of
     * the {@link #reset()} method.
     */
    public boolean hadError() {
        return hadError;
    }

    /** Resets the error flag. */
    public void reset() {
        hadError = false;
    }

    /**
     * Implemented by the derived class to return the actual
     * {@link ErrorHandler} to which errors are sent.
     *
     * @return always return non-null valid object.
     */
    protected abstract ErrorHandler getErrorHandler();

    public void fatalError( String domain, String key, XMLParseException e ) {
        hadError = true;
        try {
            getErrorHandler().fatalError( Util.toSAXParseException(e) );
        } catch( SAXException cause ) {
            // SAX handlers may throw checked SAXExceptions; tunnel them out
            // of the XNI callback as an unchecked wrapper.
            throw new WrappedSAXException(cause);
        }
    }

    public void error( String domain, String key, XMLParseException e ) {
        hadError = true;
        try {
            getErrorHandler().error( Util.toSAXParseException(e) );
        } catch( SAXException cause ) {
            throw new WrappedSAXException(cause);
        }
    }

    public void warning( String domain, String key, XMLParseException e ) {
        // Warnings are forwarded but deliberately do not flip the error flag.
        try {
            getErrorHandler().warning( Util.toSAXParseException(e) );
        } catch( SAXException cause ) {
            throw new WrappedSAXException(cause);
        }
    }
}
| YouDiSN/OpenJDK-Research | jdk9/jaxp/src/java.xml/share/classes/com/sun/org/apache/xerces/internal/jaxp/validation/ErrorHandlerAdaptor.java | Java | gpl-2.0 | 2,810 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
lasoverlapPro.py
---------------------
Date : October 2014
Copyright : (C) 2014 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Martin Isenburg'
__date__ = 'October 2014'
__copyright__ = '(C) 2014, Martin Isenburg'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from LAStoolsUtils import LAStoolsUtils
from LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterSelection
class lasoverlapPro(LAStoolsAlgorithm):
    """Folder (production) variant of the LAStools 'lasoverlap' check.

    Computes flight-line overlap and, optionally, overlap/difference
    rasters for all LAS/LAZ files in an input folder.
    """

    CHECK_STEP = "CHECK_STEP"
    ATTRIBUTE = "ATTRIBUTE"
    OPERATION = "OPERATION"
    ATTRIBUTES = ["elevation", "intensity", "number_of_returns", "scan_angle_abs", "density"]
    OPERATIONS = ["lowest", "highest", "average"]
    CREATE_OVERLAP_RASTER = "CREATE_OVERLAP_RASTER"
    CREATE_DIFFERENCE_RASTER = "CREATE_DIFFERENCE_RASTER"

    def defineCharacteristics(self):
        """Declare algorithm name/group and build its parameter GUI."""
        self.name = "lasoverlapPro"
        self.group = "LAStools Production"
        self.addParametersPointInputFolderGUI()
        self.addParametersFilesAreFlightlinesGUI()
        self.addParametersFilter1ReturnClassFlagsGUI()
        self.addParameter(ParameterNumber(
            lasoverlapPro.CHECK_STEP,
            self.tr("size of grid used for overlap check"), 0, None, 2.0))
        self.addParameter(ParameterSelection(
            lasoverlapPro.ATTRIBUTE,
            self.tr("attribute to check"), lasoverlapPro.ATTRIBUTES, 0))
        self.addParameter(ParameterSelection(
            lasoverlapPro.OPERATION,
            self.tr("operation on attribute per cell"), lasoverlapPro.OPERATIONS, 0))
        self.addParameter(ParameterBoolean(
            lasoverlapPro.CREATE_OVERLAP_RASTER,
            self.tr("create overlap raster"), True))
        self.addParameter(ParameterBoolean(
            lasoverlapPro.CREATE_DIFFERENCE_RASTER,
            self.tr("create difference raster"), True))
        self.addParametersOutputDirectoryGUI()
        self.addParametersOutputAppendixGUI()
        self.addParametersRasterOutputFormatGUI()
        self.addParametersRasterOutputGUI()
        self.addParametersAdditionalGUI()
        self.addParametersCoresGUI()
        self.addParametersVerboseGUI()

    def processAlgorithm(self, progress):
        """Assemble the lasoverlap command line from dialog values and run it."""
        cmd = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasoverlap")]
        self.addParametersVerboseCommands(cmd)
        self.addParametersPointInputFolderCommands(cmd)
        self.addParametersFilesAreFlightlinesCommands(cmd)
        self.addParametersFilter1ReturnClassFlagsCommands(cmd)
        step = self.getParameterValue(lasoverlapPro.CHECK_STEP)
        if step != 0.0:
            # only pass -step when a non-zero grid size was chosen
            cmd += ["-step", str(step)]
        cmd.append("-values")
        attribute = self.getParameterValue(lasoverlapPro.ATTRIBUTE)
        if attribute != 0:
            cmd.append("-" + lasoverlapPro.ATTRIBUTES[attribute])
        operation = self.getParameterValue(lasoverlapPro.OPERATION)
        if operation != 0:
            cmd.append("-" + lasoverlapPro.OPERATIONS[operation])
        if not self.getParameterValue(lasoverlapPro.CREATE_OVERLAP_RASTER):
            cmd.append("-no_over")
        if not self.getParameterValue(lasoverlapPro.CREATE_DIFFERENCE_RASTER):
            cmd.append("-no_diff")
        self.addParametersOutputDirectoryCommands(cmd)
        self.addParametersOutputAppendixCommands(cmd)
        self.addParametersRasterOutputFormatCommands(cmd)
        self.addParametersRasterOutputCommands(cmd)
        self.addParametersAdditionalCommands(cmd)
        self.addParametersCoresCommands(cmd)
        LAStoolsUtils.runLAStools(cmd, progress)
| pavlova-marina/QGIS | python/plugins/processing/algs/lidar/lastools/lasoverlapPro.py | Python | gpl-2.0 | 4,596 |
/*
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.
*
* For the full copyright and license information, please read the
* LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
/***************************************************
* Color utilities
***************************************************/
define('TYPO3/CMS/Rtehtmlarea/HTMLArea/Util/String',
	['TYPO3/CMS/Rtehtmlarea/HTMLArea/UserAgent/UserAgent'],
	function (UserAgent) {

	// Lazily create the hidden "ruler" span used to measure rendered text width
	if (!document.getElementById('htmlarea-ruler')) {
		// Install the css rule that keeps the ruler invisible and unwrapped
		var rulerSelector = '#htmlarea-ruler';
		var rulerStyle = 'visibility: hidden; white-space: nowrap;';
		var sheet = document.styleSheets[0];
		if (UserAgent.isIEBeforeIE9) {
			sheet.addRule(rulerSelector, rulerStyle);
		} else {
			try {
				sheet.insertRule(rulerSelector + ' { ' + rulerStyle + ' }', sheet.cssRules.length);
			} catch (e) {}
		}
		// Attach the ruler element to the document
		var rulerElement = document.createElement('span');
		rulerElement.setAttribute('id', 'htmlarea-ruler');
		document.body.appendChild(rulerElement);
	}

	/**
	 * Measure the rendered width of this string, in pixels
	 */
	String.prototype.visualLength = function() {
		var measure = document.getElementById('htmlarea-ruler');
		measure.innerHTML = this;
		return measure.offsetWidth;
	};

	/**
	 * Truncate this string with a trailing "..." so that its rendered
	 * width does not exceed the given pixel length
	 */
	String.prototype.ellipsis = function(length) {
		var candidate = this;
		var result = this;
		if (candidate.visualLength() > length) {
			result = candidate + "...";
			while (result.visualLength() > length) {
				candidate = candidate.substring(0, candidate.length - 1);
				result = candidate + "...";
			}
		}
		return result;
	};
});
| Loopshape/Portfolio | typo3_src-7.3.1/typo3/sysext/rtehtmlarea/Resources/Public/JavaScript/HTMLArea/Util/String.js | JavaScript | gpl-2.0 | 1,882 |
/*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
// Bulgarian ("bg") UI strings for the CKEditor "codesnippet" plugin (minified build).
CKEDITOR.plugins.setLang("codesnippet","bg",{button:"Въвеждане на блок с код",codeContents:"Съдържание на кода",emptySnippetError:"Блока с код не може да бъде празен.",language:"Език",title:"Блок с код",pathName:"блок с код"}); | maddy2101/TYPO3.CMS | typo3/sysext/rte_ckeditor/Resources/Public/JavaScript/Contrib/plugins/codesnippet/lang/bg.js | JavaScript | gpl-2.0 | 477 |
<?php
// TCA overrides for tt_content shipped by EXT:css_styled_content.
if (!defined('TYPO3_MODE')) {
	die('Access denied.');
}
// Add flexform
\TYPO3\CMS\Core\Utility\ExtensionManagementUtility::addPiFlexFormValue('*', 'FILE:EXT:css_styled_content/flexform_ds.xml', 'table');
// Rebuild the backend editing form ("showitem") for the "table" content type.
$GLOBALS['TCA']['tt_content']['types']['table']['showitem'] = 'CType;;4;;1-1-1, hidden, header;;3;;2-2-2, linkToTop;;;;4-4-4,
	--div--;LLL:EXT:cms/locallang_ttc.xlf:CType.I.5, layout;;10;;3-3-3, cols, bodytext;;9;nowrap:wizards[table], pi_flexform,
	--div--;LLL:EXT:cms/locallang_tca.xlf:pages.tabs.access, starttime, endtime, fe_group';
// Relabel section_frame item 0 and redefine item 9 (value 66) with extension-local labels.
$GLOBALS['TCA']['tt_content']['columns']['section_frame']['config']['items'][0] = array(
	'LLL:EXT:css_styled_content/locallang_db.xlf:tt_content.tx_cssstyledcontent_section_frame.I.0', '0'
);
$GLOBALS['TCA']['tt_content']['columns']['section_frame']['config']['items'][9] = array(
	'LLL:EXT:css_styled_content/locallang_db.xlf:tt_content.tx_cssstyledcontent_section_frame.I.9', '66'
);
// Register the static TypoScript templates (one generic set plus per-TYPO3-version variants).
\TYPO3\CMS\Core\Utility\ExtensionManagementUtility::addStaticFile('css_styled_content', 'static/', 'CSS Styled Content');
\TYPO3\CMS\Core\Utility\ExtensionManagementUtility::addStaticFile('css_styled_content', 'static/v4.5/', 'CSS Styled Content TYPO3 v4.5');
\TYPO3\CMS\Core\Utility\ExtensionManagementUtility::addStaticFile('css_styled_content', 'static/v4.6/', 'CSS Styled Content TYPO3 v4.6');
\TYPO3\CMS\Core\Utility\ExtensionManagementUtility::addStaticFile('css_styled_content', 'static/v4.7/', 'CSS Styled Content TYPO3 v4.7');
\TYPO3\CMS\Core\Utility\ExtensionManagementUtility::addStaticFile('css_styled_content', 'static/v6.0/', 'CSS Styled Content TYPO3 v6.0');
\TYPO3\CMS\Core\Utility\ExtensionManagementUtility::addStaticFile('css_styled_content', 'static/v6.1/', 'CSS Styled Content TYPO3 v6.1');
| demonege/sutogo | typo3/sysext/css_styled_content/Configuration/TCA/Overrides/tt_content.php | PHP | gpl-2.0 | 1,756 |
/*
Copyright (c) 2000, 2012, Oracle and/or its affiliates. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */
#define MYSQL_SERVER 1
#include "sql_priv.h"
#include "probes_mysql.h"
#include "key.h" // key_copy
#include "sql_plugin.h"
#include <m_ctype.h>
#include <my_bit.h>
#include <myisampack.h>
#include "ha_myisam.h"
#include <stdarg.h>
#include "myisamdef.h"
#include "rt_index.h"
#include "sql_table.h" // tablename_to_filename
#include "sql_class.h" // THD
#include <algorithm>
using std::min;
using std::max;
/* Bitmask of active recovery options; values map to myisam_recover_typelib. */
ulonglong myisam_recover_options;
static ulong opt_myisam_block_size;
/* Interface to mysqld, to check system tables supported by SE */
static bool myisam_is_supported_system_table(const char *db,
                                             const char *table_name,
                                             bool is_sql_layer_system_table);
/* bits in myisam_recover_options */
const char *myisam_recover_names[] =
{ "DEFAULT", "BACKUP", "FORCE", "QUICK", "OFF", NullS};
TYPELIB myisam_recover_typelib= {array_elements(myisam_recover_names)-1,"",
                                 myisam_recover_names, NULL};
/* Names for the myisam_stats_method session variable (see MYSQL_THDVAR_ENUM below). */
const char *myisam_stats_method_names[] = {"nulls_unequal", "nulls_equal",
                                           "nulls_ignored", NullS};
TYPELIB myisam_stats_method_typelib= {
  array_elements(myisam_stats_method_names) - 1, "",
  myisam_stats_method_names, NULL};
/* Plugin system/session variable definitions exposed as myisam_* options. */
static MYSQL_SYSVAR_ULONG(block_size, opt_myisam_block_size,
  PLUGIN_VAR_NOSYSVAR | PLUGIN_VAR_RQCMDARG,
  "Block size to be used for MyISAM index pages", NULL, NULL,
  MI_KEY_BLOCK_LENGTH, MI_MIN_KEY_BLOCK_LENGTH, MI_MAX_KEY_BLOCK_LENGTH,
  MI_MIN_KEY_BLOCK_LENGTH);
static MYSQL_SYSVAR_ULONG(data_pointer_size, myisam_data_pointer_size,
  PLUGIN_VAR_RQCMDARG, "Default pointer size to be used for MyISAM tables",
  NULL, NULL, 6, 2, 7, 1);
#define MB (1024*1024)
static MYSQL_SYSVAR_ULONGLONG(max_sort_file_size, myisam_max_temp_length,
  PLUGIN_VAR_RQCMDARG, "Don't use the fast sort index method to created "
  "index if the temporary file would get bigger than this", NULL, NULL,
  LONG_MAX/MB*MB, 0, MAX_FILE_SIZE, MB);
static MYSQL_SYSVAR_SET(recover_options, myisam_recover_options,
  PLUGIN_VAR_OPCMDARG|PLUGIN_VAR_READONLY,
  "Syntax: myisam-recover-options[=option[,option...]], where option can be "
  "DEFAULT, BACKUP, FORCE, QUICK, or OFF",
  NULL, NULL, 0, &myisam_recover_typelib);
static MYSQL_THDVAR_ULONG(repair_threads, PLUGIN_VAR_RQCMDARG,
  "If larger than 1, when repairing a MyISAM table all indexes will be "
  "created in parallel, with one thread per index. The value of 1 "
  "disables parallel repair", NULL, NULL,
  1, 1, ULONG_MAX, 1);
static MYSQL_THDVAR_ULONGLONG(sort_buffer_size, PLUGIN_VAR_RQCMDARG,
  "The buffer that is allocated when sorting the index when doing "
  "a REPAIR or when creating indexes with CREATE INDEX or ALTER TABLE", NULL, NULL,
  8192 * 1024, (long) (MIN_SORT_BUFFER + MALLOC_OVERHEAD), SIZE_T_MAX, 1);
static MYSQL_SYSVAR_BOOL(use_mmap, opt_myisam_use_mmap, PLUGIN_VAR_NOCMDARG,
  "Use memory mapping for reading and writing MyISAM tables", NULL, NULL, FALSE);
static MYSQL_SYSVAR_ULONGLONG(mmap_size, myisam_mmap_size,
  PLUGIN_VAR_RQCMDARG|PLUGIN_VAR_READONLY, "Restricts the total memory "
  "used for memory mapping of MySQL tables", NULL, NULL,
  SIZE_T_MAX, MEMMAP_EXTRA_MARGIN, SIZE_T_MAX, 1);
static MYSQL_THDVAR_ENUM(stats_method, PLUGIN_VAR_RQCMDARG,
  "Specifies how MyISAM index statistics collection code should "
  "treat NULLs. Possible values of name are NULLS_UNEQUAL (default "
  "behavior for 4.1 and later), NULLS_EQUAL (emulate 4.0 behavior), "
  "and NULLS_IGNORED", NULL, NULL,
  MI_STATS_METHOD_NULLS_NOT_EQUAL, &myisam_stats_method_typelib);
#ifndef DBUG_OFF
/**
  Causes the thread to wait in a spin lock for a query kill signal.
  This function is used by the test frame work to identify race conditions.
  The signal is caught and ignored and the thread is not killed.
*/
static void debug_wait_for_kill(const char *info)
{
  DBUG_ENTER("debug_wait_for_kill");
  const char *prev_info;
  THD *thd;
  thd= current_thd;
  prev_info= thd_proc_info(thd, info);  // publish 'info' as the visible thread state
  while(!thd->killed)
    my_sleep(1000);                     // poll (1ms) until KILL is flagged on the THD
  DBUG_PRINT("info", ("Exit debug_wait_for_kill"));
  thd_proc_info(thd, prev_info);        // restore the previous proc_info
  DBUG_VOID_RETURN;
}
#endif
/*****************************************************************************
** MyISAM tables
*****************************************************************************/
/**
  handlerton factory callback: allocate a new ha_myisam handler for the
  given table share on the supplied MEM_ROOT (freed with the root, not delete).
*/
static handler *myisam_create_handler(handlerton *hton,
                                       TABLE_SHARE *table,
                                       MEM_ROOT *mem_root)
{
  return new (mem_root) ha_myisam(hton, table);
}
/**
  Collect errors printed by mi_check routines and route them either to the
  client (as a CHECK/REPAIR result-set row) or to the server error log.

  @param param     check context; supplies db/table name, flags and thd
  @param msg_type  "error", "info" or "warning" (becomes the Msg_type column)
  @param fmt       printf-style format string
  @param args      arguments for fmt

  Fix: the two mutex calls below operated on the mojibake token
  "¶m->print_msg_mutex" (HTML-entity corruption of "&param"); restored
  to take the address of param's print_msg_mutex.
*/
static void mi_check_print_msg(MI_CHECK *param, const char* msg_type,
                               const char *fmt, va_list args)
{
  THD* thd = (THD*)param->thd;
  Protocol *protocol= thd->protocol;
  size_t length, msg_length;
  char msgbuf[MI_MAX_MSG_BUF];
  char name[NAME_LEN*2+2];
  msg_length= my_vsnprintf(msgbuf, sizeof(msgbuf), fmt, args);
  msgbuf[sizeof(msgbuf) - 1] = 0; // healthy paranoia
  DBUG_PRINT(msg_type,("message: %s",msgbuf));
  /* No client connection (e.g. bootstrap/background): log instead. */
  if (!thd->vio_ok())
  {
    sql_print_error("%s", msgbuf);
    return;
  }
  /* During auto/safe repair the message must become a real error. */
  if (param->testflag & (T_CREATE_MISSING_KEYS | T_SAFE_REPAIR |
                         T_AUTO_REPAIR))
  {
    my_message(ER_NOT_KEYFILE,msgbuf,MYF(MY_WME));
    return;
  }
  length=(uint) (strxmov(name, param->db_name,".",param->table_name,NullS) -
                 name);
  /*
    TODO: switch from protocol to push_warning here. The main reason we didn't
    it yet is parallel repair. Due to following trace:
    mi_check_print_msg/push_warning/sql_alloc/my_pthread_getspecific_ptr.
    Also we likely need to lock mutex here (in both cases with protocol and
    push_warning).
  */
  /* Serialize result-set rows when several repair threads report at once. */
  if (param->need_print_msg_lock)
    mysql_mutex_lock(&param->print_msg_mutex);
  protocol->prepare_for_resend();
  protocol->store(name, length, system_charset_info);
  protocol->store(param->op_name, system_charset_info);
  protocol->store(msg_type, system_charset_info);
  protocol->store(msgbuf, msg_length, system_charset_info);
  if (protocol->write())
    sql_print_error("Failed on my_net_write, writing to stderr instead: %s\n",
                    msgbuf);
  if (param->need_print_msg_lock)
    mysql_mutex_unlock(&param->print_msg_mutex);
  return;
}
/*
  Convert TABLE object to MyISAM key and column definition

  SYNOPSIS
    table2myisam()
      table_arg   in   TABLE object.
      keydef_out  out  MyISAM key definition.
      recinfo_out out  MyISAM column definition.
      records_out out  Number of MI_COLUMNDEF entries produced.

  DESCRIPTION
    This function will allocate and initialize MyISAM key and column
    definition for further use in mi_create or for a check for underlying
    table conformance in merge engine.

    The caller needs to free *recinfo_out after use. Since *recinfo_out
    and *keydef_out are allocated with a my_multi_malloc, *keydef_out
    is freed automatically when *recinfo_out is freed.

  RETURN VALUE
    0   OK
    !0  error code (HA_ERR_OUT_OF_MEM on allocation failure)
*/
int table2myisam(TABLE *table_arg, MI_KEYDEF **keydef_out,
                 MI_COLUMNDEF **recinfo_out, uint *records_out)
{
  uint i, j, recpos, minpos, fieldpos, temp_length, length;
  enum ha_base_keytype type= HA_KEYTYPE_BINARY;
  uchar *record;
  KEY *pos;
  MI_KEYDEF *keydef;
  MI_COLUMNDEF *recinfo, *recinfo_pos;
  HA_KEYSEG *keyseg;
  TABLE_SHARE *share= table_arg->s;
  uint options= share->db_options_in_use;
  DBUG_ENTER("table2myisam");
  /* Single multi-malloc: freeing recinfo also releases keydef and keyseg. */
  if (!(my_multi_malloc(MYF(MY_WME),
          recinfo_out, (share->fields * 2 + 2) * sizeof(MI_COLUMNDEF),
          keydef_out, share->keys * sizeof(MI_KEYDEF),
          &keyseg,
          (share->key_parts + share->keys) * sizeof(HA_KEYSEG),
          NullS)))
    DBUG_RETURN(HA_ERR_OUT_OF_MEM); /* purecov: inspected */
  keydef= *keydef_out;
  recinfo= *recinfo_out;
  pos= table_arg->key_info;
  /* Pass 1: translate each server KEY into an MI_KEYDEF + HA_KEYSEG array. */
  for (i= 0; i < share->keys; i++, pos++)
  {
    keydef[i].flag= ((uint16) pos->flags & (HA_NOSAME | HA_FULLTEXT | HA_SPATIAL));
    keydef[i].key_alg= pos->algorithm == HA_KEY_ALG_UNDEF ?
      (pos->flags & HA_SPATIAL ? HA_KEY_ALG_RTREE : HA_KEY_ALG_BTREE) :
      pos->algorithm;
    keydef[i].block_length= pos->block_size;
    keydef[i].seg= keyseg;
    keydef[i].keysegs= pos->user_defined_key_parts;
    for (j= 0; j < pos->user_defined_key_parts; j++)
    {
      Field *field= pos->key_part[j].field;
      type= field->key_type();
      keydef[i].seg[j].flag= pos->key_part[j].key_part_flag;
      /* Decide per-segment prefix/space packing when key packing is enabled. */
      if (options & HA_OPTION_PACK_KEYS ||
          (pos->flags & (HA_PACK_KEY | HA_BINARY_PACK_KEY |
                         HA_SPACE_PACK_USED)))
      {
        if (pos->key_part[j].length > 8 &&
            (type == HA_KEYTYPE_TEXT ||
             type == HA_KEYTYPE_NUM ||
             (type == HA_KEYTYPE_BINARY && !field->zero_pack())))
        {
          /* No blobs here */
          if (j == 0)
            keydef[i].flag|= HA_PACK_KEY;
          if (!(field->flags & ZEROFILL_FLAG) &&
              (field->type() == MYSQL_TYPE_STRING ||
               field->type() == MYSQL_TYPE_VAR_STRING ||
               ((int) (pos->key_part[j].length - field->decimals())) >= 4))
            keydef[i].seg[j].flag|= HA_SPACE_PACK;
        }
        else if (j == 0 && (!(pos->flags & HA_NOSAME) || pos->key_length > 16))
          keydef[i].flag|= HA_BINARY_PACK_KEY;
      }
      keydef[i].seg[j].type= (int) type;
      keydef[i].seg[j].start= pos->key_part[j].offset;
      keydef[i].seg[j].length= pos->key_part[j].length;
      keydef[i].seg[j].bit_start= keydef[i].seg[j].bit_end=
        keydef[i].seg[j].bit_length= 0;
      keydef[i].seg[j].bit_pos= 0;
      keydef[i].seg[j].language= field->charset_for_protocol()->number;
      if (field->real_maybe_null())
      {
        keydef[i].seg[j].null_bit= field->null_bit;
        keydef[i].seg[j].null_pos= field->null_offset();
      }
      else
      {
        keydef[i].seg[j].null_bit= 0;
        keydef[i].seg[j].null_pos= 0;
      }
      if (field->type() == MYSQL_TYPE_BLOB ||
          field->type() == MYSQL_TYPE_GEOMETRY)
      {
        keydef[i].seg[j].flag|= HA_BLOB_PART;
        /* save number of bytes used to pack length */
        keydef[i].seg[j].bit_start= (uint) (field->pack_length() -
                                            portable_sizeof_char_ptr);
      }
      else if (field->type() == MYSQL_TYPE_BIT)
      {
        keydef[i].seg[j].bit_length= ((Field_bit *) field)->bit_len;
        keydef[i].seg[j].bit_start= ((Field_bit *) field)->bit_ofs;
        keydef[i].seg[j].bit_pos= (uint) (((Field_bit *) field)->bit_ptr -
                                          (uchar*) table_arg->record[0]);
      }
    }
    keyseg+= pos->user_defined_key_parts;
  }
  if (table_arg->found_next_number_field)
    keydef[share->next_number_index].flag|= HA_AUTO_KEY;
  record= table_arg->record[0];
  recpos= 0;
  recinfo_pos= recinfo;
  /*
    Pass 2: walk the record buffer in offset order, emitting one
    MI_COLUMNDEF per stored field and filler entries for gaps
    (e.g. the null-bit bytes at the start of the record).
  */
  while (recpos < (uint) share->reclength)
  {
    Field **field, *found= 0;
    minpos= share->reclength;
    length= 0;
    /* Find the field with the smallest offset >= recpos. */
    for (field= table_arg->field; *field; field++)
    {
      if ((fieldpos= (*field)->offset(record)) >= recpos &&
          fieldpos <= minpos)
      {
        /* skip null fields */
        if (!(temp_length= (*field)->pack_length_in_rec()))
          continue; /* Skip null-fields */
        if (! found || fieldpos < minpos ||
            (fieldpos == minpos && temp_length < length))
        {
          minpos= fieldpos;
          found= *field;
          length= temp_length;
        }
      }
    }
    DBUG_PRINT("loop", ("found: 0x%lx recpos: %d minpos: %d length: %d",
                        (long) found, recpos, minpos, length));
    if (recpos != minpos)
    { // Reserved space (Null bits?)
      memset(recinfo_pos, 0, sizeof(*recinfo_pos));
      recinfo_pos->type= (int) FIELD_NORMAL;
      recinfo_pos++->length= (uint16) (minpos - recpos);
    }
    if (!found)
      break;
    /* Choose the MyISAM storage class for this field. */
    if (found->flags & BLOB_FLAG)
      recinfo_pos->type= (int) FIELD_BLOB;
    else if (found->type() == MYSQL_TYPE_VARCHAR)
      recinfo_pos->type= FIELD_VARCHAR;
    else if (!(options & HA_OPTION_PACK_RECORD))
      recinfo_pos->type= (int) FIELD_NORMAL;
    else if (found->zero_pack())
      recinfo_pos->type= (int) FIELD_SKIP_ZERO;
    else
      recinfo_pos->type= (int) ((length <= 3 ||
                                 (found->flags & ZEROFILL_FLAG)) ?
                                FIELD_NORMAL :
                                found->type() == MYSQL_TYPE_STRING ||
                                found->type() == MYSQL_TYPE_VAR_STRING ?
                                FIELD_SKIP_ENDSPACE :
                                FIELD_SKIP_PRESPACE);
    if (found->real_maybe_null())
    {
      recinfo_pos->null_bit= found->null_bit;
      recinfo_pos->null_pos= found->null_offset();
    }
    else
    {
      recinfo_pos->null_bit= 0;
      recinfo_pos->null_pos= 0;
    }
    (recinfo_pos++)->length= (uint16) length;
    recpos= minpos + length;
    DBUG_PRINT("loop", ("length: %d type: %d",
                        recinfo_pos[-1].length,recinfo_pos[-1].type));
  }
  *records_out= (uint) (recinfo_pos - recinfo);
  DBUG_RETURN(0);
}
/*
  Check for underlying table conformance

  SYNOPSIS
    check_definition()
      t1_keyinfo  in  First table key definition
      t1_recinfo  in  First table record definition
      t1_keys     in  Number of keys in first table
      t1_recs     in  Number of records in first table
      t2_keyinfo  in  Second table key definition
      t2_recinfo  in  Second table record definition
      t2_keys     in  Number of keys in second table
      t2_recs     in  Number of records in second table
      strict      in  Strict check switch
      table       in  handle to the table object

  DESCRIPTION
    This function compares two MyISAM definitions. By intention it was done
    to compare merge table definition against underlying table definition.
    It may also be used to compare dot-frm and MYI definitions of MyISAM
    table as well to compare different MyISAM table definitions.

    For merge table it is not required that number of keys in merge table
    must exactly match number of keys in underlying table. When calling this
    function for underlying table conformance check, 'strict' flag must be
    set to false, and converted merge definition must be passed as t1_*.
    Otherwise 'strict' flag must be set to 1 and it is not required to pass
    converted dot-frm definition as t1_*.

    For compatibility reasons we relax some checks, specifically:
    - 4.0 (and earlier versions) always set key_alg to 0.
    - 4.0 (and earlier versions) have the same language for all keysegs.

  RETURN VALUE
    0 - Equal definitions.
    1 - Different definitions.

  TODO
    - compare FULLTEXT keys;
    - compare SPATIAL keys;
    - compare FIELD_SKIP_ZERO which is converted to FIELD_NORMAL correctly
      (should be corretly detected in table2myisam).
*/
int check_definition(MI_KEYDEF *t1_keyinfo, MI_COLUMNDEF *t1_recinfo,
                     uint t1_keys, uint t1_recs,
                     MI_KEYDEF *t2_keyinfo, MI_COLUMNDEF *t2_recinfo,
                     uint t2_keys, uint t2_recs, bool strict, TABLE *table_arg)
{
  uint i, j;
  DBUG_ENTER("check_definition");
  /* Pre-true-VARCHAR frms get the relaxed 4.0-compatibility comparison. */
  my_bool mysql_40_compat= table_arg && table_arg->s->frm_version < FRM_VER_TRUE_VARCHAR;
  if ((strict ? t1_keys != t2_keys : t1_keys > t2_keys))
  {
    DBUG_PRINT("error", ("Number of keys differs: t1_keys=%u, t2_keys=%u",
                         t1_keys, t2_keys));
    DBUG_RETURN(1);
  }
  if (t1_recs != t2_recs)
  {
    DBUG_PRINT("error", ("Number of recs differs: t1_recs=%u, t2_recs=%u",
                         t1_recs, t2_recs));
    DBUG_RETURN(1);
  }
  /* Compare keys pairwise; FULLTEXT/SPATIAL pairs are accepted as-is (TODO). */
  for (i= 0; i < t1_keys; i++)
  {
    HA_KEYSEG *t1_keysegs= t1_keyinfo[i].seg;
    HA_KEYSEG *t2_keysegs= t2_keyinfo[i].seg;
    if (t1_keyinfo[i].flag & HA_FULLTEXT && t2_keyinfo[i].flag & HA_FULLTEXT)
      continue;
    else if (t1_keyinfo[i].flag & HA_FULLTEXT ||
             t2_keyinfo[i].flag & HA_FULLTEXT)
    {
      DBUG_PRINT("error", ("Key %d has different definition", i));
      DBUG_PRINT("error", ("t1_fulltext= %d, t2_fulltext=%d",
                           test(t1_keyinfo[i].flag & HA_FULLTEXT),
                           test(t2_keyinfo[i].flag & HA_FULLTEXT)));
      DBUG_RETURN(1);
    }
    if (t1_keyinfo[i].flag & HA_SPATIAL && t2_keyinfo[i].flag & HA_SPATIAL)
      continue;
    else if (t1_keyinfo[i].flag & HA_SPATIAL ||
             t2_keyinfo[i].flag & HA_SPATIAL)
    {
      DBUG_PRINT("error", ("Key %d has different definition", i));
      DBUG_PRINT("error", ("t1_spatial= %d, t2_spatial=%d",
                           test(t1_keyinfo[i].flag & HA_SPATIAL),
                           test(t2_keyinfo[i].flag & HA_SPATIAL)));
      DBUG_RETURN(1);
    }
    if ((!mysql_40_compat &&
         t1_keyinfo[i].key_alg != t2_keyinfo[i].key_alg) ||
        t1_keyinfo[i].keysegs != t2_keyinfo[i].keysegs)
    {
      DBUG_PRINT("error", ("Key %d has different definition", i));
      DBUG_PRINT("error", ("t1_keysegs=%d, t1_key_alg=%d",
                           t1_keyinfo[i].keysegs, t1_keyinfo[i].key_alg));
      DBUG_PRINT("error", ("t2_keysegs=%d, t2_key_alg=%d",
                           t2_keyinfo[i].keysegs, t2_keyinfo[i].key_alg));
      DBUG_RETURN(1);
    }
    for (j= t1_keyinfo[i].keysegs; j--;)
    {
      uint8 t1_keysegs_j__type= t1_keysegs[j].type;
      /*
        Table migration from 4.1 to 5.1. In 5.1 a *TEXT key part is
        always HA_KEYTYPE_VARTEXT2. In 4.1 we had only the equivalent of
        HA_KEYTYPE_VARTEXT1. Since we treat both the same on MyISAM
        level, we can ignore a mismatch between these types.
      */
      if ((t1_keysegs[j].flag & HA_BLOB_PART) &&
          (t2_keysegs[j].flag & HA_BLOB_PART))
      {
        if ((t1_keysegs_j__type == HA_KEYTYPE_VARTEXT2) &&
            (t2_keysegs[j].type == HA_KEYTYPE_VARTEXT1))
          t1_keysegs_j__type= HA_KEYTYPE_VARTEXT1; /* purecov: tested */
        else if ((t1_keysegs_j__type == HA_KEYTYPE_VARBINARY2) &&
                 (t2_keysegs[j].type == HA_KEYTYPE_VARBINARY1))
          t1_keysegs_j__type= HA_KEYTYPE_VARBINARY1; /* purecov: inspected */
      }
      if ((!mysql_40_compat &&
           t1_keysegs[j].language != t2_keysegs[j].language) ||
          t1_keysegs_j__type != t2_keysegs[j].type ||
          t1_keysegs[j].null_bit != t2_keysegs[j].null_bit ||
          t1_keysegs[j].length != t2_keysegs[j].length ||
          t1_keysegs[j].start != t2_keysegs[j].start)
      {
        DBUG_PRINT("error", ("Key segment %d (key %d) has different "
                             "definition", j, i));
        DBUG_PRINT("error", ("t1_type=%d, t1_language=%d, t1_null_bit=%d, "
                             "t1_length=%d",
                             t1_keysegs[j].type, t1_keysegs[j].language,
                             t1_keysegs[j].null_bit, t1_keysegs[j].length));
        DBUG_PRINT("error", ("t2_type=%d, t2_language=%d, t2_null_bit=%d, "
                             "t2_length=%d",
                             t2_keysegs[j].type, t2_keysegs[j].language,
                             t2_keysegs[j].null_bit, t2_keysegs[j].length));
        DBUG_RETURN(1);
      }
    }
  }
  /* Compare the record (column) layouts field by field. */
  for (i= 0; i < t1_recs; i++)
  {
    MI_COLUMNDEF *t1_rec= &t1_recinfo[i];
    MI_COLUMNDEF *t2_rec= &t2_recinfo[i];
    /*
      FIELD_SKIP_ZERO can be changed to FIELD_NORMAL in mi_create,
      see NOTE1 in mi_create.c
    */
    if ((t1_rec->type != t2_rec->type &&
         !(t1_rec->type == (int) FIELD_SKIP_ZERO &&
           t1_rec->length == 1 &&
           t2_rec->type == (int) FIELD_NORMAL)) ||
        t1_rec->length != t2_rec->length ||
        t1_rec->null_bit != t2_rec->null_bit)
    {
      DBUG_PRINT("error", ("Field %d has different definition", i));
      DBUG_PRINT("error", ("t1_type=%d, t1_length=%d, t1_null_bit=%d",
                           t1_rec->type, t1_rec->length, t1_rec->null_bit));
      DBUG_PRINT("error", ("t2_type=%d, t2_length=%d, t2_null_bit=%d",
                           t2_rec->type, t2_rec->length, t2_rec->null_bit));
      DBUG_RETURN(1);
    }
  }
  DBUG_RETURN(0);
}
extern "C" {
/* C-linkage callbacks invoked by the mi_check/myisamchk library code. */
/** Expose the session's kill flag so mi_check can abort on KILL. */
volatile int *killed_ptr(MI_CHECK *param)
{
  /* In theory Unsafe conversion, but should be ok for now */
  return (int*) &(((THD *)(param->thd))->killed);
}
/** Report a check/repair error; marks the check as failed and data as lost. */
void mi_check_print_error(MI_CHECK *param, const char *fmt,...)
{
  param->error_printed|=1;
  param->out_flag|= O_DATA_LOST;
  va_list args;
  va_start(args, fmt);
  mi_check_print_msg(param, "error", fmt, args);
  va_end(args);
}
/** Report an informational message from the check/repair routines. */
void mi_check_print_info(MI_CHECK *param, const char *fmt,...)
{
  va_list args;
  va_start(args, fmt);
  mi_check_print_msg(param, "info", fmt, args);
  va_end(args);
}
/** Report a non-fatal warning; still flags the output as possibly lossy. */
void mi_check_print_warning(MI_CHECK *param, const char *fmt,...)
{
  param->warning_printed=1;
  param->out_flag|= O_DATA_LOST;
  va_list args;
  va_start(args, fmt);
  mi_check_print_msg(param, "warning", fmt, args);
  va_end(args);
}
/**
  Report list of threads (and queries) accessing a table, thread_id of a
  thread that detected corruption, source file name and line number where
  this corruption was detected, optional extra information (string).

  This function is intended to be used when table corruption is detected.

  @param[in] file    MI_INFO object.
  @param[in] message Optional error message.
  @param[in] sfile   Name of source file.
  @param[in] sline   Line number in source file.

  @return void
*/
void _mi_report_crashed(MI_INFO *file, const char *message,
                        const char *sfile, uint sline)
{
  THD *cur_thd;
  LIST *element;
  char buf[1024];
  /* intern_lock guards the share's in_use list while we walk it. */
  mysql_mutex_lock(&file->s->intern_lock);
  if ((cur_thd= (THD*) file->in_use.data))
    sql_print_error("Got an error from thread_id=%lu, %s:%d", cur_thd->thread_id,
                    sfile, sline);
  else
    sql_print_error("Got an error from unknown thread, %s:%d", sfile, sline);
  if (message)
    sql_print_error("%s", message);
  for (element= file->s->in_use; element; element= list_rest(element))
  {
    THD *thd= (THD*) element->data;
    sql_print_error("%s", thd ? thd_security_context(thd, buf, sizeof(buf), 0)
                    : "Unknown thread accessing table");
  }
  mysql_mutex_unlock(&file->s->intern_lock);
}
}
/**
  Constructor. 'file' (the MI_INFO handle) stays NULL until open() succeeds;
  int_table_flags advertises the capabilities MyISAM supports to the server.
*/
ha_myisam::ha_myisam(handlerton *hton, TABLE_SHARE *table_arg)
  :handler(hton, table_arg), file(0),
  int_table_flags(HA_NULL_IN_KEY | HA_CAN_FULLTEXT | HA_CAN_SQL_HANDLER |
                  HA_BINLOG_ROW_CAPABLE | HA_BINLOG_STMT_CAPABLE |
                  HA_DUPLICATE_POS | HA_CAN_INDEX_BLOBS | HA_AUTO_PART_KEY |
                  HA_FILE_BASED | HA_CAN_GEOMETRY | HA_NO_TRANSACTIONS |
                  HA_CAN_INSERT_DELAYED | HA_CAN_BIT_FIELD | HA_CAN_RTREEKEYS |
                  HA_HAS_RECORDS | HA_STATS_RECORDS_IS_EXACT | HA_CAN_REPAIR),
  can_enable_indexes(1)
{}
/**
  Clone this handler (used e.g. for index-merge scans). The clone opens
  its own MI_INFO, then shares this handler's live state block so both
  instances see a consistent view of the table statistics.
*/
handler *ha_myisam::clone(const char *name, MEM_ROOT *mem_root)
{
  handler *base_clone= handler::clone(name, mem_root);
  if (base_clone == NULL)
    return NULL;
  ha_myisam *cloned= static_cast<ha_myisam *>(base_clone);
  cloned->file->state= file->state;
  return cloned;
}
/* File extensions owned by a MyISAM table: index file first, then data file. */
static const char *ha_myisam_exts[] = {
  ".MYI",
  ".MYD",
  NullS
};
/** @return NullS-terminated list of file extensions this engine uses. */
const char **ha_myisam::bas_ext() const
{
  return ha_myisam_exts;
}
/**
  @brief Check if the given db.tablename is a system table for this SE.

  @param db                         Database name to check.
  @param table_name                 table name to check.
  @param is_sql_layer_system_table  if the supplied db.table_name is a SQL
                                    layer system table.

  @note MyISAM supports every SQL-layer system table and currently defines
        no engine-specific system tables of its own, so the answer is
        exactly the is_sql_layer_system_table flag. Should MyISAM ever
        gain its own system database, see the reference implementation
        in ha_example.cc.

  @retval TRUE   Given db.table_name is a supported system table.
  @retval FALSE  Given db.table_name is not a supported system table.
*/
static bool myisam_is_supported_system_table(const char *db,
                                             const char *table_name,
                                             bool is_sql_layer_system_table)
{
  /* All SQL-layer system tables are supported; nothing engine-specific. */
  return is_sql_layer_system_table;
}
/**
  Human-readable index type for SHOW KEYS / INFORMATION_SCHEMA.
  Precedence matches the flag checks: FULLTEXT, then SPATIAL, then
  RTREE (by algorithm), otherwise BTREE.
*/
const char *ha_myisam::index_type(uint key_number)
{
  const KEY &key= table->key_info[key_number];
  if (key.flags & HA_FULLTEXT)
    return "FULLTEXT";
  if (key.flags & HA_SPATIAL)
    return "SPATIAL";
  if (key.algorithm == HA_KEY_ALG_RTREE)
    return "RTREE";
  return "BTREE";
}
/*
  Open the MyISAM table. 'name' is here without an extension.
  Verifies (for non-temporary tables) that the .frm-derived definition
  matches the on-disk .MYI definition; on mismatch the table is treated
  as crashed. Returns 0 on success, else an error number (my_errno).
*/
int ha_myisam::open(const char *name, int mode, uint test_if_locked)
{
  MI_KEYDEF *keyinfo;
  MI_COLUMNDEF *recinfo= 0;
  uint recs;
  uint i;

  /*
    If the user wants to have memory mapped data files, add an
    open_flag. Do not memory map temporary tables because they are
    expected to be inserted and thus extended a lot. Memory mapping is
    efficient for files that keep their size, but very inefficient for
    growing files. Using an open_flag instead of calling mi_extra(...
    HA_EXTRA_MMAP ...) after mi_open() has the advantage that the
    mapping is not repeated for every open, but just done on the initial
    open, when the MyISAM share is created. Everytime the server
    requires to open a new instance of a table it calls this method. We
    will always supply HA_OPEN_MMAP for a permanent table. However, the
    MyISAM storage engine will ignore this flag if this is a secondary
    open of a table that is in use by other threads already (if the
    MyISAM share exists already).
  */
  if (!(test_if_locked & HA_OPEN_TMP_TABLE) && opt_myisam_use_mmap)
    test_if_locked|= HA_OPEN_MMAP;
  if (!(file=mi_open(name, mode, test_if_locked | HA_OPEN_FROM_SQL_LAYER)))
    return (my_errno ? my_errno : -1);
  if (!table->s->tmp_table) /* No need to perform a check for tmp table */
  {
    /* Convert the server-side definition and compare it with the .MYI one. */
    if ((my_errno= table2myisam(table, &keyinfo, &recinfo, &recs)))
    {
      /* purecov: begin inspected */
      DBUG_PRINT("error", ("Failed to convert TABLE object to MyISAM "
                           "key and column definition"));
      goto err;
      /* purecov: end */
    }
    if (check_definition(keyinfo, recinfo, table->s->keys, recs,
                         file->s->keyinfo, file->s->rec,
                         file->s->base.keys, file->s->base.fields,
                         true, table))
    {
      /* purecov: begin inspected */
      my_errno= HA_ERR_CRASHED;
      goto err;
      /* purecov: end */
    }
  }
  if (test_if_locked & (HA_OPEN_IGNORE_IF_LOCKED | HA_OPEN_TMP_TABLE))
    (void) mi_extra(file, HA_EXTRA_NO_WAIT_LOCK, 0);
  info(HA_STATUS_NO_LOCK | HA_STATUS_VARIABLE | HA_STATUS_CONST);
  if (!(test_if_locked & HA_OPEN_WAIT_IF_LOCKED))
    (void) mi_extra(file, HA_EXTRA_WAIT_LOCK, 0);
  if (!table->s->db_record_offset)
    int_table_flags|=HA_REC_NOT_IN_SEQ;
  if (file->s->options & (HA_OPTION_CHECKSUM | HA_OPTION_COMPRESS_RECORD))
    int_table_flags|=HA_HAS_CHECKSUM;
  /* Propagate fulltext parser plugins and key block sizes into the share. */
  for (i= 0; i < table->s->keys; i++)
  {
    plugin_ref parser= table->key_info[i].parser;
    if (table->key_info[i].flags & HA_USES_PARSER)
      file->s->keyinfo[i].parser=
        (struct st_mysql_ftparser *)plugin_decl(parser)->info;
    table->key_info[i].block_size= file->s->keyinfo[i].block_length;
  }
  my_errno= 0;
  goto end;
 err:
  this->close();
 end:
  /*
    Both recinfo and keydef are allocated by my_multi_malloc(), thus only
    recinfo must be freed.
  */
  if (recinfo)
    my_free(recinfo);
  return my_errno;
}
int ha_myisam::close(void)
{
MI_INFO *tmp=file;
file=0;
return mi_close(tmp);
}
/*
  Insert one row into the table.

  @param buf  row image in table->record[0] format
  @return 0 on success, or a handler error code
*/
int ha_myisam::write_row(uchar *buf)
{
  ha_statistic_increment(&SSV::ha_write_count);

  /*
    If we have an auto_increment column and we are writing a changed row
    or a new row, then update the auto_increment value in the record.
  */
  if (table->next_number_field && buf == table->record[0])
  {
    const int error= update_auto_increment();
    if (error != 0)
      return error;
  }
  return mi_write(file, buf);
}
/*
  Implement CHECK TABLE for MyISAM.

  Fix: every '&param' in this function had been corrupted to '¶m'
  by HTML-entity decoding ('&para' -> U+00B6); restored throughout.

  @param thd        thread doing the check
  @param check_opt  flags from the CHECK TABLE statement
  @return HA_ADMIN_OK, HA_ADMIN_ALREADY_DONE, HA_ADMIN_CORRUPT or
          HA_ADMIN_INTERNAL_ERROR (table not open)
*/
int ha_myisam::check(THD* thd, HA_CHECK_OPT* check_opt)
{
  if (!file) return HA_ADMIN_INTERNAL_ERROR;
  int error;
  MI_CHECK param;
  MYISAM_SHARE* share = file->s;
  const char *old_proc_info=thd->proc_info;

  thd_proc_info(thd, "Checking table");
  myisamchk_init(&param);
  param.thd = thd;
  param.op_name = "check";
  param.db_name= table->s->db.str;
  param.table_name= table->alias;
  param.testflag = check_opt->flags | T_CHECK | T_SILENT;
  param.stats_method= (enum_mi_stats_method)THDVAR(thd, stats_method);

  /* Statistics update needs write access to the index file */
  if (!(table->db_stat & HA_READ_ONLY))
    param.testflag|= T_STATISTICS;
  param.using_global_keycache = 1;

  /* Skip the check entirely if nothing changed since the last one */
  if (!mi_is_crashed(file) &&
      (((param.testflag & T_CHECK_ONLY_CHANGED) &&
        !(share->state.changed & (STATE_CHANGED | STATE_CRASHED |
                                  STATE_CRASHED_ON_REPAIR)) &&
        share->state.open_count == 0) ||
       ((param.testflag & T_FAST) && (share->state.open_count ==
                                      (uint) (share->global_changed ? 1 : 0)))))
    return HA_ADMIN_ALREADY_DONE;

  error = chk_status(&param, file);      // Not fatal
  error = chk_size(&param, file);
  if (!error)
    error |= chk_del(&param, file, param.testflag);
  if (!error)
    error = chk_key(&param, file);
  if (!error)
  {
    /* Data-link check (row level) only for packed/compressed tables,
       extended/medium checks, or when the table is marked crashed */
    if ((!(param.testflag & T_QUICK) &&
         ((share->options &
           (HA_OPTION_PACK_RECORD | HA_OPTION_COMPRESS_RECORD)) ||
          (param.testflag & (T_EXTEND | T_MEDIUM)))) ||
        mi_is_crashed(file))
    {
      uint old_testflag=param.testflag;
      param.testflag|=T_MEDIUM;
      if (!(error= init_io_cache(&param.read_cache, file->dfile,
                                 my_default_record_cache_size, READ_CACHE,
                                 share->pack.header_length, 1, MYF(MY_WME))))
      {
        error= chk_data_link(&param, file, param.testflag & T_EXTEND);
        end_io_cache(&(param.read_cache));
      }
      param.testflag= old_testflag;
    }
  }
  if (!error)
  {
    /* Check passed: clear crash marks and persist updated state */
    if ((share->state.changed & (STATE_CHANGED |
                                 STATE_CRASHED_ON_REPAIR |
                                 STATE_CRASHED | STATE_NOT_ANALYZED)) ||
        (param.testflag & T_STATISTICS) ||
        mi_is_crashed(file))
    {
      file->update|=HA_STATE_CHANGED | HA_STATE_ROW_CHANGED;
      mysql_mutex_lock(&share->intern_lock);
      share->state.changed&= ~(STATE_CHANGED | STATE_CRASHED |
                               STATE_CRASHED_ON_REPAIR);
      if (!(table->db_stat & HA_READ_ONLY))
        error=update_state_info(&param,file,UPDATE_TIME | UPDATE_OPEN_COUNT |
                                UPDATE_STAT);
      mysql_mutex_unlock(&share->intern_lock);
      info(HA_STATUS_NO_LOCK | HA_STATUS_TIME | HA_STATUS_VARIABLE |
           HA_STATUS_CONST);
    }
  }
  else if (!mi_is_crashed(file) && !thd->killed)
  {
    /* Check failed: mark the table crashed unless we were interrupted */
    mi_mark_crashed(file);
    file->update |= HA_STATE_CHANGED | HA_STATE_ROW_CHANGED;
  }

  thd_proc_info(thd, old_proc_info);
  return error ? HA_ADMIN_CORRUPT : HA_ADMIN_OK;
}
/*
analyze the key distribution in the table
As the table may be only locked for read, we have to take into account that
two threads may do an analyze at the same time!
*/
/*
  Analyze the key distribution of the table (ANALYZE TABLE).

  Fix: restored '&param' where it had been mangled to '¶m'.

  @return HA_ADMIN_OK, HA_ADMIN_ALREADY_DONE or HA_ADMIN_CORRUPT
*/
int ha_myisam::analyze(THD *thd, HA_CHECK_OPT* check_opt)
{
  int error=0;
  MI_CHECK param;
  MYISAM_SHARE* share = file->s;

  myisamchk_init(&param);
  param.thd = thd;
  param.op_name= "analyze";
  param.db_name= table->s->db.str;
  param.table_name= table->alias;
  param.testflag= (T_FAST | T_CHECK | T_SILENT | T_STATISTICS |
                   T_DONT_CHECK_CHECKSUM);
  param.using_global_keycache = 1;
  param.stats_method= (enum_mi_stats_method)THDVAR(thd, stats_method);

  /* Another thread may have analyzed the table in the meantime */
  if (!(share->state.changed & STATE_NOT_ANALYZED))
    return HA_ADMIN_ALREADY_DONE;

  error = chk_key(&param, file);
  if (!error)
  {
    mysql_mutex_lock(&share->intern_lock);
    error=update_state_info(&param,file,UPDATE_STAT);
    mysql_mutex_unlock(&share->intern_lock);
  }
  else if (!mi_is_crashed(file) && !thd->killed)
    mi_mark_crashed(file);
  return error ? HA_ADMIN_CORRUPT : HA_ADMIN_OK;
}
/*
  Implement REPAIR TABLE: set up an MI_CHECK and delegate to the
  three-argument repair() worker, retrying with progressively less
  optimistic strategies (drop T_QUICK, then fall back from
  repair-by-sort to keycache repair).

  Fix: restored '&param' where it had been mangled to '¶m'.

  @return result of the last repair() attempt
*/
int ha_myisam::repair(THD* thd, HA_CHECK_OPT *check_opt)
{
  int error;
  MI_CHECK param;
  ha_rows start_records;

  if (!file) return HA_ADMIN_INTERNAL_ERROR;

  myisamchk_init(&param);
  param.thd = thd;
  param.op_name= "repair";
  param.testflag= ((check_opt->flags & ~(T_EXTEND)) |
                   T_SILENT | T_FORCE_CREATE | T_CALC_CHECKSUM |
                   (check_opt->flags & T_EXTEND ? T_REP : T_REP_BY_SORT));
  param.sort_buffer_length= THDVAR(thd, sort_buffer_size);
  start_records=file->state->records;
  while ((error=repair(thd,param,0)) && param.retry_repair)
  {
    param.retry_repair=0;
    if (test_all_bits(param.testflag,
                      (uint) (T_RETRY_WITHOUT_QUICK | T_QUICK)))
    {
      /* First fallback: redo the repair without the quick option */
      param.testflag&= ~T_RETRY_WITHOUT_QUICK;
      sql_print_information("Retrying repair of: '%s' without quick",
                            table->s->path.str);
      continue;
    }
    param.testflag&= ~T_QUICK;
    if ((param.testflag & T_REP_BY_SORT))
    {
      /* Second fallback: plain repair through the key cache */
      param.testflag= (param.testflag & ~T_REP_BY_SORT) | T_REP;
      sql_print_information("Retrying repair of: '%s' with keycache",
                            table->s->path.str);
      continue;
    }
    break;
  }
  if (!error && start_records != file->state->records &&
      !(check_opt->flags & T_VERY_SILENT))
  {
    char llbuff[22],llbuff2[22];
    sql_print_information("Found %s of %s rows when repairing '%s'",
                          llstr(file->state->records, llbuff),
                          llstr(start_records, llbuff2),
                          table->s->path.str);
  }
  return error;
}
/*
  Implement OPTIMIZE TABLE: repair-by-sort plus statistics and index
  sorting; on failure retries once without T_REP_BY_SORT.

  Fix: restored '&param' where it had been mangled to '¶m'.
*/
int ha_myisam::optimize(THD* thd, HA_CHECK_OPT *check_opt)
{
  int error;
  if (!file) return HA_ADMIN_INTERNAL_ERROR;
  MI_CHECK param;

  myisamchk_init(&param);
  param.thd = thd;
  param.op_name= "optimize";
  param.testflag= (check_opt->flags | T_SILENT | T_FORCE_CREATE |
                   T_REP_BY_SORT | T_STATISTICS | T_SORT_INDEX);
  param.sort_buffer_length= THDVAR(thd, sort_buffer_size);
  if ((error= repair(thd,param,1)) && param.retry_repair)
  {
    sql_print_warning("Warning: Optimize table got errno %d on %s.%s, retrying",
                      my_errno, param.db_name, param.table_name);
    param.testflag&= ~T_REP_BY_SORT;
    error= repair(thd,param,1);
  }
  return error;
}
int ha_myisam::repair(THD *thd, MI_CHECK ¶m, bool do_optimize)
{
int error=0;
uint local_testflag=param.testflag;
bool optimize_done= !do_optimize, statistics_done=0;
bool has_old_locks= thd->locked_tables_mode || file->lock_type != F_UNLCK;
const char *old_proc_info=thd->proc_info;
char fixed_name[FN_REFLEN];
MYISAM_SHARE* share = file->s;
ha_rows rows= file->state->records;
DBUG_ENTER("ha_myisam::repair");
param.db_name= table->s->db.str;
param.table_name= table->alias;
param.tmpfile_createflag = O_RDWR | O_TRUNC;
param.using_global_keycache = 1;
param.thd= thd;
param.tmpdir= &mysql_tmpdir_list;
param.out_flag= 0;
strmov(fixed_name,file->filename);
// Release latches since this can take a long time
ha_release_temporary_latches(thd);
// Don't lock tables if we have used LOCK TABLE or already locked.
if (!has_old_locks &&
mi_lock_database(file, table->s->tmp_table ? F_EXTRA_LCK : F_WRLCK))
{
char errbuf[MYSYS_STRERROR_SIZE];
mi_check_print_error(¶m, ER(ER_CANT_LOCK), my_errno,
my_strerror(errbuf, sizeof(errbuf), my_errno));
DBUG_RETURN(HA_ADMIN_FAILED);
}
if (!do_optimize ||
((file->state->del || share->state.split != file->state->records) &&
(!(param.testflag & T_QUICK) ||
!(share->state.changed & STATE_NOT_OPTIMIZED_KEYS))))
{
ulonglong key_map= ((local_testflag & T_CREATE_MISSING_KEYS) ?
mi_get_mask_all_keys_active(share->base.keys) :
share->state.key_map);
uint testflag=param.testflag;
#ifdef HAVE_MMAP
bool remap= test(share->file_map);
/*
mi_repair*() functions family use file I/O even if memory
mapping is available.
Since mixing mmap I/O and file I/O may cause various artifacts,
memory mapping must be disabled.
*/
if (remap)
mi_munmap_file(file);
#endif
if (mi_test_if_sort_rep(file,file->state->records,key_map,0) &&
(local_testflag & T_REP_BY_SORT))
{
local_testflag|= T_STATISTICS;
param.testflag|= T_STATISTICS; // We get this for free
statistics_done=1;
if (THDVAR(thd, repair_threads)>1)
{
char buf[40];
/* TODO: respect myisam_repair_threads variable */
my_snprintf(buf, 40, "Repair with %d threads", my_count_bits(key_map));
thd_proc_info(thd, buf);
error = mi_repair_parallel(¶m, file, fixed_name,
param.testflag & T_QUICK);
thd_proc_info(thd, "Repair done"); // to reset proc_info, as
// it was pointing to local buffer
}
else
{
thd_proc_info(thd, "Repair by sorting");
error = mi_repair_by_sort(¶m, file, fixed_name,
param.testflag & T_QUICK);
}
}
else
{
thd_proc_info(thd, "Repair with keycache");
param.testflag &= ~T_REP_BY_SORT;
error= mi_repair(¶m, file, fixed_name,
param.testflag & T_QUICK);
}
#ifdef HAVE_MMAP
if (remap)
mi_dynmap_file(file, file->state->data_file_length);
#endif
param.testflag=testflag;
optimize_done=1;
}
if (!error)
{
if ((local_testflag & T_SORT_INDEX) &&
(share->state.changed & STATE_NOT_SORTED_PAGES))
{
optimize_done=1;
thd_proc_info(thd, "Sorting index");
error=mi_sort_index(¶m,file,fixed_name);
}
if (!statistics_done && (local_testflag & T_STATISTICS))
{
if (share->state.changed & STATE_NOT_ANALYZED)
{
optimize_done=1;
thd_proc_info(thd, "Analyzing");
error = chk_key(¶m, file);
}
else
local_testflag&= ~T_STATISTICS; // Don't update statistics
}
}
thd_proc_info(thd, "Saving state");
if (!error)
{
if ((share->state.changed & STATE_CHANGED) || mi_is_crashed(file))
{
share->state.changed&= ~(STATE_CHANGED | STATE_CRASHED |
STATE_CRASHED_ON_REPAIR);
file->update|=HA_STATE_CHANGED | HA_STATE_ROW_CHANGED;
}
/*
the following 'if', thought conceptually wrong,
is a useful optimization nevertheless.
*/
if (file->state != &file->s->state.state)
file->s->state.state = *file->state;
if (file->s->base.auto_key)
update_auto_increment_key(¶m, file, 1);
if (optimize_done)
error = update_state_info(¶m, file,
UPDATE_TIME | UPDATE_OPEN_COUNT |
(local_testflag &
T_STATISTICS ? UPDATE_STAT : 0));
info(HA_STATUS_NO_LOCK | HA_STATUS_TIME | HA_STATUS_VARIABLE |
HA_STATUS_CONST);
if (rows != file->state->records && ! (param.testflag & T_VERY_SILENT))
{
char llbuff[22],llbuff2[22];
mi_check_print_warning(¶m,"Number of rows changed from %s to %s",
llstr(rows,llbuff),
llstr(file->state->records,llbuff2));
}
}
else
{
mi_mark_crashed_on_repair(file);
file->update |= HA_STATE_CHANGED | HA_STATE_ROW_CHANGED;
update_state_info(¶m, file, 0);
}
thd_proc_info(thd, old_proc_info);
if (!has_old_locks)
mi_lock_database(file,F_UNLCK);
DBUG_RETURN(error ? HA_ADMIN_FAILED :
!optimize_done ? HA_ADMIN_ALREADY_DONE : HA_ADMIN_OK);
}
/*
Assign table indexes to a specific key cache.
*/
/*
  Assign table indexes to a specific key cache
  (CACHE INDEX ... IN <cache>).

  Fix: restored '&param' where it had been mangled to '¶m'.
*/
int ha_myisam::assign_to_keycache(THD* thd, HA_CHECK_OPT *check_opt)
{
  KEY_CACHE *new_key_cache= check_opt->key_cache;
  const char *errmsg= 0;
  int error= HA_ADMIN_OK;
  ulonglong map;
  TABLE_LIST *table_list= table->pos_in_table_list;
  DBUG_ENTER("ha_myisam::assign_to_keycache");

  table->keys_in_use_for_query.clear_all();

  if (table_list->process_index_hints(table))
    DBUG_RETURN(HA_ADMIN_FAILED);
  map= ~(ulonglong) 0;
  if (!table->keys_in_use_for_query.is_clear_all())
    /* use all keys if there's no list specified by the user through hints */
    map= table->keys_in_use_for_query.to_ulonglong();

  if ((error= mi_assign_to_key_cache(file, map, new_key_cache)))
  {
    char buf[STRING_BUFFER_USUAL_SIZE];
    my_snprintf(buf, sizeof(buf),
                "Failed to flush to index file (errno: %d)", error);
    errmsg= buf;
    error= HA_ADMIN_CORRUPT;
  }

  if (error != HA_ADMIN_OK)
  {
    /* Send error to user */
    MI_CHECK param;
    myisamchk_init(&param);
    param.thd= thd;
    param.op_name= "assign_to_keycache";
    param.db_name= table->s->db.str;
    param.table_name= table->s->table_name.str;
    param.testflag= 0;
    mi_check_print_error(&param, errmsg);
  }
  DBUG_RETURN(error);
}
/*
Preload pages of the index file for a table into the key cache.
*/
/*
  Preload pages of the index file for a table into the key cache
  (LOAD INDEX INTO CACHE).

  Fix: restored '&param' where it had been mangled to '¶m'.
*/
int ha_myisam::preload_keys(THD* thd, HA_CHECK_OPT *check_opt)
{
  int error;
  const char *errmsg;
  ulonglong map;
  TABLE_LIST *table_list= table->pos_in_table_list;
  my_bool ignore_leaves= table_list->ignore_leaves;
  char buf[MYSQL_ERRMSG_SIZE];
  DBUG_ENTER("ha_myisam::preload_keys");

  table->keys_in_use_for_query.clear_all();

  if (table_list->process_index_hints(table))
    DBUG_RETURN(HA_ADMIN_FAILED);

  map= ~(ulonglong) 0;
  /* Check validity of the index references */
  if (!table->keys_in_use_for_query.is_clear_all())
    /* use all keys if there's no list specified by the user through hints */
    map= table->keys_in_use_for_query.to_ulonglong();

  mi_extra(file, HA_EXTRA_PRELOAD_BUFFER_SIZE,
           (void *) &thd->variables.preload_buff_size);

  if ((error= mi_preload(file, map, ignore_leaves)))
  {
    switch (error) {
    case HA_ERR_NON_UNIQUE_BLOCK_SIZE:
      errmsg= "Indexes use different block sizes";
      break;
    case HA_ERR_OUT_OF_MEM:
      errmsg= "Failed to allocate buffer";
      break;
    default:
      my_snprintf(buf, sizeof(buf),
                  "Failed to read from index file (errno: %d)", my_errno);
      errmsg= buf;
    }
    error= HA_ADMIN_FAILED;
    goto err;
  }

  DBUG_RETURN(HA_ADMIN_OK);

err:
  {
    /* Report the failure to the user as a check-style error message */
    MI_CHECK param;
    myisamchk_init(&param);
    param.thd= thd;
    param.op_name= "preload_keys";
    param.db_name= table->s->db.str;
    param.table_name= table->s->table_name.str;
    param.testflag= 0;
    mi_check_print_error(&param, errmsg);
    DBUG_RETURN(error);
  }
}
/*
Disable indexes, making it persistent if requested.
SYNOPSIS
disable_indexes()
mode mode of operation:
HA_KEY_SWITCH_NONUNIQ disable all non-unique keys
HA_KEY_SWITCH_ALL disable all keys
HA_KEY_SWITCH_NONUNIQ_SAVE dis. non-uni. and make persistent
HA_KEY_SWITCH_ALL_SAVE dis. all keys and make persistent
IMPLEMENTATION
HA_KEY_SWITCH_NONUNIQ is not implemented.
HA_KEY_SWITCH_ALL_SAVE is not implemented.
RETURN
0 ok
HA_ERR_WRONG_COMMAND mode not implemented.
*/
/*
  Disable indexes according to 'mode'; see the synopsis comment above
  for the supported modes.

  @return 0 on success, HA_ERR_WRONG_COMMAND for unimplemented modes
*/
int ha_myisam::disable_indexes(uint mode)
{
  switch (mode) {
  case HA_KEY_SWITCH_ALL:
    /* call a storage engine function to switch the key map */
    return mi_disable_indexes(file);
  case HA_KEY_SWITCH_NONUNIQ_SAVE:
    mi_extra(file, HA_EXTRA_NO_KEYS, 0);
    info(HA_STATUS_CONST);                      // Read new key info
    return 0;
  default:
    /* mode not implemented */
    return HA_ERR_WRONG_COMMAND;
  }
}
/*
Enable indexes, making it persistent if requested.
SYNOPSIS
enable_indexes()
mode mode of operation:
HA_KEY_SWITCH_NONUNIQ enable all non-unique keys
HA_KEY_SWITCH_ALL enable all keys
HA_KEY_SWITCH_NONUNIQ_SAVE en. non-uni. and make persistent
HA_KEY_SWITCH_ALL_SAVE en. all keys and make persistent
DESCRIPTION
Enable indexes, which might have been disabled by disable_indexes() before.
The modes without _SAVE work only if both data and indexes are empty,
since the MyISAM repair would enable them persistently.
To be sure in these cases, call handler::delete_all_rows() before.
IMPLEMENTATION
HA_KEY_SWITCH_NONUNIQ is not implemented.
HA_KEY_SWITCH_ALL_SAVE is not implemented.
RETURN
0 ok
!=0 Error, among others:
HA_ERR_CRASHED data or index is non-empty. Delete all rows and retry.
HA_ERR_WRONG_COMMAND mode not implemented.
*/
/*
  Enable indexes according to 'mode'; see the synopsis comment above
  for the supported modes and return values.

  Fix: restored '&param' where it had been mangled to '¶m'.
*/
int ha_myisam::enable_indexes(uint mode)
{
  int error;
  DBUG_EXECUTE_IF("wait_in_enable_indexes",
                  debug_wait_for_kill("wait_in_enable_indexes"); );

  if (mi_is_all_keys_active(file->s->state.key_map, file->s->base.keys))
  {
    /* All indexes are enabled already. */
    return 0;
  }

  if (mode == HA_KEY_SWITCH_ALL)
  {
    error= mi_enable_indexes(file);
    /*
       Do not try to repair on error,
       as this could make the enabled state persistent,
       but mode==HA_KEY_SWITCH_ALL forbids it.
    */
  }
  else if (mode == HA_KEY_SWITCH_NONUNIQ_SAVE)
  {
    /* Rebuild the disabled (non-unique) indexes via the repair worker */
    THD *thd=current_thd;
    MI_CHECK param;
    const char *save_proc_info=thd->proc_info;
    thd_proc_info(thd, "Creating index");
    myisamchk_init(&param);
    param.op_name= "recreating_index";
    param.testflag= (T_SILENT | T_REP_BY_SORT | T_QUICK |
                     T_CREATE_MISSING_KEYS);
    param.myf_rw&= ~MY_WAIT_IF_FULL;
    param.sort_buffer_length= THDVAR(thd, sort_buffer_size);
    param.stats_method= (enum_mi_stats_method)THDVAR(thd, stats_method);
    param.tmpdir=&mysql_tmpdir_list;
    if ((error= (repair(thd,param,0) != HA_ADMIN_OK)) && param.retry_repair)
    {
      sql_print_warning("Warning: Enabling keys got errno %d on %s.%s, retrying",
                        my_errno, param.db_name, param.table_name);
      /*
        Repairing by sort failed. Now try standard repair method.
        Still we want to fix only index file. If data file corruption
        was detected (T_RETRY_WITHOUT_QUICK), we shouldn't do much here.
        Let implicit repair do this job.
      */
      if (!(param.testflag & T_RETRY_WITHOUT_QUICK))
      {
        param.testflag&= ~T_REP_BY_SORT;
        error= (repair(thd,param,0) != HA_ADMIN_OK);
      }
      /*
        If the standard repair succeeded, clear all error messages which
        might have been set by the first repair. They can still be seen
        with SHOW WARNINGS then.
      */
      if (! error)
        thd->clear_error();
    }
    info(HA_STATUS_CONST);
    thd_proc_info(thd, save_proc_info);
  }
  else
  {
    /* mode not implemented */
    error= HA_ERR_WRONG_COMMAND;
  }
  return error;
}
/*
Test if indexes are disabled.
SYNOPSIS
indexes_are_disabled()
no parameters
RETURN
0 indexes are not disabled
1 all indexes are disabled
[2 non-unique indexes are disabled - NOT YET IMPLEMENTED]
*/
/* Thin wrapper: delegate the disabled-index test to the engine layer. */
int ha_myisam::indexes_are_disabled(void)
{
  return mi_indexes_are_disabled(file);
}
/*
prepare for a many-rows insert operation
e.g. - disable indexes (if they can be recreated fast) or
activate special bulk-insert optimizations
SYNOPSIS
start_bulk_insert(rows)
rows Rows to be inserted
0 if we don't know
NOTICE
Do not forget to call end_bulk_insert() later!
*/
/*
  Prepare for a many-rows insert: optionally enable the row write
  cache, disable non-unique indexes on an empty table, or activate the
  bulk-insert key buffering. See the synopsis comment above.

  @param rows  expected number of rows, 0 when unknown
*/
void ha_myisam::start_bulk_insert(ha_rows rows)
{
  DBUG_ENTER("ha_myisam::start_bulk_insert");
  THD *thd= current_thd;
  ulong cache_size= min(thd->variables.read_buff_size,
                        (ulong) (table->s->avg_row_length*rows));
  DBUG_PRINT("info",("start_bulk_insert: rows %lu size %lu",
                     (ulong) rows, cache_size));

  /* don't enable row cache if too few rows */
  if (!rows || rows > MI_MIN_ROWS_TO_USE_WRITE_CACHE)
    mi_extra(file, HA_EXTRA_WRITE_CACHE, (void*) &cache_size);

  can_enable_indexes= mi_is_all_keys_active(file->s->state.key_map,
                                            file->s->base.keys);

  /*
    Only disable old index if the table was empty and we are inserting
    a lot of rows.
    Note that in end_bulk_insert() we may truncate the table if
    enable_indexes() failed, thus it's essential that indexes are
    disabled ONLY for an empty table.
  */
  if (file->state->records == 0 && can_enable_indexes &&
      (!rows || rows >= MI_MIN_ROWS_TO_DISABLE_INDEXES))
    mi_disable_non_unique_index(file, rows);
  else if (!file->bulk_insert &&
           (!rows || rows >= MI_MIN_ROWS_TO_USE_BULK_INSERT))
    mi_init_bulk_insert(file, thd->variables.bulk_insert_buff_size, rows);

  DBUG_VOID_RETURN;
}
/*
end special bulk-insert optimizations,
which have been activated by start_bulk_insert().
SYNOPSIS
end_bulk_insert()
no arguments
RETURN
0 OK
!= 0 Error
*/
/*
  Finish the bulk-insert optimizations activated by
  start_bulk_insert(): flush the bulk buffers, drop the write cache and
  re-enable any indexes that were disabled.

  @return 0 on success, otherwise an error code
*/
int ha_myisam::end_bulk_insert()
{
  mi_end_bulk_insert(file);
  int err= mi_extra(file, HA_EXTRA_NO_CACHE, 0);
  if (err != 0 || !can_enable_indexes)
    return err;

  /*
    Truncate the table when enable index operation is killed.
    After truncating the table we don't need to enable the
    indexes, because the last repair operation is aborted after
    setting the indexes as active and trying to recreate them.
  */
  err= enable_indexes(HA_KEY_SWITCH_NONUNIQ_SAVE);
  if (err != 0 && current_thd->killed)
  {
    delete_all_rows();
    /* not crashed, despite being killed during repair */
    file->s->state.changed&= ~(STATE_CRASHED|STATE_CRASHED_ON_REPAIR);
  }
  return err;
}
/*
  Automatic check-and-repair invoked on open of a possibly-crashed
  table. Runs check(); on failure (or an existing crash mark) repairs
  with flags derived from myisam_recover_options.

  The thread's query string is temporarily replaced with the table name
  so that SHOW PROCESSLIST / logs identify the table being recovered;
  it is restored before returning.

  @return 0 on success, 1 if the repair failed
*/
bool ha_myisam::check_and_repair(THD *thd)
{
  int error=0;
  int marked_crashed;
  HA_CHECK_OPT check_opt;
  DBUG_ENTER("ha_myisam::check_and_repair");

  check_opt.init();
  check_opt.flags= T_MEDIUM | T_AUTO_REPAIR;
  // Don't use quick if deleted rows
  if (!file->state->del && (myisam_recover_options & HA_RECOVER_QUICK))
    check_opt.flags|=T_QUICK;
  sql_print_warning("Checking table:   '%s'",table->s->path.str);

  const CSET_STRING query_backup= thd->query_string;
  thd->set_query(table->s->table_name.str,
                 (uint) table->s->table_name.length, system_charset_info);

  if ((marked_crashed= mi_is_crashed(file)) || check(thd, &check_opt))
  {
    sql_print_warning("Recovering table: '%s'",table->s->path.str);
    /* T_QUICK only when the crash was detected by check(), not marked */
    check_opt.flags=
      ((myisam_recover_options & HA_RECOVER_BACKUP ? T_BACKUP_DATA : 0) |
       (marked_crashed                             ? 0 : T_QUICK) |
       (myisam_recover_options & HA_RECOVER_FORCE  ? 0 : T_SAFE_REPAIR) |
       T_AUTO_REPAIR);
    if (repair(thd, &check_opt))
      error=1;
  }
  thd->set_query(query_backup);
  DBUG_RETURN(error);
}
/*
  Report whether the table is marked crashed; with external locking
  disabled, a non-zero open_count after a crash also counts as crashed.
*/
bool ha_myisam::is_crashed() const
{
  return (file->s->state.changed & STATE_CRASHED ||
          (my_disable_locking && file->s->state.open_count));
}
/* Update one row: bump the statistics counter and delegate to mi_update(). */
int ha_myisam::update_row(const uchar *old_data, uchar *new_data)
{
  ha_statistic_increment(&SSV::ha_update_count);
  return mi_update(file,old_data,new_data);
}
/* Delete one row: bump the statistics counter and delegate to mi_delete(). */
int ha_myisam::delete_row(const uchar *buf)
{
  ha_statistic_increment(&SSV::ha_delete_count);
  return mi_delete(file,buf);
}
C_MODE_START

/*
  Index Condition Pushdown callback called by MyISAM for each index
  entry. 'arg' is the ha_myisam handler that pushed the condition.
  Returns ICP_OUT_OF_RANGE once the key passes end_range, otherwise the
  truth value of the pushed condition.
*/
ICP_RESULT index_cond_func_myisam(void *arg)
{
  ha_myisam *h= (ha_myisam*)arg;

  if (h->end_range && h->compare_key_icp(h->end_range) > 0)
    return ICP_OUT_OF_RANGE; /* caller should return HA_ERR_END_OF_FILE already */

  return (ICP_RESULT) test(h->pushed_idx_cond->val_int());
}

C_MODE_END
/*
  Begin an index scan on index 'idx'; install the ICP callback when a
  condition was pushed for this index.
*/
int ha_myisam::index_init(uint idx, bool sorted)
{
  active_index=idx;
  if (pushed_idx_cond_keyno == idx)
    mi_set_index_cond_func(file, index_cond_func_myisam, this);
  return 0;
}
/*
  End an index scan: clear the active index, detach the ICP callback
  and close any multi-range-read state.
*/
int ha_myisam::index_end()
{
  active_index=MAX_KEY;
  //pushed_idx_cond_keyno= MAX_KEY;
  mi_set_index_cond_func(file, NULL, 0);
  in_range_check_pushed_down= FALSE;
  ds_mrr.dsmrr_close();
  return 0;
}
/* End a table scan: close any multi-range-read state. */
int ha_myisam::rnd_end()
{
  ds_mrr.dsmrr_close();
  return 0;
}
/*
  Read a row by key on the active index; row is returned in 'buf'.
  table->status reflects whether a row was found.
*/
int ha_myisam::index_read_map(uchar *buf, const uchar *key,
                              key_part_map keypart_map,
                              enum ha_rkey_function find_flag)
{
  MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str);
  DBUG_ASSERT(inited==INDEX);
  ha_statistic_increment(&SSV::ha_read_key_count);
  int error=mi_rkey(file, buf, active_index, key, keypart_map, find_flag);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_INDEX_READ_ROW_DONE(error);
  return error;
}
/*
  Read a row by key on an explicitly given index (no index_init()
  required); must not be used while an ICP condition is pushed.
*/
int ha_myisam::index_read_idx_map(uchar *buf, uint index, const uchar *key,
                                  key_part_map keypart_map,
                                  enum ha_rkey_function find_flag)
{
  DBUG_ASSERT(pushed_idx_cond == NULL);
  DBUG_ASSERT(pushed_idx_cond_keyno == MAX_KEY);
  MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str);
  ha_statistic_increment(&SSV::ha_read_key_count);
  int error=mi_rkey(file, buf, index, key, keypart_map, find_flag);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_INDEX_READ_ROW_DONE(error);
  return error;
}
/*
  Read the last row matching the key prefix on the active index
  (HA_READ_PREFIX_LAST).
*/
int ha_myisam::index_read_last_map(uchar *buf, const uchar *key,
                                   key_part_map keypart_map)
{
  MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str);
  DBUG_ENTER("ha_myisam::index_read_last");
  DBUG_ASSERT(inited==INDEX);
  ha_statistic_increment(&SSV::ha_read_key_count);
  int error=mi_rkey(file, buf, active_index, key, keypart_map,
                    HA_READ_PREFIX_LAST);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_INDEX_READ_ROW_DONE(error);
  DBUG_RETURN(error);
}
/* Read the next row in index order on the active index. */
int ha_myisam::index_next(uchar *buf)
{
  MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str);
  DBUG_ASSERT(inited==INDEX);
  ha_statistic_increment(&SSV::ha_read_next_count);
  int error=mi_rnext(file,buf,active_index);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_INDEX_READ_ROW_DONE(error);
  return error;
}
/* Read the previous row in index order on the active index. */
int ha_myisam::index_prev(uchar *buf)
{
  MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str);
  DBUG_ASSERT(inited==INDEX);
  ha_statistic_increment(&SSV::ha_read_prev_count);
  int error=mi_rprev(file,buf, active_index);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_INDEX_READ_ROW_DONE(error);
  return error;
}
/* Read the first row in index order on the active index. */
int ha_myisam::index_first(uchar *buf)
{
  MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str);
  DBUG_ASSERT(inited==INDEX);
  ha_statistic_increment(&SSV::ha_read_first_count);
  int error=mi_rfirst(file, buf, active_index);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_INDEX_READ_ROW_DONE(error);
  return error;
}
/* Read the last row in index order on the active index. */
int ha_myisam::index_last(uchar *buf)
{
  MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str);
  DBUG_ASSERT(inited==INDEX);
  ha_statistic_increment(&SSV::ha_read_last_count);
  int error=mi_rlast(file, buf, active_index);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_INDEX_READ_ROW_DONE(error);
  return error;
}
/*
  Read the next row with the same key as the last one read; rows
  flagged HA_ERR_RECORD_DELETED are skipped.
*/
int ha_myisam::index_next_same(uchar *buf,
                               const uchar *key __attribute__((unused)),
                               uint length __attribute__((unused)))
{
  int error;
  DBUG_ASSERT(inited==INDEX);
  MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str);
  ha_statistic_increment(&SSV::ha_read_next_count);
  do
  {
    error= mi_rnext_same(file,buf);
  } while (error == HA_ERR_RECORD_DELETED);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_INDEX_READ_ROW_DONE(error);
  return error;
}
/*
  Initialize a table access: start a full scan when 'scan' is set,
  otherwise just reset the handler (frees buffers) for positioned reads.
*/
int ha_myisam::rnd_init(bool scan)
{
  return scan ? mi_scan_init(file) : mi_reset(file);
}
/* Read the next row of a full table scan into 'buf'. */
int ha_myisam::rnd_next(uchar *buf)
{
  MYSQL_READ_ROW_START(table_share->db.str, table_share->table_name.str,
                       TRUE);
  ha_statistic_increment(&SSV::ha_read_rnd_next_count);
  int error=mi_scan(file, buf);
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_READ_ROW_DONE(error);
  return error;
}
/* Restart a scan at a saved position: simply re-read by position. */
int ha_myisam::restart_rnd_next(uchar *buf, uchar *pos)
{
  return rnd_pos(buf,pos);
}
/* Read the row at the file position previously saved by position(). */
int ha_myisam::rnd_pos(uchar *buf, uchar *pos)
{
  MYSQL_READ_ROW_START(table_share->db.str, table_share->table_name.str,
                       FALSE);
  ha_statistic_increment(&SSV::ha_read_rnd_count);
  int error=mi_rrnd(file, buf, my_get_ptr(pos,ref_length));
  table->status=error ? STATUS_NOT_FOUND: 0;
  MYSQL_READ_ROW_DONE(error);
  return error;
}
void ha_myisam::position(const uchar *record)
{
my_off_t row_position= mi_position(file);
my_store_ptr(ref, ref_length, row_position);
}
/*
  Refresh handler statistics from the engine. 'flag' is a bitmask of
  HA_STATUS_* values selecting which groups to update (row counts,
  constant table properties, error-key info, times, auto-increment).

  @return always 0
*/
int ha_myisam::info(uint flag)
{
  MI_ISAMINFO misam_info;
  char name_buff[FN_REFLEN];

  (void) mi_status(file,&misam_info,flag);
  if (flag & HA_STATUS_VARIABLE)
  {
    stats.records=           misam_info.records;
    stats.deleted=           misam_info.deleted;
    stats.data_file_length=  misam_info.data_file_length;
    stats.index_file_length= misam_info.index_file_length;
    stats.delete_length=     misam_info.delete_length;
    stats.check_time=        (ulong) misam_info.check_time;
    stats.mean_rec_length=   misam_info.mean_reclength;
  }
  if (flag & HA_STATUS_CONST)
  {
    TABLE_SHARE *share= table->s;
    stats.max_data_file_length=  misam_info.max_data_file_length;
    stats.max_index_file_length= misam_info.max_index_file_length;
    stats.create_time= misam_info.create_time;
    /*
      We want the value of stats.mrr_length_per_rec to be platform independent.
      The size of the chunk at the end of the join buffer used for MRR needs
      is calculated now basing on the values passed in the stats structure.
      The remaining part of the join buffer is used for records. A different
      number of records in the buffer results in a different number of buffer
      refills and in a different order of records in the result set.
    */
    stats.mrr_length_per_rec= misam_info.reflength + 8; // 8=max(sizeof(void *))
    ref_length= misam_info.reflength;
    share->db_options_in_use= misam_info.options;
    stats.block_size= myisam_block_size;        /* record block size */

    /*
      Update share.
      lock_shared_ha_data is slighly abused here, since there is no other
      way of locking the TABLE_SHARE.
    */
    lock_shared_ha_data();
    share->keys_in_use.set_prefix(share->keys);
    share->keys_in_use.intersect_extended(misam_info.key_map);
    share->keys_for_keyread.intersect(share->keys_in_use);
    share->db_record_offset= misam_info.record_offset;
    unlock_shared_ha_data();
    if (share->key_parts)
      memcpy((char*) table->key_info[0].rec_per_key,
             (char*) misam_info.rec_per_key,
             sizeof(table->key_info[0].rec_per_key[0])*share->key_parts);

    /*
       Set data_file_name and index_file_name to point at the symlink value
       if table is symlinked (Ie;  Real name is not same as generated name)
    */
    data_file_name= index_file_name= 0;
    fn_format(name_buff, file->filename, "", MI_NAME_DEXT,
              MY_APPEND_EXT | MY_UNPACK_FILENAME);
    if (strcmp(name_buff, misam_info.data_file_name))
      data_file_name=misam_info.data_file_name;
    fn_format(name_buff, file->filename, "", MI_NAME_IEXT,
              MY_APPEND_EXT | MY_UNPACK_FILENAME);
    if (strcmp(name_buff, misam_info.index_file_name))
      index_file_name=misam_info.index_file_name;
  }
  if (flag & HA_STATUS_ERRKEY)
  {
    errkey  = misam_info.errkey;
    my_store_ptr(dup_ref, ref_length, misam_info.dupp_key_pos);
  }
  if (flag & HA_STATUS_TIME)
    stats.update_time = (ulong) misam_info.update_time;
  if (flag & HA_STATUS_AUTO)
    stats.auto_increment_value= misam_info.auto_increment;

  return 0;
}
/*
  Forward a storage-engine hint to MyISAM; HA_EXTRA_MMAP is ignored
  when memory mapping is disabled by configuration.
*/
int ha_myisam::extra(enum ha_extra_function operation)
{
  const bool skip_mmap= (operation == HA_EXTRA_MMAP &&
                         !opt_myisam_use_mmap);
  return skip_mmap ? 0 : mi_extra(file, operation, 0);
}
/*
  Reset handler state between statements: clear the ICP callback,
  reset multi-range-read state and the engine-level buffers.
*/
int ha_myisam::reset(void)
{
  /* Reset MyISAM specific part for index condition pushdown */
  DBUG_ASSERT(pushed_idx_cond == NULL);
  DBUG_ASSERT(pushed_idx_cond_keyno == MAX_KEY);
  mi_set_index_cond_func(file, NULL, 0);
  ds_mrr.reset();
  return mi_reset(file);
}
/* To be used with WRITE_CACHE and EXTRA_CACHE */
int ha_myisam::extra_opt(enum ha_extra_function operation, ulong cache_size)
{
return mi_extra(file, operation, (void*) &cache_size);
}
/* Delete every row in the table (fast path for DELETE without WHERE). */
int ha_myisam::delete_all_rows()
{
  return mi_delete_all_rows(file);
}
/*
Intended to support partitioning.
Allows a particular partition to be truncated.
*/
int ha_myisam::truncate()
{
int error= delete_all_rows();
return error ? error : reset_auto_increment(0);
}
/* Set the table's auto-increment counter to 'value'. Always succeeds. */
int ha_myisam::reset_auto_increment(ulonglong value)
{
  file->s->state.auto_increment= value;
  return 0;
}
/* Drop the table files on disk; 'name' is the path without extension. */
int ha_myisam::delete_table(const char *name)
{
  return mi_delete_table(name);
}
/*
  Acquire or release (F_UNLCK) the engine-level table lock for 'thd'.
  Temporary tables take F_EXTRA_LCK instead of a real lock, since no
  other thread can see them; unlock is passed through unchanged.
*/
int ha_myisam::external_lock(THD *thd, int lock_type)
{
  file->in_use.data= thd;
  int effective_lock= lock_type;
  if (table->s->tmp_table && lock_type != F_UNLCK)
    effective_lock= F_EXTRA_LCK;
  return mi_lock_database(file, effective_lock);
}
/*
  Register this table's THR_LOCK_DATA in the server's lock array.
  The requested lock type is recorded unless it is TL_IGNORE or a lock
  is already set.

  @return pointer past the stored lock entry
*/
THR_LOCK_DATA **ha_myisam::store_lock(THD *thd,
                                      THR_LOCK_DATA **to,
                                      enum thr_lock_type lock_type)
{
  if (lock_type != TL_IGNORE && file->lock.type == TL_UNLOCK)
    file->lock.type=lock_type;
  *to++= &file->lock;
  return to;
}
/*
  Fill in create-info fields (for SHOW CREATE TABLE / ALTER) from the
  current table state: auto-increment value (unless explicitly given)
  and symlinked data/index file paths.
*/
void ha_myisam::update_create_info(HA_CREATE_INFO *create_info)
{
  ha_myisam::info(HA_STATUS_AUTO | HA_STATUS_CONST);
  if (!(create_info->used_fields & HA_CREATE_USED_AUTO))
  {
    create_info->auto_increment_value= stats.auto_increment_value;
  }
  create_info->data_file_name=data_file_name;
  create_info->index_file_name=index_file_name;
}
/*
  Create the MyISAM table files for CREATE TABLE.

  Converts the server-side TABLE definition to MyISAM key/column
  definitions, fills an MI_CREATE_INFO from the share and create
  options, and calls mi_create().

  @param name            path of the table (without extension)
  @param table_arg       server-side table definition
  @param ha_create_info  options from the CREATE TABLE statement
  @return 0 on success, or the error from table2myisam()/mi_create()
*/
int ha_myisam::create(const char *name, register TABLE *table_arg,
                      HA_CREATE_INFO *ha_create_info)
{
  int error;
  uint create_flags= 0, records, i;
  char buff[FN_REFLEN];
  MI_KEYDEF *keydef;
  MI_COLUMNDEF *recinfo;
  MI_CREATE_INFO create_info;
  TABLE_SHARE *share= table_arg->s;
  uint options= share->db_options_in_use;
  DBUG_ENTER("ha_myisam::create");

  /* Fulltext parser plugins live in the SQL layer, not in the engine */
  for (i= 0; i < share->keys; i++)
  {
    if (table_arg->key_info[i].flags & HA_USES_PARSER)
    {
      create_flags|= HA_CREATE_RELIES_ON_SQL_LAYER;
      break;
    }
  }
  if ((error= table2myisam(table_arg, &keydef, &recinfo, &records)))
    DBUG_RETURN(error); /* purecov: inspected */
  memset(&create_info, 0, sizeof(create_info));
  create_info.max_rows= share->max_rows;
  create_info.reloc_rows= share->min_rows;
  create_info.with_auto_increment= share->next_number_key_offset == 0;
  create_info.auto_increment= (ha_create_info->auto_increment_value ?
                               ha_create_info->auto_increment_value -1 :
                               (ulonglong) 0);
  create_info.data_file_length= ((ulonglong) share->max_rows *
                                 share->avg_row_length);
  create_info.language= share->table_charset->number;

#ifdef HAVE_READLINK
  if (my_use_symdir)
  {
    create_info.data_file_name= ha_create_info->data_file_name;
    create_info.index_file_name= ha_create_info->index_file_name;
  }
  else
#endif /* HAVE_READLINK */
  {
    /* Symlinking disabled: warn that the DIRECTORY options are ignored */
    if (ha_create_info->data_file_name)
      push_warning_printf(table_arg->in_use, Sql_condition::WARN_LEVEL_WARN,
                          WARN_OPTION_IGNORED, ER(WARN_OPTION_IGNORED),
                          "DATA DIRECTORY");
    if (ha_create_info->index_file_name)
      push_warning_printf(table_arg->in_use, Sql_condition::WARN_LEVEL_WARN,
                          WARN_OPTION_IGNORED, ER(WARN_OPTION_IGNORED),
                          "INDEX DIRECTORY");
  }

  if (ha_create_info->options & HA_LEX_CREATE_TMP_TABLE)
    create_flags|= HA_CREATE_TMP_TABLE;
  if (ha_create_info->options & HA_CREATE_KEEP_FILES)
    create_flags|= HA_CREATE_KEEP_FILES;
  if (options & HA_OPTION_PACK_RECORD)
    create_flags|= HA_PACK_RECORD;
  if (options & HA_OPTION_CHECKSUM)
    create_flags|= HA_CREATE_CHECKSUM;
  if (options & HA_OPTION_DELAY_KEY_WRITE)
    create_flags|= HA_CREATE_DELAY_KEY_WRITE;

  /* TODO: Check that the following fn_format is really needed */
  error= mi_create(fn_format(buff, name, "", "",
                             MY_UNPACK_FILENAME|MY_APPEND_EXT),
                   share->keys, keydef,
                   records, recinfo,
                   0, (MI_UNIQUEDEF*) 0,
                   &create_info, create_flags);
  my_free(recinfo);
  DBUG_RETURN(error);
}
/* Rename the table files on disk; paths are given without extension. */
int ha_myisam::rename_table(const char * from, const char * to)
{
  return mi_rename(from,to);
}
/*
  Reserve auto-increment value(s) for an insert.

  When the auto-increment column is the first key part, the cached table
  statistics already hold the next value and the whole remaining range is
  reserved (MyISAM only has table-level locking).  Otherwise the key prefix
  of the current row is looked up to find the largest existing value for
  that prefix, and only a single value is reserved.
*/
void ha_myisam::get_auto_increment(ulonglong offset, ulonglong increment,
                                   ulonglong nb_desired_values,
                                   ulonglong *first_value,
                                   ulonglong *nb_reserved_values)
{
  ulonglong nr;
  int error;
  uchar key[MI_MAX_KEY_LENGTH];

  if (!table->s->next_number_key_offset)
  {                                             // Autoincrement at key-start
    ha_myisam::info(HA_STATUS_AUTO);
    *first_value= stats.auto_increment_value;
    /* MyISAM has only table-level lock, so reserves to +inf */
    *nb_reserved_values= ULONGLONG_MAX;
    return;
  }

  /* it's safe to call the following if bulk_insert isn't on */
  mi_flush_bulk_insert(file, table->s->next_number_index);

  // Read only the index so we don't have to fetch full rows.
  (void) extra(HA_EXTRA_KEYREAD);

  // Build the key prefix (the parts preceding the auto-increment column)
  // from the row currently being inserted.
  key_copy(key, table->record[0],
           table->key_info + table->s->next_number_index,
           table->s->next_number_key_offset);

  // Find the last (largest) entry matching that prefix.
  error= mi_rkey(file, table->record[1], (int) table->s->next_number_index,
                 key, make_prev_keypart_map(table->s->next_number_keypart),
                 HA_READ_PREFIX_LAST);
  if (error)
    nr= 1;                // no existing row with this prefix: start at 1
  else
  {
    /* Get data from record[1] */
    nr= ((ulonglong) table->next_number_field->
         val_int_offset(table->s->rec_buff_length)+1);
  }
  extra(HA_EXTRA_NO_KEYREAD);
  *first_value= nr;
  /*
    MySQL needs to call us for next row: assume we are inserting ("a",null)
    here, we return 3, and next this statement will want to insert ("b",null):
    there is no reason why ("b",3+1) would be the good row to insert: maybe it
    already exists, maybe 3+1 is too large...
  */
  *nb_reserved_values= 1;
}
/*
  Find out how many rows there are in the given range
  SYNOPSIS
    records_in_range()
    inx Index to use
    min_key Start of range. Null pointer if from first key
    max_key End of range. Null pointer if to last key
  NOTES
    min_key.flag can have one of the following values:
    HA_READ_KEY_EXACT Include the key in the range
    HA_READ_AFTER_KEY Don't include key in range
    max_key.flag can have one of the following values:
    HA_READ_BEFORE_KEY Don't include key in range
    HA_READ_AFTER_KEY Include all 'end_key' values in the range
  RETURN
    HA_POS_ERROR Something is wrong with the index tree.
    0 There are no matching keys in the given range
    number > 0 There are approximately 'number' matching rows in
    the range.
*/
ha_rows ha_myisam::records_in_range(uint inx, key_range *min_key,
                                    key_range *max_key)
{
  // Hand the estimate off to the MyISAM index-tree range scanner.
  const ha_rows estimate=
    (ha_rows) mi_records_in_range(file, (int) inx, min_key, max_key);
  return estimate;
}
/*
  Fetch the next matching row of an active full-text search.
  Returns -1 if no full-text search has been initialised, otherwise the
  error code from the full-text reader (0 on success).
*/
int ha_myisam::ft_read(uchar *buf)
{
  int error;

  if (!ft_handler)
    return -1;

  // Count this as an index "read next" in the session statistics.
  thread_safe_increment(table->in_use->status_var.ha_read_next_count,
                        &LOCK_status);  // why ?

  error=ft_handler->please->read_next(ft_handler,(char*) buf);

  table->status=error ? STATUS_NOT_FOUND: 0;
  return error;
}
/*
  Return the table's live data checksum, as maintained in the MyISAM
  share state.
*/
uint ha_myisam::checksum() const
{
  return (uint) file->state->checksum;
}
/*
  Decide whether ALTER TABLE may keep the existing data/index files
  (COMPATIBLE_DATA_YES) or must rebuild the table (COMPATIBLE_DATA_NO).
*/
bool ha_myisam::check_if_incompatible_data(HA_CREATE_INFO *info,
                                           uint table_changes)
{
  uint options= table->s->db_options_in_use;

  // NOTE(review): data_file_name/index_file_name are compared as pointers,
  // not with strcmp — presumably any newly supplied path forces a rebuild;
  // confirm against the callers of this hook.
  if (info->auto_increment_value != stats.auto_increment_value ||
      info->data_file_name != data_file_name ||
      info->index_file_name != index_file_name ||
      table_changes == IS_EQUAL_NO ||
      table_changes & IS_EQUAL_PACK_LENGTH) // Not implemented yet
    return COMPATIBLE_DATA_NO;

  // The row-format options affecting the on-disk layout must match exactly.
  if ((options & (HA_OPTION_PACK_RECORD | HA_OPTION_CHECKSUM |
                  HA_OPTION_DELAY_KEY_WRITE)) !=
      (info->table_options & (HA_OPTION_PACK_RECORD | HA_OPTION_CHECKSUM |
                              HA_OPTION_DELAY_KEY_WRITE)))
    return COMPATIBLE_DATA_NO;

  return COMPATIBLE_DATA_YES;
}
// Engine-level panic routine implemented in the MyISAM library.
extern int mi_panic(enum ha_panic_function flag);

// handlerton::panic hook: forwards the requested panic action (e.g. close
// all tables at shutdown) to the MyISAM library.
int myisam_panic(handlerton *hton, ha_panic_function flag)
{
  return mi_panic(flag);
}
/*
  Plugin initialisation: configure MyISAM globals from startup options and
  fill in the handlerton. 'p' is the handlerton allocated by the server.
  Returns 0 on success.
*/
static int myisam_init(void *p)
{
  handlerton *myisam_hton;

#ifdef HAVE_PSI_INTERFACE
  init_myisam_psi_keys();
#endif

  /* Set global variables based on startup options */
  if (myisam_recover_options)
    ha_open_options|=HA_OPEN_ABORT_IF_CRASHED;
  else
    myisam_recover_options= HA_RECOVER_OFF;

  // Normalise the configured block size to a power of two (via my_bit_log2).
  myisam_block_size=(uint) 1 << my_bit_log2(opt_myisam_block_size);

  myisam_hton= (handlerton *)p;
  myisam_hton->state= SHOW_OPTION_YES;
  myisam_hton->db_type= DB_TYPE_MYISAM;
  myisam_hton->create= myisam_create_handler;
  myisam_hton->panic= myisam_panic;
  myisam_hton->flags= HTON_CAN_RECREATE | HTON_SUPPORT_LOG_TABLES;
  myisam_hton->is_supported_system_table= myisam_is_supported_system_table;
  return 0;
}
/****************************************************************************
* MyISAM MRR implementation: use DS-MRR
***************************************************************************/
// Begin a multi-range read: all MRR work is delegated to the DS-MRR helper.
int ha_myisam::multi_range_read_init(RANGE_SEQ_IF *seq, void *seq_init_param,
                                     uint n_ranges, uint mode,
                                     HANDLER_BUFFER *buf)
{
  return ds_mrr.dsmrr_init(this, seq, seq_init_param, n_ranges, mode, buf);
}

// Fetch the next row of the current multi-range read.
int ha_myisam::multi_range_read_next(char **range_info)
{
  return ds_mrr.dsmrr_next(range_info);
}

// Cost/row estimate for an MRR scan over a known sequence of ranges.
ha_rows ha_myisam::multi_range_read_info_const(uint keyno, RANGE_SEQ_IF *seq,
                                               void *seq_init_param,
                                               uint n_ranges, uint *bufsz,
                                               uint *flags, Cost_estimate *cost)
{
  /*
    This call is here because there is no location where this->table would
    already be known.
    TODO: consider moving it into some per-query initialization call.
  */
  ds_mrr.init(this, table);
  return ds_mrr.dsmrr_info_const(keyno, seq, seq_init_param, n_ranges, bufsz,
                                 flags, cost);
}

// Cost/row estimate for an MRR scan when only range/key counts are known.
ha_rows ha_myisam::multi_range_read_info(uint keyno, uint n_ranges, uint keys,
                                         uint *bufsz, uint *flags,
                                         Cost_estimate *cost)
{
  // Same per-query initialisation caveat as multi_range_read_info_const().
  ds_mrr.init(this, table);
  return ds_mrr.dsmrr_info(keyno, n_ranges, keys, bufsz, flags, cost);
}
/* MyISAM MRR implementation ends */
/* Index condition pushdown implementation*/
/*
  Index-condition-pushdown hook.  Returns NULL when the condition was
  accepted by the engine, or the (unchanged) condition when the server
  must evaluate it itself.
*/
Item *ha_myisam::idx_cond_push(uint keyno_arg, Item* idx_cond_arg)
{
  /*
    Check if the key contains a blob field. If it does then MyISAM
    should not accept the pushed index condition since MyISAM will not
    read the blob field from the index entry during evaluation of the
    pushed index condition and the BLOB field might be part of the
    range evaluation done by the ICP code.
  */
  const KEY *key= &table_share->key_info[keyno_arg];

  for (uint k= 0; k < key->user_defined_key_parts; ++k)
  {
    const KEY_PART_INFO *key_part= &key->key_part[k];
    if (key_part->key_part_flag & HA_BLOB_PART)
    {
      /* Let the server handle the index condition */
      return idx_cond_arg;
    }
  }

  // Remember the pushed condition; install the evaluation callback now if
  // the relevant index is already active.
  pushed_idx_cond_keyno= keyno_arg;
  pushed_idx_cond= idx_cond_arg;
  in_range_check_pushed_down= TRUE;
  if (active_index == pushed_idx_cond_keyno)
    mi_set_index_cond_func(file, index_cond_func_myisam, this);
  return NULL;
}
// System variables exported by the MyISAM plugin (NULL-terminated list).
static struct st_mysql_sys_var* myisam_sysvars[]= {
  MYSQL_SYSVAR(block_size),
  MYSQL_SYSVAR(data_pointer_size),
  MYSQL_SYSVAR(max_sort_file_size),
  MYSQL_SYSVAR(recover_options),
  MYSQL_SYSVAR(repair_threads),
  MYSQL_SYSVAR(sort_buffer_size),
  MYSQL_SYSVAR(use_mmap),
  MYSQL_SYSVAR(mmap_size),
  MYSQL_SYSVAR(stats_method),
  0
};

// Storage-engine interface descriptor handed to the plugin framework.
struct st_mysql_storage_engine myisam_storage_engine=
{ MYSQL_HANDLERTON_INTERFACE_VERSION };
// Plugin registration record for the MyISAM storage engine.  Field order is
// fixed by the st_mysql_plugin struct expanded by this macro.
mysql_declare_plugin(myisam)
{
  MYSQL_STORAGE_ENGINE_PLUGIN,
  &myisam_storage_engine,
  "MyISAM",
  "MySQL AB",
  "MyISAM storage engine",
  PLUGIN_LICENSE_GPL,
  myisam_init, /* Plugin Init */
  NULL, /* Plugin Deinit */
  0x0100, /* 1.0 */
  NULL, /* status variables */
  myisam_sysvars, /* system variables */
  NULL,
  0,
}
mysql_declare_plugin_end;
#ifdef HAVE_QUERY_CACHE
/**
@brief Register a named table with a call back function to the query cache.
@param thd The thread handle
@param table_key A pointer to the table name in the table cache
@param key_length The length of the table name
@param[out] engine_callback The pointer to the storage engine call back
function, currently 0
@param[out] engine_data Engine data will be set to 0.
@note Despite the name of this function, it is used to check each statement
before it is cached and not to register a table or callback function.
@see handler::register_query_cache_table
@return The error code. The engine_data and engine_callback will be set to 0.
@retval TRUE Success
@retval FALSE An error occurred
*/
my_bool ha_myisam::register_query_cache_table(THD *thd, char *table_name,
                                              uint table_name_len,
                                              qc_engine_callback
                                              *engine_callback,
                                              ulonglong *engine_data)
{
  DBUG_ENTER("ha_myisam::register_query_cache_table");
  /*
    No call back function is needed to determine if a cached statement
    is valid or not.
  */
  *engine_callback= 0;

  /*
    No engine data is needed.
  */
  *engine_data= 0;

  if (file->s->concurrent_insert)
  {
    /*
      If a concurrent INSERT has happened just before the currently
      processed SELECT statement, the total size of the table is
      unknown.

      To determine if the table size is known, the current thread's snap
      shot of the table size with the actual table size are compared.

      If the table size is unknown the SELECT statement can't be cached.

      When concurrent inserts are disabled at table open, mi_open()
      does not assign a get_status() function. In this case the local
      ("current") status is never updated. We would wrongly think that
      we cannot cache the statement.
    */
    ulonglong actual_data_file_length;
    ulonglong current_data_file_length;

    /*
      POSIX visibility rules specify that "2. Whatever memory values a
      thread can see when it unlocks a mutex <...> can also be seen by any
      thread that later locks the same mutex". In this particular case,
      concurrent insert thread had modified the data_file_length in
      MYISAM_SHARE before it has unlocked (or even locked)
      structure_guard_mutex. So, here we're guaranteed to see at least that
      value after we've locked the same mutex. We can see a later value
      (modified by some other thread) though, but it's ok, as we only want
      to know if the variable was changed, the actual new value doesn't matter
    */
    actual_data_file_length= file->s->state.state.data_file_length;
    current_data_file_length= file->save_state.data_file_length;

    // A mismatch means a concurrent INSERT is in flight: refuse to cache.
    if (current_data_file_length != actual_data_file_length)
    {
      /* Don't cache current statement. */
      DBUG_RETURN(FALSE);
    }
  }

  /*
    This query execution might have started after the query cache was flushed
    by a concurrent INSERT. In this case, don't cache this statement as the
    data file length difference might not be visible yet if the tables haven't
    been unlocked by the concurrent insert thread.
  */
  if (file->state->uncacheable)
    DBUG_RETURN(FALSE);

  /* It is ok to try to cache current statement. */
  DBUG_RETURN(TRUE);
}
#endif
| tangtang2013/MySQL-Research | storage/myisam/ha_myisam.cc | C++ | gpl-2.0 | 77,068 |
<?php
/**
* PHP OpenCloud library.
*
* @copyright 2013 Rackspace Hosting, Inc. See LICENSE for information.
* @license https://www.apache.org/licenses/LICENSE-2.0
* @author Glen Campbell <glen.campbell@rackspace.com>
* @author Jamie Hannaford <jamie.hannaford@rackspace.com>
*/
namespace OpenCloud\CloudMonitoring\Resource;
use OpenCloud\CloudMonitoring\Exception;
use OpenCloud\Common\Collection\ResourceIterator;
/**
* Agent class.
*/
class AgentHost extends ReadOnlyResource
{
    /** @var string Agent token. */
    private $token;

    /** @var string Human-readable label. */
    private $label;

    protected static $json_name = false;
    protected static $json_collection_name = 'info';
    protected static $url_resource = 'host_info';

    /** @var array Host-info categories that the monitoring agent can report. */
    private $allowedTypes = array(
        'cpus',
        'disks',
        'filesystems',
        'memory',
        'network_interfaces',
        'processes',
        'system',
        'who'
    );

    /**
     * Retrieve host information of a given category.
     *
     * @param string $type One of the categories listed in $allowedTypes.
     * @return \OpenCloud\Common\Collection\ResourceIterator
     * @throws Exception\AgentException When $type is not a supported category.
     */
    public function info($type)
    {
        if (false === in_array($type, $this->allowedTypes)) {
            $message = sprintf(
                'Incorrect info type. Please specify one of the following: %s',
                implode(', ', $this->allowedTypes)
            );
            throw new Exception\AgentException($message);
        }

        return $this->getService()->resourceList('AgentHostInfo', $this->getUrl($type), $this);
    }
}
} | sohilgupta/datamelons | wp_bkp/html/wp-content/plugins/updraftplus/oc/rs/lib/OpenCloud/CloudMonitoring/Resource/AgentHost.php | PHP | gpl-2.0 | 1,300 |
/***************************************************************************
qgserrordialog.cpp - error description
-------------------
begin : October 2012
copyright : (C) October 2012 Radim Blazek
email : radim dot blazek at gmail dot com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include "qgserrordialog.h"
#include <QMessageBox>
#include <QSettings>
// Build the error dialog: set up the UI, show the error summary, and restore
// the user's last "show details" preference from QSettings.
QgsErrorDialog::QgsErrorDialog( const QgsError & theError, const QString & theTitle, QWidget *parent, const Qt::WindowFlags& fl )
    : QDialog( parent, fl )
    , mError( theError )
{
  setupUi( this );
  QString title = theTitle;
  if ( title.isEmpty() ) title = tr( "Error" );
  setWindowTitle( title );

  // QMessageBox has static standardIcon( Icon icon ), but it is marked as obsolete
  QMessageBox messageBox( QMessageBox::Critical, "", "" );
  mIconLabel->setPixmap( messageBox.iconPixmap() );

  mSummaryTextBrowser->setOpenExternalLinks( true );
  mDetailTextBrowser->setOpenExternalLinks( true );
  mDetailTextBrowser->hide();

  // Make the summary browser blend into the dialog background.
  QPalette p = palette();
  p.setColor( QPalette::Base, Qt::transparent );
  mSummaryTextBrowser->setPalette( p );

  mDetailCheckBox->hide();

  mSummaryTextBrowser->setText( mError.summary() );
  mDetailTextBrowser->setText( mError.message( QgsErrorMessage::Html ) );

  resize( width(), 150 );

  // Restore whether the detail pane was open last time.
  QSettings settings;
  Qt::CheckState state = ( Qt::CheckState ) settings.value( "/Error/dialog/detail", 0 ).toInt();
  mDetailCheckBox->setCheckState( state );
  if ( state == Qt::Checked ) on_mDetailPushButton_clicked();
}

QgsErrorDialog::~QgsErrorDialog()
{
}
// Modal convenience wrapper: construct the dialog on the stack and block
// until the user dismisses it.
void QgsErrorDialog::show( const QgsError & theError, const QString & theTitle, QWidget *parent, const Qt::WindowFlags& fl )
{
  QgsErrorDialog dialog( theError, theTitle, parent, fl );
  dialog.exec();
}
// Swap the summary view for the detail view and enlarge the dialog.
void QgsErrorDialog::on_mDetailPushButton_clicked()
{
  mSummaryTextBrowser->hide();
  mDetailTextBrowser->show();
  mDetailCheckBox->show();
  mDetailPushButton->hide();
  resize( width(), 400 );
}

// Persist the "always show details" preference for future dialogs.
void QgsErrorDialog::on_mDetailCheckBox_stateChanged( int state )
{
  QSettings settings;
  settings.setValue( "/Error/dialog/detail", state );
}
| sebastic/QGIS | src/gui/qgserrordialog.cpp | C++ | gpl-2.0 | 2,866 |
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
// Default: a null glyph at the origin with zero width.
PositionedGlyph::PositionedGlyph() noexcept
    : character (0), glyph (0), x (0), y (0), w (0), whitespace (false)
{
}

// Fully-specified glyph: character code, glyph index, baseline position (x, y),
// advance width w, and whether it represents whitespace.
PositionedGlyph::PositionedGlyph (const Font& font_, const juce_wchar character_, const int glyph_,
                                  const float x_, const float y_, const float w_, const bool whitespace_)
    : font (font_), character (character_), glyph (glyph_),
      x (x_), y (y_), w (w_), whitespace (whitespace_)
{
}

PositionedGlyph::PositionedGlyph (const PositionedGlyph& other)
    : font (other.font), character (other.character), glyph (other.glyph),
      x (other.x), y (other.y), w (other.w), whitespace (other.whitespace)
{
}

PositionedGlyph::~PositionedGlyph() {}

PositionedGlyph& PositionedGlyph::operator= (const PositionedGlyph& other)
{
    font = other.font;
    character = other.character;
    glyph = other.glyph;
    x = other.x;
    y = other.y;
    w = other.w;
    whitespace = other.whitespace;
    return *this;
}

// Helper: select 'font' on the context and render a single glyph index with
// the given transform.
static inline void drawGlyphWithFont (const Graphics& g, int glyph, const Font& font, const AffineTransform& t)
{
    LowLevelGraphicsContext& context = g.getInternalContext();
    context.setFont (font);
    context.drawGlyph (glyph, t);
}

// Draw this glyph at its stored position (whitespace draws nothing).
void PositionedGlyph::draw (const Graphics& g) const
{
    if (! isWhitespace())
        drawGlyphWithFont (g, glyph, font, AffineTransform::translation (x, y));
}

// Draw this glyph with an extra transform applied after its own translation.
void PositionedGlyph::draw (const Graphics& g, const AffineTransform& transform) const
{
    if (! isWhitespace())
        drawGlyphWithFont (g, glyph, font, AffineTransform::translation (x, y).followedBy (transform));
}

// Append this glyph's outline (scaled and positioned) to 'path'.
void PositionedGlyph::createPath (Path& path) const
{
    if (! isWhitespace())
    {
        if (Typeface* const t = font.getTypeface())
        {
            Path p;
            t->getOutlineForGlyph (glyph, p);

            path.addPath (p, AffineTransform::scale (font.getHeight() * font.getHorizontalScale(), font.getHeight())
                                             .translated (x, y));
        }
    }
}

// Precise hit-test: first a cheap bounds check, then test the point against
// the glyph outline in (unscaled) typeface space.
bool PositionedGlyph::hitTest (float px, float py) const
{
    if (getBounds().contains (px, py) && ! isWhitespace())
    {
        if (Typeface* const t = font.getTypeface())
        {
            Path p;
            t->getOutlineForGlyph (glyph, p);

            // Map the query point back into the outline's coordinate space.
            AffineTransform::translation (-x, -y)
                            .scaled (1.0f / (font.getHeight() * font.getHorizontalScale()), 1.0f / font.getHeight())
                            .transformPoint (px, py);

            return p.contains (px, py);
        }
    }

    return false;
}

// Shift the glyph's baseline position by the given deltas.
void PositionedGlyph::moveBy (const float deltaX,
                              const float deltaY)
{
    x += deltaX;
    y += deltaY;
}
//==============================================================================
GlyphArrangement::GlyphArrangement()
{
    // Pre-allocate a reasonable amount to avoid early reallocations.
    glyphs.ensureStorageAllocated (128);
}

GlyphArrangement::GlyphArrangement (const GlyphArrangement& other)
    : glyphs (other.glyphs)
{
}

GlyphArrangement& GlyphArrangement::operator= (const GlyphArrangement& other)
{
    glyphs = other.glyphs;
    return *this;
}

GlyphArrangement::~GlyphArrangement()
{
}

//==============================================================================
// Remove all glyphs.
void GlyphArrangement::clear()
{
    glyphs.clear();
}

// Direct (unchecked) access to the glyph at 'index'.
PositionedGlyph& GlyphArrangement::getGlyph (const int index) const noexcept
{
    return glyphs.getReference (index);
}

//==============================================================================
// Append all glyphs from another arrangement.
void GlyphArrangement::addGlyphArrangement (const GlyphArrangement& other)
{
    glyphs.addArray (other.glyphs);
}

void GlyphArrangement::addGlyph (const PositionedGlyph& glyph)
{
    glyphs.add (glyph);
}

// Remove 'num' glyphs from 'startIndex'; a negative 'num' means "to the end".
void GlyphArrangement::removeRangeOfGlyphs (int startIndex, const int num)
{
    glyphs.removeRange (startIndex, num < 0 ? glyphs.size() : num);
}
//==============================================================================
// Append one line of text with no practical width limit and no ellipsis.
void GlyphArrangement::addLineOfText (const Font& font,
                                      const String& text,
                                      const float xOffset,
                                      const float yOffset)
{
    addCurtailedLineOfText (font, text, xOffset, yOffset, 1.0e10f, false);
}

// Append one line of text, stopping once it would exceed maxWidthPixels.
// If useEllipsis is set, the truncated line ends with "..." instead.
void GlyphArrangement::addCurtailedLineOfText (const Font& font,
                                               const String& text,
                                               const float xOffset,
                                               const float yOffset,
                                               const float maxWidthPixels,
                                               const bool useEllipsis)
{
    if (text.isNotEmpty())
    {
        Array <int> newGlyphs;
        Array <float> xOffsets;
        font.getGlyphPositions (text, newGlyphs, xOffsets);

        const int textLen = newGlyphs.size();
        glyphs.ensureStorageAllocated (glyphs.size() + textLen);

        String::CharPointerType t (text.getCharPointer());

        for (int i = 0; i < textLen; ++i)
        {
            // xOffsets has one extra entry: [i+1] is the right edge of glyph i.
            const float nextX = xOffsets.getUnchecked (i + 1);

            if (nextX > maxWidthPixels + 1.0f)
            {
                // curtail the string if it's too wide..
                if (useEllipsis && textLen > 3 && glyphs.size() >= 3)
                    insertEllipsis (font, xOffset + maxWidthPixels, 0, glyphs.size());

                break;
            }
            else
            {
                const float thisX = xOffsets.getUnchecked (i);
                const bool isWhitespace = t.isWhitespace();

                glyphs.add (PositionedGlyph (font, t.getAndAdvance(),
                                             newGlyphs.getUnchecked(i),
                                             xOffset + thisX, yOffset,
                                             nextX - thisX, isWhitespace));
            }
        }
    }
}
// Replace the tail of the glyph range [startIndex, endIndex) with up to three
// dot glyphs so the result fits within maxXPos.  Returns the net number of
// glyphs removed (removed minus inserted dots).
int GlyphArrangement::insertEllipsis (const Font& font, const float maxXPos,
                                      const int startIndex, int endIndex)
{
    int numDeleted = 0;

    if (glyphs.size() > 0)
    {
        // Measure a dot's advance width from a two-dot layout.
        Array<int> dotGlyphs;
        Array<float> dotXs;
        font.getGlyphPositions ("..", dotGlyphs, dotXs);

        const float dx = dotXs[1];
        float xOffset = 0.0f, yOffset = 0.0f;

        // Strip glyphs from the end until three dots will fit.
        while (endIndex > startIndex)
        {
            const PositionedGlyph& pg = glyphs.getReference (--endIndex);
            xOffset = pg.x;
            yOffset = pg.y;

            glyphs.remove (endIndex);
            ++numDeleted;

            if (xOffset + dx * 3 <= maxXPos)
                break;
        }

        // Insert as many dots (up to three) as still fit within maxXPos.
        for (int i = 3; --i >= 0;)
        {
            glyphs.insert (endIndex++, PositionedGlyph (font, '.', dotGlyphs.getFirst(),
                                                        xOffset, yOffset, dx, false));
            --numDeleted;
            xOffset += dx;

            if (xOffset > maxXPos)
                break;
        }
    }

    return numDeleted;
}
// Lay out 'text' word-wrapped to maxLineWidth, applying the horizontal
// justification flags to each resulting line.  Lines advance downwards by
// the font height.
void GlyphArrangement::addJustifiedText (const Font& font,
                                         const String& text,
                                         float x, float y,
                                         const float maxLineWidth,
                                         Justification horizontalLayout)
{
    int lineStartIndex = glyphs.size();
    // First lay everything out as one unwrapped line, then break it up below.
    addLineOfText (font, text, x, y);

    const float originalY = y;

    while (lineStartIndex < glyphs.size())
    {
        int i = lineStartIndex;

        if (glyphs.getReference(i).getCharacter() != '\n'
             && glyphs.getReference(i).getCharacter() != '\r')
            ++i;

        const float lineMaxX = glyphs.getReference (lineStartIndex).getLeft() + maxLineWidth;
        int lastWordBreakIndex = -1;

        // Advance i to the end of this visual line: a hard break, or the last
        // word boundary before the width limit.
        while (i < glyphs.size())
        {
            const PositionedGlyph& pg = glyphs.getReference (i);
            const juce_wchar c = pg.getCharacter();

            if (c == '\r' || c == '\n')
            {
                ++i;

                // Treat a CRLF pair as a single line break.
                if (c == '\r' && i < glyphs.size()
                     && glyphs.getReference(i).getCharacter() == '\n')
                    ++i;

                break;
            }
            else if (pg.isWhitespace())
            {
                lastWordBreakIndex = i + 1;
            }
            else if (pg.getRight() - 0.0001f >= lineMaxX)
            {
                // Too wide: wrap at the last whitespace if there was one.
                if (lastWordBreakIndex >= 0)
                    i = lastWordBreakIndex;

                break;
            }

            ++i;
        }

        // Find the right edge of the line, ignoring trailing whitespace.
        const float currentLineStartX = glyphs.getReference (lineStartIndex).getLeft();
        float currentLineEndX = currentLineStartX;

        for (int j = i; --j >= lineStartIndex;)
        {
            if (! glyphs.getReference (j).isWhitespace())
            {
                currentLineEndX = glyphs.getReference (j).getRight();
                break;
            }
        }

        float deltaX = 0.0f;

        if (horizontalLayout.testFlags (Justification::horizontallyJustified))
            spreadOutLine (lineStartIndex, i - lineStartIndex, maxLineWidth);
        else if (horizontalLayout.testFlags (Justification::horizontallyCentred))
            deltaX = (maxLineWidth - (currentLineEndX - currentLineStartX)) * 0.5f;
        else if (horizontalLayout.testFlags (Justification::right))
            deltaX = maxLineWidth - (currentLineEndX - currentLineStartX);

        // Move this line into its final position.
        moveRangeOfGlyphs (lineStartIndex, i - lineStartIndex,
                           x + deltaX - currentLineStartX, y - originalY);

        lineStartIndex = i;
        y += font.getHeight();
    }
}
// Lay out 'text' so that it fits inside the given rectangle, shrinking the
// font, squashing the glyphs horizontally (down to minimumHorizontalScale)
// and/or wrapping onto up to 'maximumLines' lines as needed, with an
// ellipsis when it still won't fit.
void GlyphArrangement::addFittedText (const Font& f,
                                      const String& text,
                                      const float x, const float y,
                                      const float width, const float height,
                                      Justification layout,
                                      int maximumLines,
                                      const float minimumHorizontalScale)
{
    // doesn't make much sense if this is outside a sensible range of 0.5 to 1.0
    jassert (minimumHorizontalScale > 0 && minimumHorizontalScale <= 1.0f);

    // Text containing explicit line breaks is handled by the justified-text
    // path, then shifted vertically to honour the vertical layout flags.
    if (text.containsAnyOf ("\r\n"))
    {
        GlyphArrangement ga;
        ga.addJustifiedText (f, text, x, y, width, layout);

        const Rectangle<float> bb (ga.getBoundingBox (0, -1, false));

        float dy = y - bb.getY();

        if (layout.testFlags (Justification::verticallyCentred))   dy += (height - bb.getHeight()) * 0.5f;
        else if (layout.testFlags (Justification::bottom))         dy += (height - bb.getHeight());

        ga.moveRangeOfGlyphs (0, -1, 0.0f, dy);
        glyphs.addArray (ga.glyphs);
        return;
    }

    int startIndex = glyphs.size();
    addLineOfText (f, text.trim(), x, y);

    if (glyphs.size() > startIndex)
    {
        float lineWidth = glyphs.getReference (glyphs.size() - 1).getRight()
                            - glyphs.getReference (startIndex).getLeft();

        if (lineWidth <= 0)
            return;

        if (lineWidth * minimumHorizontalScale < width)
        {
            // Fits on one line (possibly after a mild horizontal squash).
            if (lineWidth > width)
                stretchRangeOfGlyphs (startIndex, glyphs.size() - startIndex,
                                      width / lineWidth);

            justifyGlyphs (startIndex, glyphs.size() - startIndex,
                           x, y, width, height, layout);
        }
        else if (maximumLines <= 1)
        {
            // Only one line allowed: squash and/or ellipsise it.
            fitLineIntoSpace (startIndex, glyphs.size() - startIndex,
                              x, y, width, height, f, layout, minimumHorizontalScale);
        }
        else
        {
            // Multi-line fitting: shrink the font as the line count grows,
            // then break the single laid-out line into wrapped rows.
            Font font (f);
            String txt (text.trim());
            const int length = txt.length();
            const int originalStartIndex = startIndex;
            int numLines = 1;

            if (length <= 12 && ! txt.containsAnyOf (" -\t\r\n"))
                maximumLines = 1;

            maximumLines = jmin (maximumLines, length);

            while (numLines < maximumLines)
            {
                ++numLines;

                const float newFontHeight = height / (float) numLines;

                if (newFontHeight < font.getHeight())
                {
                    font.setHeight (jmax (8.0f, newFontHeight));

                    // Re-lay-out with the smaller font.
                    removeRangeOfGlyphs (startIndex, -1);
                    addLineOfText (font, txt, x, y);

                    lineWidth = glyphs.getReference (glyphs.size() - 1).getRight()
                                    - glyphs.getReference (startIndex).getLeft();
                }

                if (numLines > lineWidth / width || newFontHeight < 8.0f)
                    break;
            }

            if (numLines < 1)
                numLines = 1;

            float lineY = y;
            float widthPerLine = lineWidth / numLines;

            for (int line = 0; line < numLines; ++line)
            {
                int i = startIndex;
                float lineStartX = glyphs.getReference (startIndex).getLeft();

                if (line == numLines - 1)
                {
                    // Last line takes everything that's left.
                    widthPerLine = width;
                    i = glyphs.size();
                }
                else
                {
                    while (i < glyphs.size())
                    {
                        lineWidth = (glyphs.getReference (i).getRight() - lineStartX);

                        if (lineWidth > widthPerLine)
                        {
                            // got to a point where the line's too long, so skip forward to find a
                            // good place to break it..
                            const int searchStartIndex = i;

                            while (i < glyphs.size())
                            {
                                if ((glyphs.getReference (i).getRight() - lineStartX) * minimumHorizontalScale < width)
                                {
                                    if (glyphs.getReference (i).isWhitespace()
                                         || glyphs.getReference (i).getCharacter() == '-')
                                    {
                                        ++i;
                                        break;
                                    }
                                }
                                else
                                {
                                    // can't find a suitable break, so try looking backwards..
                                    i = searchStartIndex;

                                    for (int back = 1; back < jmin (7, i - startIndex - 1); ++back)
                                    {
                                        if (glyphs.getReference (i - back).isWhitespace()
                                             || glyphs.getReference (i - back).getCharacter() == '-')
                                        {
                                            i -= back - 1;
                                            break;
                                        }
                                    }

                                    break;
                                }

                                ++i;
                            }

                            break;
                        }

                        ++i;
                    }

                    // Trim whitespace around the chosen break point.
                    int wsStart = i;

                    while (wsStart > 0 && glyphs.getReference (wsStart - 1).isWhitespace())
                        --wsStart;

                    int wsEnd = i;

                    while (wsEnd < glyphs.size() && glyphs.getReference (wsEnd).isWhitespace())
                        ++wsEnd;

                    removeRangeOfGlyphs (wsStart, wsEnd - wsStart);
                    i = jmax (wsStart, startIndex + 1);
                }

                i -= fitLineIntoSpace (startIndex, i - startIndex,
                                       x, lineY, width, font.getHeight(), font,
                                       layout.getOnlyHorizontalFlags() | Justification::verticallyCentred,
                                       minimumHorizontalScale);

                startIndex = i;
                lineY += font.getHeight();

                if (startIndex >= glyphs.size())
                    break;
            }

            justifyGlyphs (originalStartIndex, glyphs.size() - originalStartIndex,
                           x, y, width, height, layout.getFlags() & ~Justification::horizontallyJustified);
        }
    }
}
//==============================================================================
// Translate 'num' glyphs starting at startIndex by (dx, dy).  A negative
// 'num' (or one overrunning the array) means "to the end of the arrangement".
void GlyphArrangement::moveRangeOfGlyphs (int startIndex, int num, const float dx, const float dy)
{
    jassert (startIndex >= 0);

    // A zero offset is a no-op.
    if (dx == 0.0f && dy == 0.0f)
        return;

    if (num < 0 || startIndex + num > glyphs.size())
        num = glyphs.size() - startIndex;

    for (int i = 0; i < num; ++i)
        glyphs.getReference (startIndex + i).moveBy (dx, dy);
}
// Squeeze one line of glyphs into a w-by-h box: first squash horizontally
// (no further than minimumHorizontalScale), then ellipsise if still too wide,
// and finally justify the survivors.  Returns how many glyphs were removed.
int GlyphArrangement::fitLineIntoSpace (int start, int numGlyphs, float x, float y, float w, float h, const Font& font,
                                        Justification justification, float minimumHorizontalScale)
{
    int numDeleted = 0;
    const float lineStartX = glyphs.getReference (start).getLeft();
    float lineWidth = glyphs.getReference (start + numGlyphs - 1).getRight() - lineStartX;

    if (lineWidth > w)
    {
        if (minimumHorizontalScale < 1.0f)
        {
            stretchRangeOfGlyphs (start, numGlyphs, jmax (minimumHorizontalScale, w / lineWidth));

            // Re-measure after scaling (with a small tolerance).
            lineWidth = glyphs.getReference (start + numGlyphs - 1).getRight() - lineStartX - 0.5f;
        }

        if (lineWidth > w)
        {
            // Still too wide even at minimum scale: truncate with "...".
            numDeleted = insertEllipsis (font, lineStartX + w, start, start + numGlyphs);
            numGlyphs -= numDeleted;
        }
    }

    justifyGlyphs (start, numGlyphs, x, y, w, h, justification);
    return numDeleted;
}
// Horizontally scale a range of glyphs about the left edge of the first one,
// adjusting each glyph's position, per-glyph font scale, and advance width.
void GlyphArrangement::stretchRangeOfGlyphs (int startIndex, int num,
                                             const float horizontalScaleFactor)
{
    jassert (startIndex >= 0);

    if (num < 0 || startIndex + num > glyphs.size())
        num = glyphs.size() - startIndex;

    if (num > 0)
    {
        // Scale everything relative to the range's left edge so that edge
        // stays fixed.
        const float xAnchor = glyphs.getReference (startIndex).getLeft();

        while (--num >= 0)
        {
            PositionedGlyph& pg = glyphs.getReference (startIndex++);

            pg.x = xAnchor + (pg.x - xAnchor) * horizontalScaleFactor;
            pg.font.setHorizontalScale (pg.font.getHorizontalScale() * horizontalScaleFactor);
            pg.w *= horizontalScaleFactor;
        }
    }
}
// Compute the union of the bounds of 'num' glyphs starting at startIndex
// (negative 'num' = to the end), optionally skipping whitespace glyphs.
Rectangle<float> GlyphArrangement::getBoundingBox (int startIndex, int num, const bool includeWhitespace) const
{
    jassert (startIndex >= 0);

    if (num < 0 || startIndex + num > glyphs.size())
        num = glyphs.size() - startIndex;

    Rectangle<float> result;

    for (int i = 0; i < num; ++i)
    {
        const PositionedGlyph& pg = glyphs.getReference (startIndex + i);

        if (includeWhitespace || ! pg.isWhitespace())
            result = result.getUnion (pg.getBounds());
    }

    return result;
}
// Position a range of glyphs inside the rectangle (x, y, width, height)
// according to the justification flags, and spread out each line when
// full horizontal justification is requested.
void GlyphArrangement::justifyGlyphs (const int startIndex, const int num,
                                      const float x, const float y, const float width, const float height,
                                      Justification justification)
{
    jassert (num >= 0 && startIndex >= 0);

    if (glyphs.size() > 0 && num > 0)
    {
        // For justified/centred layouts, trailing whitespace must not count
        // towards the measured width.
        const Rectangle<float> bb (getBoundingBox (startIndex, num, ! justification.testFlags (Justification::horizontallyJustified
                                                                                                | Justification::horizontallyCentred)));
        float deltaX = 0.0f, deltaY = 0.0f;

        if (justification.testFlags (Justification::horizontallyJustified))     deltaX = x - bb.getX();
        else if (justification.testFlags (Justification::horizontallyCentred))  deltaX = x + (width - bb.getWidth()) * 0.5f - bb.getX();
        else if (justification.testFlags (Justification::right))                deltaX = x + width - bb.getRight();
        else                                                                    deltaX = x - bb.getX();

        if (justification.testFlags (Justification::top))                       deltaY = y - bb.getY();
        else if (justification.testFlags (Justification::bottom))               deltaY = y + height - bb.getBottom();
        else                                                                    deltaY = y + (height - bb.getHeight()) * 0.5f - bb.getY();

        moveRangeOfGlyphs (startIndex, num, deltaX, deltaY);

        if (justification.testFlags (Justification::horizontallyJustified))
        {
            // Spread each baseline-run (line) out to the full width.
            int lineStart = 0;
            float baseY = glyphs.getReference (startIndex).getBaselineY();

            int i;
            for (i = 0; i < num; ++i)
            {
                const float glyphY = glyphs.getReference (startIndex + i).getBaselineY();

                if (glyphY != baseY)
                {
                    spreadOutLine (startIndex + lineStart, i - lineStart, width);

                    lineStart = i;
                    baseY = glyphY;
                }
            }

            if (i > lineStart)
                spreadOutLine (startIndex + lineStart, i - lineStart, width);
        }
    }
}
// Stretch one line to 'targetWidth' by distributing the extra space across
// its internal whitespace glyphs (trailing whitespace gets none).  Lines
// ending in a hard break are left alone.
void GlyphArrangement::spreadOutLine (const int start, const int num, const float targetWidth)
{
    if (start + num < glyphs.size()
         && glyphs.getReference (start + num - 1).getCharacter() != '\r'
         && glyphs.getReference (start + num - 1).getCharacter() != '\n')
    {
        int numSpaces = 0;
        int spacesAtEnd = 0;

        // Count whitespace glyphs; 'spacesAtEnd' ends up holding the run of
        // trailing whitespace, which is then excluded.
        for (int i = 0; i < num; ++i)
        {
            if (glyphs.getReference (start + i).isWhitespace())
            {
                ++spacesAtEnd;
                ++numSpaces;
            }
            else
            {
                spacesAtEnd = 0;
            }
        }

        numSpaces -= spacesAtEnd;

        if (numSpaces > 0)
        {
            const float startX = glyphs.getReference (start).getLeft();
            const float endX = glyphs.getReference (start + num - 1 - spacesAtEnd).getRight();

            const float extraPaddingBetweenWords
                = (targetWidth - (endX - startX)) / (float) numSpaces;

            // Shift each glyph right by the padding accumulated so far.
            float deltaX = 0.0f;

            for (int i = 0; i < num; ++i)
            {
                glyphs.getReference (start + i).moveBy (deltaX, 0.0f);

                if (glyphs.getReference (start + i).isWhitespace())
                    deltaX += extraPaddingBetweenWords;
            }
        }
    }
}
//==============================================================================
// Draw an underline bar below glyph i, extending to the left edge of the
// next glyph when it sits on the same baseline (so underlines join up).
inline void GlyphArrangement::drawGlyphUnderline (const Graphics& g, const PositionedGlyph& pg,
                                                  const int i, const AffineTransform& transform) const
{
    const float lineThickness = (pg.font.getDescent()) * 0.3f;

    float nextX = pg.x + pg.w;

    if (i < glyphs.size() - 1 && glyphs.getReference (i + 1).y == pg.y)
        nextX = glyphs.getReference (i + 1).x;

    Path p;
    p.addRectangle (pg.x, pg.y + lineThickness * 2.0f, nextX - pg.x, lineThickness);
    g.fillPath (p, transform);
}

// Render every glyph (with underlines where the font requests them).
void GlyphArrangement::draw (const Graphics& g) const
{
    for (int i = 0; i < glyphs.size(); ++i)
    {
        const PositionedGlyph& pg = glyphs.getReference(i);

        if (pg.font.isUnderlined())
            drawGlyphUnderline (g, pg, i, AffineTransform::identity);

        pg.draw (g);
    }
}

// As above, but with an extra transform applied to everything drawn.
void GlyphArrangement::draw (const Graphics& g, const AffineTransform& transform) const
{
    for (int i = 0; i < glyphs.size(); ++i)
    {
        const PositionedGlyph& pg = glyphs.getReference(i);

        if (pg.font.isUnderlined())
            drawGlyphUnderline (g, pg, i, transform);

        pg.draw (g, transform);
    }
}

// Append the outlines of every glyph to 'path'.
void GlyphArrangement::createPath (Path& path) const
{
    for (int i = 0; i < glyphs.size(); ++i)
        glyphs.getReference (i).createPath (path);
}

// Return the index of the first glyph whose outline contains (x, y),
// or -1 when no glyph is hit.
int GlyphArrangement::findGlyphIndexAt (const float x, const float y) const
{
    for (int i = 0; i < glyphs.size(); ++i)
        if (glyphs.getReference (i).hitTest (x, y))
            return i;

    return -1;
}
| aneeshvartakavi/VSTPlugins | stereoEnhancer/JuceLibraryCode/modules/juce_graphics/fonts/juce_GlyphArrangement.cpp | C++ | gpl-2.0 | 25,385 |
##
# Copyright 2012-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Declaration of toolchains.linalg namespace.
@author: Stijn De Weirdt (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
from pkgutil import extend_path
# we're not the only ones in this namespace
__path__ = extend_path(__path__, __name__) #@ReservedAssignment
| pneerincx/easybuild-framework | easybuild/toolchains/linalg/__init__.py | Python | gpl-2.0 | 1,340 |
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 1997, 1998, 1999, 2000
* Sleepycat Software. All rights reserved.
*
* $Id: EnvExample.cpp,v 11.12 2000/10/27 20:32:00 dda Exp $
*/
#include "db_config.h"
#ifndef NO_SYSTEM_INCLUDES
#include <sys/types.h>
#include <errno.h>
#include <iostream.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#endif
#include <db_cxx.h>
#ifdef macintosh
#define DATABASE_HOME ":database"
#define CONFIG_DATA_DIR ":database"
#else
#ifdef DB_WIN32
#define DATABASE_HOME "\\tmp\\database"
#define CONFIG_DATA_DIR "\\database\\files"
#else
#define DATABASE_HOME "/tmp/database"
#define CONFIG_DATA_DIR "/database/files"
#endif
#endif
// Forward declarations for the example's environment setup/teardown helpers.
void db_setup(char *, char *, ostream&);
void db_teardown(char *, char *, ostream&);

// Program name, used as the error-message prefix for DbEnv handles.
char *progname = "EnvExample";				/* Program name. */
//
// An example of a program creating/configuring a Berkeley DB environment.
//
int
main(int, char **)
{
//
// Note: it may be easiest to put all Berkeley DB operations in a
// try block, as seen here. Alternatively, you can change the
// ErrorModel in the DbEnv so that exceptions are never thrown
// and check error returns from all methods.
//
try {
char *data_dir, *home;
//
// All of the shared database files live in /home/database,
// but data files live in /database.
//
home = DATABASE_HOME;
data_dir = CONFIG_DATA_DIR;
cout << "Setup env\n";
db_setup(DATABASE_HOME, data_dir, cerr);
cout << "Teardown env\n";
db_teardown(DATABASE_HOME, data_dir, cerr);
return 0;
}
catch (DbException &dbe) {
cerr << "AccessExample: " << dbe.what() << "\n";
return 1;
}
}
// Note that any of the db calls can throw DbException
void
db_setup(char *home, char *data_dir, ostream& err_stream)
{
//
// Create an environment object and initialize it for error
// reporting.
//
DbEnv *dbenv = new DbEnv(0);
dbenv->set_error_stream(&err_stream);
dbenv->set_errpfx(progname);
//
// We want to specify the shared memory buffer pool cachesize,
// but everything else is the default.
//
dbenv->set_cachesize(0, 64 * 1024, 0);
// Databases are in a subdirectory.
(void)dbenv->set_data_dir(data_dir);
// Open the environment with full transactional support.
dbenv->open(DATABASE_HOME,
DB_CREATE | DB_INIT_LOCK | DB_INIT_LOG | DB_INIT_MPOOL | DB_INIT_TXN, 0);
// Do something interesting...
// Close the handle.
dbenv->close(0);
}
// Removes the shared environment regions created by db_setup, reporting any
// errors to `err_stream`. DbEnv::remove() discards the handle's resources, so
// only `delete` (not close()) is needed afterwards.
void
db_teardown(char *home, char *data_dir, ostream& err_stream)
{
	// Remove the shared database regions.
	DbEnv *dbenv = new DbEnv(0);

	dbenv->set_error_stream(&err_stream);
	dbenv->set_errpfx(progname);
	// Must match the data_dir used at setup time for region files to be found.
	(void)dbenv->set_data_dir(data_dir);
	dbenv->remove(home, 0);
	delete dbenv;
}
| rhuitl/uClinux | user/mysql/bdb/examples_cxx/EnvExample.cpp | C++ | gpl-2.0 | 2,767 |
DV.Schema.helpers = {

    // Regex used to pull the hostname out of an absolute URL (capture group 1).
    HOST_EXTRACTOR : (/https?:\/\/([^\/]+)\//),

    annotationClassName: '.DV-annotation',

    // Bind all events for the docviewer
    // live/delegate are the preferred methods of event attachment
    bindEvents: function(context){
      var boundZoom = this.events.compile('zoom');
      var doc = context.models.document;
      var value = _.indexOf(doc.ZOOM_RANGES, doc.zoomLevel);
      var viewer = this.viewer;

      // Zoom slider: 5 stops, mapped through the document's ZOOM_RANGES table.
      viewer.slider = viewer.$('.DV-zoomBox').slider({
        step: 1,
        min: 0,
        max: 4,
        value: value,
        slide: function(el,d){
          boundZoom(context.models.document.ZOOM_RANGES[parseInt(d.value, 10)]);
        },
        change: function(el,d){
          boundZoom(context.models.document.ZOOM_RANGES[parseInt(d.value, 10)]);
        }
      });

      // next/previous
      var history = viewer.history;
      var compiled = viewer.compiled;
      compiled.next = this.events.compile('next');
      compiled.previous = this.events.compile('previous');

      var states = context.states;
      viewer.$('.DV-navControls').delegate('span.DV-next','click', compiled.next);
      viewer.$('.DV-navControls').delegate('span.DV-previous','click', compiled.previous);

      // Tab triggers that switch the viewer between its main states.
      viewer.$('.DV-annotationView').delegate('.DV-trigger','click',function(e){
        e.preventDefault();
        context.open('ViewAnnotation');
      });
      viewer.$('.DV-documentView').delegate('.DV-trigger','click',function(e){
        // history.save('document/p'+context.models.document.currentPage());
        context.open('ViewDocument');
      });
      viewer.$('.DV-thumbnailsView').delegate('.DV-trigger','click',function(e){
        context.open('ViewThumbnails');
      });
      viewer.$('.DV-textView').delegate('.DV-trigger','click',function(e){
        // history.save('text/p'+context.models.document.currentPage());
        context.open('ViewText');
      });

      // Clicking an annotation's title or "go to page" link jumps to its page.
      viewer.$('.DV-allAnnotations').delegate('.DV-annotationGoto .DV-trigger','click', DV.jQuery.proxy(this.gotoPage, this));
      viewer.$('.DV-allAnnotations').delegate('.DV-annotationTitle .DV-trigger','click', DV.jQuery.proxy(this.gotoPage, this));

      // Search form and result navigation.
      viewer.$('form.DV-searchDocument').submit(this.events.compile('search'));
      viewer.$('.DV-searchBar').delegate('.DV-closeSearch','click',function(e){
        viewer.$('.DV-searchBar').fadeOut(250);
        e.preventDefault();
        // history.save('text/p'+context.models.document.currentPage());
      });
      viewer.$('.DV-searchBox').delegate('.DV-searchInput-cancel', 'click', DV.jQuery.proxy(this.clearSearch, this));

      viewer.$('.DV-searchResults').delegate('span.DV-resultPrevious','click', DV.jQuery.proxy(this.highlightPreviousMatch, this));
      viewer.$('.DV-searchResults').delegate('span.DV-resultNext','click', DV.jQuery.proxy(this.highlightNextMatch, this));

      // Prevent navigation elements from being selectable when clicked.
      viewer.$('.DV-trigger').bind('selectstart', function(){ return false; });

      this.elements.viewer.delegate('.DV-fullscreen', 'click', _.bind(this.openFullScreen, this));

      // Annotation editing / bridge controls within the page collection.
      var boundToggle  = DV.jQuery.proxy(this.annotationBridgeToggle, this);
      var collection   = this.elements.collection;

      collection.delegate('.DV-annotationTab','click', boundToggle);
      collection.delegate('.DV-annotationRegion','click', DV.jQuery.proxy(this.annotationBridgeShow, this));
      collection.delegate('.DV-annotationNext','click', DV.jQuery.proxy(this.annotationBridgeNext, this));
      collection.delegate('.DV-annotationPrevious','click', DV.jQuery.proxy(this.annotationBridgePrevious, this));
      collection.delegate('.DV-showEdit','click', DV.jQuery.proxy(this.showAnnotationEdit, this));
      collection.delegate('.DV-cancelEdit','click', DV.jQuery.proxy(this.cancelAnnotationEdit, this));
      collection.delegate('.DV-saveAnnotation','click', DV.jQuery.proxy(this.saveAnnotation, this));
      collection.delegate('.DV-saveAnnotationDraft','click', DV.jQuery.proxy(this.saveAnnotation, this));
      collection.delegate('.DV-deleteAnnotation','click', DV.jQuery.proxy(this.deleteAnnotation, this));
      collection.delegate('.DV-pageNumber', 'click', _.bind(this.permalinkPage, this, 'document'));
      collection.delegate('.DV-textCurrentPage', 'click', _.bind(this.permalinkPage, this, 'text'));
      collection.delegate('.DV-annotationTitle', 'click', _.bind(this.permalinkAnnotation, this));
      collection.delegate('.DV-permalink', 'click', _.bind(this.permalinkAnnotation, this));

      // Thumbnails
      viewer.$('.DV-thumbnails').delegate('.DV-thumbnail-page', 'click', function(e) {
        var $thumbnail = viewer.$(e.currentTarget);
        if (!viewer.openEditor) {
          var pageIndex = $thumbnail.closest('.DV-thumbnail').attr('data-pageNumber') - 1;
          viewer.models.document.setPageIndex(pageIndex);
          viewer.open('ViewDocument');
          // viewer.history.save('document/p'+pageNumber);
        }
      });

      // Handle iPad / iPhone scroll events...
      _.bindAll(this, 'touchStart', 'touchMove', 'touchEnd');
      this.elements.window[0].ontouchstart  = this.touchStart;
      this.elements.window[0].ontouchmove   = this.touchMove;
      this.elements.window[0].ontouchend    = this.touchEnd;
      this.elements.well[0].ontouchstart    = this.touchStart;
      this.elements.well[0].ontouchmove     = this.touchMove;
      this.elements.well[0].ontouchend      = this.touchEnd;

      viewer.$('.DV-descriptionToggle').live('click',function(e){
        e.preventDefault();
        e.stopPropagation();
        viewer.$('.DV-descriptionText').toggle();
        viewer.$('.DV-descriptionToggle').toggleClass('DV-showDescription');
      });

      // Clicking outside an open annotation closes it, except on the
      // scrollbar gutter (the right-most 15px of the window).
      var cleanUp = DV.jQuery.proxy(viewer.pageSet.cleanUp, this);

      this.elements.window.live('mousedown',
        function(e){
          var el = viewer.$(e.target);
          if (el.parents().is('.DV-annotation') || el.is('.DV-annotation')) return true;
          if(context.elements.window.hasClass('DV-coverVisible')){
            if((el.width() - parseInt(e.clientX,10)) >= 15){
              cleanUp();
            }
          }
        }
      );

      var docId = viewer.schema.document.id;

      // Pause/resume background polling as the browser window gains and
      // loses focus. Events are namespaced by document id so multiple
      // embedded viewers can unbind independently.
      //if(DV.jQuery.browser.msie == true){
      //  this.elements.browserDocument.bind('focus.' + docId, DV.jQuery.proxy(this.focusWindow,this));
      //  this.elements.browserDocument.bind('focusout.' + docId, DV.jQuery.proxy(this.focusOut,this));
      // }else{
        this.elements.browserWindow.bind('focus.' + docId, DV.jQuery.proxy(this.focusWindow,this));
        this.elements.browserWindow.bind('blur.' + docId, DV.jQuery.proxy(this.blurWindow,this));
      // }

      // When the document is scrolled, even in the background, resume polling.
      this.elements.window.bind('scroll.' + docId, DV.jQuery.proxy(this.focusWindow, this));

      this.elements.coverPages.live('mousedown', cleanUp);

      viewer.acceptInput = this.elements.currentPage.acceptInput({ changeCallBack: DV.jQuery.proxy(this.acceptInputCallBack,this) });
    },
    // Unbind jQuery events that have been bound to objects outside of the viewer.
    // Uses the same document-id namespace as bindEvents, so only this viewer's
    // handlers are removed. NOTE(review): the IE branch here is live, while the
    // corresponding branch in bindEvents is commented out -- confirm intended.
    unbindEvents: function() {
      var viewer = this.viewer;
      var docId = viewer.schema.document.id;
      if(DV.jQuery.browser.msie == true){
        this.elements.browserDocument.unbind('focus.' + docId);
        this.elements.browserDocument.unbind('focusout.' + docId);
      }else{
        viewer.helpers.elements.browserWindow.unbind('focus.' + docId);
        viewer.helpers.elements.browserWindow.unbind('blur.' + docId);
      }
      viewer.helpers.elements.browserWindow.unbind('scroll.' + docId);
      // Drop every registered observer as well.
      _.each(viewer.observers, function(obs){ viewer.helpers.removeObserver(obs); });
    },
// We're entering the Notes tab -- make sure that there are no data-src
// attributes remaining.
ensureAnnotationImages : function() {
this.viewer.$(".DV-img[data-src]").each(function() {
var el = DV.jQuery(this);
el.attr('src', el.attr('data-src'));
});
},
startCheckTimer: function(){
var _t = this.viewer;
var _check = function(){
_t.events.check();
};
this.viewer.checkTimer = setInterval(_check,100);
},
stopCheckTimer: function(){
clearInterval(this.viewer.checkTimer);
},
blurWindow: function(){
if(this.viewer.isFocus === true){
this.viewer.isFocus = false;
// pause draw timer
this.stopCheckTimer();
}else{
return;
}
},
focusOut: function(){
if(this.viewer.activeElement != document.activeElement){
this.viewer.activeElement = document.activeElement;
this.viewer.isFocus = true;
}else{
// pause draw timer
this.viewer.isFocus = false;
this.viewer.helpers.stopCheckTimer();
return;
}
},
focusWindow: function(){
if(this.viewer.isFocus === true){
return;
}else{
this.viewer.isFocus = true;
// restart draw timer
this.startCheckTimer();
}
},
    // Record the initial finger position so touchMove can compute deltas.
    touchStart : function(e) {
      e.stopPropagation();
      e.preventDefault();
      var touch = e.changedTouches[0];
      this._moved  = false;
      this._touchX = touch.pageX;
      this._touchY = touch.pageY;
    },

    // Manually scroll the touched element by the finger's movement since the
    // last event (iOS doesn't scroll overflow containers natively here).
    touchMove : function(e) {
      var el    = e.currentTarget;
      var touch = e.changedTouches[0];
      var xDiff = this._touchX - touch.pageX;
      var yDiff = this._touchY - touch.pageY;
      el.scrollLeft += xDiff;
      el.scrollTop  += yDiff;
      this._touchX  -= xDiff;
      this._touchY  -= yDiff;
      if (yDiff != 0 || xDiff != 0) this._moved = true;
    },

    // If the finger never moved, treat the touch as a tap by synthesizing a
    // click event on the nearest element node under the touch point.
    touchEnd : function(e) {
      if (!this._moved) {
        var touch     = e.changedTouches[0];
        var target    = touch.target;
        var fakeClick = document.createEvent('MouseEvent');
        while (target.nodeType !== 1) target = target.parentNode;
        fakeClick.initMouseEvent('click', true, true, touch.view, 1,
          touch.screenX, touch.screenY, touch.clientX, touch.clientY,
          false, false, false, false, 0, null);
        target.dispatchEvent(fakeClick);
      }
      this._moved = false;
    },
    // Click to open a page's permalink. In document mode, also jumps the
    // viewer to the clicked page before saving the URL fragment.
    permalinkPage : function(mode, e) {
      if (mode == 'text') {
        var number = this.viewer.models.document.currentPage();
      } else {
        var pageId = this.viewer.$(e.target).closest('.DV-set').attr('data-id');
        var page   = this.viewer.pageSet.pages[pageId];
        var number = page.pageNumber;
        this.jump(page.index);
      }
      this.viewer.history.save(mode + '/p' + number);
    },

    // Click to open an annotation's permalink. Prefers the server-assigned id
    // when the annotation has been persisted.
    permalinkAnnotation : function(e) {
      var id   = this.viewer.$(e.target).closest('.DV-annotation').attr('data-id');
      var anno = this.viewer.models.annotations.getAnnotation(id);
      var sid  = anno.server_id || anno.id;
      if (this.viewer.state == 'ViewDocument') {
        this.viewer.pageSet.showAnnotation(anno);
        this.viewer.history.save('document/p' + anno.pageNumber + '/a' + sid);
      } else {
        this.viewer.history.save('annotation/a' + sid);
      }
    },

    // Resize the scroll track to `height`, preserving the apparent scroll
    // position by shifting scrollTop by the height delta.
    setDocHeight:   function(height,diff) {
      this.elements.bar.css('height', height);
      this.elements.window[0].scrollTop += diff;
    },

    // Viewport dimensions; prefers window.innerHeight where available.
    getWindowDimensions: function(){
      var d = {
        height: window.innerHeight ? window.innerHeight : this.elements.browserWindow.height(),
        width: this.elements.browserWindow.width()
      };
      return d;
    },

    // Is the given URL on a remote domain?
    isCrossDomain : function(url) {
      var match = url.match(this.HOST_EXTRACTOR);
      return match && (match[1] != window.location.host);
    },

    // Scroll the viewer window back to the top.
    resetScrollState: function(){
      this.elements.window.scrollTop(0);
    },
    // Jump from an annotation listing to that annotation's page in the
    // document view. The annotation id is read from the 'rel' attribute
    // ("aid-<id>") of the enclosing annotation element.
    gotoPage: function(e){
      e.preventDefault();
      var aid        = this.viewer.$(e.target).parents('.DV-annotation').attr('rel').replace('aid-','');
      var annotation = this.models.annotations.getAnnotation(aid);
      var viewer     = this.viewer;

      if(viewer.state !== 'ViewDocument'){
        this.models.document.setPageIndex(annotation.index);
        viewer.open('ViewDocument');
        // this.viewer.history.save('document/p'+(parseInt(annotation.index,10)+1));
      }
    },

    // Open the document in a new full-window tab, reconstructing a URL
    // fragment that reproduces the viewer's current state (page, annotation,
    // or search query).
    openFullScreen : function() {
      var doc         = this.viewer.schema.document;
      var url         = doc.canonicalURL.replace(/#\S+$/,"");
      var currentPage = this.models.document.currentPage();

      // construct url fragment based on current viewer state
      switch (this.viewer.state) {
        case 'ViewAnnotation':
          url += '#annotation/a' + this.viewer.activeAnnotationId; // default to the top of the annotations page.
          break;
        case 'ViewDocument':
          url += '#document/p' + currentPage;
          break;
        case 'ViewSearch':
          url += '#search/p' + currentPage + '/' + encodeURIComponent(this.elements.searchInput.val());
          break;
        case 'ViewText':
          url += '#text/p' + currentPage;
          break;
        case 'ViewThumbnails':
          url += '#pages/p' + currentPage; // need to set up a route to catch this.
          break;
      }
      window.open(url, "documentviewer", "toolbar=no,resizable=yes,scrollbars=no,status=no");
    },
// Determine the correct DOM page ordering for a given page index.
sortPages : function(pageIndex) {
if (pageIndex == 0 || pageIndex % 3 == 1) return ['p0', 'p1', 'p2'];
if (pageIndex % 3 == 2) return ['p1', 'p2', 'p0'];
if (pageIndex % 3 == 0) return ['p2', 'p0', 'p1'];
},
addObserver: function(observerName){
this.removeObserver(observerName);
this.viewer.observers.push(observerName);
},
removeObserver: function(observerName){
var observers = this.viewer.observers;
for(var i = 0,len=observers.length;i<len;i++){
if(observerName === observers[i]){
observers.splice(i,1);
}
}
},
    // Swap the viewer's top-level state class (only one DV-view* class is
    // active at a time).
    toggleContent: function(toggleClassName){
      this.elements.viewer.removeClass('DV-viewText DV-viewSearch DV-viewDocument DV-viewAnnotations DV-viewThumbnails').addClass('DV-'+toggleClassName);
    },

    // Scroll directly to a page, optionally offset by `modifier` pixels.
    // `forceRedraw` repaints the page set immediately instead of waiting for
    // the next check-timer tick.
    jump: function(pageIndex, modifier, forceRedraw){
      modifier = (modifier) ? parseInt(modifier, 10) : 0;
      var position = this.models.document.getOffset(parseInt(pageIndex, 10)) + modifier;
      this.elements.window[0].scrollTop = position;
      this.models.document.setPageIndex(pageIndex);
      if (forceRedraw) this.viewer.pageSet.redraw(true);
      if (this.viewer.state === 'ViewThumbnails') {
        this.viewer.thumbnails.highlightCurrentPage();
      }
    },

    // Scroll the viewer window by a relative delta in both axes.
    shift: function(argHash){
      var windowEl        = this.elements.window;
      var scrollTopShift  = windowEl.scrollTop() + argHash.deltaY;
      var scrollLeftShift = windowEl.scrollLeft() + argHash.deltaX;

      windowEl.scrollTop(scrollTopShift);
      windowEl.scrollLeft(scrollLeftShift);
    },

    // Snapshot of the viewer's user-visible state (1-based page, zoom, view).
    getAppState: function(){
      var docModel    = this.models.document;
      var currentPage = (docModel.currentIndex() == 0) ? 1 : docModel.currentPage();

      return { page: currentPage, zoom: docModel.zoomLevel, view: this.viewer.state };
    },
    // Render the (at most three) reusable page containers as an HTML string.
    // Only three DOM pages exist at once; they're recycled while scrolling.
    constructPages: function(){
      var pages = [];
      var totalPagesToCreate = (this.viewer.schema.data.totalPages < 3) ? this.viewer.schema.data.totalPages : 3;
      var height = this.models.pages.height;
      for (var i = 0; i < totalPagesToCreate; i++) {
        pages.push(JST.pages({ pageNumber: i+1, pageIndex: i , pageImageSource: null, baseHeight: height }));
      }
      return pages.join('');
    },

    // Position the viewer on the page. For a full screen viewer, this means
    // absolute from the current y offset to the bottom of the viewport.
    positionViewer : function() {
      var offset = this.elements.viewer.position();
      this.elements.viewer.css({position: 'absolute', top: offset.top, bottom: 0, left: offset.left, right: offset.left});
    },

    // IE6 and below can't run the viewer: swap in the "unsupported" template
    // and report true so initialization stops.
    unsupportedBrowser : function() {
      if (!(DV.jQuery.browser.msie && DV.jQuery.browser.version <= "6.0")) return false;
      DV.jQuery(this.viewer.options.container).html(JST.unsupported({viewer : this.viewer}));
      return true;
    },

    // Wire up all URL-fragment routes to their event handlers.
    registerHashChangeEvents: function(){
      var events  = this.events;
      var history = this.viewer.history;

      // Default route
      history.defaultCallback = _.bind(events.handleHashChangeDefault,this.events);

      // Handle page loading
      history.register(/document\/p(\d*)$/, _.bind(events.handleHashChangeViewDocumentPage,this.events));

      // Legacy NYT stuff
      history.register(/p(\d*)$/, _.bind(events.handleHashChangeLegacyViewDocumentPage,this.events));
      history.register(/p=(\d*)$/, _.bind(events.handleHashChangeLegacyViewDocumentPage,this.events));

      // Handle annotation loading in document view
      history.register(/document\/p(\d*)\/a(\d*)$/, _.bind(events.handleHashChangeViewDocumentAnnotation,this.events));

      // Handle annotation loading in annotation view
      history.register(/annotation\/a(\d*)$/, _.bind(events.handleHashChangeViewAnnotationAnnotation,this.events));

      // Handle loading of the pages view
      history.register(/pages$/, _.bind(events.handleHashChangeViewPages, events));

      // Handle page loading in text view
      history.register(/text\/p(\d*)$/, _.bind(events.handleHashChangeViewText,this.events));

      // Handle entity display requests.
      history.register(/entity\/p(\d*)\/(.*)\/(\d+):(\d+)$/, _.bind(events.handleHashChangeViewEntity,this.events));

      // Handle search requests
      history.register(/search\/p(\d*)\/(.*)$/, _.bind(events.handleHashChangeViewSearchRequest,this.events));
    },
    // Sets up the zoom slider to match the appropriate for the specified
    // initial zoom level, and real document page sizes. In 'auto' mode, the
    // base zoom is the window width (minus padding), capped at 700px; the
    // five slider stops are then spread around that base so the slider always
    // spans up to 1000px.
    autoZoomPage: function() {
      var windowWidth = this.elements.window.outerWidth(true);
      var zoom;
      if (this.viewer.options.zoom == 'auto') {
        zoom = Math.min(
          700,
          windowWidth - (this.viewer.models.pages.REDUCED_PADDING * 2)
        );
      } else {
        zoom = this.viewer.options.zoom;
      }

      // Setup ranges for auto-width zooming
      var ranges = [];
      if (zoom <= 500) {
        var zoom2 = (zoom + 700) / 2;
        ranges = [zoom, zoom2, 700, 850, 1000];
      } else if (zoom <= 750) {
        var zoom2 = ((1000 - 700) / 3) + zoom;
        var zoom3 = ((1000 - 700) / 3)*2 + zoom;
        ranges = [.66*zoom, zoom, zoom2, zoom3, 1000];
      } else if (750 < zoom && zoom <= 850){
        var zoom2 = ((1000 - zoom) / 2) + zoom;
        ranges = [.66*zoom, 700, zoom, zoom2, 1000];
      } else if (850 < zoom && zoom < 1000){
        var zoom2 = ((zoom - 700) / 2) + 700;
        ranges = [.66*zoom, 700, zoom2, zoom, 1000];
      } else if (zoom >= 1000) {
        zoom = 1000;
        ranges = this.viewer.models.document.ZOOM_RANGES;
      }
      this.viewer.models.document.ZOOM_RANGES = ranges;
      this.viewer.slider.slider({'value': parseInt(_.indexOf(ranges, zoom), 10)});

      this.events.zoom(zoom);
    },

    // Apply the viewer's initial state: let the URL hash win if it matches a
    // registered route; otherwise open the document view and honor the
    // `note` / `page` constructor options.
    handleInitialState: function(){
      var initialRouteMatch = this.viewer.history.loadURL(true);
      if(!initialRouteMatch) {
        var opts = this.viewer.options;
        this.viewer.open('ViewDocument');
        if (opts.note) {
          this.viewer.pageSet.showAnnotation(this.viewer.models.annotations.byId[opts.note]);
        } else if (opts.page) {
          this.jump(opts.page - 1);
        }
      }
    }

};
| cloudbearings/providence | viewers/apps/src/nytDocumentViewer/public/javascripts/DV/helpers/helpers.js | JavaScript | gpl-3.0 | 19,944 |
#include <string>
#include <opm/common/OpmLog/OpmLog.hpp>
#include "export.hpp"
namespace {

    // Thin free-function wrappers around OpmLog's static logging methods.
    // pybind11's def_static() needs plain function pointers, and these
    // wrappers pin down the exact (const std::string&) overloads to bind.

    void info(const std::string& msg) {
        OpmLog::info(msg);
    }

    void warning(const std::string& msg) {
        OpmLog::warning(msg);
    }

    void error(const std::string& msg) {
        OpmLog::error(msg);
    }

    void problem(const std::string& msg) {
        OpmLog::problem(msg);
    }

    void bug(const std::string& msg) {
        OpmLog::bug(msg);
    }

    void debug(const std::string& msg) {
        OpmLog::debug(msg);
    }

    void note(const std::string& msg) {
        OpmLog::note(msg);
    }

}
// Exposes OpmLog to Python as the "OpmLog" class, with one static method per
// log severity. All methods take a single message string.
void python::common::export_Log(py::module& module)
{
    py::class_<OpmLog>(module, "OpmLog")
        .def_static("info", info )
        .def_static("warning", warning)
        .def_static("error", error)
        .def_static("problem", problem)
        .def_static("bug", bug)
        .def_static("debug", debug)
        .def_static("note", note);
}
| bska/opm-common | python/cxx/log.cpp | C++ | gpl-3.0 | 898 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import hashlib
import itertools
import numpy
from nupic.bindings.math import Random
from nupic.encoders.base import Encoder
class CoordinateEncoder(Encoder):
  """
  Given a coordinate in an N-dimensional space, and a radius around
  that coordinate, the Coordinate Encoder returns an SDR representation
  of that position.

  The Coordinate Encoder uses an N-dimensional integer coordinate space.
  For example, a valid coordinate in this space is (150, -49, 58), whereas
  an invalid coordinate would be (55.4, -5, 85.8475).

  It uses the following algorithm:

  1. Find all the coordinates around the input coordinate, within the
  specified radius.
  2. For each coordinate, use a uniform hash function to
  deterministically map it to a real number between 0 and 1. This is the
  "order" of the coordinate.
  3. Of these coordinates, pick the top W by order, where W is the
  number of active bits desired in the SDR.
  4. For each of these W coordinates, use a uniform hash function to
  deterministically map it to one of the bits in the SDR. Make this bit active.
  5. This results in a final SDR with exactly W bits active
  (barring chance hash collisions).
  """

  def __init__(self,
               w=21,
               n=1000,
               name=None,
               verbosity=0):
    """
    See `nupic.encoders.base.Encoder` for more information.

    @param w (int) Number of active bits in the output SDR; must be odd
    @param n (int) Total number of bits in the output SDR
    @param name An optional string which will become part of the description
    @param verbosity (int) Verbosity level for debugging output
    """
    # Validate inputs
    # w must be odd so a coordinate's encoding has an unambiguous center.
    if (w <= 0) or (w % 2 == 0):
      raise ValueError("w must be an odd positive integer")

    # n >> w keeps the SDR sparse enough for reliable matching.
    if (n <= 6 * w) or (not isinstance(n, int)):
      raise ValueError("n must be an int strictly greater than 6*w. For "
                       "good results we recommend n be strictly greater "
                       "than 11*w")

    self.w = w
    self.n = n
    self.verbosity = verbosity
    # This encoder produces its output directly; it has no sub-encoders.
    self.encoders = None

    if name is None:
      name = "[%s:%s]" % (self.n, self.w)
    self.name = name
  def getWidth(self):
    """See `nupic.encoders.base.Encoder` for more information."""
    return self.n


  def getDescription(self):
    """See `nupic.encoders.base.Encoder` for more information."""
    return [('coordinate', 0), ('radius', 1)]


  def getScalars(self, inputData):
    """See `nupic.encoders.base.Encoder` for more information."""
    # Coordinates have no meaningful scalar representation; return zeros.
    return numpy.array([0]*len(inputData))
  def encodeIntoArray(self, inputData, output):
    """
    See `nupic.encoders.base.Encoder` for more information.

    @param inputData (tuple) Contains coordinate (numpy.array)
                             and radius (float)
    @param output (numpy.array) Stores encoded SDR in this numpy array
    """
    (coordinate, radius) = inputData

    # Steps 1-4 of the algorithm described in the class docstring:
    # neighborhood -> top-w by hash order -> map each winner to an SDR bit.
    neighbors = self._neighbors(coordinate, radius)
    winners = self._topWCoordinates(neighbors, self.w)

    bitFn = lambda coordinate: self._bitForCoordinate(coordinate, self.n)
    indices = numpy.array([bitFn(w) for w in winners])

    output[:] = 0
    # Hash collisions may map two winners to the same bit, so slightly fewer
    # than self.w bits can end up active.
    output[indices] = 1
  @staticmethod
  def _neighbors(coordinate, radius):
    """
    Returns coordinates around given coordinate, within given radius.
    Includes given coordinate.

    @param coordinate (numpy.array) Coordinate whose neighbors to find
    @param radius (float) Radius around `coordinate`
                          (NOTE(review): passed to range(), which requires
                          integer arguments -- presumably callers supply an
                          int; confirm)

    @return (numpy.array) List of coordinates
    """
    # Cartesian product of [n-radius, n+radius] along every dimension.
    ranges = [range(n-radius, n+radius+1) for n in coordinate.tolist()]
    return numpy.array(list(itertools.product(*ranges)))
  @classmethod
  def _topWCoordinates(cls, coordinates, w):
    """
    Returns the top W coordinates by order.

    @param coordinates (numpy.array) A 2D numpy array, where each element
                                     is a coordinate
    @param w (int) Number of top coordinates to return
    @return (numpy.array) A subset of `coordinates`, containing only the
                          top ones by order
    """
    # Order is a deterministic per-coordinate hash in [0, 1); argsort is
    # ascending, so the last w indices are the highest-ordered coordinates.
    orders = numpy.array([cls._orderForCoordinate(c)
                          for c in coordinates.tolist()])
    indices = numpy.argsort(orders)[-w:]
    return coordinates[indices]
@staticmethod
def _hashCoordinate(coordinate):
"""Hash a coordinate to a 64 bit integer."""
coordinateStr = ",".join(str(v) for v in coordinate)
# Compute the hash and convert to 64 bit int.
hash = int(int(hashlib.md5(coordinateStr).hexdigest(), 16) % (2 ** 64))
return hash
  @classmethod
  def _orderForCoordinate(cls, coordinate):
    """
    Returns the order for a coordinate.

    @param coordinate (numpy.array) Coordinate
    @return (float) A value in the interval [0, 1), representing the
                    order of the coordinate
    """
    # Seeding an RNG with the coordinate's hash makes the order
    # deterministic per-coordinate but uniformly distributed.
    seed = cls._hashCoordinate(coordinate)
    rng = Random(seed)
    return rng.getReal64()


  @classmethod
  def _bitForCoordinate(cls, coordinate, n):
    """
    Maps the coordinate to a bit in the SDR.

    @param coordinate (numpy.array) Coordinate
    @param n (int) The number of available bits in the SDR
    @return (int) The index to a bit in the SDR
    """
    # Same deterministic-seed trick as _orderForCoordinate, drawing a
    # uniform bit index in [0, n).
    seed = cls._hashCoordinate(coordinate)
    rng = Random(seed)
    return rng.getUInt32(n)
  def dump(self):
    # Print the encoder's configuration (Python 2 print statements).
    print "CoordinateEncoder:"
    print "  w:   %d" % self.w
    print "  n:   %d" % self.n
  @classmethod
  def read(cls, proto):
    # Deserialize from a capnp proto. __new__ bypasses __init__ (and its
    # validation), since the proto fields were validated when first written.
    encoder = object.__new__(cls)
    encoder.w = proto.w
    encoder.n = proto.n
    encoder.verbosity = proto.verbosity
    encoder.name = proto.name
    return encoder


  def write(self, proto):
    # Serialize this encoder's configuration into a capnp proto.
    proto.w = self.w
    proto.n = self.n
    proto.verbosity = self.verbosity
    proto.name = self.name
| david-ragazzi/nupic | nupic/encoders/coordinate.py | Python | gpl-3.0 | 6,560 |
/*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2013 - Raw Material Software Ltd.
Permission is granted to use this software under the terms of either:
a) the GPL v2 (or any later version)
b) the Affero GPL v3
Details of these licenses can be found at: www.gnu.org/licenses
JUCE is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
------------------------------------------------------------------------------
To release a closed-source product which uses JUCE, commercial licenses are
available: visit www.juce.com for more information.
==============================================================================
*/
// Creates a Viewport with both scrollbars enabled but initially hidden, a
// clipping "content holder" to keep the viewed component from overlapping the
// scrollbars, and keyboard focus for scroll-key handling.
Viewport::Viewport (const String& name)
  : Component (name),
    scrollBarThickness (0),          // 0 = use the LookAndFeel's default
    singleStepX (16),
    singleStepY (16),
    showHScrollbar (true),
    showVScrollbar (true),
    deleteContent (true),
    allowScrollingWithoutScrollbarV (false),
    allowScrollingWithoutScrollbarH (false),
    verticalScrollBar (true),
    horizontalScrollBar (false)
{
    // content holder is used to clip the contents so they don't overlap the scrollbars
    addAndMakeVisible (contentHolder);
    contentHolder.setInterceptsMouseClicks (false, true);

    addChildComponent (verticalScrollBar);
    addChildComponent (horizontalScrollBar);

    // Listen to both scrollbars so dragging them moves the content.
    verticalScrollBar.addListener (this);
    horizontalScrollBar.addListener (this);

    setInterceptsMouseClicks (false, true);
    setWantsKeyboardFocus (true);
}

Viewport::~Viewport()
{
    // Destroys the viewed component if deleteContent is set.
    deleteContentComp();
}
//==============================================================================
// Default no-op notifications: subclasses override these to react to scrolling
// and to the viewed component being swapped.
void Viewport::visibleAreaChanged (const Rectangle<int>&) {}
void Viewport::viewedComponentChanged (Component*) {}
//==============================================================================
// Detaches (and, when owned, deletes) the current content component.
void Viewport::deleteContentComp()
{
    if (contentComp != nullptr)
        contentComp->removeComponentListener (this);

    if (deleteContent)
    {
        // This sets the content comp to a null pointer before deleting the old one, in case
        // anything tries to use the old one while it's in mid-deletion..
        ScopedPointer<Component> oldCompDeleter (contentComp);
    }
    else
    {
        contentComp = nullptr;
    }
}

// Installs a new component to be scrolled around inside the viewport,
// replacing (and possibly deleting) any previous one. Passing the component
// that's already installed is a no-op.
void Viewport::setViewedComponent (Component* const newViewedComponent, const bool deleteComponentWhenNoLongerNeeded)
{
    if (contentComp.get() != newViewedComponent)
    {
        deleteContentComp();
        contentComp = newViewedComponent;
        deleteContent = deleteComponentWhenNoLongerNeeded;

        if (contentComp != nullptr)
        {
            contentHolder.addAndMakeVisible (contentComp);
            setViewPosition (Point<int>());
            // Listen for moves/resizes so the scrollbars can be kept in sync.
            contentComp->addComponentListener (this);
        }

        viewedComponentChanged (contentComp);
        updateVisibleArea();
    }
}
// The visible content area is the content holder's size (i.e. the viewport
// minus any visible scrollbars).
int Viewport::getMaximumVisibleWidth() const    { return contentHolder.getWidth(); }
int Viewport::getMaximumVisibleHeight() const   { return contentHolder.getHeight(); }

// Converts a desired view-position (the content offset the caller wants to
// see at the viewport's top-left) into the top-left position the content
// component must be moved to, clamped so the content can never be dragged
// beyond its edges.
Point<int> Viewport::viewportPosToCompPos (Point<int> pos) const
{
    jassert (contentComp != nullptr);
    return Point<int> (jmax (jmin (0, contentHolder.getWidth()  - contentComp->getWidth()),  jmin (0, -(pos.x))),
                       jmax (jmin (0, contentHolder.getHeight() - contentComp->getHeight()), jmin (0, -(pos.y))));
}
void Viewport::setViewPosition (const int xPixelsOffset, const int yPixelsOffset)
{
setViewPosition (Point<int> (xPixelsOffset, yPixelsOffset));
}
void Viewport::setViewPosition (Point<int> newPosition)
{
if (contentComp != nullptr)
contentComp->setTopLeftPosition (viewportPosToCompPos (newPosition));
}
// Scrolls to a position expressed as a proportion (0..1) of the scrollable range.
void Viewport::setViewPositionProportionately (const double x, const double y)
{
    if (contentComp != nullptr)
        setViewPosition (jmax (0, roundToInt (x * (contentComp->getWidth()  - getWidth()))),
                         jmax (0, roundToInt (y * (contentComp->getHeight() - getHeight()))));
}
// Auto-scrolls when the mouse is dragged close to an edge (e.g. during
// drag-and-drop). Returns true if any scrolling actually took place.
bool Viewport::autoScroll (const int mouseX, const int mouseY, const int activeBorderThickness, const int maximumSpeed)
{
    if (contentComp != nullptr)
    {
        int dx = 0, dy = 0;

        if (horizontalScrollBar.isVisible() || contentComp->getX() < 0 || contentComp->getRight() > getWidth())
        {
            // Speed grows the deeper the mouse is inside the active border...
            if (mouseX < activeBorderThickness)
                dx = activeBorderThickness - mouseX;
            else if (mouseX >= contentHolder.getWidth() - activeBorderThickness)
                dx = (contentHolder.getWidth() - activeBorderThickness) - mouseX;

            // ...but is capped at maximumSpeed, and never past the content edge.
            if (dx < 0)
                dx = jmax (dx, -maximumSpeed, contentHolder.getWidth() - contentComp->getRight());
            else
                dx = jmin (dx, maximumSpeed, -contentComp->getX());
        }

        if (verticalScrollBar.isVisible() || contentComp->getY() < 0 || contentComp->getBottom() > getHeight())
        {
            if (mouseY < activeBorderThickness)
                dy = activeBorderThickness - mouseY;
            else if (mouseY >= contentHolder.getHeight() - activeBorderThickness)
                dy = (contentHolder.getHeight() - activeBorderThickness) - mouseY;

            if (dy < 0)
                dy = jmax (dy, -maximumSpeed, contentHolder.getHeight() - contentComp->getBottom());
            else
                dy = jmin (dy, maximumSpeed, -contentComp->getY());
        }

        if (dx != 0 || dy != 0)
        {
            contentComp->setTopLeftPosition (contentComp->getX() + dx,
                                             contentComp->getY() + dy);
            return true;
        }
    }

    return false;
}
// Keep the layout in sync when the content component moves/resizes, and when
// the viewport itself is resized.
void Viewport::componentMovedOrResized (Component&, bool, bool)
{
    updateVisibleArea();
}

void Viewport::resized()
{
    updateVisibleArea();
}
//==============================================================================
// Recalculates which scrollbars are needed, lays out the content holder and
// the scrollbars, and notifies listeners when the visible area has changed.
void Viewport::updateVisibleArea()
{
    const int scrollbarWidth = getScrollBarThickness();
    const bool canShowAnyBars = getWidth() > scrollbarWidth && getHeight() > scrollbarWidth;
    const bool canShowHBar = showHScrollbar && canShowAnyBars;
    const bool canShowVBar = showVScrollbar && canShowAnyBars;

    bool hBarVisible = false, vBarVisible = false;
    Rectangle<int> contentArea;

    // Showing one scrollbar shrinks the content area, which may in turn
    // require the other bar (or resize the content) - so iterate a few times
    // until the layout settles.
    for (int i = 3; --i >= 0;)
    {
        hBarVisible = canShowHBar && ! horizontalScrollBar.autoHides();
        vBarVisible = canShowVBar && ! verticalScrollBar.autoHides();
        contentArea = getLocalBounds();

        if (contentComp != nullptr && ! contentArea.contains (contentComp->getBounds()))
        {
            hBarVisible = canShowHBar && (hBarVisible || contentComp->getX() < 0 || contentComp->getRight() > contentArea.getWidth());
            vBarVisible = canShowVBar && (vBarVisible || contentComp->getY() < 0 || contentComp->getBottom() > contentArea.getHeight());

            if (vBarVisible)
                contentArea.setWidth (getWidth() - scrollbarWidth);

            if (hBarVisible)
                contentArea.setHeight (getHeight() - scrollbarWidth);

            // Re-check: the bar we just reserved space for may have made the
            // other one necessary too.
            if (! contentArea.contains (contentComp->getBounds()))
            {
                hBarVisible = canShowHBar && (hBarVisible || contentComp->getRight() > contentArea.getWidth());
                vBarVisible = canShowVBar && (vBarVisible || contentComp->getBottom() > contentArea.getHeight());
            }
        }

        if (vBarVisible)  contentArea.setWidth  (getWidth()  - scrollbarWidth);
        if (hBarVisible)  contentArea.setHeight (getHeight() - scrollbarWidth);

        if (contentComp == nullptr)
        {
            contentHolder.setBounds (contentArea);
            break;
        }

        const Rectangle<int> oldContentBounds (contentComp->getBounds());
        contentHolder.setBounds (contentArea);

        // If the content has changed its size, that might affect our scrollbars, so go round again and re-calculate..
        if (oldContentBounds == contentComp->getBounds())
            break;
    }

    Rectangle<int> contentBounds;
    if (contentComp != nullptr)
        contentBounds = contentHolder.getLocalArea (contentComp, contentComp->getLocalBounds());

    Point<int> visibleOrigin (-contentBounds.getPosition());

    horizontalScrollBar.setBounds (0, contentArea.getHeight(), contentArea.getWidth(), scrollbarWidth);
    horizontalScrollBar.setRangeLimits (0.0, contentBounds.getWidth());
    horizontalScrollBar.setCurrentRange (visibleOrigin.x, contentArea.getWidth());
    horizontalScrollBar.setSingleStepSize (singleStepX);
    horizontalScrollBar.cancelPendingUpdate();

    if (canShowHBar && ! hBarVisible)
        visibleOrigin.setX (0);

    verticalScrollBar.setBounds (contentArea.getWidth(), 0, scrollbarWidth, contentArea.getHeight());
    verticalScrollBar.setRangeLimits (0.0, contentBounds.getHeight());
    verticalScrollBar.setCurrentRange (visibleOrigin.y, contentArea.getHeight());
    verticalScrollBar.setSingleStepSize (singleStepY);
    verticalScrollBar.cancelPendingUpdate();

    if (canShowVBar && ! vBarVisible)
        visibleOrigin.setY (0);

    // Force the visibility *after* setting the ranges to avoid flicker caused by edge conditions in the numbers.
    horizontalScrollBar.setVisible (hBarVisible);
    verticalScrollBar.setVisible (vBarVisible);

    if (contentComp != nullptr)
    {
        const Point<int> newContentCompPos (viewportPosToCompPos (visibleOrigin));

        if (contentComp->getBounds().getPosition() != newContentCompPos)
        {
            contentComp->setTopLeftPosition (newContentCompPos);  // (this will re-entrantly call updateVisibleArea again)
            return;
        }
    }

    const Rectangle<int> visibleArea (visibleOrigin.x, visibleOrigin.y,
                                      jmin (contentBounds.getWidth()  - visibleOrigin.x, contentArea.getWidth()),
                                      jmin (contentBounds.getHeight() - visibleOrigin.y, contentArea.getHeight()));

    if (lastVisibleArea != visibleArea)
    {
        lastVisibleArea = visibleArea;
        visibleAreaChanged (visibleArea);
    }

    horizontalScrollBar.handleUpdateNowIfNeeded();
    verticalScrollBar.handleUpdateNowIfNeeded();
}
//==============================================================================
// Sets the distance (px) moved by a single scrollbar button click / wheel step.
void Viewport::setSingleStepSizes (const int stepX, const int stepY)
{
    if (singleStepX != stepX || singleStepY != stepY)
    {
        singleStepX = stepX;
        singleStepY = stepY;
        updateVisibleArea();
    }
}
// Enables/disables each scrollbar, and optionally still allows scrolling
// (e.g. via wheel or keys) on an axis whose bar is hidden.
void Viewport::setScrollBarsShown (const bool showVerticalScrollbarIfNeeded,
                                   const bool showHorizontalScrollbarIfNeeded,
                                   const bool allowVerticalScrollingWithoutScrollbar,
                                   const bool allowHorizontalScrollingWithoutScrollbar)
{
    allowScrollingWithoutScrollbarV = allowVerticalScrollingWithoutScrollbar;
    allowScrollingWithoutScrollbarH = allowHorizontalScrollingWithoutScrollbar;

    if (showVScrollbar != showVerticalScrollbarIfNeeded
         || showHScrollbar != showHorizontalScrollbarIfNeeded)
    {
        showVScrollbar = showVerticalScrollbarIfNeeded;
        showHScrollbar = showHorizontalScrollbarIfNeeded;
        updateVisibleArea();
    }
}
// A thickness of 0 (the default) means "use the LookAndFeel's default width" -
// see getScrollBarThickness() below.
void Viewport::setScrollBarThickness (const int thickness)
{
    if (scrollBarThickness != thickness)
    {
        scrollBarThickness = thickness;
        updateVisibleArea();
    }
}

int Viewport::getScrollBarThickness() const
{
    return scrollBarThickness > 0 ? scrollBarThickness
                                  : getLookAndFeel().getDefaultScrollbarWidth();
}
// ScrollBar listener callback: moves the view to follow whichever bar moved,
// keeping the other axis unchanged.
void Viewport::scrollBarMoved (ScrollBar* scrollBarThatHasMoved, double newRangeStart)
{
    const int newRangeStartInt = roundToInt (newRangeStart);

    if (scrollBarThatHasMoved == &horizontalScrollBar)
    {
        setViewPosition (newRangeStartInt, getViewPositionY());
    }
    else if (scrollBarThatHasMoved == &verticalScrollBar)
    {
        setViewPosition (getViewPositionX(), newRangeStartInt);
    }
}
// Consumes wheel events this viewport can act on; otherwise lets the event
// bubble up to parent components.
void Viewport::mouseWheelMove (const MouseEvent& e, const MouseWheelDetails& wheel)
{
    if (! useMouseWheelMoveIfNeeded (e, wheel))
        Component::mouseWheelMove (e, wheel);
}
// Converts a raw wheel delta into a pixel scroll distance, scaled by the
// viewport's single-step size. Any non-zero movement is pushed away from zero
// so that even a tiny wheel tick scrolls by at least one pixel.
static int rescaleMouseWheelDistance (float distance, int singleStepSize) noexcept
{
    if (distance == 0)
        return 0;

    const float scaled = distance * 14.0f * singleStepSize;

    if (scaled < 0)
        return roundToInt (jmin (scaled, -1.0f));

    return roundToInt (jmax (scaled, 1.0f));
}
// Attempts to scroll in response to a wheel event, returning true only if the
// view position actually changed. Events with alt/ctrl/cmd held are ignored
// so that modifier-based gestures (e.g. ctrl+wheel zoom) aren't swallowed.
bool Viewport::useMouseWheelMoveIfNeeded (const MouseEvent& e, const MouseWheelDetails& wheel)
{
    if (! (e.mods.isAltDown() || e.mods.isCtrlDown() || e.mods.isCommandDown()))
    {
        const bool canScrollVert = (allowScrollingWithoutScrollbarV || verticalScrollBar.isVisible());
        const bool canScrollHorz = (allowScrollingWithoutScrollbarH || horizontalScrollBar.isVisible());

        if (canScrollHorz || canScrollVert)
        {
            const int deltaX = rescaleMouseWheelDistance (wheel.deltaX, singleStepX);
            const int deltaY = rescaleMouseWheelDistance (wheel.deltaY, singleStepY);

            Point<int> pos (getViewPosition());

            if (deltaX != 0 && deltaY != 0 && canScrollHorz && canScrollVert)
            {
                // Diagonal movement when both axes are scrollable.
                pos.x -= deltaX;
                pos.y -= deltaY;
            }
            else if (canScrollHorz && (deltaX != 0 || e.mods.isShiftDown() || ! canScrollVert))
            {
                // Horizontal scroll; covers shift+wheel and vertical deltas
                // when only horizontal scrolling is possible.
                pos.x -= deltaX != 0 ? deltaX : deltaY;
            }
            else if (canScrollVert && deltaY != 0)
            {
                pos.y -= deltaY;
            }

            if (pos != getViewPosition())
            {
                setViewPosition (pos);
                return true;
            }
        }
    }

    return false;
}
// Helpers for keyPressed(): classify navigation keys by scrolling axis.
static bool isUpDownKeyPress (const KeyPress& key)
{
    return key == KeyPress::upKey
        || key == KeyPress::downKey
        || key == KeyPress::pageUpKey
        || key == KeyPress::pageDownKey
        || key == KeyPress::homeKey
        || key == KeyPress::endKey;
}

static bool isLeftRightKeyPress (const KeyPress& key)
{
    return key == KeyPress::leftKey
        || key == KeyPress::rightKey;
}
// Forwards navigation keys to the appropriate scrollbar. When no vertical bar
// is visible, up/down/page keys fall through to the horizontal bar.
bool Viewport::keyPressed (const KeyPress& key)
{
    const bool isUpDownKey = isUpDownKeyPress (key);

    if (verticalScrollBar.isVisible() && isUpDownKey)
        return verticalScrollBar.keyPressed (key);

    const bool isLeftRightKey = isLeftRightKeyPress (key);

    if (horizontalScrollBar.isVisible() && (isUpDownKey || isLeftRightKey))
        return horizontalScrollBar.keyPressed (key);

    return false;
}

// Returns true for any key that keyPressed() might handle.
bool Viewport::respondsToKey (const KeyPress& key)
{
    return isUpDownKeyPress (key) || isLeftRightKeyPress (key);
}
| deeuu/LoudnessMeters | src/JuceLibraryCode/modules/juce_gui_basics/layout/juce_Viewport.cpp | C++ | gpl-3.0 | 15,506 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Metric class used in monitor mixin framework.
"""
import numpy
class Metric(object):
  """
  A metric computed over a set of data (usually from a `CountsTrace`).

  The constructor immediately computes min / max / sum / mean / standard
  deviation of the given data; every statistic stays None when the data is
  empty.
  """

  def __init__(self, monitor, title, data):
    """
    @param monitor (MonitorMixinBase) Monitor Mixin instance that generated
                                      this trace
    @param title   (string)           Title
    @param data    (list)             List of numbers to compute metric from
    """
    self.monitor = monitor
    self.title = title

    self.min = None
    self.max = None
    self.sum = None
    self.mean = None
    self.standardDeviation = None

    self._computeStats(data)


  @staticmethod
  def createFromTrace(trace, excludeResets=None):
    """Build a Metric from a trace, optionally dropping reset timesteps."""
    if excludeResets is None:
      data = list(trace.data)
    else:
      data = [x for i, x in enumerate(trace.data) if not excludeResets.data[i]]
    return Metric(trace.monitor, trace.title, data)


  def copy(self):
    """Return a new Metric carrying the same title, monitor and statistics."""
    duplicate = Metric(self.monitor, self.title, [])

    duplicate.min = self.min
    duplicate.max = self.max
    duplicate.sum = self.sum
    duplicate.mean = self.mean
    duplicate.standardDeviation = self.standardDeviation

    return duplicate


  def prettyPrintTitle(self):
    """Title string, prefixed with the monitor's name when one is set."""
    if self.monitor.mmName is None:
      return self.title
    return "[{0}] {1}".format(self.monitor.mmName, self.title)


  def _computeStats(self, data):
    # Leave every statistic as None for empty data.
    if len(data) == 0:
      return

    self.min = min(data)
    self.max = max(data)
    self.sum = sum(data)
    self.mean = numpy.mean(data)
    self.standardDeviation = numpy.std(data)


  def getStats(self, sigFigs=7):
    """Return [mean, std, min, max, sum], each rounded to sigFigs digits
    (or five Nones when no data was supplied)."""
    if self.mean is None:
      return [None] * 5
    return [round(statistic, sigFigs)
            for statistic in (self.mean, self.standardDeviation,
                              self.min, self.max, self.sum)]
| david-ragazzi/nupic | nupic/research/monitor_mixin/metric.py | Python | gpl-3.0 | 2,865 |
/*
* Copyright 2015-2016 the original author or authors
*
* This software is licensed under the Apache License, Version 2.0,
* the GNU Lesser General Public License version 2 or later ("LGPL")
* and the WTFPL.
* You may choose either license to govern your use of this software only
* upon the condition that you accept all of the terms of either
* the Apache License 2.0, the LGPL 2.1+ or the WTFPL.
*/
package eu.siacs.conversations.utils;
import de.measite.minidns.dnsserverlookup.AbstractDNSServerLookupMechanism;
import de.measite.minidns.dnsserverlookup.AndroidUsingReflection;
import de.measite.minidns.dnsserverlookup.DNSServerLookupMechanism;
import de.measite.minidns.util.PlatformDetection;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.net.InetAddress;
import java.util.HashSet;
import java.util.logging.Level;
/**
 * Try to retrieve the list of DNS servers by executing getprop.
 */
public class AndroidUsingExecLowPriority extends AbstractDNSServerLookupMechanism {

    public static final DNSServerLookupMechanism INSTANCE = new AndroidUsingExecLowPriority();
    public static final int PRIORITY = AndroidUsingReflection.PRIORITY + 1;

    private AndroidUsingExecLowPriority() {
        super(AndroidUsingExecLowPriority.class.getSimpleName(), PRIORITY);
    }

    /**
     * Runs {@code getprop} and scans its output for {@code *.dns*} system
     * properties, returning the normalized addresses found, or {@code null}
     * when none were found or the command failed.
     */
    @Override
    public String[] getDnsServerAddresses() {
        try {
            Process process = Runtime.getRuntime().exec("getprop");
            // try-with-resources: the process' stdout stream was previously
            // leaked; make sure it is always closed, even on parse errors.
            try (LineNumberReader lnr = new LineNumberReader(
                    new InputStreamReader(process.getInputStream()))) {
                String line;
                HashSet<String> server = new HashSet<>(6);
                while ((line = lnr.readLine()) != null) {
                    // getprop lines look like "[net.dns1]: [8.8.8.8]"
                    int split = line.indexOf("]: [");
                    if (split == -1) {
                        continue;
                    }
                    String property = line.substring(1, split);
                    String value = line.substring(split + 4, line.length() - 1);

                    if (value.isEmpty()) {
                        continue;
                    }

                    if (property.endsWith(".dns") || property.endsWith(".dns1") ||
                        property.endsWith(".dns2") || property.endsWith(".dns3") ||
                        property.endsWith(".dns4")) {

                        // normalize the address
                        InetAddress ip = InetAddress.getByName(value);
                        if (ip == null) continue;

                        value = ip.getHostAddress();

                        if (value == null || value.isEmpty()) continue;

                        server.add(value);
                    }
                }
                if (!server.isEmpty()) {
                    return server.toArray(new String[server.size()]);
                }
            }
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, "Exception in findDNSByExec", e);
        }
        return null;
    }

    @Override
    public boolean isAvailable() {
        return PlatformDetection.isAndroid();
    }
}
| Kandru/KandruIM | src/main/java/eu/siacs/conversations/utils/AndroidUsingExecLowPriority.java | Java | gpl-3.0 | 2,702 |
// Plugin: jQuery.scrollSpeed
// Source: github.com/nathco/jQuery.scrollSpeed
// Author: Nathan Rutzky
// Update: 1.0.2
//
// Replaces native mouse-wheel scrolling with an animated "smooth" scroll.
// `step` is the distance per wheel tick (px), `speed` the animation duration
// (ms), and `easing` an optional jQuery easing name (falls back to the
// custom 'default' easing registered below).
(function($) {

    jQuery.scrollSpeed = function(step, speed, easing) {

        var $document = $(document),
            $window = $(window),
            $body = $('html, body'),
            option = easing || 'default',
            root = 0,            // current scroll target position
            scroll = false,      // true while our own animation drives the scroll
            scrollY,             // whether the page scrolls vertically
            scrollX,             // whether the page scrolls horizontally
            view;                // viewport size on the scrollable axis

        // Bail out on IE10-style pointer devices (not supported).
        if (window.navigator.msPointerEnabled)
            return false;

        $window.on('mousewheel DOMMouseScroll', function(e) {

            // 'mousewheel' exposes wheelDeltaY; Firefox's 'DOMMouseScroll'
            // uses `detail` with the opposite sign convention.
            var deltaY = e.originalEvent.wheelDeltaY,
                detail = e.originalEvent.detail;
                scrollY = $document.height() > $window.height();
                scrollX = $document.width() > $window.width();
                scroll = true;

            if (scrollY) {

                view = $window.height();

                // Clamp the target to [0, document height - viewport].
                if (deltaY < 0 || detail > 0)
                    root = (root + view) >= $document.height() ? root : root += step;

                if (deltaY > 0 || detail < 0)
                    root = root <= 0 ? 0 : root -= step;

                $body.stop().animate({
                    scrollTop: root
                }, speed, option, function() {
                    scroll = false;
                });
            }

            if (scrollX) {

                view = $window.width();

                if (deltaY < 0 || detail > 0)
                    root = (root + view) >= $document.width() ? root : root += step;

                if (deltaY > 0 || detail < 0)
                    root = root <= 0 ? 0 : root -= step;

                $body.stop().animate({
                    scrollLeft: root
                }, speed, option, function() {
                    scroll = false;
                });
            }

            // Suppress the browser's native scrolling.
            return false;

        }).on('scroll', function() {

            // Keep `root` in sync when the user scrolls by other means
            // (keyboard, scrollbar drag, ...).
            if (scrollY && !scroll) root = $window.scrollTop();
            if (scrollX && !scroll) root = $window.scrollLeft();

        }).on('resize', function() {

            if (scrollY && !scroll) view = $window.height();
            if (scrollX && !scroll) view = $window.width();

        });
    };

    // Quartic ease-out used when no easing name is supplied.
    jQuery.easing.default = function (x,t,b,c,d) {
        return -c * ((t=t/d-1)*t*t*t - 1) + b;
    };

})(jQuery);
/* Copyright (c) Business Objects 2006. All rights reserved. */
/*
================================================================================
ParameterUI
Widget for displaying and editing parameter values. Contains one or many
ParameterValueRows and, optionally, UI that allows rows to be added.
================================================================================
*/
/**
 * Factory for ParameterUI widgets.
 *
 * Creates a widget instance, fills it with the (defaulted) constructor
 * arguments, mixes in the ParameterUI methods, builds the shared
 * default-values menu and the info row, and returns it. Rendering happens
 * later, in getHTML()/init().
 */
bobj.crv.params.newParameterUI = function(kwArgs) {
    kwArgs = MochiKit.Base.update({
        id: bobj.uniqueId(),
        canChangeOnPanel: false,
        allowCustom: false,
        isPassword : false,
        isReadOnlyParam: true,
        allowRange : false,
        values: [],
        defaultValues: null,
        width: '200px',
        changeValueCB: null,       // invoked with (rowIndex, newValue) on edits
        enterPressCB: null,        // invoked with (rowIndex) when ENTER is hit
        openAdvDialogCB: null,
        maxNumParameterDefaultValues: 200,
        tooltip : null,
        calendarProperties : {displayValueFormat : '' , isTimeShown : false, hasButton : false, iconUrl : ''},
        maxNumValuesDisplayed : 7, // rows shown before the "x more values" link
        canOpenAdvDialog : false
    }, kwArgs);

    var o = newWidget(kwArgs.id);

    // Update instance with constructor arguments
    bobj.fillIn(o, kwArgs);

    o.displayAllValues = false;

    // Update instance with member functions
    MochiKit.Base.update(o, bobj.crv.params.ParameterUI);

    o._createMenu();
    o._rows = [];
    o._infoRow = new bobj.crv.params.ParameterInfoRow(o.id);

    return o;
};
bobj.crv.params.ParameterUI = {

    /**
     * Creates single menubar for all parameter value rows of current param UI
     */
    _createMenu : function() {
        var dvLength = this.defaultValues.length;
        if (dvLength > 0) {
            var kwArgs = {
                originalValues : this.defaultValues
            };
            // If the server truncated the list of defaults, append a marker
            // entry and allow the menu to open the advanced dialog instead.
            if (dvLength == this.maxNumParameterDefaultValues) {
                kwArgs.originalValues[this.maxNumParameterDefaultValues] = L_bobj_crv_ParamsMaxNumDefaultValues;
                MochiKit.Base.update (kwArgs, {
                    openAdvDialogCB : this.openAdvDialogCB,
                    maxNumParameterDefaultValues : this.maxNumParameterDefaultValues
                });
            }
            this._defaultValuesMenu = bobj.crv.params.newScrollMenuWidget (kwArgs);
        } else {
            this._defaultValuesMenu = null;
        }
    },

    // Gives keyboard focus to the value row at rowIndex (if it exists).
    setFocusOnRow : function(rowIndex) {
        var row = this._rows[rowIndex];
        if (row)
            row.focus ();
    },

    /*
     * Disables tabbing if dis is true
     */
    setTabDisabled : function(dis) {
        for(var i = 0, len = this._rows.length; i < len; i++) {
            this._rows[i].setTabDisabled(dis);
        }
        this._infoRow.setTabDisabled(dis);
    },

    // Post-render initialization of this widget and all its child rows.
    init : function() {
        Widget_init.call (this);
        var rows = this._rows;
        for ( var i = 0, len = rows.length; i < len; ++i) {
            rows[i].init ();
        }
        MochiKit.Signal.connect(this._infoRow, "switch", this, '_onSwitchDisplayAllValues');
        this.refreshUI ();
    },

    /**
     * Processes actions triggered by clicks on "x more values" or "collapse" button displayed in inforow
     */
    _onSwitchDisplayAllValues: function() {
        this.displayAllValues = !this.displayAllValues;

        // Rows are added/removed one per timer tick so large value lists
        // don't block the UI thread.
        var TIME_INTERVAL = 10; /* 10 msec or 100 actions per second */
        var timerIndex = 0;
        if(this.displayAllValues) {
            if (this.values.length > this._rows.length) {
                for(var i = this._rows.length, l = this.values.length; i < l; i++) {
                    // Closure factory: capture the current value for the callback.
                    var addRow = function(paramUI, value) {
                        return function() { return paramUI._addRow(value); };
                    };
                    timerIndex++;
                    setTimeout(addRow(this, this.values[i]), TIME_INTERVAL * timerIndex);
                }
            }
        }
        else {
            if(this._rows.length > this.maxNumValuesDisplayed) {
                // Delete from the end so the remaining indices stay valid.
                for(var i = this._rows.length -1; i >= this.maxNumValuesDisplayed; i--) {
                    var deleteRow = function(paramUI, rowIndex) {
                        return function() { return paramUI.deleteValue(rowIndex); };
                    };
                    timerIndex++;
                    setTimeout(deleteRow(this, i), TIME_INTERVAL * timerIndex);
                }
            }
        }

        // Notify listeners once the last queued row operation has run.
        var signalResize = function(paramUI) {
            return function() {MochiKit.Signal.signal(paramUI, 'ParameterUIResized'); };
        };
        setTimeout(signalResize(this), TIME_INTERVAL * timerIndex);
    },

    // Builds the container DIV holding the first maxNumValuesDisplayed rows.
    getHTML : function() {
        var rowsHtml = '';
        var values = this.values;
        var rows = this._rows;
        var rowsCount = Math.min (values.length, this.maxNumValuesDisplayed);
        for ( var i = 0; i < rowsCount; ++i) {
            rows.push (this._getRow (values[i]));
            rowsHtml += rows[i].getHTML ();
        }
        return bobj.html.DIV ( {
            id : this.id,
            style : {
                width : bobj.unitValue (this.width),
                'padding-left' : '20px'
            }
        }, rowsHtml);
    },

    // Constructor arguments shared by every value row of this parameter.
    _getNewValueRowArgs : function(value) {
        return {
            value : value,
            defaultValues : this.defaultValues,
            width : this.width,
            isReadOnlyParam : this.isReadOnlyParam,
            canChangeOnPanel : this.canChangeOnPanel,
            allowCustom : this.allowCustom,
            isPassword : this.isPassword,
            calendarProperties : this.calendarProperties,
            defaultValuesMenu : this._defaultValuesMenu,
            tooltip : this.tooltip,
            isRangeValue : this.allowRange,
            canOpenAdvDialog : this.canOpenAdvDialog
        };
    },

    // Row factory hook; subclasses may override to use a different row type.
    _getNewValueRowConstructor : function() {
        return bobj.crv.params.newParameterValueRow;
    },

    // Creates a row widget for `value` and wires its change/enter callbacks.
    _getRow : function(value) {
        var row = this._getNewValueRowConstructor()(this._getNewValueRowArgs(value));
        var bind = MochiKit.Base.bind;
        row.changeCB = bind(this._onChangeValue, this, row);
        row.enterCB = bind(this._onEnterValue, this, row);
        return row;
    },

    // Appends a new row to the DOM, initializes it, and refreshes the info row.
    _addRow : function(value) {
        var row = this._getRow (value);
        this._rows.push (row);
        append (this.layer, row.getHTML ());
        row.init ();
        this.refreshUI ();
        return row;
    },

    // Forwards a row edit to the external changeValueCB as (index, value).
    _onChangeValue : function(row) {
        if (this.changeValueCB) {
            this.changeValueCB (this._getRowIndex (row), row.getValue ());
        }
    },

    // Forwards an ENTER keypress in a row to the external enterPressCB.
    _onEnterValue : function(row) {
        if (this.enterPressCB) {
            this.enterPressCB (this._getRowIndex (row));
        }
    },

    // Returns the index of a row widget within _rows, or -1 if not found.
    _getRowIndex : function(row) {
        if (row) {
            var rows = this._rows;
            for ( var i = 0, len = rows.length; i < len; ++i) {
                if (rows[i] === row) {
                    return i;
                }
            }
        }
        return -1;
    },

    getNumValues : function() {
        return this._rows.length;
    },

    // Re-aligns range rows and updates the "x more values"/"collapse" info row.
    refreshUI : function() {
        if (this.allowRange)
            this.alignRangeRows ();

        var displayInfoRow = false;
        var infoRowText = "";
        if (this.values.length > this.maxNumValuesDisplayed) {
            displayInfoRow = true;
            if(this.displayAllValues)
                infoRowText = L_bobj_crv_Collapse;
            else {
                var hiddenValuesCount = this.values.length - this.maxNumValuesDisplayed;
                infoRowText = (hiddenValuesCount == 1) ? L_bobj_crv_ParamsMoreValue : L_bobj_crv_ParamsMoreValues;
                infoRowText = infoRowText.replace ("%1", hiddenValuesCount);
            }
        }
        this._infoRow.setText (infoRowText);
        this._infoRow.setVisible (displayInfoRow);
    },

    getValueAt : function(index) {
        var row = this._rows[index];
        if (row) {
            return row.getValue ();
        }
        return null;
    },

    // Collects the current values of all visible rows.
    getValues : function() {
        var values = [];
        for ( var i = 0, len = this._rows.length; i < len; ++i) {
            values.push (this._rows[i].getValue ());
        }
        return values;
    },

    setValueAt : function(index, value) {
        var row = this._rows[index];
        if (row) {
            row.setValue (value);
        }
        this.refreshUI ();
    },

    // Restores the given values, discarding any uncommitted row additions and
    // re-adding rows that were removed.
    resetValues : function(values) {
        if (!values) {
            return;
        }
        this.values = values;
        var valuesLen = values.length;
        var rowsLen = this._rows.length;

        //Resets value
        for ( var i = 0; i < valuesLen && i < rowsLen; ++i) {
            this._rows[i].reset (values[i]);
        }

        //removes newly added values that are not commited
        if (rowsLen > valuesLen) {
            for ( var i = rowsLen - 1; i >= valuesLen; --i) {
                // delete from the end to minimize calls to setBgColor
                this.deleteValue (i);
            }
        }
        //re-adds removed values
        else if (valuesLen > rowsLen) {
            for ( var i = rowsLen; i < valuesLen && (this.displayAllValues || i < this.maxNumValuesDisplayed); ++i) {
                var row = this._addRow (values[i]);
            }
        }
        MochiKit.Signal.signal(this, 'ParameterUIResized');
        this.refreshUI ();
    },

    // Makes every range row's lower-bound field as wide as the widest one,
    // so the range separators line up vertically.
    alignRangeRows : function() {
        if (!this.allowRange)
            return;
        var lowerBoundWidth = 0;
        for ( var i = 0, l = this._rows.length; i < l; i++) {
            var row = this._rows[i];
            var rangeField = row._valueWidget;
            lowerBoundWidth = Math.max (lowerBoundWidth, rangeField.getLowerBoundValueWidth ());
        }
        for ( var i = 0, l = this._rows.length; i < l; i++) {
            var row = this._rows[i];
            var rangeField = row._valueWidget;
            rangeField.setLowerBoundValueWidth (lowerBoundWidth);
        }
    },

    // Replaces the displayed values, adding/removing rows as needed.
    setValues : function(values) {
        if (!values)
            return;
        this.values = values;
        var valuesLen = values.length;
        var rowsLen = this._rows.length;

        for ( var i = 0; i < valuesLen && i < rowsLen; ++i) {
            this._rows[i].setValue (values[i]);
        }
        if (rowsLen > valuesLen) {
            for ( var i = rowsLen - 1; i >= valuesLen; --i) {
                // delete from the end to minimize calls to setBgColor
                this.deleteValue (i);
            }
        } else if (valuesLen > rowsLen) {
            for ( var i = rowsLen; i < valuesLen && (this.displayAllValues || i < this.maxNumValuesDisplayed); ++i) {
                this._addRow (values[i]);
            }
        }
        MochiKit.Signal.signal(this, 'ParameterUIResized');
        this.refreshUI ();
    },

    // Sets a row's value without marking the row as dirty/modified.
    setCleanValue : function(index, value) {
        var row = this._rows[index];
        if (row)
            row.setCleanValue (value);
    },

    // Removes the row at `index` from the DOM and the widget registry.
    deleteValue : function(index) {
        if (index >= 0 && index < this._rows.length) {
            var row = this._rows[index];
            row.layer.parentNode.removeChild (row.layer);
            _widgets[row.widx] = null;
            this._rows.splice (index, 1);
            var rowsLen = this._rows.length;
        }
        this.refreshUI ();
    },

    setWarning : function(index, warning) {
        var row = this._rows[index];
        if (row) {
            row.setWarning (warning);
        }
    },

    getWarning : function(index) {
        var row = this._rows[index];
        if (row)
            return row.getWarning ();
        return null;
    },

    // Resizes the container to the given outer width (no-op when w is null).
    resize : function(w) {
        if (w !== null) {
            this.width = w;
            if (this.layer) {
                bobj.setOuterSize (this.layer, w);
            }
        }
    }
};
| johndavedecano/tanghalangpasigenyo | aspnet_client/system_web/4_0_30319/crystalreportviewers13/js/crviewer/ParameterUI.js | JavaScript | gpl-3.0 | 12,193 |
<?php
/**
* @package plugins.attUverseDistribution
* @subpackage api.filters
*/
class KalturaAttUverseDistributionProfileFilter extends KalturaAttUverseDistributionProfileBaseFilter
{
	// Intentionally empty: all filtering behaviour is inherited from the
	// auto-generated base filter class.
}
| ivesbai/server | plugins/content_distribution/providers/att_uverse/lib/api/filters/KalturaAttUverseDistributionProfileFilter.php | PHP | agpl-3.0 | 189 |
<?php
/**
* @package api
* @subpackage objects
* @deprecated use KalturaRule instead
*/
class KalturaPreviewRestriction extends KalturaSessionRestriction
{
	/**
	 * The preview restriction length
	 *
	 * @var int
	 */
	public $previewLength;

	// API properties mapped onto the core object, merged with the parent map.
	private static $mapBetweenObjects = array
	(
		"previewLength",
	);

	/**
	 * Extends the parent property map with this class' own properties.
	 */
	public function getMapBetweenObjects()
	{
		return array_merge(parent::getMapBetweenObjects(), self::$mapBetweenObjects);
	}

	/* (non-PHPdoc)
	 * @see KalturaBaseRestriction::toRule()
	 */
	public function toRule(KalturaRestrictionArray $restrictions)
	{
		// Preview restriction became a rule action, it's not a rule.
		return null;
	}
}
package models
import (
"koding/db/mongodb/modelhelper"
"net"
"socialapi/config"
"socialapi/request"
"github.com/koding/logging"
)
// Client holds the contextual requester/client info
type Client struct {
	// Account holds the requester info
	Account *Account

	// IP is remote IP of the requester
	IP net.IP

	// SessionID is session cookie id
	SessionID string
}
// Context holds contextual info regarding a REST query
type Context struct {
	// GroupName is the group (team) the request is scoped to
	GroupName string
	// Client carries requester info; may be nil for anonymous requests
	Client *Client
	// log is the request-scoped logger, accessed via MustGetLogger
	log logging.Logger
}
// NewContext creates a new, otherwise-empty context carrying the given logger
func NewContext(log logging.Logger) *Context {
	return &Context{
		log: log,
	}
}
// OverrideQuery overrides Query with context info: it stamps the context's
// group name and the requester's account id onto q, then returns the same
// query for convenient chaining.
func (c *Context) OverrideQuery(q *request.Query) *request.Query {
	// get group name from context
	q.GroupName = c.GroupName

	q.AccountId = 0
	if c.IsLoggedIn() {
		q.AccountId = c.Client.Account.Id
	}

	return q
}
// IsLoggedIn checks if the request is an authenticated one: the context must
// carry a client with a non-nil account that has a non-zero id.
func (c *Context) IsLoggedIn() bool {
	return c.Client != nil &&
		c.Client.Account != nil &&
		c.Client.Account.Id != 0
}
// IsAdmin checks if the current requester is an admin or not, this part is just
// a stub and temporary solution for moderation security; when we implement the
// permission system fully, this should be the first function to remove.
func (c *Context) IsAdmin() bool {
	if !c.IsLoggedIn() {
		return false
	}

	// DummyAdmins is a config-defined list of super-admin nicknames.
	superAdmins := config.MustGet().DummyAdmins

	return IsIn(c.Client.Account.Nick, superAdmins...)
}
// CanManage checks if the current context is the admin of the context's
// group. Returns ErrNotLoggedIn for anonymous requests and
// ErrCannotManageGroup when the requester lacks the permission.
// mongo connection is required.
func (c *Context) CanManage() error {
	if !c.IsLoggedIn() {
		return ErrNotLoggedIn
	}

	canManage, err := modelhelper.CanManage(c.Client.Account.Nick, c.GroupName)
	if err != nil {
		return err
	}

	if !canManage {
		return ErrCannotManageGroup
	}

	return nil
}
// MustGetLogger gets the logger from context, otherwise panics
// with ErrLoggerNotExist.
func (c *Context) MustGetLogger() logging.Logger {
	if c.log == nil {
		panic(ErrLoggerNotExist)
	}
	return c.log
}
| acbodine/koding | go/src/socialapi/models/context.go | GO | agpl-3.0 | 2,142 |
/*
Copyright 2008-2015 Clipperz Srl
This file is part of Clipperz, the online password manager.
For further information about its features and functionalities please
refer to http://www.clipperz.com.
* Clipperz is free software: you can redistribute it and/or modify it
under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
* Clipperz is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public
License along with Clipperz. If not, see http://www.gnu.org/licenses/.
*/
Clipperz.Base.module('Clipperz.PM.UI.Web.Components');

//#############################################################################

// Column manager that renders a site's favicon in a list cell, falling back
// to a default icon when the favicon is missing or fails to load.
Clipperz.PM.UI.Web.Components.FaviconColumnManager = function(args) {
    args = args || {};
    Clipperz.PM.UI.Web.Components.FaviconColumnManager.superclass.constructor.call(this, args);
    return this;
}
//=============================================================================

Clipperz.Base.extend(Clipperz.PM.UI.Web.Components.FaviconColumnManager, Clipperz.PM.UI.Web.Components.ColumnManager, {

    'toString': function () {
        return "Clipperz.PM.UI.Web.Components.FaviconColumnManager component";
    },

    //-------------------------------------------------------------------------

    /**
     * Renders a <td> containing the favicon <img> for the given row object,
     * using the default favicon URL when the row has no favicon value, and
     * hooking load/error/abort handlers to recover from broken images.
     */
    'renderCell': function(aRowElement, anObject) {
        var faviconImageElement;
        var faviconUrl;

        faviconImageElement = this.getId('favicon');
        faviconUrl = anObject[this.name()];
        if (faviconUrl == null) {
            faviconUrl = Clipperz.PM.Strings.getValue('defaultFaviconUrl');
        }

        Clipperz.DOM.Helper.append(aRowElement, {tag:'td', cls:this.cssClass(), children:[
            {tag:'img', id:faviconImageElement, src:faviconUrl}
        ]});

        MochiKit.Signal.connect(faviconImageElement, 'onload', this, 'handleLoadedFaviconImage');
        MochiKit.Signal.connect(faviconImageElement, 'onerror', this, 'handleMissingFaviconImage');
        MochiKit.Signal.connect(faviconImageElement, 'onabort', this, 'handleMissingFaviconImage');
    },

    //-----------------------------------------------------

    // 'load' can fire for images that never actually completed; swap in the
    // default favicon in that case.
    'handleLoadedFaviconImage': function(anEvent) {
        MochiKit.Signal.disconnectAllTo(anEvent.src());
        if (anEvent.src().complete == false) {
            anEvent.src().src = Clipperz.PM.Strings.getValue('defaultFaviconUrl');
        }
    },

    //-----------------------------------------------------

    // Error/abort handler: show the default favicon instead of a broken image.
    'handleMissingFaviconImage': function(anEvent) {
        MochiKit.Signal.disconnectAllTo(anEvent.src());
        anEvent.src().src = Clipperz.PM.Strings.getValue('defaultFaviconUrl');
    },

    //-----------------------------------------------------
    '__syntax_fix__' : 'syntax fix'
});
| gcsolaroli/password-manager | frontend/gamma/js/Clipperz/PM/UI/Web/Components/FaviconColumnManager.js | JavaScript | agpl-3.0 | 2,902 |