repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
ApplauseAQI/mysql_perf_analyzer | myperf/src/main/java/com/yahoo/dba/perf/myperf/common/Constants.java | 1337 | /*
* Copyright 2015, Yahoo Inc.
* Copyrights licensed under the Apache License.
* See the accompanying LICENSE file for terms.
*/
package com.yahoo.dba.perf.myperf.common;
/**
 * Shared constants for the MySQL performance analyzer: analyzer version,
 * generic status codes, DB instance management action codes, URL path
 * segment names and session attribute keys.
 *
 * <p>This is a pure constants holder and is never meant to be instantiated;
 * the private constructor enforces noninstantiability.
 */
public class Constants
{
  //Analyzer version
  public static final String VERSION = "2.0";
  //Generic status codes returned by operations
  public static final int STATUS_OK = 0;
  public static final int STATUS_BAD = -1;
  // DB instance management action codes
  public static final int DBM_ACTION_ADD_CLUSTER=0;
  public static final int DBM_ACTION_ADD_CLUSTER_USING_VIP=1;
  public static final int DBM_ACTION_ADD_HOST=2;
  public static final int DBM_ACTION_UPDATE_HOST=3;
  public static final int DBM_ACTION_REMOVE_HOST=4;
  public static final int DBM_ACTION_REMOVE_CLUSTER=5;
  public static final int DBM_ACTION_RENAME_CLUSTER=6;
  public static final int DBM_ACTION_ACL=7;
  // URL path segment names used when building/parsing request paths
  public static final String URL_PATH_CMD = "CMD";
  public static final String URL_PATH_DBGROUP = "DBGROUP";
  public static final String URL_PATH_DBHOST = "DBHOST";
  public static final String URL_PATH_METRICS = "METRICS";
  public static final String URL_PATH_START_TS = "START_TS";
  public static final String URL_PATH_END_TS = "END_TS";
  public static final String URL_PATH_ALERT_TYPE = "ALERT_TYPE";
  // HTTP session attribute key used to toggle per-session debugging
  public static final String SESSION_DEBUG="sess_debug";

  private Constants()
  {
    // Utility class: no instances.
  }
}
| apache-2.0 |
gemini-testing/selenium | py/selenium/webdriver/__init__.py | 1524 | #!/usr/bin/python
#
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from .firefox.webdriver import WebDriver as Firefox
from .firefox.firefox_profile import FirefoxProfile
from .chrome.webdriver import WebDriver as Chrome
from .chrome.options import Options as ChromeOptions
from .ie.webdriver import WebDriver as Ie
from .opera.webdriver import WebDriver as Opera
from .safari.webdriver import WebDriver as Safari
from .phantomjs.webdriver import WebDriver as PhantomJS
from .android.webdriver import WebDriver as Android
from .remote.webdriver import WebDriver as Remote
from .common.desired_capabilities import DesiredCapabilities
from .common.action_chains import ActionChains
from .common.touch_actions import TouchActions
from .common.proxy import Proxy
# Package release version of the selenium Python bindings.
__version__ = '2.45.0'
| apache-2.0 |
nmldiegues/stibt | infinispan/server/websocket/src/main/java/org/infinispan/server/websocket/logging/Log.java | 1424 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2000 - 2011, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.server.websocket.logging;
import org.jboss.logging.MessageLogger;
/**
 * Log abstraction for the websocket server. For this module, message ids
 * ranging from 13001 to 14000 inclusively have been reserved.
 *
 * @author Galder Zamarreño
 * @since 5.0
 */
@MessageLogger(projectCode = "ISPN")
public interface Log extends org.infinispan.util.logging.Log {
   // Intentionally empty: no websocket-specific log messages are defined yet;
   // all logging methods are inherited from the core Log interface.
}
| apache-2.0 |
Apelon-VA/ISAAC | import-export/src/main/java/org/hl7/knowledgeartifact/r1/Not.java | 1206 | //
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.30 at 06:15:10 PM PDT
//
package org.hl7.knowledgeartifact.r1;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* The Not operator returns the logical negation of its argument. If the argument is true, the result is false; if the argument is false, the result is true; otherwise, the result is null.
*
* <p>Java class for Not complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="Not">
* <complexContent>
* <extension base="{urn:hl7-org:knowledgeartifact:r1}UnaryExpression">
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Not")
public class Not
extends UnaryExpression
{
    // Intentionally empty: this JAXB-generated type adds no fields of its own;
    // all structure comes from the UnaryExpression base class, and the
    // negation semantics are described in the class Javadoc above.
}
| apache-2.0 |
zhangwei900808/structuremap | src/StructureMap.Testing/PoliciesTester.cs | 1930 | using NUnit.Framework;
namespace StructureMap.Testing
{
[TestFixture]
public class PoliciesTester
{
[Test]
public void CanBeAutoFilledIsFalse()
{
new Policies().CanBeAutoFilled(typeof (ClassWithPrimitiveConstructorArguments))
.ShouldBeFalse();
}
[Test]
public void CanBeAutoFilledIsTrue()
{
new Policies().CanBeAutoFilled(typeof (ClassWithAllNonSimpleConstructorArguments))
.ShouldBeTrue();
}
[Test]
public void cannot_be_auto_filled_with_no_contructors()
{
new Policies().CanBeAutoFilled(typeof (ClassWithNoConstructor))
.ShouldBeFalse();
}
public interface IAutomobile
{
}
public interface IEngine
{
}
public class ClassWithNoConstructor
{
private ClassWithNoConstructor()
{
}
}
public class ClassWithPrimitiveConstructorArguments : IAutomobile
{
private readonly string _breed;
private readonly IEngine _engine;
private readonly int _horsePower;
public ClassWithPrimitiveConstructorArguments(int horsePower, string breed, IEngine engine)
{
_horsePower = horsePower;
_breed = breed;
_engine = engine;
}
}
public class ClassWithAllNonSimpleConstructorArguments : IAutomobile
{
private readonly IEngine _engine;
public ClassWithAllNonSimpleConstructorArguments(IEngine engine)
{
_engine = engine;
}
public IEngine Engine
{
get { return _engine; }
}
}
}
} | apache-2.0 |
devananda/ironic | ironic/tests/unit/api/v1/test_chassis.py | 21807 | # -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for the API /chassis/ methods.
"""
import datetime
import mock
from oslo_config import cfg
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from six.moves import http_client
from six.moves.urllib import parse as urlparse
from wsme import types as wtypes
from ironic.api.controllers import base as api_base
from ironic.api.controllers import v1 as api_v1
from ironic.api.controllers.v1 import chassis as api_chassis
from ironic.tests import base
from ironic.tests.unit.api import base as test_api_base
from ironic.tests.unit.api import utils as apiutils
from ironic.tests.unit.objects import utils as obj_utils
class TestChassisObject(base.TestCase):
    """Unit test for constructing the API Chassis object directly."""

    def test_chassis_init(self):
        # Build post data without a description and verify the attribute
        # comes back as wtypes.Unset rather than None.
        post_data = apiutils.chassis_post_data()
        post_data.pop('description')
        obj = api_chassis.Chassis(**post_data)
        self.assertEqual(wtypes.Unset, obj.description)
class TestListChassis(test_api_base.BaseApiTest):
    """API tests for GET /chassis: listing, detail views, custom fields,
    pagination/sorting, links, and the /chassis/<uuid>/nodes subresource.
    """

    def test_empty(self):
        data = self.get_json('/chassis')
        self.assertEqual([], data['chassis'])

    def test_one(self):
        chassis = obj_utils.create_test_chassis(self.context)
        data = self.get_json('/chassis')
        self.assertEqual(chassis.uuid, data['chassis'][0]["uuid"])
        # Collection view must not expose detail-only fields.
        self.assertNotIn('extra', data['chassis'][0])
        self.assertNotIn('nodes', data['chassis'][0])

    def test_get_one(self):
        chassis = obj_utils.create_test_chassis(self.context)
        data = self.get_json('/chassis/%s' % chassis['uuid'])
        self.assertEqual(chassis.uuid, data['uuid'])
        # Single-resource view does include the detail fields.
        self.assertIn('extra', data)
        self.assertIn('nodes', data)

    def test_get_one_custom_fields(self):
        chassis = obj_utils.create_test_chassis(self.context)
        fields = 'extra,description'
        data = self.get_json(
            '/chassis/%s?fields=%s' % (chassis.uuid, fields),
            headers={api_base.Version.string: str(api_v1.MAX_VER)})
        # We always append "links"
        self.assertItemsEqual(['description', 'extra', 'links'], data)

    def test_get_collection_custom_fields(self):
        fields = 'uuid,extra'
        for i in range(3):
            obj_utils.create_test_chassis(
                self.context, uuid=uuidutils.generate_uuid())
        data = self.get_json(
            '/chassis?fields=%s' % fields,
            headers={api_base.Version.string: str(api_v1.MAX_VER)})
        self.assertEqual(3, len(data['chassis']))
        for ch in data['chassis']:
            # We always append "links"
            self.assertItemsEqual(['uuid', 'extra', 'links'], ch)

    def test_get_custom_fields_invalid_fields(self):
        # Unknown field names in ?fields= must produce a 400 naming the field.
        chassis = obj_utils.create_test_chassis(self.context)
        fields = 'uuid,spongebob'
        response = self.get_json(
            '/chassis/%s?fields=%s' % (chassis.uuid, fields),
            headers={api_base.Version.string: str(api_v1.MAX_VER)},
            expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertIn('spongebob', response.json['error_message'])

    def test_get_custom_fields_invalid_api_version(self):
        # ?fields= is not supported at the minimum API version.
        chassis = obj_utils.create_test_chassis(self.context)
        fields = 'uuid,extra'
        response = self.get_json(
            '/chassis/%s?fields=%s' % (chassis.uuid, fields),
            headers={api_base.Version.string: str(api_v1.MIN_VER)},
            expect_errors=True)
        self.assertEqual(http_client.NOT_ACCEPTABLE, response.status_int)

    def test_detail(self):
        chassis = obj_utils.create_test_chassis(self.context)
        data = self.get_json('/chassis/detail')
        self.assertEqual(chassis.uuid, data['chassis'][0]["uuid"])
        self.assertIn('extra', data['chassis'][0])
        self.assertIn('nodes', data['chassis'][0])

    def test_detail_against_single(self):
        # /chassis/<uuid>/detail is not a valid route.
        chassis = obj_utils.create_test_chassis(self.context)
        response = self.get_json('/chassis/%s/detail' % chassis['uuid'],
                                 expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)

    def test_many(self):
        ch_list = []
        for id_ in range(5):
            chassis = obj_utils.create_test_chassis(
                self.context, uuid=uuidutils.generate_uuid())
            ch_list.append(chassis.uuid)
        data = self.get_json('/chassis')
        self.assertEqual(len(ch_list), len(data['chassis']))
        uuids = [n['uuid'] for n in data['chassis']]
        six.assertCountEqual(self, ch_list, uuids)

    def _test_links(self, public_url=None):
        # Helper: verify self/bookmark links, optionally against a configured
        # public endpoint URL.
        cfg.CONF.set_override('public_endpoint', public_url, 'api')
        uuid = uuidutils.generate_uuid()
        obj_utils.create_test_chassis(self.context, uuid=uuid)
        data = self.get_json('/chassis/%s' % uuid)
        self.assertIn('links', data.keys())
        self.assertEqual(2, len(data['links']))
        self.assertIn(uuid, data['links'][0]['href'])
        for l in data['links']:
            bookmark = l['rel'] == 'bookmark'
            self.assertTrue(self.validate_link(l['href'], bookmark=bookmark))
        if public_url is not None:
            expected = [{'href': '%s/v1/chassis/%s' % (public_url, uuid),
                         'rel': 'self'},
                        {'href': '%s/chassis/%s' % (public_url, uuid),
                         'rel': 'bookmark'}]
            for i in expected:
                self.assertIn(i, data['links'])

    def test_links(self):
        self._test_links()

    def test_links_public_url(self):
        self._test_links(public_url='http://foo')

    def test_collection_links(self):
        # Explicit ?limit= should produce a 'next' marker at the last uuid.
        for id in range(5):
            obj_utils.create_test_chassis(self.context,
                                          uuid=uuidutils.generate_uuid())
        data = self.get_json('/chassis/?limit=3')
        self.assertEqual(3, len(data['chassis']))
        next_marker = data['chassis'][-1]['uuid']
        self.assertIn(next_marker, data['next'])

    def test_collection_links_default_limit(self):
        # The configured max_limit applies when no ?limit= is given.
        cfg.CONF.set_override('max_limit', 3, 'api')
        for id_ in range(5):
            obj_utils.create_test_chassis(self.context,
                                          uuid=uuidutils.generate_uuid())
        data = self.get_json('/chassis')
        self.assertEqual(3, len(data['chassis']))
        next_marker = data['chassis'][-1]['uuid']
        self.assertIn(next_marker, data['next'])

    def test_sort_key(self):
        ch_list = []
        for id_ in range(3):
            chassis = obj_utils.create_test_chassis(
                self.context, uuid=uuidutils.generate_uuid())
            ch_list.append(chassis.uuid)
        data = self.get_json('/chassis?sort_key=uuid')
        uuids = [n['uuid'] for n in data['chassis']]
        self.assertEqual(sorted(ch_list), uuids)

    def test_sort_key_invalid(self):
        # Unknown or non-sortable ('extra') keys must return 400.
        invalid_keys_list = ['foo', 'extra']
        for invalid_key in invalid_keys_list:
            response = self.get_json('/chassis?sort_key=%s' % invalid_key,
                                     expect_errors=True)
            self.assertEqual(http_client.BAD_REQUEST, response.status_int)
            self.assertEqual('application/json', response.content_type)
            self.assertIn(invalid_key, response.json['error_message'])

    def test_nodes_subresource_link(self):
        chassis = obj_utils.create_test_chassis(self.context)
        data = self.get_json('/chassis/%s' % chassis.uuid)
        self.assertIn('nodes', data.keys())

    def test_nodes_subresource(self):
        chassis = obj_utils.create_test_chassis(self.context)
        for id_ in range(2):
            obj_utils.create_test_node(self.context,
                                       chassis_id=chassis.id,
                                       uuid=uuidutils.generate_uuid())
        data = self.get_json('/chassis/%s/nodes' % chassis.uuid)
        self.assertEqual(2, len(data['nodes']))
        self.assertNotIn('next', data.keys())
        # Test collection pagination
        data = self.get_json('/chassis/%s/nodes?limit=1' % chassis.uuid)
        self.assertEqual(1, len(data['nodes']))
        self.assertIn('next', data.keys())

    def test_nodes_subresource_no_uuid(self):
        response = self.get_json('/chassis/nodes', expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)

    def test_nodes_subresource_chassis_not_found(self):
        non_existent_uuid = 'eeeeeeee-cccc-aaaa-bbbb-cccccccccccc'
        response = self.get_json('/chassis/%s/nodes' % non_existent_uuid,
                                 expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)
class TestPatch(test_api_base.BaseApiTest):
    """API tests for PATCH /chassis/<uuid> using JSON-patch operations
    (add / replace / remove) on singular fields and 'extra' collections.
    """

    def setUp(self):
        super(TestPatch, self).setUp()
        # A default chassis exists for tests using obj_utils.get_test_chassis.
        obj_utils.create_test_chassis(self.context)

    def test_update_not_found(self):
        uuid = uuidutils.generate_uuid()
        response = self.patch_json('/chassis/%s' % uuid,
                                   [{'path': '/extra/a', 'value': 'b',
                                     'op': 'add'}],
                                   expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])

    @mock.patch.object(timeutils, 'utcnow')
    def test_replace_singular(self, mock_utcnow):
        # Freeze utcnow so updated_at can be asserted exactly.
        chassis = obj_utils.get_test_chassis(self.context)
        description = 'chassis-new-description'
        test_time = datetime.datetime(2000, 1, 1, 0, 0)
        mock_utcnow.return_value = test_time
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/description',
                                     'value': description, 'op': 'replace'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        result = self.get_json('/chassis/%s' % chassis.uuid)
        self.assertEqual(description, result['description'])
        return_updated_at = timeutils.parse_isotime(
            result['updated_at']).replace(tzinfo=None)
        self.assertEqual(test_time, return_updated_at)

    def test_replace_multi(self):
        # Replace one key of 'extra'; other keys must be preserved.
        extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
        chassis = obj_utils.create_test_chassis(self.context, extra=extra,
                                                uuid=uuidutils.generate_uuid())
        new_value = 'new value'
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/extra/foo2',
                                     'value': new_value, 'op': 'replace'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        result = self.get_json('/chassis/%s' % chassis.uuid)
        extra["foo2"] = new_value
        self.assertEqual(extra, result['extra'])

    def test_remove_singular(self):
        chassis = obj_utils.create_test_chassis(self.context, extra={'a': 'b'},
                                                uuid=uuidutils.generate_uuid())
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/description', 'op': 'remove'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        result = self.get_json('/chassis/%s' % chassis.uuid)
        self.assertIsNone(result['description'])
        # Assert nothing else was changed
        self.assertEqual(chassis.uuid, result['uuid'])
        self.assertEqual(chassis.extra, result['extra'])

    def test_remove_multi(self):
        extra = {"foo1": "bar1", "foo2": "bar2", "foo3": "bar3"}
        chassis = obj_utils.create_test_chassis(self.context, extra=extra,
                                                description="foobar",
                                                uuid=uuidutils.generate_uuid())
        # Removing one item from the collection
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/extra/foo2', 'op': 'remove'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        result = self.get_json('/chassis/%s' % chassis.uuid)
        extra.pop("foo2")
        self.assertEqual(extra, result['extra'])
        # Removing the collection
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/extra', 'op': 'remove'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        result = self.get_json('/chassis/%s' % chassis.uuid)
        self.assertEqual({}, result['extra'])
        # Assert nothing else was changed
        self.assertEqual(chassis.uuid, result['uuid'])
        self.assertEqual(chassis.description, result['description'])

    def test_remove_non_existent_property_fail(self):
        chassis = obj_utils.get_test_chassis(self.context)
        response = self.patch_json(
            '/chassis/%s' % chassis.uuid,
            [{'path': '/extra/non-existent', 'op': 'remove'}],
            expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.BAD_REQUEST, response.status_code)
        self.assertTrue(response.json['error_message'])

    def test_add_root(self):
        chassis = obj_utils.get_test_chassis(self.context)
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/description', 'value': 'test',
                                     'op': 'add'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_int)

    def test_add_root_non_existent(self):
        # Adding an unknown top-level attribute must be rejected.
        chassis = obj_utils.get_test_chassis(self.context)
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/foo', 'value': 'bar',
                                     'op': 'add'}],
                                   expect_errors=True)
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
        self.assertTrue(response.json['error_message'])

    def test_add_multi(self):
        chassis = obj_utils.get_test_chassis(self.context)
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/extra/foo1', 'value': 'bar1',
                                     'op': 'add'},
                                    {'path': '/extra/foo2', 'value': 'bar2',
                                     'op': 'add'}])
        self.assertEqual('application/json', response.content_type)
        self.assertEqual(http_client.OK, response.status_code)
        result = self.get_json('/chassis/%s' % chassis.uuid)
        expected = {"foo1": "bar1", "foo2": "bar2"}
        self.assertEqual(expected, result['extra'])

    def test_patch_nodes_subresource(self):
        # The nodes subresource is read-only from the chassis controller.
        chassis = obj_utils.get_test_chassis(self.context)
        response = self.patch_json('/chassis/%s/nodes' % chassis.uuid,
                                   [{'path': '/extra/foo', 'value': 'bar',
                                     'op': 'add'}], expect_errors=True)
        self.assertEqual(http_client.FORBIDDEN, response.status_int)

    def test_remove_uuid(self):
        # The uuid is immutable; removing it must be rejected.
        chassis = obj_utils.get_test_chassis(self.context)
        response = self.patch_json('/chassis/%s' % chassis.uuid,
                                   [{'path': '/uuid', 'op': 'remove'}],
                                   expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])
class TestPost(test_api_base.BaseApiTest):
    """API tests for POST /chassis: creation, uuid generation, extra/unicode
    handling, and the forbidden nodes subresource.
    """

    @mock.patch.object(timeutils, 'utcnow')
    def test_create_chassis(self, mock_utcnow):
        # Freeze utcnow so created_at can be asserted exactly.
        cdict = apiutils.chassis_post_data()
        test_time = datetime.datetime(2000, 1, 1, 0, 0)
        mock_utcnow.return_value = test_time
        response = self.post_json('/chassis', cdict)
        self.assertEqual(http_client.CREATED, response.status_int)
        result = self.get_json('/chassis/%s' % cdict['uuid'])
        self.assertEqual(cdict['uuid'], result['uuid'])
        self.assertFalse(result['updated_at'])
        return_created_at = timeutils.parse_isotime(
            result['created_at']).replace(tzinfo=None)
        self.assertEqual(test_time, return_created_at)
        # Check location header
        self.assertIsNotNone(response.location)
        expected_location = '/v1/chassis/%s' % cdict['uuid']
        self.assertEqual(urlparse.urlparse(response.location).path,
                         expected_location)

    def test_create_chassis_doesnt_contain_id(self):
        # Wrap the db layer to inspect what the API passes down.
        with mock.patch.object(self.dbapi, 'create_chassis',
                               wraps=self.dbapi.create_chassis) as cc_mock:
            cdict = apiutils.chassis_post_data(extra={'foo': 123})
            self.post_json('/chassis', cdict)
            result = self.get_json('/chassis/%s' % cdict['uuid'])
            self.assertEqual(cdict['extra'], result['extra'])
            cc_mock.assert_called_once_with(mock.ANY)
            # Check that 'id' is not in first arg of positional args
            self.assertNotIn('id', cc_mock.call_args[0][0])

    def test_create_chassis_generate_uuid(self):
        # When no uuid is posted, the API must generate one.
        cdict = apiutils.chassis_post_data()
        del cdict['uuid']
        self.post_json('/chassis', cdict)
        result = self.get_json('/chassis')
        self.assertEqual(cdict['description'],
                         result['chassis'][0]['description'])
        self.assertTrue(uuidutils.is_uuid_like(result['chassis'][0]['uuid']))

    def test_post_nodes_subresource(self):
        # Nodes cannot be created through the chassis controller.
        chassis = obj_utils.create_test_chassis(self.context)
        ndict = apiutils.node_post_data()
        ndict['chassis_uuid'] = chassis.uuid
        response = self.post_json('/chassis/nodes', ndict,
                                  expect_errors=True)
        self.assertEqual(http_client.FORBIDDEN, response.status_int)

    def test_create_chassis_valid_extra(self):
        # 'extra' accepts arbitrary JSON-serializable values.
        cdict = apiutils.chassis_post_data(extra={'str': 'foo', 'int': 123,
                                                  'float': 0.1, 'bool': True,
                                                  'list': [1, 2], 'none': None,
                                                  'dict': {'cat': 'meow'}})
        self.post_json('/chassis', cdict)
        result = self.get_json('/chassis/%s' % cdict['uuid'])
        self.assertEqual(cdict['extra'], result['extra'])

    def test_create_chassis_unicode_description(self):
        descr = u'\u0430\u043c\u043e'
        cdict = apiutils.chassis_post_data(description=descr)
        self.post_json('/chassis', cdict)
        result = self.get_json('/chassis/%s' % cdict['uuid'])
        self.assertEqual(descr, result['description'])
class TestDelete(test_api_base.BaseApiTest):
    """API tests for DELETE /chassis/<uuid>, including the chassis-with-nodes
    and not-found error paths.
    """

    def test_delete_chassis(self):
        chassis = obj_utils.create_test_chassis(self.context)
        self.delete('/chassis/%s' % chassis.uuid)
        response = self.get_json('/chassis/%s' % chassis.uuid,
                                 expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])

    def test_delete_chassis_with_node(self):
        # A chassis that still has nodes attached cannot be deleted; the
        # error message must name the chassis.
        chassis = obj_utils.create_test_chassis(self.context)
        obj_utils.create_test_node(self.context, chassis_id=chassis.id)
        response = self.delete('/chassis/%s' % chassis.uuid,
                               expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])
        self.assertIn(chassis.uuid, response.json['error_message'])

    def test_delete_chassis_not_found(self):
        uuid = uuidutils.generate_uuid()
        response = self.delete('/chassis/%s' % uuid, expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)
        self.assertEqual('application/json', response.content_type)
        self.assertTrue(response.json['error_message'])

    def test_delete_nodes_subresource(self):
        # Nodes cannot be deleted through the chassis controller.
        chassis = obj_utils.create_test_chassis(self.context)
        response = self.delete('/chassis/%s/nodes' % chassis.uuid,
                               expect_errors=True)
        self.assertEqual(http_client.FORBIDDEN, response.status_int)
| apache-2.0 |
yancya/gcloud-ruby | google-cloud-vision/lib/google-cloud-vision.rb | 4040 | # Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# This file is here to be autorequired by bundler, so that the .vision and
# #vision methods can be available, but the library and all dependencies won't
# be loaded until required and used.
gem "google-cloud-core"
require "google/cloud"
module Google
  module Cloud
    ##
    # Creates a new object for connecting to the Vision service.
    # Each call creates a new connection.
    #
    # @param [String, Array<String>] scope The OAuth 2.0 scopes controlling the
    #   set of resources and operations that the connection can access. See
    #   [Using OAuth 2.0 to Access Google
    #   APIs](https://developers.google.com/identity/protocols/OAuth2).
    #
    #   The default scope is:
    #
    #   * `https://www.googleapis.com/auth/cloud-platform`
    # @param [Integer] timeout Default timeout to use in requests. Optional.
    # @param [Hash] client_config A hash of values to override the default
    #   behavior of the API client. Optional.
    #
    # @return [Google::Cloud::Vision::Project]
    #
    # @example
    #   require "google/cloud"
    #
    #   gcloud = Google::Cloud.new
    #   vision = gcloud.vision
    #
    #   image = vision.image "path/to/landmark.jpg"
    #
    #   landmark = image.landmark
    #   landmark.description #=> "Mount Rushmore"
    #
    # @example The default scope can be overridden with the `scope` option:
    #   require "google/cloud"
    #
    #   gcloud = Google::Cloud.new
    #   platform_scope = "https://www.googleapis.com/auth/cloud-platform"
    #   vision = gcloud.vision scope: platform_scope
    #
    def vision scope: nil, timeout: nil, client_config: nil
      # Delegate to the module-level factory, reusing this object's project,
      # keyfile and (unless overridden) timeout.
      Google::Cloud.vision @project, @keyfile, scope: scope,
                                               timeout: (timeout || @timeout),
                                               client_config: client_config
    end

    ##
    # Creates a new object for connecting to the Vision service.
    # Each call creates a new connection.
    #
    # @param [String] project Project identifier for the Vision service you are
    #   connecting to.
    # @param [String, Hash] keyfile Keyfile downloaded from Google Cloud. If
    #   file path the file must be readable.
    # @param [String, Array<String>] scope The OAuth 2.0 scopes controlling the
    #   set of resources and operations that the connection can access. See
    #   [Using OAuth 2.0 to Access Google
    #   APIs](https://developers.google.com/identity/protocols/OAuth2).
    #
    #   The default scope is:
    #
    #   * `https://www.googleapis.com/auth/cloud-platform`
    # @param [Integer] timeout Default timeout to use in requests. Optional.
    # @param [Hash] client_config A hash of values to override the default
    #   behavior of the API client. Optional.
    #
    # @return [Google::Cloud::Vision::Project]
    #
    # @example
    #   require "google/cloud"
    #
    #   vision = Google::Cloud.vision
    #
    #   image = vision.image "path/to/landmark.jpg"
    #
    #   landmark = image.landmark
    #   landmark.description #=> "Mount Rushmore"
    #
    def self.vision project = nil, keyfile = nil, scope: nil, timeout: nil,
                    client_config: nil
      # Lazy-require the implementation so merely loading this file stays cheap.
      require "google/cloud/vision"
      Google::Cloud::Vision.new project: project, keyfile: keyfile,
                                scope: scope, timeout: timeout,
                                client_config: client_config
    end
  end
end
| apache-2.0 |
tombujok/hazelcast | hazelcast/src/test/java/com/hazelcast/executor/CompletableFutureTest.java | 13827 | /*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.executor;
import com.hazelcast.core.ExecutionCallback;
import com.hazelcast.core.ICompletableFuture;
import com.hazelcast.spi.ExecutionService;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class CompletableFutureTest extends HazelcastTestSupport {
private static final RuntimeException THROW_TEST_EXCEPTION = new RuntimeException("Test exception");
private static final RuntimeException NO_EXCEPTION = null;
private ExecutionService executionService;
private CountDownLatch inExecutionLatch, startLogicLatch, executedLogic, callbacksDoneLatch;
private AtomicReference<Object> reference1, reference2;
@Rule
public ExpectedException expected = ExpectedException.none();
    @Before
    public void setUp() throws Exception {
        // Use a real node's execution service so the futures under test are
        // the production ICompletableFuture implementations.
        NodeEngine nodeEngine = getNode(createHazelcastInstance()).getNodeEngine();
        executionService = nodeEngine.getExecutionService();
        // Latches coordinating the submitted task with the test thread.
        startLogicLatch = new CountDownLatch(1);
        executedLogic = new CountDownLatch(1);
        inExecutionLatch = new CountDownLatch(1);
        // Callback results are captured here for later assertions.
        reference1 = new AtomicReference<Object>();
        reference2 = new AtomicReference<Object>();
    }
    @Test
    public void preregisterCallback() throws Exception {
        // Register the callback BEFORE the task completes; it must still fire
        // with the task's result once the task is released.
        ICompletableFuture<String> f = submitAwaitingTask(expectedNumberOfCallbacks(1), NO_EXCEPTION);
        f.andThen(storeTaskResponseToReference(reference1));
        releaseAwaitingTask();
        assertCallbacksExecutedEventually();
        assertEquals("success", reference1.get());
    }
    @Test
    public void preregisterTwoCallbacks() throws Exception {
        // Both callbacks registered before completion must fire with the result.
        ICompletableFuture<String> f = submitAwaitingTask(expectedNumberOfCallbacks(2), NO_EXCEPTION);
        f.andThen(storeTaskResponseToReference(reference1));
        f.andThen(storeTaskResponseToReference(reference2));
        releaseAwaitingTask();
        assertCallbacksExecutedEventually();
        assertEquals("success", reference1.get());
        assertEquals("success", reference2.get());
    }
    @Test
    public void preregisterTwoCallbacks_taskThrowsException() throws Exception {
        // When the task throws, both pre-registered callbacks must receive
        // the exception instead of a result.
        ICompletableFuture<String> f = submitAwaitingTask(expectedNumberOfCallbacks(2), THROW_TEST_EXCEPTION);
        f.andThen(storeTaskResponseToReference(reference1));
        f.andThen(storeTaskResponseToReference(reference2));
        releaseAwaitingTask();
        assertCallbacksExecutedEventually();
        assertTestExceptionThrown(reference1, reference2);
    }
@Test
// https://github.com/hazelcast/hazelcast/issues/6020
public void postregisterCallback() throws Exception {
ICompletableFuture<String> f = submitAwaitingTask(expectedNumberOfCallbacks(1), NO_EXCEPTION);
releaseAwaitingTask();
assertTaskFinishedEventually(f);
f.andThen(storeTaskResponseToReference(reference1));
assertCallbacksExecutedEventually();
assertEquals("success", reference1.get());
}
@Test
public void postregisterTwoCallbacks() throws Exception {
ICompletableFuture<String> f = submitAwaitingTask(expectedNumberOfCallbacks(2), NO_EXCEPTION);
releaseAwaitingTask();
assertTaskFinishedEventually(f);
f.andThen(storeTaskResponseToReference(reference1));
f.andThen(storeTaskResponseToReference(reference2));
assertCallbacksExecutedEventually();
assertEquals("success", reference1.get());
assertEquals("success", reference2.get());
}
@Test
public void postregisterTwoCallbacks_taskThrowsException() throws Exception {
ICompletableFuture<String> f = submitAwaitingTask(expectedNumberOfCallbacks(2), THROW_TEST_EXCEPTION);
releaseAwaitingTask();
assertTaskFinishedEventually(f);
f.andThen(storeTaskResponseToReference(reference1));
f.andThen(storeTaskResponseToReference(reference2));
assertCallbacksExecutedEventually();
assertTestExceptionThrown(reference1, reference2);
}
@Test(timeout = 60000)
public void get_taskThrowsException() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(THROW_TEST_EXCEPTION);
submitReleasingTask(100);
expected.expect(ExecutionException.class);
f.get();
}
@Test(timeout = 60000)
public void getWithTimeout_taskThrowsException() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(THROW_TEST_EXCEPTION);
submitReleasingTask(200);
expected.expect(ExecutionException.class);
f.get(30000, TimeUnit.MILLISECONDS);
}
@Test(timeout = 60000)
public void getWithTimeout_finishesWithinTime() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(NO_EXCEPTION);
submitReleasingTask(200);
String result = f.get(30000, TimeUnit.MILLISECONDS);
assertEquals("success", result);
}
@Test(timeout = 60000)
public void getWithTimeout_timesOut() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(NO_EXCEPTION);
expected.expect(TimeoutException.class);
f.get(1, TimeUnit.MILLISECONDS);
}
@Test
public void singleCancellation_beforeDone_succeeds() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(NO_EXCEPTION);
assertTaskInExecution();
boolean cancelResult = f.cancel(false);
assertTrue("Task cancellation succeeded should succeed", cancelResult);
}
@Test
public void doubleCancellation_beforeDone_firstSucceeds_secondFails() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(NO_EXCEPTION);
assertTaskInExecution(); // but never released to execute logic
boolean firstCancelResult = f.cancel(false);
boolean secondCancelResult = f.cancel(false);
assertTrue("First task cancellation should succeed", firstCancelResult);
assertFalse("Second task cancellation should failed", secondCancelResult);
}
@Test
public void cancellation_afterDone_taskNotCancelled_flagsSetCorrectly() throws Exception {
final ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(NO_EXCEPTION);
assertTaskInExecution();
releaseAwaitingTask();
assertTaskExecutedItsLogic();
assertTaskFinishedEventually(f);
boolean firstCancelResult = f.cancel(false);
boolean secondCancelResult = f.cancel(false);
assertFalse("Cancellation should not succeed after task is done", firstCancelResult);
assertFalse("Cancellation should not succeed after task is done", secondCancelResult);
assertFalse("Task should NOT be cancelled", f.isCancelled());
assertEquals("success", f.get());
}
@Test
public void noCancellation_afterDone_flagsSetCorrectly() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(NO_EXCEPTION);
assertTaskInExecution();
releaseAwaitingTask();
assertTaskExecutedItsLogic();
assertTaskFinishedEventually(f);
assertTrue("Task should be done", f.isDone());
assertFalse("Task should NOT be cancelled", f.isCancelled());
assertEquals("success", f.get());
}
@Test(timeout = 60000)
public void cancelAndGet_taskCancelled_withoutInterruption_logicExecuted() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(NO_EXCEPTION);
assertTaskInExecution();
boolean cancelResult = f.cancel(false);
releaseAwaitingTask();
assertTaskExecutedItsLogic(); // cancellation came, when task already awaiting, so logic executed
assertTaskFinishedEventually(f);
assertTrue("Task cancellation should succeed", cancelResult);
assertTrue("Task should be done", f.isDone());
assertTrue("Task should be cancelled", f.isCancelled());
expected.expect(CancellationException.class);
f.get();
}
@Test(timeout = 60000)
public void cancelAndGet_taskCancelled_withInterruption_noLogicExecuted() throws Exception {
ICompletableFuture<String> f = submitAwaitingTaskNoCallbacks(NO_EXCEPTION);
assertTaskInExecution();
boolean cancelResult = f.cancel(true);
assertTaskInterruptedAndDidNotExecuteItsLogic();
assertTaskFinishedEventually(f); // task did not have to be releases - interruption was enough
assertTrue("Task cancellation should succeed", cancelResult);
assertTrue("Task should be done", f.isDone());
assertTrue("Task should be cancelled", f.isCancelled());
expected.expect(CancellationException.class);
f.get();
}
private static void assertTestExceptionThrown(AtomicReference<?>... refs) {
for (AtomicReference<?> ref : refs) {
assertThat("ExecutionException expected", ref.get(), instanceOf(ExecutionException.class));
}
for (AtomicReference<?> ref : refs) {
assertThat("TEST_EXCEPTION expected as cause", ((Throwable) ref.get()).getCause(),
Matchers.<Throwable>sameInstance(THROW_TEST_EXCEPTION));
}
}
private ICompletableFuture<String> submitAwaitingTaskNoCallbacks(final Exception exception) {
return submitAwaitingTask(0, exception);
}
private ICompletableFuture<String> submitAwaitingTask(Integer numberOfCallbacks, final Exception exception) {
callbacksDoneLatch = new CountDownLatch(numberOfCallbacks);
return submit(new Callable<String>() {
@Override
public String call() throws Exception {
inExecutionLatch.countDown();
assertOpenEventually(startLogicLatch);
executedLogic.countDown();
if (exception != null) {
throw exception;
}
return "success";
}
});
}
private void submitReleasingTask(final long millisToAwaitBeforeRelease) {
submit(new Runnable() {
@Override
public void run() {
sleepAtLeastMillis(millisToAwaitBeforeRelease);
releaseAwaitingTask();
}
});
}
private ICompletableFuture<String> submit(final Callable<String> callable) {
return executionService.asCompletableFuture(executionService.submit("default", callable));
}
private void submit(final Runnable runnable) {
executionService.submit("default", runnable);
}
private Integer expectedNumberOfCallbacks(int number) {
return number;
}
private void releaseAwaitingTask() {
startLogicLatch.countDown();
}
private void assertCallbacksExecutedEventually() {
assertOpenEventually(callbacksDoneLatch);
}
private void assertTaskExecutedItsLogic() {
assertOpenEventually(executedLogic);
}
private void assertTaskInterruptedAndDidNotExecuteItsLogic() {
assertEquals(1, executedLogic.getCount());
}
private void assertTaskFinishedEventually(final ICompletableFuture future) {
assertTrueEventually(new AssertTask() {
@Override
public void run()
throws Exception {
assertTrue(future.isDone());
}
});
}
private void assertTaskInExecution() {
assertOpenEventually(inExecutionLatch);
}
private ExecutionCallback<String> storeTaskResponseToReference(final AtomicReference<Object> ref) {
return new ExecutionCallback<String>() {
@Override
public void onResponse(String response) {
doit(response);
}
@Override
public void onFailure(Throwable t) {
doit(t);
}
private void doit(Object response) {
ref.set(response);
callbacksDoneLatch.countDown();
}
};
}
}
| apache-2.0 |
TNO/PhenotypeDatabase | web-app/js/studyView/studyView.js | 1492 | if( typeof( StudyView ) === "undefined" ) {
StudyView = {};
}
StudyView.initialize = function() {
attachHelpTooltips();
}
StudyView.initializePropertiesPage = function() {
// Initialize help tooltips
attachHelpTooltips();
}
StudyView.studyChildren = {
refresh: function( table ) {
if( table.length == 0 ) {
location.reload();
return;
}
$.each( table, function( idx, datatable ) {
$(datatable).dataTable().fnDraw();
});
},
initialize: function( entityMethods, title ) {
},
}
/**
* Handles adding and deleting subjects
*/
StudyView.subjects = {
// Reload data for the datatable
refresh: function() {
StudyView.studyChildren.refresh( $( "#subjects .dataTables_scrollBody .dataTable" ) )
},
initialize: function() {
StudyView.studyChildren.initialize( StudyView.subjects, "Add subject(s)" );
},
};
/**
* Handles adding and deleting samples
*/
StudyView.samples = {
// Reload data for the datatable
refresh: function() {
StudyView.studyChildren.refresh( $( "#samples .dataTables_scrollBody .dataTable" ) )
},
initialize: function() {
StudyView.studyChildren.initialize( StudyView.samples, "Add sample(s)" );
},
};
/**
* Handles adding and deleting assays
*/
StudyView.assays = {
// Reload data for the datatable
refresh: function() {
StudyView.studyChildren.refresh( $( "#assays .dataTables_scrollBody .dataTable" ) )
},
initialize: function() {
StudyView.studyChildren.initialize( StudyView.assays, "Add assay(s)" );
},
};
| apache-2.0 |
GeoinformationSystems/Time4Maps | WebContent/js/dojo-release-1.9.0/dojox/mobile/ComboBox.js | 4327 | //>>built
define("dojox/mobile/ComboBox",["dojo/_base/kernel","dojo/_base/declare","dojo/_base/lang","dojo/_base/window","dojo/dom-geometry","dojo/dom-style","dojo/dom-attr","dojo/window","dojo/touch","dijit/form/_AutoCompleterMixin","dijit/popup","./_ComboBoxMenu","./TextBox","./sniff"],function(_1,_2,_3,_4,_5,_6,_7,_8,_9,_a,_b,_c,_d,_e){
_1.experimental("dojox.mobile.ComboBox");
return _2("dojox.mobile.ComboBox",[_d,_a],{dropDownClass:"dojox.mobile._ComboBoxMenu",selectOnClick:false,autoComplete:false,dropDown:null,maxHeight:-1,dropDownPosition:["below","above"],_throttleOpenClose:function(){
if(this._throttleHandler){
this._throttleHandler.remove();
}
this._throttleHandler=this.defer(function(){
this._throttleHandler=null;
},500);
},_onFocus:function(){
this.inherited(arguments);
if(!this._opened&&!this._throttleHandler){
this._startSearchAll();
}
if(_e("windows-theme")){
this.domNode.blur();
}
},onInput:function(e){
this._onKey(e);
this.inherited(arguments);
},_setListAttr:function(v){
this._set("list",v);
},closeDropDown:function(){
this._throttleOpenClose();
if(this.endHandler){
this.disconnect(this.startHandler);
this.disconnect(this.endHandler);
this.disconnect(this.moveHandler);
clearInterval(this.repositionTimer);
this.repositionTimer=this.endHandler=null;
}
this.inherited(arguments);
_7.remove(this.domNode,"aria-owns");
_b.close(this.dropDown);
this._opened=false;
if(_e("windows-theme")&&this.domNode.disabled){
this.defer(function(){
this.domNode.removeAttribute("disabled");
},300);
}
},openDropDown:function(){
var _f=!this._opened;
var _10=this.dropDown,_11=_10.domNode,_12=this.domNode,_13=this;
_7.set(_10.domNode,"role","listbox");
if(_10.id){
_7.set(this.domNode,"aria-owns",_10.id);
}
if(_e("touch")){
_4.global.scrollBy(0,_5.position(_12,false).y);
}
if(!this._preparedNode){
this._preparedNode=true;
if(_11.style.width){
this._explicitDDWidth=true;
}
if(_11.style.height){
this._explicitDDHeight=true;
}
}
var _14={display:"",overflow:"hidden",visibility:"hidden"};
if(!this._explicitDDWidth){
_14.width="";
}
if(!this._explicitDDHeight){
_14.height="";
}
_6.set(_11,_14);
var _15=this.maxHeight;
if(_15==-1){
var _16=_8.getBox(),_17=_5.position(_12,false);
_15=Math.floor(Math.max(_17.y,_16.h-(_17.y+_17.h)));
}
_b.moveOffScreen(_10);
if(_10.startup&&!_10._started){
_10.startup();
}
var mb=_5.position(this.dropDown.containerNode,false);
var _18=(_15&&mb.h>_15);
if(_18){
mb.h=_15;
}
mb.w=Math.max(mb.w,_12.offsetWidth);
_5.setMarginBox(_11,mb);
var _19=_b.open({parent:this,popup:_10,around:_12,orient:_e("windows-theme")?["above"]:this.dropDownPosition,onExecute:function(){
_13.closeDropDown();
},onCancel:function(){
_13.closeDropDown();
},onClose:function(){
_13._opened=false;
}});
this._opened=true;
if(_f){
var _1a=false,_1b=false,_1c=false,_1d=_10.domNode.parentNode,_1e=_5.position(_12,false),_1f=_5.position(_1d,false),_20=_1f.x-_1e.x,_21=_1f.y-_1e.y,_22=-1,_23=-1;
this.startHandler=this.connect(_4.doc.documentElement,_9.press,function(e){
_1b=true;
_1c=true;
_1a=false;
_22=e.clientX;
_23=e.clientY;
});
this.moveHandler=this.connect(_4.doc.documentElement,_9.move,function(e){
_1b=true;
if(e.touches){
_1c=_1a=true;
}else{
if(_1c&&(e.clientX!=_22||e.clientY!=_23)){
_1a=true;
}
}
});
this.clickHandler=this.connect(_10.domNode,"onclick",function(){
_1b=true;
_1c=_1a=false;
});
this.endHandler=this.connect(_4.doc.documentElement,"onmouseup",function(){
this.defer(function(){
_1b=true;
if(!_1a&&_1c){
this.closeDropDown();
}
_1c=false;
});
});
this.repositionTimer=setInterval(_3.hitch(this,function(){
if(_1b){
_1b=false;
return;
}
var _24=_5.position(_12,false),_25=_5.position(_1d,false),_26=_25.x-_24.x,_27=_25.y-_24.y;
if(Math.abs(_26-_20)>=1||Math.abs(_27-_21)>=1){
_6.set(_1d,{left:parseInt(_6.get(_1d,"left"))+_20-_26+"px",top:parseInt(_6.get(_1d,"top"))+_21-_27+"px"});
}
}),50);
}
if(_e("windows-theme")){
this.domNode.setAttribute("disabled",true);
}
return _19;
},postCreate:function(){
this.inherited(arguments);
this.connect(this.domNode,"onclick","_onClick");
_7.set(this.domNode,"role","combobox");
},destroy:function(){
if(this.repositionTimer){
clearInterval(this.repositionTimer);
}
this.inherited(arguments);
},_onClick:function(e){
if(!this._throttleHandler){
if(this.opened){
this.closeDropDown();
}else{
this._startSearchAll();
}
}
}});
});
| apache-2.0 |
google/traceur-compiler | test/feature/ArrowFunctions/InDerivedClassConstructor.js | 247 | class Base {
bar() {
return 1;
}
}
class Derived extends Base {
constructor() {
super()
this.foo = () => {
return this.bar();
};
}
bar() {
return 2;
}
}
let d = new Derived();
assert.equal(2, (0, d).foo());
| apache-2.0 |
nikitamarchenko/open-kilda | services/src/messaging/src/test/java/org/openkilda/messaging/AbstractSerializer.java | 843 | /* Copyright 2017 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.messaging;
import java.io.IOException;
/**
 * Contract for serialization round-trip helpers used by the messaging tests.
 * <p>
 * Implementations encode objects via {@link #serialize(Object)} and read them
 * back via {@link #deserialize()}; where the bytes are kept in between is
 * implementation specific (not visible from this interface).
 */
public interface AbstractSerializer {
    /**
     * Reads back and reconstructs an object previously written with
     * {@link #serialize(Object)}.
     *
     * @return the deserialized object
     * @throws IOException            on a read failure
     * @throws ClassNotFoundException if the serialized class cannot be resolved
     */
    Object deserialize() throws IOException, ClassNotFoundException;

    /**
     * Encodes and stores the given object.
     *
     * @param obj the object to serialize
     * @throws IOException on a write failure
     */
    void serialize(Object obj) throws IOException;
}
| apache-2.0 |
IvanGurtler/aerogear-unifiedpush-server | jaxrs/src/main/java/org/jboss/aerogear/unifiedpush/rest/registry/applications/AdmVariantEndpoint.java | 4371 | /**
* JBoss, Home of Professional Open Source
* Copyright Red Hat, Inc., and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.aerogear.unifiedpush.rest.registry.applications;
import org.jboss.aerogear.unifiedpush.api.AdmVariant;
import org.jboss.aerogear.unifiedpush.api.PushApplication;
import javax.validation.ConstraintViolationException;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
@Path("/applications/{pushAppID}/adm")
public class AdmVariantEndpoint extends AbstractVariantEndpoint {
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response registerAdmVariant(
AdmVariant admVariant,
@PathParam("pushAppID") String pushApplicationID,
@Context UriInfo uriInfo) {
// find the root push app
PushApplication pushApp = getSearch().findByPushApplicationIDForDeveloper(pushApplicationID);
if (pushApp == null) {
return Response.status(Response.Status.NOT_FOUND).entity("Could not find requested PushApplicationEntity").build();
}
// some validation
try {
validateModelClass(admVariant);
} catch (ConstraintViolationException cve) {
// Build and return the 400 (Bad Request) response
Response.ResponseBuilder builder = createBadRequestResponse(cve.getConstraintViolations());
return builder.build();
}
// store the Adm variant:
variantService.addVariant(admVariant);
// add Adm variant, and merge:
pushAppService.addVariant(pushApp, admVariant);
return Response.created(uriInfo.getAbsolutePathBuilder().path(String.valueOf(admVariant.getVariantID())).build()).entity(admVariant).build();
}
// READ
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response listAllAdmVariationsForPushApp(@PathParam("pushAppID") String pushApplicationID) {
final PushApplication application = getSearch().findByPushApplicationIDForDeveloper(pushApplicationID);
return Response.ok(getVariantsByType(application, AdmVariant.class)).build();
}
// UPDATE
@PUT
@Path("/{admID}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response updateAndroidVariation(
@PathParam("pushAppID") String id,
@PathParam("admID") String androidID,
AdmVariant updatedAdmApplication) {
AdmVariant admVariant = (AdmVariant) variantService.findByVariantID(androidID);
if (admVariant != null) {
// some validation
try {
validateModelClass(updatedAdmApplication);
} catch (ConstraintViolationException cve) {
// Build and return the 400 (Bad Request) response
Response.ResponseBuilder builder = createBadRequestResponse(cve.getConstraintViolations());
return builder.build();
}
// apply updated data:
admVariant.setClientId(updatedAdmApplication.getClientId());
admVariant.setClientSecret(updatedAdmApplication.getClientSecret());
admVariant.setName(updatedAdmApplication.getName());
admVariant.setDescription(updatedAdmApplication.getDescription());
variantService.updateVariant(admVariant);
return Response.noContent().build();
}
return Response.status(Response.Status.NOT_FOUND).entity("Could not find requested Variant").build();
}
}
| apache-2.0 |
nhuntwalker/astroML | book_figures/chapter9/fig_bayes_DB_2d.py | 2283 | """
2D Bayes Decision Boundary
--------------------------
Plot a schematic of a two-dimensional decision boundary
"""
# Author: Jake VanderPlas
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.patches import Ellipse
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX. This may
# result in an error if LaTeX is not installed on your system. In that case,
# you can set usetex to False.
from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)
#------------------------------------------------------------
# Set up diagram
mu1 = (0.25, 0.25)
mu2 = (0.85, 0.7)
sigma1 = (0.5, 0.5)
sigma2 = (0.25, 0.5)
y_boundary = np.linspace(-0.1, 1.1, 100)
x_boundary = (0.5 + 0.4 * (y_boundary - 0.9) ** 2)
#------------------------------------------------------------
# Set up plot
fig = plt.figure(figsize=(5, 5), facecolor='w')
ax = fig.add_axes([0, 0, 1, 1], frameon=False, xticks=[], yticks=[])
# draw axes
plt.annotate(r'$x_1$', (-0.08, -0.02), (1.05, -0.02),
ha='center', va='center',
arrowprops=dict(arrowstyle='<-', color='k'))
plt.annotate(r'$x_2$', (-0.02, -0.08), (-0.02, 1.05),
ha='center', va='center',
arrowprops=dict(arrowstyle='<-', color='k'))
# draw ellipses, points, and boundaries
ax.scatter(mu1[:1], mu1[1:], c='k')
ax.scatter(mu2[:1], mu2[1:], c='k')
ax.add_patch(Ellipse(mu1, sigma1[0], sigma1[1], fc='none', ec='k'))
ax.add_patch(Ellipse(mu2, sigma2[0], sigma2[1], fc='none', ec='k'))
ax.text(mu1[0] + 0.02, mu1[1] + 0.02, r'$\mu_1$')
ax.text(mu2[0] + 0.02, mu2[1] + 0.02, r'$\mu_2$')
ax.plot(x_boundary, y_boundary, '--k')
ax.text(0.53, 0.28, "decision boundary", rotation=-70,
ha='left', va='bottom')
ax.set_xlim(-0.1, 1.1)
ax.set_ylim(-0.1, 1.1)
plt.show()
| bsd-2-clause |
otzy007/homebrew-cask | Casks/sigil.rb | 302 | class Sigil < Cask
version '0.8.0'
sha256 'cafe9a942f787d4588445bf9fc5cbbd62531724e955cb7eec2f714a745fa82b3'
url "https://github.com/user-none/Sigil/releases/download/#{version}/Sigil-#{version}-Mac-Package.dmg"
homepage 'http://code.google.com/p/sigil/'
license :oss
app 'Sigil.app'
end
| bsd-2-clause |
sebastienros/jint | Jint.Tests.Test262/test/built-ins/Object/defineProperty/15.2.3.6-4-144.js | 519 | // Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es5id: 15.2.3.6-4-144
description: >
Object.defineProperty - 'O' is an Array, 'name' is the length
property of 'O', test the [[Value]] field of 'desc' is a string
containing a number with leading zeros (15.4.5.1 step 3.c)
---*/
var arrObj = [];
Object.defineProperty(arrObj, "length", {
value: "0002.0"
});
assert.sameValue(arrObj.length, 2, 'arrObj.length');
| bsd-2-clause |
nathany/get-programming-with-go | lesson03/launch/launch.go | 327 | package main
import (
"fmt"
"math/rand"
"time"
)
// main counts down from 10, printing one number per second. Each tick has a
// 1-in-100 chance of aborting; reaching zero means liftoff.
func main() {
	// Seed the shared PRNG so each run gets a different abort sequence.
	rand.Seed(time.Now().UnixNano())

	count := 10
	for ; count > 0; count-- {
		fmt.Println(count)
		time.Sleep(time.Second)
		if rand.Intn(100) == 0 {
			// Abort: leave the loop with count still positive.
			break
		}
	}

	if count == 0 {
		fmt.Println("Liftoff!")
	} else {
		fmt.Println("Launch failed.")
	}
}
| bsd-2-clause |
nandub/homebrew-core | Formula/quvi.rb | 1745 | class Quvi < Formula
desc "Parse video download URLs"
homepage "https://quvi.sourceforge.io/"
url "https://downloads.sourceforge.net/project/quvi/0.4/quvi/quvi-0.4.2.tar.bz2"
sha256 "1f4e40c14373cb3d358ae1b14a427625774fd09a366b6da0c97d94cb1ff733c3"
license "LGPL-2.1"
livecheck do
url :stable
regex(%r{url=.*?/quvi[._-]v?(\d+(?:\.\d+)+)\.t}i)
end
bottle do
sha256 cellar: :any, arm64_big_sur: "195d1401be4ab2b454d97e611163251bb4ed1986cab9c39b089268969fe67ff1"
sha256 cellar: :any, big_sur: "1b3252441e8eac802fcd016b09149004b86288c79916e2204be210478af2e185"
sha256 cellar: :any, catalina: "4dd1859cd18aa0e4bdf2286c31dc80c74d572b8d3b3dd7cea89c9042ec73ac23"
sha256 cellar: :any, mojave: "403d1157a64341c76067353225c6acbe1c0f3e9c0b69634ed80f0bb6400c4c7c"
sha256 cellar: :any, high_sierra: "10fe26a54bcdf8e33e9798b399a3a72e8b571c9668e4398a3f8d1a7952f9c652"
sha256 cellar: :any, sierra: "9e3b86dff84297edec9c63ff1593136c2ce62e8a9f8d523e9d9137943da939bb"
sha256 cellar: :any, el_capitan: "c5a8c9b53432e15b4ec31a9c1374bde130d56f73f8ee43e392917a52f34ab945"
sha256 cellar: :any, yosemite: "944922426376a9962bb90f032e02ef2404d3155ed3bba81a0b4d349ba1f1aec8"
sha256 cellar: :any_skip_relocation, x86_64_linux: "bde81ca6c65e967a5f18c0552cbe4823bc393e5e943c24d809f55dbefc6ea59d"
end
depends_on "pkg-config" => :build
depends_on "libquvi"
def install
system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}"
system "make", "install"
end
test do
system "#{bin}/quvi", "--version"
end
end
| bsd-2-clause |
sebastienros/jint | Jint.Tests.Test262/test/built-ins/Object/defineProperty/15.2.3.6-4-483.js | 997 | // Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es5id: 15.2.3.6-4-483
description: >
ES5 Attributes - success to update [[Configurable]] attribute of
accessor property ([[Get]] is undefined, [[Set]] is a Function,
[[Enumerable]] is false, [[Configurable]] is true) to different
value
includes: [propertyHelper.js]
---*/
var obj = {};
var verifySetFunc = "data";
var setFunc = function(value) {
verifySetFunc = value;
};
Object.defineProperty(obj, "prop", {
get: undefined,
set: setFunc,
enumerable: false,
configurable: true
});
var desc1 = Object.getOwnPropertyDescriptor(obj, "prop");
Object.defineProperty(obj, "prop", {
configurable: false
});
var desc2 = Object.getOwnPropertyDescriptor(obj, "prop");
assert.sameValue(desc1.configurable, true);
assert.sameValue(desc2.configurable, false);
verifyNotConfigurable(obj, "prop");
assert(obj.hasOwnProperty("prop"));
| bsd-2-clause |
ondrejvelisek/perun | perun-core/src/main/java/cz/metacentrum/perun/core/impl/modules/attributes/urn_perun_facility_attribute_def_def_uid_namespace.java | 2888 | package cz.metacentrum.perun.core.impl.modules.attributes;
import java.util.ArrayList;
import java.util.List;
import cz.metacentrum.perun.core.api.Attribute;
import cz.metacentrum.perun.core.api.AttributeDefinition;
import cz.metacentrum.perun.core.api.AttributesManager;
import cz.metacentrum.perun.core.api.Facility;
import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.ConsistencyErrorException;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException;
import cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException;
import cz.metacentrum.perun.core.impl.PerunSessionImpl;
import cz.metacentrum.perun.core.implApi.modules.attributes.FacilityAttributesModuleAbstract;
import cz.metacentrum.perun.core.implApi.modules.attributes.FacilityAttributesModuleImplApi;
/**
* Checks and fills at specified facility uid namespace.
*
* @date 22.4.2011 10:43:48
* @author Lukáš Pravda <luky.pravda@gmail.com>
*/
/**
 * Attribute module for the facility attribute <i>uid-namespace</i>: validates
 * and fills the UID namespace assigned to a facility.
 */
public class urn_perun_facility_attribute_def_def_uid_namespace extends FacilityAttributesModuleAbstract implements FacilityAttributesModuleImplApi {

	/**
	 * Checks that a namespace value is present and that the corresponding
	 * per-user attribute definition u:uid-namespace:[namespace] exists.
	 */
	@Override
	public void checkAttributeValue(PerunSessionImpl session, Facility facility, Attribute attribute) throws WrongAttributeValueException, WrongReferenceAttributeValueException, InternalErrorException, WrongAttributeAssignmentException {
		if (attribute.getValue() == null) {
			throw new WrongAttributeValueException(attribute, facility, "Missing uid namespace for facility.");
		}

		// Name of the user attribute definition this facility namespace refers to.
		String userUidNamespaceAttrName =
				AttributesManager.NS_USER_ATTR_DEF + ":" + attribute.getFriendlyName() + ":" + (String) attribute.getValue();

		try {
			session.getPerunBl().getAttributesManagerBl().getAttributeDefinition(session, userUidNamespaceAttrName);
		} catch (AttributeNotExistsException ex) {
			throw new ConsistencyErrorException("Attribute " + userUidNamespaceAttrName + " doesn't exists");
		}
	}

	/** A facility uid-namespace has no sensible default, so an empty attribute is returned. */
	@Override
	public Attribute fillAttribute(PerunSessionImpl session, Facility facility, AttributeDefinition attribute) throws InternalErrorException, WrongAttributeAssignmentException {
		return new Attribute(attribute);
	}

	/** Builds the definition describing this module's attribute. */
	public AttributeDefinition getAttributeDefinition() {
		AttributeDefinition attrDef = new AttributeDefinition();
		attrDef.setNamespace(AttributesManager.NS_FACILITY_ATTR_DEF);
		attrDef.setFriendlyName("uid-namespace");
		attrDef.setDisplayName("UID namespace");
		attrDef.setType(String.class.getName());
		attrDef.setDescription("Namespace for UIDs which can be used at specific facility.");
		return attrDef;
	}
}
| bsd-2-clause |
mstorsjo/openh264 | test/api/decode_api_test.cpp | 47569 | #include <gtest/gtest.h>
#include "codec_def.h"
#include "utils/BufferedData.h"
#include "utils/FileInputStream.h"
#include "BaseDecoderTest.h"
#include "BaseEncoderTest.h"
#include "wels_common_defs.h"
#include "utils/HashFunctions.h"
#include <string>
#include <vector>
#include "encode_decode_api_test.h"
using namespace WelsCommon;
// Trace callback handed to the codec: asserts that no log message arrives at a
// verbosity level above the target level stored in the STraceUnit context.
// The message text itself is ignored.
static void TestOutPutTrace (void* ctx, int level, const char* string) {
  STraceUnit* pTraceUnit = (STraceUnit*) ctx;
  EXPECT_LE (level, pTraceUnit->iTarLevel);
}
// Exercises DECODER_OPTION_VCL_NAL: after the parse-only DecodeFrame2 call no
// picture has been reconstructed yet (FEEDBACK_UNKNOWN_NAL); after the
// reconstruction call the decoder must report FEEDBACK_VCL_NAL.
TEST_P (EncodeDecodeTestAPI, DecoderVclNal) {
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (1, p.slicenum, p.width, p.height, p.frameRate, &param_);
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  // Silence encoder/decoder logging for the loop below.
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  int iIdx = 0;
  while (iIdx <= p.numframes) {
    EncodeOneFrame (0);
    //decoding after each encoding frame
    int vclNal, len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    // First pass: feed the bitstream (parse only, no output picture yet).
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    ASSERT_TRUE (rv == cmResultSuccess);
    rv = decoder_->GetOption (DECODER_OPTION_VCL_NAL, &vclNal);
    EXPECT_EQ (vclNal, FEEDBACK_UNKNOWN_NAL); //no reconstruction, unknown return
    // Second pass with NULL input triggers reconstruction of the pending frame.
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    ASSERT_TRUE (rv == cmResultSuccess);
    rv = decoder_->GetOption (DECODER_OPTION_VCL_NAL, &vclNal);
    EXPECT_EQ (vclNal, FEEDBACK_VCL_NAL);
    iIdx++;
  } //while
  //ignore last frame
}
// Exercises DECODER_OPTION_FRAME_NUM: before any reconstruction the decoder
// reports -1; after each reconstructed frame it must match the running count
// of encoded frames.
TEST_P (EncodeDecodeTestAPI, GetOptionFramenum) {
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (1, p.slicenum, p.width, p.height, p.frameRate, &param_);
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  // Silence encoder/decoder logging for the loop below.
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  // Starts at -1 so that after the first reconstructed frame it equals 0.
  int32_t iEncFrameNum = -1;
  int32_t iDecFrameNum;
  int iIdx = 0;
  while (iIdx <= p.numframes) {
    EncodeOneFrame (0);
    //decoding after each encoding frame
    int len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    // Parse-only pass: no picture reconstructed, so the frame number is still -1.
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    ASSERT_TRUE (rv == cmResultSuccess);
    decoder_->GetOption (DECODER_OPTION_FRAME_NUM, &iDecFrameNum);
    EXPECT_EQ (iDecFrameNum, -1);
    iEncFrameNum++;
    // Reconstruction pass: the decoder-side frame number must now match.
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    ASSERT_TRUE (rv == cmResultSuccess);
    decoder_->GetOption (DECODER_OPTION_FRAME_NUM, &iDecFrameNum);
    EXPECT_EQ (iEncFrameNum, iDecFrameNum);
    iIdx++;
  } //while
  //ignore last frame
}
// Exercises DECODER_OPTION_IDR_PIC_ID under randomly changing IDR interval and
// SPS/PPS id strategy: the decoder-reported IDR picture id must track the
// number of IDR frames the encoder emitted, both after the parse-only pass and
// after reconstruction.
TEST_P (EncodeDecodeTestAPI, GetOptionIDR) {
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (1, p.slicenum, p.width, p.height, p.frameRate, &param_);
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  //init for encoder
  // I420: 1(Y) + 1/4(U) + 1/4(V)
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  // Running count of IDR frames produced by the encoder.
  int32_t iEncCurIdrPicId = 0;
  int32_t iDecCurIdrPicId;
  int32_t iIDRPeriod = 1;
  int32_t iSpsPpsIdAddition = 0;
  int iIdx = 0;
  while (iIdx <= p.numframes) {
    // Randomize per frame: strategy 0/1 only (the current strategy supports
    // more than 2 modes, but the switch between the modes>2 is not allowed),
    // IDR period in [1, 150].
    iSpsPpsIdAddition = rand() % 2;
    iIDRPeriod = (rand() % 150) + 1;
    encoder_->SetOption (ENCODER_OPTION_IDR_INTERVAL, &iIDRPeriod);
    encoder_->SetOption (ENCODER_OPTION_SPS_PPS_ID_STRATEGY, &iSpsPpsIdAddition);
    EncodeOneFrame (0);
    if (info.eFrameType == videoFrameTypeIDR) {
      iEncCurIdrPicId = iEncCurIdrPicId + 1;
    }
    //decoding after each encoding frame
    int len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    // Parse-only pass already updates the IDR picture id.
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    ASSERT_TRUE (rv == cmResultSuccess);
    decoder_->GetOption (DECODER_OPTION_IDR_PIC_ID, &iDecCurIdrPicId);
    EXPECT_EQ (iDecCurIdrPicId, iEncCurIdrPicId);
    // Reconstruction pass: the reported id must be unchanged.
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    ASSERT_TRUE (rv == cmResultSuccess);
    decoder_->GetOption (DECODER_OPTION_IDR_PIC_ID, &iDecCurIdrPicId);
    EXPECT_EQ (iDecCurIdrPicId, iEncCurIdrPicId);
    iIdx++;
  } //while
  //ignore last frame
}
// Verifies timestamp pass-through: the timestamp supplied with the input
// bitstream (uiInBsTimeStamp) must come back unchanged on the reconstructed
// picture (uiOutYuvTimeStamp) whenever a picture is output.
// Fix vs. original: removed the uninitialized, never-used local
// `iSkipedBytes` (it existed only to be cast to void).
TEST_P (EncodeDecodeTestAPI, InOutTimeStamp) {
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (1, p.slicenum, p.width, p.height, p.frameRate, &param_);
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  int32_t iSpsPpsIdAddition = 1;
  encoder_->SetOption (ENCODER_OPTION_SPS_PPS_ID_STRATEGY, &iSpsPpsIdAddition);
  int32_t iIDRPeriod = 60;
  encoder_->SetOption (ENCODER_OPTION_IDR_INTERVAL, &iIDRPeriod);
  // Enable LTR with one long-term reference; marking period of 2 frames.
  SLTRConfig sLtrConfigVal;
  sLtrConfigVal.bEnableLongTermReference = 1;
  sLtrConfigVal.iLTRRefNum = 1;
  encoder_->SetOption (ENCODER_OPTION_LTR, &sLtrConfigVal);
  int32_t iLtrPeriod = 2;
  encoder_->SetOption (ENCODER_LTR_MARKING_PERIOD, &iLtrPeriod);
  int iIdx = 0;
  unsigned long long uiEncTimeStamp = 100; // arbitrary, strictly increasing
  while (iIdx <= p.numframes) {
    EncodeOneFrame (1);
    //decoding after each encoding frame
    int len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    uint32_t uiEcIdc = ERROR_CON_SLICE_COPY_CROSS_IDR_FREEZE_RES_CHANGE;
    decoder_->SetOption (DECODER_OPTION_ERROR_CON_IDC, &uiEcIdc);
    // The input timestamp must be set again after each memset of the
    // buffer-info struct, for both the data-feed and reconstruction passes.
    dstBufInfo_.uiInBsTimeStamp = uiEncTimeStamp;
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    dstBufInfo_.uiInBsTimeStamp = uiEncTimeStamp;
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    if (dstBufInfo_.iBufferStatus == 1) { // a picture was output this call
      EXPECT_EQ (uiEncTimeStamp, dstBufInfo_.uiOutYuvTimeStamp);
    }
    iIdx++;
    uiEncTimeStamp++;
  }
  // NOTE(review): decode return codes are intentionally unchecked here;
  // only the timestamp round-trip is under test.
}
// DECODER_OPTION_IS_REF_PIC: must report -1 before any picture has been
// output (including right after the data-feed pass) and a non-negative
// value once a reconstructed picture is available.
// Fix vs. original: removed the uninitialized, never-used local
// `iSkipedBytes` (it existed only to be cast to void).
TEST_P (EncodeDecodeTestAPI, GetOptionIsRefPic) {
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (1, p.slicenum, p.width, p.height, p.frameRate, &param_);
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  int iIdx = 0;
  int iIsRefPic;
  // Before any decoding the option must report "no picture" (-1).
  decoder_->GetOption (DECODER_OPTION_IS_REF_PIC, &iIsRefPic);
  ASSERT_EQ (iIsRefPic, -1);
  while (iIdx <= p.numframes) {
    EncodeOneFrame (1);
    //decoding after each encoding frame
    int len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    // The data-feed pass outputs no picture, so the option stays -1.
    decoder_->GetOption (DECODER_OPTION_IS_REF_PIC, &iIsRefPic);
    ASSERT_EQ (iIsRefPic, -1);
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    if (dstBufInfo_.iBufferStatus == 1) { // picture output: 0/1 expected
      decoder_->GetOption (DECODER_OPTION_IS_REF_PIC, &iIsRefPic);
      ASSERT_TRUE (iIsRefPic >= 0);
    }
    iIdx++;
  }
}
// Temporal-id retrieval from a plain AVC stream WITHOUT prefix NAL units,
// under simulated NAL loss.  With no prefix NAL carrying the temporal id,
// the test expects DECODER_OPTION_TEMPORAL_ID to be either -1 (no picture)
// or 0 whenever it is defined, before and after reconstruction.
TEST_P (EncodeDecodeTestAPI, GetOptionTid_AVC_NOPREFIX) {
  SLTRMarkingFeedback m_LTR_Marking_Feedback;
  SLTRRecoverRequest m_LTR_Recover_Request;
  m_LTR_Recover_Request.uiIDRPicId = 0;
  m_LTR_Recover_Request.iLayerId = 0;
  m_LTR_Marking_Feedback.iLayerId = 0;
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (1, p.slicenum, p.width, p.height, p.frameRate, &param_);
  param_.bPrefixNalAddingCtrl = false;   // key setting for this test: no prefix NALs
  param_.iTemporalLayerNum = (rand() % 4) + 1; // 1..4 temporal layers
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  m_LTR_Recover_Request.uiFeedbackType = NO_RECOVERY_REQUSET;
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  int32_t iSpsPpsIdAddition = 1;
  encoder_->SetOption (ENCODER_OPTION_SPS_PPS_ID_STRATEGY, &iSpsPpsIdAddition);
  int32_t iIDRPeriod = 60;
  encoder_->SetOption (ENCODER_OPTION_IDR_INTERVAL, &iIDRPeriod);
  SLTRConfig sLtrConfigVal;
  sLtrConfigVal.bEnableLongTermReference = 1;
  sLtrConfigVal.iLTRRefNum = 1;
  encoder_->SetOption (ENCODER_OPTION_LTR, &sLtrConfigVal);
  int32_t iLtrPeriod = 2;
  encoder_->SetOption (ENCODER_LTR_MARKING_PERIOD, &iLtrPeriod);
  int iIdx = 0;
  int iLossIdx = 0;
  bool bVCLLoss = false;
  while (iIdx <= p.numframes) {
    EncodeOneFrame (1);
    // After an IDR recovery request the encoder is expected to emit an IDR.
    if (m_LTR_Recover_Request.uiFeedbackType == IDR_RECOVERY_REQUEST) {
      ASSERT_TRUE (info.eFrameType == videoFrameTypeIDR);
    }
    //decoding after each encoding frame
    int len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    SimulateNALLoss (info.sLayerInfo[0].pBsBuf, len, &m_SLostSim, p.pLossSequence, p.bLostPara, iLossIdx, bVCLLoss);
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    int iTid = -1;
    decoder_->GetOption (DECODER_OPTION_TEMPORAL_ID, &iTid);
    if (iTid != -1) {
      ASSERT_EQ (iTid, 0); // no prefix NAL => temporal id reported as 0
    }
    m_LTR_Recover_Request.uiFeedbackType = NO_RECOVERY_REQUSET;
    LTRRecoveryRequest (decoder_, encoder_, &m_LTR_Recover_Request, rv, true);
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    decoder_->GetOption (DECODER_OPTION_TEMPORAL_ID, &iTid);
    // Walk the loss-simulation records looking for any surviving VCL NAL.
    // NOTE(review): bHasVCL is computed but deliberately unused below.
    std::vector<SLostSim>::iterator iter = m_SLostSim.begin();
    bool bHasVCL = false;
    for (unsigned int k = 0; k < m_SLostSim.size(); k++) {
      if (IS_VCL_NAL (iter->eNalType, 0) && iter->isLost == false) {
        bHasVCL = true;
        break;
      }
      iter++;
    }
    (void) bHasVCL;
    if (iTid != -1) {
      ASSERT_EQ (iTid, 0);
    }
    LTRRecoveryRequest (decoder_, encoder_, &m_LTR_Recover_Request, rv, true);
    LTRMarkFeedback (decoder_, encoder_, &m_LTR_Marking_Feedback, rv);
    iIdx++;
  }
}
// Temporal-id retrieval with prefix NAL units enabled (single spatial layer,
// no simulated loss): the option must stay -1 after the data-feed pass and
// match the encoder-side temporal id after reconstruction.
TEST_P (EncodeDecodeTestAPI, GetOptionTid_AVC_WITH_PREFIX_NOLOSS) {
  SLTRMarkingFeedback sLtrMarkFeedback;
  SLTRRecoverRequest sLtrRecoverRequest;
  sLtrRecoverRequest.uiIDRPicId = 0;
  sLtrRecoverRequest.iLayerId = 0;
  sLtrMarkFeedback.iLayerId = 0;
  EncodeDecodeFileParamBase fileParam = GetParam();
  prepareParamDefault (1, fileParam.slicenum, fileParam.width, fileParam.height, fileParam.frameRate, &param_);
  param_.bPrefixNalAddingCtrl = true;          // key setting: prefix NALs on
  param_.iTemporalLayerNum = (rand() % 4) + 1; // 1..4 temporal layers
  param_.iSpatialLayerNum = 1;
  encoder_->Uninitialize();
  int iRet = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (iRet == cmResultSuccess);
  sLtrRecoverRequest.uiFeedbackType = NO_RECOVERY_REQUSET;
  ASSERT_TRUE (InitialEncDec (fileParam.width, fileParam.height));
  int32_t iLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iLevel);
  int32_t iIdStrategy = 1;
  encoder_->SetOption (ENCODER_OPTION_SPS_PPS_ID_STRATEGY, &iIdStrategy);
  int32_t iIdrInterval = 60;
  encoder_->SetOption (ENCODER_OPTION_IDR_INTERVAL, &iIdrInterval);
  SLTRConfig sLtrConfig;
  sLtrConfig.bEnableLongTermReference = 1;
  sLtrConfig.iLTRRefNum = 1;
  encoder_->SetOption (ENCODER_OPTION_LTR, &sLtrConfig);
  int32_t iMarkPeriod = 2;
  encoder_->SetOption (ENCODER_LTR_MARKING_PERIOD, &iMarkPeriod);
  for (int iFrameIdx = 0; iFrameIdx <= fileParam.numframes; ++iFrameIdx) {
    EncodeOneFrame (1);
    if (sLtrRecoverRequest.uiFeedbackType == IDR_RECOVERY_REQUEST) {
      ASSERT_TRUE (info.eFrameType == videoFrameTypeIDR);
    }
    // Decode immediately after encoding each frame.
    int iBsLen = 0;
    encToDecData (info, iBsLen);
    unsigned char* pDst[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    ExtractDidNal (&info, iBsLen, &m_SLostSim, 0);
    iRet = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, iBsLen, pDst, &dstBufInfo_);
    int iTemporalId = -1;
    decoder_->GetOption (DECODER_OPTION_TEMPORAL_ID, &iTemporalId);
    ASSERT_EQ (iTemporalId, -1); // no picture yet after the data-feed pass
    sLtrRecoverRequest.uiFeedbackType = NO_RECOVERY_REQUSET;
    LTRRecoveryRequest (decoder_, encoder_, &sLtrRecoverRequest, iRet, true);
    iRet = decoder_->DecodeFrame2 (NULL, 0, pDst, &dstBufInfo_); // reconstruction pass
    decoder_->GetOption (DECODER_OPTION_TEMPORAL_ID, &iTemporalId);
    ASSERT_EQ (iTemporalId, info.sLayerInfo[0].uiTemporalId);
    LTRRecoveryRequest (decoder_, encoder_, &sLtrRecoverRequest, iRet, true);
    LTRMarkFeedback (decoder_, encoder_, &sLtrMarkFeedback, iRet);
  }
}
// Temporal-id retrieval from a 2-spatial-layer SVC stream (target did = 1),
// no simulated loss: -1 after the data-feed pass, and equal to the
// encoder-side temporal id after reconstruction.
TEST_P (EncodeDecodeTestAPI, GetOptionTid_SVC_L1_NOLOSS) {
  SLTRMarkingFeedback m_LTR_Marking_Feedback;
  SLTRRecoverRequest m_LTR_Recover_Request;
  m_LTR_Recover_Request.uiIDRPicId = 0;
  m_LTR_Recover_Request.iLayerId = 0;
  m_LTR_Marking_Feedback.iLayerId = 0;
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (2, p.slicenum, p.width, p.height, p.frameRate, &param_);
  param_.iTemporalLayerNum = (rand() % 4) + 1; // 1..4 temporal layers
  param_.iSpatialLayerNum = 2;                 // key setting for this test
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  m_LTR_Recover_Request.uiFeedbackType = NO_RECOVERY_REQUSET;
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  int32_t iSpsPpsIdAddition = 1;
  encoder_->SetOption (ENCODER_OPTION_SPS_PPS_ID_STRATEGY, &iSpsPpsIdAddition);
  int32_t iIDRPeriod = 60;
  encoder_->SetOption (ENCODER_OPTION_IDR_INTERVAL, &iIDRPeriod);
  SLTRConfig sLtrConfigVal;
  sLtrConfigVal.bEnableLongTermReference = 1;
  sLtrConfigVal.iLTRRefNum = 1;
  encoder_->SetOption (ENCODER_OPTION_LTR, &sLtrConfigVal);
  int32_t iLtrPeriod = 2;
  encoder_->SetOption (ENCODER_LTR_MARKING_PERIOD, &iLtrPeriod);
  int iIdx = 0;
  while (iIdx <= p.numframes) {
    EncodeOneFrame (1);
    if (m_LTR_Recover_Request.uiFeedbackType == IDR_RECOVERY_REQUEST) {
      ASSERT_TRUE (info.eFrameType == videoFrameTypeIDR);
    }
    //decoding after each encoding frame
    int len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    // Extract the NALs of spatial layer 1 before decoding
    // (presumably a did==1 filter - see ExtractDidNal for exact semantics).
    ExtractDidNal (&info, len, &m_SLostSim, 1);
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    int iTid = -1;
    decoder_->GetOption (DECODER_OPTION_TEMPORAL_ID, &iTid);
    ASSERT_EQ (iTid, -1); // no picture yet after the data-feed pass
    m_LTR_Recover_Request.uiFeedbackType = NO_RECOVERY_REQUSET;
    LTRRecoveryRequest (decoder_, encoder_, &m_LTR_Recover_Request, rv, true);
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    decoder_->GetOption (DECODER_OPTION_TEMPORAL_ID, &iTid);
    ASSERT_EQ (iTid, info.sLayerInfo[0].uiTemporalId);
    LTRRecoveryRequest (decoder_, encoder_, &m_LTR_Recover_Request, rv, true);
    LTRMarkFeedback (decoder_, encoder_, &m_LTR_Marking_Feedback, rv);
    iIdx++;
  }
}
// Exercises the trace-callback options: installs TestOutPutTrace (file-level
// callback) plus its context on both encoder and decoder, then randomizes
// the trace level every frame while running an encode/decode loop with
// simulated NAL loss.  Passes if nothing crashes/asserts.
TEST_P (EncodeDecodeTestAPI, SetOption_Trace) {
  SLTRMarkingFeedback m_LTR_Marking_Feedback;
  SLTRRecoverRequest m_LTR_Recover_Request;
  m_LTR_Recover_Request.uiIDRPicId = 0;
  m_LTR_Recover_Request.iLayerId = 0;
  m_LTR_Marking_Feedback.iLayerId = 0;
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (1, p.slicenum, p.width, p.height, p.frameRate, &param_);
  param_.iSpatialLayerNum = 1;
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  m_LTR_Recover_Request.uiFeedbackType = NO_RECOVERY_REQUSET;
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  int32_t iTraceLevel = WELS_LOG_QUIET;
  // pFunc / pTraceInfo / sTrace are file-level globals shared with the
  // trace callback; iTarLevel mirrors the level currently set.
  pFunc = TestOutPutTrace;
  pTraceInfo = &sTrace;
  sTrace.iTarLevel = iTraceLevel;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  encoder_->SetOption (ENCODER_OPTION_TRACE_CALLBACK, &pFunc);
  encoder_->SetOption (ENCODER_OPTION_TRACE_CALLBACK_CONTEXT, &pTraceInfo);
  decoder_->SetOption (DECODER_OPTION_TRACE_CALLBACK, &pFunc);
  decoder_->SetOption (DECODER_OPTION_TRACE_CALLBACK_CONTEXT, &pTraceInfo);
  int32_t iSpsPpsIdAddition = 1;
  encoder_->SetOption (ENCODER_OPTION_SPS_PPS_ID_STRATEGY, &iSpsPpsIdAddition);
  int32_t iIDRPeriod = 60;
  encoder_->SetOption (ENCODER_OPTION_IDR_INTERVAL, &iIDRPeriod);
  SLTRConfig sLtrConfigVal;
  sLtrConfigVal.bEnableLongTermReference = 1;
  sLtrConfigVal.iLTRRefNum = 1;
  encoder_->SetOption (ENCODER_OPTION_LTR, &sLtrConfigVal);
  int32_t iLtrPeriod = 2;
  encoder_->SetOption (ENCODER_LTR_MARKING_PERIOD, &iLtrPeriod);
  int iIdx = 0;
  int iLossIdx = 0;
  bool bVCLLoss = false;
  while (iIdx <= p.numframes) {
    // Randomize the trace level (0..32) each frame; keep the shared context
    // in sync so the callback can compare against the requested level.
    iTraceLevel = rand() % 33;
    sTrace.iTarLevel = iTraceLevel;
    encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    EncodeOneFrame (1);
    if (m_LTR_Recover_Request.uiFeedbackType == IDR_RECOVERY_REQUEST) {
      ASSERT_TRUE (info.eFrameType == videoFrameTypeIDR);
    }
    //decoding after each encoding frame
    int len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    ExtractDidNal (&info, len, &m_SLostSim, 0);
    SimulateNALLoss (info.sLayerInfo[0].pBsBuf, len, &m_SLostSim, p.pLossSequence, p.bLostPara, iLossIdx, bVCLLoss);
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    m_LTR_Recover_Request.uiFeedbackType = NO_RECOVERY_REQUSET;
    LTRRecoveryRequest (decoder_, encoder_, &m_LTR_Recover_Request, rv, true);
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    LTRRecoveryRequest (decoder_, encoder_, &m_LTR_Recover_Request, rv, true);
    LTRMarkFeedback (decoder_, encoder_, &m_LTR_Marking_Feedback, rv);
    iIdx++;
  }
}
// Same loop as SetOption_Trace, but with NULL trace callback and NULL
// context installed: verifies that both encoder and decoder tolerate a null
// trace sink at every trace level without crashing.
TEST_P (EncodeDecodeTestAPI, SetOption_Trace_NULL) {
  SLTRMarkingFeedback m_LTR_Marking_Feedback;
  SLTRRecoverRequest m_LTR_Recover_Request;
  m_LTR_Recover_Request.uiIDRPicId = 0;
  m_LTR_Recover_Request.iLayerId = 0;
  m_LTR_Marking_Feedback.iLayerId = 0;
  EncodeDecodeFileParamBase p = GetParam();
  prepareParamDefault (1, p.slicenum, p.width, p.height, p.frameRate, &param_);
  param_.iSpatialLayerNum = 1;
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == cmResultSuccess);
  m_LTR_Recover_Request.uiFeedbackType = NO_RECOVERY_REQUSET;
  ASSERT_TRUE (InitialEncDec (p.width, p.height));
  int32_t iTraceLevel = WELS_LOG_QUIET;
  // Deliberately install a NULL callback and NULL context (pFunc and
  // pTraceInfo are file-level globals).
  pFunc = NULL;
  pTraceInfo = NULL;
  encoder_->SetOption (ENCODER_OPTION_TRACE_CALLBACK, &pFunc);
  encoder_->SetOption (ENCODER_OPTION_TRACE_CALLBACK_CONTEXT, &pTraceInfo);
  decoder_->SetOption (DECODER_OPTION_TRACE_CALLBACK, &pFunc);
  decoder_->SetOption (DECODER_OPTION_TRACE_CALLBACK_CONTEXT, &pTraceInfo);
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  int32_t iSpsPpsIdAddition = 1;
  encoder_->SetOption (ENCODER_OPTION_SPS_PPS_ID_STRATEGY, &iSpsPpsIdAddition);
  int32_t iIDRPeriod = 60;
  encoder_->SetOption (ENCODER_OPTION_IDR_INTERVAL, &iIDRPeriod);
  SLTRConfig sLtrConfigVal;
  sLtrConfigVal.bEnableLongTermReference = 1;
  sLtrConfigVal.iLTRRefNum = 1;
  encoder_->SetOption (ENCODER_OPTION_LTR, &sLtrConfigVal);
  int32_t iLtrPeriod = 2;
  encoder_->SetOption (ENCODER_LTR_MARKING_PERIOD, &iLtrPeriod);
  int iIdx = 0;
  int iLossIdx = 0;
  bool bVCLLoss = false;
  while (iIdx <= p.numframes) {
    // Randomize trace level (0..32) each frame with the null sink in place.
    iTraceLevel = rand() % 33;
    encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    EncodeOneFrame (1);
    if (m_LTR_Recover_Request.uiFeedbackType == IDR_RECOVERY_REQUEST) {
      ASSERT_TRUE (info.eFrameType == videoFrameTypeIDR);
    }
    //decoding after each encoding frame
    int len = 0;
    encToDecData (info, len);
    unsigned char* pData[3] = { NULL };
    memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
    ExtractDidNal (&info, len, &m_SLostSim, 0);
    SimulateNALLoss (info.sLayerInfo[0].pBsBuf, len, &m_SLostSim, p.pLossSequence, p.bLostPara, iLossIdx, bVCLLoss);
    rv = decoder_->DecodeFrame2 (info.sLayerInfo[0].pBsBuf, len, pData, &dstBufInfo_);
    m_LTR_Recover_Request.uiFeedbackType = NO_RECOVERY_REQUSET;
    LTRRecoveryRequest (decoder_, encoder_, &m_LTR_Recover_Request, rv, true);
    rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
    LTRRecoveryRequest (decoder_, encoder_, &m_LTR_Recover_Request, rv, true);
    LTRMarkFeedback (decoder_, encoder_, &m_LTR_Marking_Feedback, rv);
    iIdx++;
  }
}
// Fixture for the decoder robustness ("crash") tests: owns a ~1 MB scratch
// buffer (ucBuf_) used to assemble possibly-lossy access units before they
// are handed to the decoder.
class DecodeCrashTestAPI : public ::testing::TestWithParam<EncodeDecodeFileParamBase>, public EncodeDecodeTestBase {
 public:
  void SetUp() {
    EncodeDecodeTestBase::SetUp();
    // Pre-set to NULL first so TearDown's delete[] stays safe even if the
    // allocation below throws before ucBuf_ is assigned.
    ucBuf_ = NULL;
    ucBuf_ = new unsigned char [1000000];
    ASSERT_TRUE (ucBuf_ != NULL);
  }
  void TearDown() {
    EncodeDecodeTestBase::TearDown();
    if (NULL != ucBuf_) {
      delete[] ucBuf_;
      ucBuf_ = NULL;
    }
    ASSERT_TRUE (ucBuf_ == NULL);
  }
  // Zeroes the param struct before delegating to the base implementation.
  void prepareParam (int iLayerNum, int iSliceNum, int width, int height, float framerate, SEncParamExt* pParam) {
    memset (pParam, 0, sizeof (SEncParamExt));
    EncodeDecodeTestBase::prepareParam (iLayerNum, iSliceNum, width, height, framerate, pParam);
  }
  // Fills 1/4 of the I420 input frame with a fixed byte (iRandValue, from
  // the base fixture) and the rest with random bytes, then encodes it.
  void EncodeOneFrame() {
    int frameSize = EncPic.iPicWidth * EncPic.iPicHeight * 3 / 2;
    memset (buf_.data(), iRandValue, (frameSize >> 2));
    memset (buf_.data() + (frameSize >> 2), rand() % 256, (frameSize - (frameSize >> 2)));
    int rv = encoder_->EncodeFrame (&EncPic, &info);
    ASSERT_TRUE (rv == cmResultSuccess || rv == cmUnknownReason);
  }
 protected:
  unsigned char* ucBuf_; // scratch buffer for assembling lossy access units
};
// Per-resolution encode settings used by the crash test below.
struct EncodeDecodeParamBase {
  int width;       // picture width in pixels
  int height;      // picture height in pixels
  float frameRate; // input frame rate (fps)
  int iTarBitrate; // target bitrate (bps)
};
#define NUM_OF_POSSIBLE_RESOLUTION (9)
// Landscape/portrait pairs from 160x90 up, plus 1280x720.
static const EncodeDecodeParamBase kParamArray[] = {
  {160, 90, 6.0f, 250000},
  {90, 160, 6.0f, 250000},
  {320, 180, 12.0f, 500000},
  {180, 320, 12.0f, 500000},
  {480, 270, 12.0f, 600000},
  {270, 480, 12.0f, 600000},
  {640, 360, 24.0f, 800000},
  {360, 640, 24.0f, 800000},
  {1280, 720, 24.0f, 1000000},
};
//#define DEBUG_FILE_SAVE_CRA
// Decoder robustness stress test: repeatedly encodes random-content frames
// at random resolutions, drops whole NAL-sized packets at a loss rate that
// grows with the round number, and feeds the damaged access units to one
// persistent decoder instance.  Success = the decoder survives (no crash)
// and keeps ERROR_CON_SLICE_COPY; picture content is NOT checked.
// Build flag: with DEBUG_FILE_SAVE_CRA defined the loop runs forever and
// saves the tested bitstream to disk; otherwise it runs 10 rounds (note the
// do/while terminator is selected by the #ifdef/#else at the end).
TEST_F (DecodeCrashTestAPI, DecoderCrashTest) {
  uint32_t uiGet;
  encoder_->Uninitialize();
  //do tests until crash
  unsigned int uiLoopRound = 0;
  unsigned char* pucBuf = ucBuf_;
  int iDecAuSize;
#ifdef DEBUG_FILE_SAVE_CRA
  //open file to save tested BS
  FILE* fDataFile = fopen ("test_crash.264", "wb");
  FILE* fLenFile = fopen ("test_crash_len.log", "w");
  int iFileSize = 0;
#endif
  //set eCurStrategy for one test
  // Chosen once per test run; cases 0/4/5 keep the CONSTANT_ID default.
  EParameterSetStrategy eCurStrategy = CONSTANT_ID;
  switch (rand() % 7) {
  case 1:
    eCurStrategy = INCREASING_ID;
    break;
  case 2:
    eCurStrategy = SPS_LISTING;
    break;
  case 3:
    eCurStrategy = SPS_LISTING_AND_PPS_INCREASING;
    break;
  case 6:
    eCurStrategy = SPS_PPS_LISTING;
    break;
  default:
    //using the initial value
    break;
  }
  do {
    // One round: fresh encoder at a random resolution, 1..100 frames.
    int iTotalFrameNum = (rand() % 100) + 1;
    int iSeed = rand() % NUM_OF_POSSIBLE_RESOLUTION;
    EncodeDecodeParamBase p = kParamArray[iSeed];
#ifdef DEBUG_FILE_SAVE_CRA
    printf ("using param set %d in loop %d\n", iSeed, uiLoopRound);
#endif
    //Initialize Encoder
    prepareParam (1, 1, p.width, p.height, p.frameRate, &param_);
    param_.iRCMode = RC_TIMESTAMP_MODE;
    param_.iTargetBitrate = p.iTarBitrate;
    param_.uiIntraPeriod = 0;
    param_.eSpsPpsIdStrategy = eCurStrategy;
    param_.bEnableBackgroundDetection = true;
    param_.bEnableSceneChangeDetect = (rand() % 3) ? true : false;
    param_.bPrefixNalAddingCtrl = (rand() % 2) ? true : false;
    param_.iEntropyCodingModeFlag = 0;
    param_.bEnableFrameSkip = true;
    param_.iMultipleThreadIdc = 0;
    param_.sSpatialLayers[0].iSpatialBitrate = p.iTarBitrate;
    param_.sSpatialLayers[0].iMaxSpatialBitrate = p.iTarBitrate << 1;
    // Randomly exercise size-limited slicing (1400-byte NAL cap) vs a
    // single slice per picture.
    param_.sSpatialLayers[0].sSliceArgument.uiSliceMode = (rand() % 2) ? SM_SIZELIMITED_SLICE : SM_SINGLE_SLICE;
    if (param_.sSpatialLayers[0].sSliceArgument.uiSliceMode == SM_SIZELIMITED_SLICE) {
      param_.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint = 1400;
      param_.uiMaxNalSize = 1400;
    } else {
      param_.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint = 0;
      param_.uiMaxNalSize = 0;
    }
    int rv = encoder_->InitializeExt (&param_);
    ASSERT_TRUE (rv == cmResultSuccess);
    decoder_->GetOption (DECODER_OPTION_ERROR_CON_IDC, &uiGet);
    EXPECT_EQ (uiGet, (uint32_t) ERROR_CON_SLICE_COPY); //default value should be ERROR_CON_SLICE_COPY
    int32_t iTraceLevel = WELS_LOG_QUIET;
    encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    //Start for enc/dec
    int iIdx = 0;
    unsigned char* pData[3] = { NULL };
    EncodeDecodeFileParamBase pInput; //to conform with old functions
    pInput.width = p.width;
    pInput.height = p.height;
    pInput.frameRate = p.frameRate;
    ASSERT_TRUE (prepareEncDecParam (pInput));
    while (iIdx++ < iTotalFrameNum) { // loop in frame
      EncodeOneFrame();
#ifdef DEBUG_FILE_SAVE_CRA
      //reset file if file size large
      // On an IDR with >=32 MB already written: truncate the dump file and
      // re-initialize the decoder so the saved stream stays decodable.
      if ((info.eFrameType == videoFrameTypeIDR) && (iFileSize >= (1 << 25))) {
        fclose (fDataFile);
        fDataFile = fopen ("test_crash.264", "wb");
        iFileSize = 0;
        decoder_->Uninitialize();
        SDecodingParam decParam;
        memset (&decParam, 0, sizeof (SDecodingParam));
        decParam.uiTargetDqLayer = UCHAR_MAX;
        decParam.eEcActiveIdc = ERROR_CON_SLICE_COPY;
        decParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
        rv = decoder_->Initialize (&decParam);
        ASSERT_EQ (0, rv);
      }
#endif
      if (info.eFrameType == videoFrameTypeSkip)
        continue;
      //deal with packets
      unsigned char* pBsBuf;
      iDecAuSize = 0;
      pucBuf = ucBuf_; //init buf start pos for decoder usage
      for (int iLayerNum = 0; iLayerNum < info.iLayerNum; iLayerNum++) {
        SLayerBSInfo* pLayerBsInfo = &info.sLayerInfo[iLayerNum];
        pBsBuf = info.sLayerInfo[iLayerNum].pBsBuf;
        int iTotalNalCnt = pLayerBsInfo->iNalCount;
        for (int iNalCnt = 0; iNalCnt < iTotalNalCnt; iNalCnt++) { //loop in NAL
          int iPacketSize = pLayerBsInfo->pNalLengthInByte[iNalCnt];
          //packet loss
          // The loss-rate upper bound grows with the round number, so later
          // rounds stress the decoder with heavier damage.
          int iLossRateRange = (uiLoopRound % 100) + 1; //1-100
          int iLossRate = (rand() % iLossRateRange);
          bool bPacketLost = (rand() % 101) > (100 -
                                               iLossRate); // [0, (100-iLossRate)] indicates NO LOSS, (100-iLossRate, 100] indicates LOSS
          if (!bPacketLost) { //no loss
            memcpy (pucBuf, pBsBuf, iPacketSize);
            pucBuf += iPacketSize;
            iDecAuSize += iPacketSize;
          }
#ifdef DEBUG_FILE_SAVE_CRA
          else {
            printf ("lost packet size=%d at frame-type=%d at loss rate %d (%d)\n", iPacketSize, info.eFrameType, iLossRate,
                    iLossRateRange);
          }
#endif
          //update bs info
          pBsBuf += iPacketSize;
        } //nal
      } //layer
#ifdef DEBUG_FILE_SAVE_CRA
      //save to file
      fwrite (ucBuf_, 1, iDecAuSize, fDataFile);
      fflush (fDataFile);
      iFileSize += iDecAuSize;
      //save to len file
      unsigned long ulTmp[4];
      ulTmp[0] = ulTmp[1] = ulTmp[2] = iIdx;
      ulTmp[3] = iDecAuSize;
      fwrite (ulTmp, sizeof (unsigned long), 4, fLenFile); // index, timeStamp, data size
      fflush (fLenFile);
#endif
      //decode
      pData[0] = pData[1] = pData[2] = 0;
      memset (&dstBufInfo_, 0, sizeof (SBufferInfo));
      rv = decoder_->DecodeFrame2 (ucBuf_, iDecAuSize, pData, &dstBufInfo_);
      rv = decoder_->DecodeFrame2 (NULL, 0, pData, &dstBufInfo_); //reconstruction
      //guarantee decoder EC status
      decoder_->GetOption (DECODER_OPTION_ERROR_CON_IDC, &uiGet);
      EXPECT_EQ (uiGet, (uint32_t) ERROR_CON_SLICE_COPY);
    } //frame
    uiLoopRound ++;
    if (uiLoopRound >= (1 << 30))
      uiLoopRound = 0;
// CAUTION: the loop terminator below differs by build flag - infinite loop
// (with file cleanup) in debug-save builds, 10 rounds otherwise.
#ifdef DEBUG_FILE_SAVE_CRA
    if (uiLoopRound % 100 == 0)
      printf ("run %d times.\n", uiLoopRound);
  } while (1); //while (iLoopRound<100);
  fclose (fDataFile);
  fclose (fLenFile);
#else
  }
  while (uiLoopRound < 10);
#endif
}
// Fixed configuration for the parse-only tests below.  The golden SHA1
// digests in pHashStr were produced with exactly these settings, so none of
// these values may change.
const uint32_t kiTotalLayer = 3; //DO NOT CHANGE!
const uint32_t kiSliceNum = 2; //DO NOT CHANGE!
const uint32_t kiWidth = 160; //DO NOT CHANGE!
const uint32_t kiHeight = 96; //DO NOT CHANGE!
const uint32_t kiFrameRate = 12; //DO NOT CHANGE!
const uint32_t kiFrameNum = 100; //DO NOT CHANGE!
// One row per target spatial layer; either digest in a row is accepted.
const char* const pHashStr[][2] = { //DO NOT CHANGE!
  // Allow for different output depending on whether averaging is done
  // vertically or horizontally first when downsampling.
  { "d5fb6d72f8cc0ea4b037e883598c162fd32b475d", "0fc7e06d0d766ac911730da2aa9e953bc858a161" },
  { "17203f07486e895aef7c1bf94133fd731caba572", "1d47de674c9c44d8292ee00fa053a42bb9383614" },
  { "86bf890aef2abe24abe40ebe3d9ec76a25ddebe7", "43eaac708413c109ca120c5d570176f1c9b4036c" }
};
// Fixture for parse-only decoder tests: re-initializes the decoder with
// bParseOnly = true, opens a raw YUV source for encoder input, and keeps a
// SHA1 context that accumulates the parsed output for digest comparison.
class DecodeParseAPI : public ::testing::TestWithParam<EncodeDecodeFileParamBase>, public EncodeDecodeTestBase {
 public:
  DecodeParseAPI() {
    memset (&BsInfo_, 0, sizeof (SParserBsInfo));
    fYuv_ = NULL;
    iWidth_ = 0;
    iHeight_ = 0;
    memset (&ctx_, 0, sizeof (SHA1Context));
  }
  void SetUp() {
    SHA1Reset (&ctx_);
    EncodeDecodeTestBase::SetUp();
    // Re-create the decoder in parse-only mode (no pixel reconstruction).
    if (decoder_)
      decoder_->Uninitialize();
    SDecodingParam decParam;
    memset (&decParam, 0, sizeof (SDecodingParam));
    decParam.uiTargetDqLayer = UCHAR_MAX;
    decParam.eEcActiveIdc = ERROR_CON_SLICE_COPY;
    decParam.bParseOnly = true;
    decParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
    int rv = decoder_->Initialize (&decParam);
    ASSERT_EQ (0, rv);
    memset (&BsInfo_, 0, sizeof (SParserBsInfo));
    const char* sFileName = "res/CiscoVT2people_160x96_6fps.yuv";
#if defined(ANDROID_NDK)
    std::string filename = std::string ("/sdcard/") + sFileName;
    ASSERT_TRUE ((fYuv_ = fopen (filename.c_str(), "rb")) != NULL);
#else
    ASSERT_TRUE ((fYuv_ = fopen (sFileName, "rb")) != NULL);
#endif
    iWidth_ = kiWidth;
    iHeight_ = kiHeight;
  }
  void TearDown() {
    EncodeDecodeTestBase::TearDown();
    if (fYuv_ != NULL) {
      fclose (fYuv_);
      fYuv_ = NULL;
    }
  }
  // Resets BsInfo_ but preserves its output-buffer pointer across the reset.
  bool prepareEncDecParam (const EncodeDecodeFileParamBase p) {
    if (!EncodeDecodeTestBase::prepareEncDecParam (p))
      return false;
    unsigned char* pTmpPtr = BsInfo_.pDstBuff; //store for restore
    memset (&BsInfo_, 0, sizeof (SParserBsInfo));
    BsInfo_.pDstBuff = pTmpPtr;
    return true;
  }
  // Perturbs each byte with small pseudo-random noise, wrapped to 0..255.
  void MockInputData (uint8_t* pData, int32_t iSize) {
    int32_t iCurr = 0;
    while (iCurr < iSize) {
      * (pData + iCurr) = (* (pData + iCurr) + (rand() % 20) + 256) & 0x00ff;
      iCurr++;
    }
  }
  // Reads one I420 frame from fYuv_ (rewinding at EOF), optionally mocks
  // the first width*height bytes (the luma plane), then encodes into 'info'.
  void EncodeOneFrame (bool bMock) {
    int iFrameSize = iWidth_ * iHeight_ * 3 / 2;
    int iSize = (int) fread (buf_.data(), sizeof (char), iFrameSize, fYuv_);
    if (feof (fYuv_) || iSize != iFrameSize) {
      rewind (fYuv_);
      iSize = (int) fread (buf_.data(), sizeof (char), iFrameSize, fYuv_);
      ASSERT_TRUE (iSize == iFrameSize);
    }
    if (bMock) {
      MockInputData (buf_.data(), iWidth_ * iHeight_);
    }
    int rv = encoder_->EncodeFrame (&EncPic, &info);
    ASSERT_TRUE (rv == cmResultSuccess || rv == cmUnknownReason);
  }
  // Zeroes the param struct before delegating to the base implementation.
  void prepareParam (int iLayerNum, int iSliceNum, int width, int height, float framerate, SEncParamExt* pParam) {
    memset (pParam, 0, sizeof (SEncParamExt));
    EncodeDecodeTestBase::prepareParam (iLayerNum, iSliceNum, width, height, framerate, pParam);
  }
 protected:
  SParserBsInfo BsInfo_; // parser output: NAL lengths + destination buffer
  FILE* fYuv_;           // raw YUV input file
  int iWidth_;
  int iHeight_;
  SHA1Context ctx_;      // digest over all parsed output in one test
};
//#define DEBUG_FILE_SAVE_PARSEONLY_GENERAL
// Parse-only path over a 3-spatial-layer stream: picks one random target
// layer, extracts its NALs, runs the two-call parser sequence per frame,
// and checks the SHA1 of all parsed output against the golden digests.
TEST_F (DecodeParseAPI, ParseOnly_General) {
  EncodeDecodeFileParamBase p;
  p.width = iWidth_;
  p.height = iHeight_;
  p.frameRate = kiFrameRate;
  p.numframes = kiFrameNum;
  prepareParam (kiTotalLayer, kiSliceNum, p.width, p.height, p.frameRate, &param_);
  param_.iSpatialLayerNum = kiTotalLayer;
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == 0);
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  uint32_t uiTargetLayerId = rand() % kiTotalLayer; //run only once
#ifdef DEBUG_FILE_SAVE_PARSEONLY_GENERAL
  FILE* fDec = fopen ("output.264", "wb");
  FILE* fEnc = fopen ("enc.264", "wb");
  FILE* fExtract = fopen ("extract.264", "wb");
#endif
  if (uiTargetLayerId < kiTotalLayer) { //should always be true
    //Start for enc
    int iLen = 0;
    ASSERT_TRUE (prepareEncDecParam (p));
    int iFrame = 0;
    while (iFrame < p.numframes) {
      //encode
      EncodeOneFrame (0);
      //extract target layer data
      encToDecData (info, iLen);
#ifdef DEBUG_FILE_SAVE_PARSEONLY_GENERAL
      fwrite (info.sLayerInfo[0].pBsBuf, iLen, 1, fEnc);
#endif
      ExtractDidNal (&info, iLen, &m_SLostSim, uiTargetLayerId);
#ifdef DEBUG_FILE_SAVE_PARSEONLY_GENERAL
      fwrite (info.sLayerInfo[0].pBsBuf, iLen, 1, fExtract);
#endif
      //parseonly
      //BsInfo_.pDstBuff = new unsigned char [1000000];
      // Two-call pattern: the first call feeds data (no NALs reported yet),
      // the second flushes the parsed access unit into BsInfo_.
      rv = decoder_->DecodeParser (info.sLayerInfo[0].pBsBuf, iLen, &BsInfo_);
      EXPECT_TRUE (rv == 0);
      EXPECT_TRUE (BsInfo_.iNalNum == 0);
      rv = decoder_->DecodeParser (NULL, 0, &BsInfo_);
      EXPECT_TRUE (rv == 0);
      EXPECT_TRUE (BsInfo_.iNalNum != 0);
      //get final output bs
      // Total parsed length = sum of the reported per-NAL lengths.
      iLen = 0;
      int i = 0;
      while (i < BsInfo_.iNalNum) {
        iLen += BsInfo_.pNalLenInByte[i];
        i++;
      }
#ifdef DEBUG_FILE_SAVE_PARSEONLY_GENERAL
      fwrite (BsInfo_.pDstBuff, iLen, 1, fDec);
#endif
      SHA1Input (&ctx_, BsInfo_.pDstBuff, iLen);
      iFrame++;
    }
    //calculate final SHA1 value
    unsigned char digest[SHA_DIGEST_LENGTH];
    SHA1Result (&ctx_, digest);
    if (!HasFatalFailure()) {
      // Either digest of the target layer's row is accepted (see pHashStr).
      CompareHashAnyOf (digest, pHashStr[uiTargetLayerId], sizeof * pHashStr / sizeof** pHashStr);
    }
  } //while
#ifdef DEBUG_FILE_SAVE_PARSEONLY_GENERAL
  fclose (fEnc);
  fclose (fExtract);
  fclose (fDec);
#endif
}
//This case covers a single spatial layer with incomplete frame input:
//the first slice is lost for one randomly chosen picture (2 slices per picture)
// Parse-only error path: for one random non-IDR picture the first of its
// two slices is dropped.  Since parse-only mode runs without error
// concealment, the parser must report an error and output nothing from the
// damaged picture onward while still accepting input without crashing.
TEST_F (DecodeParseAPI, ParseOnly_SpecSliceLoss) {
  int32_t iLayerNum = 1;
  int32_t iSliceNum = 2;
  EncodeDecodeFileParamBase p;
  p.width = iWidth_;
  p.height = iHeight_;
  p.frameRate = kiFrameRate;
  p.numframes = 5;
  prepareParam (iLayerNum, iSliceNum, p.width, p.height, p.frameRate, &param_);
  param_.iSpatialLayerNum = iLayerNum;
  encoder_->Uninitialize();
  int rv = encoder_->InitializeExt (&param_);
  ASSERT_TRUE (rv == 0);
  int32_t iTraceLevel = WELS_LOG_QUIET;
  encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
  // Pick the damaged picture in 1..numframes-1 so the IDR (frame 0) is intact.
  int32_t iMissedPicNum = rand() % (p.numframes - 1) + 1; //IDR no loss
  //Start for enc
  int iLen = 0;
  uint32_t uiGet;
  ASSERT_TRUE (prepareEncDecParam (p));
  int iFrame = 0;
  while (iFrame < p.numframes) {
    //encode
    EncodeOneFrame (0);
    //parseonly
    if (iFrame == iMissedPicNum) { //make current frame partly missing
      //Frame: P, first slice loss
      int32_t iTotalSliceSize = 0;
      encToDecSliceData (0, 0, info, iTotalSliceSize); //slice 1 lost
      encToDecSliceData (0, 1, info, iLen); //slice 2
      // Parse-only mode is expected to run with EC disabled.
      decoder_->GetOption (DECODER_OPTION_ERROR_CON_IDC, &uiGet);
      EXPECT_EQ (uiGet, (uint32_t) ERROR_CON_DISABLE);
      // Feed only the surviving second slice; flush call must fail.
      rv = decoder_->DecodeParser (info.sLayerInfo[0].pBsBuf + iTotalSliceSize, iLen, &BsInfo_);
      EXPECT_TRUE (rv == 0);
      EXPECT_TRUE (BsInfo_.iNalNum == 0);
      rv = decoder_->DecodeParser (NULL, 0, &BsInfo_);
      EXPECT_TRUE (rv != 0);
    } else { //normal frame, complete
      encToDecData (info, iLen);
      rv = decoder_->DecodeParser (info.sLayerInfo[0].pBsBuf, iLen, &BsInfo_);
      EXPECT_TRUE (rv == 0); //parse correct
      EXPECT_TRUE (BsInfo_.iNalNum == 0);
      rv = decoder_->DecodeParser (NULL, 0, &BsInfo_);
      if (iFrame < iMissedPicNum) { //correct frames, all OK with output
        EXPECT_TRUE (rv == 0);
        EXPECT_TRUE (BsInfo_.iNalNum != 0);
      } else { //(iFrame > iMissedPicNum), should output nothing as error
        EXPECT_TRUE (rv != 0);
        EXPECT_TRUE (BsInfo_.iNalNum == 0);
      }
    }
    iFrame++;
  } //while
}
// Verifies DECODER_OPTION_GET_STATISTICS in parse-only mode: for ten rounds
// with growing resolution it encodes two frames, parses them, and checks that
// the reported width/height, resolution-change count, active SPS/PPS ids,
// decoded-frame count, profile/level and IDR count all match expectations.
TEST_F (DecodeParseAPI, ParseOnly_SpecStatistics) {
  //set params
  int32_t iLayerNum = 1;
  int32_t iSliceNum = 1;
  EncodeDecodeFileParamBase p;
  const int iLoopNum = 10;
  p.frameRate = kiFrameRate;
  p.numframes = 2; //encode 2 frames in each test
  p.width = iWidth_ = 16;
  p.height = iHeight_ = 16; //default start width/height = 16, will be modified each time
  int iTotalFrmCnt = 0;
  for (int i = 0; i < iLoopNum; ++i) {
    prepareParam (iLayerNum, iSliceNum, p.width, p.height, p.frameRate, &param_);
    param_.iSpatialLayerNum = iLayerNum;
    param_.sSpatialLayers[0].iDLayerQp = 40; //to prevent size too limited to encoding fail
    encoder_->Uninitialize();
    int rv = encoder_->InitializeExt (&param_);
    ASSERT_TRUE (rv == 0);
    // Silence encoder/decoder logging for the duration of the test.
    int32_t iTraceLevel = WELS_LOG_QUIET;
    rv = encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    ASSERT_TRUE (rv == 0);
    rv = decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    ASSERT_TRUE (rv == 0);
    //Start for enc
    int iLen = 0;
    ASSERT_TRUE (prepareEncDecParam (p));
    int iFrame = 0;
    while (iFrame < p.numframes) {
      EncodeOneFrame (0);
      encToDecData (info, iLen);
      iFrame++;
      iTotalFrmCnt++;
      // First DecodeParser() call only feeds the access unit; no NALs are
      // reported until the flush call (NULL input) below.
      rv = decoder_->DecodeParser (info.sLayerInfo[0].pBsBuf, iLen, &BsInfo_);
      ASSERT_TRUE (rv == 0);
      ASSERT_TRUE (BsInfo_.iNalNum == 0);
      rv = decoder_->DecodeParser (NULL, 0, &BsInfo_);
      ASSERT_TRUE (rv == 0);
      ASSERT_TRUE (BsInfo_.iNalNum != 0);
      // Pull the statistics and cross-check them against the loop state.
      SDecoderStatistics sDecStat;
      rv = decoder_->GetOption (DECODER_OPTION_GET_STATISTICS, &sDecStat);
      ASSERT_TRUE (rv == 0);
      uint32_t uiProfile, uiLevel;
      rv = decoder_->GetOption (DECODER_OPTION_PROFILE, &uiProfile);
      ASSERT_TRUE (rv == 0);
      rv = decoder_->GetOption (DECODER_OPTION_LEVEL, &uiLevel);
      ASSERT_TRUE (rv == 0);
      ASSERT_EQ (sDecStat.uiWidth, (unsigned int) p.width);
      ASSERT_EQ (sDecStat.uiHeight, (unsigned int) p.height);
      // Resolution changes once per loop round (size grows below).
      ASSERT_EQ (sDecStat.uiResolutionChangeTimes, (unsigned int) (i + 1));
      EXPECT_EQ (sDecStat.iCurrentActiveSpsId, 0);
      EXPECT_EQ (sDecStat.iCurrentActivePpsId, 0);
      ASSERT_EQ (sDecStat.uiDecodedFrameCount, (unsigned int) iTotalFrmCnt);
      ASSERT_EQ (sDecStat.uiProfile, uiProfile);
      ASSERT_EQ (sDecStat.uiLevel, uiLevel);
      EXPECT_TRUE (sDecStat.fActualAverageFrameSpeedInMs != 0.);
      EXPECT_TRUE (sDecStat.fAverageFrameSpeedInMs != 0.);
      EXPECT_TRUE (sDecStat.iAvgLumaQp != 0);
      // One IDR per round (two frames, second is P with uiIntraPeriod default).
      EXPECT_EQ (sDecStat.uiIDRCorrectNum, (unsigned int) (i + 1));
    }
    //set next width & height
    p.width += 16;
    p.height += 16;
    if ((unsigned int) p.width > kiWidth) //exceeds max frame size
      p.width = 16;
    if ((unsigned int) p.height > kiHeight)
      p.height = 16;
    iWidth_ = p.width;
    iHeight_ = p.height;
  }
}
//Test parseonly crash cases
// Fixture for parse-only crash/robustness tests: extends DecodeParseAPI with
// a 720p default resolution and a scratch buffer used to assemble (possibly
// lossy) access units before they are fed to DecodeParser().
class DecodeParseCrashAPI : public DecodeParseAPI {
 public:
  // ucBuf_ must be initialized here, not only in SetUp(): if
  // DecodeParseAPI::SetUp() fails with a fatal assertion before the
  // allocation below runs, gtest still calls TearDown(), which would
  // otherwise read an uninitialized pointer.
  DecodeParseCrashAPI() : ucBuf_ (NULL) {
  }
  void SetUp() {
    DecodeParseAPI::SetUp();
    iWidth_ = 1280;
    iHeight_ = 720;

    ucBuf_ = new unsigned char[kBufferSize];
    ASSERT_TRUE (ucBuf_ != NULL);
  }
  void TearDown() {
    DecodeParseAPI::TearDown();
    if (NULL != ucBuf_) {
      delete[] ucBuf_;
      ucBuf_ = NULL;
    }
    ASSERT_TRUE (ucBuf_ == NULL);
  }

 protected:
  // Scratch space large enough for one assembled 720p access unit.
  static const size_t kBufferSize = 1000000;
  unsigned char* ucBuf_;
};
//#define DEBUG_FILE_SAVE_PARSE_CRA1
// Robustness test for parse-only decoding: repeatedly encodes random-length
// 720p sequences, drops NALs at a randomized loss rate, feeds the (possibly
// corrupted) access units to DecodeParser() and checks that the decoder
// neither crashes nor silently re-enables error concealment.
// With DEBUG_FILE_SAVE_PARSE_CRA1 defined the outer loop runs forever and the
// tested bitstream is dumped to disk for offline reproduction; without it the
// do/while executes exactly once.
TEST_F (DecodeParseCrashAPI, ParseOnlyCrash_General) {
  // Swap the fixture's default input for the 720p test sequence.
  if (fYuv_)
    fclose (fYuv_);
  const char* sFileName = "res/Cisco_Absolute_Power_1280x720_30fps.yuv";
#if defined(ANDROID_NDK)
  std::string filename = std::string ("/sdcard/") + sFileName;
  ASSERT_TRUE ((fYuv_ = fopen (filename.c_str(), "rb")) != NULL);
#else
  ASSERT_TRUE ((fYuv_ = fopen (sFileName, "rb")) != NULL);
#endif
  uint32_t uiGet;
  encoder_->Uninitialize();
  //do tests until crash
  unsigned int uiLoopRound = 0;
  unsigned char* pucBuf = ucBuf_;
  int iDecAuSize;
#ifdef DEBUG_FILE_SAVE_PARSE_CRA1
  //open file to save tested BS
  FILE* fDataFile = fopen ("test_parseonly_crash.264", "wb");
  FILE* fLenFile = fopen ("test_parseonly_crash_len.log", "w");
  int iFileSize = 0;
#endif
  do {
#ifdef DEBUG_FILE_SAVE_PARSE_CRA1
    int iTotalFrameNum = (rand() % 1200) + 1;
#else
    int iTotalFrameNum = (rand() % 100) + 1;
#endif
    EncodeDecodeParamBase p = kParamArray[8]; //720p by default
    //Initialize Encoder
    prepareParam (1, 4, p.width, p.height, p.frameRate, &param_);
    param_.iRCMode = RC_TIMESTAMP_MODE;
    param_.iTargetBitrate = p.iTarBitrate;
    param_.uiIntraPeriod = 0;
    param_.eSpsPpsIdStrategy = CONSTANT_ID;
    param_.bEnableBackgroundDetection = true;
    param_.bEnableSceneChangeDetect = (rand() % 3) ? true : false;
    param_.bPrefixNalAddingCtrl = 0;// (rand() % 2) ? true : false;
    param_.iEntropyCodingModeFlag = 0;
    param_.bEnableFrameSkip = true;
    param_.iMultipleThreadIdc = 0;
    param_.sSpatialLayers[0].iSpatialBitrate = p.iTarBitrate;
    param_.sSpatialLayers[0].iMaxSpatialBitrate = p.iTarBitrate << 1;
    param_.sSpatialLayers[0].sSliceArgument.uiSliceMode =
        SM_FIXEDSLCNUM_SLICE; // (rand() % 2) ? SM_SIZELIMITED_SLICE : SM_SINGLE_SLICE;
    if (param_.sSpatialLayers[0].sSliceArgument.uiSliceMode == SM_SIZELIMITED_SLICE) {
      param_.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint = 1400;
      param_.uiMaxNalSize = 1400;
    } else {
      param_.sSpatialLayers[0].sSliceArgument.uiSliceSizeConstraint = 0;
      param_.uiMaxNalSize = 0;
    }
    int rv = encoder_->InitializeExt (&param_);
    ASSERT_TRUE (rv == cmResultSuccess);
    decoder_->GetOption (DECODER_OPTION_ERROR_CON_IDC, &uiGet);
    // Parse-only decoding forces error concealment off, so the option must
    // read back ERROR_CON_DISABLE here.
    EXPECT_EQ (uiGet, (uint32_t)ERROR_CON_DISABLE);
    int32_t iTraceLevel = WELS_LOG_QUIET;
    encoder_->SetOption (ENCODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    decoder_->SetOption (DECODER_OPTION_TRACE_LEVEL, &iTraceLevel);
    //Start for enc/dec
    int iIdx = 0;
    unsigned char* pData[3] = { NULL };
    EncodeDecodeFileParamBase pInput; //to conform with old functions
    pInput.width = p.width;
    pInput.height = p.height;
    pInput.frameRate = p.frameRate;
    prepareEncDecParam (pInput);
    while (iIdx++ < iTotalFrameNum) { // loop in frame
      EncodeOneFrame (1);
#ifdef DEBUG_FILE_SAVE_PARSE_CRA1
      //reset file if file size large; reinitialize the decoder so the saved
      //stream starts from a clean state at the next IDR
      if ((info.eFrameType == videoFrameTypeIDR) && (iFileSize >= (1 << 25))) {
        fclose (fDataFile);
        fclose (fLenFile);
        fDataFile = fopen ("test_parseonly_crash.264", "wb");
        fLenFile = fopen ("test_parseonly_crash_len.log", "w");
        iFileSize = 0;
        decoder_->Uninitialize();
        SDecodingParam decParam;
        memset (&decParam, 0, sizeof (SDecodingParam));
        decParam.uiTargetDqLayer = UCHAR_MAX;
        decParam.eEcActiveIdc = ERROR_CON_DISABLE;
        decParam.bParseOnly = true;
        decParam.sVideoProperty.eVideoBsType = VIDEO_BITSTREAM_DEFAULT;
        rv = decoder_->Initialize (&decParam);
        ASSERT_EQ (0, rv);
      }
#endif
      if (info.eFrameType == videoFrameTypeSkip)
        continue;
      //deal with packets: copy the surviving NALs into ucBuf_ to build one
      //(possibly incomplete) access unit
      unsigned char* pBsBuf;
      iDecAuSize = 0;
      pucBuf = ucBuf_; //init buf start pos for decoder usage
      for (int iLayerNum = 0; iLayerNum < info.iLayerNum; iLayerNum++) {
        SLayerBSInfo* pLayerBsInfo = &info.sLayerInfo[iLayerNum];
        pBsBuf = info.sLayerInfo[iLayerNum].pBsBuf;
        int iTotalNalCnt = pLayerBsInfo->iNalCount;
        for (int iNalCnt = 0; iNalCnt < iTotalNalCnt; iNalCnt++) { //loop in NAL
          int iPacketSize = pLayerBsInfo->pNalLengthInByte[iNalCnt];
          //packet loss: loss rate grows with the loop round (cycle of 20)
          int iLossRateRange = (uiLoopRound % 20) + 1; //1-100
          int iLossRate = (rand() % iLossRateRange);
          bool bPacketLost = (rand() % 101) > (100 -
                             iLossRate); // [0, (100-iLossRate)] indicates NO LOSS, (100-iLossRate, 100] indicates LOSS
          if (!bPacketLost) { //no loss
            memcpy (pucBuf, pBsBuf, iPacketSize);
            pucBuf += iPacketSize;
            iDecAuSize += iPacketSize;
          }
          //update bs info
          pBsBuf += iPacketSize;
        } //nal
      } //layer
#ifdef DEBUG_FILE_SAVE_PARSE_CRA1
      //save to file
      if (iDecAuSize != 0) {
        fwrite (ucBuf_, 1, iDecAuSize, fDataFile);
        fflush (fDataFile);
        iFileSize += iDecAuSize;
      }
      //save to len file
      unsigned long ulTmp[4];
      ulTmp[0] = ulTmp[1] = ulTmp[2] = iIdx;
      ulTmp[3] = iDecAuSize;
      fwrite (ulTmp, sizeof (unsigned long), 4, fLenFile); // index, timeStamp, data size
      fflush (fLenFile);
#endif
      //decode: feed the assembled AU, then flush; neither call's status is
      //asserted here — the point is surviving arbitrary loss patterns
      pData[0] = pData[1] = pData[2] = 0;
      memset (&BsInfo_, 0, sizeof (SParserBsInfo));
      rv = decoder_->DecodeParser (ucBuf_, iDecAuSize, &BsInfo_);
      rv = decoder_->DecodeParser (NULL, 0, &BsInfo_); //reconstruction
      //guarantee decoder EC status stays disabled in parse-only mode
      decoder_->GetOption (DECODER_OPTION_ERROR_CON_IDC, &uiGet);
      EXPECT_EQ (uiGet, (uint32_t)ERROR_CON_DISABLE);
    } //frame
    uiLoopRound++;
    if (uiLoopRound >= (1 << 30))
      uiLoopRound = 0;
#ifdef DEBUG_FILE_SAVE_PARSE_CRA1
    if (uiLoopRound % 10 == 0)
      printf ("run %d times.\n", uiLoopRound);
  } while (1);
  fclose (fDataFile);
  fclose (fLenFile);
#else
  }
  // Without the debug macro the loop body runs exactly once.
  while (0);
#endif
}
| bsd-2-clause |
sebastienros/jint | Jint.Tests.Test262/test/built-ins/Promise/resolve/context-non-object-with-promise.js | 1267 | // Copyright (C) 2015 André Bargull. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es6id: 25.4.4.5
description: >
Throws a TypeError if `this` is not an Object.
info: |
Promise.resolve ( x )
1. Let C be the this value.
2. If Type(C) is not Object, throw a TypeError exception.
...
features: [Symbol]
---*/
var promise = new Promise(function() {});
var symbol = Symbol();

// Table of [thisValue, assertion message] pairs. For each entry the value is
// also assigned to promise.constructor before the call, matching the
// original per-value checks.
var nonObjectThisValues = [
  [undefined, "`this` value is undefined"],
  [null, "`this` value is null"],
  [true, "`this` value is a Boolean"],
  [1, "`this` value is a Number"],
  ["", "`this` value is a String"],
  [symbol, "`this` value is a Symbol"]
];

nonObjectThisValues.forEach(function(pair) {
  var thisValue = pair[0];
  var message = pair[1];

  promise.constructor = thisValue;
  assert.throws(TypeError, function() {
    Promise.resolve.call(thisValue, promise);
  }, message);
});
| bsd-2-clause |
chrisfinazzo/homebrew-core | Formula/golangci-lint.rb | 2732 | class GolangciLint < Formula
desc "Fast linters runner for Go"
homepage "https://golangci-lint.run/"
url "https://github.com/golangci/golangci-lint.git",
tag: "v1.44.0",
revision: "617470fa9e2c54dd91ab91c14e0d20030e183c20"
license "GPL-3.0-only"
head "https://github.com/golangci/golangci-lint.git", branch: "master"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "0b42eb53c7921abbbc6e05d055cfe43c52f4b98065b59ba7ebe6ec8f03d4ca51"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "1a6f3bb4ba129ebe89ebbd569675285cd7426e4f6daf3f4c3044ca9f2acdde2e"
sha256 cellar: :any_skip_relocation, monterey: "95d5384656d52a3274b6b107c213930101d857ea54b84aa883ed296d4331055b"
sha256 cellar: :any_skip_relocation, big_sur: "7300525291a8452685b37487e4e15adebd9f5545ba7c97068914abeccea47f04"
sha256 cellar: :any_skip_relocation, catalina: "7492f258cdba289a32fe72befd57aa308f494cbe63943462a6a62e160dd271cf"
sha256 cellar: :any_skip_relocation, x86_64_linux: "5d25c23a542624e2dc083c5bc768ab31eebcc4a21f84ee35aa624958726a11cc"
end
depends_on "go"
def install
ldflags = %W[
-s -w
-X main.version=#{version}
-X main.commit=#{Utils.git_short_head(length: 7)}
-X main.date=#{time.rfc3339}
]
system "go", "build", *std_go_args(ldflags: ldflags), "./cmd/golangci-lint"
output = Utils.safe_popen_read("#{bin}/golangci-lint", "completion", "bash")
(bash_completion/"golangci-lint").write output
output = Utils.safe_popen_read("#{bin}/golangci-lint", "completion", "zsh")
(zsh_completion/"_golangci-lint").write output
output = Utils.safe_popen_read("#{bin}/golangci-lint", "completion", "fish")
(fish_completion/"golangci-lint.fish").write output
end
test do
str_version = shell_output("#{bin}/golangci-lint --version")
assert_match "golangci-lint has version #{version} built from", str_version
str_help = shell_output("#{bin}/golangci-lint --help")
str_default = shell_output("#{bin}/golangci-lint")
assert_equal str_default, str_help
assert_match "Usage:", str_help
assert_match "Available Commands:", str_help
(testpath/"try.go").write <<~EOS
package try
func add(nums ...int) (res int) {
for _, n := range nums {
res += n
}
return
}
EOS
args = %w[
--color=never
--disable-all
--issues-exit-code=0
--print-issued-lines=false
--enable=deadcode
].join(" ")
ok_test = shell_output("#{bin}/golangci-lint run #{args} #{testpath}/try.go")
expected_message = "try.go:3:6: `add` is unused (deadcode)"
assert_match expected_message, ok_test
end
end
| bsd-2-clause |
KingBowser/hatter-source-code | hatter.me/js/tinyencrypt/jsapp/cn.aprilsoft.jsapp.text.StringFormat.js | 17853 | // Copyright (c) 2002 M.Inamori,All rights reserved.
// Coded 2/26/02
// usage:
//   format(strFormat[, arg1[, arg2...]])
// e.g. (expected result shown after the call):
//   format("%d", 12345)         12345
//   format("%d", 123.6)         123
//   format("%6d", 123)          123
//   format("%06d", 123)         000123
//   format("%06d", -123)        -00123
//   format("%6.4d", 123)        0123
//   format("%6.8d", 123)        00000123
//   format("%f", 123.45)        123.450000
//   format("%11f", 123.45)      123.450000
//   format("%.3f", 123.45)      123.450
//   format("%7.1f", 123.25)     123.2
//   format("%7.1f", 123.251)    123.3
//   format("%7.0f", 123.45)     123
//   format("%08.2f", 123.45)    00123.45
//   format("%08.2f", -123.45)   -0123.45
//   format("%E", 123.45)        1.234500E+02
//   format("%13E", 123.45)      1.234500E+02
//   format("%.4E", 123.45)      1.2345E+02
//   format("%13.5e", 123.45)    1.23450e+02
//   format("%s", "abc")         abc
//   format("%4s", "abc")        abc
//   format("%.2s", "abc")       ab
//   format("%4.s", "abc")       (empty string)
//   format("%d", 123.45)        123
//   format("%d", "123")         123
//   format("%d", "abc")         0
//   format("%7s", 123.45)       123.45
//   format("%-7d", 123.45)      123
//   format("% f", 123.45)       123.450000
//   format("%+7.3f", 123.45)    +123.450
//   format("%+- 9.2fa", 123.45) +123.45 a
//   format()                    (empty string)
/*
* cn.aprilsoft.jsapp.text.StringFormat.js
* jsapp, string format functions
*
* Copyright(C) Hatter Jiang
*/
(function()
{
// New package: cn.aprilsoft.jsapp.text
Package("cn.aprilsoft.jsapp.text");
Class("cn.aprilsoft.jsapp.text.StringFormat", Extend(), Implement(),
{
format: Static(function()
{
var argv = arguments;
var argc = argv.length;
if(argc == 0)
{
return "";
}
var result = "";
var format = argv[0];
var format_length = format.length;
var flag, width, precision;
flag = 0;
var index = 1;
var mode = 0;
var tmpresult;
var buff;
for(var i = 0; i < format_length; i++)
{
var c = format.charAt(i);
switch(mode)
{
case 0: //normal
if(c == '%')
{
tmpresult = c;
mode = 1;
buff = "";
}
else
result += c;
break;
case 1: //after '%'
if(c == '%')
{
result += c;
mode = 0;
break;
}
if(index >= argc)
{
argv[argc++] = "";
}
width = 0;
precision = -1;
switch(c)
{
case '-':
flag |= 1;
mode = 1;
break;
case '+':
flag |= 2;
mode = 1;
break;
case '0':
flag |= 4;
mode = 2;
break;
case ' ':
flag |= 8;
mode = 1;
break;
case '#':
flag |= 16;
mode = 1;
break;
case '1': case '2': case '3': case '4': case '5':
case '6': case '7': case '8': case '9':
width = parseInt(c);
mode = 2;
break;
case '-':
flag = 1;
mode = 2;
break;
case '.':
width = "";
precision = 0;
mode = 3;
break;
case 'd':
result += ThisClass().toInteger(argv[index], flag, width, precision);
index++;
mode = 0;
break;
case 'f':
result += ThisClass().toFloatingPoint(argv[index], flag, width, 6);
index++;
mode = 0;
break;
case 'e':
result += ThisClass().toExponential(argv[index], flag, width, 6, 'e');
index++;
mode = 0;
break;
case 'E':
result += ThisClass().toExponential(argv[index], flag, width, 6, 'E');
index++;
mode = 0;
break;
case 's':
result += argv[index];
index++;
mode = 0;
break;
default:
result += buff + c;
mode = 0;
break;
}
break;
case 2: //while defining width
switch(c)
{
case '.':
precision = 0;
mode = 3;
break;
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
width = width * 10 + parseInt(c);
mode = 2;
break;
case 'd':
result += ThisClass().toInteger(argv[index], flag, width, precision);
index++;
mode = 0;
break;
case 'f':
result += ThisClass().toFloatingPoint(argv[index], flag, width, 6);
index++;
mode = 0;
break;
case 'e':
result += ThisClass().toExponential(argv[index], flag, width, 6, 'e');
index++;
mode = 0;
break;
case 'E':
result += ThisClass().toExponential(argv[index], flag, width, 6, 'E');
index++;
mode = 0;
break;
case 's':
result += ThisClass().toFormatString(argv[index], width, precision);
index++;
mode = 0;
break;
default:
result += buff + c;
mode = 0;
break;
}
break;
case 3: //while defining precision
switch(c)
{
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
precision = precision * 10 + parseInt(c);
break;
case 'd':
result += ThisClass().toInteger(argv[index], flag, width, precision);
index++;
mode = 0;
break;
case 'f':
result += ThisClass().toFloatingPoint(argv[index], flag, width, precision);
index++;
mode = 0;
break;
case 'e':
result += ThisClass().toExponential(argv[index], flag, width, precision, 'e');
index++;
mode = 0;
break;
case 'E':
result += ThisClass().toExponential(argv[index], flag, width, precision, 'E');
index++;
mode = 0;
break;
case 's':
result += ThisClass().toFormatString(argv[index], width, precision);
index++;
mode = 0;
break;
default:
result += buff + c;
mode = 0;
break;
}
break;
default:
return "error";
}
if(mode)
{
buff += c;
}
}
return result;
}),
// Formats n as a decimal integer ("%d").
// - Coerces strings via parseFloat; anything non-numeric becomes 0.
// - Truncates toward zero and expands JS exponential notation (e.g. 1e21)
//   back into plain digits.
// - Zero-pads up to the precision p, then delegates sign and width handling
//   to procFlag.
toInteger: Private(Static(function (n, f, w, p)
{
    if (typeof n != "number")
    {
        if (typeof n == "string")
        {
            n = parseFloat(n);
            if (isNaN(n))
            {
                n = 0;
            }
        }
        else
        {
            n = 0;
        }
    }
    var str = n.toString();
    //to integer if decimal
    if (-1 < n && n < 1)
    {
        // |n| < 1 truncates to zero.
        str = "0";
    }
    else
    {
        if (n < 0)
        {
            // Work on the absolute value; procFlag re-attaches the sign.
            str = str.substring(1);
        }
        var pos_e = str.indexOf('e');
        if (pos_e != -1)
        { // Exponential notation: rebuild the full digit string.
            // NOTE(review): substring(pos_e + 2) skips "e+", so only positive
            // exponents (very large magnitudes) appear to be expected here —
            // confirm negative exponents cannot reach this branch.
            var exp = parseInt(str.substring(pos_e + 2));
            var pos_dot = str.indexOf('.');
            if (pos_dot == -1)
            {
                // JS switches to exponential at 1e21: 21 zeros are implied.
                str = str.substring(0, pos_e) + "000000000000000000000";
                exp -= 21;
            }
            else
            {
                str = str.substring(0, pos_dot)
                    + str.substring(pos_dot + 1, pos_e) + "00000";
                exp -= str.length - pos_dot;
            }
            // Append the remaining zeros until the exponent is consumed.
            for ( ; exp; exp--)
            {
                str += "0";
            }
        }
        else
        {
            var pos_dot = str.indexOf('.');
            if (pos_dot != -1)
            {
                // Drop the fractional part (truncation, not rounding).
                str = str.substring(0, pos_dot);
            }
        }
    }
    // Zero-pad up to the requested precision.
    var len = str.length;
    if (len < p)
    {
        var c = "0";
        for (var i = p - len; i; i--)
        {
            str = c + str;
        }
        len = p;
    }
    return ThisClass().procFlag(str, f, w - len, n >= 0);
})),
toFloatingPoint: Private(Static(function (n, f, w, p)
{
if(typeof n != "number")
{
if(typeof n == "string")
{
n = parseFloat(n);
if(isNaN(n))
{
n = 0;
}
}
else
{
n = 0;
}
}
var bpositive = (n >= 0);
if(!bpositive)
{
n = -n;
}
str = ThisClass().toFloatingPoint2(n, f, p);
return ThisClass().procFlag(str, f, w - str.length, bpositive);
})),
// Core "%f" worker: renders a non-negative number n as a plain decimal
// string with exactly p fractional digits (no sign, no padding).
// Three jobs: expanding JS exponential notation, rounding the fraction to
// p digits, and zero-filling a too-short fraction.
// NOTE(review): rounding uses a strict "tmp > 5" comparison, so an exact
// half (e.g. 123.25 at p=1) rounds down — see the usage examples above;
// confirm this half-down behavior is intended.
toFloatingPoint2: Private(Static(function (n, f, p)
{
    var str = n.toString();
    //to decimal if exponential
    var pos_e = str.indexOf('e');
    if (pos_e != -1)
    {
        var exp = parseInt(str.substring(pos_e + 1));
        if (exp > 0)
        { // Large magnitude: append zeros until the exponent is consumed.
            var pos_dot = str.indexOf('.');
            if (pos_dot == -1)
            {
                // JS switches to exponential at 1e21: 21 zeros are implied.
                str = str.substring(0, pos_e) + "000000000000000000000";
                exp -= 21;
            }
            else
            {
                str = str.charAt(0) + str.substring(2, pos_e) + "00000";
                exp -= str.length - 1;
            }
            for ( ; exp; exp--)
                str += "0";
        }
        else
        { // Tiny magnitude (negative exponent).
            var equive_p = exp + p;
            if (equive_p < -1) // far below the requested precision
            {
                str = "0";
            }
            else if (equive_p >= 0)
            { // Representable: rebuild as "0.000...digits".
                str = str.substring(0, pos_e);
                var pos_dot = str.indexOf(".");
                if (pos_dot != -1)
                {
                    str = str.charAt(0) + str.substring(2, pos_e);
                }
                str = "000000" + str;
                // exp is negative here, so exp++ walks it up to zero.
                for (exp += 7; exp; exp++)
                {
                    str = "0" + str;
                }
                str = "0." + str;
            }
            else
            { // Borderline: value may round up into the last kept digit.
                var tmp = parseFloat(str.substring(0, pos_e));
                if (tmp > 5)
                { // Rounds up to a single 1 in the last kept position.
                    str = "0.00000";
                    for (var i = exp + 7; i; i++)
                    {
                        str += "0";
                    }
                    str += "1";
                }
                else // rounds down to zero
                {
                    str = "0";
                }
            }
        }
    }
    // Round / pad the fractional part to exactly p digits.
    var len = str.length;
    var pos_dot = str.indexOf(".");
    if (pos_dot != -1)
    {
        var dec = len - pos_dot - 1;
        if (dec > p)
        { // Too many fraction digits: round at position p.
            // tmp is the discarded tail read as "d.rest" for the half test.
            var tmp = parseFloat(str.charAt(pos_dot + p + 1)
                + "." + str.substring(pos_dot + p + 2));
            if (tmp > 5)
            { // Round up (the carry may ripple across the decimal point).
                var i;
                if (n < 1)
                {
                    // Skip the "0." prefix and any leading fraction zeros.
                    i = 2;
                    while (str.charAt(i) == "0")
                    {
                        i++;
                    }
                    tmp = (parseInt(str.substring(i, p + 2)) + 1).toString();
                    if (tmp.length > p + 2 - i)
                    { // The carry lengthened the digit run.
                        if (i == 2)
                        {
                            str = "1." + tmp.substring(1);
                        }
                        else
                        {
                            str = str.substring(0, i - 1) + tmp;
                        }
                    }
                    else
                    {
                        str = str.substring(0, i) + tmp;
                    }
                }
                else
                {
                    // Increment the concatenated integer+kept-fraction digits,
                    // then re-insert the decimal point.
                    tmp = (parseInt(str.substring(0, pos_dot) + str.substring(
                        pos_dot + 1, pos_dot + p + 1)) + 1).toString();
                    if (tmp.length > pos_dot + p) // carry added a digit
                    {
                        str = tmp.substring(0, pos_dot + 1)
                            + "." + tmp.substring(pos_dot + 1);
                    }
                    else
                    {
                        str = tmp.substring(0, pos_dot)
                            + "." + tmp.substring(pos_dot);
                    }
                }
            }
            else
            { // Round down: truncate (drop the dot entirely when p == 0).
                str = str.substring(0, p ? pos_dot + p + 1 : pos_dot);
            }
        }
        else if (dec < p)
        { // Too few fraction digits: append zeros.
            for (var i = p - dec; i; i--)
            {
                str += "0";
            }
        }
    }
    else
    {
        // Integer value: synthesize ".000..." when precision was requested.
        if (p)
        {
            str += ".0";
            for (var i = p - 1; i; i--)
            {
                str += "0";
            }
        }
    }
    return str;
})),
// Formats n in scientific notation ("%e"/"%E"); e is the exponent marker
// character ('e' or 'E'). Normalizes the value to one leading digit,
// formats the mantissa with p fraction digits via toFloatingPoint2, then
// appends a two-digit signed exponent and applies flag/width padding.
toExponential: Private(Static(function (n, f, w, p, e)
{
    if (typeof n != "number")
    {
        if (typeof n == "string")
        {
            n = parseFloat(n);
            if (isNaN(n))
            {
                n = 0;
            }
        }
        else
        {
            n = 0;
        }
    }
    var bpositive = n >= 0;
    if (!bpositive)
    {
        n = -n;
    }
    var str = n.toString();
    var pos_dot = str.indexOf(".");
    var pos_e = str.indexOf("e");
    // 2-bit shape code: bit 1 = has "e", bit 0 = has ".".
    var type = ((pos_e != -1) << 1) + (pos_dot != -1);
    var exp;
    if (type == 0)
    { // Plain integer. NOTE: "if (exp = ...)" is an intentional assignment;
      // a single-digit value keeps exp == 0 and needs no reshaping.
        if (exp = str.length - 1)
        {
            str = str.charAt(0) + "." + str.substring(pos_dot = 1);
        }
    }
    else if (type == 1)
    { // Has a decimal point, no exponent.
        if (n > 10)
        {
            // Shift the point to just after the first digit.
            exp = pos_dot - 1;
            str = str.substring(0, 1) + "."
                + str.substring(1, pos_dot) + str.substring(pos_dot + 1);
            pos_dot = 1;
        }
        else if (n > 1)
        {
            exp = 0;
        }
        else
        {
            // Skip the "0.000..." zeros to find the first significant digit.
            for (var i = 2; ; i++)
            {
                if (str.charAt(i) != "0")
                {
                    exp = 1 - i;
                    str = str.charAt(i) + "." + str.substring(i + 1);
                    break;
                }
            }
            pos_dot = 1;
        }
    }
    else
    { // Already exponential: reuse JS's own exponent for the mantissa.
        exp = parseInt(str.substring(pos_e + 1));
        str = str.substring(0, pos_e);
    }
    str = ThisClass().toFloatingPoint2(parseFloat(str), f, p);
    // Append the sign and (at least) two exponent digits, C-printf style.
    if (exp >= 0)
    {
        str += e + (exp < 10 ? "+0" : "+") + exp;
    }
    else
    {
        str += e + (exp > -10 ? "-0" + (-exp) : exp);
    }
    str = ThisClass().procFlag(str, f, w - str.length, bpositive);
    return str;
})),
toFormatString: Private(Static(function (s, w, p)
{
if(typeof s != "string")
{
s = s.toString();
}
var len = s.length;
if(p >= 0)
{
if(p < len)
{
s = s.substring(0, p);
len = p;
}
}
if(len < w)
{
var c = " ";
for(var i = w - len; i; i--)
{
s = c + s;
}
}
return s;
})),
procFlag: Private(Static(function (str, f, extra, b)
{
var minus = f & 1;
var plus = f & 2;
var space = f & 8;
if(space) //with ' '
{
extra--;
}
extra -= !b + plus > 0;
if((f & 4) > 0 && !minus)
{ //with 0 and not -
if(extra > 0)
{
var c = "0";
for(var i = extra; i; i--)
{
str = c + str;
}
}
if(!b)
{
str = "-" + str;
}
else if(plus)
{
str = "+" + str;
}
}
else
{ //without 0 or with -
if(!b)
{
str = "-" + str;
}
else if(plus)
{
str = "+" + str;
}
var c = " ";
if(extra > 0)
{
var c = " ";
if(minus)
{
for(var i = extra; i; i--)
{
str += c;
}
}
else
{
for(var i = extra; i; i--)
{
str = c + str;
}
}
}
}
if(space)
{
str = " " + str;
}
return str;
}))
});
})();
| bsd-3-clause |
NServiceKit/NServiceKit | src/NServiceKit/WebHost.Endpoints/Support/Markdown/Templates/VarStatementExprBlock.cs | 2752 | using System;
using System.Collections.Generic;
using System.IO;
using NServiceKit.Markdown;
namespace NServiceKit.WebHost.Endpoints.Support.Markdown.Templates
{
/// <summary>
/// Markdown template block for a "@var name = {expression}" statement:
/// evaluates the member expression once to infer the variable's runtime
/// type, then on every render stores the evaluated value into the page's
/// scope arguments under the declared name. Nothing is written to the
/// output stream.
/// </summary>
public class VarStatementExprBlock : EvalExprStatementBase
{
    // Variable name on the left of '='; parsed lazily in OnFirstRun.
    private string varName;
    // Member expression on the right of '='.
    private string memberExpr;

    /// <summary>
    /// Initializes a new instance of the <see cref="VarStatementExprBlock"/> class.
    /// </summary>
    /// <param name="directive">The directive; must be "var".</param>
    /// <param name="line">The full declaration text after the directive.</param>
    /// <exception cref="System.ArgumentException">Thrown when the directive is not "var".</exception>
    public VarStatementExprBlock(string directive, string line)
        : base(line, null)
    {
        if (directive != "var")
            throw new ArgumentException("Expected 'var' got: " + directive);

        // Until the expression has been evaluated once, the static return
        // type is unknown, so default to object.
        this.ReturnType = typeof(object);
    }

    /// <summary>
    /// Parses the declaration on first use, evaluates the expression once to
    /// discover its runtime type, and seeds ScopeArgs with the initial value.
    /// </summary>
    /// <exception cref="InvalidDataException">Thrown when the declaration is not of the form "name = expression".</exception>
    protected override void OnFirstRun()
    {
        if (varName != null)
            return;

        var declaration = Condition.TrimEnd().TrimEnd(';');

        // NOTE(review): Split('=') rejects any right-hand side containing
        // another '=' (e.g. a "==" comparison) via the Length check below —
        // confirm that restriction is intended before relaxing it.
        var parts = declaration.Split('=');
        if (parts.Length != 2)
            throw new InvalidDataException(
                "Invalid var declaration, should be '@var varName = {MemberExpression} [, {VarDeclaration}]' was: " + declaration);

        varName = parts[0].Trim();
        memberExpr = parts[1].Trim();

        // From here on the base class must evaluate only the expression,
        // not the whole "name = expression" declaration.
        this.Condition = memberExpr;

        // Evaluate once now to learn the value's runtime type and to make
        // the variable visible in scope before the first Write().
        const string methodName = "resolveVarType";
        var exprParams = GetExprParams();
        var evaluator = new Evaluator(ReturnType, Condition, methodName, exprParams);
        var result = evaluator.Evaluate(methodName, GetParamValues(ScopeArgs).ToArray());
        ScopeArgs[varName] = result;
        if (result != null)
            this.ReturnType = result.GetType();

        base.OnFirstRun();
    }

    /// <summary>Re-evaluates the expression and updates the scoped variable.</summary>
    ///
    /// <param name="instance"> The view instance being rendered.</param>
    /// <param name="textWriter">The text writer (not written to by this block).</param>
    /// <param name="scopeArgs"> The scope arguments receiving the value.</param>
    public override void Write(MarkdownViewBase instance, TextWriter textWriter, Dictionary<string, object> scopeArgs)
    {
        //Resolve and add to ScopeArgs; this block produces no output text.
        var resultCondition = Evaluate<object>(scopeArgs, true);
        scopeArgs[varName] = resultCondition;
    }
}
} | bsd-3-clause |
wuhengzhi/chromium-crosswalk | base/android/java/src/org/chromium/base/ApplicationStatusManager.java | 8902 | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.base;
import android.app.Activity;
import android.app.Application;
import android.os.Bundle;
import android.view.Window;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
/**
 * Basic application functionality that should be shared among all browser applications.
 *
 * <p>Connects {@link ApplicationStatus} to the {@link Application} lifecycle and replaces each
 * Activity's {@link Window.Callback} with a dynamic proxy so that window-focus changes can be
 * relayed to registered {@link WindowFocusChangedListener}s.
 */
public class ApplicationStatusManager {
    private static final String TOOLBAR_CALLBACK_INTERNAL_WRAPPER_CLASS =
            "android.support.v7.internal.app.ToolbarActionBar$ToolbarCallbackWrapper";
    // In builds using the --use_unpublished_apis flag, the ToolbarActionBar class name does not
    // include the "internal" package.
    private static final String TOOLBAR_CALLBACK_WRAPPER_CLASS =
            "android.support.v7.app.ToolbarActionBar$ToolbarCallbackWrapper";

    /**
     * Interface to be implemented by listeners for window focus events.
     */
    public interface WindowFocusChangedListener {
        /**
         * Called when the window focus changes for {@code activity}.
         * @param activity The {@link Activity} that has a window focus changed event.
         * @param hasFocus Whether or not {@code activity} gained or lost focus.
         */
        public void onWindowFocusChanged(Activity activity, boolean hasFocus);
    }

    private static ObserverList<WindowFocusChangedListener> sWindowFocusListeners =
            new ObserverList<WindowFocusChangedListener>();

    /**
     * Intercepts calls to an existing Window.Callback. Most invocations are passed on directly
     * to the composed Window.Callback but enables intercepting/manipulating others.
     *
     * This is used to relay window focus changes throughout the app and remedy a bug in the
     * appcompat library.
     */
    private static class WindowCallbackProxy implements InvocationHandler {
        private final Window.Callback mCallback;
        private final Activity mActivity;

        public WindowCallbackProxy(Activity activity, Window.Callback callback) {
            mCallback = callback;
            mActivity = activity;
        }

        @Override
        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
            if (method.getName().equals("onWindowFocusChanged") && args.length == 1
                    && args[0] instanceof Boolean) {
                onWindowFocusChanged((boolean) args[0]);
                return null;
            } else {
                try {
                    return method.invoke(mCallback, args);
                } catch (InvocationTargetException e) {
                    // Special-case for when a method is not defined on the underlying
                    // Window.Callback object. Because we're using a Proxy to forward all method
                    // calls, this breaks the Android framework's handling for apps built against
                    // an older SDK. The framework expects an AbstractMethodError but due to
                    // reflection it becomes wrapped inside an InvocationTargetException. Undo the
                    // wrapping to signal the framework accordingly.
                    if (e.getCause() instanceof AbstractMethodError) {
                        throw e.getCause();
                    }
                    throw e;
                }
            }
        }

        public void onWindowFocusChanged(boolean hasFocus) {
            // Forward to the wrapped callback first, then notify our listeners.
            mCallback.onWindowFocusChanged(hasFocus);

            for (WindowFocusChangedListener listener : sWindowFocusListeners) {
                listener.onWindowFocusChanged(mActivity, hasFocus);
            }
        }
    }

    public static void init(Application app) {
        ApplicationStatus.initialize(app);
        app.registerActivityLifecycleCallbacks(new Application.ActivityLifecycleCallbacks() {
            @Override
            public void onActivityCreated(final Activity activity, Bundle savedInstanceState) {
                setWindowFocusChangedCallback(activity);
            }

            // The remaining callbacks only assert that nothing has swapped out the wrapped
            // Window.Callback installed in onActivityCreated(). The identical assertion was
            // previously copy-pasted into each method; it now lives in
            // assertCallbackIsWrapped().
            @Override
            public void onActivityDestroyed(Activity activity) {
                assertCallbackIsWrapped(activity);
            }

            @Override
            public void onActivityPaused(Activity activity) {
                assertCallbackIsWrapped(activity);
            }

            @Override
            public void onActivityResumed(Activity activity) {
                assertCallbackIsWrapped(activity);
            }

            @Override
            public void onActivitySaveInstanceState(Activity activity, Bundle outState) {
                assertCallbackIsWrapped(activity);
            }

            @Override
            public void onActivityStarted(Activity activity) {
                assertCallbackIsWrapped(activity);
            }

            @Override
            public void onActivityStopped(Activity activity) {
                assertCallbackIsWrapped(activity);
            }
        });
    }

    /**
     * Asserts that {@code activity}'s current Window.Callback is one of the wrappers we expect:
     * our own dynamic proxy (installed by {@link #setWindowFocusChangedCallback}) or appcompat's
     * ToolbarCallbackWrapper, which may legitimately re-wrap the callback.
     */
    private static void assertCallbackIsWrapped(Activity activity) {
        assert (Proxy.isProxyClass(activity.getWindow().getCallback().getClass())
                || activity.getWindow().getCallback().getClass().getName().equals(
                           TOOLBAR_CALLBACK_WRAPPER_CLASS)
                || activity.getWindow().getCallback().getClass().getName().equals(
                           TOOLBAR_CALLBACK_INTERNAL_WRAPPER_CLASS));
    }

    /**
     * Registers a listener to receive window focus updates on activities in this application.
     * @param listener Listener to receive window focus events.
     */
    public static void registerWindowFocusChangedListener(WindowFocusChangedListener listener) {
        sWindowFocusListeners.addObserver(listener);
    }

    /**
     * Unregisters a listener from receiving window focus updates on activities in this
     * application.
     * @param listener Listener that doesn't want to receive window focus events.
     */
    public static void unregisterWindowFocusChangedListener(WindowFocusChangedListener listener) {
        sWindowFocusListeners.removeObserver(listener);
    }

    /**
     * When ApplicationStatus initialized after application started, the onActivityCreated(),
     * onActivityStarted() and onActivityResumed() callbacks will be missed.
     * This function will give the chance to simulate these three callbacks.
     */
    public static void informActivityStarted(final Activity activity) {
        setWindowFocusChangedCallback(activity);
        ApplicationStatus.informActivityStarted(activity);
    }

    /** Wraps the Activity's current Window.Callback in a WindowCallbackProxy dynamic proxy. */
    private static void setWindowFocusChangedCallback(final Activity activity) {
        Window.Callback callback = activity.getWindow().getCallback();
        activity.getWindow().setCallback((Window.Callback) Proxy.newProxyInstance(
                Window.Callback.class.getClassLoader(), new Class[] {Window.Callback.class},
                new WindowCallbackProxy(activity, callback)));
    }
}
| bsd-3-clause |
stoiczek/WebRTC | modules/video_render/main/source/video_render_impl.cc | 35631 | /*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_impl.h"
#include "engine_configurations.h"
#include "critical_section_wrapper.h"
#include "video_render_defines.h"
#include "trace.h"
#include "incoming_video_stream.h"
#include "i_video_render.h"
#include <cassert>
#ifndef WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
#if defined (_WIN32)
#include "windows/video_render_windows_impl.h"
#define STANDARD_RENDERING kRenderWindows
#elif defined(MAC_IPHONE) // MAC_IPHONE should go before WEBRTC_MAC_INTEL because WEBRTC_MAC_INTEL gets defined if MAC_IPHONE is defined
#if defined(IPHONE_GLES_RENDERING)
#define STANDARD_RENDERING kRenderiPhone
#include "iPhone/video_render_iphone_impl.h"
#endif
#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
#if defined(COCOA_RENDERING)
#define STANDARD_RENDERING kRenderCocoa
#include "mac/video_render_mac_cocoa_impl.h"
#elif defined(CARBON_RENDERING)
#define STANDARD_RENDERING kRenderCarbon
#include "mac/video_render_mac_carbon_impl.h"
#endif
#elif defined(ANDROID)
#include "Android/video_render_android_impl.h"
#include "Android/video_render_android_surface_view.h"
#include "Android/video_render_android_native_opengl2.h"
#define STANDARD_RENDERING kRenderAndroid
#elif defined(WEBRTC_LINUX)
#include "linux/video_render_linux_impl.h"
#define STANDARD_RENDERING kRenderX11
#else
//Other platforms
#endif
#endif // WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
// For external rendering
#include "external/video_render_external_impl.h"
#ifndef STANDARD_RENDERING
#define STANDARD_RENDERING kRenderExternal
#endif // STANDARD_RENDERING
namespace webrtc {
// Factory for a video render module. A request for kRenderDefault is mapped
// to the compile-time STANDARD_RENDERING backend selected for this platform.
// The caller owns the returned module (release via DestroyVideoRender).
VideoRender*
VideoRender::CreateVideoRender(const WebRtc_Word32 id,
                               void* window,
                               const bool fullscreen,
                               const VideoRenderType videoRenderType/*=kRenderDefault*/)
{
    WEBRTC_TRACE(
                 kTraceModuleCall,
                 kTraceVideoRenderer,
                 id,
                 "CreateVideoRender(videoRenderType: %d, window: %x, fullscreen: %d)",
                 videoRenderType, window, fullscreen);
    VideoRenderType resultVideoRenderType = videoRenderType;
    if (videoRenderType == kRenderDefault)
    {
        resultVideoRenderType = STANDARD_RENDERING;
    }
    return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
                                     fullscreen);
}
// Deletes a module previously returned by CreateVideoRender.
// Safe to call with NULL.
void VideoRender::DestroyVideoRender(
                                     VideoRender* module)
{
    if (module)
    {
        WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer,
                     static_cast<ModuleVideoRenderImpl*> (module)->Id(),
                     "DestroyVideoRender");
        delete module;
    }
}
// Hands the JavaVM pointer to the Android rendering backend.
// Returns -1 on non-Android builds, where the call is meaningless.
WebRtc_Word32 VideoRender::SetAndroidObjects(void *javaVM)
{
#ifdef ANDROID
    return VideoRenderAndroid::SetAndroidEnvVariables(javaVM);
#else
    return -1;
#endif
}
// Constructs the module and instantiates the platform renderer chosen at
// compile time (Windows / iPhone / Cocoa / Carbon / Android / X11 / external).
// _moduleCrit and _streamRenderMap are heap-allocated here and released in
// the destructor. _ptrRenderer remains NULL if creation fails.
ModuleVideoRenderImpl::ModuleVideoRenderImpl(
    const WebRtc_Word32 id,
    const VideoRenderType videoRenderType,
    void* window,
    const bool fullscreen) :
    _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
    _ptrWindow(window), _renderType(videoRenderType),
    _fullScreen(fullscreen), _ptrRenderer(NULL),
    _streamRenderMap(*(new MapWrapper()))
{
    // Create platform specific renderer
    switch (videoRenderType)
    {
#ifndef WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
#if defined(_WIN32)
        case kRenderWindows:
        {
            VideoRenderWindowsImpl* ptrRenderer;
            ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
            if (ptrRenderer)
            {
                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
            }
        }
        break;
#elif defined(MAC_IPHONE)
        case kRenderiPhone:
        {
            VideoRenderIPhoneImpl* ptrRenderer = new VideoRenderIPhoneImpl(_id, videoRenderType, window, _fullScreen);
            if(ptrRenderer)
            {
                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
            }
        }
        break;
#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
#if defined(COCOA_RENDERING)
        case kRenderCocoa:
        {
            VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
            if(ptrRenderer)
            {
                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
            }
        }
        break;
#elif defined(CARBON_RENDERING)
        case kRenderCarbon:
        {
            VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
            if(ptrRenderer)
            {
                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
            }
        }
        break;
#endif
#elif defined(ANDROID)
        case kRenderAndroid:
        {
            // Prefer the OpenGL ES 2 renderer when the device supports it.
            if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
            {
                AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
                ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
                if (ptrRenderer)
                {
                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
                }
            }
            else
            {
                AndroidSurfaceViewRenderer* ptrRenderer = NULL;
                ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
                if (ptrRenderer)
                {
                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
                }
            }
        }
        break;
#elif defined(WEBRTC_LINUX)
        case kRenderX11:
        {
            VideoRenderLinuxImpl* ptrRenderer = NULL;
            ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
            if ( ptrRenderer )
            {
                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
            }
        }
        break;
#else
        // Other platforms
#endif
#endif // WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
        case kRenderExternal:
        {
            VideoRenderExternalImpl* ptrRenderer(NULL);
            ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
                                                      window, _fullScreen);
            if (ptrRenderer)
            {
                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
            }
        }
        break;
        default:
            // Error...
            break;
    }
    if (_ptrRenderer)
    {
        if (_ptrRenderer->Init() == -1)
        {
            // NOTE(review): Init() failure is silently ignored here; the
            // module keeps a renderer that failed to initialize. Confirm
            // whether callers are expected to detect this some other way.
        }
    }
}
// Tears down all registered incoming streams and the platform renderer.
// NOTE(review): the critical section is deleted before the stream map is
// walked, so destruction is assumed to happen with no concurrent users.
ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
{
    delete &_moduleCrit;
    // Delete every IncomingVideoStream still owned by the map.
    while (_streamRenderMap.Size() > 0)
    {
        MapItem* item = _streamRenderMap.First();
        IncomingVideoStream* ptrIncomingStream =
            static_cast<IncomingVideoStream*> (item->GetItem());
        assert(ptrIncomingStream != NULL);
        delete ptrIncomingStream;
        _streamRenderMap.Erase(item);
    }
    delete &_streamRenderMap;
    // Delete platform specific renderer
    if (_ptrRenderer)
    {
        VideoRenderType videoRenderType = _ptrRenderer->RenderType();
        switch (videoRenderType)
        {
            case kRenderExternal:
            {
                VideoRenderExternalImpl
                * ptrRenderer =
                reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
                _ptrRenderer = NULL;
                delete ptrRenderer;
            }
            break;
#ifndef WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
#if defined(_WIN32)
            case kRenderWindows:
            {
                VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
                _ptrRenderer = NULL;
                delete ptrRenderer;
            }
            break;
#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
#if defined(COCOA_RENDERING)
            case kRenderCocoa:
            {
                VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
                _ptrRenderer = NULL;
                delete ptrRenderer;
            }
            break;
#elif defined(CARBON_RENDERING)
            case kRenderCarbon:
            {
                VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
                _ptrRenderer = NULL;
                delete ptrRenderer;
            }
            break;
#endif
#elif defined(MAC_IPHONE)
            case kRenderiPhone:
            break;
#elif defined(ANDROID)
            case kRenderAndroid:
            {
                VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
                _ptrRenderer = NULL;
                delete ptrRenderer;
            }
            break;
#elif defined(WEBRTC_LINUX)
            case kRenderX11:
            {
                VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
                _ptrRenderer = NULL;
                delete ptrRenderer;
            }
            break;
#else
            //other platforms
#endif
#endif // WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
            default:
                // Error...
                break;
        }
    }
}
// Writes this module's version string into `version` and advances
// `position` past the copied characters (excluding the terminator).
// NOTE(review): `remainingBufferInBytes` is checked but never decremented,
// and the string is written at `version` rather than `version + position`;
// confirm against the Module::Version contract before chaining calls.
WebRtc_Word32 ModuleVideoRenderImpl::Version(
    WebRtc_Word8* version,
    WebRtc_UWord32& remainingBufferInBytes,
    WebRtc_UWord32& position) const
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    if (version == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "Version pointer is NULL");
        return -1;
    }
    WebRtc_Word8 ourVersion[256] = "VideoRender 1.1.0";
    WebRtc_Word32 ourLength = (WebRtc_Word32) strlen(ourVersion);
    if ((WebRtc_Word32) remainingBufferInBytes < ourLength + 1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                     "Version buffer not long enough");
        return -1;
    }
    memcpy(version, ourVersion, ourLength);
    version[ourLength] = 0; // null termination
    position += ourLength;
    return 0;
}
// Updates this module's trace id and propagates it to the platform renderer.
WebRtc_Word32 ModuleVideoRenderImpl::ChangeUniqueId(const WebRtc_Word32 id)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "ChangeUniqueId(new id:%d)", id);
    CriticalSectionScoped cs(_moduleCrit);
    _id = id;
    if (_ptrRenderer)
    {
        _ptrRenderer->ChangeUniqueId(_id);
    }
    return 0;
}
// Module-interface stub: this module does no periodic processing, so it
// reports a fixed 50 ms until the (no-op) Process() should run.
WebRtc_Word32 ModuleVideoRenderImpl::TimeUntilNextProcess()
{
    // Not used
    return 50;
}
// Module-interface stub: no periodic work is required by this module.
WebRtc_Word32 ModuleVideoRenderImpl::Process()
{
    // Not used
    return 0;
}
// Returns the window handle the module is currently bound to.
void*
ModuleVideoRenderImpl::Window()
{
    CriticalSectionScoped cs(_moduleCrit);
    return _ptrWindow;
}
// Rebinds rendering to a new window. On iPhone and Mac the platform
// renderer must be re-created around the new window; elsewhere the request
// is forwarded to the existing renderer. Returns 0 on success, -1 on error.
WebRtc_Word32 ModuleVideoRenderImpl::ChangeWindow(void* window)
{
    CriticalSectionScoped cs(_moduleCrit);
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
#ifndef WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
#if defined(MAC_IPHONE) // MAC_IPHONE must go before WEBRTC_MAC or WEBRTC_MAC_INTEL
    // Delete the old renderer before clearing the pointer. The previous
    // order (NULL first, then delete) deleted a NULL pointer and leaked
    // the old renderer instance.
    delete _ptrRenderer;
    _ptrRenderer = NULL;
    VideoRenderIPhoneImpl* ptrRenderer;
    ptrRenderer = new VideoRenderIPhoneImpl(_id, kRenderiPhone, window, _fullScreen);
    if (!ptrRenderer)
    {
        return -1;
    }
    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
    return _ptrRenderer->ChangeWindow(window);
#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
    // Same leak fix as above; also use logical || (was bitwise |) in the
    // preprocessor condition.
    delete _ptrRenderer;
    _ptrRenderer = NULL;
#if defined(COCOA_RENDERING)
    VideoRenderMacCocoaImpl* ptrRenderer;
    ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
#elif defined(CARBON_RENDERING)
    VideoRenderMacCarbonImpl* ptrRenderer;
    ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
#endif
    if (!ptrRenderer)
    {
        return -1;
    }
    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
    return _ptrRenderer->ChangeWindow(window);
#else
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    return _ptrRenderer->ChangeWindow(window);
#endif
#else // WEBRTC_VIDEO_EXTERNAL_CAPTURE_AND_RENDER
    return -1;
#endif
}
// Returns the module's trace id.
WebRtc_Word32 ModuleVideoRenderImpl::Id()
{
    CriticalSectionScoped cs(_moduleCrit);
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    return _id;
}
// Returns the measured incoming frame rate for `streamId`,
// or 0 if the stream is unknown or its map entry is corrupt.
WebRtc_UWord32 ModuleVideoRenderImpl::GetIncomingFrameRate(
    const WebRtc_UWord32 streamId)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, stream: %u", __FUNCTION__, streamId);
    CriticalSectionScoped cs(_moduleCrit);
    MapItem* mapItem = _streamRenderMap.Find(streamId);
    if (mapItem == NULL)
    {
        // This stream doesn't exist
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: stream doesn't exist", __FUNCTION__);
        return 0;
    }
    IncomingVideoStream* incomingStream =
        static_cast<IncomingVideoStream*> (mapItem->GetItem());
    if (incomingStream == NULL)
    {
        // This should never happen; drop the corrupt map entry.
        assert(false);
        _streamRenderMap.Erase(mapItem);
        return 0;
    }
    return incomingStream->IncomingRate();
}
// Creates an IncomingVideoStream for `streamId`, hooks it up to the
// platform renderer's per-stream render callback, and stores it in
// _streamRenderMap (which takes ownership). Returns the callback that
// frame producers should deliver frames to, or NULL on failure.
VideoRenderCallback*
ModuleVideoRenderImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
                                               const WebRtc_UWord32 zOrder,
                                               const float left,
                                               const float top,
                                               const float right,
                                               const float bottom)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, stream: %u", __FUNCTION__, streamId);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return NULL;
    }
    if (_streamRenderMap.Find(streamId) != NULL)
    {
        // The stream already exists...
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: stream already exists", __FUNCTION__);
        return NULL;
    }
    // Create platform independent stream object.
    IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(_id,
                                                                     streamId);
    if (ptrIncomingStream == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: Can't create incoming stream", __FUNCTION__);
        return NULL;
    }
    VideoRenderCallback* ptrRenderCallback =
        _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
                                              right, bottom);
    if (ptrRenderCallback == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: Can't create incoming stream in renderer",
                     __FUNCTION__);
        // Fix: the freshly created stream object leaked on this path.
        delete ptrIncomingStream;
        return NULL;
    }
    if (ptrIncomingStream->SetRenderCallback(ptrRenderCallback) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: Can't set render callback", __FUNCTION__);
        delete ptrIncomingStream;
        _ptrRenderer->DeleteIncomingRenderStream(streamId);
        return NULL;
    }
    VideoRenderCallback* moduleCallback =
        ptrIncomingStream->ModuleCallback();
    // Store the stream; the map owns it from here on.
    _streamRenderMap.Insert(streamId, ptrIncomingStream);
    return moduleCallback;
}
// Removes and deletes the incoming stream `streamId` from both this module
// and the platform renderer. Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::DeleteIncomingRenderStream(
    const WebRtc_UWord32 streamId)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, stream: %u", __FUNCTION__, streamId);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    MapItem* mapItem = _streamRenderMap.Find(streamId);
    if (!mapItem)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: stream doesn't exist", __FUNCTION__);
        return -1;
    }
    IncomingVideoStream* ptrIncomingStream =
        static_cast<IncomingVideoStream*> (mapItem->GetItem());
    delete ptrIncomingStream;
    ptrIncomingStream = NULL;
    _ptrRenderer->DeleteIncomingRenderStream(streamId);
    _streamRenderMap.Erase(mapItem);
    return 0;
}
// Registers an external render callback on an existing incoming stream.
// Returns 0 on success, -1 if the stream is unknown or its entry is corrupt.
WebRtc_Word32 ModuleVideoRenderImpl::AddExternalRenderCallback(
    const WebRtc_UWord32 streamId,
    VideoRenderCallback* renderObject)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, stream: %u, callback: %x", __FUNCTION__, streamId,
                 renderObject);
    CriticalSectionScoped cs(_moduleCrit);
    MapItem* mapItem = _streamRenderMap.Find(streamId);
    if (!mapItem)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: stream doesn't exist", __FUNCTION__);
        return -1;
    }
    IncomingVideoStream* ptrIncomingStream =
        static_cast<IncomingVideoStream*> (mapItem->GetItem());
    if (!ptrIncomingStream)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: could not get stream", __FUNCTION__);
        // Fix: previously fell through and dereferenced the NULL pointer.
        return -1;
    }
    return ptrIncomingStream->SetExternalCallback(renderObject);
}
// Retrieves the z-order and normalized placement rectangle of `streamId`
// from the platform renderer. Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
    const WebRtc_UWord32 streamId,
    WebRtc_UWord32& zOrder,
    float& left,
    float& top,
    float& right,
    float& bottom) const
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, stream: %u", __FUNCTION__, streamId);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
                                                           left, top, right,
                                                           bottom);
}
// Returns the number of currently registered incoming streams.
WebRtc_UWord32 ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    return (WebRtc_UWord32) _streamRenderMap.Size();
}
// Tells whether a stream with the given id has been registered.
bool ModuleVideoRenderImpl::HasIncomingRenderStream(
    const WebRtc_UWord32 streamId) const
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    // A non-NULL map hit means the stream exists.
    return _streamRenderMap.Find(streamId) != NULL;
}
// Not implemented: always returns -1. Both parameters are ignored.
WebRtc_Word32 ModuleVideoRenderImpl::RegisterRawFrameCallback(
    const WebRtc_UWord32 streamId,
    VideoRenderCallback* callbackObj)
{
    return -1;
}
// Starts delivery on the incoming stream `streamId` and then starts the
// platform renderer. Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::StartRender(const WebRtc_UWord32 streamId)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s(%u)", __FUNCTION__, streamId);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    // Start the stream
    MapItem* item = _streamRenderMap.Find(streamId);
    if (item == NULL)
    {
        // Fixed log text: this is the "stream not found" failure path
        // (previously read "Could find render stream").
        WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer,
                     _id, "%s: Could not find render stream %d", __FUNCTION__,
                     streamId);
        return -1;
    }
    IncomingVideoStream* incomingStream =
        static_cast<IncomingVideoStream*> (item->GetItem());
    if (incomingStream->Start() == -1)
    {
        WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer,
                     _id, "%s: Could not start stream %d", __FUNCTION__,
                     incomingStream->StreamId());
        return -1;
    }
    // Start the HW renderer
    if (_ptrRenderer->StartRender() == -1)
    {
        WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer,
                     _id, "%s: Could not start renderer", __FUNCTION__);
        return -1;
    }
    return 0;
}
// Stops delivery on the incoming stream `streamId`.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::StopRender(const WebRtc_UWord32 streamId)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s(%u)", __FUNCTION__, streamId);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s(%d): No renderer", __FUNCTION__, streamId);
        return -1;
    }
    // Stop the incoming stream
    MapItem* item = _streamRenderMap.Find(streamId);
    if (item == NULL)
    {
        // Fixed log text (previously "Could find render stream").
        WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer,
                     _id, "%s: Could not find render stream %d", __FUNCTION__,
                     streamId);
        return -1;
    }
    IncomingVideoStream* incomingStream =
        static_cast<IncomingVideoStream*> (item->GetItem());
    if (incomingStream->Stop() == -1)
    {
        // Fixed log text: this reports a failed *stop*
        // (previously "Could not start stream").
        WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer,
                     _id, "%s: Could not stop stream %d", __FUNCTION__,
                     incomingStream->StreamId());
        return -1;
    }
    return 0;
}
// Resets every registered incoming stream. Returns 0 if all resets
// succeeded, -1 if at least one failed (all are still attempted).
WebRtc_Word32 ModuleVideoRenderImpl::ResetRender()
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    WebRtc_Word32 error = 0;
    // Loop through all incoming streams and stop them
    MapItem* item = _streamRenderMap.First();
    while (item)
    {
        IncomingVideoStream* incomingStream =
            static_cast<IncomingVideoStream*> (item->GetItem());
        if (incomingStream->Reset() == -1)
        {
            WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer,
                         _id, "%s: Could not reset stream %d", __FUNCTION__,
                         incomingStream->StreamId());
            error = -1;
        }
        item = _streamRenderMap.Next(item);
    }
    return error;
}
// Returns the raw video format the platform renderer prefers, defaulting
// to I420 when no renderer exists. ("PerferedVideoType" is the spelling
// used by the IVideoRender interface and cannot be changed here.)
RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (_ptrRenderer == NULL)
    {
        return kVideoI420;
    }
    return _ptrRenderer->PerferedVideoType();
}
// Returns true if the platform renderer is in full-screen mode,
// false when there is no renderer.
bool ModuleVideoRenderImpl::IsFullScreen()
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return false;
    }
    return _ptrRenderer->FullScreen();
}
// Queries the platform renderer for the screen resolution.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::GetScreenResolution(
    WebRtc_UWord32& screenWidth,
    WebRtc_UWord32& screenHeight) const
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        // Fix: `return false` evaluated to 0, reporting success to callers.
        return -1;
    }
    return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
}
// Returns the rendered frame rate of `streamId` as reported by the
// platform renderer. NOTE(review): with no renderer this returns `false`
// (i.e. 0 fps); the unsigned return type leaves no distinct error value.
WebRtc_UWord32 ModuleVideoRenderImpl::RenderFrameRate(
    const WebRtc_UWord32 streamId)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, streamId: %u", __FUNCTION__, streamId);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return false;
    }
    return _ptrRenderer->RenderFrameRate(streamId);
}
// Sets the cropping rectangle (normalized 0..1 coordinates) applied to
// the incoming stream. Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::SetStreamCropping(
    const WebRtc_UWord32 streamId,
    const float left,
    const float top,
    const float right,
    const float bottom)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, l: %1.1f, t: %1.1f, r: %1.1f, b: %1.1f", __FUNCTION__,
                 left, top, right, bottom);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        // Fix: `return false` evaluated to 0, reporting success to callers.
        return -1;
    }
    return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
}
// Enables or disables a transparent render background.
// Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, enable: %d", __FUNCTION__, enable);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        // Fix: `return false` evaluated to 0, reporting success to callers.
        return -1;
    }
    return _ptrRenderer->SetTransparentBackground(enable);
}
// Not supported: always returns -1. Both parameters are ignored.
WebRtc_Word32 ModuleVideoRenderImpl::FullScreenRender(void* window,
                                                      const bool enable)
{
    return -1;
}
// Overlays text (identified by `textId`) on the render surface at the
// given normalized rectangle. Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::SetText(
    const WebRtc_UWord8 textId,
    const WebRtc_UWord8* text,
    const WebRtc_Word32 textLength,
    const WebRtc_UWord32 textColorRef,
    const WebRtc_UWord32 backgroundColorRef,
    const float left, const float top,
    const float right,
    const float bottom)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
                                 backgroundColorRef, left, top, right, bottom);
}
// Overlays a bitmap (identified by `pictureId`) on the render surface at
// the given normalized rectangle. Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
                                               const WebRtc_UWord8 pictureId,
                                               const void* colorKey,
                                               const float left,
                                               const float top,
                                               const float right,
                                               const float bottom)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
                                   right, bottom);
}
// Copies the most recently rendered frame of `streamId` into `frame`.
// NOTE(review): an unknown stream returns 0 (not -1), leaving `frame`
// untouched -- confirm callers distinguish this from success.
WebRtc_Word32 ModuleVideoRenderImpl::GetLastRenderedFrame(
    const WebRtc_UWord32 streamId,
    VideoFrame &frame) const
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    MapItem *item = _streamRenderMap.Find(streamId);
    if (item == NULL)
    {
        // This stream doesn't exist
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: stream doesn't exist", __FUNCTION__);
        return 0;
    }
    IncomingVideoStream* incomingStream =
        static_cast<IncomingVideoStream*> (item->GetItem());
    if (incomingStream == NULL)
    {
        // This should never happen; drop the corrupt map entry.
        assert(false);
        _streamRenderMap.Erase(item);
        return 0;
    }
    return incomingStream->GetLastRenderedFrame(frame);
}
// Reconfigures z-order and normalized placement of the stream in the
// platform renderer. Returns 0 on success, -1 on failure.
WebRtc_Word32 ModuleVideoRenderImpl::ConfigureRenderer(
    const WebRtc_UWord32 streamId,
    const unsigned int zOrder,
    const float left,
    const float top,
    const float right,
    const float bottom)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s, l: %1.1f, t: %1.1f, r: %1.1f, b: %1.1f", __FUNCTION__,
                 left, top, right, bottom);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        // Fix: `return false` evaluated to 0, reporting success to callers.
        return -1;
    }
    return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
                                           bottom);
}
// Sets the image shown for `streamId` until the first real frame arrives.
// Returns 0 on success, -1 if the module or stream is missing.
WebRtc_Word32 ModuleVideoRenderImpl::SetStartImage(
    const WebRtc_UWord32 streamId,
    const VideoFrame& videoFrame)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    MapItem *item = _streamRenderMap.Find(streamId);
    if (item == NULL)
    {
        // This stream doesn't exist
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: stream doesn't exist", __FUNCTION__);
        return -1;
    }
    IncomingVideoStream* incomingStream =
        static_cast<IncomingVideoStream*> (item->GetItem());
    if (incomingStream == NULL)
    {
        // This should never happen; drop the corrupt map entry.
        assert(false);
        _streamRenderMap.Erase(item);
        return 0;
    }
    return incomingStream->SetStartImage(videoFrame);
}
// Sets the image shown for `streamId` after `timeout` (ms) with no new
// frames. Returns 0 on success, -1 if the module or stream is missing.
WebRtc_Word32 ModuleVideoRenderImpl::SetTimeoutImage(
    const WebRtc_UWord32 streamId,
    const VideoFrame& videoFrame,
    const WebRtc_UWord32 timeout)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    MapItem *item = _streamRenderMap.Find(streamId);
    if (item == NULL)
    {
        // This stream doesn't exist
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: stream doesn't exist", __FUNCTION__);
        return -1;
    }
    IncomingVideoStream* incomingStream =
        static_cast<IncomingVideoStream*> (item->GetItem());
    if (incomingStream == NULL)
    {
        // This should never happen; drop the corrupt map entry.
        assert(false);
        _streamRenderMap.Erase(item);
        return 0;
    }
    return incomingStream->SetTimeoutImage(videoFrame, timeout);
}
// Enables/disables mirroring of the stream around the X and/or Y axis.
// NOTE(review): an unknown stream id returns 0 here (unlike most siblings
// which return -1) -- confirm whether callers rely on that.
WebRtc_Word32 ModuleVideoRenderImpl::MirrorRenderStream(const int renderId,
                                                        const bool enable,
                                                        const bool mirrorXAxis,
                                                        const bool mirrorYAxis)
{
    WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
                 "%s", __FUNCTION__);
    CriticalSectionScoped cs(_moduleCrit);
    if (!_ptrRenderer)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: No renderer", __FUNCTION__);
        return -1;
    }
    MapItem *item = _streamRenderMap.Find(renderId);
    if (item == NULL)
    {
        // This stream doesn't exist
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: stream doesn't exist", __FUNCTION__);
        return 0;
    }
    IncomingVideoStream* incomingStream =
        static_cast<IncomingVideoStream*> (item->GetItem());
    if (incomingStream == NULL)
    {
        // This should never happen; drop the corrupt map entry.
        assert(false);
        _streamRenderMap.Erase(item);
        return 0;
    }
    return incomingStream->EnableMirroring(enable, mirrorXAxis, mirrorYAxis);
}
} //namespace webrtc
| bsd-3-clause |
ThreeTen/threetenbp | src/main/java/org/threeten/bp/format/SimpleDateTimeFormatStyleProvider.java | 4487 | /*
* Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.threeten.bp.format;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Locale;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.threeten.bp.chrono.Chronology;
/**
* The Service Provider Implementation to obtain date-time formatters for a style.
* <p>
* This implementation is based on extraction of data from a {@link SimpleDateFormat}.
*
* <h3>Specification for implementors</h3>
* This class is immutable and thread-safe.
*/
final class SimpleDateTimeFormatStyleProvider extends DateTimeFormatStyleProvider {
    // TODO: Better implementation based on CLDR
    /**
     * Cache of formatters, keyed by chronology id, locale and styles.
     * Values are either a {@link DateTimeFormatter} or the empty string,
     * which acts as a negative-cache marker for unconvertible patterns.
     */
    private static final ConcurrentMap<String, Object> FORMATTER_CACHE =
            new ConcurrentHashMap<String, Object>(16, 0.75f, 2);
    @Override
    public Locale[] getAvailableLocales() {
        return DateFormat.getAvailableLocales();
    }
    /**
     * Obtains a localized formatter for the given date/time styles by
     * extracting the pattern of the matching {@link SimpleDateFormat}.
     * At least one of {@code dateStyle} and {@code timeStyle} must be
     * non-null. Results (including failures) are cached per key; the
     * benign race on concurrent first computation is resolved by
     * {@code putIfAbsent}.
     *
     * @throws IllegalArgumentException if both styles are null or the
     *     platform format cannot be converted to a pattern
     */
    @Override
    public DateTimeFormatter getFormatter(
            FormatStyle dateStyle, FormatStyle timeStyle, Chronology chrono, Locale locale) {
        if (dateStyle == null && timeStyle == null) {
            throw new IllegalArgumentException("Date and Time style must not both be null");
        }
        String key = chrono.getId() + '|' + locale.toString() + '|' + dateStyle + timeStyle;
        Object cached = FORMATTER_CACHE.get(key);
        if (cached != null) {
            // The empty string marks a previously failed conversion.
            if (cached.equals("")) {
                throw new IllegalArgumentException("Unable to convert DateFormat to DateTimeFormatter");
            }
            return (DateTimeFormatter) cached;
        }
        DateFormat dateFormat;
        if (dateStyle != null) {
            if (timeStyle != null) {
                dateFormat = DateFormat.getDateTimeInstance(convertStyle(dateStyle), convertStyle(timeStyle), locale);
            } else {
                dateFormat = DateFormat.getDateInstance(convertStyle(dateStyle), locale);
            }
        } else {
            dateFormat = DateFormat.getTimeInstance(convertStyle(timeStyle), locale);
        }
        if (dateFormat instanceof SimpleDateFormat) {
            String pattern = ((SimpleDateFormat) dateFormat).toPattern();
            DateTimeFormatter formatter = new DateTimeFormatterBuilder().appendPattern(pattern).toFormatter(locale);
            FORMATTER_CACHE.putIfAbsent(key, formatter);
            return formatter;
        }
        FORMATTER_CACHE.putIfAbsent(key, "");
        throw new IllegalArgumentException("Unable to convert DateFormat to DateTimeFormatter");
    }
    /**
     * Converts the enum style to the old format style.
     * @param style the enum style, not null
     * @return the int style
     */
    private int convertStyle(FormatStyle style) {
        return style.ordinal();  // indices happen to align
    }
}
| bsd-3-clause |
chirilo/remo | vendor-local/lib/python/rest_framework/pagination.py | 26877 | # coding: utf-8
"""
Pagination serializers determine the structure of the output that should
be used for paginated responses.
"""
from __future__ import unicode_literals
from base64 import b64encode, b64decode
from collections import namedtuple
from django.core.paginator import InvalidPage, Paginator as DjangoPaginator
from django.template import Context, loader
from django.utils import six
from django.utils.six.moves.urllib import parse as urlparse
from django.utils.translation import ugettext as _
from rest_framework.compat import OrderedDict
from rest_framework.exceptions import NotFound
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework.utils.urls import (
replace_query_param, remove_query_param
)
import warnings
def _positive_int(integer_string, strict=False, cutoff=None):
"""
Cast a string to a strictly positive integer.
"""
ret = int(integer_string)
if ret < 0 or (ret == 0 and strict):
raise ValueError()
if cutoff:
ret = min(ret, cutoff)
return ret
def _divide_with_ceil(a, b):
"""
Returns 'a' divded by 'b', with any remainder rounded up.
"""
if a % b:
return (a // b) + 1
return a // b
def _get_count(queryset):
"""
Determine an object count, supporting either querysets or regular lists.
"""
try:
return queryset.count()
except (AttributeError, TypeError):
return len(queryset)
def _get_displayed_page_numbers(current, final):
"""
This utility function determines a list of page numbers to display.
This gives us a nice contextually relevant set of page numbers.
For example:
current=14, final=16 -> [1, None, 13, 14, 15, 16]
This implementation gives one page to each side of the cursor,
or two pages to the side when the cursor is at the edge, then
ensures that any breaks between non-continous page numbers never
remove only a single page.
For an alernativative implementation which gives two pages to each side of
the cursor, eg. as in GitHub issue list pagination, see:
https://gist.github.com/tomchristie/321140cebb1c4a558b15
"""
assert current >= 1
assert final >= current
if final <= 5:
return list(range(1, final + 1))
# We always include the first two pages, last two pages, and
# two pages either side of the current page.
included = set((
1,
current - 1, current, current + 1,
final
))
# If the break would only exclude a single page number then we
# may as well include the page number instead of the break.
if current <= 4:
included.add(2)
included.add(3)
if current >= final - 3:
included.add(final - 1)
included.add(final - 2)
# Now sort the page numbers and drop anything outside the limits.
included = [
idx for idx in sorted(list(included))
if idx > 0 and idx <= final
]
# Finally insert any `...` breaks
if current > 4:
included.insert(1, None)
if current < final - 3:
included.insert(len(included) - 1, None)
return included
def _get_page_links(page_numbers, current, url_func):
    """
    Map a list of page numbers (with `None` entries marking breaks)
    onto a list of `PageLink` objects for template rendering.
    """
    def make_link(number):
        # `None` marks a '...' break in the page number sequence.
        if number is None:
            return PAGE_BREAK
        return PageLink(
            url=url_func(number),
            number=number,
            is_active=(number == current),
            is_break=False
        )

    return [make_link(number) for number in page_numbers]
def _decode_cursor(encoded):
    """
    Given a string representing an encoded cursor, return a `Cursor` instance.

    Returns `None` when the string cannot be decoded, so callers can treat
    malformed cursors as invalid input rather than server errors.
    """
    # The offset in the cursor is only used to disambiguate nearly-unique
    # orderings (eg millisecond precision creation timestamps).  We guard
    # against malicious users attempting to cause expensive database
    # queries by hard-capping the maximum possible offset.
    OFFSET_CUTOFF = 1000

    try:
        querystring = b64decode(encoded.encode('ascii')).decode('ascii')
        tokens = urlparse.parse_qs(querystring, keep_blank_values=True)

        offset = _positive_int(tokens.get('o', ['0'])[0], cutoff=OFFSET_CUTOFF)
        reverse = bool(int(tokens.get('r', ['0'])[0]))
        position = tokens.get('p', [None])[0]
    except (TypeError, ValueError):
        return None

    return Cursor(offset=offset, reverse=reverse, position=position)
def _encode_cursor(cursor):
"""
Given a Cursor instance, return an encoded string representation.
"""
tokens = {}
if cursor.offset != 0:
tokens['o'] = str(cursor.offset)
if cursor.reverse:
tokens['r'] = '1'
if cursor.position is not None:
tokens['p'] = cursor.position
querystring = urlparse.urlencode(tokens, doseq=True)
return b64encode(querystring.encode('ascii')).decode('ascii')
def _reverse_ordering(ordering_tuple):
"""
Given an order_by tuple such as `('-created', 'uuid')` reverse the
ordering and return a new tuple, eg. `('created', '-uuid')`.
"""
def invert(x):
return x[1:] if (x.startswith('-')) else '-' + x
return tuple([invert(item) for item in ordering_tuple])
# Internal representation of a pagination cursor: an `offset` into the
# result set, a `reverse` direction flag, and the `position` value to
# filter against (may be None).
Cursor = namedtuple('Cursor', ['offset', 'reverse', 'position'])
# A single page control rendered by the browsable API templates.
PageLink = namedtuple('PageLink', ['url', 'number', 'is_active', 'is_break'])
# Sentinel PageLink representing a '...' break between page numbers.
PAGE_BREAK = PageLink(url=None, number=None, is_active=False, is_break=True)
class BasePagination(object):
    """
    Base class for pagination styles.

    Subclasses must implement `paginate_queryset()` and
    `get_paginated_response()`, and should implement `to_html()` if they
    support rendering page controls in the browsable API.
    """
    # Set to True by subclasses when browsable-API page controls
    # should be displayed.
    display_page_controls = False

    def paginate_queryset(self, queryset, request, view=None):  # pragma: no cover
        raise NotImplementedError('paginate_queryset() must be implemented.')

    def get_paginated_response(self, data):  # pragma: no cover
        raise NotImplementedError('get_paginated_response() must be implemented.')

    def to_html(self):  # pragma: no cover
        raise NotImplementedError('to_html() must be implemented to display page controls.')
class PageNumberPagination(BasePagination):
    """
    A simple page number based style that supports page numbers as
    query parameters. For example:

    http://api.example.org/accounts/?page=4
    http://api.example.org/accounts/?page=4&page_size=100
    """
    # The default page size.
    # Defaults to `None`, meaning pagination is disabled.
    page_size = api_settings.PAGE_SIZE

    # Client can control the page using this query parameter.
    page_query_param = 'page'

    # Client can control the page size using this query parameter.
    # Default is 'None'. Set to eg 'page_size' to enable usage.
    page_size_query_param = None

    # Set to an integer to limit the maximum page size the client may request.
    # Only relevant if 'page_size_query_param' has also been set.
    max_page_size = None

    # Strings a client may send as the page number to request the last page.
    last_page_strings = ('last',)

    template = 'rest_framework/pagination/numbers.html'

    invalid_page_message = _('Invalid page "{page_number}": {message}.')

    def _handle_backwards_compat(self, view):
        """
        Prior to version 3.1, pagination was handled in the view, and the
        attributes were set there. The attributes should now be set on
        the pagination class, but the old style is still pending deprecation.
        """
        assert not (
            getattr(view, 'pagination_serializer_class', None) or
            getattr(api_settings, 'DEFAULT_PAGINATION_SERIALIZER_CLASS', None)
        ), (
            "The pagination_serializer_class attribute and "
            "DEFAULT_PAGINATION_SERIALIZER_CLASS setting have been removed as "
            "part of the 3.1 pagination API improvement. See the pagination "
            "documentation for details on the new API."
        )
        # Map deprecated settings keys onto pagination-class attributes.
        for (settings_key, attr_name) in (
            ('PAGINATE_BY', 'page_size'),
            ('PAGINATE_BY_PARAM', 'page_size_query_param'),
            ('MAX_PAGINATE_BY', 'max_page_size')
        ):
            value = getattr(api_settings, settings_key, None)
            if value is not None:
                setattr(self, attr_name, value)
                warnings.warn(
                    "The `%s` settings key is pending deprecation. "
                    "Use the `%s` attribute on the pagination class instead." % (
                        settings_key, attr_name
                    ),
                    PendingDeprecationWarning,
                )
        # Map deprecated view attributes onto pagination-class attributes.
        for (view_attr, attr_name) in (
            ('paginate_by', 'page_size'),
            ('page_query_param', 'page_query_param'),
            ('paginate_by_param', 'page_size_query_param'),
            ('max_paginate_by', 'max_page_size')
        ):
            value = getattr(view, view_attr, None)
            if value is not None:
                setattr(self, attr_name, value)
                warnings.warn(
                    "The `%s` view attribute is pending deprecation. "
                    "Use the `%s` attribute on the pagination class instead." % (
                        view_attr, attr_name
                    ),
                    PendingDeprecationWarning,
                )

    def paginate_queryset(self, queryset, request, view=None):
        """
        Paginate a queryset if required, either returning a
        page object, or `None` if pagination is not configured for this view.

        Raises `NotFound` when the requested page number is invalid.
        """
        self._handle_backwards_compat(view)

        page_size = self.get_page_size(request)
        if not page_size:
            return None

        paginator = DjangoPaginator(queryset, page_size)
        page_number = request.query_params.get(self.page_query_param, 1)
        if page_number in self.last_page_strings:
            page_number = paginator.num_pages

        try:
            self.page = paginator.page(page_number)
        except InvalidPage as exc:
            msg = self.invalid_page_message.format(
                page_number=page_number, message=six.text_type(exc)
            )
            raise NotFound(msg)

        # The browsable API should display pagination controls only when
        # there is more than one page.  (This previously tested
        # `paginator.count > 1`, which incorrectly displayed controls even
        # when all of the results fit on a single page.)
        if paginator.num_pages > 1 and self.template is not None:
            self.display_page_controls = True

        self.request = request
        return list(self.page)

    def get_paginated_response(self, data):
        """Return a `Response` wrapping `data` with count/next/previous keys."""
        return Response(OrderedDict([
            ('count', self.page.paginator.count),
            ('next', self.get_next_link()),
            ('previous', self.get_previous_link()),
            ('results', data)
        ]))

    def get_page_size(self, request):
        """
        Return the page size to use: the client-requested size (capped at
        `max_page_size`) when `page_size_query_param` is enabled and valid,
        otherwise the configured default.
        """
        if self.page_size_query_param:
            try:
                return _positive_int(
                    request.query_params[self.page_size_query_param],
                    strict=True,
                    cutoff=self.max_page_size
                )
            except (KeyError, ValueError):
                pass

        return self.page_size

    def get_next_link(self):
        """Return the URL of the next page, or `None` on the last page."""
        if not self.page.has_next():
            return None
        url = self.request.build_absolute_uri()
        page_number = self.page.next_page_number()
        return replace_query_param(url, self.page_query_param, page_number)

    def get_previous_link(self):
        """Return the URL of the previous page, or `None` on the first page."""
        if not self.page.has_previous():
            return None
        url = self.request.build_absolute_uri()
        page_number = self.page.previous_page_number()
        if page_number == 1:
            # Page one is the canonical unpaginated URL; drop the parameter.
            return remove_query_param(url, self.page_query_param)
        return replace_query_param(url, self.page_query_param, page_number)

    def get_html_context(self):
        """Build the template context for the browsable API page controls."""
        base_url = self.request.build_absolute_uri()

        def page_number_to_url(page_number):
            if page_number == 1:
                return remove_query_param(base_url, self.page_query_param)
            else:
                return replace_query_param(base_url, self.page_query_param, page_number)

        current = self.page.number
        final = self.page.paginator.num_pages
        page_numbers = _get_displayed_page_numbers(current, final)
        page_links = _get_page_links(page_numbers, current, page_number_to_url)

        return {
            'previous_url': self.get_previous_link(),
            'next_url': self.get_next_link(),
            'page_links': page_links
        }

    def to_html(self):
        """Render the pagination controls for the browsable API."""
        template = loader.get_template(self.template)
        context = Context(self.get_html_context())
        return template.render(context)
class LimitOffsetPagination(BasePagination):
    """
    A limit/offset based style. For example:

    http://api.example.org/accounts/?limit=100
    http://api.example.org/accounts/?offset=400&limit=100
    """
    # Default number of items per page; `None` disables pagination.
    default_limit = api_settings.PAGE_SIZE
    limit_query_param = 'limit'
    offset_query_param = 'offset'
    # Set to an integer to cap the limit a client may request.
    max_limit = None
    template = 'rest_framework/pagination/numbers.html'

    def paginate_queryset(self, queryset, request, view=None):
        """
        Slice the queryset according to the limit/offset query parameters,
        or return `None` if pagination is not configured for this view.
        """
        self.limit = self.get_limit(request)
        if self.limit is None:
            # No client-supplied limit and no default configured:
            # pagination is disabled.  (Without this guard the comparison
            # and slice arithmetic below would fail with a TypeError.)
            return None

        self.offset = self.get_offset(request)
        self.count = _get_count(queryset)
        self.request = request
        if self.count > self.limit and self.template is not None:
            self.display_page_controls = True
        return list(queryset[self.offset:self.offset + self.limit])

    def get_paginated_response(self, data):
        """Return a `Response` wrapping `data` with count/next/previous keys."""
        return Response(OrderedDict([
            ('count', self.count),
            ('next', self.get_next_link()),
            ('previous', self.get_previous_link()),
            ('results', data)
        ]))

    def get_limit(self, request):
        """
        Return the page size: the client-requested limit (capped at
        `max_limit`) when valid, otherwise the configured default.
        """
        if self.limit_query_param:
            try:
                return _positive_int(
                    request.query_params[self.limit_query_param],
                    cutoff=self.max_limit
                )
            except (KeyError, ValueError):
                pass

        return self.default_limit

    def get_offset(self, request):
        """Return the client-requested offset, defaulting to zero."""
        try:
            return _positive_int(
                request.query_params[self.offset_query_param],
            )
        except (KeyError, ValueError):
            return 0

    def get_next_link(self):
        """Return the URL of the next page, or `None` at the end."""
        if self.offset + self.limit >= self.count:
            return None
        url = self.request.build_absolute_uri()
        offset = self.offset + self.limit
        return replace_query_param(url, self.offset_query_param, offset)

    def get_previous_link(self):
        """Return the URL of the previous page, or `None` at the start."""
        if self.offset <= 0:
            return None

        url = self.request.build_absolute_uri()

        if self.offset - self.limit <= 0:
            # The first page is the canonical unpaginated URL.
            return remove_query_param(url, self.offset_query_param)

        offset = self.offset - self.limit
        return replace_query_param(url, self.offset_query_param, offset)

    def get_html_context(self):
        """Build the template context for the browsable API page controls."""
        base_url = self.request.build_absolute_uri()
        current = _divide_with_ceil(self.offset, self.limit) + 1

        # The number of pages is a little bit fiddly.
        # We need to sum both the number of pages from current offset to end
        # plus the number of pages up to the current offset.
        # When offset is not strictly divisible by the limit then we may
        # end up introducing an extra page as an artifact.
        final = (
            _divide_with_ceil(self.count - self.offset, self.limit) +
            _divide_with_ceil(self.offset, self.limit)
        )

        def page_number_to_url(page_number):
            if page_number == 1:
                return remove_query_param(base_url, self.offset_query_param)
            else:
                offset = self.offset + ((page_number - current) * self.limit)
                return replace_query_param(base_url, self.offset_query_param, offset)

        page_numbers = _get_displayed_page_numbers(current, final)
        page_links = _get_page_links(page_numbers, current, page_number_to_url)

        return {
            'previous_url': self.get_previous_link(),
            'next_url': self.get_next_link(),
            'page_links': page_links
        }

    def to_html(self):
        """Render the pagination controls for the browsable API."""
        template = loader.get_template(self.template)
        context = Context(self.get_html_context())
        return template.render(context)
class CursorPagination(BasePagination):
    """
    The cursor pagination implementation is necessarily complex.
    For an overview of the position/offset style we use, see this post:
    http://cramer.io/2011/03/08/building-cursors-for-the-disqus-api/
    """
    cursor_query_param = 'cursor'
    # Number of items per page; `None` means pagination is disabled.
    page_size = api_settings.PAGE_SIZE
    invalid_cursor_message = _('Invalid cursor')
    # Default ordering; may be superseded by an ordering filter on the view.
    ordering = '-created'
    template = 'rest_framework/pagination/previous_and_next.html'

    def paginate_queryset(self, queryset, request, view=None):
        """
        Slice the queryset according to the cursor in the request and
        return the page as a list, recording next/previous state on `self`.
        Raises `NotFound` if the cursor cannot be decoded.
        """
        self.base_url = request.build_absolute_uri()
        self.ordering = self.get_ordering(request, queryset, view)

        # Determine if we have a cursor, and if so then decode it.
        encoded = request.query_params.get(self.cursor_query_param)
        if encoded is None:
            self.cursor = None
            (offset, reverse, current_position) = (0, False, None)
        else:
            self.cursor = _decode_cursor(encoded)
            if self.cursor is None:
                raise NotFound(self.invalid_cursor_message)
            (offset, reverse, current_position) = self.cursor

        # Cursor pagination always enforces an ordering.
        if reverse:
            queryset = queryset.order_by(*_reverse_ordering(self.ordering))
        else:
            queryset = queryset.order_by(*self.ordering)

        # If we have a cursor with a fixed position then filter by that.
        if current_position is not None:
            order = self.ordering[0]
            is_reversed = order.startswith('-')
            order_attr = order.lstrip('-')

            # Test for: (cursor reversed) XOR (queryset reversed)
            if self.cursor.reverse != is_reversed:
                kwargs = {order_attr + '__lt': current_position}
            else:
                kwargs = {order_attr + '__gt': current_position}

            queryset = queryset.filter(**kwargs)

        # If we have an offset cursor then offset the entire page by that amount.
        # We also always fetch an extra item in order to determine if there is a
        # page following on from this one.
        results = list(queryset[offset:offset + self.page_size + 1])
        self.page = list(results[:self.page_size])

        # Determine the position of the final item following the page.
        if len(results) > len(self.page):
            has_following_postion = True
            following_position = self._get_position_from_instance(results[-1], self.ordering)
        else:
            has_following_postion = False
            following_position = None

        # If we have a reverse queryset, then the query ordering was in reverse
        # so we need to reverse the items again before returning them to the user.
        if reverse:
            self.page = list(reversed(self.page))

        if reverse:
            # Determine next and previous positions for reverse cursors.
            self.has_next = (current_position is not None) or (offset > 0)
            self.has_previous = has_following_postion
            if self.has_next:
                self.next_position = current_position
            if self.has_previous:
                self.previous_position = following_position
        else:
            # Determine next and previous positions for forward cursors.
            self.has_next = has_following_postion
            self.has_previous = (current_position is not None) or (offset > 0)
            if self.has_next:
                self.next_position = following_position
            if self.has_previous:
                self.previous_position = current_position

        # Display page controls in the browsable API if there is more
        # than one page.
        if (self.has_previous or self.has_next) and self.template is not None:
            self.display_page_controls = True

        return self.page

    def get_next_link(self):
        """Return the URL of the next page, or `None` if there is none."""
        if not self.has_next:
            return None

        if self.cursor and self.cursor.reverse and self.cursor.offset != 0:
            # If we're reversing direction and we have an offset cursor
            # then we cannot use the first position we find as a marker.
            compare = self._get_position_from_instance(self.page[-1], self.ordering)
        else:
            compare = self.next_position
        offset = 0

        # Walk backwards through the page looking for a unique position
        # to use as the cursor's filter marker.
        for item in reversed(self.page):
            position = self._get_position_from_instance(item, self.ordering)
            if position != compare:
                # The item in this position and the item following it
                # have different positions. We can use this position as
                # our marker.
                break

            # The item in this position has the same position as the item
            # following it, we can't use it as a marker position, so increment
            # the offset and keep seeking to the previous item.
            compare = position
            offset += 1
        else:
            # There were no unique positions in the page.
            if not self.has_previous:
                # We are on the first page.
                # Our cursor will have an offset equal to the page size,
                # but no position to filter against yet.
                offset = self.page_size
                position = None
            elif self.cursor.reverse:
                # The change in direction will introduce a paging artifact,
                # where we end up skipping forward a few extra items.
                offset = 0
                position = self.previous_position
            else:
                # Use the position from the existing cursor and increment
                # its offset by the page size.
                offset = self.cursor.offset + self.page_size
                position = self.previous_position

        cursor = Cursor(offset=offset, reverse=False, position=position)
        encoded = _encode_cursor(cursor)
        return replace_query_param(self.base_url, self.cursor_query_param, encoded)

    def get_previous_link(self):
        """Return the URL of the previous page, or `None` if there is none."""
        if not self.has_previous:
            return None

        if self.cursor and not self.cursor.reverse and self.cursor.offset != 0:
            # We're changing direction from a forward offset cursor, so
            # we cannot use the stored position as a marker; use the first
            # item on the page instead.
            compare = self._get_position_from_instance(self.page[0], self.ordering)
        else:
            compare = self.previous_position
        offset = 0

        # Walk forwards through the page looking for a unique position
        # to use as the cursor's filter marker.
        for item in self.page:
            position = self._get_position_from_instance(item, self.ordering)
            if position != compare:
                # The item in this position and the item following it
                # have different positions. We can use this position as
                # our marker.
                break

            # The item in this position has the same position as the item
            # following it, we can't use it as a marker position, so increment
            # the offset and keep seeking to the previous item.
            compare = position
            offset += 1
        else:
            # There were no unique positions in the page.
            if not self.has_next:
                # We are on the final page.
                # Our cursor will have an offset equal to the page size,
                # but no position to filter against yet.
                offset = self.page_size
                position = None
            elif self.cursor.reverse:
                # Use the position from the existing cursor and increment
                # its offset by the page size.
                offset = self.cursor.offset + self.page_size
                position = self.next_position
            else:
                # The change in direction will introduce a paging artifact,
                # where we end up skipping back a few extra items.
                offset = 0
                position = self.next_position

        cursor = Cursor(offset=offset, reverse=True, position=position)
        encoded = _encode_cursor(cursor)
        return replace_query_param(self.base_url, self.cursor_query_param, encoded)

    def get_ordering(self, request, queryset, view):
        """
        Return a tuple of strings, that may be used in an `order_by` method.
        """
        ordering_filters = [
            filter_cls for filter_cls in getattr(view, 'filter_backends', [])
            if hasattr(filter_cls, 'get_ordering')
        ]

        if ordering_filters:
            # If a filter exists on the view that implements `get_ordering`
            # then we defer to that filter to determine the ordering.
            filter_cls = ordering_filters[0]
            filter_instance = filter_cls()
            ordering = filter_instance.get_ordering(request, queryset, view)
            assert ordering is not None, (
                'Using cursor pagination, but filter class {filter_cls} '
                'returned a `None` ordering.'.format(
                    filter_cls=filter_cls.__name__
                )
            )
        else:
            # The default case is to check for an `ordering` attribute
            # on this pagination instance.
            ordering = self.ordering
            assert ordering is not None, (
                'Using cursor pagination, but no ordering attribute was declared '
                'on the pagination class.'
            )

        assert isinstance(ordering, (six.string_types, list, tuple)), (
            'Invalid ordering. Expected string or tuple, but got {type}'.format(
                type=type(ordering).__name__
            )
        )

        if isinstance(ordering, six.string_types):
            return (ordering,)
        return tuple(ordering)

    def _get_position_from_instance(self, instance, ordering):
        """Return the string form of the instance attribute used for ordering."""
        attr = getattr(instance, ordering[0].lstrip('-'))
        return six.text_type(attr)

    def get_paginated_response(self, data):
        """Return a `Response` wrapping `data` with next/previous links."""
        return Response(OrderedDict([
            ('next', self.get_next_link()),
            ('previous', self.get_previous_link()),
            ('results', data)
        ]))

    def get_html_context(self):
        """Build the template context for the browsable API page controls."""
        return {
            'previous_url': self.get_previous_link(),
            'next_url': self.get_next_link()
        }

    def to_html(self):
        """Render the pagination controls for the browsable API."""
        template = loader.get_template(self.template)
        context = Context(self.get_html_context())
        return template.render(context)
| bsd-3-clause |
gopl-zh/gopl-zh.github.com | tools/lsdir.go | 1926 | // Copyright 2013 <chaishushan{AT}gmail.com>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build ingore
//
// List files, support file/header regexp.
//
// Example:
// lsdir dir
// lsdir dir "\.go$"
// lsdir dir "\.go$" "chaishushan"
// lsdir dir "\.tiff?|jpg|jpeg$"
//
// Help:
// lsdir -h
//
package main
import (
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"regexp"
)
// usage is the help text printed (with the surrounding newlines trimmed)
// when the command is run without arguments or with -h.
const usage = `
Usage: lsdir dir [nameFilter [dataFilter]]
lsdir -h
Example:
lsdir dir
lsdir dir "\.go$"
lsdir dir "\.go$" "chaishushan"
lsdir dir "\.tiff?|jpg|jpeg$"
Report bugs to <chaishushan{AT}gmail.com>.
`
// main implements the lsdir command: it walks a directory tree and prints
// the relative path of every regular file whose name matches nameFilter
// and, when dataFilter is given, whose contents match dataFilter as well.
// A total count is printed at the end. Any error terminates the process
// via log.Fatal.
func main() {
	if len(os.Args) < 2 || os.Args[1] == "-h" {
		fmt.Fprintln(os.Stderr, usage[1:len(usage)-1])
		os.Exit(0)
	}
	dir, nameFilter, dataFilter := os.Args[1], ".*", ""
	if len(os.Args) > 2 {
		nameFilter = os.Args[2]
	}
	if len(os.Args) > 3 {
		dataFilter = os.Args[3]
	}

	// Compile each pattern once up front instead of on every file, and
	// report a bad pattern before walking the tree.
	nameRE, err := regexp.Compile(nameFilter)
	if err != nil {
		log.Fatal("regexp.Compile: ", err)
	}
	var dataRE *regexp.Regexp
	if dataFilter != "" {
		dataRE, err = regexp.Compile(dataFilter)
		if err != nil {
			log.Fatal("regexp.Compile: ", err)
		}
	}

	total := 0
	walkErr := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			log.Fatal("filepath.Walk: ", err)
			return err
		}
		if info.IsDir() {
			return nil
		}
		relpath, err := filepath.Rel(dir, path)
		if err != nil {
			log.Fatal("filepath.Rel: ", err)
			return err
		}
		if !nameRE.MatchString(relpath) {
			return nil
		}
		if dataRE != nil {
			data, err := ioutil.ReadFile(path)
			if err != nil {
				fmt.Printf("ioutil.ReadFile: %s\n", path)
				log.Fatal("ioutil.ReadFile: ", err)
			}
			if !dataRE.Match(data) {
				return nil
			}
		}
		fmt.Printf("%s\n", relpath)
		total++
		return nil
	})
	// The original discarded Walk's return value; surface it instead.
	if walkErr != nil {
		log.Fatal("filepath.Walk: ", walkErr)
	}
	fmt.Printf("total %d\n", total)
}
| bsd-3-clause |
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/factory/helpers.py | 4635 | # -*- coding: utf-8 -*-
# Copyright (c) 2010 Mark Sandstrom
# Copyright (c) 2011-2013 Raphaël Barrois
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Simple wrappers around Factory class definition."""
import contextlib
import logging
from . import base
from . import declarations
from . import django
@contextlib.contextmanager
def debug(logger='factory', stream=None):
    """
    Context manager that temporarily enables DEBUG logging on a logger.

    Args:
        logger (str): name of the logger to reconfigure.
        stream: stream to send DEBUG records to (defaults to sys.stderr).

    The previous log level is restored and the temporary handler removed
    on exit — including when the managed block raises, which the original
    implementation failed to do (it leaked the handler and DEBUG level).
    """
    logger_obj = logging.getLogger(logger)
    old_level = logger_obj.level
    handler = logging.StreamHandler(stream)
    handler.setLevel(logging.DEBUG)
    logger_obj.addHandler(handler)
    logger_obj.setLevel(logging.DEBUG)
    try:
        yield
    finally:
        # Always restore the previous configuration, even on error.
        logger_obj.setLevel(old_level)
        logger_obj.removeHandler(handler)
def make_factory(klass, **kwargs):
    """Create a new, simple factory for the given class."""
    factory_name = '%sFactory' % klass.__name__

    class Meta:
        model = klass
    kwargs['Meta'] = Meta
    # Allow callers to choose the factory base class (e.g. a DjangoModelFactory)
    # via the reserved FACTORY_CLASS keyword; default to base.Factory.
    base_class = kwargs.pop('FACTORY_CLASS', base.Factory)

    # Build the class through base.Factory's metaclass so that declaration
    # collection and Meta handling run exactly as for a hand-written factory.
    factory_class = type(base.Factory).__new__(
        type(base.Factory), factory_name, (base_class,), kwargs)
    factory_class.__name__ = '%sFactory' % klass.__name__
    factory_class.__doc__ = 'Auto-generated factory for class %s' % klass
    return factory_class
def build(klass, **kwargs):
    """Create a factory for the given class, and build an instance."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.build()


def build_batch(klass, size, **kwargs):
    """Create a factory for the given class, and build a batch of instances."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.build_batch(size)


def create(klass, **kwargs):
    """Create a factory for the given class, and create an instance."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.create()


def create_batch(klass, size, **kwargs):
    """Create a factory for the given class, and create a batch of instances."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.create_batch(size)


def stub(klass, **kwargs):
    """Create a factory for the given class, and stub an instance."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.stub()


def stub_batch(klass, size, **kwargs):
    """Create a factory for the given class, and stub a batch of instances."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.stub_batch(size)


def generate(klass, strategy, **kwargs):
    """Create a factory for the given class, and generate an instance."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.generate(strategy)


def generate_batch(klass, strategy, size, **kwargs):
    """Create a factory for the given class, and generate instances."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.generate_batch(strategy, size)


# We're reusing 'create' as a keyword.
# pylint: disable=W0621

def simple_generate(klass, create, **kwargs):
    """Create a factory for the given class, and simple_generate an instance."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.simple_generate(create)


def simple_generate_batch(klass, create, size, **kwargs):
    """Create a factory for the given class, and simple_generate instances."""
    factory_class = make_factory(klass, **kwargs)
    return factory_class.simple_generate_batch(create, size)

# pylint: enable=W0621
return declarations.LazyAttribute(func)
def iterator(func):
"""Turn a generator function into an iterator attribute."""
return declarations.Iterator(func())
def sequence(func):
return declarations.Sequence(func)
def lazy_attribute_sequence(func):
return declarations.LazyAttributeSequence(func)
def container_attribute(func):
return declarations.ContainerAttribute(func, strict=False)
def post_generation(fun):
return declarations.PostGeneration(fun)
| bsd-3-clause |
tushar2708/Mach7 | code/compile-time/compile-time-pat-power-a.cpp | 2198 | ///
/// \file
///
/// This file is a part of pattern matching testing suite.
///
/// \author Yuriy Solodkyy <yuriy.solodkyy@gmail.com>
///
/// This file is a part of Mach7 library (http://parasol.tamu.edu/mach7/).
/// Copyright (C) 2011-2012 Texas A&M University.
/// All rights reserved.
///
#include <iostream>
#include "type_switchN-patterns.hpp"
#include "patterns/all.hpp"
#include "testutils.hpp"
//------------------------------------------------------------------------------
using namespace mch;
//------------------------------------------------------------------------------
// A single benchmark argument: a (base, exponent) pair.
typedef std::pair<double,int> arg_type;
//------------------------------------------------------------------------------
/// Returns the square of @p x.
template <typename T>
inline T sqr(const T& x) { return x*x; }
//------------------------------------------------------------------------------
// Forward declarations of the two timed implementations defined below.
extern double power1(const double, const int);
extern double power_opt(const double, const int);
// Unpacks a (base, exponent) benchmark pair for the timing harness.
// NOTE(review): this overload is named power1 but forwards to power_opt —
// confirm whether it was meant to call power1 instead.
inline double power1(const arg_type a) { return power_opt(a.first,a.second); }
//------------------------------------------------------------------------------
/// Naive recursive exponentiation: computes x raised to the power n.
/// Precondition: n >= 0 (the driver only passes non-negative exponents).
XTL_TIMED_FUNC_BEGIN
double power1(const double x, const int n)
{
    if (n == 0) return 1.0;
    if (n == 1) return x;
    if (n % 2 == 0) return sqr(power1(x,n/2));
    // n is odd: peel off one factor.  The unconditional return fixes the
    // original fall-off-the-end UB: for negative odd n, `n % 2` is -1 in
    // C++11, so `n % 2 == 1` was false and no return statement executed.
    return x*power1(x,n-1);
}
XTL_TIMED_FUNC_END
//------------------------------------------------------------------------------
/// Exponentiation by squaring: computes x raised to the power n (n >= 0)
/// using a single recursive call per halving step.
XTL_TIMED_FUNC_BEGIN
double power_opt(double x, int n)
{
    if (n == 0) return 1.0;
    if (n == 1) return x;
    const double half = power_opt(x, n/2);
    return n % 2 == 0 ? sqr(half) : x*sqr(half);
}
XTL_TIMED_FUNC_END
//------------------------------------------------------------------------------
// Benchmark driver: builds N random (base, exponent) pairs and feeds them
// to the timing harness (N, verdict and get_timings1 presumably come from
// testutils.hpp — confirm).
// NOTE(review): both template arguments are power1; the file name suggests
// a comparison against power_opt was intended — verify against the harness.
int main()
{
    std::vector<arg_type> arguments(N);

    // Bases cluster near 1.0 so repeated multiplication stays finite;
    // exponents are drawn from [0, 100).
    for (size_t i = 0; i < N; ++i)
        arguments[i] = arg_type(1.0+1.0/double(1+rand()-RAND_MAX/2), rand() % 100);

    verdict v = get_timings1<double,arg_type,power1,power1>(arguments);
    std::cout << "Verdict: \t" << v << std::endl;
}
//------------------------------------------------------------------------------
| bsd-3-clause |
development2015/kds | frontend/views/answer/view.php | 972 | <?php
use yii\helpers\Html;
use yii\widgets\DetailView;

/* Yii2 detail view for a single Answer record: sets the page title and
 * breadcrumbs, renders Update/Delete action buttons (Delete posts with a
 * JS confirmation), and shows the record's fields in a DetailView. */

/* @var $this yii\web\View */
/* @var $model common\models\Answer */

$this->title = $model->id_answer;
$this->params['breadcrumbs'][] = ['label' => 'Answers', 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="answer-view">
<h1><?= Html::encode($this->title) ?></h1>
<p>
<?= Html::a('Update', ['update', 'id' => $model->id_answer], ['class' => 'btn btn-primary']) ?>
<?= Html::a('Delete', ['delete', 'id' => $model->id_answer], [
'class' => 'btn btn-danger',
'data' => [
'confirm' => 'Are you sure you want to delete this item?',
'method' => 'post',
],
]) ?>
</p>
<?= DetailView::widget([
'model' => $model,
'attributes' => [
'id_answer',
'answer',
'question_id',
'people_id',
],
]) ?>
</div>
gmimano/commcaretest | corehq/apps/toggle_ui/urls.py | 312 | from django.conf.urls import *
from corehq.apps.toggle_ui.views import ToggleListView, ToggleEditView
# URL routes for the feature-toggle admin UI: a list view at the root and
# an edit view keyed by the toggle slug.
urlpatterns = patterns('toggle.views',
    url(r'^$', ToggleListView.as_view(), name=ToggleListView.urlname),
    url(r'^edit/(?P<toggle>[\w_-]+)/$', ToggleEditView.as_view(), name=ToggleEditView.urlname),
)
| bsd-3-clause |
devoncarew/sky_engine | tools/fuchsia/gather_flutter_runner_artifacts.py | 2297 | #!/usr/bin/env python
#
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Gather all the fuchsia artifacts to a destination directory.
"""
import argparse
import errno
import json
import os
import platform
import shutil
import subprocess
import sys
# Maps each artifact's path (relative to the build output root) to its
# destination path (relative to the gathered package root).
_ARTIFACT_PATH_TO_DST = {
    'flutter_jit_runner': 'flutter_jit_runner',
    'icudtl.dat': 'data/icudtl.dat',
    'dart_runner': 'dart_runner',
    'flutter_patched_sdk': 'flutter_patched_sdk'
}
def EnsureParentExists(path):
  """Create the parent directory of |path| if it does not already exist."""
  parent = os.path.dirname(path)
  if not os.path.exists(parent):
    os.makedirs(parent)
def CopyPath(src, dst):
  """Copy |src| to |dst|, creating parent directories as needed.

  Directory trees are copied recursively; regular files fall back to a
  plain file copy.
  """
  try:
    EnsureParentExists(dst)
    shutil.copytree(src, dst)
  except OSError as exc:
    # copytree raises ENOTDIR when |src| is a regular file; copy it
    # directly in that case and re-raise anything else.
    if exc.errno != errno.ENOTDIR:
      raise
    shutil.copy(src, dst)
def CreateMetaPackage(dst_root, far_name):
  """Write the Fuchsia package metadata file at |dst_root|/meta/package."""
  meta_dir = os.path.join(dst_root, 'meta')
  if not os.path.isdir(meta_dir):
    os.makedirs(meta_dir)
  package_path = os.path.join(meta_dir, 'package')
  with open(package_path, 'w') as out_file:
    json.dump({'name': far_name, 'version': '0'}, out_file)
def GatherArtifacts(src_root, dst_root, create_meta_package=True):
  """Copy the Flutter runner artifacts from |src_root| into |dst_root|.

  |dst_root| is rebuilt from scratch: when it already exists its contents
  are removed first (CopyPath recreates the directories).  Exits the
  process with status 1 when an expected artifact is missing.
  Python 2 only: iterates the mapping with dict.iteritems().
  """
  if not os.path.exists(dst_root):
    os.makedirs(dst_root)
  else:
    # Start from a clean slate so stale artifacts never survive.
    shutil.rmtree(dst_root)
  for src_rel, dst_rel in _ARTIFACT_PATH_TO_DST.iteritems():
    src_full = os.path.join(src_root, src_rel)
    dst_full = os.path.join(dst_root, dst_rel)
    if not os.path.exists(src_full):
      print('Unable to find artifact: ', str(src_full))
      sys.exit(1)
    CopyPath(src_full, dst_full)
  if create_meta_package:
    CreateMetaPackage(dst_root, 'flutter_runner')
def main():
  """Parse command-line flags and gather the flutter_runner artifacts.

  Returns 0 on success; exits with a usage error when a required path
  does not exist.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--artifacts-root', dest='artifacts_root', action='store', required=True)
  parser.add_argument(
      '--dest-dir', dest='dst_dir', action='store', required=True)
  args = parser.parse_args()
  # Validate inputs with explicit errors rather than bare asserts, which
  # are silently stripped when Python runs with -O and give no useful
  # message to the user.
  if not os.path.exists(args.artifacts_root):
    parser.error('--artifacts-root does not exist: %s' % args.artifacts_root)
  dst_parent = os.path.abspath(os.path.join(args.dst_dir, os.pardir))
  if not os.path.exists(dst_parent):
    parser.error('parent of --dest-dir does not exist: %s' % dst_parent)
  GatherArtifacts(args.artifacts_root, args.dst_dir)
  return 0
# Script entry point: propagate main()'s return value as the exit status.
if __name__ == '__main__':
  sys.exit(main())
| bsd-3-clause |
zarelit/django-localflavor | tests/test_au/tests.py | 4231 | from __future__ import absolute_import, unicode_literals
import re
from django.test import TestCase
from localflavor.au.forms import (AUPostCodeField, AUPhoneNumberField,
AUStateSelect)
from .forms import AustralianPlaceForm
# Regex fragments used to assert on the rendered widget HTML below.
SELECTED_OPTION_PATTERN = r'<option value="%s" selected="selected">'
BLANK_OPTION_PATTERN = r'<option value="">'
INPUT_VALUE_PATTERN = r'<input[^>]*value="%s"[^>]*>'
class AULocalflavorTests(TestCase):
    """Form and widget tests for the Australian (AU) localflavor fields."""

    def setUp(self):
        # A bound form carrying valid data for every AU-specific field.
        self.form = AustralianPlaceForm(
            {'state': 'WA',
             'state_required': 'QLD',
             'name': 'dummy',
             'postcode': '1234',
             'postcode_required': '4321',
             })

    def test_get_display_methods(self):
        """ Ensure get_*_display() methods are added to model instances. """
        place = self.form.save()
        self.assertEqual(place.get_state_display(), 'Western Australia')
        self.assertEqual(place.get_state_required_display(), 'Queensland')

    def test_default_values(self):
        """ Ensure that default values are selected in forms. """
        form = AustralianPlaceForm()
        self.assertTrue(re.search(SELECTED_OPTION_PATTERN % 'NSW',
                                  str(form['state_default'])))
        self.assertTrue(re.search(INPUT_VALUE_PATTERN % '2500',
                                  str(form['postcode_default'])))

    def test_required(self):
        """ Test that required AUStateFields throw appropriate errors. """
        # Only 'state' and 'name' provided; the *_required fields are omitted.
        form = AustralianPlaceForm({'state': 'NSW', 'name': 'Wollongong'})
        self.assertFalse(form.is_valid())
        self.assertEqual(
            form.errors['state_required'], ['This field is required.'])
        self.assertEqual(
            form.errors['postcode_required'], ['This field is required.'])

    def test_field_blank_option(self):
        """ Test that the empty option is there. """
        self.assertTrue(re.search(BLANK_OPTION_PATTERN,
                                  str(self.form['state'])))

    def test_selected_values(self):
        """ Ensure selected states match the initial values provided. """
        self.assertTrue(re.search(SELECTED_OPTION_PATTERN % 'WA',
                                  str(self.form['state'])))
        self.assertTrue(re.search(SELECTED_OPTION_PATTERN % 'QLD',
                                  str(self.form['state_required'])))
        self.assertTrue(re.search(INPUT_VALUE_PATTERN % '1234',
                                  str(self.form['postcode'])))
        self.assertTrue(re.search(INPUT_VALUE_PATTERN % '4321',
                                  str(self.form['postcode_required'])))

    def test_AUStateSelect(self):
        # The widget should render all eight states/territories with the
        # provided value pre-selected.
        f = AUStateSelect()
        out = '''<select name="state">
<option value="ACT">Australian Capital Territory</option>
<option value="NSW" selected="selected">New South Wales</option>
<option value="NT">Northern Territory</option>
<option value="QLD">Queensland</option>
<option value="SA">South Australia</option>
<option value="TAS">Tasmania</option>
<option value="VIC">Victoria</option>
<option value="WA">Western Australia</option>
</select>'''
        self.assertHTMLEqual(f.render('state', 'NSW'), out)

    def test_AUPostCodeField(self):
        # Postcodes must be exactly four digits.
        error_format = ['Enter a 4 digit postcode.']
        valid = {
            '1234': '1234',
            '2000': '2000',
        }
        invalid = {
            'abcd': error_format,
            '20001': ['Ensure this value has at most 4 characters (it has 5).'] + error_format,
        }
        self.assertFieldOutput(AUPostCodeField, valid, invalid)

    def test_AUPhoneNumberField(self):
        # Phone numbers normalise to ten digits; punctuation and spaces
        # are stripped during cleaning.
        error_format = ['Phone numbers must contain 10 digits.']
        valid = {
            '1234567890': '1234567890',
            '0213456789': '0213456789',
            '02 13 45 67 89': '0213456789',
            '(02) 1345 6789': '0213456789',
            '(02) 1345-6789': '0213456789',
            '(02)1345-6789': '0213456789',
            '0408 123 456': '0408123456',
        }
        invalid = {
            '123': error_format,
            '1800DJANGO': error_format,
        }
        self.assertFieldOutput(AUPhoneNumberField, valid, invalid)
xkproject/Orchard2 | src/OrchardCore.Modules/OrchardCore.Https/AdminMenu.cs | 1149 | using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Localization;
using OrchardCore.Navigation;
namespace OrchardCore.Https
{
public class AdminMenu : INavigationProvider
{
private readonly IStringLocalizer S;
public AdminMenu(IStringLocalizer<AdminMenu> localizer)
{
S = localizer;
}
public Task BuildNavigationAsync(string name, NavigationBuilder builder)
{
if (!String.Equals(name, "admin", StringComparison.OrdinalIgnoreCase))
{
return Task.CompletedTask;
}
builder
.Add(S["Security"], security => security
.Add(S["Settings"], settings => settings
.Add(S["HTTPS"], S["HTTPS"].PrefixPosition(), entry => entry
.Action("Index", "Admin", new { area = "OrchardCore.Settings", groupId = "Https" })
.Permission(Permissions.ManageHttps)
.LocalNav()
))
);
return Task.CompletedTask;
}
}
}
| bsd-3-clause |
xkproject/Orchard2 | src/OrchardCore/OrchardCore.Indexing.Abstractions/Properties/AssemblyInfo.cs | 803 | using System.Reflection;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("OrchardCore.Indexing.Abstractions")]
[assembly: AssemblyTrademark("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
// NOTE(review): keep this GUID stable once published; COM clients bind to it.
[assembly: Guid("ca48c824-1b11-46ff-9d43-aa0bc6955345")]
351784144/DhtvWeiXin | vendor/doctrine/doctrine-module/src/DoctrineModule/Form/Element/ObjectSelect.php | 2148 | <?php
/*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the MIT license. For more information, see
* <http://www.doctrine-project.org>.
*/
namespace DoctrineModule\Form\Element;
use DoctrineModule\Form\Element\Proxy;
use Zend\Form\Element\Select as SelectElement;
use Zend\Form\Form;
/**
 * Select element whose options/values are backed by Doctrine-managed objects,
 * with the object <-> scalar conversion delegated to a lazily created Proxy.
 */
class ObjectSelect extends SelectElement
{
    /**
     * Handles conversion between managed objects and form values.
     *
     * @var Proxy
     */
    protected $proxy;

    /**
     * Returns the proxy, creating it on first access.
     *
     * @return Proxy
     */
    public function getProxy()
    {
        if ($this->proxy === null) {
            $this->proxy = new Proxy();
        }

        return $this->proxy;
    }

    /**
     * Forwards the options to the proxy before applying them to the element.
     *
     * @param array|\Traversable $options
     * @return ObjectSelect
     */
    public function setOptions($options)
    {
        $proxy = $this->getProxy();
        $proxy->setOptions($options);

        return parent::setOptions($options);
    }

    /**
     * {@inheritDoc}
     */
    public function setValue($value)
    {
        $resolved = $this->getProxy()->getValue($value);

        return parent::setValue($resolved);
    }

    /**
     * {@inheritDoc}
     */
    public function getValueOptions()
    {
        if (!empty($this->valueOptions)) {
            return $this->valueOptions;
        }

        $this->setValueOptions($this->getProxy()->getValueOptions());

        return $this->valueOptions;
    }
}
| bsd-3-clause |
cawka/packaging-ndn-cpp | include/ndnboost/mpl/aux_/preprocessed/bcc/advance_forward.hpp | 2185 |
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// *Preprocessed* version of the main "advance_forward.hpp" header
// -- DO NOT modify by hand!
namespace ndnboost { namespace mpl { namespace aux {
// Compile-time "advance an iterator forward N steps" metafunction.
// NOTE(review): this is a *preprocessed* header (see file banner) — any real
// change belongs in the boost/mpl generator sources, not here.
template< long N > struct advance_forward;
template<>
struct advance_forward<0>
{
    template< typename Iterator > struct apply
    {
        typedef Iterator iter0;
        typedef iter0 type;
    };
};
template<>
struct advance_forward<1>
{
    template< typename Iterator > struct apply
    {
        typedef Iterator iter0;
        typedef typename next<iter0>::type iter1;
        typedef iter1 type;
    };
};
template<>
struct advance_forward<2>
{
    template< typename Iterator > struct apply
    {
        typedef Iterator iter0;
        typedef typename next<iter0>::type iter1;
        typedef typename next<iter1>::type iter2;
        typedef iter2 type;
    };
};
template<>
struct advance_forward<3>
{
    template< typename Iterator > struct apply
    {
        typedef Iterator iter0;
        typedef typename next<iter0>::type iter1;
        typedef typename next<iter1>::type iter2;
        typedef typename next<iter2>::type iter3;
        typedef iter3 type;
    };
};
template<>
struct advance_forward<4>
{
    template< typename Iterator > struct apply
    {
        typedef Iterator iter0;
        typedef typename next<iter0>::type iter1;
        typedef typename next<iter1>::type iter2;
        typedef typename next<iter2>::type iter3;
        typedef typename next<iter3>::type iter4;
        typedef iter4 type;
    };
};
// General case: advance in chunks of 4 (the largest unrolled specialization
// above), then recurse on the remaining N - 4 steps (clamped at 0).
template< long N >
struct advance_forward
{
    template< typename Iterator > struct apply
    {
        typedef typename apply_wrap1<
              advance_forward<4>
            , Iterator
            >::type chunk_result_;
        typedef typename apply_wrap1<
              advance_forward<(
                (N - 4) < 0
                    ? 0
                    : N - 4
                )>
            , chunk_result_
            >::type type;
    };
};
}}}
ayende/rhino-licensing | Rhino.Licensing.AdminTool/Views/AboutView.xaml.cs | 318 | namespace Rhino.Licensing.AdminTool.Views
{
    /// <summary>
    /// Marker interface for the About view (no members).
    /// NOTE(review): presumably used for view/view-model resolution or DI
    /// registration — confirm against the container configuration.
    /// </summary>
    public interface IAboutView
    {
    }
    /// <summary>
    /// Interaction logic for AboutView.xaml
    /// </summary>
    public partial class AboutView : IAboutView
    {
        public AboutView()
        {
            // Loads the component tree defined in AboutView.xaml.
            InitializeComponent();
        }
    }
| bsd-3-clause |
daaquan/pimcore | pimcore/models/Webservice/Data/ClassDefinition/Out.php | 592 | <?php
/**
* Pimcore
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://www.pimcore.org/license
*
* @category Pimcore
* @package Webservice
* @copyright Copyright (c) 2009-2014 pimcore GmbH (http://www.pimcore.org)
* @license http://www.pimcore.org/license New BSD License
*/
namespace Pimcore\Model\Webservice\Data\ClassDefinition;
use Pimcore\Model;
/**
 * Class-definition payload for the "out" (response) direction of the
 * webservice. All behaviour is inherited from the parent; this subclass
 * only provides a distinct type for the outbound representation.
 */
class Out extends Model\Webservice\Data\ClassDefinition {
}
| bsd-3-clause |
lodyagin/bare_cxx | tests.HIDE/strings/basic.string/string.modifiers/string_erase/iter_iter.pass.cpp | 6411 | //===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// <string>
// iterator erase(const_iterator first, const_iterator last);
#include <string>
#include <cassert>
#include "../../min_allocator.h"
// Erase the half-open range [pos, pos + n) from a copy of the string and
// verify the string's invariants, its resulting value, and the position of
// the iterator returned by erase().
template <class S>
void
test(S s, typename S::difference_type pos, typename S::difference_type n, S expected)
{
    typename S::const_iterator range_begin = s.cbegin() + pos;
    typename S::const_iterator range_end = range_begin + n;
    typename S::iterator ret = s.erase(range_begin, range_end);
    assert(s.__invariants());       // libc++-internal consistency check
    assert(s == expected);          // erased content matches expectation
    assert(ret - s.begin() == pos); // erase() returns iterator at erase point
}
int main()
{
    // Exhaustive position/length combinations for strings of length 0, 5,
    // 10 and 20, covering short-string and long-string representations.
    {
    typedef std::string S;
    test(S(""), 0, 0, S(""));
    test(S("abcde"), 0, 0, S("abcde"));
    test(S("abcde"), 0, 1, S("bcde"));
    test(S("abcde"), 0, 2, S("cde"));
    test(S("abcde"), 0, 4, S("e"));
    test(S("abcde"), 0, 5, S(""));
    test(S("abcde"), 1, 0, S("abcde"));
    test(S("abcde"), 1, 1, S("acde"));
    test(S("abcde"), 1, 2, S("ade"));
    test(S("abcde"), 1, 3, S("ae"));
    test(S("abcde"), 1, 4, S("a"));
    test(S("abcde"), 2, 0, S("abcde"));
    test(S("abcde"), 2, 1, S("abde"));
    test(S("abcde"), 2, 2, S("abe"));
    test(S("abcde"), 2, 3, S("ab"));
    test(S("abcde"), 4, 0, S("abcde"));
    test(S("abcde"), 4, 1, S("abcd"));
    test(S("abcde"), 5, 0, S("abcde"));
    test(S("abcdefghij"), 0, 0, S("abcdefghij"));
    test(S("abcdefghij"), 0, 1, S("bcdefghij"));
    test(S("abcdefghij"), 0, 5, S("fghij"));
    test(S("abcdefghij"), 0, 9, S("j"));
    test(S("abcdefghij"), 0, 10, S(""));
    test(S("abcdefghij"), 1, 0, S("abcdefghij"));
    test(S("abcdefghij"), 1, 1, S("acdefghij"));
    test(S("abcdefghij"), 1, 4, S("afghij"));
    test(S("abcdefghij"), 1, 8, S("aj"));
    test(S("abcdefghij"), 1, 9, S("a"));
    test(S("abcdefghij"), 5, 0, S("abcdefghij"));
    test(S("abcdefghij"), 5, 1, S("abcdeghij"));
    test(S("abcdefghij"), 5, 2, S("abcdehij"));
    test(S("abcdefghij"), 5, 4, S("abcdej"));
    test(S("abcdefghij"), 5, 5, S("abcde"));
    test(S("abcdefghij"), 9, 0, S("abcdefghij"));
    test(S("abcdefghij"), 9, 1, S("abcdefghi"));
    test(S("abcdefghij"), 10, 0, S("abcdefghij"));
    test(S("abcdefghijklmnopqrst"), 0, 0, S("abcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 0, 1, S("bcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 0, 10, S("klmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 0, 19, S("t"));
    test(S("abcdefghijklmnopqrst"), 0, 20, S(""));
    test(S("abcdefghijklmnopqrst"), 1, 0, S("abcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 1, 1, S("acdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 1, 9, S("aklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 1, 18, S("at"));
    test(S("abcdefghijklmnopqrst"), 1, 19, S("a"));
    test(S("abcdefghijklmnopqrst"), 10, 0, S("abcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 10, 1, S("abcdefghijlmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 10, 5, S("abcdefghijpqrst"));
    test(S("abcdefghijklmnopqrst"), 10, 9, S("abcdefghijt"));
    test(S("abcdefghijklmnopqrst"), 10, 10, S("abcdefghij"));
    test(S("abcdefghijklmnopqrst"), 19, 0, S("abcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 19, 1, S("abcdefghijklmnopqrs"));
    test(S("abcdefghijklmnopqrst"), 20, 0, S("abcdefghijklmnopqrst"));
    }
#if __cplusplus >= 201103L
    // Same vectors, but with a C++11 minimal allocator to exercise the
    // allocator-aware code paths.
    {
    typedef std::basic_string<char, std::char_traits<char>, min_allocator<char>> S;
    test(S(""), 0, 0, S(""));
    test(S("abcde"), 0, 0, S("abcde"));
    test(S("abcde"), 0, 1, S("bcde"));
    test(S("abcde"), 0, 2, S("cde"));
    test(S("abcde"), 0, 4, S("e"));
    test(S("abcde"), 0, 5, S(""));
    test(S("abcde"), 1, 0, S("abcde"));
    test(S("abcde"), 1, 1, S("acde"));
    test(S("abcde"), 1, 2, S("ade"));
    test(S("abcde"), 1, 3, S("ae"));
    test(S("abcde"), 1, 4, S("a"));
    test(S("abcde"), 2, 0, S("abcde"));
    test(S("abcde"), 2, 1, S("abde"));
    test(S("abcde"), 2, 2, S("abe"));
    test(S("abcde"), 2, 3, S("ab"));
    test(S("abcde"), 4, 0, S("abcde"));
    test(S("abcde"), 4, 1, S("abcd"));
    test(S("abcde"), 5, 0, S("abcde"));
    test(S("abcdefghij"), 0, 0, S("abcdefghij"));
    test(S("abcdefghij"), 0, 1, S("bcdefghij"));
    test(S("abcdefghij"), 0, 5, S("fghij"));
    test(S("abcdefghij"), 0, 9, S("j"));
    test(S("abcdefghij"), 0, 10, S(""));
    test(S("abcdefghij"), 1, 0, S("abcdefghij"));
    test(S("abcdefghij"), 1, 1, S("acdefghij"));
    test(S("abcdefghij"), 1, 4, S("afghij"));
    test(S("abcdefghij"), 1, 8, S("aj"));
    test(S("abcdefghij"), 1, 9, S("a"));
    test(S("abcdefghij"), 5, 0, S("abcdefghij"));
    test(S("abcdefghij"), 5, 1, S("abcdeghij"));
    test(S("abcdefghij"), 5, 2, S("abcdehij"));
    test(S("abcdefghij"), 5, 4, S("abcdej"));
    test(S("abcdefghij"), 5, 5, S("abcde"));
    test(S("abcdefghij"), 9, 0, S("abcdefghij"));
    test(S("abcdefghij"), 9, 1, S("abcdefghi"));
    test(S("abcdefghij"), 10, 0, S("abcdefghij"));
    test(S("abcdefghijklmnopqrst"), 0, 0, S("abcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 0, 1, S("bcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 0, 10, S("klmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 0, 19, S("t"));
    test(S("abcdefghijklmnopqrst"), 0, 20, S(""));
    test(S("abcdefghijklmnopqrst"), 1, 0, S("abcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 1, 1, S("acdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 1, 9, S("aklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 1, 18, S("at"));
    test(S("abcdefghijklmnopqrst"), 1, 19, S("a"));
    test(S("abcdefghijklmnopqrst"), 10, 0, S("abcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 10, 1, S("abcdefghijlmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 10, 5, S("abcdefghijpqrst"));
    test(S("abcdefghijklmnopqrst"), 10, 9, S("abcdefghijt"));
    test(S("abcdefghijklmnopqrst"), 10, 10, S("abcdefghij"));
    test(S("abcdefghijklmnopqrst"), 19, 0, S("abcdefghijklmnopqrst"));
    test(S("abcdefghijklmnopqrst"), 19, 1, S("abcdefghijklmnopqrs"));
    test(S("abcdefghijklmnopqrst"), 20, 0, S("abcdefghijklmnopqrst"));
    }
#endif
}
| bsd-3-clause |
Cantera/cantera-svn | interfaces/cython/cantera/examples/surface_chemistry/catalytic_combustion.py | 4700 | """
CATCOMB -- Catalytic combustion of methane on platinum.
This script solves a catalytic combustion problem. A stagnation flow is set
up, with a gas inlet 10 cm from a platinum surface at 900 K. The lean,
premixed methane/air mixture enters at ~ 6 cm/s (0.06 kg/m2/s), and burns
catalytically on the platinum surface. Gas-phase chemistry is included too,
and has some effect very near the surface.
The catalytic combustion mechanism is from Deutschman et al., 26th
Symp. (Intl.) on Combustion,1996 pp. 1747-1754
"""
import numpy as np
import cantera as ct
# Parameter values are collected here to make it easier to modify them
p = ct.one_atm  # pressure
tinlet = 300.0  # inlet temperature
tsurf = 900.0  # surface temperature
mdot = 0.06  # kg/m^2/s
transport = 'Mix'  # transport model
# We will solve first for a hydrogen/air case to use as the initial estimate
# for the methane/air case
# composition of the inlet premixed gas for the hydrogen/air case
comp1 = 'H2:0.05, O2:0.21, N2:0.78, AR:0.01'
# composition of the inlet premixed gas for the methane/air case
comp2 = 'CH4:0.095, O2:0.21, N2:0.78, AR:0.01'
# the initial grid, in meters. The inlet/surface separation is 10 cm.
initial_grid = [0.0, 0.02, 0.04, 0.06, 0.08, 0.1]  # m
# numerical parameters
tol_ss = [1.0e-5, 1.0e-9]  # [rtol, atol] for steady-state problem
tol_ts = [1.0e-4, 1.0e-9]  # [rtol, atol] for time stepping
loglevel = 1  # amount of diagnostic output (0 to 5)
refine_grid = True  # enable or disable refinement
################ create the gas object ########################
#
# This object will be used to evaluate all thermodynamic, kinetic, and
# transport properties. The gas phase will be taken from the definition of
# phase 'gas' in input file 'ptcombust.cti,' which is a stripped-down version
# of GRI-Mech 3.0.
gas = ct.Solution('ptcombust.cti', 'gas')
gas.TPX = tinlet, p, comp1
################ create the interface object ##################
#
# This object will be used to evaluate all surface chemical production rates.
# It will be created from the interface definition 'Pt_surf' in input file
# 'ptcombust.cti,' which implements the reaction mechanism of Deutschmann et
# al., 1995 for catalytic combustion on platinum.
#
surf_phase = ct.Interface('ptcombust.cti', 'Pt_surf', [gas])
surf_phase.TP = tsurf, p
# integrate the coverage equations in time for 1 s, holding the gas
# composition fixed to generate a good starting estimate for the coverages.
surf_phase.advance_coverages(1.0)
# create the object that simulates the stagnation flow, and specify an initial
# grid
sim = ct.ImpingingJet(gas=gas, grid=initial_grid, surface=surf_phase)
# Objects of class StagnationFlow have members that represent the gas inlet
# ('inlet') and the surface ('surface'). Set some parameters of these objects.
sim.inlet.mdot = mdot
sim.inlet.T = tinlet
sim.inlet.X = comp1
sim.surface.T = tsurf
# Set error tolerances
sim.flame.set_steady_tolerances(default=tol_ss)
sim.flame.set_transient_tolerances(default=tol_ts)
# Show the initial solution estimate
sim.show_solution()
# Solving problems with stiff chemistry coupled to flow can require a
# sequential approach where solutions are first obtained for simpler problems
# and used as the initial guess for more difficult problems.
# start with the energy equation on (default is 'off')
sim.energy_enabled = True
# disable the surface coverage equations, and turn off all gas and surface
# chemistry.
sim.surface.coverage_enabled = False
surf_phase.set_multiplier(0.0)
gas.set_multiplier(0.0)
# solve the problem, refining the grid if needed, to determine the non-
# reacting velocity and temperature distributions
sim.solve(loglevel, refine_grid)
# now turn on the surface coverage equations, and turn the chemistry on slowly
# (ramping the rate multiplier from 1e-5 up to 1 over six steps)
sim.surface.coverage_enabled = True
for mult in np.logspace(-5, 0, 6):
    surf_phase.set_multiplier(mult)
    gas.set_multiplier(mult)
    print('Multiplier =', mult)
    sim.solve(loglevel, refine_grid)
# At this point, we should have the solution for the hydrogen/air problem.
sim.show_solution()
# Now switch the inlet to the methane/air composition.
sim.inlet.X = comp2
# set more stringent grid refinement criteria
sim.set_refine_criteria(100.0, 0.15, 0.2, 0.0)
# solve the problem for the final time
sim.solve(loglevel, refine_grid)
# show the solution
sim.show_solution()
# save the solution in XML format. The 'restore' method can be used to restart
# a simulation from a solution stored in this form.
sim.save("catcomb.xml", "soln1")
# save selected solution components in a CSV file for plotting in
# Excel or MATLAB.
sim.write_csv('catalytic_combustion.csv', quiet=False)
sim.show_stats(0)
xin3liang/platform_external_chromium_org_third_party_skia | src/gpu/GrGpu.cpp | 18019 |
/*
* Copyright 2010 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "GrGpu.h"
#include "GrBufferAllocPool.h"
#include "GrContext.h"
#include "GrDrawTargetCaps.h"
#include "GrIndexBuffer.h"
#include "GrStencilBuffer.h"
#include "GrVertexBuffer.h"
// probably makes no sense for this to be less than a page
static const size_t VERTEX_POOL_VB_SIZE = 1 << 18;
static const int VERTEX_POOL_VB_COUNT = 4;
static const size_t INDEX_POOL_IB_SIZE = 1 << 16;
static const int INDEX_POOL_IB_COUNT = 4;
////////////////////////////////////////////////////////////////////////////////
#define DEBUG_INVAL_BUFFER 0xdeadcafe
#define DEBUG_INVAL_START_IDX -1
// Constructor: wires the clip mask manager back to this GPU and seeds the
// geometry-pool state stack with one entry.
GrGpu::GrGpu(GrContext* context)
    : GrDrawTarget(context)
    , fResetTimestamp(kExpiredTimestamp+1)
    , fResetBits(kAll_GrBackendState)
    , fVertexPool(NULL)
    , fIndexPool(NULL)
    , fVertexPoolUseCnt(0)
    , fIndexPoolUseCnt(0)
    , fQuadIndexBuffer(NULL) {
    fClipMaskManager.setGpu(this);
    fGeomPoolStateStack.push_back();
#ifdef SK_DEBUG
    // Poison the initial pool state so accidental use is caught in debug builds.
    GeometryPoolState& poolState = fGeomPoolStateStack.back();
    poolState.fPoolVertexBuffer = (GrVertexBuffer*)DEBUG_INVAL_BUFFER;
    poolState.fPoolStartVertex = DEBUG_INVAL_START_IDX;
    poolState.fPoolIndexBuffer = (GrIndexBuffer*)DEBUG_INVAL_BUFFER;
    poolState.fPoolStartIndex = DEBUG_INVAL_START_IDX;
#endif
}
// Destructor: drops the cached quad index buffer ref and frees both geometry pools.
GrGpu::~GrGpu() {
    SkSafeSetNull(fQuadIndexBuffer);
    delete fVertexPool;
    fVertexPool = NULL;
    delete fIndexPool;
    fIndexPool = NULL;
}
// Hook invoked when the backend 3D context is abandoned; the base class has
// nothing to release. (sic: the misspelled name is the public API — renaming
// would break overrides/callers.)
void GrGpu::contextAbandonded() {}
////////////////////////////////////////////////////////////////////////////////
// Creates a texture, optionally initialized from srcData. Returns NULL when
// the config is not texturable, when a render-target texture is requested
// for a non-renderable config, or when backend creation fails.
GrTexture* GrGpu::createTexture(const GrTextureDesc& desc,
                                const void* srcData, size_t rowBytes) {
    if (!this->caps()->isConfigTexturable(desc.fConfig)) {
        return NULL;
    }
    if ((desc.fFlags & kRenderTarget_GrTextureFlagBit) &&
        !this->caps()->isConfigRenderable(desc.fConfig, desc.fSampleCnt > 0)) {
        return NULL;
    }
    GrTexture *tex = NULL;
    if (GrPixelConfigIsCompressed(desc.fConfig)) {
        // We shouldn't be rendering into this
        SkASSERT((desc.fFlags & kRenderTarget_GrTextureFlagBit) == 0);
        // Compressed textures additionally require pow-2 dimensions unless the
        // backend supports NPOT tiling.
        if (!this->caps()->npotTextureTileSupport() &&
            (!SkIsPow2(desc.fWidth) || !SkIsPow2(desc.fHeight))) {
            return NULL;
        }
        this->handleDirtyContext();
        tex = this->onCreateCompressedTexture(desc, srcData);
    } else {
        this->handleDirtyContext();
        tex = this->onCreateTexture(desc, srcData, rowBytes);
        if (NULL != tex &&
            (kRenderTarget_GrTextureFlagBit & desc.fFlags) &&
            !(kNoStencil_GrTextureFlagBit & desc.fFlags)) {
            SkASSERT(NULL != tex->asRenderTarget());
            // TODO: defer this and attach dynamically
            if (!this->attachStencilBufferToRenderTarget(tex->asRenderTarget())) {
                tex->unref();
                return NULL;
            }
        }
    }
    return tex;
}
// Attaches a stencil buffer to 'rt': first tries to reuse a cached buffer of
// matching width/height/sample count from the context, otherwise creates a
// new one and clears it. Returns false if neither path succeeds.
bool GrGpu::attachStencilBufferToRenderTarget(GrRenderTarget* rt) {
    SkASSERT(NULL == rt->getStencilBuffer());
    GrStencilBuffer* sb =
        this->getContext()->findStencilBuffer(rt->width(),
                                              rt->height(),
                                              rt->numSamples());
    if (NULL != sb) {
        rt->setStencilBuffer(sb);
        bool attached = this->attachStencilBufferToRenderTarget(sb, rt);
        if (!attached) {
            // Roll back the association if the backend attach failed.
            rt->setStencilBuffer(NULL);
        }
        return attached;
    }
    if (this->createStencilBufferForRenderTarget(rt,
                                                 rt->width(), rt->height())) {
        // Right now we're clearing the stencil buffer here after it is
        // attached to an RT for the first time. When we start matching
        // stencil buffers with smaller color targets this will no longer
        // be correct because it won't be guaranteed to clear the entire
        // sb.
        // We used to clear down in the GL subclass using a special purpose
        // FBO. But iOS doesn't allow a stencil-only FBO. It reports unsupported
        // FBO status.
        GrDrawState::AutoRenderTargetRestore artr(this->drawState(), rt);
        this->clearStencil();
        return true;
    } else {
        return false;
    }
}
// Wraps a client-provided backend texture. If it is renderable, a stencil
// buffer is attached up front; failure to attach releases the texture.
GrTexture* GrGpu::wrapBackendTexture(const GrBackendTextureDesc& desc) {
    this->handleDirtyContext();
    GrTexture* tex = this->onWrapBackendTexture(desc);
    if (NULL == tex) {
        return NULL;
    }
    // TODO: defer this and attach dynamically
    GrRenderTarget* tgt = tex->asRenderTarget();
    if (NULL != tgt &&
        !this->attachStencilBufferToRenderTarget(tgt)) {
        tex->unref();
        return NULL;
    } else {
        return tex;
    }
}
// Wraps a client-provided backend render target.
GrRenderTarget* GrGpu::wrapBackendRenderTarget(const GrBackendRenderTargetDesc& desc) {
    this->handleDirtyContext();
    return this->onWrapBackendRenderTarget(desc);
}
// Thin wrappers: each syncs any pending backend state ("dirty context")
// before delegating to the backend-specific implementation.
GrVertexBuffer* GrGpu::createVertexBuffer(size_t size, bool dynamic) {
    this->handleDirtyContext();
    return this->onCreateVertexBuffer(size, dynamic);
}
GrIndexBuffer* GrGpu::createIndexBuffer(size_t size, bool dynamic) {
    this->handleDirtyContext();
    return this->onCreateIndexBuffer(size, dynamic);
}
GrPath* GrGpu::createPath(const SkPath& path, const SkStrokeRec& stroke) {
    SkASSERT(this->caps()->pathRenderingSupport());
    this->handleDirtyContext();
    return this->pathRendering()->createPath(path, stroke);
}
// NOTE(review): unlike createPath(), this does not assert
// pathRenderingSupport() — confirm whether that asymmetry is intentional.
GrPathRange* GrGpu::createPathRange(size_t size, const SkStrokeRec& stroke) {
    this->handleDirtyContext();
    return this->pathRendering()->createPathRange(size, stroke);
}
// Clears 'rect' (or the whole target when NULL/ignorable) to 'color'. When
// 'renderTarget' is supplied it temporarily overrides the draw state's RT.
void GrGpu::clear(const SkIRect* rect,
                  GrColor color,
                  bool canIgnoreRect,
                  GrRenderTarget* renderTarget) {
    GrDrawState::AutoRenderTargetRestore art;
    if (NULL != renderTarget) {
        art.set(this->drawState(), renderTarget);
    }
    if (NULL == this->getDrawState().getRenderTarget()) {
        // No target to clear; programmer error in debug builds.
        SkASSERT(0);
        return;
    }
    this->handleDirtyContext();
    this->onClear(rect, color, canIgnoreRect);
}
// Reads back a rectangle of pixels from a render target into 'buffer'.
bool GrGpu::readPixels(GrRenderTarget* target,
                       int left, int top, int width, int height,
                       GrPixelConfig config, void* buffer,
                       size_t rowBytes) {
    this->handleDirtyContext();
    return this->onReadPixels(target, left, top, width, height,
                              config, buffer, rowBytes);
}
// Uploads a rectangle of pixels from 'buffer' into a texture.
bool GrGpu::writeTexturePixels(GrTexture* texture,
                               int left, int top, int width, int height,
                               GrPixelConfig config, const void* buffer,
                               size_t rowBytes) {
    this->handleDirtyContext();
    return this->onWriteTexturePixels(texture, left, top, width, height,
                                      config, buffer, rowBytes);
}
// Resolves a multisampled render target via the backend implementation.
void GrGpu::resolveRenderTarget(GrRenderTarget* target) {
    SkASSERT(target);
    this->handleDirtyContext();
    this->onResolveRenderTarget(target);
}
// Stencil settings for winding-rule path fills (increment with clamp).
static const GrStencilSettings& winding_path_stencil_settings() {
    GR_STATIC_CONST_SAME_STENCIL_STRUCT(gSettings,
        kIncClamp_StencilOp,
        kIncClamp_StencilOp,
        kAlwaysIfInClip_StencilFunc,
        0xFFFF, 0xFFFF, 0xFFFF);
    return *GR_CONST_STENCIL_SETTINGS_PTR_FROM_STRUCT_PTR(&gSettings);
}
// Stencil settings for even/odd-rule path fills (invert).
static const GrStencilSettings& even_odd_path_stencil_settings() {
    GR_STATIC_CONST_SAME_STENCIL_STRUCT(gSettings,
        kInvert_StencilOp,
        kInvert_StencilOp,
        kAlwaysIfInClip_StencilFunc,
        0xFFFF, 0xFFFF, 0xFFFF);
    return *GR_CONST_STENCIL_SETTINGS_PTR_FROM_STRUCT_PTR(&gSettings);
}
// Maps a path fill type to the stencil settings used to render it, then lets
// the clip mask manager adjust them for the active clip.
void GrGpu::getPathStencilSettingsForFillType(SkPath::FillType fill, GrStencilSettings* outStencilSettings) {
    switch (fill) {
        default:
            // Unknown fill types fall through to the winding settings after
            // failing in debug builds.
            SkFAIL("Unexpected path fill.");
            /* fallthrough */;
        case SkPath::kWinding_FillType:
        case SkPath::kInverseWinding_FillType:
            *outStencilSettings = winding_path_stencil_settings();
            break;
        case SkPath::kEvenOdd_FillType:
        case SkPath::kInverseEvenOdd_FillType:
            *outStencilSettings = even_odd_path_stencil_settings();
            break;
    }
    fClipMaskManager.adjustPathStencilParams(outStencilSettings);
}
////////////////////////////////////////////////////////////////////////////////
// Capacity of the shared quad index buffer; 4 vertices per quad must remain
// addressable by 16-bit indices (hence the static assert below).
static const int MAX_QUADS = 1 << 12; // max possible: (1 << 14) - 1;
GR_STATIC_ASSERT(4 * MAX_QUADS <= 65535);
// Writes index data for 'quadCount' quads: each quad (v0..v3) becomes the
// two triangles (v0,v1,v2) and (v0,v2,v3), six indices per quad.
static inline void fill_indices(uint16_t* indices, int quadCount) {
    uint16_t* out = indices;
    for (int q = 0; q < quadCount; ++q) {
        const uint16_t base = (uint16_t)(4 * q);
        *out++ = base + 0;
        *out++ = base + 1;
        *out++ = base + 2;
        *out++ = base + 0;
        *out++ = base + 2;
        *out++ = base + 3;
    }
}
// Lazily creates (and recreates after context loss) a shared index buffer
// holding two triangles per quad for MAX_QUADS quads. Returns NULL if the
// buffer could not be created.
const GrIndexBuffer* GrGpu::getQuadIndexBuffer() const {
    if (NULL == fQuadIndexBuffer || fQuadIndexBuffer->wasDestroyed()) {
        SkSafeUnref(fQuadIndexBuffer);
        static const int SIZE = sizeof(uint16_t) * 6 * MAX_QUADS;
        GrGpu* me = const_cast<GrGpu*>(this);
        fQuadIndexBuffer = me->createIndexBuffer(SIZE, false);
        if (NULL != fQuadIndexBuffer) {
            uint16_t* indices = (uint16_t*)fQuadIndexBuffer->map();
            if (NULL != indices) {
                // Fast path: write directly into the mapped buffer.
                fill_indices(indices, MAX_QUADS);
                fQuadIndexBuffer->unmap();
            } else {
                // Mapping unsupported/failed: build in CPU memory and upload.
                indices = (uint16_t*)sk_malloc_throw(SIZE);
                fill_indices(indices, MAX_QUADS);
                if (!fQuadIndexBuffer->updateData(indices, SIZE)) {
                    fQuadIndexBuffer->unref();
                    fQuadIndexBuffer = NULL;
                    SkFAIL("Can't get indices into buffer!");
                }
                sk_free(indices);
            }
        }
    }
    return fQuadIndexBuffer;
}
////////////////////////////////////////////////////////////////////////////////
// Resolves the current clip into stencil/scissor state and then pushes the
// accumulated draw state to the 3D API. Returns false when the clip cannot
// be applied (e.g. the draw would be entirely clipped away) or when flushing
// the graphics state fails; callers skip the draw in that case.
bool GrGpu::setupClipAndFlushState(DrawType type, const GrDeviceCoordTexture* dstCopy,
                                   GrDrawState::AutoRestoreEffects* are,
                                   const SkRect* devBounds) {
    if (!fClipMaskManager.setupClipping(this->getClip(), are, devBounds)) {
        return false;
    }
    // The redundant "if (!x) return false; return true;" tail was collapsed
    // into a direct return of the flush result.
    return this->flushGraphicsState(type, dstCopy);
}
////////////////////////////////////////////////////////////////////////////////
// Called before a new geometry source is pushed: finalizes any pool-backed
// vertex/index data for the current source and pushes a fresh pool-state entry.
void GrGpu::geometrySourceWillPush() {
    const GeometrySrcState& geoSrc = this->getGeomSrc();
    if (kArray_GeometrySrcType == geoSrc.fVertexSrc ||
        kReserved_GeometrySrcType == geoSrc.fVertexSrc) {
        this->finalizeReservedVertices();
    }
    if (kArray_GeometrySrcType == geoSrc.fIndexSrc ||
        kReserved_GeometrySrcType == geoSrc.fIndexSrc) {
        this->finalizeReservedIndices();
    }
    GeometryPoolState& newState = fGeomPoolStateStack.push_back();
#ifdef SK_DEBUG
    // Poison the new entry so accidental use before initialization is caught.
    newState.fPoolVertexBuffer = (GrVertexBuffer*)DEBUG_INVAL_BUFFER;
    newState.fPoolStartVertex = DEBUG_INVAL_START_IDX;
    newState.fPoolIndexBuffer = (GrIndexBuffer*)DEBUG_INVAL_BUFFER;
    newState.fPoolStartIndex = DEBUG_INVAL_START_IDX;
#else
    (void) newState; // silence compiler warning
#endif
}
// Called when a geometry source is popped; discards the matching pool state.
void GrGpu::geometrySourceWillPop(const GeometrySrcState& restoredState) {
    // if popping last entry then pops are unbalanced with pushes
    SkASSERT(fGeomPoolStateStack.count() > 1);
    fGeomPoolStateStack.pop_back();
}
// Issues a geometry draw; silently skipped when clip/state setup fails
// (e.g. the draw is fully clipped out).
void GrGpu::onDraw(const DrawInfo& info) {
    this->handleDirtyContext();
    GrDrawState::AutoRestoreEffects are;
    if (!this->setupClipAndFlushState(PrimTypeToDrawType(info.primitiveType()),
                                      info.getDstCopy(), &are, info.getDevBounds())) {
        return;
    }
    this->onGpuDraw(info);
}
// Stencils a path into the stencil buffer (no color writes here).
void GrGpu::onStencilPath(const GrPath* path, SkPath::FillType fill) {
    this->handleDirtyContext();
    GrDrawState::AutoRestoreEffects are;
    if (!this->setupClipAndFlushState(kStencilPath_DrawType, NULL, &are, NULL)) {
        return;
    }
    this->pathRendering()->stencilPath(path, fill);
}
// Draws a single path via the path-rendering backend. Vertex attribs are
// reset to defaults because path draws do not consume vertex data.
void GrGpu::onDrawPath(const GrPath* path, SkPath::FillType fill,
                       const GrDeviceCoordTexture* dstCopy) {
    this->handleDirtyContext();
    drawState()->setDefaultVertexAttribs();
    GrDrawState::AutoRestoreEffects are;
    if (!this->setupClipAndFlushState(kDrawPath_DrawType, dstCopy, &are, NULL)) {
        return;
    }
    this->pathRendering()->drawPath(path, fill);
}
// Draws 'count' paths selected from a path range, each with its own transform.
void GrGpu::onDrawPaths(const GrPathRange* pathRange,
                        const uint32_t indices[], int count,
                        const float transforms[], PathTransformType transformsType,
                        SkPath::FillType fill, const GrDeviceCoordTexture* dstCopy) {
    this->handleDirtyContext();
    drawState()->setDefaultVertexAttribs();
    GrDrawState::AutoRestoreEffects are;
    if (!this->setupClipAndFlushState(kDrawPaths_DrawType, dstCopy, &are, NULL)) {
        return;
    }
    this->pathRendering()->drawPaths(pathRange, indices, count, transforms, transformsType, fill);
}
// Unmaps the vertex pool so its pending data is visible to the GPU.
void GrGpu::finalizeReservedVertices() {
    SkASSERT(NULL != fVertexPool);
    fVertexPool->unmap();
}
// Unmaps the index pool so its pending data is visible to the GPU.
void GrGpu::finalizeReservedIndices() {
    SkASSERT(NULL != fIndexPool);
    fIndexPool->unmap();
}
// Lazily creates the vertex pool; if it exists but holds no client data
// (use count is zero), resets it for reuse.
void GrGpu::prepareVertexPool() {
    if (NULL == fVertexPool) {
        SkASSERT(0 == fVertexPoolUseCnt);
        fVertexPool = SkNEW_ARGS(GrVertexBufferAllocPool, (this, true,
                                                           VERTEX_POOL_VB_SIZE,
                                                           VERTEX_POOL_VB_COUNT));
        fVertexPool->releaseGpuRef();
    } else if (!fVertexPoolUseCnt) {
        // the client doesn't have valid data in the pool
        fVertexPool->reset();
    }
}
// Lazily creates the index pool; same reuse policy as prepareVertexPool().
void GrGpu::prepareIndexPool() {
    if (NULL == fIndexPool) {
        SkASSERT(0 == fIndexPoolUseCnt);
        fIndexPool = SkNEW_ARGS(GrIndexBufferAllocPool, (this, true,
                                                         INDEX_POOL_IB_SIZE,
                                                         INDEX_POOL_IB_COUNT));
        fIndexPool->releaseGpuRef();
    } else if (!fIndexPoolUseCnt) {
        // the client doesn't have valid data in the pool
        fIndexPool->reset();
    }
}
// Reserves space for 'vertexCount' vertices in the vertex pool; on success
// records the backing buffer/start in the current pool state and bumps the
// pool use count.
bool GrGpu::onReserveVertexSpace(size_t vertexSize,
                                 int vertexCount,
                                 void** vertices) {
    GeometryPoolState& geomPoolState = fGeomPoolStateStack.back();
    SkASSERT(vertexCount > 0);
    SkASSERT(NULL != vertices);
    this->prepareVertexPool();
    *vertices = fVertexPool->makeSpace(vertexSize,
                                       vertexCount,
                                       &geomPoolState.fPoolVertexBuffer,
                                       &geomPoolState.fPoolStartVertex);
    if (NULL == *vertices) {
        return false;
    }
    ++fVertexPoolUseCnt;
    return true;
}
// Index-space analogue of onReserveVertexSpace().
bool GrGpu::onReserveIndexSpace(int indexCount, void** indices) {
    GeometryPoolState& geomPoolState = fGeomPoolStateStack.back();
    SkASSERT(indexCount > 0);
    SkASSERT(NULL != indices);
    this->prepareIndexPool();
    *indices = fIndexPool->makeSpace(indexCount,
                                     &geomPoolState.fPoolIndexBuffer,
                                     &geomPoolState.fPoolStartIndex);
    if (NULL == *indices) {
        return false;
    }
    ++fIndexPoolUseCnt;
    return true;
}
void GrGpu::releaseReservedVertexSpace() {
const GeometrySrcState& geoSrc = this->getGeomSrc();
SkASSERT(kReserved_GeometrySrcType == geoSrc.fVertexSrc);
size_t bytes = geoSrc.fVertexCount * geoSrc.fVertexSize;
fVertexPool->putBack(bytes);
--fVertexPoolUseCnt;
}
void GrGpu::releaseReservedIndexSpace() {
const GeometrySrcState& geoSrc = this->getGeomSrc();
SkASSERT(kReserved_GeometrySrcType == geoSrc.fIndexSrc);
size_t bytes = geoSrc.fIndexCount * sizeof(uint16_t);
fIndexPool->putBack(bytes);
--fIndexPoolUseCnt;
}
// Copies a client vertex array into pool-managed buffer storage. The pool
// state records the backing buffer and start vertex for later draws.
void GrGpu::onSetVertexSourceToArray(const void* vertexArray, int vertexCount) {
    this->prepareVertexPool();
    GeometryPoolState& geomPoolState = fGeomPoolStateStack.back();
#ifdef SK_DEBUG
    bool success =
#endif
         fVertexPool->appendVertices(this->getVertexSize(),
                                     vertexCount,
                                     vertexArray,
                                     &geomPoolState.fPoolVertexBuffer,
                                     &geomPoolState.fPoolStartVertex);
    ++fVertexPoolUseCnt;
    // appendVertices' result is only consumed in debug builds.
    GR_DEBUGASSERT(success);
}
// Copies a client index array into pool-managed buffer storage. The pool
// state records the backing buffer and start index for later draws.
void GrGpu::onSetIndexSourceToArray(const void* indexArray, int indexCount) {
    this->prepareIndexPool();
    GeometryPoolState& geomPoolState = fGeomPoolStateStack.back();
#ifdef SK_DEBUG
    bool success =
#endif
        fIndexPool->appendIndices(indexCount,
                                  indexArray,
                                  &geomPoolState.fPoolIndexBuffer,
                                  &geomPoolState.fPoolStartIndex);
    ++fIndexPoolUseCnt;
    // appendIndices' result is only consumed in debug builds.
    GR_DEBUGASSERT(success);
}
void GrGpu::releaseVertexArray() {
// if vertex source was array, we stowed data in the pool
const GeometrySrcState& geoSrc = this->getGeomSrc();
SkASSERT(kArray_GeometrySrcType == geoSrc.fVertexSrc);
size_t bytes = geoSrc.fVertexCount * geoSrc.fVertexSize;
fVertexPool->putBack(bytes);
--fVertexPoolUseCnt;
}
void GrGpu::releaseIndexArray() {
// if index source was array, we stowed data in the pool
const GeometrySrcState& geoSrc = this->getGeomSrc();
SkASSERT(kArray_GeometrySrcType == geoSrc.fIndexSrc);
size_t bytes = geoSrc.fIndexCount * sizeof(uint16_t);
fIndexPool->putBack(bytes);
--fIndexPoolUseCnt;
}
| bsd-3-clause |
chuan9/chromium-crosswalk | ui/file_manager/file_manager/foreground/js/file_manager_commands.js | 45372 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
 * Sets the 'hidden' property of a cr.ui.Command instance and manually
 * dispatches the corresponding property-change event so that the associated
 * cr.ui.MenuItem can react to it.
 * TODO(fukino): Remove this workaround when crbug.com/481941 is fixed.
 *
 * @param {boolean} value New value of the hidden property.
 */
cr.ui.Command.prototype.setHidden = function(value) {
  var previous = this.hidden;
  if (previous === value)
    return;
  this.hidden = value;
  cr.dispatchPropertyChange(this, 'hidden', value, previous);
};
/**
 * A command.
 * Concrete commands implement execute() and canExecute() and are registered
 * under their command id in CommandHandler.COMMANDS_ below.
 * @interface
 */
var Command = function() {};
/**
 * Metadata property names used by Command.
 * This metadata is expected to be cached (getPinTargetEntries reads 'hosted'
 * and 'pinned' from the metadata cache).
 * @const {!Array<string>}
 */
Command.METADATA_PREFETCH_PROPERTY_NAMES = [
  'hosted',
  'pinned'
];
/**
 * Handles the execute event.
 * @param {!Event} event Command event.
 * @param {!FileManager} fileManager FileManager.
 */
Command.prototype.execute = function(event, fileManager) {};
/**
 * Handles the can execute event.
 * @param {!Event} event Can execute event.
 * @param {!FileManager} fileManager FileManager.
 */
Command.prototype.canExecute = function(event, fileManager) {};
/**
 * Utility functions shared by the commands: entry extraction from event
 * targets and common canExecute helpers.
 */
var CommandUtil = {};
/**
 * Extracts the first entry on which a command event was dispatched.
 *
 * @param {EventTarget} element Element which is the command event's target.
 * @return {Entry} First found entry, or null when there is none.
 */
CommandUtil.getCommandEntry = function(element) {
  var entries = CommandUtil.getCommandEntries(element);
  return entries.length > 0 ? entries[0] : null;
};
/**
 * Extracts entries on which a command event was dispatched.
 *
 * @param {EventTarget} element Element which is the command event's target.
 * @return {!Array<!Entry>} Entries of the found node; empty when the target
 *     carries no entries or the selection contains non-Entry items.
 */
CommandUtil.getCommandEntries = function(element) {
  if (element instanceof DirectoryTree) {
    // element is a DirectoryTree.
    return element.selectedItem ? [element.selectedItem.entry] : [];
  } else if (element instanceof DirectoryItem ||
             element instanceof ShortcutItem) {
    // element is a sub item in DirectoryTree.
    return [element.entry];
  } else if (element instanceof cr.ui.List) {
    // element is a normal List (eg. the file list on the right panel).
    var entries = element.selectedItems;
    // Bug fix: the original expression was
    //     entries || entries.some(...) ? entries : []
    // Because || binds tighter than ?:, it returned |entries| whenever it was
    // truthy — even when it contained non-Entry items — and threw a TypeError
    // when |selectedItems| was falsy. Per the original intent ("check if it
    // is Entry or not by checking for toURL()"), only return the selection
    // when every item is an Entry.
    if (!entries)
      return [];
    var allAreEntries = !entries.some(function(entry) {
      return !('toURL' in entry);
    });
    return allAreEntries ? entries : [];
  } else {
    return [];
  }
};
/**
 * Obtains an entry from the given navigation model item.
 * @param {!NavigationModelItem} item Navigation model item.
 * @return {Entry} Related entry, or null for unsupported item types.
 * @private
 */
CommandUtil.getEntryFromNavigationModelItem_ = function(item) {
  if (item.type === NavigationModelItemType.VOLUME) {
    var volumeItem = /** @type {!NavigationModelVolumeItem} */ (item);
    return volumeItem.volumeInfo.displayRoot;
  }
  if (item.type === NavigationModelItemType.SHORTCUT) {
    return /** @type {!NavigationModelShortcutItem} */ (item).entry;
  }
  return null;
};
/**
 * Checks if command can be executed on drive.
 * Enables the command exactly while the current volume is Drive; does not
 * touch the command's visibility.
 * @param {!Event} event Command event to mark.
 * @param {!FileManager} fileManager FileManager to use.
 */
CommandUtil.canExecuteEnabledOnDriveOnly = function(event, fileManager) {
  event.canExecute = fileManager.isOnDrive();
};
/**
 * Enables and shows the command only when the current volume is Drive and
 * Files.app runs as a normal app (not as a modal file-picker dialog);
 * otherwise the command is both disabled and hidden.
 * @param {!Event} event Command event to mark.
 * @param {!FileManager} fileManager FileManager to use.
 */
CommandUtil.canExecuteVisibleOnDriveInNormalAppModeOnly =
    function(event, fileManager) {
  var isDriveNormalMode = fileManager.isOnDrive() &&
      !DialogType.isModal(fileManager.dialogType);
  event.canExecute = isDriveNormalMode;
  event.command.setHidden(!isDriveNormalMode);
};
/**
 * Marks the command as unconditionally executable.
 * @param {!Event} event Command event to mark.
 */
CommandUtil.canExecuteAlways = function(event) {
  event.canExecute = true;
};
/**
 * Obtains target entries that can be pinned from the selection.
 * If directories are included in the selection, it just returns an empty
 * array to avoid confusion, because pinning a directory is not supported
 * currently.
 *
 * NOTE(review): reads the global |fileManager| rather than taking it as a
 * parameter like the other helpers — confirm the global is always set before
 * any command runs.
 *
 * @return {!Array<!Entry>} Target entries.
 */
CommandUtil.getPinTargetEntries = function() {
  // If current directory is not on drive, no entry can be pinned.
  if (!fileManager.isOnDrive())
    return [];
  var hasDirectory = false;
  var results = fileManager.getSelection().entries.filter(function(entry) {
    hasDirectory = hasDirectory || entry.isDirectory;
    if (!entry || hasDirectory)
      return false;
    var metadata = fileManager.getMetadataModel().getCache(
        [entry], ['hosted', 'pinned'])[0];
    // Hosted (Google Docs) files cannot be pinned.
    if (metadata.hosted)
      return false;
    // Side effect: annotate the entry with its cached pinned state; the
    // 'toggle-pinned' command reads entry.pinned later.
    entry.pinned = metadata.pinned;
    return true;
  });
  // Any directory in the selection rejects the whole selection.
  return hasDirectory ? [] : results;
};
/**
 * Sets the default handler for the commandId and prevents handling
 * the keydown events for this command. Not doing that breaks relationship
 * of original keyboard event and the command. WebKit would handle it
 * differently in some cases.
 * @param {Node} node to register command handler on.
 * @param {string} commandId Command id to respond to.
 */
CommandUtil.forceDefaultHandler = function(node, commandId) {
  var doc = node.ownerDocument;
  var command = doc.querySelector('command[id="' + commandId + '"]');
  node.addEventListener('keydown', function(e) {
    if (command.matchesEvent(e)) {
      // Prevent cr.ui.CommandManager of handling it and leave it
      // for the default handler.
      e.stopPropagation();
    }
  });
  node.addEventListener('command', function(event) {
    if (event.command.id !== commandId)
      return;
    // NOTE(review): uses the global |document| here while |doc|
    // (node.ownerDocument) is used above — presumably both refer to the same
    // document; confirm.
    document.execCommand(event.command.id);
    // Stop further propagation of the command event.
    event.cancelBubble = true;
  });
  node.addEventListener('canExecute', function(event) {
    if (event.command.id === commandId)
      event.canExecute = document.queryCommandEnabled(event.command.id);
  });
};
/**
 * Default command: forwards the command id straight to the document's
 * execCommand / queryCommandEnabled (used for 'cut' and 'copy').
 * @type {Command}
 */
CommandUtil.defaultCommand = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var doc = fileManager.document;
    doc.execCommand(event.command.id);
  },
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var doc = fileManager.document;
    event.canExecute = doc.queryCommandEnabled(event.command.id);
  }
});
/**
 * Creates the volume switch command with index.
 * @param {number} index Volume index from 1 to 9 (1-based for the user;
 *     the directory tree itself is 0-based).
 * @return {Command} Volume switch command.
 */
CommandUtil.createVolumeSwitchCommand = function(index) {
  var treeIndex = index - 1;
  return /** @type {Command} */ ({
    /**
     * @param {!Event} event Command event.
     * @param {!FileManager} fileManager FileManager to use.
     */
    execute: function(event, fileManager) {
      fileManager.directoryTree.activateByIndex(treeIndex);
    },
    /**
     * @param {!Event} event Command event.
     * @param {!FileManager} fileManager FileManager to use.
     */
    canExecute: function(event, fileManager) {
      var volumeCount = fileManager.directoryTree.items.length;
      event.canExecute = index > 0 && index <= volumeCount;
    }
  });
};
/**
 * Returns the selected directory entry when the selection consists of exactly
 * one item and that item is a directory; otherwise returns null.
 * @param {FileSelection} selection Instance of FileSelection.
 * @return {?DirectoryEntry} Directory entry which is selected alone.
 */
CommandUtil.getOnlyOneSelectedDirectory = function(selection) {
  if (!selection || selection.totalCount !== 1)
    return null;
  var candidate = selection.entries[0];
  if (!candidate.isDirectory)
    return null;
  return /** @type {!DirectoryEntry} */(candidate);
};
/**
 * Handle of the command events.
 * Decorates every <command> element in the document and routes their
 * 'command' / 'canExecute' events to the implementations registered in
 * CommandHandler.COMMANDS_.
 * @param {!FileManager} fileManager FileManager.
 * @constructor
 * @struct
 */
var CommandHandler = function(fileManager) {
  /**
   * FileManager.
   * @type {!FileManager}
   * @private
   */
  this.fileManager_ = fileManager;
  /**
   * Command elements, keyed by command id.
   * @type {Object<cr.ui.Command>}
   * @private
   */
  this.commands_ = {};
  // Decorate command tags in the document.
  var commands = fileManager.document.querySelectorAll('command');
  for (var i = 0; i < commands.length; i++) {
    cr.ui.Command.decorate(commands[i]);
    this.commands_[commands[i].id] = commands[i];
  }
  // Register events.
  fileManager.document.addEventListener('command', this.onCommand_.bind(this));
  fileManager.document.addEventListener(
      'canExecute', this.onCanExecute_.bind(this));
  // Re-evaluate availability whenever the directory or the Drive connection
  // state changes.
  fileManager.directoryModel.addEventListener(
      'directory-change', this.updateAvailability.bind(this));
  fileManager.volumeManager.addEventListener(
      'drive-connection-changed', this.updateAvailability.bind(this));
};
/**
 * Updates the availability of all registered commands by asking each one to
 * re-run its canExecute logic.
 */
CommandHandler.prototype.updateAvailability = function() {
  Object.keys(this.commands_).forEach(function(id) {
    this.commands_[id].canExecuteChange();
  }, this);
};
/**
 * Checks if the handler should ignore the current event, eg. since there is
 * a popup dialog currently opened.
 *
 * @return {boolean} True if the event should be ignored, false otherwise.
 * @private
 */
CommandHandler.prototype.shouldIgnoreEvents_ = function() {
  // getElementsByClassName is used instead of querySelector because this
  // method runs often and must stay cheap.
  var dialogs = this.fileManager_.document.getElementsByClassName(
      'cr-dialog-container');
  return dialogs.length !== 0 && dialogs[0].classList.contains('shown');
};
/**
 * Handles command events by dispatching to the registered Command.
 * @param {!Event} event Command event.
 * @private
 */
CommandHandler.prototype.onCommand_ = function(event) {
  if (this.shouldIgnoreEvents_())
    return;
  var command = CommandHandler.COMMANDS_[event.command.id];
  command.execute.call(/** @type {Command} */ (command), event,
                       this.fileManager_);
};
/**
 * Handles canExecute events by dispatching to the registered Command.
 * @param {!Event} event Can execute event.
 * @private
 */
CommandHandler.prototype.onCanExecute_ = function(event) {
  if (this.shouldIgnoreEvents_())
    return;
  var command = CommandHandler.COMMANDS_[event.command.id];
  command.canExecute.call(/** @type {Command} */ (command), event,
                          this.fileManager_);
};
/**
 * Commands.
 * Maps a command id (matching the id of a <command> element) to its Command
 * implementation.
 * @type {Object<Command>}
 * @const
 * @private
 */
CommandHandler.COMMANDS_ = {};
/**
 * Unmounts external drive.
 * @type {Command}
 */
CommandHandler.COMMANDS_['unmount'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager The file manager instance.
   */
  execute: function(event, fileManager) {
    var root = CommandUtil.getCommandEntry(event.target);
    if (!root) {
      console.warn('unmount command executed on an element which does not ' +
                   'have corresponding entry.');
      return;
    }
    var errorCallback = function() {
      fileManager.ui.alertDialog.showHtml(
          '', str('UNMOUNT_FAILED'), null, null, null);
    };
    var volumeInfo = fileManager.volumeManager.getVolumeInfo(root);
    if (!volumeInfo) {
      errorCallback();
      return;
    }
    // NOTE(review): accesses the private volumeManager_ field directly,
    // unlike the volumeManager accessor used above — confirm intentional.
    fileManager.volumeManager_.unmount(
        volumeInfo,
        function() {},
        errorCallback);
  },
  /**
   * Enabled/visible only for roots that can be closed: archives, removable
   * media and provided (FSP) volumes. Also adjusts the label accordingly.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager The file manager instance.
   */
  canExecute: function(event, fileManager) {
    var root = CommandUtil.getCommandEntry(event.target);
    if (!root)
      return;
    var locationInfo = fileManager.volumeManager.getLocationInfo(root);
    var rootType =
        locationInfo && locationInfo.isRootEntry && locationInfo.rootType;
    event.canExecute = (rootType == VolumeManagerCommon.RootType.ARCHIVE ||
                        rootType == VolumeManagerCommon.RootType.REMOVABLE ||
                        rootType == VolumeManagerCommon.RootType.PROVIDED);
    event.command.setHidden(!event.canExecute);
    switch (rootType) {
      case VolumeManagerCommon.RootType.ARCHIVE:
      case VolumeManagerCommon.RootType.PROVIDED:
        event.command.label = str('CLOSE_VOLUME_BUTTON_LABEL');
        break;
      case VolumeManagerCommon.RootType.REMOVABLE:
        event.command.label = str('UNMOUNT_DEVICE_BUTTON_LABEL');
        break;
    }
  }
});
/**
 * Formats external drive.
 * @type {Command}
 */
CommandHandler.COMMANDS_['format'] = /** @type {Command} */ ({
  /**
   * Shows a confirmation dialog and, if confirmed, formats the volume.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager The file manager instance.
   */
  execute: function(event, fileManager) {
    var directoryModel = fileManager.directoryModel;
    var root = CommandUtil.getCommandEntry(event.target);
    // If an entry is not found from the event target, use the current
    // directory. This can happen for the format button for unsupported and
    // unrecognized volumes.
    if (!root)
      root = directoryModel.getCurrentDirEntry();
    var volumeInfo = fileManager.volumeManager.getVolumeInfo(root);
    if (volumeInfo) {
      // Formatting is destructive, so always ask first.
      fileManager.ui.confirmDialog.show(
          loadTimeData.getString('FORMATTING_WARNING'),
          chrome.fileManagerPrivate.formatVolume.bind(null,
                                                      volumeInfo.volumeId),
          null, null);
    }
  },
  /**
   * Enabled for writable removable volumes; visible for all removable ones.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager The file manager instance.
   */
  canExecute: function(event, fileManager) {
    var directoryModel = fileManager.directoryModel;
    var root = CommandUtil.getCommandEntry(event.target);
    // |root| is null for unrecognized volumes. Regard such volumes as writable
    // so that the format command is enabled.
    var isReadOnly = root && fileManager.isOnReadonlyDirectory();
    // See the comment in execute() for why doing this.
    if (!root)
      root = directoryModel.getCurrentDirEntry();
    var location = root && fileManager.volumeManager.getLocationInfo(root);
    var removable = location && location.rootType ===
        VolumeManagerCommon.RootType.REMOVABLE;
    event.canExecute = removable && !isReadOnly;
    event.command.setHidden(!removable);
  }
});
/**
 * Initiates new folder creation.
 * Generates a unique name ("New Folder", "New Folder (1)", ...), creates the
 * directory, selects it and immediately starts inline renaming.
 * @type {Command}
 */
CommandHandler.COMMANDS_['new-folder'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var defaultName = str('DEFAULT_NEW_FOLDER_NAME');
    // Find a name that doesn't exist in the data model.
    var files = fileManager.directoryModel.getFileList();
    var hash = {};
    for (var i = 0; i < files.length; i++) {
      var name = files.item(i).name;
      // Filtering names prevents from conflicts with prototype's names
      // and '__proto__'.
      if (name.substring(0, defaultName.length) == defaultName)
        hash[name] = 1;
    }
    var baseName = defaultName;
    var separator = '';
    var suffix = '';
    // |index| starts as '' so the first candidate is the bare default name;
    // the first advance() numerically coerces it to 1, yielding "... (1)".
    var index = '';
    var advance = function() {
      separator = ' (';
      suffix = ')';
      index++;
    };
    var current = function() {
      return baseName + separator + index + suffix;
    };
    // Accessing hasOwnProperty is safe since hash properties filtered.
    while (hash.hasOwnProperty(current())) {
      advance();
    }
    var list = fileManager.ui.listContainer.currentList;
    var onSuccess = function(entry) {
      metrics.recordUserAction('CreateNewFolder');
      list.selectedItem = entry;
      fileManager.ui.listContainer.endBatchUpdates();
      // Drop the user straight into renaming the freshly created folder.
      fileManager.namingController.initiateRename();
    };
    var onError = function(error) {
      fileManager.ui.listContainer.endBatchUpdates();
      fileManager.ui.alertDialog.show(
          strf('ERROR_CREATING_FOLDER',
               current(),
               util.getFileErrorString(error.name)),
          null, null);
    };
    var onAbort = function() {
      fileManager.ui.listContainer.endBatchUpdates();
    };
    // Batch updates are ended by exactly one of the three callbacks above.
    fileManager.ui.listContainer.startBatchUpdates();
    fileManager.directoryModel.createDirectory(
        current(), onSuccess, onError, onAbort);
  },
  /**
   * Disabled while the directory is read-only, a rename is in progress, or
   * the model is searching/scanning.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var directoryModel = fileManager.directoryModel;
    event.canExecute = !fileManager.isOnReadonlyDirectory() &&
        !fileManager.namingController.isRenamingInProgress() &&
        !directoryModel.isSearching() &&
        !directoryModel.isScanning();
  }
});
/**
 * Initiates new window creation, opening it at the current directory.
 * @type {Command}
 */
CommandHandler.COMMANDS_['new-window'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var currentEntry = fileManager.getCurrentDirectoryEntry();
    fileManager.backgroundPage.launchFileManager({
      currentDirectoryURL: currentEntry && currentEntry.toURL()
    });
  },
  /**
   * Only available in the full-page app (not in picker dialogs) and once a
   * current directory exists.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var isFullPage = fileManager.dialogType === DialogType.FULL_PAGE;
    event.canExecute = fileManager.getCurrentDirectoryEntry() && isFullPage;
  }
});
CommandHandler.COMMANDS_['toggle-hidden-files'] = /** @type {Command} */ ({
  /**
   * Flips the hidden-file filter and syncs the menu checkmark.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var wasFilterOn = fileManager.fileFilter.isFilterHiddenOn();
    fileManager.fileFilter.setFilterHidden(!wasFilterOn);
    // The item reads "checked" when hidden files are shown, i.e. when the
    // filter was on before this toggle.
    event.command.checked = wasFilterOn;
  },
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: CommandUtil.canExecuteAlways
});
/**
 * Toggles drive sync settings.
 * The gear-menu button carries 'checked' when syncing over cellular is
 * currently disabled; toggling flips the cellularDisabled preference.
 * @type {Command}
 */
CommandHandler.COMMANDS_['drive-sync-settings'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    // If checked, the sync is disabled.
    var nowCellularDisabled =
        fileManager.ui.gearMenu.syncButton.hasAttribute('checked');
    var changeInfo = {cellularDisabled: !nowCellularDisabled};
    chrome.fileManagerPrivate.setPreferences(changeInfo);
  },
  /**
   * Only shown when Drive settings apply and a cellular connection exists.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    event.canExecute = fileManager.shouldShowDriveSettings() &&
        fileManager.volumeManager.getDriveConnectionState().
        hasCellularNetworkAccess;
    event.command.setHidden(!event.canExecute);
  }
});
/**
 * Toggles drive hosted settings (whether Google Docs files are shown).
 * The gear-menu button carries 'checked' when showing hosted files is
 * currently enabled, so toggling means disabling exactly when the button is
 * checked.
 * @type {Command}
 */
CommandHandler.COMMANDS_['drive-hosted-settings'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    // If checked, showing drive hosted files is enabled.
    var hostedFilesEnabled =
        fileManager.ui.gearMenu.hostedButton.hasAttribute('checked');
    // Cleanup: the original computed this via a double negation
    // (!(!enabled)) and carried a commented-out duplicate of the same
    // assignment; both collapse to the single statement below.
    chrome.fileManagerPrivate.setPreferences(
        {hostedFilesDisabled: hostedFilesEnabled});
  },
  /**
   * Only shown when Drive settings apply.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    event.canExecute = fileManager.shouldShowDriveSettings();
    event.command.setHidden(!event.canExecute);
  }
});
/**
 * Deletes selected files after a confirmation dialog.
 * @type {Command}
 */
CommandHandler.COMMANDS_['delete'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var entries = CommandUtil.getCommandEntries(event.target);
    // Message differs for single vs. multiple entries.
    var message = entries.length === 1 ?
        strf('GALLERY_CONFIRM_DELETE_ONE', entries[0].name) :
        strf('GALLERY_CONFIRM_DELETE_SOME', entries.length);
    fileManager.ui.deleteConfirmDialog.show(message, function() {
      fileManager.fileOperationManager.deleteEntries(entries);
    }, null, null);
  },
  /**
   * Hidden for fake entries; disabled for empty selections or selections
   * containing anything on a read-only volume.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var entries = CommandUtil.getCommandEntries(event.target);
    // If it contains a fake entry, hide command.
    var containsFakeEntry = entries.some(function(entry) {
      return util.isFakeEntry(entry);
    });
    if (containsFakeEntry) {
      event.canExecute = false;
      event.command.setHidden(true);
      return;
    }
    // If it contains an entry which is on read only volume or no item is
    // selected, disable command.
    var containsReadOnlyEntry = entries.some(function(entry) {
      var locationInfo = fileManager.volumeManager.getLocationInfo(entry);
      return locationInfo && locationInfo.isReadOnly;
    });
    event.canExecute = !containsReadOnlyEntry && entries.length > 0;
    event.command.setHidden(false);
  }
});
/**
 * Pastes files from clipboard into the current directory.
 * @type {Command}
 */
CommandHandler.COMMANDS_['paste'] = /** @type {Command} */ ({
  /**
   * Delegates to the browser's native paste handling.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    fileManager.document.execCommand(event.command.id);
  },
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var controller = fileManager.fileTransferController;
    event.canExecute = (controller &&
        controller.queryPasteCommandEnabled());
    // When exactly one folder is selected, 'paste-into-folder' is offered
    // instead, so hide plain 'paste'.
    var singleFolderSelected = !!CommandUtil.getOnlyOneSelectedDirectory(
        fileManager.getSelection());
    event.command.setHidden(singleFolderSelected);
  }
});
/**
 * Pastes files from clipboard into the selected folder.
 * @type {Command}
 */
CommandHandler.COMMANDS_['paste-into-folder'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var selection = fileManager.getSelection();
    var dest = CommandUtil.getOnlyOneSelectedDirectory(selection);
    if (!dest) return;
    // This handler tweaks the Event object for 'paste' event so that
    // the FileTransferController can distinguish this 'paste-into-folder'
    // command and know the destination directory.
    var handler = function(inEvent) {
      inEvent.destDirectory = dest;
    };
    // Install the tweak only for the synchronous execCommand dispatch, then
    // remove it so normal pastes are unaffected.
    fileManager.document.addEventListener('paste', handler, true);
    fileManager.document.execCommand('paste');
    fileManager.document.removeEventListener('paste', handler, true);
  },
  /**
   * Shown only when exactly one folder is selected (mirror image of 'paste').
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var fileTransferController = fileManager.fileTransferController;
    event.canExecute = (fileTransferController &&
        fileTransferController.queryPasteCommandEnabled());
    // Hide this command unless only one folder is selected.
    event.command.setHidden(!CommandUtil.getOnlyOneSelectedDirectory(
        fileManager.getSelection()));
  }
});
// 'cut' and 'copy' are handled by the browser's native clipboard support via
// document.execCommand (see CommandUtil.defaultCommand).
CommandHandler.COMMANDS_['cut'] = CommandUtil.defaultCommand;
CommandHandler.COMMANDS_['copy'] = CommandUtil.defaultCommand;
/**
 * Initiates file renaming.
 * @type {Command}
 */
CommandHandler.COMMANDS_['rename'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    fileManager.namingController.initiateRename();
  },
  /**
   * Renaming requires a writable directory, no rename already in progress,
   * and exactly one selected item.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var selection = fileManager.getSelection();
    var exactlyOneSelected = selection && selection.totalCount == 1;
    event.canExecute = !fileManager.namingController.isRenamingInProgress() &&
        !fileManager.isOnReadonlyDirectory() &&
        exactlyOneSelected;
  }
});
/**
 * Opens drive help (or the generic Files.app help off Drive).
 * @type {Command}
 */
CommandHandler.COMMANDS_['volume-help'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    if (fileManager.isOnDrive())
      util.visitURL(str('GOOGLE_DRIVE_HELP_URL'));
    else
      util.visitURL(str('FILES_APP_HELP_URL'));
  },
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    // Hides the help menu in modal dialog mode. It does not make much sense
    // because after all, users cannot view the help without closing, and
    // besides that the help page is about Files.app as an app, not about the
    // dialog mode itself. It can also lead to hard-to-fix bug crbug.com/339089.
    var hideHelp = DialogType.isModal(fileManager.dialogType);
    event.canExecute = !hideHelp;
    event.command.setHidden(hideHelp);
    // NOTE(review): reaches into the private document_ field, unlike the
    // fileManager.document accessor used elsewhere — confirm intentional.
    fileManager.document_.getElementById('help-separator').hidden = hideHelp;
  }
});
/**
 * Opens drive buy-more-space url.
 * @type {Command}
 */
CommandHandler.COMMANDS_['drive-buy-more-space'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    util.visitURL(str('GOOGLE_DRIVE_BUY_STORAGE_URL'));
  },
  // Visible/enabled only on Drive in the normal (non-dialog) app mode.
  canExecute: CommandUtil.canExecuteVisibleOnDriveInNormalAppModeOnly
});
/**
 * Opens drive.google.com.
 * @type {Command}
 */
CommandHandler.COMMANDS_['drive-go-to-drive'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    util.visitURL(str('GOOGLE_DRIVE_ROOT_URL'));
  },
  // Visible/enabled only on Drive in the normal (non-dialog) app mode.
  canExecute: CommandUtil.canExecuteVisibleOnDriveInNormalAppModeOnly
});
/**
 * Displays the "open with" task-picker dialog for the current selection.
 * @type {Command}
 */
CommandHandler.COMMANDS_['open-with'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var tasks = fileManager.getSelection().tasks;
    if (!tasks)
      return;
    tasks.showTaskPicker(
        fileManager.ui.defaultTaskPicker,
        str('OPEN_WITH_BUTTON_LABEL'),
        '',
        function(task) { tasks.execute(task.taskId); },
        false);
  },
  /**
   * Only meaningful when there is more than one task to choose from.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var tasks = fileManager.getSelection().tasks;
    event.canExecute = tasks && tasks.size() > 1;
  }
});
/**
 * Focuses and selects the search input box.
 * @type {Command}
 */
CommandHandler.COMMANDS_['search'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    // Clear the item selection before moving focus to the search box.
    fileManager.directoryModel.clearSelection();
    var searchInput = fileManager.document.querySelector('#search-box input');
    searchInput.focus();
    searchInput.select();
  },
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var renaming = fileManager.namingController.isRenamingInProgress();
    event.canExecute = !renaming;
  }
});
/**
 * Activates the n-th volume.
 * Each command delegates to CommandUtil.createVolumeSwitchCommand with a
 * 1-based volume index.
 * @type {Command}
 */
CommandHandler.COMMANDS_['volume-switch-1'] =
    CommandUtil.createVolumeSwitchCommand(1);
CommandHandler.COMMANDS_['volume-switch-2'] =
    CommandUtil.createVolumeSwitchCommand(2);
CommandHandler.COMMANDS_['volume-switch-3'] =
    CommandUtil.createVolumeSwitchCommand(3);
CommandHandler.COMMANDS_['volume-switch-4'] =
    CommandUtil.createVolumeSwitchCommand(4);
CommandHandler.COMMANDS_['volume-switch-5'] =
    CommandUtil.createVolumeSwitchCommand(5);
CommandHandler.COMMANDS_['volume-switch-6'] =
    CommandUtil.createVolumeSwitchCommand(6);
CommandHandler.COMMANDS_['volume-switch-7'] =
    CommandUtil.createVolumeSwitchCommand(7);
CommandHandler.COMMANDS_['volume-switch-8'] =
    CommandUtil.createVolumeSwitchCommand(8);
CommandHandler.COMMANDS_['volume-switch-9'] =
    CommandUtil.createVolumeSwitchCommand(9);
/**
 * Flips 'available offline' flag on the file.
 * Pins/unpins the selected Drive entries one at a time via a small state
 * machine (|steps|): start -> entryPinned -> updateUI -> start ... until the
 * entry list is exhausted or an error occurs.
 * @type {Command}
 */
CommandHandler.COMMANDS_['toggle-pinned'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event
   * @param {!FileManager} fileManager
   */
  execute: function(event, fileManager) {
    // The command's checked state reflects "all selected are pinned", so the
    // new pin state is its inverse.
    var pin = !event.command.checked;
    event.command.checked = pin;
    var entries = CommandUtil.getPinTargetEntries();
    if (entries.length == 0)
      return;
    var currentEntry;
    var error = false;
    var metadataModel = fileManager.getMetadataModel();
    var steps = {
      // Pick an entry and pin it.
      start: function() {
        // Check if all the entries are pinned or not.
        if (entries.length == 0)
          return;
        currentEntry = entries.shift();
        chrome.fileManagerPrivate.pinDriveFile(
            currentEntry.toURL(),
            pin,
            steps.entryPinned);
      },
      // Check the result of pinning
      entryPinned: function() {
        // Convert to boolean.
        error = !!chrome.runtime.lastError;
        // A pin failure is reported as "out of space"; fetch the entry's
        // size for the error message. Unpin failures are not reported.
        if (error && pin) {
          metadataModel.get([currentEntry], ['size']).then(
              function(results) {
                steps.showError(results[0].size);
              });
          return;
        }
        metadataModel.notifyEntriesChanged([currentEntry]);
        metadataModel.get([currentEntry], ['pinned']).then(steps.updateUI);
      },
      // Update the user interface according to the cache state.
      updateUI: function() {
        fileManager.ui.listContainer.currentView.updateListItemsMetadata(
            'external', [currentEntry]);
        // Continue with the next entry unless an error stopped the loop.
        if (!error)
          steps.start();
      },
      // Show the error
      showError: function(size) {
        fileManager.ui.alertDialog.showHtml(
            str('DRIVE_OUT_OF_SPACE_HEADER'),
            strf('DRIVE_OUT_OF_SPACE_MESSAGE',
                 unescape(currentEntry.name),
                 util.bytesToString(size)),
            null, null, null);
      }
    };
    steps.start();
    // Warn when pinning while sync-over-cellular is suppressed.
    var driveSyncHandler =
        fileManager.backgroundPage.background.driveSyncHandler;
    if (pin && driveSyncHandler.isSyncSuppressed())
      driveSyncHandler.showDisabledMobileSyncNotification();
  },
  /**
   * Visible only when there are pinnable entries; checked when every target
   * entry is already pinned (entry.pinned is set by getPinTargetEntries).
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var entries = CommandUtil.getPinTargetEntries();
    var checked = true;
    for (var i = 0; i < entries.length; i++) {
      checked = checked && entries[i].pinned;
    }
    if (entries.length > 0) {
      event.canExecute = true;
      event.command.setHidden(false);
      event.command.checked = checked;
    } else {
      event.canExecute = false;
      event.command.setHidden(true);
    }
  }
});
/**
 * Creates zip file for current selection.
 * @type {Command}
 */
CommandHandler.COMMANDS_['zip-selection'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var dirEntry = fileManager.getCurrentDirectoryEntry();
    if (!dirEntry)
      return;
    var selectionEntries = fileManager.getSelection().entries;
    // NOTE(review): accesses the private fileOperationManager_ field
    // directly, unlike the fileOperationManager accessor used by 'delete' —
    // confirm intentional.
    fileManager.fileOperationManager_.zipSelection(
        /** @type {!DirectoryEntry} */ (dirEntry), selectionEntries);
  },
  /**
   * Zipping requires a writable current directory, a non-empty selection,
   * and is not offered on Drive or MTP volumes.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var dirEntry = fileManager.getCurrentDirectoryEntry();
    var selection = fileManager.getSelection();
    event.canExecute =
        dirEntry &&
        !fileManager.isOnReadonlyDirectory() &&
        !fileManager.isOnDrive() &&
        !fileManager.isOnMTP() &&
        selection && selection.totalCount > 0;
  }
});
/**
 * Shows the share dialog for the current selection (single only).
 * @type {Command}
 */
CommandHandler.COMMANDS_['share'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    var entries = fileManager.getSelection().entries;
    // Sharing is a per-entry operation; bail out on multi-selection.
    if (entries.length != 1) {
      console.warn('Unable to share multiple items at once.');
      return;
    }
    // Add the overlapped class to prevent the application window from
    // capturing mouse events.
    fileManager.ui.shareDialog.showEntry(entries[0], function(result) {
      if (result == ShareDialog.Result.NETWORK_ERROR)
        fileManager.ui.errorDialog.show(str('SHARE_ERROR'), null, null, null);
    }.bind(this));
  },
  /**
   * Enabled only for a single selected entry on Drive while online.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var selection = fileManager.getSelection();
    var isDriveOffline =
        fileManager.volumeManager.getDriveConnectionState().type ===
            VolumeManagerCommon.DriveConnectionType.OFFLINE;
    event.canExecute = fileManager.isOnDrive() &&
        !isDriveOffline &&
        selection && selection.totalCount == 1;
    // Hide (not just disable) the item entirely outside of Drive.
    event.command.setHidden(!fileManager.isOnDrive());
  }
});
/**
 * Creates a shortcut of the selected folder (single only).
 * @type {Command}
 */
CommandHandler.COMMANDS_['create-folder-shortcut'] = /** @type {Command} */ ({
  /**
   * Adds the target folder to the folder-shortcuts model (no-op if it is
   * already a shortcut).
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager The file manager instance.
   */
  execute: function(event, fileManager) {
    var entry = CommandUtil.getCommandEntry(event.target);
    if (!entry) {
      console.warn('create-folder-shortcut command executed on an element ' +
                   'which does not have corresponding entry.');
      return;
    }
    if (fileManager.folderShortcutsModel.exists(entry))
      return;
    fileManager.folderShortcutsModel.add(entry);
  },
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager The file manager instance.
   */
  canExecute: function(event, fileManager) {
    var entry = CommandUtil.getCommandEntry(event.target);
    var folderShortcutExists =
        entry && fileManager.folderShortcutsModel.exists(entry);
    var onlyOneFolderSelected = true;
    // Only on list, user can select multiple files. The command is enabled only
    // when a single file is selected.
    if (event.target instanceof cr.ui.List) {
      var items = event.target.selectedItems;
      onlyOneFolderSelected = (items.length == 1 && items[0].isDirectory);
    }
    // Eligibility (e.g. the entry's volume supports shortcuts) is decided by
    // the location info, not by the entry alone.
    var location = entry && fileManager.volumeManager.getLocationInfo(entry);
    var eligible = location && location.isEligibleForFolderShortcut;
    event.canExecute =
        eligible && onlyOneFolderSelected && !folderShortcutExists;
    event.command.setHidden(!eligible || !onlyOneFolderSelected);
  }
});
/**
 * Removes the folder shortcut.
 * @type {Command}
 */
CommandHandler.COMMANDS_['remove-folder-shortcut'] = /** @type {Command} */ ({
  /**
   * Removes the target folder from the folder-shortcuts model.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager The file manager instance.
   */
  execute: function(event, fileManager) {
    var entry = CommandUtil.getCommandEntry(event.target);
    if (!entry) {
      console.warn('remove-folder-shortcut command executed on an element ' +
                   'which does not have corresponding entry.');
      return;
    }
    fileManager.folderShortcutsModel.remove(entry);
  },
  /**
   * Enabled (and visible) only when the target is an existing shortcut on an
   * eligible volume.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager The file manager instance.
   */
  canExecute: function(event, fileManager) {
    var entry = CommandUtil.getCommandEntry(event.target);
    var location = entry && fileManager.volumeManager.getLocationInfo(entry);
    var eligible = location && location.isEligibleForFolderShortcut;
    var isShortcut = entry && fileManager.folderShortcutsModel.exists(entry);
    event.canExecute = isShortcut && eligible;
    event.command.setHidden(!event.canExecute);
  }
});
/**
 * Zoom in to the Files.app.
 * @type {Command}
 */
CommandHandler.COMMANDS_['zoom-in'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    // Zooming is delegated to the browser via the private files API.
    chrome.fileManagerPrivate.zoom('in');
  },
  canExecute: CommandUtil.canExecuteAlways
});

/**
 * Zoom out from the Files.app.
 * @type {Command}
 */
CommandHandler.COMMANDS_['zoom-out'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    chrome.fileManagerPrivate.zoom('out');
  },
  canExecute: CommandUtil.canExecuteAlways
});

/**
 * Reset the zoom factor.
 * @type {Command}
 */
CommandHandler.COMMANDS_['zoom-reset'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    chrome.fileManagerPrivate.zoom('reset');
  },
  canExecute: CommandUtil.canExecuteAlways
});
/**
 * Sorts the file list by name, in ascending order.
 * @type {Command}
 */
CommandHandler.COMMANDS_['sort-by-name'] = /** @type {Command} */ ({
  execute: function(event, fileManager) {
    var fileList = fileManager.directoryModel.getFileList();
    if (fileList)
      fileList.sort('name', 'asc');
  },
  canExecute: CommandUtil.canExecuteAlways
});

/**
 * Sorts the file list by size, in descending order.
 * @type {Command}
 */
CommandHandler.COMMANDS_['sort-by-size'] = /** @type {Command} */ ({
  execute: function(event, fileManager) {
    var fileList = fileManager.directoryModel.getFileList();
    if (fileList)
      fileList.sort('size', 'desc');
  },
  canExecute: CommandUtil.canExecuteAlways
});

/**
 * Sorts the file list by type, in ascending order.
 * @type {Command}
 */
CommandHandler.COMMANDS_['sort-by-type'] = /** @type {Command} */ ({
  execute: function(event, fileManager) {
    var fileList = fileManager.directoryModel.getFileList();
    if (fileList)
      fileList.sort('type', 'asc');
  },
  canExecute: CommandUtil.canExecuteAlways
});

/**
 * Sorts the file list by modification time, in descending order.
 * @type {Command}
 */
CommandHandler.COMMANDS_['sort-by-date'] = /** @type {Command} */ ({
  execute: function(event, fileManager) {
    var fileList = fileManager.directoryModel.getFileList();
    if (fileList)
      fileList.sort('modificationTime', 'desc');
  },
  canExecute: CommandUtil.canExecuteAlways
});
/**
 * Open inspector for foreground page.
 * @type {Command}
 */
CommandHandler.COMMANDS_['inspect-normal'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    // The 'normal'/'console'/'element'/'background' strings select the
    // DevTools mode understood by the private files API.
    chrome.fileManagerPrivate.openInspector('normal');
  },
  canExecute: CommandUtil.canExecuteAlways
});

/**
 * Open inspector for foreground page and bring focus to the console.
 * @type {Command}
 */
CommandHandler.COMMANDS_['inspect-console'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    chrome.fileManagerPrivate.openInspector('console');
  },
  canExecute: CommandUtil.canExecuteAlways
});

/**
 * Open inspector for foreground page in inspect element mode.
 * @type {Command}
 */
CommandHandler.COMMANDS_['inspect-element'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    chrome.fileManagerPrivate.openInspector('element');
  },
  canExecute: CommandUtil.canExecuteAlways
});

/**
 * Open inspector for background page.
 * @type {Command}
 */
CommandHandler.COMMANDS_['inspect-background'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    chrome.fileManagerPrivate.openInspector('background');
  },
  canExecute: CommandUtil.canExecuteAlways
});
/**
 * Shows a suggest dialog with new services to be added to the left nav.
 * @type {Command}
 */
CommandHandler.COMMANDS_['install-new-extension'] = /** @type {Command} */ ({
  /**
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    fileManager.ui.suggestAppsDialog.showProviders(
        function(result, itemId) {
          // If a new provider is installed, then launch it so the configuration
          // dialog is shown (if it's available).
          if (result === SuggestAppsDialog.Result.SUCCESS)
            fileManager.providersModel.requestMount(assert(itemId));
        });
  },
  /**
   * Only offered in the full-page (standalone) Files.app, not in
   * open/save picker dialogs.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    event.canExecute = fileManager.dialogType === DialogType.FULL_PAGE;
    event.command.setHidden(!event.canExecute);
  }
});
/**
 * Configures the currently selected volume.
 */
CommandHandler.COMMANDS_['configure'] = (function() {
  /**
   * Command that opens the configuration UI of a provided volume, resolving
   * the target volume either from the clicked navigation-list item or from
   * the currently opened directory.
   * @constructor
   * @implements {Command}
   */
  var ConfigureCommand = function() {
  };

  ConfigureCommand.prototype = {
    __proto__: Command.prototype,

    /**
     * Resolves the volume info for a navigation-list element, if any.
     * Returns undefined for elements that are neither volume nor shortcut
     * items.
     * @param {EventTarget} element
     * @param {!FileManager} fileManager
     * @return {VolumeInfo}
     * @private
     */
    getElementVolumeInfo_: function(element, fileManager) {
      if (element instanceof VolumeItem)
        return element.volumeInfo;
      if (element instanceof ShortcutItem) {
        return element.entry && fileManager.volumeManager.getVolumeInfo(
            element.entry);
      }
    },

    /**
     * If the command is executed on the navigation list, then use it's volume
     * info, otherwise use the currently opened volume.
     *
     * @param {!Event} event
     * @param {!FileManager} fileManager
     * @return {VolumeInfo}
     * @private
     */
    getCommandVolumeInfo_: function(event, fileManager) {
      var currentDirEntry = fileManager.directoryModel.getCurrentDirEntry();
      return this.getElementVolumeInfo_(event.target, fileManager) ||
          currentDirEntry && fileManager.volumeManager.getVolumeInfo(
              currentDirEntry);
    },

    /**
     * @override
     */
    execute: function(event, fileManager) {
      var volumeInfo = this.getCommandVolumeInfo_(event, fileManager);
      // Only provided volumes that declared themselves configurable can be
      // configured.
      if (volumeInfo && volumeInfo.configurable)
        fileManager.volumeManager.configure(volumeInfo);
    },

    /**
     * @override
     */
    canExecute: function(event, fileManager) {
      var volumeInfo = this.getCommandVolumeInfo_(event, fileManager);
      event.canExecute = volumeInfo && volumeInfo.configurable;
      event.command.setHidden(!event.canExecute);
    }
  };

  return new ConfigureCommand();
})();
/**
 * Refreshes the currently selected directory.
 */
CommandHandler.COMMANDS_['refresh'] = /** @type {Command} */ ({
  /**
   * Forces a rescan of the current directory and blinks the spinner to give
   * visual feedback.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  execute: function(event, fileManager) {
    fileManager.directoryModel.rescan(true /* refresh */);
    fileManager.spinnerController.blink();
  },
  /**
   * Manual refresh is only useful on volumes that cannot be watched for
   * changes; watchable volumes update automatically.
   * @param {!Event} event Command event.
   * @param {!FileManager} fileManager FileManager to use.
   */
  canExecute: function(event, fileManager) {
    var currentDirEntry = fileManager.directoryModel.getCurrentDirEntry();
    var volumeInfo = currentDirEntry &&
        fileManager.volumeManager.getVolumeInfo(currentDirEntry);
    event.canExecute = volumeInfo && !volumeInfo.watchable;
    // Also hidden while check-select mode is active in the file list.
    event.command.setHidden(!event.canExecute ||
        fileManager.directoryModel.getFileListSelection().getCheckSelectMode());
  }
});
| bsd-3-clause |
edisongustavo/asv | asv/branch_cache.py | 1228 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals, print_function
import tempfile
import six
from . import util
class BranchCache(object):
    """Lazily caches, for each configured branch, the set of commit hashes
    reachable from it, and answers branch-membership queries."""

    def __init__(self, conf, repo):
        # An empty/absent branch list means "master branch only", which the
        # repo layer represents as the branch name ``None``.
        self._repo = repo
        self._branches = conf.branches or [None]
        # Maps range spec -> (branch name, set of commit hashes).
        # Populated on first use by _load().
        self._hashes = None

    def _load(self):
        """Fill the cache on first access; subsequent calls are no-ops."""
        if self._hashes is not None:
            return
        self._hashes = {}
        for branch in self._branches:
            spec = self._repo.get_branch_range_spec(branch)
            self._hashes[spec] = (
                branch, set(self._repo.get_hashes_from_range(spec)))

    def get_branches(self, commit_hash):
        """Return all configured branches whose history contains the given
        commit hash."""
        self._load()
        return [branch
                for branch, hashes in self._hashes.values()
                if commit_hash in hashes]

    def get_branch_commits(self, branch):
        """Return the set of commit hashes reachable on the given branch."""
        self._load()
        return self._hashes[self._repo.get_branch_range_spec(branch)][1]
| bsd-3-clause |
sschaef/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/core/internal/project/SbtScopesBuildManager.scala | 5323 | package org.scalaide.core.internal.project
import java.io.File
import scala.tools.nsc.Settings
import org.eclipse.core.resources.IFile
import org.eclipse.core.resources.IMarker
import org.eclipse.core.resources.IProject
import org.eclipse.core.resources.IResource
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.core.runtime.SubMonitor
import org.eclipse.jdt.core.IJavaModelMarker
import org.scalaide.core.IScalaPlugin
import org.scalaide.core.IScalaProject
import org.scalaide.core.SdtConstants
import org.scalaide.core.internal.builder.BuildProblemMarker
import org.scalaide.core.internal.builder.EclipseBuildManager
import org.scalaide.core.internal.project.scopes.BuildScopeUnit
import org.scalaide.ui.internal.preferences.ScalaPluginSettings
import org.scalaide.util.internal.SettingConverterUtil
import sbt.internal.inc.Analysis
/**
 * Manages source compilation for all build scopes (macros, main, tests).
 * Refer to [[CompileScope]].
 */
class SbtScopesBuildManager(val owningProject: IScalaProject, managerSettings: Settings)
    extends EclipseBuildManager {
  val DefaultScopesOrder: Seq[CompileScope] = Seq(CompileMacrosScope, CompileMainScope, CompileTestsScope)

  // One build unit per scope. Units are created in DefaultScopesOrder, and
  // each unit receives the units created before it as its upstream
  // dependencies; the final `.reverse` restores the original scope order.
  private val buildScopeUnits = DefaultScopesOrder.foldLeft(List.empty[BuildScopeUnit]) { (acc, scope) =>
    new BuildScopeUnit(scope, owningProject, managerSettings, acc.toSeq) :: acc
  }.reverse

  /** Says whether there are build errors in the given scope. */
  def hasErrors(compileScope: CompileScope): Boolean =
    buildScopeUnits filter { _.scope == compileScope } exists { _.hasErrors }

  override def hasErrors: Boolean = hasInternalErrors && (buildScopeUnits exists { _.hasErrors })

  override def build(addedOrUpdated: Set[IFile], removed: Set[IFile], monitor: SubMonitor): Unit = {
    // Clear stale problem markers before rebuilding.
    owningProject.underlying.deleteMarkers(SdtConstants.ProblemMarkerId, true, IResource.DEPTH_INFINITE)
    // Pair each non-empty scope unit with the upstream projects that
    // currently have errors in the scopes this unit depends on.
    val scopesAndProjectsInError = buildScopeUnits.filter {
      _.sources.nonEmpty
    }.map { unit =>
      ScopeUnitWithProjectsInError(unit, findProjectsInError(unit))
    }

    scopesAndProjectsInError.foreach { scopePotentiallyToRebuild =>
      if (scopePotentiallyToRebuild.projectsInError.isEmpty || shouldBuildContinueOnErrors) {
        val scopeUnit = scopePotentiallyToRebuild.owner
        scopeUnit.build(addedOrUpdated, removed, monitor)
      } else {
        // Skip the build and explain why with a problem marker instead.
        putMarkersForTransitives(scopePotentiallyToRebuild)
      }
    }

    hasInternalErrors = scopesAndProjectsInError.exists { scopeWithErrors =>
      scopeWithErrors.owner.hasErrors || scopeWithErrors.projectsInError.nonEmpty
    }
  }

  private def foundJavaMarkers = owningProject.underlying.findMarkers(IJavaModelMarker.JAVA_MODEL_PROBLEM_MARKER, true, IResource.DEPTH_INFINITE)
  private def foundGlobalScalaMarkers = owningProject.underlying.findMarkers(SdtConstants.ProblemMarkerId, true, IResource.DEPTH_ZERO)

  // Per-scope errors plus Java problems plus project-level Scala problems.
  override def buildErrors: Set[IMarker] = buildScopeUnits.flatMap { _.buildErrors }.toSet ++
    foundJavaMarkers ++
    foundGlobalScalaMarkers

  override def invalidateAfterLoad: Boolean = true

  override def clean(implicit monitor: IProgressMonitor): Unit = buildScopeUnits.foreach { _.clean }

  override def canTrackDependencies: Boolean = true

  /**
   * Finds transitive dependency projects that have errors in any scope this
   * unit's scope depends on.
   */
  private def findProjectsInError(scopeUnit: BuildScopeUnit) = {
    def hasErrors(project: IProject, scope: CompileScope): Boolean =
      IScalaPlugin().asScalaProject(project).map {
        _.buildManager match {
          // Scope-aware managers are queried for the specific scope only.
          case manager: SbtScopesBuildManager => manager.hasErrors(scope)
          case manager: EclipseBuildManager => manager.hasErrors
        }
      }.getOrElse(false)

    for {
      scope <- scopeUnit.scope.dependentScopesInUpstreamProjects
      project <- owningProject.transitiveDependencies if hasErrors(project, scope)
    } yield project
  }

  // User preference: when true, build even though upstream projects have errors.
  private def shouldBuildContinueOnErrors = {
    val stopBuildOnErrorsProperty = SettingConverterUtil.convertNameToProperty(ScalaPluginSettings.stopBuildOnErrors.name)
    !owningProject.storage.getBoolean(stopBuildOnErrorsProperty)
  }

  /**
   * Creates a project-level problem marker explaining that this scope was not
   * built because of errors in dependent projects.
   */
  private def putMarkersForTransitives(scopeWithError: ScopeUnitWithProjectsInError): Unit = {
    if (scopeWithError.projectsInError.nonEmpty) {
      val errorProjects = scopeWithError.projectsInError.map(_.getName).toSet.mkString(", ")
      val rootErrors = scopeWithError.projectsInError.flatMap { project =>
        val foundErrors = IScalaPlugin().asScalaProject(project).toList.flatMap {
          _.buildManager.buildErrors
        }
        foundErrors
      }.toSet[IMarker].map {
        _.getAttribute(IMarker.MESSAGE, "No message")
      }.mkString(";")
      val currentScopeName = scopeWithError.owner.scope.name
      BuildProblemMarker.create(owningProject.underlying,
        s"""Project: "${owningProject.underlying.getName}" in scope: "${currentScopeName}" not built due to errors""" +
          s""" in dependent project(s): $errorProjects. Root error(s): $rootErrors""")
    }
  }

  // Merge of all per-scope incremental compilation analyses.
  override def latestAnalysis: Analysis = buildScopeUnits.foldLeft(Analysis.Empty) { (analysis, unit) =>
    analysis ++ unit.latestAnalysis
  }

  override def buildManagerOf(outputFile: File): Option[EclipseBuildManager] =
    buildScopeUnits.find { _.buildManagerOf(outputFile).nonEmpty }
}

/** A scope unit together with the upstream projects that currently have errors. */
private case class ScopeUnitWithProjectsInError(owner: BuildScopeUnit, projectsInError: Seq[IProject])
| bsd-3-clause |
looker/sentry | src/sentry/plugins/base/notifier.py | 1131 | """
sentry.plugins.base.notifier
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
__all__ = ('Notifier', )
from sentry import ratelimits
class Notifier(object):
    """Base class for plugins that deliver notifications about events."""

    def notify(self, notification, **kwargs):
        """
        Send a notification.

        See :class:`sentry.plugins.Notification` for notification properties.

        >>> def notify(self, notification):
        >>>     self.logger.info('Received notification for event %r', notification.event)
        """

    def should_notify(self, group, event):
        # Never notify for groups the user explicitly ignored.
        if group.is_ignored():
            return False

        project = group.project
        # Throttle per project and per plugin (the rate-limit key is the
        # plugin's configuration key): at most 10 notifications per window.
        # The window length is defined by the ratelimits backend.
        rate_limited = ratelimits.is_limited(
            project=project,
            key=self.get_conf_key(),
            limit=10,
        )

        if rate_limited:
            self.logger.info('notification.rate_limited', extra={'project_id': project.id})

        # Notify only when we are NOT rate limited.
        return not rate_limited

    def notify_about_activity(self, activity):
        # Hook for activity notifications (comments, assignments, ...);
        # no-op by default, subclasses may override.
        pass
| bsd-3-clause |
Nikola-K/django-recurrence | tests/test_fields.py | 3906 | from datetime import datetime
from django import forms
from recurrence import Recurrence, Rule
from recurrence.forms import RecurrenceField
import pytest
import recurrence
def test_clean_normal_value():
    # A single weekly RRULE parses into exactly one rrule with readable text.
    field = RecurrenceField()
    value = "RRULE:FREQ=WEEKLY;BYDAY=TU"
    obj = field.clean(value)
    assert len(obj.rrules) == 1
    assert obj.rrules[0].to_text() == "weekly, each Tuesday"


def test_clean_invalid_value():
    # Unknown RRULE parameters must surface as a form ValidationError.
    field = RecurrenceField()
    value = "RRULE:FREQS=WEEKLY"
    with pytest.raises(forms.ValidationError) as e:
        field.clean(value)
    assert e.value.messages[0] == "bad parameter: FREQS"
def test_strip_dtstart_and_dtend_if_required():
    rule = Rule(
        recurrence.WEEKLY
    )

    limits = Recurrence(
        dtstart=datetime(2014, 1, 1, 0, 0, 0),
        dtend=datetime(2014, 2, 3, 0, 0, 0),
        rrules=[rule]
    )

    value = recurrence.serialize(limits)

    # By default DTSTART/DTEND are accepted and preserved on clean().
    field = RecurrenceField()
    cleaned_value = field.clean(value)
    assert cleaned_value == limits
    assert cleaned_value.dtstart == datetime(2014, 1, 1, 0, 0, 0)
    assert cleaned_value.dtend == datetime(2014, 2, 3, 0, 0, 0)

    # With accept_dtstart/accept_dtend disabled, both limits are stripped.
    field = RecurrenceField(accept_dtstart=False, accept_dtend=False)
    cleaned_value = field.clean(value)
    assert cleaned_value != limits
    assert cleaned_value.dtstart is None
    assert cleaned_value.dtend is None
def test_check_max_rrules():
    # Exceeding max_rrules raises a ValidationError with a descriptive message.
    rule = Rule(
        recurrence.WEEKLY
    )

    limits = Recurrence(
        rrules=[rule]
    )

    value = recurrence.serialize(limits)

    field = RecurrenceField(max_rrules=0)
    with pytest.raises(forms.ValidationError) as e:
        field.clean(value)
    assert e.value.messages[0] == "Max rules exceeded. The limit is 0"


def test_check_max_exrules():
    # Same as above, but for exclusion rules (max_exrules).
    rule = Rule(
        recurrence.WEEKLY
    )

    limits = Recurrence(
        exrules=[rule]
    )

    value = recurrence.serialize(limits)

    field = RecurrenceField(max_exrules=0)
    with pytest.raises(forms.ValidationError) as e:
        field.clean(value)
    assert e.value.messages[0] == ("Max exclusion rules exceeded. "
                                   "The limit is 0")
def test_check_max_rdates():
    limits = Recurrence(
        rdates=[
            datetime(2014, 1, 1, 0, 0, 0),
            datetime(2014, 1, 2, 0, 0, 0),
        ]
    )

    value = recurrence.serialize(limits)

    # Two explicit dates are fine when the limit is 2...
    field = RecurrenceField(max_rdates=2)
    field.clean(value)

    # ...but rejected when the limit is 1.
    field = RecurrenceField(max_rdates=1)
    with pytest.raises(forms.ValidationError) as e:
        field.clean(value)
    assert e.value.messages[0] == "Max dates exceeded. The limit is 1"


def test_check_max_exdates():
    # Same as above, but for exclusion dates (max_exdates).
    limits = Recurrence(
        exdates=[
            datetime(2014, 1, 1, 0, 0, 0),
            datetime(2014, 1, 2, 0, 0, 0),
        ]
    )

    value = recurrence.serialize(limits)

    field = RecurrenceField(max_exdates=2)
    field.clean(value)

    field = RecurrenceField(max_exdates=1)
    with pytest.raises(forms.ValidationError) as e:
        field.clean(value)
    assert e.value.messages[0] == ("Max exclusion dates exceeded. "
                                   "The limit is 1")
def test_check_allowable_frequencies():
    rule = Rule(
        recurrence.WEEKLY
    )

    limits = Recurrence(
        rrules=[rule]
    )

    value = recurrence.serialize(limits)

    # A weekly rule is accepted when WEEKLY is in the allowed frequencies.
    field = RecurrenceField(frequencies=[
        recurrence.WEEKLY
    ])
    field.clean(value)

    # But rejected when only YEARLY is allowed.
    field = RecurrenceField(frequencies=[
        recurrence.YEARLY
    ])
    with pytest.raises(forms.ValidationError) as e:
        field.clean(value)
    assert e.value.messages[0] == "Invalid frequency."

    # The frequency check also applies to exclusion rules.
    limits = Recurrence(
        exrules=[rule]
    )

    value = recurrence.serialize(limits)

    with pytest.raises(forms.ValidationError) as e:
        field.clean(value)
    assert e.value.messages[0] == "Invalid frequency."
| bsd-3-clause |
JianfengXu/crosswalk-test-suite | misc/sampleapp-android-tests/sampleapp/hangonman_uninstall.py | 2597 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Li, Cici<cici.x.li@intel.com>
import unittest
import os
import sys
import commands
import comm
class TestSampleAppFunctions(unittest.TestCase):
    # Python 2 test: exercises uninstalling the Hangonman sample app via adb.

    def test_uninstall(self):
        # Ensure the Hangonman APK is installed, then verify it uninstalls.
        comm.setUp()
        app_name = "Hangonman"
        # Query the device's package manager for the app's package.
        cmdfind = "adb -s " + comm.device + \
            " shell pm list packages |grep org.xwalk.%s" % (app_name.lower())
        # print "cmdfind: ", cmdfind
        pmstatus = commands.getstatusoutput(cmdfind)
        # print "pmstatus: ", pmstatus
        if pmstatus[0] != 0:
            # Not installed yet: locate the APK in the testapp directory and
            # install it first so the uninstall step has something to remove.
            print "Uninstall APK ----------------> %s App haven't installed, need to install it!" % app_name
            os.chdir(comm.const_path + "/../testapp/")
            apk_file = commands.getstatusoutput("ls | grep %s" % app_name)[1]
            cmdinst = "adb -s " + comm.device + " install -r " + apk_file
            comm.app_install(cmdinst, cmdfind, self)
        cmduninst = "adb -s " + comm.device + \
            " uninstall org.xwalk.%s" % (app_name.lower())
        comm.app_uninstall(cmduninst, self)


if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
v4hn/ecto | test/scripts/test_modules.py | 4712 | #!/usr/bin/env python
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import ecto, util
import ecto.ecto_test as ecto_test
def test_modules_01():
    # Python 2 test: exercises basic cell process()/configure() behavior.
    g = ecto_test.Generate(start=0, step=2)
    g.process()
    assert g.outputs.out == 0
    g.process()
    assert g.outputs.out == 2
    g.configure()
    print type(g.outputs)
    print type(g.outputs.out)
    print g.outputs.out
    # Outputs may be written from Python; the next process() steps from there.
    g.outputs.out = 7.0
    g.process()
    assert g.outputs.out == 9
    # Scatter fans out the value x to n output ports.
    s = ecto_test.Scatter(n = 4, x=3)
    s.process()
    assert(len(s.outputs) == 4)
    for out in s.outputs:
        print out[1].val
        assert(out[1].val == 3)
def test_modules_spec():
    # Exercises cell indexing ("tendril spec") syntax and connection building.
    g = ecto_test.Generate(start=0, step=2)
    x = g["out"]
    x = g["out","out"]
    # Non-string keys are rejected with TypeError.
    try:
        x = g[2.0]
        util.fail()
    except TypeError, e:
        print e
    # Mixed string/non-string keys are rejected with RuntimeError.
    try:
        x = g["out",2.0]
        util.fail()
    except RuntimeError, e:
        print e
    # Unknown tendril names are rejected with RuntimeError.
    try:
        x = g["out","and","about"]
        util.fail()
    except RuntimeError, e:
        print e
    scatter = ecto_test.Scatter(n=3, x=3)
    gather = ecto_test.Gather(n=3)
    a = scatter[scatter.outputs.keys()]
    b = gather[gather.inputs.keys()]
    print a,b
    print a >> b
    plasm = ecto.Plasm()
    plasm.connect(a>>b)
    plasm.execute(1)
    result = gather.outputs.out
    print result
    assert(result == 9) # 3 * 3
    # The slice spec [:] selects all ports; 3 outputs -> 3 connections.
    connections = scatter[:] >> gather[:]
    assert(len(connections) == 3)
    # Only the full slice is supported; partial slices raise.
    try:
        scatter[1:-1]
        util.fail()
    except RuntimeError,e:
        print e
# Helper callables passed to do_fail(); each is expected to raise.

def noarg(x):
    # 'n' is not a parameter of Generate -> should raise.
    ecto_test.Generate(start=0, n=3, step=2)

def wrong_type(g):
    # Assigning a string to a double output -> should raise.
    g.outputs.out = "hello"

def right_type(g):
    # Assigning a float is fine; notify() propagates the Python-side write.
    g.outputs.out = 24.5
    g.outputs.notify() #as python manip is unsafe.
    assert g.outputs.out == 24.5

def already_set(g):
    # Re-declaring an existing tendril with a conflicting type -> should raise.
    g.outputs.declare("out","doc","str")
    print g.outputs.out

def novel_sets(g):
    # Declaring brand-new tendrils from Python is allowed.
    g.outputs.declare("out2","doc",1.0)
    assert g.outputs.out2 == 1.0
    g.inputs.declare("in2","doc","hello")
    assert g.inputs.in2 == "hello"

def do_fail(x,exception_type = RuntimeError ,args = None):
    # Calls x(args) and asserts that it raises exception_type.
    try:
        x(args)
        util.fail()
    except exception_type,e:
        print "good, caught error:", e

def too_many_positionalargs(_):
    # Cells accept at most one positional arg (the instance name).
    ecto_test.Generate("foo", "bar")

def type_and_instance_names():
    # Default instance names derive from the C++ type name; user-supplied
    # names override them, while type_name() stays fixed.
    m = ecto_test.Generate()
    name = m.name()
    print "name is:", name
    assert name.startswith("ecto_test::Generate<double>")
    t = m.type_name()
    print "type is:", t
    m2 = ecto_test.Generate("user-supplied name")
    assert m2.name() == "user-supplied name"
    print "m2.type_name =", m2.type_name()
    assert m2.type_name() == "ecto_test::Generate<double>"

def not_allocable():
    d = ecto_test.DontAllocateMe()

def test_modules_wrong_args():
    not_allocable()
    do_fail(noarg)
    g = ecto_test.Generate()
    do_fail(wrong_type,RuntimeError,g)
    do_fail(already_set,RuntimeError,g)
    do_fail(too_many_positionalargs, RuntimeError)
    novel_sets(g)
    right_type(g)
    type_and_instance_names()

if __name__ == '__main__':
    test_modules_01()
    test_modules_wrong_args()
    test_modules_spec()
| bsd-3-clause |
tkoolen/drake | systems/plants/massMatrixmex.cpp | 803 | #include <mex.h>
#include <iostream>
#include "drakeUtil.h"
#include "RigidBodyManipulator.h"
using namespace Eigen;
using namespace std;
// MEX entry point: [M, dM] = massMatrixmex(model_ptr)
//
// Computes the joint-space mass (inertia) matrix M of the RigidBodyManipulator
// referenced by model_ptr, and optionally its gradient dM when a second
// output is requested.
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[]) {
  string usage = "Usage [M, dM] = massMatrixmex(model_ptr)";

  // NOTE: the error identifiers previously said "geometricJacobianmex"
  // (copy/paste from another MEX file); they now name this function.
  if (nrhs != 1) {
    mexErrMsgIdAndTxt("Drake:massMatrixmex:WrongNumberOfInputs", usage.c_str());
  }

  if (nlhs > 2) {
    mexErrMsgIdAndTxt("Drake:massMatrixmex:WrongNumberOfOutputs", usage.c_str());
  }

  // Compute the gradient only when the caller asked for the second output.
  int gradient_order = nlhs - 1;

  RigidBodyManipulator *model = (RigidBodyManipulator*) getDrakeMexPointer(prhs[0]);

  auto ret = model->massMatrix<double>(gradient_order);

  plhs[0] = eigenToMatlab(ret.value());
  if (gradient_order > 0)
    plhs[1] = eigenToMatlab(ret.gradient().value());
}
| bsd-3-clause |
SaltyDH/React.NET | src/React.Core/ICache.cs | 1900 | /*
* Copyright (c) 2014-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
using System;
using System.Collections.Generic;
namespace React
{
/// <summary>
/// Handles caching of data and optionally tracking dependencies
/// </summary>
public interface ICache
{
	/// <summary>
	/// Get an item from the cache. Returns <paramref name="fallback"/> if the item does
	/// not exist.
	/// </summary>
	/// <typeparam name="T">Type of data</typeparam>
	/// <param name="key">The cache key</param>
	/// <param name="fallback">Value to return if item is not in the cache</param>
	/// <returns>Data from cache, otherwise <paramref name="fallback"/></returns>
	T Get<T>(string key, T fallback = default(T));

	/// <summary>
	/// Sets an item in the cache. An existing item under the same key is replaced.
	/// </summary>
	/// <typeparam name="T">Type of data</typeparam>
	/// <param name="key">The cache key</param>
	/// <param name="data">Data to cache</param>
	/// <param name="slidingExpiration">
	/// Sliding expiration, if cache key is not accessed in this time period it will
	/// automatically be removed from the cache
	/// </param>
	/// <param name="cacheDependencyFiles">
	/// Filenames this cached item is dependent on. If any of these files change, the cache
	/// will be cleared automatically
	/// </param>
	/// <param name="cacheDependencyKeys">
	/// Other cache keys this cached item is dependent on. If any of these keys change, the
	/// cache will be cleared automatically
	/// </param>
	void Set<T>(
		string key,
		T data,
		TimeSpan slidingExpiration,
		IEnumerable<string> cacheDependencyFiles = null,
		IEnumerable<string> cacheDependencyKeys = null
	);
}
} | bsd-3-clause |
bbqchickenrobot/Eto-1 | Source/Eto.Gtk/Forms/GtkPanel.cs | 3064 | using System;
using Eto.Forms;
using Eto.Drawing;
namespace Eto.GtkSharp.Forms
{
/// <summary>
/// Base GTK handler for Eto <see cref="Panel"/>-derived controls: hosts a single
/// child control inside a Gtk.Alignment, which also implements the panel's padding.
/// </summary>
public abstract class GtkPanel<TControl, TWidget, TCallback> : GtkContainer<TControl, TWidget, TCallback>
	where TControl: Gtk.Widget
	where TWidget: Panel
	where TCallback: Panel.ICallback
{
	// Alignment wrapper that hosts the content and provides padding.
	readonly Gtk.Alignment alignment;
	Control content;

	public override Gtk.Widget ContainerContentControl
	{
		get { return Control; }
	}

	protected GtkPanel()
	{
		// (0, 0, 1, 1): align top-left, child fills all available space.
		alignment = new Gtk.Alignment(0, 0, 1, 1);
	}

	// Subclasses may opt out of the GTK2 minimum-size enforcement below.
	protected virtual bool UseMinimumSizeRequested { get { return true; } }

	protected override void Initialize()
	{
		base.Initialize();
		SetContainerContent(alignment);
#if GTK2
		// On GTK2, MinimumSize is enforced by intercepting the size request.
		if (UseMinimumSizeRequested)
			ContainerControl.SizeRequested += Connector.HandleContentSizeRequested;
#endif
	}

	protected new GtkPanelEventConnector Connector { get { return (GtkPanelEventConnector)base.Connector; } }

	protected override WeakConnector CreateConnector()
	{
		return new GtkPanelEventConnector();
	}

	protected class GtkPanelEventConnector : GtkControlConnector
	{
		public new GtkPanel<TControl, TWidget, TCallback> Handler { get { return (GtkPanel<TControl, TWidget, TCallback>)base.Handler; } }
#if GTK2
		// Grows the requested size to honor the handler's MinimumSize.
		public void HandleContentSizeRequested(object o, Gtk.SizeRequestedArgs args)
		{
			var handler = Handler;
			if (handler != null)
			{
				var alloc = args.Requisition;
				var minimumSize = handler.MinimumSize;
				if (minimumSize.Width > 0)
					alloc.Width = Math.Max(alloc.Width, minimumSize.Width);
				if (minimumSize.Height > 0)
					alloc.Height = Math.Max(alloc.Height, minimumSize.Height);
				args.Requisition = alloc;
			}
		}
#endif
	}

	ContextMenu contextMenu;
	public ContextMenu ContextMenu
	{
		get { return contextMenu; }
		set { contextMenu = value; } // TODO: context menu is stored but never attached to the widget
	}

	Size minimumSize;
	public virtual Size MinimumSize
	{
		get { return minimumSize; }
		set
		{
			minimumSize = value;
#if GTK3
			// On GTK3, a minimum size is applied directly; -1 means "unset".
			ContainerControl.SetSizeRequest(value.Width > 0 ? value.Width : -1, value.Height > 0 ? value.Height : -1);
#endif
		}
	}

	public Padding Padding
	{
		get
		{
			// Note the argument order of GetPadding: top, bottom, left, right.
			uint top, left, right, bottom;
			alignment.GetPadding(out top, out bottom, out left, out right);
			return new Padding((int)left, (int)top, (int)right, (int)bottom);
		}
		set
		{
			alignment.SetPadding((uint)value.Top, (uint)value.Bottom, (uint)value.Left, (uint)value.Right);
		}
	}

	public Control Content
	{
		get { return content; }
		set
		{
			if (content != value)
			{
				// Detach the previous content from the alignment first.
				if (content != null)
					alignment.Remove(content.GetContainerWidget());
				content = value;
				// NOTE(review): setting Content to null would throw here
				// (content.GetContainerWidget() on null) — confirm whether
				// callers ever clear the content.
				var widget = content.GetContainerWidget();
				if (widget != null)
				{
					// Reparent the widget if it is currently hosted elsewhere.
					if (widget.Parent != null)
						((Gtk.Container)widget.Parent).Remove(widget);
					alignment.Child = widget;
					widget.ShowAll();
				}
			}
		}
	}

	public override Gtk.Widget BackgroundControl
	{
		get { return Control; }
	}

	/// <summary>
	/// Implemented by subclasses to place the padding/content container
	/// (the alignment) into the concrete GTK control.
	/// </summary>
	protected abstract void SetContainerContent(Gtk.Widget content);
}
}
| bsd-3-clause |
birm/Elemental | src/lapack_like/reflect-C.cpp | 5888 | /*
Copyright (c) 2009-2015, Jack Poulson
All rights reserved.
This file is part of Elemental and is under the BSD 2-Clause License,
which can be found in the LICENSE file in the root directory, or at
http://opensource.org/licenses/BSD-2-Clause
*/
#include "El.hpp"
#include "El.h"
using namespace El;
extern "C" {

// Expands the C-API bindings shared by every field type T (real and complex):
// left/right Householder and hyperbolic reflectors, for both sequential
// (ElMatrix) and distributed (ElDistMatrix) matrices. SIG is the type suffix
// appended to each exported symbol. Comments inside the macro use /* */ form
// only, since // would swallow the trailing line-continuation backslash.
#define C_PROTO_FIELD(SIG,SIGBASE,T) \
  /* Hyperbolic reflector
     ==================== */ \
  /* Left application
     ---------------- */ \
  ElError ElLeftHyperbolicReflector_ ## SIG \
  ( CREFLECT(T)* chi, ElMatrix_ ## SIG x, CREFLECT(T)* tau ) \
  { EL_TRY( *CReflect(tau) = \
      LeftHyperbolicReflector( *CReflect(chi), *CReflect(x) ) ) } \
  ElError ElLeftHyperbolicReflectorDist_ ## SIG \
  ( CREFLECT(T)* chi, ElDistMatrix_ ## SIG x, CREFLECT(T)* tau ) \
  { EL_TRY( *CReflect(tau) = \
      LeftHyperbolicReflector( *CReflect(chi), *CReflect(x) ) ) } \
  /* Right application
     ----------------- */ \
  ElError ElRightHyperbolicReflector_ ## SIG \
  ( CREFLECT(T)* chi, ElMatrix_ ## SIG x, CREFLECT(T)* tau ) \
  { EL_TRY( *CReflect(tau) = \
      RightHyperbolicReflector( *CReflect(chi), *CReflect(x) ) ) } \
  ElError ElRightHyperbolicReflectorDist_ ## SIG \
  ( CREFLECT(T)* chi, ElDistMatrix_ ## SIG x, CREFLECT(T)* tau ) \
  { EL_TRY( *CReflect(tau) = \
      RightHyperbolicReflector( *CReflect(chi), *CReflect(x) ) ) } \
  /* Householder reflector
     ===================== */ \
  /* Left application
     ---------------- */ \
  ElError ElLeftReflector_ ## SIG \
  ( CREFLECT(T)* chi, ElMatrix_ ## SIG x, CREFLECT(T)* tau ) \
  { EL_TRY( *CReflect(tau) = LeftReflector( *CReflect(chi), *CReflect(x) ) ) } \
  ElError ElLeftReflectorDist_ ## SIG \
  ( CREFLECT(T)* chi, ElDistMatrix_ ## SIG x, CREFLECT(T)* tau ) \
  { EL_TRY( *CReflect(tau) = LeftReflector( *CReflect(chi), *CReflect(x) ) ) } \
  /* Right application
     ----------------- */ \
  ElError ElRightReflector_ ## SIG \
  ( CREFLECT(T)* chi, ElMatrix_ ## SIG x, CREFLECT(T)* tau ) \
  { EL_TRY( *CReflect(tau) = \
      RightReflector( *CReflect(chi), *CReflect(x) ) ) } \
  ElError ElRightReflectorDist_ ## SIG \
  ( CREFLECT(T)* chi, ElDistMatrix_ ## SIG x, CREFLECT(T)* tau ) \
  { EL_TRY( *CReflect(tau) = \
      RightReflector( *CReflect(chi), *CReflect(x) ) ) }

// Real field types: the packed-reflector routines take no conjugation flag,
// so UNCONJUGATED is hard-wired into the C++ call.
#define C_PROTO_REAL(SIG,Real) \
  C_PROTO_FIELD(SIG,SIG,Real) \
  /* Apply packed reflectors
     ======================= */ \
  ElError ElApplyPackedReflectors_ ## SIG \
  ( ElLeftOrRight side, ElUpperOrLower uplo, \
    ElVerticalOrHorizontal dir, ElForwardOrBackward order, \
    ElInt offset, ElConstMatrix_ ## SIG H, ElConstMatrix_ ## SIG t, \
    ElMatrix_ ## SIG A ) \
  { EL_TRY( ApplyPackedReflectors( CReflect(side), CReflect(uplo), \
      CReflect(dir), CReflect(order), UNCONJUGATED, offset, \
      *CReflect(H), *CReflect(t), *CReflect(A) ) ) } \
  ElError ElApplyPackedReflectorsDist_ ## SIG \
  ( ElLeftOrRight side, ElUpperOrLower uplo, \
    ElVerticalOrHorizontal dir, ElForwardOrBackward order, \
    ElInt offset, ElConstDistMatrix_ ## SIG H, ElConstDistMatrix_ ## SIG t, \
    ElDistMatrix_ ## SIG A ) \
  { EL_TRY( ApplyPackedReflectors( CReflect(side), CReflect(uplo), \
      CReflect(dir), CReflect(order), UNCONJUGATED, offset, \
      *CReflect(H), *CReflect(t), *CReflect(A) ) ) } \
  /* Expand packed reflectors
     ======================== */ \
  ElError ElExpandPackedReflectors_ ## SIG \
  ( ElUpperOrLower uplo, ElVerticalOrHorizontal dir, \
    ElInt offset, ElMatrix_ ## SIG H, ElConstMatrix_ ## SIG t ) \
  { EL_TRY( ExpandPackedReflectors( CReflect(uplo), CReflect(dir), \
      UNCONJUGATED, offset, *CReflect(H), *CReflect(t) ) ) } \
  ElError ElExpandPackedReflectorsDist_ ## SIG \
  ( ElUpperOrLower uplo, ElVerticalOrHorizontal dir, \
    ElInt offset, ElDistMatrix_ ## SIG H, ElConstDistMatrix_ ## SIG t ) \
  { EL_TRY( ExpandPackedReflectors( CReflect(uplo), CReflect(dir), \
      UNCONJUGATED, offset, *CReflect(H), *CReflect(t) ) ) }

// Complex field types: same bindings, but the caller chooses the
// conjugation mode, which is forwarded to the C++ implementation.
#define C_PROTO_COMPLEX(SIG,SIGBASE,F) \
  C_PROTO_FIELD(SIG,SIGBASE,F) \
  /* Apply packed reflectors
     ======================= */ \
  ElError ElApplyPackedReflectors_ ## SIG \
  ( ElLeftOrRight side, ElUpperOrLower uplo, \
    ElVerticalOrHorizontal dir, ElForwardOrBackward order, \
    ElConjugation conjugate, ElInt offset, \
    ElConstMatrix_ ## SIG H, ElConstMatrix_ ## SIG t, ElMatrix_ ## SIG A ) \
  { EL_TRY( ApplyPackedReflectors( CReflect(side), CReflect(uplo), \
      CReflect(dir), CReflect(order), CReflect(conjugate), offset, \
      *CReflect(H), *CReflect(t), *CReflect(A) ) ) } \
  ElError ElApplyPackedReflectorsDist_ ## SIG \
  ( ElLeftOrRight side, ElUpperOrLower uplo, \
    ElVerticalOrHorizontal dir, ElForwardOrBackward order, \
    ElConjugation conjugate, ElInt offset, \
    ElConstDistMatrix_ ## SIG H, ElConstDistMatrix_ ## SIG t, \
    ElDistMatrix_ ## SIG A ) \
  { EL_TRY( ApplyPackedReflectors( CReflect(side), CReflect(uplo), \
      CReflect(dir), CReflect(order), CReflect(conjugate), offset, \
      *CReflect(H), *CReflect(t), *CReflect(A) ) ) } \
  /* Expand packed reflectors
     ======================== */ \
  ElError ElExpandPackedReflectors_ ## SIG \
  ( ElUpperOrLower uplo, ElVerticalOrHorizontal dir, ElConjugation conjugate, \
    ElInt offset, ElMatrix_ ## SIG H, ElConstMatrix_ ## SIG t ) \
  { EL_TRY( ExpandPackedReflectors( CReflect(uplo), CReflect(dir), \
      CReflect(conjugate), offset, *CReflect(H), *CReflect(t) ) ) } \
  ElError ElExpandPackedReflectorsDist_ ## SIG \
  ( ElUpperOrLower uplo, ElVerticalOrHorizontal dir, ElConjugation conjugate, \
    ElInt offset, ElDistMatrix_ ## SIG H, ElConstDistMatrix_ ## SIG t ) \
  { EL_TRY( ExpandPackedReflectors( CReflect(uplo), CReflect(dir), \
      CReflect(conjugate), offset, *CReflect(H), *CReflect(t) ) ) }

// No integer instantiations make sense for reflectors; the shared
// instantiation header stamps out the macros above for each field type.
#define EL_NO_INT_PROTO
#include "El/macros/CInstantiate.h"

} // extern "C"
| bsd-3-clause |
kuiche/chromium | chrome/tools/test/reference_build/chrome_mac/Chromium.app/Contents/Resources/inspector/BottomUpProfileDataGridTree.js | 9940 | /*
* Copyright (C) 2009 280 North Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Bottom Up Profiling shows the entire callstack backwards:
// The root node is a representation of each individual function called, and each child of that node represents
// a reverse-callstack showing how many of those calls came from it. So, unlike top-down, the statistics in
// each child still represent the root node. We have to be particularly careful of recursion with this mode
// because a root node can represent itself AND an ancestor.
// Builds the inverted ("bottom up") view of a profile tree. Every node of the
// original tree that is not the head becomes a candidate top-level entry,
// keyed by callUID, with its reverse call stack materialized lazily by
// _populate. Recursion is handled by tracking, per callUID, which node UIDs
// have already contributed their total time.
WebInspector.BottomUpProfileDataGridTree = function(/*ProfileView*/ aProfileView, /*ProfileNode*/ aProfileNode)
{
    WebInspector.ProfileDataGridTree.call(this, aProfileView, aProfileNode);

    // Iterate each node in pre-order.
    var profileNodeUIDs = 0;
    // Pairs of [ancestor chain, nodes to visit] consumed two at a time below.
    var profileNodeGroups = [[], [aProfileNode]];
    // callUID -> { node UID -> true } for nodes already counted toward total time.
    var visitedProfileNodesForCallUID = {};

    this._remainingNodeInfos = [];

    for (var profileNodeGroupIndex = 0; profileNodeGroupIndex < profileNodeGroups.length; ++profileNodeGroupIndex) {
        var parentProfileNodes = profileNodeGroups[profileNodeGroupIndex];
        var profileNodes = profileNodeGroups[++profileNodeGroupIndex];
        var count = profileNodes.length;

        for (var index = 0; index < count; ++index) {
            var profileNode = profileNodes[index];

            // Assign a stable UID on first sight; UIDs key the visited sets.
            if (!profileNode.UID)
                profileNode.UID = ++profileNodeUIDs;

            if (profileNode.head && profileNode !== profileNode.head) {
                // The total time of this ancestor is accounted for if we're in any form of recursive cycle.
                var visitedNodes = visitedProfileNodesForCallUID[profileNode.callUID];
                var totalTimeAccountedFor = false;

                if (!visitedNodes) {
                    visitedNodes = {}
                    visitedProfileNodesForCallUID[profileNode.callUID] = visitedNodes;
                } else {
                    // The total time for this node has already been accounted for iff one of it's parents has already been visited.
                    // We can do this check in this style because we are traversing the tree in pre-order.
                    var parentCount = parentProfileNodes.length;
                    for (var parentIndex = 0; parentIndex < parentCount; ++parentIndex) {
                        if (visitedNodes[parentProfileNodes[parentIndex].UID]) {
                            totalTimeAccountedFor = true;
                            break;
                        }
                    }
                }

                visitedNodes[profileNode.UID] = true;

                this._remainingNodeInfos.push({ ancestor:profileNode, focusNode:profileNode, totalTimeAccountedFor:totalTimeAccountedFor });
            }

            // Queue children together with their (extended) ancestor chain.
            var children = profileNode.children;
            if (children.length) {
                profileNodeGroups.push(parentProfileNodes.concat([profileNode]))
                profileNodeGroups.push(children);
            }
        }
    }

    // Populate the top level nodes.
    WebInspector.BottomUpProfileDataGridNode.prototype._populate.call(this);

    return this;
}

WebInspector.BottomUpProfileDataGridTree.prototype = {
    // When focusing, we keep the entire callstack up to this ancestor.
    // Walks from the focused node up to the root, trimming each level to the
    // single child on the focus path and stamping the focused node's stats
    // onto every level (the root's stats define the whole stack in this view).
    focus: function(/*ProfileDataGridNode*/ profileDataGridNode)
    {
        if (!profileDataGridNode)
            return;

        this._save();

        var currentNode = profileDataGridNode;
        var focusNode = profileDataGridNode;

        while (currentNode.parent && (currentNode instanceof WebInspector.ProfileDataGridNode)) {
            currentNode._takePropertiesFromProfileDataGridNode(profileDataGridNode);

            focusNode = currentNode;
            currentNode = currentNode.parent;

            if (currentNode instanceof WebInspector.ProfileDataGridNode)
                currentNode._keepOnlyChild(focusNode);
        }

        this.children = [focusNode];
        this.totalTime = profileDataGridNode.totalTime;
    },

    // Removes every occurrence of the given function (by callUID) from the
    // tree, merging excluded children into their parents, then re-sorts.
    exclude: function(/*ProfileDataGridNode*/ profileDataGridNode)
    {
        if (!profileDataGridNode)
            return;

        this._save();

        var excludedCallUID = profileDataGridNode.callUID;
        var excludedTopLevelChild = this.childrenByCallUID[excludedCallUID];

        // If we have a top level node that is excluded, get rid of it completely (not keeping children),
        // since bottom up data relies entirely on the root node.
        if (excludedTopLevelChild)
            this.children.remove(excludedTopLevelChild);

        var children = this.children;
        var count = children.length;

        for (var index = 0; index < count; ++index)
            children[index]._exclude(excludedCallUID);

        if (this.lastComparator)
            this.sort(this.lastComparator, true);
    }
}

WebInspector.BottomUpProfileDataGridTree.prototype.__proto__ = WebInspector.ProfileDataGridTree.prototype;

// One row of the inverted tree. Children represent callers of this node, so
// they are only materialized on demand (see _populate / _remainingNodeInfos).
WebInspector.BottomUpProfileDataGridNode = function(/*ProfileView*/ profileView, /*ProfileNode*/ profileNode, /*BottomUpProfileDataGridTree*/ owningTree)
{
    // In bottom up mode, our parents are our children since we display an inverted tree.
    // However, we don't want to show the very top parent since it is redundant.
    var hasChildren = !!(profileNode.parent && profileNode.parent.parent);

    WebInspector.ProfileDataGridNode.call(this, profileView, profileNode, owningTree, hasChildren);

    this._remainingNodeInfos = [];
}

WebInspector.BottomUpProfileDataGridNode.prototype = {
    // Copies the display statistics from another grid node (used by focus()).
    _takePropertiesFromProfileDataGridNode: function(/*ProfileDataGridNode*/ profileDataGridNode)
    {
        this._save();

        this.selfTime = profileDataGridNode.selfTime;
        this.totalTime = profileDataGridNode.totalTime;
        this.numberOfCalls = profileDataGridNode.numberOfCalls;
    },

    // When focusing, we keep just the members of the callstack.
    _keepOnlyChild: function(/*ProfileDataGridNode*/ child)
    {
        this._save();

        this.removeChildren();
        this.appendChild(child);
    },

    // Recursively removes the function identified by aCallUID beneath this
    // node; a direct child with that callUID is merged into this node.
    _exclude: function(aCallUID)
    {
        if (this._remainingNodeInfos)
            this._populate();

        this._save();

        var children = this.children;
        var index = this.children.length;

        while (index--)
            children[index]._exclude(aCallUID);

        var child = this.childrenByCallUID[aCallUID];

        if (child)
            this._merge(child, true);
    },

    // Folding a child into this node removes its self time from ours before
    // delegating the structural merge to the base implementation.
    _merge: function(/*ProfileDataGridNode*/ child, /*Boolean*/ shouldAbsorb)
    {
        this.selfTime -= child.selfTime;

        WebInspector.ProfileDataGridNode.prototype._merge.call(this, child, shouldAbsorb);
    },

    // Lazily builds this node's children (the callers) from the queued
    // node infos, merging duplicates by ancestor and propagating each info
    // one level further up the original call tree.
    _populate: function(event)
    {
        var remainingNodeInfos = this._remainingNodeInfos;
        var count = remainingNodeInfos.length;

        for (var index = 0; index < count; ++index) {
            var nodeInfo = remainingNodeInfos[index];
            var ancestor = nodeInfo.ancestor;
            var focusNode = nodeInfo.focusNode;
            var child = this.findChild(ancestor);

            // If we already have this child, then merge the data together.
            if (child) {
                var totalTimeAccountedFor = nodeInfo.totalTimeAccountedFor;

                child.selfTime += focusNode.selfTime;
                child.numberOfCalls += focusNode.numberOfCalls;

                if (!totalTimeAccountedFor)
                    child.totalTime += focusNode.totalTime;
            } else {
                // If not, add it as a true ancestor.
                // In heavy mode, we take our visual identity from ancestor node...
                var child = new WebInspector.BottomUpProfileDataGridNode(this.profileView, ancestor, this.tree);

                if (ancestor !== focusNode) {
                    // but the actual statistics from the "root" node (bottom of the callstack).
                    child.selfTime = focusNode.selfTime;
                    child.totalTime = focusNode.totalTime;
                    child.numberOfCalls = focusNode.numberOfCalls;
                }

                this.appendChild(child);
            }

            // Hand the info up one level so the grandchild row can be built
            // when this child is expanded (stop before the redundant head).
            var parent = ancestor.parent;
            if (parent && parent.parent) {
                nodeInfo.ancestor = parent;
                child._remainingNodeInfos.push(nodeInfo);
            }
        }

        delete this._remainingNodeInfos;

        if (this.removeEventListener)
            this.removeEventListener("populate", this._populate, this);
    }
}

WebInspector.BottomUpProfileDataGridNode.prototype.__proto__ = WebInspector.ProfileDataGridNode.prototype;
| bsd-3-clause |
ansendu/elephant.io | src/EngineInterface.php | 1442 | <?php
/**
* This file is part of the Elephant.io package
*
* For the full copyright and license information, please view the LICENSE file
* that was distributed with this source code.
*
* @copyright Wisembly
* @license http://www.opensource.org/licenses/MIT-License MIT License
*/
namespace ElephantIO;
/**
* Represents an engine used within ElephantIO to send / receive messages from
* a websocket real time server
*
* Loosely based on the work of the following :
* - Ludovic Barreca (@ludovicbarreca)
* - Mathieu Lallemand (@lalmat)
*
* @author Baptiste Clavié <baptiste@wisembly.com>
*/
interface EngineInterface
{
    // Packet type codes used when framing messages on the wire.
    // NOTE(review): values appear to mirror the engine.io packet types
    // (open/close/ping/pong/message/upgrade/noop) -- confirm against the
    // protocol version targeted by the concrete engine implementations.
    const OPEN = 0;
    const CLOSE = 1;
    const PING = 2;
    const PONG = 3;
    const MESSAGE = 4;
    const UPGRADE = 5;
    const NOOP = 6;

    /** Connect to the targeted server */
    public function connect();

    /** Closes the connection to the websocket */
    public function close();

    /**
     * Read data from the socket
     *
     * @return string Data read from the socket
     */
    public function read();

    /**
     * Emits a message through the websocket
     *
     * @param string $event Event to emit
     * @param array  $args  Arguments to send
     */
    public function emit($event, array $args);

    /** Keeps alive the connection */
    public function keepAlive();

    /** Gets the name of the engine */
    public function getName();
}
| mit |
onderdelen/jwt-auth | resources/lang/cs/users.php | 2082 | <?php
return array(

    /*
    |--------------------------------------------------------------------------
    | User Repository Messages (Czech, "cs")
    |--------------------------------------------------------------------------
    |
    | Translated status/error messages returned by the user repository.
    | Keys must stay in sync with the other locale files of this package;
    | placeholders such as :url are substituted at runtime, so the values
    | must be kept verbatim apart from intentional translation changes.
    */

    'created' => "Váš účet byl vytvořen. Zkontrolujte svůj e-mail pro potvrzení.",
    'createdactive' => "Váš účet byl vytvořen. Nyní můžete přihlásit.",
    'added' => "Nový uživatel přidán. Zkontrolujte svůj e-mail pro potvrzení.",
    'addedactive' => "Nový uživatel přidán.",
    'loginreq' => "Pole požadováno.",
    'exists' => "Tento uživatel již existuje.",
    'notfound' => "Uživatel nenalezen",
    'noaccess' => "Nemáte práva.",
    'updated' => "Profil aktualizován",
    'notupdated' => "Nelze aktualizovat profil",
    'activated' => "Aktivace je dokončena. <a href=':url' class='alert-link'> nyní se můžete přihlásit</a>",
    'notactivated' => "Aktivaci nelze dokončit.",
    'alreadyactive' => "Tento účet byl již aktivován.",
    'emailconfirm' => "Zkontrolujte svůj e-mail pro potvrzení.",
    'emailinfo' => "Zkontrolujte svůj e-mail pro další kroky.",
    'emailpassword' => "Vaše heslo bylo změněno. Zkontrolujte svůj e-mail.",
    'problem' => "Došlo k potížím. Obraťte se na správce systému.",
    'passwordchg' => "Vaše heslo bylo změněno.",
    'passwordprob' => "Nelze změnit vaše heslo.",
    'oldpassword' => "Neposkytli jste správné původní heslo.",
    'suspended' => "Uživatel byl zablokován na 15 minut.",
    'unsuspended' => "Blokace uživatele odstraněna.",
    'banned' => "Uživatel byl zabanován.",
    'unbanned' => "Uživatel byl odbanován.",
    'inactive_reg' => "Registrace nyní není k dispozici.",
    'destroyed' => "Uživatel byl odstraněn.",
    'notdestroyed' => "Nelze odstranit uživatele."
);
| mit |
sottosviluppo/elcodi | src/Elcodi/Component/Product/Adapter/SimilarPurchasablesProvider/SameCategoryRelatedPurchasableProvider.php | 3393 | <?php
/*
* This file is part of the Elcodi package.
*
* Copyright (c) 2014-2016 Elcodi Networks S.L.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* Feel free to edit as you please, and have fun.
*
* @author Marc Morera <yuhu@mmoreram.com>
* @author Aldo Chiecchia <zimage@tiscali.it>
* @author Elcodi Team <tech@elcodi.com>
*/
namespace Elcodi\Component\Product\Adapter\SimilarPurchasablesProvider;
use Doctrine\Common\Collections\Collection;
use Elcodi\Component\Product\Adapter\SimilarPurchasablesProvider\Interfaces\RelatedPurchasablesProviderInterface;
use Elcodi\Component\Product\Entity\Interfaces\CategoryInterface;
use Elcodi\Component\Product\Entity\Interfaces\PurchasableInterface;
use Elcodi\Component\Product\Repository\PurchasableRepository;
/**
* Class SameCategoryRelatedPurchasableProvider.
*
* This adapter takes in account only the principal category of the purchasable.
*/
/**
 * Class SameCategoryRelatedPurchasableProvider.
 *
 * Finds related purchasables by matching the principal category only;
 * secondary categories are ignored by this adapter.
 */
class SameCategoryRelatedPurchasableProvider implements RelatedPurchasablesProviderInterface
{
    /**
     * @var PurchasableRepository
     *
     * Purchasable Repository
     */
    private $purchasableRepository;

    /**
     * Construct method.
     *
     * @param PurchasableRepository $purchasableRepository Purchasable Repository
     */
    public function __construct(PurchasableRepository $purchasableRepository)
    {
        $this->purchasableRepository = $purchasableRepository;
    }

    /**
     * Given a Purchasable, return an array of related purchasables sharing
     * its principal category.
     *
     * @param PurchasableInterface $purchasable Purchasable
     * @param int                  $limit       Limit of elements retrieved
     *
     * @return array Related purchasables
     */
    public function getRelatedPurchasables(PurchasableInterface $purchasable, $limit)
    {
        return $this->getRelatedPurchasablesFromArray(
            [$purchasable],
            $limit
        );
    }

    /**
     * Given an array of Purchasables, return the enabled purchasables whose
     * principal category matches any of theirs, excluding the given ones.
     *
     * @param PurchasableInterface[] $purchasables Purchasables
     * @param int                    $limit        Limit of elements retrieved
     *
     * @return array Related purchasables
     */
    public function getRelatedPurchasablesFromArray(array $purchasables, $limit)
    {
        $categories = [];

        /**
         * @var PurchasableInterface $purchasable
         */
        foreach ($purchasables as $purchasable) {
            $category = $purchasable->getPrincipalCategory();
            // Strict (identity) in_array: a loose comparison would compare
            // Doctrine entities property by property, which is expensive and
            // can recurse through entity associations.
            if (
                $category instanceof CategoryInterface &&
                !in_array($category, $categories, true)
            ) {
                $categories[] = $category;
            }
        }

        // No usable principal categories means nothing can be related.
        if (empty($categories)) {
            return [];
        }

        return $this
            ->purchasableRepository
            ->createQueryBuilder('p')
            ->where('p.principalCategory IN(:categories)')
            ->andWhere('p NOT IN(:purchasables)')
            ->andWhere('p.enabled = :enabled')
            ->setParameters([
                'categories' => $categories,
                'purchasables' => $purchasables,
                'enabled' => true,
            ])
            ->setMaxResults($limit)
            ->getQuery()
            ->getResult();
    }
}
| mit |
1974kpkpkp/merb | merb-core/spec10/public/webrat/test_app/gems/gems/merb-helpers-0.9.14/spec/fixture/app/controllers/tag_helper.rb | 323 | class TagHelper < Merb::Controller
def tag_with_content
@content = "Astral Projection ~ Dancing Galaxy"
render
end
def tag_with_content_in_the_block
render
end
def nested_tags
@content = "Astral Projection ~ In the Mix"
render
end
def tag_with_attributes
render
end
end
| mit |
smaillet-ms/llilum | Zelig/Zelig/CompileTime/Llvm.NET/Llvm.NET/Instructions/Cast.cs | 235 | using Llvm.NET.Native;
namespace Llvm.NET.Instructions
{
    /// <summary>Wrapper for an LLVM cast instruction.</summary>
    /// <remarks>
    /// Thin projection over the native value; adds no members beyond
    /// <see cref="UnaryInstruction"/>. The constructor is internal so
    /// instances are only created from a native <see cref="LLVMValueRef"/>
    /// by the interop layer.
    /// </remarks>
    public class Cast
        : UnaryInstruction
    {
        internal Cast( LLVMValueRef valueRef )
            : base( valueRef )
        {
        }
    }
}
| mit |
arunetm/ChakraCore_0114 | test/typedarray/typedArrayProfile.js | 1135 | //-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
// Regression test for JIT profile data interacting with the property-string
// cache: init() is first profiled against a Float64Array, then run twice
// against a plain object with the same shape of for-in loop. verify() then
// checks the object's properties were written correctly.
var Arr = new Float64Array(100);
var Arr2 = new Array(100);
var Obj = new Object();
var Failed = 0;

// Records one failure and reports it via the test host.
function FAILED()
{
    Failed++;
    WScript.Echo("FAILED");
}

// Give Obj several named properties so for-in enumerates property strings.
Obj.prop1 = 1;
Obj.prop2 = 1;
Obj.prop3 = 1;
Obj.prop4 = 1;
Obj.prop5 = 1;
Obj.prop6 = 1;
Obj.prop7 = 1;
Obj.prop8 = 1;

// `one` keeps the multiplication from being constant-folded away.
var x = 0.1;
var one = 1;

// Writes x to every enumerable property/index of o.
function init(o)
{
    for (var str in o)
    {
        o[str] = x * one;
    }
}

// Checks every enumerable property/index of o holds exactly x.
function verify(o)
{
    for (var str in o)
    {
        if (o[str] !== x)
        {
            FAILED();
        }
    }
}

// Build profile data
init(Arr);
// Init property string cache
init(Obj);
// Hit bug
init(Obj);
// Obj has garbage
verify(Obj);

if (Failed === 0)
{
    WScript.Echo("Passed");
}
| mit |
dsebastien/DefinitelyTyped | types/mongodb/test/stats.ts | 478 | import { connect, MongoError, CollStats } from 'mongodb';
import { connectionString } from './index';
// Type-level test: Collection#stats must accept both the callback overload
// and the promise overload, and CollStats.wiredTiger must be optional
// (hence the null check before indexing into it).
async function run() {
    const client = await connect(connectionString);
    const db = client.db('test');
    const collection = db.collection('test.find');

    // Callback overload.
    collection.stats((err: MongoError, stats: CollStats) => {});

    // Promise overload; `$ExpectType` below is a dtslint directive and must
    // stay byte-exact.
    const stats = await collection.stats();
    if (stats.wiredTiger) {
        stats.wiredTiger.cache['bytes currently in the cache']; // $ExpectType number
    }
}
| mit |
aizlewood/2015-old | panel/app/fields/date/date.php | 1358 | <?php
// Panel form field rendering a date picker (Pikaday + moment.js) backed by a
// hidden input that carries the normalized YYYY-MM-DD value.
class DateField extends InputField {

  // Client-side assets required by the date picker widget.
  static public $assets = array(
    'js' => array(
      'moment.min.js',
      'pikaday.min.js',
      'date.min.js'
    ),
    'css' => array(
      'pikaday.css'
    )
  );

  public function __construct() {
    $this->type   = 'date';
    $this->icon   = 'calendar';
    $this->label  = l::get('fields.date.label', 'Date');
    $this->format = 'YYYY-MM-DD';
  }

  // The picker expects the format tokens in upper case (moment.js style).
  public function format() {
    return str::upper($this->format);
  }

  // A value is valid when it parses as a date.
  public function validate() {
    return v::date($this->result());
  }

  // Normalizes whatever was stored to YYYY-MM-DD; empty values become null.
  public function value() {
    if (empty($this->value)) {
      return null;
    }
    return date('Y-m-d', strtotime($this->value));
  }

  // Builds the visible picker input plus the hidden input that actually
  // submits the field value under the field's name.
  public function input() {
    $visible = parent::input();
    $visible->removeAttr('name');

    $i18n = json_encode(array(
      'previousMonth' => '‹',
      'nextMonth'     => '›',
      'months'        => l::get('fields.date.months'),
      'weekdays'      => l::get('fields.date.weekdays'),
      'weekdaysShort' => l::get('fields.date.weekdays.short')
    ));

    $visible->data(array(
      'field'  => 'date',
      'format' => $this->format(),
      'i18n'   => html($i18n, false)
    ));

    $hidden        = new Brick('input', null);
    $hidden->type  = 'hidden';
    $hidden->name  = $this->name();
    $hidden->value = $this->value();

    return $visible . $hidden;
  }

}
| mit |
bissu/ui-acceptance | node_modules/generator-karma/node_modules/yeoman-generator/node_modules/inquirer/test/node_modules/inquirer/test/specs/prompts/base.js | 837 | var expect = require("chai").expect;
var sinon = require("sinon");
var ReadlineStub = require("../../helpers/readline");
var Base = require("../../../lib/prompts/base");
// Prevent prompt from writing to screen
// Confirm.prototype.write = function() { return this; };
describe("`base` prompt (e.g. prompt helpers)", function() {

  // Fresh readline stub and prompt instance before every test case.
  beforeEach(function() {
    this.rl = new ReadlineStub();
    this.base = new Base({
      message: "foo bar",
      name: "name"
    }, this.rl );
  });

  // suffix() appends ": " only when the message does not already end in
  // punctuation or whitespace; an empty message becomes just ": ".
  it("`suffix` method should only add ':' if last char is a letter", function() {
    expect(this.base.suffix("m:")).to.equal("m: ");
    expect(this.base.suffix("m?")).to.equal("m? ");
    expect(this.base.suffix("m")).to.equal("m: ");
    expect(this.base.suffix("m ")).to.equal("m ");
    expect(this.base.suffix()).to.equal(": ");
  });

});
| mit |
holtkamp/doctrine2-spatial | lib/CrEOF/Spatial/DBAL/Platform/AbstractPlatform.php | 4068 | <?php
/**
* Copyright (C) 2015 Derek J. Lambert
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
namespace CrEOF\Spatial\DBAL\Platform;
use CrEOF\Geo\WKT\Parser as StringParser;
use CrEOF\Geo\WKB\Parser as BinaryParser;
use CrEOF\Spatial\DBAL\Types\AbstractSpatialType;
use CrEOF\Spatial\DBAL\Types\GeographyType;
use CrEOF\Spatial\Exception\InvalidValueException;
use CrEOF\Spatial\PHP\Types\Geometry\GeometryInterface;
/**
* Abstract spatial platform
*
* @author Derek J. Lambert <dlambert@dereklambert.com>
* @license http://dlambert.mit-license.org MIT
*/
abstract class AbstractPlatform implements PlatformInterface
{
    /**
     * Convert a WKT (well-known text) SQL expression into a PHP spatial object.
     *
     * @param AbstractSpatialType $type
     * @param string              $sqlExpr
     *
     * @return GeometryInterface
     */
    public function convertStringToPHPValue(AbstractSpatialType $type, $sqlExpr)
    {
        $parser = new StringParser($sqlExpr);

        return $this->newObjectFromValue($type, $parser->parse());
    }

    /**
     * Convert a WKB (well-known binary) SQL expression into a PHP spatial object.
     *
     * @param AbstractSpatialType $type
     * @param string              $sqlExpr
     *
     * @return GeometryInterface
     */
    public function convertBinaryToPHPValue(AbstractSpatialType $type, $sqlExpr)
    {
        $parser = new BinaryParser($sqlExpr);

        return $this->newObjectFromValue($type, $parser->parse());
    }

    /**
     * Render a spatial object as a WKT literal, e.g. "POINT(1 2)".
     *
     * @param AbstractSpatialType $type
     * @param GeometryInterface   $value
     *
     * @return string
     */
    public function convertToDatabaseValue(AbstractSpatialType $type, GeometryInterface $value)
    {
        return sprintf('%s(%s)', strtoupper($value->getType()), $value);
    }

    /**
     * Get an array of database types that map to this Doctrine type.
     *
     * @param AbstractSpatialType $type
     *
     * @return string[]
     */
    public function getMappedDatabaseTypes(AbstractSpatialType $type)
    {
        $sqlType = strtolower($type->getSQLType());

        // Geography subtypes (e.g. point) are expressed as a parameterized
        // column type, while the generic "geography" type maps as-is.
        if ($type instanceof GeographyType && $sqlType !== 'geography') {
            $sqlType = sprintf('geography(%s)', $sqlType);
        }

        return array($sqlType);
    }

    /**
     * Create spatial object from parsed value
     *
     * @param AbstractSpatialType $type
     * @param array               $value Parser output: 'type', 'value', 'srid'
     *
     * @return GeometryInterface
     * @throws \CrEOF\Spatial\Exception\InvalidValueException when the parsed
     *         type has no matching GeometryInterface constant
     */
    private function newObjectFromValue(AbstractSpatialType $type, $value)
    {
        $typeFamily = $type->getTypeFamily();
        $typeName   = strtoupper($value['type']);

        // The interface constant value doubles as the PHP class short name
        // within the type-family namespace (Geometry or Geography).
        $constName = sprintf('CrEOF\Spatial\PHP\Types\Geometry\GeometryInterface::%s', $typeName);

        if (! defined($constName)) {
            // @codeCoverageIgnoreStart
            throw new InvalidValueException(sprintf('Unsupported %s type "%s".', $typeFamily, $typeName));
            // @codeCoverageIgnoreEnd
        }

        $class = sprintf('CrEOF\Spatial\PHP\Types\%s\%s', $typeFamily, constant($constName));

        return new $class($value['value'], $value['srid']);
    }
}
| mit |
abhisheksugam/Climate_Police | Climate_Police/__init__.py | 23 | __version__ = '1.0.0'
| mit |
emicklei/landskape | vendor/cloud.google.com/go/spanner/admin/instance/apiv1/instance_admin_client_example_test.go | 5130 | // Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTO-GENERATED CODE. DO NOT EDIT.
package instance_test
import (
"cloud.google.com/go/spanner/admin/instance/apiv1"
"golang.org/x/net/context"
"google.golang.org/api/iterator"
iampb "google.golang.org/genproto/googleapis/iam/v1"
instancepb "google.golang.org/genproto/googleapis/spanner/admin/instance/v1"
)
// ExampleNewInstanceAdminClient shows the minimal boilerplate for
// constructing an InstanceAdminClient from a background context.
func ExampleNewInstanceAdminClient() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use client.
	_ = c
}
// ExampleInstanceAdminClient_ListInstanceConfigs pages through every instance
// configuration via the returned iterator; iterator.Done signals normal
// exhaustion rather than an error.
func ExampleInstanceAdminClient_ListInstanceConfigs() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}

	req := &instancepb.ListInstanceConfigsRequest{
		// TODO: Fill request struct fields.
	}
	it := c.ListInstanceConfigs(ctx, req)
	for {
		resp, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			// TODO: Handle error.
		}
		// TODO: Use resp.
		_ = resp
	}
}
// ExampleInstanceAdminClient_GetInstanceConfig fetches a single instance
// configuration with a unary RPC.
func ExampleInstanceAdminClient_GetInstanceConfig() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}

	req := &instancepb.GetInstanceConfigRequest{
		// TODO: Fill request struct fields.
	}
	resp, err := c.GetInstanceConfig(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use resp.
	_ = resp
}
// ExampleInstanceAdminClient_ListInstances pages through all instances,
// stopping once the iterator reports iterator.Done.
func ExampleInstanceAdminClient_ListInstances() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &instancepb.ListInstancesRequest{
		// TODO: Fill request struct fields.
	}
	it := c.ListInstances(ctx, req)
	for {
		resp, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			// TODO: Handle error.
		}
		// TODO: Use resp.
		_ = resp
	}
}
// ExampleInstanceAdminClient_GetInstance fetches a single instance by request.
func ExampleInstanceAdminClient_GetInstance() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &instancepb.GetInstanceRequest{
		// TODO: Fill request struct fields.
	}
	resp, err := c.GetInstance(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use resp.
	_ = resp
}
// ExampleInstanceAdminClient_CreateInstance starts a create operation and
// blocks on op.Wait until the long-running operation completes.
func ExampleInstanceAdminClient_CreateInstance() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &instancepb.CreateInstanceRequest{
		// TODO: Fill request struct fields.
	}
	op, err := c.CreateInstance(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
	resp, err := op.Wait(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use resp.
	_ = resp
}
// ExampleInstanceAdminClient_UpdateInstance starts an update operation and
// blocks on op.Wait until the long-running operation completes.
func ExampleInstanceAdminClient_UpdateInstance() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &instancepb.UpdateInstanceRequest{
		// TODO: Fill request struct fields.
	}
	op, err := c.UpdateInstance(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
	resp, err := op.Wait(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use resp.
	_ = resp
}
// ExampleInstanceAdminClient_DeleteInstance deletes an instance; the call
// returns only an error, so there is no response to consume.
func ExampleInstanceAdminClient_DeleteInstance() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &instancepb.DeleteInstanceRequest{
		// TODO: Fill request struct fields.
	}
	err = c.DeleteInstance(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
}
// ExampleInstanceAdminClient_SetIamPolicy replaces the IAM policy on a
// resource using an iampb request.
func ExampleInstanceAdminClient_SetIamPolicy() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &iampb.SetIamPolicyRequest{
		// TODO: Fill request struct fields.
	}
	resp, err := c.SetIamPolicy(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use resp.
	_ = resp
}
// ExampleInstanceAdminClient_GetIamPolicy reads the current IAM policy of a
// resource.
func ExampleInstanceAdminClient_GetIamPolicy() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &iampb.GetIamPolicyRequest{
		// TODO: Fill request struct fields.
	}
	resp, err := c.GetIamPolicy(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use resp.
	_ = resp
}
// ExampleInstanceAdminClient_TestIamPermissions checks which of the requested
// permissions the caller holds on a resource.
func ExampleInstanceAdminClient_TestIamPermissions() {
	ctx := context.Background()
	c, err := instance.NewInstanceAdminClient(ctx)
	if err != nil {
		// TODO: Handle error.
	}
	req := &iampb.TestIamPermissionsRequest{
		// TODO: Fill request struct fields.
	}
	resp, err := c.TestIamPermissions(ctx, req)
	if err != nil {
		// TODO: Handle error.
	}
	// TODO: Use resp.
	_ = resp
}
| mit |
rick111111/vscode | src/vs/workbench/parts/git/browser/gitServices.ts | 27360 | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import nls = require('vs/nls');
import platform = require('vs/base/common/platform');
import winjs = require('vs/base/common/winjs.base');
import lifecycle = require('vs/base/common/lifecycle');
import types = require('vs/base/common/types');
import actions = require('vs/base/common/actions');
import errors = require('vs/base/common/errors');
import mime = require('vs/base/common/mime');
import paths = require('vs/base/common/paths');
import ee = require('vs/base/common/eventEmitter');
import wbevents = require('vs/workbench/common/events');
import WorkbenchEditorCommon = require('vs/workbench/common/editor');
import git = require('vs/workbench/parts/git/common/git');
import model = require('vs/workbench/parts/git/common/gitModel');
import giteditorinputs = require('vs/workbench/parts/git/browser/gitEditorInputs');
import operations = require('vs/workbench/parts/git/browser/gitOperations');
import filesCommon = require('vs/workbench/parts/files/common/files');
import { IFileService, EventType as FileEventType, FileChangesEvent, FileChangeType } from 'vs/platform/files/common/files';
import async = require('vs/base/common/async');
import severity from 'vs/base/common/severity';
import {IOutputService} from 'vs/workbench/parts/output/common/output';
import {IWorkbenchEditorService} from 'vs/workbench/services/editor/common/editorService';
import {IConfigurationService, IConfigurationServiceEvent, ConfigurationServiceEventTypes} from 'vs/platform/configuration/common/configuration';
import {IEventService} from 'vs/platform/event/common/event';
import {IInstantiationService} from 'vs/platform/instantiation/common/instantiation';
import {IMessageService} from 'vs/platform/message/common/message';
import {IWorkspaceContextService} from 'vs/platform/workspace/common/workspace';
import {ILifecycleService} from 'vs/platform/lifecycle/common/lifecycle';
import URI from 'vs/base/common/uri';
/**
 * Renders a slash-separated repository path for display: on Windows every '/'
 * is shown as '\', on all other platforms the path is returned unchanged.
 */
function toReadablePath(path: string): string {
	if (platform.isWindows) {
		return path.split('/').join('\\');
	}
	return path;
}
/**
 * Caches diff/editor inputs per git file status so repeated requests for the
 * same status share one input, and disposes an input once its status is gone
 * from the model and the input is no longer visible in any editor.
 */
class EditorInputCache
{
	private gitService: git.IGitService;
	private fileService: IFileService;
	private eventService: IEventService;
	private instantiationService: IInstantiationService;
	private editorService: IWorkbenchEditorService;
	private contextService: IWorkspaceContextService;
	// Keyed by IFileStatus#getId(); values are promises so that concurrent
	// callers for the same status share a single input creation.
	private cache: { [key: string]: winjs.TPromise<WorkbenchEditorCommon.EditorInput> };
	private toDispose: lifecycle.IDisposable[];
	constructor(gitService: git.IGitService, // gitService passed as argument, not by injection
		@IInstantiationService instantiationService: IInstantiationService,
		@IFileService fileService: IFileService,
		@IEventService eventService: IEventService,
		@IWorkbenchEditorService editorService: IWorkbenchEditorService,
		@IWorkspaceContextService contextService: IWorkspaceContextService
	) {
		this.instantiationService = instantiationService;
		this.fileService = fileService;
		this.eventService = eventService;
		this.editorService = editorService;
		this.contextService = contextService;
		this.gitService = gitService;
		this.cache = {};
		this.toDispose = [];
		// Evict cached inputs when the model disposes their underlying status.
		this.toDispose.push(this.gitService.getModel().addListener2('fileStatus:dispose', (fileStatus: git.IFileStatus) => this.onFileStatusDispose(fileStatus)));
	}
	/**
	 * Returns the (possibly cached) editor input for the given file status.
	 */
	public getInput(status: git.IFileStatus): winjs.TPromise<WorkbenchEditorCommon.EditorInput> {
		var result = this.cache[status.getId()];
		if (result) {
			return result;
		}
		result = this.createInput(status);
		this.cache[status.getId()] = result;
		return result;
	}
	/**
	 * Builds a diff input (left = baseline, right = current) for the status.
	 * Falls back to the right-hand input alone when there is no baseline, and
	 * fails with CantOpenResource when neither side can be produced.
	 */
	private createInput(status: git.IFileStatus): winjs.TPromise<WorkbenchEditorCommon.EditorInput> {
		return winjs.TPromise.join<WorkbenchEditorCommon.EditorInput>([this.createLeftInput(status), this.createRightInput(status)]).then((result) => {
			var leftInput = result[0];
			var rightInput = result[1];
			var fileSegment: string;
			var folderSegment: string;
			// For index renames the new (renamed) path is what the user sees.
			if (status.getStatus() === git.Status.INDEX_RENAMED) {
				let pathComponents = status.getRename().split('/');
				fileSegment = pathComponents[pathComponents.length - 1];
				folderSegment = toReadablePath(pathComponents.slice(0, pathComponents.length - 1).join('/'));
			} else {
				let pathComponents = status.getPathComponents();
				fileSegment = pathComponents[pathComponents.length - 1];
				folderSegment = toReadablePath(pathComponents.slice(0, pathComponents.length - 1).join('/'));
			}
			if (!leftInput) {
				if (!rightInput) {
					var error = new Error(nls.localize('cantOpen', "Can't open this git resource."));
					(<git.IGitServiceError> error).gitErrorCode = git.GitErrorCodes.CantOpenResource;
					return winjs.Promise.wrapError(error);
				}
				return winjs.Promise.as(rightInput);
			}
			// Pick the diff input flavor that matches the kind of change.
			switch (status.getStatus()) {
				case git.Status.INDEX_MODIFIED:
					return winjs.Promise.as(new giteditorinputs.GitIndexDiffEditorInput(fileSegment, nls.localize('gitIndexChanges', "{0} - Changes on index", folderSegment), leftInput, rightInput, status));
				case git.Status.INDEX_RENAMED:
					return winjs.Promise.as(new giteditorinputs.GitIndexDiffEditorInput(fileSegment, nls.localize('gitIndexChangesRenamed', "{0} - Renamed - Changes on index", folderSegment), leftInput, rightInput, status));
				case git.Status.MODIFIED:
					return winjs.Promise.as(new giteditorinputs.GitWorkingTreeDiffEditorInput(fileSegment, nls.localize('workingTreeChanges', "{0} - Changes on working tree", folderSegment), leftInput, rightInput, status));
				default:
					return winjs.Promise.as(new giteditorinputs.GitDiffEditorInput(fileSegment, nls.localize('gitMergeChanges', "{0} - Merge changes", folderSegment), leftInput, rightInput, status));
			}
		}).then((editorInput) => {
			return editorInput;
		}, (errs) => {
			// TPromise.join rejects with an array; surface the first real error.
			return winjs.Promise.wrapError(types.isArray(errs) ? errs[0] || errs[1] : errs);
		});
	}
	/**
	 * Resolves the baseline (left) side of the diff: HEAD or the index ('~'),
	 * depending on how the file is staged. Returns null when no baseline applies.
	 */
	private createLeftInput(status: git.IFileStatus): winjs.Promise {
		var path = status.getPath();
		var model = this.gitService.getModel();
		switch (status.getStatus()) {
			case git.Status.INDEX_MODIFIED:
			case git.Status.INDEX_RENAMED:
				return this.gitService.show(path, status, 'HEAD', status.getMimetype());
			case git.Status.MODIFIED:
				var indexStatus = model.getStatus().find(path, git.StatusType.INDEX);
				if (indexStatus && indexStatus.getStatus() === git.Status.INDEX_RENAMED) {
					return this.gitService.show(indexStatus.getRename(), status, '~', status.getMimetype());
				}
				if (indexStatus) {
					return this.gitService.show(path, status, '~', status.getMimetype());
				}
				return this.gitService.show(path, status, 'HEAD', status.getMimetype());
			default:
				return winjs.Promise.as(null);
		}
	}
	/**
	 * Resolves the current (right) side of the diff: the index version, HEAD
	 * for deletions, or the working-tree file itself. Returns null when there
	 * is nothing to show.
	 */
	private createRightInput(status: git.IFileStatus): winjs.Promise {
		const model = this.gitService.getModel();
		const path = status.getPath();
		let resource = URI.file(paths.join(model.getRepositoryRoot(), path));
		switch (status.getStatus()) {
			case git.Status.INDEX_MODIFIED:
			case git.Status.INDEX_ADDED:
			case git.Status.INDEX_COPIED:
				return this.gitService.show(path, status, '~', status.getMimetype());
			case git.Status.INDEX_RENAMED:
				return this.gitService.show(status.getRename(), status, '~', status.getMimetype());
			case git.Status.INDEX_DELETED:
			case git.Status.DELETED:
				return this.gitService.show(path, status, 'HEAD', status.getMimetype());
			case git.Status.MODIFIED:
			case git.Status.UNTRACKED:
			case git.Status.IGNORED:
				var indexStatus = model.getStatus().find(path, git.StatusType.INDEX);
				// If the file was renamed in the index, the working-tree file
				// lives at the renamed path.
				if (indexStatus && indexStatus.getStatus() === git.Status.INDEX_RENAMED) {
					resource = URI.file(paths.join(model.getRepositoryRoot(), indexStatus.getRename()));
				}
				return this.editorService.inputToType({ resource });
			case git.Status.BOTH_MODIFIED:
				return this.editorService.inputToType({ resource });
			default:
				return winjs.Promise.as(null);
		}
	}
	// Removes the cache entry for a disposed status and schedules disposal of
	// its input once the input is no longer visible.
	private onFileStatusDispose(fileStatus: git.IFileStatus): void {
		var id = fileStatus.getId();
		var editorInputPromise = this.cache[id];
		if (editorInputPromise) {
			editorInputPromise.done((editorInput) => { this.eventuallyDispose(editorInput); });
			delete this.cache[id];
		}
	}
	/**
	 * If the disposed status is the same as this input's status, we must try to dispose the input.
	 * But we should not do it while the input is still open. This method will eventually call dispose
	 * when the editor input goes out of the visible editors.
	 */
	private eventuallyDispose(editorInput: WorkbenchEditorCommon.EditorInput): void {
		if (!this.maybeDispose(editorInput)) {
			var listener = this.eventService.addListener2(wbevents.EventType.EDITOR_INPUT_CHANGED, () => {
				if (this.maybeDispose(editorInput)) {
					listener.dispose();
				}
			});
		}
	}
	// Disposes the input only when no visible editor still shows it; returns
	// whether disposal happened.
	private maybeDispose(editorInput: WorkbenchEditorCommon.EditorInput): boolean {
		if (!this.editorService.getVisibleEditors().some((editor) => editor.input && editor.input.matches(editorInput))) {
			editorInput.dispose();
			return true;
		}
		return false;
	}
	public dispose(): void {
		Object.keys(this.cache).forEach(key => {
			this.cache[key].done((editorInput) => { editorInput.dispose(); });
			delete this.cache[key];
		});
		this.toDispose = lifecycle.disposeAll(this.toDispose);
	}
}
/**
 * Periodically runs `git fetch` in the background while the git service is
 * healthy. The fetch interval starts at MIN_TIMEOUT and backs off toward
 * MAX_TIMEOUT on failures. Enabled/disabled via the `git.autofetch` setting.
 */
export class AutoFetcher implements git.IAutoFetcher, lifecycle.IDisposable
{
	private static MIN_TIMEOUT = 2 * 60 * 1000; // every two minutes
	private static MAX_TIMEOUT = 5 * 60 * 1000; // every five minutes
	private _state: git.AutoFetcherState;
	private gitService: git.IGitService;
	private eventService: IEventService;
	private messageService: IMessageService;
	private configurationService: IConfigurationService;
	private instantiationService: IInstantiationService;
	// In-flight fetch or timeout promise; cancelled on deactivate.
	private currentRequest: winjs.Promise;
	private timeout: number;
	private toDispose: lifecycle.IDisposable[];
	private gitServiceStateDisposable: lifecycle.IDisposable;
	constructor(gitService: git.IGitService, // gitService passed as argument, not by injection
		@IEventService eventService: IEventService,
		@IMessageService messageService: IMessageService,
		@IWorkbenchEditorService editorService: IWorkbenchEditorService,
		@IConfigurationService configurationService: IConfigurationService,
		@IInstantiationService instantiationService: IInstantiationService
	) {
		this._state = git.AutoFetcherState.Disabled;
		this.gitService = gitService;
		this.eventService = eventService;
		this.messageService = messageService;
		this.configurationService = configurationService;
		this.instantiationService = instantiationService;
		this.currentRequest = null;
		this.timeout = AutoFetcher.MIN_TIMEOUT;
		this.toDispose = [];
		// React to configuration changes and apply the current setting once.
		this.toDispose.push(this.configurationService.addListener2(ConfigurationServiceEventTypes.UPDATED, e => this.onConfiguration(e.config.git)))
		configurationService.loadConfiguration('git').done(c => this.onConfiguration(c));
	}
	public get state(): git.AutoFetcherState {
		return this._state;
	}
	// `autofetch` defaults to enabled; only an explicit `false` disables it.
	private onConfiguration(config: git.IGitConfiguration): void {
		if (config.autofetch === false) {
			this.disable();
		} else {
			this.enable();
		}
	}
	// Starts listening to git service state and activates when the service is OK.
	public enable(): void {
		if (this._state !== git.AutoFetcherState.Disabled) {
			return;
		}
		this.gitServiceStateDisposable = this.gitService.addListener2(git.ServiceEvents.STATE_CHANGED, (e) => this.onGitServiceStateChange(e))
		this._state = git.AutoFetcherState.Active;
		this.onGitServiceStateChange(this.gitService.getState());
	}
	public disable(): void {
		if (this.gitServiceStateDisposable) {
			this.gitServiceStateDisposable.dispose();
			this.gitServiceStateDisposable = null;
		}
		this.deactivate();
		this._state = git.AutoFetcherState.Disabled;
	}
	private onGitServiceStateChange(state: git.ServiceState): void {
		if (state === git.ServiceState.OK) {
			this.activate();
		} else {
			this.deactivate();
		}
	}
	// (Re)starts the fetch loop, cancelling any in-flight request first.
	public activate(): void {
		if (this.currentRequest) {
			this.currentRequest.cancel();
		}
		this._state = git.AutoFetcherState.Active;
		this.loop();
	}
	public deactivate(): void {
		if (!this.currentRequest) {
			return;
		}
		this._state = git.AutoFetcherState.Inactive;
		this.currentRequest.cancel();
		this.currentRequest = null;
	}
	// One iteration: fetch, then wait `timeout` ms, then recurse. Cancellation
	// and authentication failures stop the loop; other errors back off.
	private loop(): void {
		this._state = git.AutoFetcherState.Fetching;
		this.currentRequest = this.gitService.fetch().then(() => {
			this.timeout = AutoFetcher.MIN_TIMEOUT;
		}, (err) => {
			if (errors.isPromiseCanceledError(err)) {
				return winjs.Promise.wrapError(err);
			} else if (err.gitErrorCode === git.GitErrorCodes.AuthenticationFailed) {
				return winjs.Promise.wrapError(err);
			} else {
				this.timeout = Math.min(Math.round(this.timeout * 1.2), AutoFetcher.MAX_TIMEOUT); // backoff
			}
		});
		this.currentRequest.then(() => {
			this._state = git.AutoFetcherState.Active;
			this.currentRequest = winjs.Promise.timeout(this.timeout);
			return this.currentRequest;
		}).then(() => this.loop(), (err) => this.deactivate());
	}
	public dispose(): void {
		this.disable();
	}
}
/**
 * Shape of a credential request from the raw git service: `guid` correlates
 * the eventual response with the request, `scope` identifies what is being
 * authenticated.
 */
interface IGitCredentialRequest {
	guid: string;
	scope: git.IGitCredentialScope;
}
export class GitService extends ee.EventEmitter
implements
git.IGitService {
static ID = 'Monaco.IDE.UI.Services.GitService';
public serviceId = git.IGitService;
private eventService: IEventService;
private contextService: IWorkspaceContextService;
private messageService: IMessageService;
private instantiationService:IInstantiationService;
private editorService: IWorkbenchEditorService;
private lifecycleService: ILifecycleService;
private outputService: IOutputService;
private raw: git.IRawGitService;
private state: git.ServiceState;
private operations: git.IGitOperation[];
private model: git.IModel;
private inputCache: EditorInputCache;
private remoteListenerUnbind:ee.ListenerUnbind;
private toDispose: lifecycle.IDisposable[];
private needsRefresh: boolean;
private refreshDelayer: async.ThrottledDelayer<void>;
private autoFetcher: AutoFetcher;
constructor(
raw: git.IRawGitService,
@IInstantiationService instantiationService: IInstantiationService,
@IEventService eventService: IEventService,
@IMessageService messageService: IMessageService,
@IWorkbenchEditorService editorService: IWorkbenchEditorService,
@IOutputService outputService: IOutputService,
@IWorkspaceContextService contextService: IWorkspaceContextService,
@ILifecycleService lifecycleService: ILifecycleService
) {
super();
this.instantiationService = instantiationService;
this.eventService = eventService;
this.messageService = messageService;
this.editorService = editorService;
this.outputService = outputService;
this.contextService = contextService;
this.lifecycleService = lifecycleService;
this.raw = raw;
this.state = git.ServiceState.NotInitialized;
this.operations = [];
this.model = new model.Model();
this.toDispose = [];
this.needsRefresh = false;
this.refreshDelayer = new async.PeriodThrottledDelayer<void>(500, 10000);
this.autoFetcher = this.instantiationService.createInstance(AutoFetcher, this);
this.registerListeners();
this.inputCache = this.instantiationService.createInstance(EditorInputCache, this);
this.triggerStatus(true); // trigger initial status
}
private registerListeners():void {
this.toDispose.push(this.eventService.addListener2(FileEventType.FILE_CHANGES,(e) => this.onFileChanges(e)));
this.toDispose.push(this.eventService.addListener2(filesCommon.EventType.FILE_SAVED, (e) => this.onLocalFileChange(e)));
this.toDispose.push(this.eventService.addListener2(filesCommon.EventType.FILE_REVERTED, (e) => this.onLocalFileChange(e)));
this.lifecycleService.onShutdown(this.dispose, this);
}
private triggerStatus(force: boolean = false): void {
if (this.isInitialized() && !this.isIdle() && !force) {
this.refreshDelayer.cancel();
return;
}
var onError = async.once<any, void>(e => {
if (!errors.isPromiseCanceledError(e)) {
this.messageService.show(severity.Error, e);
}
});
this.refreshDelayer.trigger(() => this.status()).done(null, onError);
}
private onLocalFileChange(e:filesCommon.LocalFileChangeEvent): void {
var shouldTriggerStatus = e.gotUpdated() && paths.basename(e.getAfter().resource.fsPath) === '.gitignore';
if (!shouldTriggerStatus) {
return;
}
this.triggerStatus();
}
private onFileChanges(e: FileChangesEvent): void {
var isIdle = this.isIdle();
var shouldTriggerStatus = e.changes.some(c => {
var workspacePath = this.contextService.toWorkspaceRelativePath(c.resource);
if (!workspacePath) {
return false; // ignore out of workspace files
}
// for .gitindex, the service must be idle
if ('.git/index' === workspacePath) {
return isIdle;
}
// for anything other that .git*
if (!/^\.git/.test(workspacePath)) {
return true;
}
// added or deleted .git folder
if (workspacePath === '.git') {
return c.type === FileChangeType.ADDED || c.type === FileChangeType.DELETED;
}
return ['.git/index.lock', '.git/FETCH_HEAD', '.gitignore', '.gitmodules'].indexOf(workspacePath) === -1;
});
if (!shouldTriggerStatus) {
return;
}
this.triggerStatus();
}
private onGitServiceOperationEnd(e: { operation: git.IGitOperation; }): void {
if (e.operation.id === git.ServiceOperations.COMMAND) {
this.triggerStatus();
}
}
public getState(): git.ServiceState {
return this.state;
}
public getModel(): git.IModel {
return this.model;
}
public status(): winjs.Promise {
return this.run(git.ServiceOperations.STATUS, () => this.raw.status());
}
public init(): winjs.Promise {
return this.run(git.ServiceOperations.INIT, () => this.raw.init());
}
public add(files?: git.IFileStatus[]): winjs.Promise {
return this.run(git.ServiceOperations.ADD, () => this.raw.add(GitService.toPaths(files)));
}
public stage(filePath: string, content: string): winjs.Promise {
return this.run(git.ServiceOperations.STAGE, () => this.raw.stage(filePath, content));
}
public branch(name: string, checkout: boolean = false): winjs.Promise {
return this.run(git.ServiceOperations.BRANCH, () => this.raw.branch(name, checkout));
}
public checkout(treeish: string = '', files: git.IFileStatus[] = null): winjs.Promise {
return this.run(git.ServiceOperations.CHECKOUT, () => this.raw.checkout(treeish, GitService.toPaths(files)));
}
public clean(files: git.IFileStatus[]): winjs.Promise {
return this.run(git.ServiceOperations.CLEAN, () => this.raw.clean(files.map((s) => s.getPath())));
}
public undo(): winjs.Promise {
return this.run(git.ServiceOperations.UNDO, () => this.raw.undo());
}
public reset(treeish: string, hard?: boolean): winjs.Promise {
return this.run(git.ServiceOperations.RESET, () => this.raw.reset(treeish, hard));
}
public revertFiles(treeish: string, files?: git.IFileStatus[]): winjs.Promise {
return this.run(git.ServiceOperations.RESET, () => this.raw.revertFiles(treeish, (files || []).map((s) => s.getPath())));
}
public fetch(): winjs.Promise {
return this.run(git.ServiceOperations.BACKGROUND_FETCH, () => this.raw.fetch());
}
public pull(rebase?: boolean): winjs.Promise {
return this.run(git.ServiceOperations.PULL, () => this.raw.pull(rebase));
}
public push(remote?: string, name?: string, options?:git.IPushOptions): winjs.Promise {
return this.run(git.ServiceOperations.PUSH, () => this.raw.push(remote, name, options));
}
public sync(): winjs.Promise {
return this.run(git.ServiceOperations.SYNC, () => this.raw.sync());
}
public commit(message:string, amend: boolean = false, stage: boolean = false): winjs.Promise {
return this.run(git.ServiceOperations.COMMIT, () => this.raw.commit(message, amend, stage));
}
public detectMimetypes(path: string, treeish: string = '~'): winjs.Promise {
return this.raw.detectMimetypes(path, treeish);
}
private run(operationId: string, fn: () => winjs.Promise): winjs.Promise {
return this.raw.serviceState().then(state => {
if (state === git.RawServiceState.GitNotFound) {
this.transition(git.ServiceState.NoGit);
return winjs.Promise.as(null);
} else if (state === git.RawServiceState.Disabled) {
this.transition(git.ServiceState.Disabled);
return winjs.Promise.as(null);
} else {
return this._run(operationId, fn);
}
});
}
private _run(operationId: string, fn: () => winjs.Promise): winjs.Promise {
var operation = new operations.GitOperation(operationId, fn);
this.operations.push(operation);
this.emit(git.ServiceEvents.OPERATION_START, operation);
this.emit(git.ServiceEvents.OPERATION, operation);
var onDone = (error: any = null) => {
var index = this.operations.indexOf(operation);
if (index > -1) {
this.operations.splice(index, 1);
}
var e = { operation: operation, error: error };
this.emit(git.ServiceEvents.OPERATION_END, e);
this.onGitServiceOperationEnd(e);
this.emit(git.ServiceEvents.OPERATION, operation);
};
return operation.run().then((status: git.IRawStatus) => {
this.model.update(status);
onDone();
if (status) {
this.transition(types.isUndefinedOrNull(status.state) ? git.ServiceState.OK : status.state);
} else {
this.transition(git.ServiceState.NotARepo);
}
return this.model;
}, (e) => {
onDone(e);
if (errors.isPromiseCanceledError(e)) {
return winjs.Promise.wrapError(e);
}
var gitErrorCode: string = e.gitErrorCode || null;
if (gitErrorCode === git.GitErrorCodes.NotAtRepositoryRoot) {
this.transition(git.ServiceState.NotAtRepoRoot);
return winjs.Promise.as(this.model);
}
this.emit(git.ServiceEvents.ERROR, e);
this.transition(git.ServiceState.OK);
if (gitErrorCode === git.GitErrorCodes.NoUserNameConfigured || gitErrorCode === git.GitErrorCodes.NoUserEmailConfigured) {
this.messageService.show(severity.Warning, nls.localize('configureUsernameEmail', "Please configure your git user name and e-mail."));
return winjs.Promise.as(null);
} else if (gitErrorCode === git.GitErrorCodes.BadConfigFile) {
this.messageService.show(severity.Error, nls.localize('badConfigFile', "Git {0}", e.message));
return winjs.Promise.as(null);
} else if (gitErrorCode === git.GitErrorCodes.UnmergedChanges) {
this.messageService.show(severity.Warning, nls.localize('unmergedChanges', "You should first resolve the unmerged changes before committing your changes."));
return winjs.Promise.as(null);
}
var error: Error;
var showOutputAction = new actions.Action('show.gitOutput', nls.localize('showOutput', "Show Output"), null, true, () => this.outputService.showOutput('Git'));
var cancelAction = new actions.Action('close.message', nls.localize('cancel', "Cancel"), null, true, ()=>winjs.Promise.as(true));
error = errors.create(
nls.localize('checkNativeConsole', "There was an issue running a git operation. Please review the output or use a console to check the state of your repository."),
{ actions: [showOutputAction, cancelAction] }
);
(<any>error).gitErrorCode = gitErrorCode;
return winjs.Promise.wrapError(error);
});
}
private transition(state: git.ServiceState): void {
var oldState = this.state;
this.state = state;
if (state !== oldState) {
this.emit(git.ServiceEvents.STATE_CHANGED, state);
}
}
public buffer(path: string, treeish: string = '~'): winjs.TPromise<string> {
return this.raw.show(path, treeish);
}
public show(path: string, status: git.IFileStatus, treeish: string = '~', mimetype: string = 'text/plain'): winjs.Promise {
return this.detectMimetypes(path, treeish).then((mimetypes:string[]) => {
var pathComponents = status.getPathComponents();
var fileSegment = pathComponents[pathComponents.length - 1];
var folderSegment = toReadablePath(pathComponents.slice(0, pathComponents.length - 1).join('/'));
var description:string;
if (treeish === '~') {
description = nls.localize('changesFromIndex', "{0} - Changes on index", folderSegment);
} else {
description = nls.localize('changesFromTree', "{0} - Changes on {1}", folderSegment, treeish);
}
if (mime.isUnspecific(mimetypes)) {
mimetypes = mime.guessMimeTypes(path); // guess from path if our detection did not yield results
}
// Binary: our story is weak here for binary files on the index. Since we run natively, we do not have a way currently
// to e.g. show images as binary inside the renderer because images need to be served through a URL to show. We could revisit this by
// allowing to use data URLs for resource inputs to render them. However, this would mean potentially loading a large file into memory
//
// Our solution now is to detect binary files and immediately return an input that is flagged as binary unknown mime type.
if (mime.isBinaryMime(mime.guessMimeTypes(path)) || mimetypes.indexOf(mime.MIME_BINARY) >= 0) {
return winjs.Promise.wrapError(new Error('The resource seems to be binary and cannot be displayed'));
}
// Text
return winjs.Promise.as(this.instantiationService.createInstance(giteditorinputs.NativeGitIndexStringEditorInput, fileSegment, description, mimetypes.join(', '), status, path, treeish));
});
}
public getInput(status: git.IFileStatus): winjs.TPromise<WorkbenchEditorCommon.EditorInput> {
return this.inputCache.getInput(status).then(null, (err) => {
if (err.gitErrorCode = git.GitErrorCodes.CantOpenResource) {
this.messageService.show(severity.Warning, nls.localize('cantOpenResource', "Can't open this git resource."));
return winjs.Promise.as(null);
}
return winjs.Promise.wrapError(err);
});
}
public isInitialized(): boolean {
return this.state === git.ServiceState.OK;
}
public isIdle(): boolean {
return this.isInitialized() && !this.operations.some(op => op.id !== git.ServiceOperations.BACKGROUND_FETCH);
}
public getRunningOperations(): git.IGitOperation[] {
return this.operations;
}
public onOutput(): winjs.Promise {
return this.raw.onOutput();
}
public getAutoFetcher(): git.IAutoFetcher {
return this.autoFetcher;
}
private static toPaths(files: git.IFileStatus[]): string[] {
if (!files) {
return null;
}
return files.map((status) => {
/* In the case that a file was renamed in the index and (changed || deleted) in the
working tree, we must use its new name, running the checkout command.
*/
switch (status.getStatus()) {
case git.Status.MODIFIED:
case git.Status.DELETED:
if (status.getRename()) {
return status.getRename();
}
default:
return status.getPath();
}
});
}
public dispose(): void {
this.emit(git.ServiceEvents.DISPOSE);
if (this.model) {
this.model.dispose();
this.model = null;
}
if (this.remoteListenerUnbind) {
this.remoteListenerUnbind();
this.remoteListenerUnbind = null;
}
super.dispose();
}
} | mit |
flextry/Telerik-Academy | Programming with C#/0. Exams/Telerik 2012-2013 - OOP Exam/C# OOP - 25 March 2013 - Evening/Solutions/01. HTMLRenderer/HTMLElementFactory.cs | 510 | namespace HTMLRenderer
{
using System;
using System.Linq;
    /// <summary>
    /// Concrete <c>IElementFactory</c> producing the HTML-specific element and
    /// table implementations used by the renderer.
    /// </summary>
    public class HTMLElementFactory : IElementFactory
    {
        /// <summary>Creates an empty HTML element with the given tag name.</summary>
        public IElement CreateElement(string name)
        {
            return new HtmlElement(name);
        }
        /// <summary>Creates an HTML element with the given tag name and text content.</summary>
        public IElement CreateElement(string name, string content)
        {
            return new HtmlElement(name, content);
        }
        /// <summary>Creates an HTML table pre-sized to the given rows and columns.</summary>
        public ITable CreateTable(int rows, int cols)
        {
            return new HtmlTable(rows, cols);
        }
    }
} | mit |
ljknight/gulp-jshint | test/specs/reporters/index.js | 3142 | var RcFixture = require('../../util').RcFixture;
var lint = require('../../util').lint;
var File = require('../../util').File;
var jshint = require('../../../src');
var should = require('should');
// Builds an in-memory fixture file for the lint helper. Any falsy `content`
// (including an empty string) falls back to a default one-line script.
function fileContent(content) {
  var contents = content || 'wadup();';
  return new File({
    path: './test/fixture/file.js',
    contents: contents
  });
}
// Specs for the jshint result object the plugin attaches to each file:
// on success only `jshint.success` is set; on failure `results`/`data`
// are populated as well.
describe('reporting', function () {
  it('should send success status', function (done) {
    lint({
      file: fileContent('var a = 0; a += 1;'),
      eachFile: function (file) {
        should(file).property('jshint');
        should(file.jshint).property('success').equal(true);
        // A clean file must not carry any failure payload.
        should(file.jshint).not.have.property('results');
        should(file.jshint).not.have.property('data');
        should(file.jshint).not.have.property('opt');
      }
    }, done);
  });
  it('should send failure status', function (done) {
    lint({
      file: fileContent('doe ='),
      eachFile: function (file) {
        should(file).have.property('jshint');
        should(file.jshint).property('success').equal(false);
        should(file.jshint).property('results').is.an.Array;
        should(file.jshint).property('data').is.an.Array;
      }
    }, done);
  });
  it('should load jshint file and pass', function (done) {
    // Counts data events so we can assert exactly one file flowed through.
    var a = 0;
    var fakeFile = new File({
      path: './test/fixture/file.js',
      cwd: './test/',
      base: './test/fixture/',
      // NOTE(review): `new Buffer(...)` is deprecated in modern Node; kept
      // here for compatibility with the Node versions this suite targets.
      contents: new Buffer('wadup = 123;')
    });
    // The fixture rc disables the `undef` warning, so the bare global passes.
    var stream = jshint(RcFixture('.rc-!undef'));
    stream.on('data', function (newFile) {
      ++a;
      should.exist(newFile.jshint.success);
      newFile.jshint.success.should.equal(true);
      should.not.exist(newFile.jshint.results);
      should.not.exist(newFile.jshint.data);
      should.not.exist(newFile.jshint.opt);
    });
    stream.once('end', function () {
      a.should.equal(1);
      done();
    });
    stream.write(fakeFile);
    stream.end();
  });
});
// The fail-reporter specs live in a sibling file; requiring it registers them.
require('./fail');
// Specs for jshint.reporter(): files pass through unchanged, and a custom
// reporter function is invoked with (results, data, opt) from file.jshint.
describe('jshint.reporter()', function () {
  it('file should pass through', function (done) {
    // Counts data events so we can assert exactly one file flowed through.
    var a = 0;
    var fakeFile = new File({
      path: './test/fixture/file.js',
      cwd: './test/',
      base: './test/fixture/',
      contents: new Buffer('wadup();')
    });
    var stream = jshint.reporter();
    stream.on('data', function (newFile) {
      // The exact same file object must be forwarded, not a copy.
      newFile.should.equal(fakeFile);
      ++a;
    });
    stream.once('end', function () {
      a.should.equal(1);
      done();
    });
    stream.write(fakeFile);
    stream.end();
  });
  it('file should trigger reporter when .jshint exists', function (done) {
    var fakeFile = new File({
      path: './test/fixture/file.js',
      cwd: './test/',
      base: './test/fixture/',
      contents: new Buffer('wadup();')
    });
    // Sentinel values: the reporter should receive these exact fields.
    fakeFile.jshint = {
      success: false,
      results: 200, // not real data
      data: 300, // not real data
      opt: {} // not real data
    };
    var stream = jshint.reporter(function (results, data, opt) {
      should(results).equal(200);
      should(data).equal(300);
      should(opt).eql({});
      done();
    });
    stream.write(fakeFile);
    stream.end();
  });
});
FAU-Inf2/spongycastle | prov/src/main/java/org/spongycastle/jce/ECKeyUtil.java | 8881 | package org.spongycastle.jce;
import java.io.UnsupportedEncodingException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.NoSuchProviderException;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.PublicKey;
import java.security.Security;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import org.spongycastle.asn1.ASN1ObjectIdentifier;
import org.spongycastle.asn1.ASN1Primitive;
import org.spongycastle.asn1.cryptopro.CryptoProObjectIdentifiers;
import org.spongycastle.asn1.pkcs.PrivateKeyInfo;
import org.spongycastle.asn1.x509.AlgorithmIdentifier;
import org.spongycastle.asn1.x509.SubjectPublicKeyInfo;
import org.spongycastle.asn1.x9.X962Parameters;
import org.spongycastle.asn1.x9.X9ECParameters;
import org.spongycastle.asn1.x9.X9ObjectIdentifiers;
import org.spongycastle.jcajce.provider.asymmetric.util.ECUtil;
import org.spongycastle.jce.provider.BouncyCastleProvider;
/**
 * Utility class to allow conversion of EC key parameters to explicit from named
 * curves and back (where possible).
 * <p>
 * GOST R 34.10-2001 keys are EC based but their parameters cannot be expressed
 * in X9.62 form, so attempting to convert one throws {@link IllegalArgumentException}.
 */
public class ECKeyUtil
{
    /**
     * Convert a passed in public EC key to have explicit parameters. If the key
     * is already using explicit parameters it is returned.
     *
     * @param key key to be converted
     * @param providerName provider name to be used.
     * @return the equivalent key with explicit curve parameters
     * @throws IllegalArgumentException if the key cannot be converted (e.g. a GOST key).
     * @throws NoSuchAlgorithmException if the key's algorithm is unavailable from the provider.
     * @throws NoSuchProviderException if the named provider cannot be found.
     */
    public static PublicKey publicToExplicitParameters(PublicKey key, String providerName)
        throws IllegalArgumentException, NoSuchAlgorithmException, NoSuchProviderException
    {
        Provider provider = Security.getProvider(providerName);

        if (provider == null)
        {
            throw new NoSuchProviderException("cannot find provider: " + providerName);
        }

        return publicToExplicitParameters(key, provider);
    }

    /**
     * Convert a passed in public EC key to have explicit parameters. If the key
     * is already using explicit parameters it is returned.
     *
     * @param key key to be converted
     * @param provider provider to be used.
     * @return the equivalent key with explicit curve parameters
     * @throws IllegalArgumentException if the key cannot be converted (e.g. a GOST key).
     * @throws NoSuchAlgorithmException if the key's algorithm is unavailable from the provider.
     */
    public static PublicKey publicToExplicitParameters(PublicKey key, Provider provider)
        throws IllegalArgumentException, NoSuchAlgorithmException
    {
        try
        {
            SubjectPublicKeyInfo info = SubjectPublicKeyInfo.getInstance(ASN1Primitive.fromByteArray(key.getEncoded()));

            if (info.getAlgorithmId().getObjectId().equals(CryptoProObjectIdentifiers.gostR3410_2001))
            {
                throw new IllegalArgumentException("cannot convert GOST key to explicit parameters.");
            }

            X962Parameters params = X962Parameters.getInstance(info.getAlgorithmId().getParameters());
            X9ECParameters curveParams = resolveExplicitParameters(params);

            if (curveParams == null)
            {
                return key;    // already explicit
            }

            params = new X962Parameters(curveParams);

            // re-encode the key with the explicit parameter block in place of the curve OID
            info = new SubjectPublicKeyInfo(new AlgorithmIdentifier(X9ObjectIdentifiers.id_ecPublicKey, params), info.getPublicKeyData().getBytes());

            KeyFactory keyFact = KeyFactory.getInstance(key.getAlgorithm(), provider);

            return keyFact.generatePublic(new X509EncodedKeySpec(info.getEncoded()));
        }
        catch (IllegalArgumentException e)
        {
            throw e;
        }
        catch (NoSuchAlgorithmException e)
        {
            throw e;
        }
        catch (Exception e)
        {                 // shouldn't really happen...
            throw new UnexpectedException(e);
        }
    }

    /**
     * Convert a passed in private EC key to have explicit parameters. If the key
     * is already using explicit parameters it is returned.
     *
     * @param key key to be converted
     * @param providerName provider name to be used.
     * @return the equivalent key with explicit curve parameters
     * @throws IllegalArgumentException if the key cannot be converted (e.g. a GOST key).
     * @throws NoSuchAlgorithmException if the key's algorithm is unavailable from the provider.
     * @throws NoSuchProviderException if the named provider cannot be found.
     */
    public static PrivateKey privateToExplicitParameters(PrivateKey key, String providerName)
        throws IllegalArgumentException, NoSuchAlgorithmException, NoSuchProviderException
    {
        Provider provider = Security.getProvider(providerName);

        if (provider == null)
        {
            throw new NoSuchProviderException("cannot find provider: " + providerName);
        }

        return privateToExplicitParameters(key, provider);
    }

    /**
     * Convert a passed in private EC key to have explicit parameters. If the key
     * is already using explicit parameters it is returned.
     *
     * @param key key to be converted
     * @param provider provider to be used.
     * @return the equivalent key with explicit curve parameters
     * @throws IllegalArgumentException if the key cannot be converted (e.g. a GOST key).
     * @throws NoSuchAlgorithmException if the key's algorithm is unavailable from the provider.
     */
    public static PrivateKey privateToExplicitParameters(PrivateKey key, Provider provider)
        throws IllegalArgumentException, NoSuchAlgorithmException
    {
        try
        {
            PrivateKeyInfo info = PrivateKeyInfo.getInstance(ASN1Primitive.fromByteArray(key.getEncoded()));

            if (info.getAlgorithmId().getObjectId().equals(CryptoProObjectIdentifiers.gostR3410_2001))
            {
                // Fix: this used to throw UnsupportedEncodingException, which fell
                // through to the catch (Exception) clause and surfaced as an
                // undocumented RuntimeException. IllegalArgumentException matches
                // the declared contract and the public key variant above.
                throw new IllegalArgumentException("cannot convert GOST key to explicit parameters.");
            }

            X962Parameters params = X962Parameters.getInstance(info.getAlgorithmId().getParameters());
            X9ECParameters curveParams = resolveExplicitParameters(params);

            if (curveParams == null)
            {
                return key;    // already explicit
            }

            params = new X962Parameters(curveParams);

            // re-encode the key with the explicit parameter block in place of the curve OID
            info = new PrivateKeyInfo(new AlgorithmIdentifier(X9ObjectIdentifiers.id_ecPublicKey, params), info.parsePrivateKey());

            KeyFactory keyFact = KeyFactory.getInstance(key.getAlgorithm(), provider);

            return keyFact.generatePrivate(new PKCS8EncodedKeySpec(info.getEncoded()));
        }
        catch (IllegalArgumentException e)
        {
            throw e;
        }
        catch (NoSuchAlgorithmException e)
        {
            throw e;
        }
        catch (Exception e)
        {                 // shouldn't really happen
            throw new UnexpectedException(e);
        }
    }

    /**
     * Resolve the explicit curve parameters described by an X9.62 parameter block.
     * Shared by the public and private key conversions above.
     *
     * @param params the parameter block taken from the key's AlgorithmIdentifier.
     * @return the explicit parameters, or null if the block is already explicit.
     */
    private static X9ECParameters resolveExplicitParameters(X962Parameters params)
    {
        if (params.isNamedCurve())
        {
            ASN1ObjectIdentifier oid = ASN1ObjectIdentifier.getInstance(params.getParameters());
            X9ECParameters namedParams = ECUtil.getNamedCurveByOid(oid);

            // rebuild without the seed value due to a JDK encoding bug
            return new X9ECParameters(namedParams.getCurve(), namedParams.getG(), namedParams.getN(), namedParams.getH());
        }

        if (params.isImplicitlyCA())
        {
            // pull the implicitly-CA curve from the provider configuration
            return new X9ECParameters(
                BouncyCastleProvider.CONFIGURATION.getEcImplicitlyCa().getCurve(),
                BouncyCastleProvider.CONFIGURATION.getEcImplicitlyCa().getG(),
                BouncyCastleProvider.CONFIGURATION.getEcImplicitlyCa().getN(),
                BouncyCastleProvider.CONFIGURATION.getEcImplicitlyCa().getH());
        }

        return null;    // already explicit
    }

    /**
     * Wrapper used to re-throw unexpected checked exceptions as unchecked ones
     * while preserving the original cause (pre-dates RuntimeException(Throwable)
     * chaining availability in this code base).
     */
    private static class UnexpectedException
        extends RuntimeException
    {
        private Throwable cause;

        UnexpectedException(Throwable cause)
        {
            super(cause.toString());

            this.cause = cause;
        }

        public Throwable getCause()
        {
            return cause;
        }
    }
}
| mit |
beecode/mikrotik-api | src/MikrotikAPI/Talker/TalkerReciever.php | 2914 | <?php
namespace MikrotikAPI\Talker;
use MikrotikAPI\Core\Connector,
MikrotikAPI\Util\ResultUtil,
MikrotikAPI\Util\Util,
MikrotikAPI\Entity\Attribute,
MikrotikAPI\Util\DebugDumper;
/**
 * Description of TalkerReciever
 *
 * Reads reply sentences from a Mikrotik RouterOS API connection until a
 * terminating "!done" or "!trap" sentence arrives. Each "!re" (data) sentence
 * is parsed into a list of Attribute objects and collected in a ResultUtil.
 *
 * @author Lalu Erfandi Maula Yusnu nunenuh@gmail.com <http://vthink.web.id>
 * @copyright Copyright (c) 2011, Virtual Think Team.
 * @license http://opensource.org/licenses/gpl-license.php GNU Public License
 * @category Libraries
 */
class TalkerReciever {

    /** @var Connector open API connection the reply stream is read from */
    private $con;
    /** @var ResultUtil accumulated attribute lists, one per "!re" sentence */
    private $result;
    /** @var bool TRUE once an error ("!trap") sentence has been received */
    private $trap = FALSE;
    /** @var bool TRUE once the terminating "!done" sentence has been received */
    private $done = FALSE;
    /** @var bool TRUE once at least one data ("!re") sentence has been received */
    private $re = FALSE;
    /** @var bool when TRUE, every received raw sentence is dumped for debugging */
    private $debug = FALSE;

    public function __construct(Connector $con) {
        $this->con = $con;
        $this->result = new ResultUtil();
    }

    /** @return bool whether an error ("!trap") sentence was received */
    public function isTrap() {
        return $this->trap;
    }

    /** @return bool whether the terminating "!done" sentence was received */
    public function isDone() {
        return $this->done;
    }

    /** @return bool whether any data ("!re") sentence was received */
    public function isData() {
        return $this->re;
    }

    /** @return bool whether debug dumping is enabled */
    public function isDebug() {
        return $this->debug;
    }

    /** @param bool $boolean enable or disable debug dumping of raw sentences */
    public function setDebug($boolean) {
        $this->debug = $boolean;
    }

    /**
     * Parse one raw sentence into a list of Attribute objects and append the
     * list to the result set. API words look like "=name=value", so splitting
     * on "=" yields ["", name, value]; sentinel words ("!re", "!trap") are
     * skipped, and the first token is deliberately not processed.
     *
     * @param string $raw one newline separated reply sentence
     */
    private function parseRawToList($raw) {
        $raw = trim($raw);
        if (!empty($raw)) {
            $list = new \ArrayObject();
            $token = explode("\n", $raw);
            $a = 1;
            while ($a < count($token)) {
                next($token);
                $attr = new Attribute();
                if (!(current($token) == "!re") && !(current($token) == "!trap")) {
                    // Fix: limit the split to 3 parts so attribute values that
                    // themselves contain "=" (e.g. comments, scripts) are kept
                    // intact instead of being truncated or dropped to NULL.
                    $split = explode("=", current($token), 3);
                    $attr->setName($split[1]);
                    if (count($split) == 3) {
                        $attr->setValue($split[2]);
                    } else {
                        $attr->setValue(NULL);
                    }
                    $list->append($attr);
                }
                $a++;
            }
            if ($list->count() != 0)
                $this->result->add($list);
        }
    }

    /** @return ResultUtil the parsed data sentences collected so far */
    public function getResult() {
        return $this->result;
    }

    /** Blocks until the device finishes the reply with "!done" or "!trap". */
    public function doRecieving() {
        $this->run();
    }

    /** Dump a raw sentence when debugging is enabled. */
    private function runDebugger($string) {
        if ($this->isDebug()) {
            DebugDumper::dump($string);
        }
    }

    /**
     * Receive loop: read sentences from the connection, parse "!re" data
     * sentences, and stop on the terminating "!trap" or "!done" sentence.
     */
    private function run() {
        $s = "";
        while (true) {
            $s = $this->con->recieveStream();
            if (Util::contains($s, "!re")) {
                $this->parseRawToList($s);
                $this->runDebugger($s);
                $this->re = TRUE;
            }
            if (Util::contains($s, "!trap")) {
                $this->runDebugger($s);
                $this->trap = TRUE;
                break;
            }
            if (Util::contains($s, "!done")) {
                $this->runDebugger($s);
                $this->done = TRUE;
                break;
            }
        }
    }
}
| mit |
codeforboston/property_dashboard | node_modules/generator-flight/lib/generators/page/index.js | 670 | /**
* Module dependencies.
*/
var path = require('path');
var util = require('util');
var yeoman = require('yeoman-generator');
/**
* Module exports.
*/
module.exports = Generator;
/**
* Generator constructor.
*
* @api public
*/
function Generator() {
  // Delegate to NamedBase so the generator receives the page name argument
  // from the CLI invocation.
  yeoman.generators.NamedBase.apply(this, arguments);
  // Resolve templates from the shared top-level templates directory.
  this.sourceRoot(path.join(__dirname, '../../templates/'));
}

util.inherits(Generator, yeoman.generators.Base);
/**
* Generate files for a Flight page
*
* @api public
*/
Generator.prototype.createPageFiles = function createPageFiles() {
  // Fall back to a default page name when none was supplied.
  if (!this.name) {
    this.name = 'my_page';
  }
  var destination = 'app/js/page/' + this.name + '.js';
  this.template('page.js', destination);
};
| mit |
mgrman/Votyra | Assets/Plugins/UniRx/Scripts/System/Tuple.cs | 37591 | // defined from .NET Framework 4.0 and NETFX_CORE
// This code is basaed from mono/mcs, but some performance modified
// 1. class to struct
// 2. implements IEquatable<T1, T2,...>
// note, we need to create ValueTuple or UniRxTuple...
#if !(NETFX_CORE || NET_4_6 || NET_STANDARD_2_0 || UNITY_WSA_10_0)
using System;
using System.Collections;
using System.Collections.Generic;
namespace UniRx
{
    /// <summary>
    /// Element-wise equality/hashing with a caller-supplied comparer
    /// (local stand-in for System.Collections.IStructuralEquatable).
    /// </summary>
    public interface IStructuralEquatable
    {
        bool Equals(object other, IEqualityComparer comparer);
        int GetHashCode(IEqualityComparer comparer);
    }

    /// <summary>
    /// Element-wise ordering with a caller-supplied comparer
    /// (local stand-in for System.Collections.IStructuralComparable).
    /// </summary>
    public interface IStructuralComparable
    {
        int CompareTo(object other, IComparer comparer);
    }

    /// <summary>
    /// Internal marker for tuple types; its ToString yields the comma separated
    /// elements without the surrounding parentheses.
    /// </summary>
    interface ITuple
    {
        string ToString();
    }
public static class Tuple
{
public static Tuple<T1, T2, T3, T4, T5, T6, T7, Tuple<T8>> Create<T1, T2, T3, T4, T5, T6, T7, T8>
(
T1 item1,
T2 item2,
T3 item3,
T4 item4,
T5 item5,
T6 item6,
T7 item7,
T8 item8)
{
return new Tuple<T1, T2, T3, T4, T5, T6, T7, Tuple<T8>>(item1, item2, item3, item4, item5, item6, item7, new Tuple<T8>(item8));
}
public static Tuple<T1, T2, T3, T4, T5, T6, T7> Create<T1, T2, T3, T4, T5, T6, T7>
(
T1 item1,
T2 item2,
T3 item3,
T4 item4,
T5 item5,
T6 item6,
T7 item7)
{
return new Tuple<T1, T2, T3, T4, T5, T6, T7>(item1, item2, item3, item4, item5, item6, item7);
}
public static Tuple<T1, T2, T3, T4, T5, T6> Create<T1, T2, T3, T4, T5, T6>
(
T1 item1,
T2 item2,
T3 item3,
T4 item4,
T5 item5,
T6 item6)
{
return new Tuple<T1, T2, T3, T4, T5, T6>(item1, item2, item3, item4, item5, item6);
}
public static Tuple<T1, T2, T3, T4, T5> Create<T1, T2, T3, T4, T5>
(
T1 item1,
T2 item2,
T3 item3,
T4 item4,
T5 item5)
{
return new Tuple<T1, T2, T3, T4, T5>(item1, item2, item3, item4, item5);
}
public static Tuple<T1, T2, T3, T4> Create<T1, T2, T3, T4>
(
T1 item1,
T2 item2,
T3 item3,
T4 item4)
{
return new Tuple<T1, T2, T3, T4>(item1, item2, item3, item4);
}
public static Tuple<T1, T2, T3> Create<T1, T2, T3>
(
T1 item1,
T2 item2,
T3 item3)
{
return new Tuple<T1, T2, T3>(item1, item2, item3);
}
public static Tuple<T1, T2> Create<T1, T2>
(
T1 item1,
T2 item2)
{
return new Tuple<T1, T2>(item1, item2);
}
public static Tuple<T1> Create<T1>
(
T1 item1)
{
return new Tuple<T1>(item1);
}
}
public partial class Tuple<T1, T2, T3, T4, T5, T6, T7, TRest>
{
public Tuple(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7, TRest rest)
{
this.item1 = item1;
this.item2 = item2;
this.item3 = item3;
this.item4 = item4;
this.item5 = item5;
this.item6 = item6;
this.item7 = item7;
this.rest = rest;
if (!(rest is ITuple))
throw new ArgumentException("rest", "The last element of an eight element tuple must be a Tuple.");
}
}
    /// <summary>
    /// Immutable 1-tuple. Unlike the BCL reference type this is a struct
    /// (avoids allocation) and implements IEquatable for non-boxing equality.
    /// </summary>
    [Serializable]
    public struct Tuple<T1> : IStructuralEquatable, IStructuralComparable, IComparable, ITuple, IEquatable<Tuple<T1>>
    {
        T1 item1;

        public Tuple(T1 item1)
        {
            this.item1 = item1;
        }

        public T1 Item1
        {
            get { return item1; }
        }

        int IComparable.CompareTo(object obj)
        {
            return ((IStructuralComparable)this).CompareTo(obj, Comparer<object>.Default);
        }

        int IStructuralComparable.CompareTo(object other, IComparer comparer)
        {
            // null sorts before any tuple; a non-matching type is an error
            if (other == null) return 1;

            if (!(other is Tuple<T1>))
            {
                throw new ArgumentException("other");
            }

            var t = (Tuple<T1>)other;

            return comparer.Compare(item1, t.item1);
        }

        public override bool Equals(object obj)
        {
            return ((IStructuralEquatable)this).Equals(obj, EqualityComparer<object>.Default);
        }

        bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
        {
            if (!(other is Tuple<T1>))
                return false;

            var t = (Tuple<T1>)other;

            return comparer.Equals(item1, t.item1);
        }

        public override int GetHashCode()
        {
            return EqualityComparer<T1>.Default.GetHashCode(item1);
        }

        int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
        {
            return comparer.GetHashCode(item1);
        }

        string ITuple.ToString()
        {
            // elements only; the public ToString adds the parentheses
            return String.Format("{0}", item1);
        }

        public override string ToString()
        {
            return "(" + ((ITuple)this).ToString() + ")";
        }

        public bool Equals(Tuple<T1> other)
        {
            return EqualityComparer<T1>.Default.Equals(item1, other.item1);
        }
    }
    /// <summary>
    /// Immutable 2-tuple (struct variant of System.Tuple with IEquatable
    /// support). Ordering is lexicographic; hashing is element-wise.
    /// </summary>
    [Serializable]
    public struct Tuple<T1, T2> : IStructuralEquatable, IStructuralComparable, IComparable, ITuple, IEquatable<Tuple<T1, T2>>
    {
        T1 item1;
        T2 item2;

        public Tuple(T1 item1, T2 item2)
        {
            this.item1 = item1;
            this.item2 = item2;
        }

        public T1 Item1
        {
            get { return item1; }
        }

        public T2 Item2
        {
            get { return item2; }
        }

        int IComparable.CompareTo(object obj)
        {
            return ((IStructuralComparable)this).CompareTo(obj, Comparer<object>.Default);
        }

        int IStructuralComparable.CompareTo(object other, IComparer comparer)
        {
            // lexicographic: compare items left to right, first difference wins
            if (other == null) return 1;

            if (!(other is Tuple<T1, T2>))
            {
                throw new ArgumentException("other");
            }

            var t = (Tuple<T1, T2>)other;

            int res = comparer.Compare(item1, t.item1);
            if (res != 0) return res;

            return comparer.Compare(item2, t.item2);
        }

        public override bool Equals(object obj)
        {
            return ((IStructuralEquatable)this).Equals(obj, EqualityComparer<object>.Default);
        }

        bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
        {
            if (!(other is Tuple<T1, T2>))
                return false;

            var t = (Tuple<T1, T2>)other;

            return comparer.Equals(item1, t.item1) &&
                comparer.Equals(item2, t.item2);
        }

        public override int GetHashCode()
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;

            int h0;
            // combine: h = ((h << 5) + h) ^ next, i.e. h * 33 ^ next
            h0 = comparer1.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer2.GetHashCode(item2);
            return h0;
        }

        int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
        {
            int h0;
            h0 = comparer.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item2);
            return h0;
        }

        string ITuple.ToString()
        {
            // elements only; the public ToString adds the parentheses
            return String.Format("{0}, {1}", item1, item2);
        }

        public override string ToString()
        {
            return "(" + ((ITuple)this).ToString() + ")";
        }

        public bool Equals(Tuple<T1, T2> other)
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;

            return comparer1.Equals(item1, other.item1) &&
                comparer2.Equals(item2, other.item2);
        }
    }
    /// <summary>
    /// Immutable 3-tuple (struct variant of System.Tuple with IEquatable
    /// support). Ordering is lexicographic; hashing is element-wise.
    /// </summary>
    [Serializable]
    public struct Tuple<T1, T2, T3> : IStructuralEquatable, IStructuralComparable, IComparable, ITuple, IEquatable<Tuple<T1, T2, T3>>
    {
        T1 item1;
        T2 item2;
        T3 item3;

        public Tuple(T1 item1, T2 item2, T3 item3)
        {
            this.item1 = item1;
            this.item2 = item2;
            this.item3 = item3;
        }

        public T1 Item1
        {
            get { return item1; }
        }

        public T2 Item2
        {
            get { return item2; }
        }

        public T3 Item3
        {
            get { return item3; }
        }

        int IComparable.CompareTo(object obj)
        {
            return ((IStructuralComparable)this).CompareTo(obj, Comparer<object>.Default);
        }

        int IStructuralComparable.CompareTo(object other, IComparer comparer)
        {
            // lexicographic: compare items left to right, first difference wins
            if (other == null) return 1;

            if (!(other is Tuple<T1, T2, T3>))
            {
                throw new ArgumentException("other");
            }

            var t = (Tuple<T1, T2, T3>)other;

            int res = comparer.Compare(item1, t.item1);
            if (res != 0) return res;

            res = comparer.Compare(item2, t.item2);
            if (res != 0) return res;

            return comparer.Compare(item3, t.item3);
        }

        public override bool Equals(object obj)
        {
            return ((IStructuralEquatable)this).Equals(obj, EqualityComparer<object>.Default);
        }

        bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
        {
            if (!(other is Tuple<T1, T2, T3>))
                return false;

            var t = (Tuple<T1, T2, T3>)other;

            return comparer.Equals(item1, t.item1) &&
                comparer.Equals(item2, t.item2) &&
                comparer.Equals(item3, t.item3);
        }

        public override int GetHashCode()
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;

            int h0;
            // combine: h = ((h << 5) + h) ^ next, i.e. h * 33 ^ next
            h0 = comparer1.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer2.GetHashCode(item2);
            h0 = (h0 << 5) + h0 ^ comparer3.GetHashCode(item3);
            return h0;
        }

        int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
        {
            int h0;
            h0 = comparer.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item2);
            h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item3);
            return h0;
        }

        string ITuple.ToString()
        {
            // elements only; the public ToString adds the parentheses
            return String.Format("{0}, {1}, {2}", item1, item2, item3);
        }

        public override string ToString()
        {
            return "(" + ((ITuple)this).ToString() + ")";
        }

        public bool Equals(Tuple<T1, T2, T3> other)
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;

            return comparer1.Equals(item1, other.item1) &&
                comparer2.Equals(item2, other.item2) &&
                comparer3.Equals(item3, other.item3);
        }
    }
    /// <summary>
    /// Immutable 4-tuple (struct variant of System.Tuple with IEquatable
    /// support). Ordering is lexicographic; hashing combines pairs of elements.
    /// </summary>
    [Serializable]
    public struct Tuple<T1, T2, T3, T4> : IStructuralEquatable, IStructuralComparable, IComparable, ITuple, IEquatable<Tuple<T1, T2, T3, T4>>
    {
        T1 item1;
        T2 item2;
        T3 item3;
        T4 item4;

        public Tuple(T1 item1, T2 item2, T3 item3, T4 item4)
        {
            this.item1 = item1;
            this.item2 = item2;
            this.item3 = item3;
            this.item4 = item4;
        }

        public T1 Item1
        {
            get { return item1; }
        }

        public T2 Item2
        {
            get { return item2; }
        }

        public T3 Item3
        {
            get { return item3; }
        }

        public T4 Item4
        {
            get { return item4; }
        }

        int IComparable.CompareTo(object obj)
        {
            return ((IStructuralComparable)this).CompareTo(obj, Comparer<object>.Default);
        }

        int IStructuralComparable.CompareTo(object other, IComparer comparer)
        {
            // lexicographic: compare items left to right, first difference wins
            if (other == null) return 1;

            if (!(other is Tuple<T1, T2, T3, T4>))
            {
                throw new ArgumentException("other");
            }

            var t = (Tuple<T1, T2, T3, T4>)other;

            int res = comparer.Compare(item1, t.item1);
            if (res != 0) return res;

            res = comparer.Compare(item2, t.item2);
            if (res != 0) return res;

            res = comparer.Compare(item3, t.item3);
            if (res != 0) return res;

            return comparer.Compare(item4, t.item4);
        }

        public override bool Equals(object obj)
        {
            return ((IStructuralEquatable)this).Equals(obj, EqualityComparer<object>.Default);
        }

        bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
        {
            if (!(other is Tuple<T1, T2, T3, T4>))
                return false;

            var t = (Tuple<T1, T2, T3, T4>)other;

            return comparer.Equals(item1, t.item1) &&
                comparer.Equals(item2, t.item2) &&
                comparer.Equals(item3, t.item3) &&
                comparer.Equals(item4, t.item4);
        }

        public override int GetHashCode()
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;
            var comparer4 = EqualityComparer<T4>.Default;

            int h0, h1;
            // pairwise combine (h * 33 ^ next), then fold the pair hashes together
            h0 = comparer1.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer2.GetHashCode(item2);
            h1 = comparer3.GetHashCode(item3);
            h1 = (h1 << 5) + h1 ^ comparer4.GetHashCode(item4);
            h0 = (h0 << 5) + h0 ^ h1;
            return h0;
        }

        int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
        {
            int h0, h1;
            h0 = comparer.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item2);
            h1 = comparer.GetHashCode(item3);
            h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item4);
            h0 = (h0 << 5) + h0 ^ h1;
            return h0;
        }

        string ITuple.ToString()
        {
            // elements only; the public ToString adds the parentheses
            return String.Format("{0}, {1}, {2}, {3}", item1, item2, item3, item4);
        }

        public override string ToString()
        {
            return "(" + ((ITuple)this).ToString() + ")";
        }

        public bool Equals(Tuple<T1, T2, T3, T4> other)
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;
            var comparer4 = EqualityComparer<T4>.Default;

            return comparer1.Equals(item1, other.item1) &&
                comparer2.Equals(item2, other.item2) &&
                comparer3.Equals(item3, other.item3) &&
                comparer4.Equals(item4, other.item4);
        }
    }
    /// <summary>
    /// Immutable 5-tuple (struct variant of System.Tuple with IEquatable
    /// support). Ordering is lexicographic; hashing combines pairs of elements.
    /// </summary>
    [Serializable]
    public struct Tuple<T1, T2, T3, T4, T5> : IStructuralEquatable, IStructuralComparable, IComparable, ITuple, IEquatable<Tuple<T1, T2, T3, T4, T5>>
    {
        T1 item1;
        T2 item2;
        T3 item3;
        T4 item4;
        T5 item5;

        public Tuple(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5)
        {
            this.item1 = item1;
            this.item2 = item2;
            this.item3 = item3;
            this.item4 = item4;
            this.item5 = item5;
        }

        public T1 Item1
        {
            get { return item1; }
        }

        public T2 Item2
        {
            get { return item2; }
        }

        public T3 Item3
        {
            get { return item3; }
        }

        public T4 Item4
        {
            get { return item4; }
        }

        public T5 Item5
        {
            get { return item5; }
        }

        int IComparable.CompareTo(object obj)
        {
            return ((IStructuralComparable)this).CompareTo(obj, Comparer<object>.Default);
        }

        int IStructuralComparable.CompareTo(object other, IComparer comparer)
        {
            // lexicographic: compare items left to right, first difference wins
            if (other == null) return 1;

            if (!(other is Tuple<T1, T2, T3, T4, T5>))
            {
                throw new ArgumentException("other");
            }

            var t = (Tuple<T1, T2, T3, T4, T5>)other;

            int res = comparer.Compare(item1, t.item1);
            if (res != 0) return res;

            res = comparer.Compare(item2, t.item2);
            if (res != 0) return res;

            res = comparer.Compare(item3, t.item3);
            if (res != 0) return res;

            res = comparer.Compare(item4, t.item4);
            if (res != 0) return res;

            return comparer.Compare(item5, t.item5);
        }

        public override bool Equals(object obj)
        {
            return ((IStructuralEquatable)this).Equals(obj, EqualityComparer<object>.Default);
        }

        bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
        {
            if (!(other is Tuple<T1, T2, T3, T4, T5>))
                return false;

            var t = (Tuple<T1, T2, T3, T4, T5>)other;

            return comparer.Equals(item1, t.item1) &&
                comparer.Equals(item2, t.item2) &&
                comparer.Equals(item3, t.item3) &&
                comparer.Equals(item4, t.item4) &&
                comparer.Equals(item5, t.item5);
        }

        public override int GetHashCode()
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;
            var comparer4 = EqualityComparer<T4>.Default;
            var comparer5 = EqualityComparer<T5>.Default;

            int h0, h1;
            // pairwise combine (h * 33 ^ next), fold pairs, then mix the tail element
            h0 = comparer1.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer2.GetHashCode(item2);
            h1 = comparer3.GetHashCode(item3);
            h1 = (h1 << 5) + h1 ^ comparer4.GetHashCode(item4);
            h0 = (h0 << 5) + h0 ^ h1;
            h0 = (h0 << 5) + h0 ^ comparer5.GetHashCode(item5);
            return h0;
        }

        int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
        {
            int h0, h1;
            h0 = comparer.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item2);
            h1 = comparer.GetHashCode(item3);
            h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item4);
            h0 = (h0 << 5) + h0 ^ h1;
            h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item5);
            return h0;
        }

        string ITuple.ToString()
        {
            // elements only; the public ToString adds the parentheses
            return String.Format("{0}, {1}, {2}, {3}, {4}", item1, item2, item3, item4, item5);
        }

        public override string ToString()
        {
            return "(" + ((ITuple)this).ToString() + ")";
        }

        public bool Equals(Tuple<T1, T2, T3, T4, T5> other)
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;
            var comparer4 = EqualityComparer<T4>.Default;
            var comparer5 = EqualityComparer<T5>.Default;

            return comparer1.Equals(item1, other.Item1) &&
                comparer2.Equals(item2, other.Item2) &&
                comparer3.Equals(item3, other.Item3) &&
                comparer4.Equals(item4, other.Item4) &&
                comparer5.Equals(item5, other.Item5);
        }
    }
    /// <summary>
    /// Immutable 6-tuple (struct variant of System.Tuple with IEquatable
    /// support). Ordering is lexicographic; hashing combines pairs of elements.
    /// </summary>
    [Serializable]
    public struct Tuple<T1, T2, T3, T4, T5, T6> : IStructuralEquatable, IStructuralComparable, IComparable, ITuple, IEquatable<Tuple<T1, T2, T3, T4, T5, T6>>
    {
        T1 item1;
        T2 item2;
        T3 item3;
        T4 item4;
        T5 item5;
        T6 item6;

        public Tuple(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6)
        {
            this.item1 = item1;
            this.item2 = item2;
            this.item3 = item3;
            this.item4 = item4;
            this.item5 = item5;
            this.item6 = item6;
        }

        public T1 Item1
        {
            get { return item1; }
        }

        public T2 Item2
        {
            get { return item2; }
        }

        public T3 Item3
        {
            get { return item3; }
        }

        public T4 Item4
        {
            get { return item4; }
        }

        public T5 Item5
        {
            get { return item5; }
        }

        public T6 Item6
        {
            get { return item6; }
        }

        int IComparable.CompareTo(object obj)
        {
            return ((IStructuralComparable)this).CompareTo(obj, Comparer<object>.Default);
        }

        int IStructuralComparable.CompareTo(object other, IComparer comparer)
        {
            // lexicographic: compare items left to right, first difference wins
            if (other == null) return 1;

            if (!(other is Tuple<T1, T2, T3, T4, T5, T6>))
            {
                throw new ArgumentException("other");
            }

            var t = (Tuple<T1, T2, T3, T4, T5, T6>)other;

            int res = comparer.Compare(item1, t.item1);
            if (res != 0) return res;

            res = comparer.Compare(item2, t.item2);
            if (res != 0) return res;

            res = comparer.Compare(item3, t.item3);
            if (res != 0) return res;

            res = comparer.Compare(item4, t.item4);
            if (res != 0) return res;

            res = comparer.Compare(item5, t.item5);
            if (res != 0) return res;

            return comparer.Compare(item6, t.item6);
        }

        public override bool Equals(object obj)
        {
            return ((IStructuralEquatable)this).Equals(obj, EqualityComparer<object>.Default);
        }

        bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
        {
            if (!(other is Tuple<T1, T2, T3, T4, T5, T6>))
                return false;

            var t = (Tuple<T1, T2, T3, T4, T5, T6>)other;

            return comparer.Equals(item1, t.item1) &&
                comparer.Equals(item2, t.item2) &&
                comparer.Equals(item3, t.item3) &&
                comparer.Equals(item4, t.item4) &&
                comparer.Equals(item5, t.item5) &&
                comparer.Equals(item6, t.item6);
        }

        public override int GetHashCode()
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;
            var comparer4 = EqualityComparer<T4>.Default;
            var comparer5 = EqualityComparer<T5>.Default;
            var comparer6 = EqualityComparer<T6>.Default;

            int h0, h1;
            // pairwise combine (h * 33 ^ next), then fold the pair hashes together
            h0 = comparer1.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer2.GetHashCode(item2);
            h1 = comparer3.GetHashCode(item3);
            h1 = (h1 << 5) + h1 ^ comparer4.GetHashCode(item4);
            h0 = (h0 << 5) + h0 ^ h1;
            h1 = comparer5.GetHashCode(item5);
            h1 = (h1 << 5) + h1 ^ comparer6.GetHashCode(item6);
            h0 = (h0 << 5) + h0 ^ h1;
            return h0;
        }

        int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
        {
            int h0, h1;
            h0 = comparer.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item2);
            h1 = comparer.GetHashCode(item3);
            h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item4);
            h0 = (h0 << 5) + h0 ^ h1;
            h1 = comparer.GetHashCode(item5);
            h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item6);
            h0 = (h0 << 5) + h0 ^ h1;
            return h0;
        }

        string ITuple.ToString()
        {
            // elements only; the public ToString adds the parentheses
            return String.Format("{0}, {1}, {2}, {3}, {4}, {5}", item1, item2, item3, item4, item5, item6);
        }

        public override string ToString()
        {
            return "(" + ((ITuple)this).ToString() + ")";
        }

        public bool Equals(Tuple<T1, T2, T3, T4, T5, T6> other)
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;
            var comparer4 = EqualityComparer<T4>.Default;
            var comparer5 = EqualityComparer<T5>.Default;
            var comparer6 = EqualityComparer<T6>.Default;

            return comparer1.Equals(item1, other.Item1) &&
                comparer2.Equals(item2, other.Item2) &&
                comparer3.Equals(item3, other.Item3) &&
                comparer4.Equals(item4, other.Item4) &&
                comparer5.Equals(item5, other.Item5) &&
                comparer6.Equals(item6, other.Item6);
        }
    }
    /// <summary>
    /// Immutable 7-tuple (struct variant of System.Tuple with IEquatable
    /// support). Ordering is lexicographic; hashing combines pairs of elements.
    /// </summary>
    [Serializable]
    public struct Tuple<T1, T2, T3, T4, T5, T6, T7> : IStructuralEquatable, IStructuralComparable, IComparable, ITuple, IEquatable<Tuple<T1, T2, T3, T4, T5, T6, T7>>
    {
        T1 item1;
        T2 item2;
        T3 item3;
        T4 item4;
        T5 item5;
        T6 item6;
        T7 item7;

        public Tuple(T1 item1, T2 item2, T3 item3, T4 item4, T5 item5, T6 item6, T7 item7)
        {
            this.item1 = item1;
            this.item2 = item2;
            this.item3 = item3;
            this.item4 = item4;
            this.item5 = item5;
            this.item6 = item6;
            this.item7 = item7;
        }

        public T1 Item1
        {
            get { return item1; }
        }

        public T2 Item2
        {
            get { return item2; }
        }

        public T3 Item3
        {
            get { return item3; }
        }

        public T4 Item4
        {
            get { return item4; }
        }

        public T5 Item5
        {
            get { return item5; }
        }

        public T6 Item6
        {
            get { return item6; }
        }

        public T7 Item7
        {
            get { return item7; }
        }

        int IComparable.CompareTo(object obj)
        {
            return ((IStructuralComparable)this).CompareTo(obj, Comparer<object>.Default);
        }

        int IStructuralComparable.CompareTo(object other, IComparer comparer)
        {
            // lexicographic: compare items left to right, first difference wins
            if (other == null) return 1;

            if (!(other is Tuple<T1, T2, T3, T4, T5, T6, T7>))
            {
                throw new ArgumentException("other");
            }

            var t = (Tuple<T1, T2, T3, T4, T5, T6, T7>)other;

            int res = comparer.Compare(item1, t.item1);
            if (res != 0) return res;

            res = comparer.Compare(item2, t.item2);
            if (res != 0) return res;

            res = comparer.Compare(item3, t.item3);
            if (res != 0) return res;

            res = comparer.Compare(item4, t.item4);
            if (res != 0) return res;

            res = comparer.Compare(item5, t.item5);
            if (res != 0) return res;

            res = comparer.Compare(item6, t.item6);
            if (res != 0) return res;

            return comparer.Compare(item7, t.item7);
        }

        public override bool Equals(object obj)
        {
            return ((IStructuralEquatable)this).Equals(obj, EqualityComparer<object>.Default);
        }

        bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
        {
            if (!(other is Tuple<T1, T2, T3, T4, T5, T6, T7>))
                return false;

            var t = (Tuple<T1, T2, T3, T4, T5, T6, T7>)other;

            return comparer.Equals(item1, t.item1) &&
                comparer.Equals(item2, t.item2) &&
                comparer.Equals(item3, t.item3) &&
                comparer.Equals(item4, t.item4) &&
                comparer.Equals(item5, t.item5) &&
                comparer.Equals(item6, t.item6) &&
                comparer.Equals(item7, t.item7);
        }

        public override int GetHashCode()
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;
            var comparer4 = EqualityComparer<T4>.Default;
            var comparer5 = EqualityComparer<T5>.Default;
            var comparer6 = EqualityComparer<T6>.Default;
            var comparer7 = EqualityComparer<T7>.Default;

            int h0, h1;
            // pairwise combine (h * 33 ^ next); the last group mixes three elements
            h0 = comparer1.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer2.GetHashCode(item2);
            h1 = comparer3.GetHashCode(item3);
            h1 = (h1 << 5) + h1 ^ comparer4.GetHashCode(item4);
            h0 = (h0 << 5) + h0 ^ h1;
            h1 = comparer5.GetHashCode(item5);
            h1 = (h1 << 5) + h1 ^ comparer6.GetHashCode(item6);
            h1 = (h1 << 5) + h1 ^ comparer7.GetHashCode(item7);
            h0 = (h0 << 5) + h0 ^ h1;
            return h0;
        }

        int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
        {
            int h0, h1;
            h0 = comparer.GetHashCode(item1);
            h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item2);
            h1 = comparer.GetHashCode(item3);
            h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item4);
            h0 = (h0 << 5) + h0 ^ h1;
            h1 = comparer.GetHashCode(item5);
            h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item6);
            h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item7);
            h0 = (h0 << 5) + h0 ^ h1;
            return h0;
        }

        string ITuple.ToString()
        {
            // elements only; the public ToString adds the parentheses
            return String.Format("{0}, {1}, {2}, {3}, {4}, {5}, {6}", item1, item2, item3, item4, item5, item6, item7);
        }

        public override string ToString()
        {
            return "(" + ((ITuple)this).ToString() + ")";
        }

        public bool Equals(Tuple<T1, T2, T3, T4, T5, T6, T7> other)
        {
            var comparer1 = EqualityComparer<T1>.Default;
            var comparer2 = EqualityComparer<T2>.Default;
            var comparer3 = EqualityComparer<T3>.Default;
            var comparer4 = EqualityComparer<T4>.Default;
            var comparer5 = EqualityComparer<T5>.Default;
            var comparer6 = EqualityComparer<T6>.Default;
            var comparer7 = EqualityComparer<T7>.Default;

            return comparer1.Equals(item1, other.Item1) &&
                comparer2.Equals(item2, other.Item2) &&
                comparer3.Equals(item3, other.Item3) &&
                comparer4.Equals(item4, other.Item4) &&
                comparer5.Equals(item5, other.Item5) &&
                comparer6.Equals(item6, other.Item6) &&
                comparer7.Equals(item7, other.Item7);
        }
    }
[Serializable]
public partial class Tuple<T1, T2, T3, T4, T5, T6, T7, TRest> : IStructuralEquatable, IStructuralComparable, IComparable, ITuple, IEquatable<Tuple<T1, T2, T3, T4, T5, T6, T7, TRest>>
{
T1 item1;
T2 item2;
T3 item3;
T4 item4;
T5 item5;
T6 item6;
T7 item7;
TRest rest;
public T1 Item1
{
get { return item1; }
}
public T2 Item2
{
get { return item2; }
}
public T3 Item3
{
get { return item3; }
}
public T4 Item4
{
get { return item4; }
}
public T5 Item5
{
get { return item5; }
}
public T6 Item6
{
get { return item6; }
}
public T7 Item7
{
get { return item7; }
}
public TRest Rest
{
get { return rest; }
}
int IComparable.CompareTo(object obj)
{
return ((IStructuralComparable)this).CompareTo(obj, Comparer<object>.Default);
}
int IStructuralComparable.CompareTo(object other, IComparer comparer)
{
if (other == null) return 1;
if (!(other is Tuple<T1, T2, T3, T4, T5, T6, T7, TRest>))
{
throw new ArgumentException("other");
}
var t = (Tuple<T1, T2, T3, T4, T5, T6, T7, TRest>)other;
int res = comparer.Compare(item1, t.item1);
if (res != 0) return res;
res = comparer.Compare(item2, t.item2);
if (res != 0) return res;
res = comparer.Compare(item3, t.item3);
if (res != 0) return res;
res = comparer.Compare(item4, t.item4);
if (res != 0) return res;
res = comparer.Compare(item5, t.item5);
if (res != 0) return res;
res = comparer.Compare(item6, t.item6);
if (res != 0) return res;
res = comparer.Compare(item7, t.item7);
if (res != 0) return res;
return comparer.Compare(rest, t.rest);
}
public override bool Equals(object obj)
{
return ((IStructuralEquatable)this).Equals(obj, EqualityComparer<object>.Default);
}
bool IStructuralEquatable.Equals(object other, IEqualityComparer comparer)
{
if (!(other is Tuple<T1, T2, T3, T4, T5, T6, T7, TRest>))
return false;
var t = (Tuple<T1, T2, T3, T4, T5, T6, T7, TRest>)other;
return comparer.Equals(item1, t.item1) &&
comparer.Equals(item2, t.item2) &&
comparer.Equals(item3, t.item3) &&
comparer.Equals(item4, t.item4) &&
comparer.Equals(item5, t.item5) &&
comparer.Equals(item6, t.item6) &&
comparer.Equals(item7, t.item7) &&
comparer.Equals(rest, t.rest);
}
public override int GetHashCode()
{
var comparer1 = EqualityComparer<T1>.Default;
var comparer2 = EqualityComparer<T2>.Default;
var comparer3 = EqualityComparer<T3>.Default;
var comparer4 = EqualityComparer<T4>.Default;
var comparer5 = EqualityComparer<T5>.Default;
var comparer6 = EqualityComparer<T6>.Default;
var comparer7 = EqualityComparer<T7>.Default;
var comparer8 = EqualityComparer<TRest>.Default;
int h0, h1, h2;
h0 = comparer1.GetHashCode(item1);
h0 = (h0 << 5) + h0 ^ comparer2.GetHashCode(item2);
h1 = comparer3.GetHashCode(item3);
h1 = (h1 << 5) + h1 ^ comparer4.GetHashCode(item4);
h0 = (h0 << 5) + h0 ^ h1;
h1 = comparer5.GetHashCode(item5);
h1 = (h1 << 5) + h1 ^ comparer6.GetHashCode(item6);
h2 = comparer7.GetHashCode(item7);
h2 = (h2 << 5) + h2 ^ comparer8.GetHashCode(rest);
h1 = (h1 << 5) + h1 ^ h2;
h0 = (h0 << 5) + h0 ^ h1;
return h0;
}
int IStructuralEquatable.GetHashCode(IEqualityComparer comparer)
{
int h0, h1, h2;
h0 = comparer.GetHashCode(item1);
h0 = (h0 << 5) + h0 ^ comparer.GetHashCode(item2);
h1 = comparer.GetHashCode(item3);
h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item4);
h0 = (h0 << 5) + h0 ^ h1;
h1 = comparer.GetHashCode(item5);
h1 = (h1 << 5) + h1 ^ comparer.GetHashCode(item6);
h2 = comparer.GetHashCode(item7);
h2 = (h2 << 5) + h2 ^ comparer.GetHashCode(rest);
h1 = (h1 << 5) + h1 ^ h2;
h0 = (h0 << 5) + h0 ^ h1;
return h0;
}
string ITuple.ToString()
{
return String.Format("{0}, {1}, {2}, {3}, {4}, {5}, {6}, {7}", item1, item2, item3, item4, item5, item6, item7, ((ITuple)rest).ToString());
}
public override string ToString()
{
return "(" + ((ITuple)this).ToString() + ")";
}
public bool Equals(Tuple<T1, T2, T3, T4, T5, T6, T7, TRest> other)
{
var comparer1 = EqualityComparer<T1>.Default;
var comparer2 = EqualityComparer<T2>.Default;
var comparer3 = EqualityComparer<T3>.Default;
var comparer4 = EqualityComparer<T4>.Default;
var comparer5 = EqualityComparer<T5>.Default;
var comparer6 = EqualityComparer<T6>.Default;
var comparer7 = EqualityComparer<T7>.Default;
var comparer8 = EqualityComparer<TRest>.Default;
return comparer1.Equals(item1, other.Item1) &&
comparer2.Equals(item2, other.Item2) &&
comparer3.Equals(item3, other.Item3) &&
comparer4.Equals(item4, other.Item4) &&
comparer5.Equals(item5, other.Item5) &&
comparer6.Equals(item6, other.Item6) &&
comparer7.Equals(item7, other.Item7) &&
comparer8.Equals(rest, other.rest);
}
}
}
#endif | mit |
tisb-vikram/azure-iot-sdks | c/doc/api_reference/html/search/enums_0.js | 558 | var searchData=
[
['http_5fheaders_5fresult_5ftag',['HTTP_HEADERS_RESULT_TAG',['../httpheaders_8h.html#a8f45dc684b1c576d92316e46a253203c',1,'httpheaders.h']]],
['httpapi_5frequest_5ftype_5ftag',['HTTPAPI_REQUEST_TYPE_TAG',['../httpapi_8h.html#a81bf3649034ead3dd83e2fd34571e89c',1,'httpapi.h']]],
['httpapi_5fresult_5ftag',['HTTPAPI_RESULT_TAG',['../httpapi_8h.html#a98e397f3dccc10de6bd136785012ca87',1,'httpapi.h']]],
['httpapiex_5fresult_5ftag',['HTTPAPIEX_RESULT_TAG',['../httpapiex_8h.html#a605d1cdabe6e2dd794c507a096bb06e8',1,'httpapiex.h']]]
];
| mit |
azverkan/scons | test/Fortran/F95FILESUFFIXES.py | 3937 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import TestSCons
from common import write_fake_link
_python_ = TestSCons._python_
_exe = TestSCons._exe
test = TestSCons.TestSCons()
write_fake_link(test)
test.write('myfortran.py', r"""
import getopt
import sys
comment = '#' + sys.argv[1]
opts, args = getopt.getopt(sys.argv[2:], 'co:')
for opt, arg in opts:
if opt == '-o': out = arg
infile = open(args[0], 'rb')
outfile = open(out, 'wb')
for l in infile.readlines():
if l[:len(comment)] != comment:
outfile.write(l)
sys.exit(0)
""")
# Test default file suffix: .f90/.F90 for F90
test.write('SConstruct', """
env = Environment(LINK = r'%(_python_)s mylink.py',
LINKFLAGS = [],
F95 = r'%(_python_)s myfortran.py f95',
FORTRAN = r'%(_python_)s myfortran.py fortran')
env.Program(target = 'test01', source = 'test01.f')
env.Program(target = 'test02', source = 'test02.F')
env.Program(target = 'test03', source = 'test03.for')
env.Program(target = 'test04', source = 'test04.FOR')
env.Program(target = 'test05', source = 'test05.ftn')
env.Program(target = 'test06', source = 'test06.FTN')
env.Program(target = 'test07', source = 'test07.fpp')
env.Program(target = 'test08', source = 'test08.FPP')
env.Program(target = 'test09', source = 'test09.f95')
env.Program(target = 'test10', source = 'test10.F95')
""" % locals())
test.write('test01.f', "This is a .f file.\n#link\n#fortran\n")
test.write('test02.F', "This is a .F file.\n#link\n#fortran\n")
test.write('test03.for', "This is a .for file.\n#link\n#fortran\n")
test.write('test04.FOR', "This is a .FOR file.\n#link\n#fortran\n")
test.write('test05.ftn', "This is a .ftn file.\n#link\n#fortran\n")
test.write('test06.FTN', "This is a .FTN file.\n#link\n#fortran\n")
test.write('test07.fpp', "This is a .fpp file.\n#link\n#fortran\n")
test.write('test08.FPP', "This is a .FPP file.\n#link\n#fortran\n")
test.write('test09.f95', "This is a .f95 file.\n#link\n#f95\n")
test.write('test10.F95', "This is a .F95 file.\n#link\n#f95\n")
test.run(arguments = '.', stderr = None)
test.must_match('test01' + _exe, "This is a .f file.\n")
test.must_match('test02' + _exe, "This is a .F file.\n")
test.must_match('test03' + _exe, "This is a .for file.\n")
test.must_match('test04' + _exe, "This is a .FOR file.\n")
test.must_match('test05' + _exe, "This is a .ftn file.\n")
test.must_match('test06' + _exe, "This is a .FTN file.\n")
test.must_match('test07' + _exe, "This is a .fpp file.\n")
test.must_match('test08' + _exe, "This is a .FPP file.\n")
test.must_match('test09' + _exe, "This is a .f95 file.\n")
test.must_match('test10' + _exe, "This is a .F95 file.\n")
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
taylorzane/adelyte-boxen | modules/homebrew/spec/classes/homebrew_spec.rb | 1148 | require "spec_helper"
# Catalog expectations for the 'homebrew' Puppet class under the default
# Boxen test facts: install exec, env scripts, tap command files, directories.
describe "homebrew" do
  let(:facts) { default_test_facts }
  let(:dir) { facts[:homebrew_root] }
  let(:cmddir) { "#{dir}/Library/Taps/boxen/homebrew-brews/cmd" }
  it do
    # Homebrew is cloned into its root; `creates` keeps the exec idempotent.
    should contain_exec("install homebrew to #{dir}").with({
      :cwd => dir,
      :user => 'testuser',
      :creates => "#{dir}/.git"
    })
    # Legacy per-flag env scripts must be removed in favor of the single script.
    ["ldflags.sh", "cflags.sh", "homebrew.sh"].each do |f|
      should contain_file("/test/boxen/env.d/#{f}").
        with_ensure(:absent)
    end
    should contain_boxen__env_script("homebrew")
    should contain_file("#{cmddir}/boxen-bottle-hooks.rb").
      with_source("puppet:///modules/homebrew/boxen-bottle-hooks.rb")
    # Custom `brew boxen-*` subcommands shipped from the module.
    ["latest", "install", "upgrade"].each do |cmd|
      should contain_file("#{cmddir}/brew-boxen-#{cmd}.rb").
        with_source("puppet:///modules/homebrew/brew-boxen-#{cmd}.rb")
    end
    should contain_file("#{dir}/lib").with_ensure("directory")
    should contain_file(cmddir).with_ensure("directory")
    should contain_file("#{dir}/Library/Taps").with_ensure("directory")
    should contain_file("/test/boxen/cache/homebrew").with_ensure("directory")
  end
end
| mit |
QLGu/rubillow | spec/rubillow/configuration_spec.rb | 479 | require "spec_helper"
# Verifies every Rubillow configuration option exists with its documented
# default (zwsid has no default and must be supplied by the user).
describe Rubillow::Configuration do
  it { should have_configuration_option(:host).default("www.zillow.com") }
  it { should have_configuration_option(:port).default(80) }
  it { should have_configuration_option(:path).default("webservice/") }
  it { should have_configuration_option(:zwsid).default(nil) }
  it { should have_configuration_option(:http_open_timeout).default(2) }
  it { should have_configuration_option(:http_read_timeout).default(2) }
end
hoangnghiem/shareable | lib/shareable/railtie.rb | 152 | module Shareable
class Railtie < ::Rails::Railtie #:nodoc:
initializer 'shareable' do |_app|
Shareable::Hooks.init
end
end
end
| mit |
kbase/kb_sdk | src/java/us/kbase/narrativemethodstore/AppSteps.java | 3018 |
package us.kbase.narrativemethodstore;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
/**
 * <p>Original spec-file type: AppSteps</p>
 *
 * Jackson-mapped bean for one step of an app: its id, the method it runs,
 * and a list of input mappings (presumably wiring earlier step outputs onto
 * this method's inputs — see {@link AppStepInputMapping}; confirm in spec).
 * NOTE(review): generated by jsonschema2pojo — regenerate from the spec
 * rather than hand-editing.
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@Generated("com.googlecode.jsonschema2pojo")
@JsonPropertyOrder({
    "step_id",
    "method_id",
    "input_mapping",
    "description"
})
public class AppSteps {
    @JsonProperty("step_id")
    private String stepId;
    @JsonProperty("method_id")
    private String methodId;
    @JsonProperty("input_mapping")
    private List<AppStepInputMapping> inputMapping;
    @JsonProperty("description")
    private String description;
    // Catch-all for JSON keys not declared above.
    private Map<String, Object> additionalProperties = new HashMap<String, Object>();
    @JsonProperty("step_id")
    public String getStepId() {
        return stepId;
    }
    @JsonProperty("step_id")
    public void setStepId(String stepId) {
        this.stepId = stepId;
    }
    // Fluent variant of the setter; returns this for chaining.
    public AppSteps withStepId(String stepId) {
        this.stepId = stepId;
        return this;
    }
    @JsonProperty("method_id")
    public String getMethodId() {
        return methodId;
    }
    @JsonProperty("method_id")
    public void setMethodId(String methodId) {
        this.methodId = methodId;
    }
    public AppSteps withMethodId(String methodId) {
        this.methodId = methodId;
        return this;
    }
    @JsonProperty("input_mapping")
    public List<AppStepInputMapping> getInputMapping() {
        return inputMapping;
    }
    @JsonProperty("input_mapping")
    public void setInputMapping(List<AppStepInputMapping> inputMapping) {
        this.inputMapping = inputMapping;
    }
    public AppSteps withInputMapping(List<AppStepInputMapping> inputMapping) {
        this.inputMapping = inputMapping;
        return this;
    }
    @JsonProperty("description")
    public String getDescription() {
        return description;
    }
    @JsonProperty("description")
    public void setDescription(String description) {
        this.description = description;
    }
    public AppSteps withDescription(String description) {
        this.description = description;
        return this;
    }
    @JsonAnyGetter
    public Map<String, Object> getAdditionalProperties() {
        return this.additionalProperties;
    }
    @JsonAnySetter
    public void setAdditionalProperties(String name, Object value) {
        this.additionalProperties.put(name, value);
    }
    @Override
    public String toString() {
        return ((((((((((("AppSteps"+" [stepId=")+ stepId)+", methodId=")+ methodId)+", inputMapping=")+ inputMapping)+", description=")+ description)+", additionalProperties=")+ additionalProperties)+"]");
    }
}
| mit |
npakai/enhavo | src/Enhavo/Bundle/NavigationBundle/Exception/RenderException.php | 187 | <?php
/**
 * Created by PhpStorm.
 * User: gseidel
 * Date: 28.08.18
 * Time: 04:49
 */
namespace Enhavo\Bundle\NavigationBundle\Exception;

/**
 * Thrown when a navigation item cannot be rendered.
 *
 * Marker subclass of \Exception carrying no extra state — presumably exists
 * so callers can catch rendering failures specifically (confirm at throw sites).
 */
class RenderException extends \Exception
{
}
shutchings/azure-sdk-for-net | src/SDKs/Batch/DataPlane/Azure.Batch/GeneratedProtocol/Models/PoolDisableAutoScaleOptions.cs | 3711 | // <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.Batch.Protocol.Models
{
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using System.Linq;
    /// <summary>
    /// Additional parameters for DisableAutoScale operation.
    /// </summary>
    // NOTE(review): AutoRest-generated type — hand edits will be lost on
    // regeneration.  The empty JsonProperty names suggest these values are sent
    // as request headers/query parameters rather than a JSON body; confirm
    // against the generator templates before changing them.
    public partial class PoolDisableAutoScaleOptions
    {
        /// <summary>
        /// Initializes a new instance of the PoolDisableAutoScaleOptions
        /// class.
        /// </summary>
        public PoolDisableAutoScaleOptions()
        {
            CustomInit();
        }
        /// <summary>
        /// Initializes a new instance of the PoolDisableAutoScaleOptions
        /// class.
        /// </summary>
        /// <param name="timeout">The maximum time that the server can spend
        /// processing the request, in seconds. The default is 30
        /// seconds.</param>
        /// <param name="clientRequestId">The caller-generated request
        /// identity, in the form of a GUID with no decoration such as curly
        /// braces, e.g. 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.</param>
        /// <param name="returnClientRequestId">Whether the server should
        /// return the client-request-id in the response.</param>
        /// <param name="ocpDate">The time the request was issued. Client
        /// libraries typically set this to the current system clock time; set
        /// it explicitly if you are calling the REST API directly.</param>
        public PoolDisableAutoScaleOptions(int? timeout = default(int?), System.Guid? clientRequestId = default(System.Guid?), bool? returnClientRequestId = default(bool?), System.DateTime? ocpDate = default(System.DateTime?))
        {
            Timeout = timeout;
            ClientRequestId = clientRequestId;
            ReturnClientRequestId = returnClientRequestId;
            OcpDate = ocpDate;
            CustomInit();
        }
        /// <summary>
        /// An initialization method that performs custom operations like setting defaults
        /// </summary>
        partial void CustomInit();
        /// <summary>
        /// Gets or sets the maximum time that the server can spend processing
        /// the request, in seconds. The default is 30 seconds.
        /// </summary>
        [JsonProperty(PropertyName = "")]
        public int? Timeout { get; set; }
        /// <summary>
        /// Gets or sets the caller-generated request identity, in the form of
        /// a GUID with no decoration such as curly braces, e.g.
        /// 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
        /// </summary>
        [JsonProperty(PropertyName = "")]
        public System.Guid? ClientRequestId { get; set; }
        /// <summary>
        /// Gets or sets whether the server should return the client-request-id
        /// in the response.
        /// </summary>
        [JsonProperty(PropertyName = "")]
        public bool? ReturnClientRequestId { get; set; }
        /// <summary>
        /// Gets or sets the time the request was issued. Client libraries
        /// typically set this to the current system clock time; set it
        /// explicitly if you are calling the REST API directly.
        /// </summary>
        [JsonConverter(typeof(DateTimeRfc1123JsonConverter))]
        [JsonProperty(PropertyName = "")]
        public System.DateTime? OcpDate { get; set; }
    }
}
| mit |
doronuziel71/Merchello | src/Merchello.Bazaar/Models/AddressFormModel.cs | 3084 | namespace Merchello.Bazaar.Models
{
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Web.Mvc;
using Merchello.Core;
using Merchello.Core.Models;
    /// <summary>
    /// A site address model.
    /// Implements IAddress so it can be mapped onto Merchello core addresses;
    /// DataAnnotations drive MVC form validation and labels.
    /// </summary>
    public class AddressFormModel : IAddress
    {
        /// <summary>
        /// Gets or sets the name.
        /// </summary>
        // NOTE(review): "reqired" typo below is a user-visible validation
        // message — fix separately; it is runtime text, not a comment.
        [Required(ErrorMessage = "Name is reqired"), Display(Name = "Name")]
        public string Name { get; set; }
        /// <summary>
        /// Gets or sets the address 1.
        /// </summary>
        [Required(ErrorMessage = "Address 1 is required"), Display(Name = "Address 1")]
        public string Address1 { get; set; }
        /// <summary>
        /// Gets or sets the address 2.
        /// </summary>
        [Display(Name = "Address 2")]
        public string Address2 { get; set; }
        /// <summary>
        /// Gets or sets the locality.
        /// </summary>
        [Required(ErrorMessage = "Locality is required"), Display(Name = "Locality")]
        public string Locality { get; set; }
        /// <summary>
        /// Gets or sets the region.
        /// </summary>
        [Display(Name = "Region")]
        public string Region { get; set; }
        /// <summary>
        /// Gets or sets the postal code.
        /// </summary>
        [Required(ErrorMessage = "Postal code is required"), Display(Name = "Postal code")]
        public string PostalCode { get; set; }
        /// <summary>
        /// Gets or sets the country code.
        /// </summary>
        public string CountryCode { get; set; }
        /// <summary>
        /// Gets or sets the countries used to populate the country drop-down.
        /// </summary>
        [Display(Name = "Country")]
        public IEnumerable<SelectListItem> Countries { get; set; }
        /// <summary>
        /// Gets or sets the phone.
        /// </summary>
        [Display(Name = "Phone")]
        public string Phone { get; set; }
        /// <summary>
        /// Gets or sets the email.
        /// </summary>
        [Required(ErrorMessage = "Email address is required"), Display(Name = "Email Address")]
        [EmailAddress]
        public string Email { get; set; }
        /// <summary>
        /// Gets or sets the organization.
        /// </summary>
        [Display(Name = "Organization or Company")]
        public string Organization { get; set; }
        /// <summary>
        /// Gets or sets a value indicating whether is commercial.
        /// </summary>
        /// <remarks>
        /// We're not going to use this at the moment
        /// </remarks>
        public bool IsCommercial { get; set; }
        /// <summary>
        /// Gets or sets a value indicating whether billing is shipping.
        /// </summary>
        [Display(Name = "Ship to this address")]
        public bool BillingIsShipping { get; set; }
        /// <summary>
        /// Gets or sets the <see cref="AddressType"/> (billing vs. shipping).
        /// </summary>
        public AddressType AddressType { get; set; }
    }
} | mit |
kroepke/graphql-java | src/main/java/graphql/validation/ValidationErrorType.java | 651 | package graphql.validation;
/**
 * The categories of validation error a GraphQL document can produce.
 *
 * NOTE(review): appending new constants is safe; reordering would change
 * {@code ordinal()} values — confirm no caller relies on ordinals first.
 */
public enum ValidationErrorType {
    DefaultForNonNullArgument,
    WrongType,
    UnknownType,
    SubSelectionRequired,
    SubSelectionNotAllowed,
    InvalidSyntax,
    BadValueForDefaultArg,
    FieldUndefined,
    InlineFragmentTypeConditionInvalid,
    FragmentTypeConditionInvalid,
    UnknownArgument,
    UndefinedFragment,
    NonInputTypeOnVariable,
    UnusedFragment,
    MissingFieldArgument,
    MissingDirectiveArgument,
    VariableTypeMismatch,
    UnknownDirective,
    MisplacedDirective,
    UndefinedVariable,
    UnusedVariable,
    FragmentCycle,
    FieldsConflict,
    InvalidFragmentType
}
| mit |
lucasmazza/colors | node_modules/stylus/testing/index.js | 384 |
/**
* Module dependencies.
*/
// Ad-hoc harness: renders testing/small.styl with the local stylus build and
// prints the resulting CSS to stdout.
var stylus = require('../')
  , fs = require('fs')
  , path = 'testing/small.styl'
  , str = fs.readFileSync(path, 'utf8');
// 'start' is only read by the commented-out timing line below.
var start = new Date;
// Normalize comma spacing in the source before rendering.
str = str.replace(/\s*,\s*/g, ', ');
stylus(str)
  .set('filename', path)
  .render(function(err, css){
    if (err) throw err;
    console.log(css);
    // console.log('%dms', new Date - start);
  });
puaykai/noodles | grails-app/assets/javascripts/dependencies/node_modules/material-ui/svg-icons/action/line-style.js | 934 | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _pure = require('recompose/pure');
var _pure2 = _interopRequireDefault(_pure);
var _SvgIcon = require('../../SvgIcon');
var _SvgIcon2 = _interopRequireDefault(_SvgIcon);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Babel-compiled material-ui SVG icon ("line style"): a stateless component
// that forwards props to SvgIcon, wrapped in recompose/pure to skip
// re-renders on shallow-equal props.  Generated output — avoid hand edits.
var ActionLineStyle = function ActionLineStyle(props) {
  return _react2.default.createElement(
    _SvgIcon2.default,
    props,
    _react2.default.createElement('path', { d: 'M3 16h5v-2H3v2zm6.5 0h5v-2h-5v2zm6.5 0h5v-2h-5v2zM3 20h2v-2H3v2zm4 0h2v-2H7v2zm4 0h2v-2h-2v2zm4 0h2v-2h-2v2zm4 0h2v-2h-2v2zM3 12h8v-2H3v2zm10 0h8v-2h-8v2zM3 4v4h18V4H3z' })
  );
};
ActionLineStyle = (0, _pure2.default)(ActionLineStyle);
ActionLineStyle.displayName = 'ActionLineStyle';
exports.default = ActionLineStyle;
tedyhy/SCI | kissy-1.4.9/tools/module-compiler/src/com/google/javascript/jscomp/SyntheticAst.java | 1534 | /*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
/**
 * An AST generated totally by the compiler.
 *
 * @author nicksantos@google.com (Nick Santos)
 */
class SyntheticAst implements SourceAst {
  private static final long serialVersionUID = 1L;
  // Name reported for nodes of this AST; no file on disk backs it.
  private final String sourceName;
  // Root SCRIPT node; recreated empty by clearAst().
  private Node root;
  SyntheticAst(String sourceName) {
    this.sourceName = sourceName;
    clearAst();
  }
  @Override
  public Node getAstRoot(AbstractCompiler compiler) {
    return root;
  }
  @Override
  public void clearAst() {
    // Reset to an empty SCRIPT node tagged with this AST's synthetic name.
    root = new Node(Token.SCRIPT);
    root.putProp(Node.SOURCENAME_PROP, sourceName);
  }
  @Override
  public SourceFile getSourceFile() {
    // Synthetic ASTs are never backed by a SourceFile.
    return null;
  }
  @Override
  public void setSourceFile(SourceFile file) {
    throw new IllegalStateException(
        "Cannot set a source file for a synthetic AST");
  }
}
| mit |
maysam/activeadmin | lib/active_admin/base_controller.rb | 2697 | require 'active_admin/base_controller/authorization'
require 'active_admin/base_controller/menu'
module ActiveAdmin
# BaseController for ActiveAdmin.
# It implements ActiveAdmin controllers core features.
class BaseController < ::InheritedResources::Base
helper ::ActiveAdmin::ViewHelpers
helper_method :env
layout :determine_active_admin_layout
before_action :only_render_implemented_actions
before_action :authenticate_active_admin_user
class << self
# Ensure that this method is available for the DSL
public :actions
# Reference to the Resource object which initialized
# this controller
attr_accessor :active_admin_config
end
# By default Rails will render un-implemented actions when the view exists. Because Active
# Admin allows you to not render any of the actions by using the #actions method, we need
# to check if they are implemented.
def only_render_implemented_actions
raise AbstractController::ActionNotFound unless action_methods.include?(params[:action])
end
include Authorization
include Menu
private
# Calls the authentication method as defined in ActiveAdmin.authentication_method
def authenticate_active_admin_user
send(active_admin_namespace.authentication_method) if active_admin_namespace.authentication_method
end
def current_active_admin_user
send(active_admin_namespace.current_user_method) if active_admin_namespace.current_user_method
end
helper_method :current_active_admin_user
def current_active_admin_user?
!!current_active_admin_user
end
helper_method :current_active_admin_user?
def active_admin_config
self.class.active_admin_config
end
helper_method :active_admin_config
def active_admin_namespace
active_admin_config.namespace
end
helper_method :active_admin_namespace
ACTIVE_ADMIN_ACTIONS = [:index, :show, :new, :create, :edit, :update, :destroy]
# Determine which layout to use.
#
# 1. If we're rendering a standard Active Admin action, we want layout(false)
# because these actions are subclasses of the Base page (which implements
# all the required layout code)
# 2. If we're rendering a custom action, we'll use the active_admin layout so
# that users can render any template inside Active Admin.
def determine_active_admin_layout
ACTIVE_ADMIN_ACTIONS.include?(params[:action].to_sym) ? false : 'active_admin'
end
def active_admin_root
controller, action = active_admin_namespace.root_to.split '#'
{controller: controller, action: action}
end
end
end
| mit |
enderdickerson/allReady | AllReadyApp/Web-App/AllReady.UnitTest/ViewModels/Event/EventViewModelShould.cs | 5825 | using System;
using System.Collections.Generic;
using AllReady.Models;
using AllReady.UnitTest.Extensions;
using AllReady.ViewModels.Event;
using AllReady.ViewModels.Task;
using Xunit;
namespace AllReady.UnitTest.ViewModels.Event
{
    // Unit tests for EventViewModel's constructor branches.
    // NOTE(review): xUnit's signature is Assert.Equal(expected, actual); the
    // Assert.Equal calls below pass (actual, expected) — results are identical
    // but failure messages read backwards.  Also note the "IstNull" typo in a
    // test name; renaming is a code change, out of scope for this doc pass.
    public class EventViewModelShould
    {
        //happy path test. set up data to get all possible properties populated when EventViewModel is returned from handler
        [Fact(Skip = "NotImplemented")]
        public void ConstructEventViewModel_WithTheCorrectData()
        {
        }
        [Fact]
        public void SetCampaignIdCampaignNameAndTimeZoneId_WhenConstructingWithNonNullCampaign()
        {
            var campaign = new Models.Campaign { Id = 1, Name = "Campaignname", TimeZoneId = "CampaignTimeZoneId" };
            var @event = new Models.Event { Campaign = campaign };
            var sut = new EventViewModel(@event);
            Assert.Equal(sut.CampaignId, campaign.Id);
            Assert.Equal(sut.CampaignName, campaign.Name);
            Assert.Equal(sut.TimeZoneId, campaign.TimeZoneId);
        }
        [Fact]
        public void SetOrganizationIdAndOrganizationName_WhenConstructingWithNonNullCampaignAndNonNullManagingOrganization()
        {
            var campaign = new Models.Campaign { ManagingOrganization = new Organization { Id = 1, Name = "OrgName" }};
            var @event = new Models.Event { Campaign = campaign };
            var sut = new EventViewModel(@event);
            Assert.Equal(sut.OrganizationId, campaign.ManagingOrganization.Id);
            Assert.Equal(sut.OrganizationName, campaign.ManagingOrganization.Name);
        }
        [Fact]
        public void SetHasPrivacyPolicyToFalse_WhenEventsCampaignsOrganizationsPrivacyPolicyIsNullOrEmpty_AndConstructingWithNonNullCampaignAndNonNullManagingOrganization()
        {
            var @event = new Models.Event { Campaign = new Models.Campaign { ManagingOrganization = new Organization() } };
            var sut = new EventViewModel(@event);
            Assert.False(sut.HasPrivacyPolicy);
        }
        [Fact]
        public void SetHasPrivacyPolicyToTrue_WhenEventsCampaignsOrganizationsPrivacyPolicyIsNotNullOrEmpty_AndConstructingWithNonNullCampaignAndNonNullManagingOrganization()
        {
            var @event = new Models.Event { Campaign = new Models.Campaign { ManagingOrganization = new Organization { PrivacyPolicy = "PrivacyPolicy" }}};
            var sut = new EventViewModel(@event);
            Assert.True(sut.HasPrivacyPolicy);
        }
        [Fact]
        public void SetLocationToNewLocationViewModelWithCorrectData_WhenEventsLocationIsNotNull_AndConstructingWithNonNullCampaignAndNonNullManagingOrganization()
        {
            var location = new Location { Address1 = "Address1", Address2 = "Address2", City = "City", State = "State", PostalCode = "PostalCode", Country = "Country" };
            var @event = new Models.Event { Location = location };
            var sut = new EventViewModel(@event);
            Assert.Equal(sut.Location.Address1, location.Address1);
            Assert.Equal(sut.Location.Address2, location.Address2);
            Assert.Equal(sut.Location.City, location.City);
            Assert.Equal(sut.Location.State, location.State);
            Assert.Equal(sut.Location.PostalCode, location.PostalCode);
            Assert.Equal(sut.Location.Country, location.Country);
        }
        [Fact]
        public void SetLocationToNull_WhenEventsLocationIstNull_AndConstructingWithNonNullCampaignAndNonNullManagingOrganization()
        {
            var sut = new EventViewModel(new Models.Event());
            Assert.Null(sut.Location);
        }
        [Fact]
        public void SetIsClosedToTrue_WhenEventsEndDateTimeIsLessThanDateTimeOffsetUtcNow()
        {
            var sut = new EventViewModel(new Models.Event { EndDateTime = DateTimeOffset.MinValue });
            Assert.True(sut.IsClosed);
        }
        [Fact]
        public void SetIsClosedToFalse_WhenEventsEndDateTimeIsGreaterThanDateTimeOffsetUtcNow()
        {
            var sut = new EventViewModel(new Models.Event { EndDateTime = DateTimeOffset.MaxValue });
            Assert.False(sut.IsClosed);
        }
        [Fact]
        public void SetTasksToListOfTaskViewModelsInAscendingOrderByStartDateTime_WhenEventsTasksAreNotNull()
        {
            var @event = new Models.Event { Tasks = new List<AllReadyTask> { new AllReadyTask { StartDateTime = DateTimeOffset.UtcNow.AddDays(2)}, new AllReadyTask { StartDateTime = DateTimeOffset.UtcNow.AddDays(1) }}};
            var sut = new EventViewModel(@event);
            Assert.IsType<List<TaskViewModel>>(sut.Tasks);
            Assert.Equal(sut.Tasks.IsOrderedByAscending(x => x.StartDateTime), true);
        }
        [Fact]
        public void SetTasksToEmptyListOfTaskViewModels_WhenEventsTasksAreNull()
        {
            var sut = new EventViewModel(new Models.Event());
            Assert.IsType<List<TaskViewModel>>(sut.Tasks);
            Assert.True(sut.Tasks.Count == 0);
        }
        [Fact]
        public void SetRequiredSkillsToListOfSkillViewModelsForEventsRequiredSkills_WhenEventsRequiredSkillsIsNotNull()
        {
            var @event = new Models.Event { RequiredSkills = new List<EventSkill> { new EventSkill { Skill = new Skill() }, new EventSkill { Skill = new Skill() }}};
            var sut = new EventViewModel(@event);
            Assert.IsType<List<SkillViewModel>>(sut.RequiredSkills);
            Assert.True(sut.RequiredSkills.Count > 0);
        }
        [Fact]
        public void SetRequiredSkillsToNull_WhenEventsRequiredSkillsIsNull()
        {
            var sut = new EventViewModel(new Models.Event { RequiredSkills = null });
            Assert.Null(sut.RequiredSkills);
        }
    }
} | mit |
electric-eloquence/fepper-wordpress | backend/wordpress/wp-admin/includes/class-wp-filesystem-base.php | 23141 | <?php
/**
* Base WordPress Filesystem
*
* @package WordPress
* @subpackage Filesystem
*/
/**
* Base WordPress Filesystem class which Filesystem implementations extend.
*
* @since 2.5.0
*/
class WP_Filesystem_Base {
	/**
	 * Whether to display debug data for the connection.
	 *
	 * @since 2.5.0
	 * @var bool
	 */
	public $verbose = false;

	/**
	 * Cached list of local filepaths to mapped remote filepaths.
	 *
	 * @since 2.7.0
	 * @var array
	 */
	public $cache = array();

	/**
	 * The Access method of the current connection, Set automatically.
	 *
	 * @since 2.5.0
	 * @var string
	 */
	public $method = '';

	/**
	 * Errors from the filesystem connection/operations.
	 *
	 * NOTE(review): presumably populated by the concrete transport
	 * subclasses (FTP/SSH/direct) — confirm before relying on it here.
	 *
	 * @var WP_Error
	 */
	public $errors = null;

	/**
	 * Connection options for the transport.
	 *
	 * NOTE(review): shape depends on the subclass (host, credentials, etc.)
	 * — verify keys against the specific WP_Filesystem_* implementation.
	 */
	public $options = array();
/**
* Returns the path on the remote filesystem of ABSPATH.
*
* @since 2.7.0
*
* @return string The location of the remote path.
*/
	public function abspath() {
		// find_folder() returns false when the path cannot be located, so
		// $folder may be falsy here rather than a string.
		$folder = $this->find_folder( ABSPATH );
		// Perhaps the FTP folder is rooted at the WordPress install.
		// Check for wp-includes folder in root. Could have some false positives, but rare.
		if ( ! $folder && $this->is_dir( '/' . WPINC ) ) {
			$folder = '/';
		}
		return $folder;
	}
/**
* Returns the path on the remote filesystem of WP_CONTENT_DIR.
*
* @since 2.7.0
*
* @return string The location of the remote path.
*/
public function wp_content_dir() {
return $this->find_folder( WP_CONTENT_DIR );
}
/**
* Returns the path on the remote filesystem of WP_PLUGIN_DIR.
*
* @since 2.7.0
*
* @return string The location of the remote path.
*/
public function wp_plugins_dir() {
return $this->find_folder( WP_PLUGIN_DIR );
}
/**
* Returns the path on the remote filesystem of the Themes Directory.
*
* @since 2.7.0
*
* @param string|false $theme Optional. The theme stylesheet or template for the directory.
* Default false.
* @return string The location of the remote path.
*/
public function wp_themes_dir( $theme = false ) {
$theme_root = get_theme_root( $theme );
// Account for relative theme roots.
if ( '/themes' === $theme_root || ! is_dir( $theme_root ) ) {
$theme_root = WP_CONTENT_DIR . $theme_root;
}
return $this->find_folder( $theme_root );
}
/**
* Returns the path on the remote filesystem of WP_LANG_DIR.
*
* @since 3.2.0
*
* @return string The location of the remote path.
*/
public function wp_lang_dir() {
return $this->find_folder( WP_LANG_DIR );
}
/**
* Locates a folder on the remote filesystem.
*
* @since 2.5.0
* @deprecated 2.7.0 use WP_Filesystem::abspath() or WP_Filesystem::wp_*_dir() instead.
* @see WP_Filesystem::abspath()
* @see WP_Filesystem::wp_content_dir()
* @see WP_Filesystem::wp_plugins_dir()
* @see WP_Filesystem::wp_themes_dir()
* @see WP_Filesystem::wp_lang_dir()
*
* @param string $base The folder to start searching from.
* @param bool $echo True to display debug information.
* Default false.
* @return string The location of the remote path.
*/
public function find_base_dir( $base = '.', $echo = false ) {
_deprecated_function( __FUNCTION__, '2.7.0', 'WP_Filesystem::abspath() or WP_Filesystem::wp_*_dir()' );
$this->verbose = $echo;
return $this->abspath();
}
/**
* Locates a folder on the remote filesystem.
*
* @since 2.5.0
* @deprecated 2.7.0 use WP_Filesystem::abspath() or WP_Filesystem::wp_*_dir() methods instead.
* @see WP_Filesystem::abspath()
* @see WP_Filesystem::wp_content_dir()
* @see WP_Filesystem::wp_plugins_dir()
* @see WP_Filesystem::wp_themes_dir()
* @see WP_Filesystem::wp_lang_dir()
*
* @param string $base The folder to start searching from.
* @param bool $echo True to display debug information.
* @return string The location of the remote path.
*/
public function get_base_dir( $base = '.', $echo = false ) {
_deprecated_function( __FUNCTION__, '2.7.0', 'WP_Filesystem::abspath() or WP_Filesystem::wp_*_dir()' );
$this->verbose = $echo;
return $this->abspath();
}
/**
* Locates a folder on the remote filesystem.
*
* Assumes that on Windows systems, Stripping off the Drive
* letter is OK Sanitizes \\ to / in Windows filepaths.
*
* @since 2.7.0
*
* @param string $folder the folder to locate.
* @return string|false The location of the remote path, false on failure.
*/
	public function find_folder( $folder ) {
		// Serve repeat lookups from the per-request cache.
		if ( isset( $this->cache[ $folder ] ) ) {
			return $this->cache[ $folder ];
		}
		// For FTP-based transports the remote root may differ from the local
		// filesystem layout, so honor any user-defined FTP_* path constants first.
		if ( stripos( $this->method, 'ftp' ) !== false ) {
			$constant_overrides = array(
				'FTP_BASE' => ABSPATH,
				'FTP_CONTENT_DIR' => WP_CONTENT_DIR,
				'FTP_PLUGIN_DIR' => WP_PLUGIN_DIR,
				'FTP_LANG_DIR' => WP_LANG_DIR,
			);
			// Direct matches ( folder = CONSTANT/ ).
			foreach ( $constant_overrides as $constant => $dir ) {
				if ( ! defined( $constant ) ) {
					continue;
				}
				if ( $folder === $dir ) {
					return trailingslashit( constant( $constant ) );
				}
			}
			// Prefix matches ( folder = CONSTANT/subdir ),
			foreach ( $constant_overrides as $constant => $dir ) {
				if ( ! defined( $constant ) ) {
					continue;
				}
				if ( 0 === stripos( $folder, $dir ) ) { // $folder starts with $dir.
					$potential_folder = preg_replace( '#^' . preg_quote( $dir, '#' ) . '/#i', trailingslashit( constant( $constant ) ), $folder );
					$potential_folder = trailingslashit( $potential_folder );
					if ( $this->is_dir( $potential_folder ) ) {
						$this->cache[ $folder ] = $potential_folder;
						return $potential_folder;
					}
				}
			}
		} elseif ( 'direct' === $this->method ) {
			$folder = str_replace( '\\', '/', $folder ); // Windows path sanitisation.
			// Direct access needs no searching: the local path IS the remote path.
			return trailingslashit( $folder );
		}
		$folder = preg_replace( '|^([a-z]{1}):|i', '', $folder ); // Strip out Windows drive letter if it's there.
		$folder = str_replace( '\\', '/', $folder ); // Windows path sanitisation.
		// Re-check the cache under the sanitized key (it differs from the raw key above).
		if ( isset( $this->cache[ $folder ] ) ) {
			return $this->cache[ $folder ];
		}
		if ( $this->exists( $folder ) ) { // Folder exists at that absolute path.
			$folder = trailingslashit( $folder );
			$this->cache[ $folder ] = $folder;
			return $folder;
		}
		// Fall back to walking the remote tree from the transport's CWD.
		$return = $this->search_for_folder( $folder );
		if ( $return ) {
			$this->cache[ $folder ] = $return;
		}
		return $return;
	}
/**
* Locates a folder on the remote filesystem.
*
* Expects Windows sanitized path.
*
* @since 2.7.0
*
* @param string $folder The folder to locate.
* @param string $base The folder to start searching from.
* @param bool $loop If the function has recursed. Internal use only.
* @return string|false The location of the remote path, false to cease looping.
*/
	public function search_for_folder( $folder, $base = '.', $loop = false ) {
		// Default to searching from the transport's current working directory.
		if ( empty( $base ) || '.' === $base ) {
			$base = trailingslashit( $this->cwd() );
		}
		$folder = untrailingslashit( $folder );
		if ( $this->verbose ) {
			/* translators: 1: Folder to locate, 2: Folder to start searching from. */
			printf( "\n" . __( 'Looking for %1$s in %2$s' ) . "<br/>\n", $folder, $base );
		}
		// Split the target path into components; $last_path is the final segment.
		$folder_parts = explode( '/', $folder );
		$folder_part_keys = array_keys( $folder_parts );
		$last_index = array_pop( $folder_part_keys );
		$last_path = $folder_parts[ $last_index ];
		// Listing of the directory currently being searched.
		$files = $this->dirlist( $base );
		foreach ( $folder_parts as $index => $key ) {
			if ( $index == $last_index ) {
				continue; // We want this to be caught by the next code block.
			}
			/*
			 * Working from /home/ to /user/ to /wordpress/ see if that file exists within
			 * the current folder, If it's found, change into it and follow through looking
			 * for it. If it can't find WordPress down that route, it'll continue onto the next
			 * folder level, and see if that matches, and so on. If it reaches the end, and still
			 * can't find it, it'll return false for the entire function.
			 */
			if ( isset( $files[ $key ] ) ) {
				// Let's try that folder:
				$newdir = trailingslashit( path_join( $base, $key ) );
				if ( $this->verbose ) {
					/* translators: %s: Directory name. */
					printf( "\n" . __( 'Changing to %s' ) . "<br/>\n", $newdir );
				}
				// Only search for the remaining path tokens in the directory, not the full path again.
				$newfolder = implode( '/', array_slice( $folder_parts, $index + 1 ) );
				$ret = $this->search_for_folder( $newfolder, $newdir, $loop );
				if ( $ret ) {
					return $ret;
				}
			}
		}
		// Only check this as a last resort, to prevent locating the incorrect install.
		// All above procedures will fail quickly if this is the right branch to take.
		if ( isset( $files[ $last_path ] ) ) {
			if ( $this->verbose ) {
				/* translators: %s: Directory name. */
				printf( "\n" . __( 'Found %s' ) . "<br/>\n", $base . $last_path );
			}
			return trailingslashit( $base . $last_path );
		}
		// Prevent this function from looping again.
		// No need to proceed if we've just searched in `/`.
		if ( $loop || '/' === $base ) {
			return false;
		}
		// As an extra last resort, Change back to / if the folder wasn't found.
		// This comes into effect when the CWD is /home/user/ but WP is at /var/www/....
		return $this->search_for_folder( $folder, '/', true );
	}
/**
* Returns the *nix-style file permissions for a file.
*
* From the PHP documentation page for fileperms().
*
* @link https://www.php.net/manual/en/function.fileperms.php
*
* @since 2.5.0
*
* @param string $file String filename.
* @return string The *nix-style representation of permissions.
*/
public function gethchmod( $file ) {
$perms = intval( $this->getchmod( $file ), 8 );
if ( ( $perms & 0xC000 ) == 0xC000 ) { // Socket.
$info = 's';
} elseif ( ( $perms & 0xA000 ) == 0xA000 ) { // Symbolic Link.
$info = 'l';
} elseif ( ( $perms & 0x8000 ) == 0x8000 ) { // Regular.
$info = '-';
} elseif ( ( $perms & 0x6000 ) == 0x6000 ) { // Block special.
$info = 'b';
} elseif ( ( $perms & 0x4000 ) == 0x4000 ) { // Directory.
$info = 'd';
} elseif ( ( $perms & 0x2000 ) == 0x2000 ) { // Character special.
$info = 'c';
} elseif ( ( $perms & 0x1000 ) == 0x1000 ) { // FIFO pipe.
$info = 'p';
} else { // Unknown.
$info = 'u';
}
// Owner.
$info .= ( ( $perms & 0x0100 ) ? 'r' : '-' );
$info .= ( ( $perms & 0x0080 ) ? 'w' : '-' );
$info .= ( ( $perms & 0x0040 ) ?
( ( $perms & 0x0800 ) ? 's' : 'x' ) :
( ( $perms & 0x0800 ) ? 'S' : '-' ) );
// Group.
$info .= ( ( $perms & 0x0020 ) ? 'r' : '-' );
$info .= ( ( $perms & 0x0010 ) ? 'w' : '-' );
$info .= ( ( $perms & 0x0008 ) ?
( ( $perms & 0x0400 ) ? 's' : 'x' ) :
( ( $perms & 0x0400 ) ? 'S' : '-' ) );
// World.
$info .= ( ( $perms & 0x0004 ) ? 'r' : '-' );
$info .= ( ( $perms & 0x0002 ) ? 'w' : '-' );
$info .= ( ( $perms & 0x0001 ) ?
( ( $perms & 0x0200 ) ? 't' : 'x' ) :
( ( $perms & 0x0200 ) ? 'T' : '-' ) );
return $info;
}
/**
* Gets the permissions of the specified file or filepath in their octal format.
*
* @since 2.5.0
*
* @param string $file Path to the file.
* @return string Mode of the file (the last 3 digits).
*/
public function getchmod( $file ) {
return '777';
}
/**
* Converts *nix-style file permissions to a octal number.
*
* Converts '-rw-r--r--' to 0644
* From "info at rvgate dot nl"'s comment on the PHP documentation for chmod()
*
* @link https://www.php.net/manual/en/function.chmod.php#49614
*
* @since 2.5.0
*
* @param string $mode string The *nix-style file permission.
* @return int octal representation
*/
	public function getnumchmodfromh( $mode ) {
		$realmode = '';
		$legal = array( '', 'w', 'r', 'x', '-' );
		$attarray = preg_split( '//', $mode );
		// Keep only characters found in $legal. NOTE(review): array_search() returns
		// key 0 (the '' entry) for the empty strings preg_split('//') produces, which
		// is falsy and therefore skipped by the truthiness check — a long-standing
		// quirk that this legacy conversion relies on.
		for ( $i = 0, $c = count( $attarray ); $i < $c; $i++ ) {
			$key = array_search( $attarray[ $i ], $legal, true );
			if ( $key ) {
				$realmode .= $legal[ $key ];
			}
		}
		// Normalise to a 10-character *nix permission string, e.g. '-rw-r--r--'.
		$mode = str_pad( $realmode, 10, '-', STR_PAD_LEFT );
		// Translate each symbolic character to its octal digit.
		$trans = array(
			'-' => '0',
			'r' => '4',
			'w' => '2',
			'x' => '1',
		);
		$mode = strtr( $mode, $trans );
		// Sum each owner/group/world triplet; PHP coerces the digit strings to
		// integers for the '+' operations, then back to string on concatenation.
		$newmode = $mode[0];
		$newmode .= $mode[1] + $mode[2] + $mode[3];
		$newmode .= $mode[4] + $mode[5] + $mode[6];
		$newmode .= $mode[7] + $mode[8] + $mode[9];
		return $newmode;
	}
/**
* Determines if the string provided contains binary characters.
*
* @since 2.7.0
*
* @param string $text String to test against.
* @return bool True if string is binary, false otherwise.
*/
	public function is_binary( $text ) {
		// Any byte outside printable ASCII chr(32)..chr(126) marks the string as
		// binary (the previous comment said chr(127), but \x7E is 126). Note this
		// also flags multibyte text (e.g. UTF-8) as binary.
		return (bool) preg_match( '|[^\x20-\x7E]|', $text );
	}
/**
* Changes the owner of a file or directory.
*
* Default behavior is to do nothing, override this in your subclass, if desired.
*
* @since 2.5.0
*
* @param string $file Path to the file or directory.
* @param string|int $owner A user name or number.
* @param bool $recursive Optional. If set to true, changes file owner recursively.
* Default false.
* @return bool True on success, false on failure.
*/
public function chown( $file, $owner, $recursive = false ) {
return false;
}
/**
* Connects filesystem.
*
* @since 2.5.0
* @abstract
*
* @return bool True on success, false on failure (always true for WP_Filesystem_Direct).
*/
public function connect() {
return true;
}
/**
* Reads entire file into a string.
*
* @since 2.5.0
* @abstract
*
* @param string $file Name of the file to read.
* @return string|false Read data on success, false on failure.
*/
public function get_contents( $file ) {
return false;
}
/**
* Reads entire file into an array.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to the file.
* @return array|false File contents in an array on success, false on failure.
*/
public function get_contents_array( $file ) {
return false;
}
/**
* Writes a string to a file.
*
* @since 2.5.0
* @abstract
*
* @param string $file Remote path to the file where to write the data.
* @param string $contents The data to write.
* @param int|false $mode Optional. The file permissions as octal number, usually 0644.
* Default false.
* @return bool True on success, false on failure.
*/
public function put_contents( $file, $contents, $mode = false ) {
return false;
}
/**
* Gets the current working directory.
*
* @since 2.5.0
* @abstract
*
* @return string|false The current working directory on success, false on failure.
*/
public function cwd() {
return false;
}
/**
* Changes current directory.
*
* @since 2.5.0
* @abstract
*
* @param string $dir The new current directory.
* @return bool True on success, false on failure.
*/
public function chdir( $dir ) {
return false;
}
/**
* Changes the file group.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to the file.
* @param string|int $group A group name or number.
* @param bool $recursive Optional. If set to true, changes file group recursively.
* Default false.
* @return bool True on success, false on failure.
*/
public function chgrp( $file, $group, $recursive = false ) {
return false;
}
/**
* Changes filesystem permissions.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to the file.
* @param int|false $mode Optional. The permissions as octal number, usually 0644 for files,
* 0755 for directories. Default false.
* @param bool $recursive Optional. If set to true, changes file permissions recursively.
* Default false.
* @return bool True on success, false on failure.
*/
public function chmod( $file, $mode = false, $recursive = false ) {
return false;
}
/**
* Gets the file owner.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to the file.
* @return string|false Username of the owner on success, false on failure.
*/
public function owner( $file ) {
return false;
}
/**
* Gets the file's group.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to the file.
* @return string|false The group on success, false on failure.
*/
public function group( $file ) {
return false;
}
/**
* Copies a file.
*
* @since 2.5.0
* @abstract
*
* @param string $source Path to the source file.
* @param string $destination Path to the destination file.
* @param bool $overwrite Optional. Whether to overwrite the destination file if it exists.
* Default false.
* @param int|false $mode Optional. The permissions as octal number, usually 0644 for files,
* 0755 for dirs. Default false.
* @return bool True on success, false on failure.
*/
public function copy( $source, $destination, $overwrite = false, $mode = false ) {
return false;
}
/**
* Moves a file.
*
* @since 2.5.0
* @abstract
*
* @param string $source Path to the source file.
* @param string $destination Path to the destination file.
* @param bool $overwrite Optional. Whether to overwrite the destination file if it exists.
* Default false.
* @return bool True on success, false on failure.
*/
public function move( $source, $destination, $overwrite = false ) {
return false;
}
/**
* Deletes a file or directory.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to the file or directory.
* @param bool $recursive Optional. If set to true, deletes files and folders recursively.
* Default false.
* @param string|false $type Type of resource. 'f' for file, 'd' for directory.
* Default false.
* @return bool True on success, false on failure.
*/
public function delete( $file, $recursive = false, $type = false ) {
return false;
}
/**
* Checks if a file or directory exists.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to file or directory.
* @return bool Whether $file exists or not.
*/
public function exists( $file ) {
return false;
}
/**
* Checks if resource is a file.
*
* @since 2.5.0
* @abstract
*
* @param string $file File path.
* @return bool Whether $file is a file.
*/
public function is_file( $file ) {
return false;
}
/**
* Checks if resource is a directory.
*
* @since 2.5.0
* @abstract
*
* @param string $path Directory path.
* @return bool Whether $path is a directory.
*/
public function is_dir( $path ) {
return false;
}
/**
* Checks if a file is readable.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to file.
* @return bool Whether $file is readable.
*/
public function is_readable( $file ) {
return false;
}
/**
* Checks if a file or directory is writable.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to file or directory.
* @return bool Whether $file is writable.
*/
public function is_writable( $file ) {
return false;
}
/**
* Gets the file's last access time.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to file.
* @return int|false Unix timestamp representing last access time, false on failure.
*/
public function atime( $file ) {
return false;
}
/**
* Gets the file modification time.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to file.
* @return int|false Unix timestamp representing modification time, false on failure.
*/
public function mtime( $file ) {
return false;
}
/**
* Gets the file size (in bytes).
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to file.
* @return int|false Size of the file in bytes on success, false on failure.
*/
public function size( $file ) {
return false;
}
/**
* Sets the access and modification times of a file.
*
* Note: If $file doesn't exist, it will be created.
*
* @since 2.5.0
* @abstract
*
* @param string $file Path to file.
* @param int $time Optional. Modified time to set for file.
* Default 0.
* @param int $atime Optional. Access time to set for file.
* Default 0.
* @return bool True on success, false on failure.
*/
public function touch( $file, $time = 0, $atime = 0 ) {
return false;
}
/**
* Creates a directory.
*
* @since 2.5.0
* @abstract
*
* @param string $path Path for new directory.
* @param int|false $chmod Optional. The permissions as octal number (or false to skip chmod).
* Default false.
* @param string|int $chown Optional. A user name or number (or false to skip chown).
* Default false.
* @param string|int $chgrp Optional. A group name or number (or false to skip chgrp).
* Default false.
* @return bool True on success, false on failure.
*/
public function mkdir( $path, $chmod = false, $chown = false, $chgrp = false ) {
return false;
}
/**
* Deletes a directory.
*
* @since 2.5.0
* @abstract
*
* @param string $path Path to directory.
* @param bool $recursive Optional. Whether to recursively remove files/directories.
* Default false.
* @return bool True on success, false on failure.
*/
public function rmdir( $path, $recursive = false ) {
return false;
}
/**
* Gets details for files in a directory or a specific file.
*
* @since 2.5.0
* @abstract
*
* @param string $path Path to directory or file.
* @param bool $include_hidden Optional. Whether to include details of hidden ("." prefixed) files.
* Default true.
* @param bool $recursive Optional. Whether to recursively include file details in nested directories.
* Default false.
* @return array|false {
* Array of files. False if unable to list directory contents.
*
* @type string $name Name of the file or directory.
* @type string $perms *nix representation of permissions.
* @type int $permsn Octal representation of permissions.
* @type string $owner Owner name or ID.
* @type int $size Size of file in bytes.
* @type int $lastmodunix Last modified unix timestamp.
* @type mixed $lastmod Last modified month (3 letter) and day (without leading 0).
* @type int $time Last modified time.
* @type string $type Type of resource. 'f' for file, 'd' for directory.
* @type mixed $files If a directory and $recursive is true, contains another array of files.
* }
*/
public function dirlist( $path, $include_hidden = true, $recursive = false ) {
return false;
}
}
| mit |
AVDer/DRON_SW | DRON_Embedded/src/Timer.cpp | 960 | //
// This file is part of the GNU ARM Eclipse distribution.
// Copyright (c) 2014 Liviu Ionescu.
//
#include "Timer.h"
#include "cortexm/ExceptionHandlers.h"
// ----------------------------------------------------------------------------
#if defined(USE_HAL_DRIVER)
extern "C" void HAL_IncTick(void);
#endif
// ----------------------------------------------------------------------------
volatile Timer::ticks_t Timer::ms_delayCount;
// ----------------------------------------------------------------------------
// Block the caller for `ticks` SysTick periods by spinning on the volatile
// counter. The counter is presumably decremented from the SysTick interrupt via
// Timer::tick() (defined elsewhere) — so this assumes interrupts are enabled
// while sleeping; TODO confirm for callers that run with interrupts masked.
void
Timer::sleep(ticks_t ticks)
{
  ms_delayCount = ticks;
  // Busy wait until the SysTick decrements the counter to zero.
  while (ms_delayCount != 0u)
    ;
}
// ----- SysTick_Handler() ----------------------------------------------------
extern "C" void
SysTick_Handler(void)
{
#if defined(USE_HAL_DRIVER)
HAL_IncTick();
#endif
Timer::tick();
}
// ----------------------------------------------------------------------------
| mit |
eriawan/octokit.net | Octokit.Tests.Integration/Clients/IssuesLabelsClientTests.cs | 43225 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Octokit;
using Octokit.Tests.Integration;
using Xunit;
using Octokit.Tests.Integration.Helpers;
public class IssuesLabelsClientTests : IDisposable
{
private readonly IIssuesLabelsClient _issuesLabelsClient;
private readonly IIssuesClient _issuesClient;
private readonly RepositoryContext _context;
    public IssuesLabelsClientTests()
    {
        // Authenticated client shared by every test in this fixture.
        var github = Helper.GetAuthenticatedClient();
        _issuesLabelsClient = github.Issue.Labels;
        _issuesClient = github.Issue;
        var repoName = Helper.MakeNameWithTimestamp("public-repo");
        // NOTE(review): .Result blocks until the repository is created; constructors
        // cannot be async, so this sync-over-async call is deliberate here.
        _context = github.CreateRepositoryContext(new NewRepository(repoName)).Result;
    }
[IntegrationTest]
public async Task CanListIssueLabelsForAnIssue()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.Empty(issueLabelsInfo);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.Equal(1, issueLabelsInfo.Count);
Assert.Equal(newLabel.Color, issueLabelsInfo[0].Color);
}
[IntegrationTest]
public async Task CanListIssueLabelsForAnIssueWithRepositoryId()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.Empty(issueLabelsInfo);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.Equal(1, issueLabelsInfo.Count);
Assert.Equal(newLabel.Color, issueLabelsInfo[0].Color);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithoutStartForAnIssue()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.Empty(issueLabelsInfo);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1
};
issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, options);
Assert.Equal(1, issueLabelsInfo.Count);
Assert.Equal(newLabel.Color, issueLabelsInfo[0].Color);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithoutStartForAnIssueWithRepositoryId()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.Empty(issueLabelsInfo);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1
};
issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number, options);
Assert.Equal(1, issueLabelsInfo.Count);
Assert.Equal(newLabel.Color, issueLabelsInfo[0].Color);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithStartForAnIssue()
{
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, new NewIssue("A test issue") { Body = "A new unassigned issue" });
var issueUpdate = new IssueUpdate();
var labels = new List<Label>();
for (int i = 0; i < 2; i++)
{
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, new NewLabel("test label " + (i + 1), "FFFFF" + (i + 1)));
labels.Add(label);
issueUpdate.AddLabel(label.Name);
}
var issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.Empty(issueLabelsInfo);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 2
};
issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, options);
Assert.Equal(1, issueLabelsInfo.Count);
Assert.Equal(labels.Last().Color, issueLabelsInfo.First().Color);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithStartForAnIssueWithRepositoryId()
{
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, new NewIssue("A test issue") { Body = "A new unassigned issue" });
var issueUpdate = new IssueUpdate();
var labels = new List<Label>();
for (int i = 0; i < 2; i++)
{
var label = await _issuesLabelsClient.Create(_context.Repository.Id, new NewLabel("test label " + (i + 1), "FFFFF" + (i + 1)));
labels.Add(label);
issueUpdate.AddLabel(label.Name);
}
var issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.Empty(issueLabelsInfo);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 2
};
issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number, options);
Assert.Equal(1, issueLabelsInfo.Count);
Assert.Equal(labels.Last().Color, issueLabelsInfo.First().Color);
}
[IntegrationTest]
public async Task ReturnsDistinctIssueLabelsBasedOnStartPageForAnIssue()
{
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, new NewIssue("A test issue") { Body = "A new unassigned issue" });
var issueUpdate = new IssueUpdate();
for (int i = 0; i < 2; i++)
{
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, new NewLabel("test label " + (i + 1), "FFFFF" + (i + 1)));
issueUpdate.AddLabel(label.Name);
}
var issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.Empty(issueLabelsInfo);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
var startOptions = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 1
};
var firstPage = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, startOptions);
var skipStartOptions = new ApiOptions
{
PageSize = 1,
PageCount = 1,
StartPage = 2
};
var secondPage = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, skipStartOptions);
Assert.Equal(1, firstPage.Count);
Assert.Equal(1, secondPage.Count);
Assert.NotEqual(firstPage.First().Color, secondPage.First().Color);
}
[IntegrationTest]
public async Task ReturnsDistinctIssueLabelsBasedOnStartPageForAnIssueWithRepositoryId()
{
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, new NewIssue("A test issue") { Body = "A new unassigned issue" });
var issueUpdate = new IssueUpdate();
for (int i = 0; i < 2; i++)
{
var label = await _issuesLabelsClient.Create(_context.Repository.Id, new NewLabel("test label " + (i + 1), "FFFFF" + (i + 1)));
issueUpdate.AddLabel(label.Name);
}
var issueLabelsInfo = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.Empty(issueLabelsInfo);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
var startOptions = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 1
};
var firstPage = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number, startOptions);
var skipStartOptions = new ApiOptions
{
PageSize = 1,
PageCount = 1,
StartPage = 2
};
var secondPage = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number, skipStartOptions);
Assert.Equal(1, firstPage.Count);
Assert.Equal(1, secondPage.Count);
Assert.NotEqual(firstPage.First().Color, secondPage.First().Color);
}
[IntegrationTest]
public async Task CanListIssueLabelsForARepository()
{
var newLabel1 = new NewLabel("test label 1", "FFFFFF");
var newLabel2 = new NewLabel("test label 2", "FFFFFF");
var originalIssueLabels = await _issuesLabelsClient.GetAllForRepository(_context.RepositoryOwner, _context.RepositoryName);
await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel1);
await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel2);
var issueLabels = await _issuesLabelsClient.GetAllForRepository(_context.RepositoryOwner, _context.RepositoryName);
Assert.Equal(originalIssueLabels.Count + 2, issueLabels.Count);
}
[IntegrationTest]
public async Task CanListIssueLabelsForARepositoryWithRepositoryId()
{
    var newLabel1 = new NewLabel("test label 1", "FFFFFF");
    var newLabel2 = new NewLabel("test label 2", "FFFFFF");

    // Snapshot the repository's label count before adding anything.
    var originalIssueLabels = await _issuesLabelsClient.GetAllForRepository(_context.Repository.Id);

    // Use the repository-id overload of Create as well: this is the
    // "...WithRepositoryId" variant, but it previously called the owner/name
    // overloads, leaving the id code path untested.
    await _issuesLabelsClient.Create(_context.Repository.Id, newLabel1);
    await _issuesLabelsClient.Create(_context.Repository.Id, newLabel2);

    // Exactly the two labels created above should have been added.
    var issueLabels = await _issuesLabelsClient.GetAllForRepository(_context.Repository.Id);
    Assert.Equal(originalIssueLabels.Count + 2, issueLabels.Count);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithoutStartForARepository()
{
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1
};
var issueLabelsInfo = await _issuesLabelsClient.GetAllForRepository(_context.RepositoryOwner, _context.RepositoryName, options);
Assert.Equal(1, issueLabelsInfo.Count);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithoutStartForARepositoryWithRepositoryId()
{
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1
};
var issueLabelsInfo = await _issuesLabelsClient.GetAllForRepository(_context.Repository.Id, options);
Assert.Equal(1, issueLabelsInfo.Count);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithStartForARepository()
{
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 2
};
var issueLabelsInfo = await _issuesLabelsClient.GetAllForRepository(_context.RepositoryOwner, _context.RepositoryName, options);
Assert.Equal(1, issueLabelsInfo.Count);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithStartForARepositoryWithRepositoryId()
{
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 2
};
var issueLabelsInfo = await _issuesLabelsClient.GetAllForRepository(_context.Repository.Id, options);
Assert.Equal(1, issueLabelsInfo.Count);
}
[IntegrationTest]
public async Task ReturnsDistinctIssueLabelsBasedOnStartPageForARepository()
{
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var startOptions = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 1
};
var firstPage = await _issuesLabelsClient.GetAllForRepository(_context.RepositoryOwner, _context.RepositoryName, startOptions);
var skipStartOptions = new ApiOptions
{
PageSize = 1,
PageCount = 1,
StartPage = 2
};
var secondPage = await _issuesLabelsClient.GetAllForRepository(_context.RepositoryOwner, _context.RepositoryName, skipStartOptions);
Assert.Equal(1, firstPage.Count);
Assert.Equal(1, secondPage.Count);
Assert.NotEqual(firstPage.First().Color, secondPage.First().Color);
}
[IntegrationTest]
public async Task ReturnsDistinctIssueLabelsBasedOnStartPageForARepositoryWithRepositoryId()
{
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate();
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var startOptions = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 1
};
var firstPage = await _issuesLabelsClient.GetAllForRepository(_context.Repository.Id, startOptions);
var skipStartOptions = new ApiOptions
{
PageSize = 1,
PageCount = 1,
StartPage = 2
};
var secondPage = await _issuesLabelsClient.GetAllForRepository(_context.Repository.Id, skipStartOptions);
Assert.Equal(1, firstPage.Count);
Assert.Equal(1, secondPage.Count);
Assert.NotEqual(firstPage.First().Color, secondPage.First().Color);
}
[IntegrationTest]
public async Task CanListLabelsForAnMilestone()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label", "FFFFFF");
var newMilestone = new NewMilestone("New Milestone");
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var milestone = await _issuesClient.Milestone.Create(_context.RepositoryOwner, _context.RepositoryName, newMilestone);
var issueLabelsInfo = await _issuesLabelsClient.GetAllForMilestone(_context.RepositoryOwner, _context.RepositoryName, milestone.Number);
Assert.Empty(issueLabelsInfo);
var issueUpdate = new IssueUpdate { Milestone = milestone.Number };
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
issueLabelsInfo = await _issuesLabelsClient.GetAllForMilestone(_context.RepositoryOwner, _context.RepositoryName, milestone.Number);
Assert.Equal(1, issueLabelsInfo.Count);
Assert.Equal(label.Color, issueLabelsInfo[0].Color);
}
[IntegrationTest]
public async Task CanListLabelsForAnMilestoneWithRepositoryId()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label", "FFFFFF");
var newMilestone = new NewMilestone("New Milestone");
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var milestone = await _issuesClient.Milestone.Create(_context.RepositoryOwner, _context.RepositoryName, newMilestone);
var issueLabelsInfo = await _issuesLabelsClient.GetAllForMilestone(_context.Repository.Id, milestone.Number);
Assert.Empty(issueLabelsInfo);
var issueUpdate = new IssueUpdate { Milestone = milestone.Number };
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
issueLabelsInfo = await _issuesLabelsClient.GetAllForMilestone(_context.Repository.Id, milestone.Number);
Assert.Equal(1, issueLabelsInfo.Count);
Assert.Equal(label.Color, issueLabelsInfo[0].Color);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithoutStartForAMilestone()
{
var newMilestone = new NewMilestone("New Milestone");
var milestone = await _issuesClient.Milestone.Create(_context.RepositoryOwner, _context.RepositoryName, newMilestone);
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate { Milestone = milestone.Number };
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1
};
var issueLabelsInfo = await _issuesLabelsClient.GetAllForMilestone(_context.RepositoryOwner, _context.RepositoryName, milestone.Number, options);
Assert.Equal(1, issueLabelsInfo.Count);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithoutStartForAMilestoneWithRepositoryId()
{
var newMilestone = new NewMilestone("New Milestone");
var milestone = await _issuesClient.Milestone.Create(_context.RepositoryOwner, _context.RepositoryName, newMilestone);
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate { Milestone = milestone.Number };
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1
};
var issueLabelsInfo = await _issuesLabelsClient.GetAllForMilestone(_context.Repository.Id, milestone.Number, options);
Assert.Equal(1, issueLabelsInfo.Count);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithStartForAMilestone()
{
var newMilestone = new NewMilestone("New Milestone");
var milestone = await _issuesClient.Milestone.Create(_context.RepositoryOwner, _context.RepositoryName, newMilestone);
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate { Milestone = milestone.Number };
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 2
};
var issueLabelsInfo = await _issuesLabelsClient.GetAllForMilestone(_context.RepositoryOwner, _context.RepositoryName, milestone.Number, options);
Assert.Equal(1, issueLabelsInfo.Count);
}
[IntegrationTest]
public async Task ReturnsCorrectCountOfIssueLabelsWithStartForAMilestoneWithRepositoryId()
{
var newMilestone = new NewMilestone("New Milestone");
var milestone = await _issuesClient.Milestone.Create(_context.RepositoryOwner, _context.RepositoryName, newMilestone);
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate { Milestone = milestone.Number };
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var options = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 2
};
var issueLabelsInfo = await _issuesLabelsClient.GetAllForMilestone(_context.Repository.Id, milestone.Number, options);
Assert.Equal(1, issueLabelsInfo.Count);
}
[IntegrationTest]
public async Task ReturnsDistinctIssueLabelsBasedOnStartPageForAMilestone()
{
var newMilestone = new NewMilestone("New Milestone");
var milestone = await _issuesClient.Milestone.Create(_context.RepositoryOwner, _context.RepositoryName, newMilestone);
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate { Milestone = milestone.Number };
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var startOptions = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 1
};
var firstPage = await _issuesLabelsClient.GetAllForMilestone(_context.RepositoryOwner, _context.RepositoryName, milestone.Number, startOptions);
var skipStartOptions = new ApiOptions
{
PageSize = 1,
PageCount = 1,
StartPage = 2
};
var secondPage = await _issuesLabelsClient.GetAllForMilestone(_context.RepositoryOwner, _context.RepositoryName, milestone.Number, skipStartOptions);
Assert.Equal(1, firstPage.Count);
Assert.Equal(1, secondPage.Count);
Assert.NotEqual(firstPage.First().Color, secondPage.First().Color);
}
[IntegrationTest]
public async Task ReturnsDistinctIssueLabelsBasedOnStartPageForAMilestoneWithRepositoryId()
{
var newMilestone = new NewMilestone("New Milestone");
var milestone = await _issuesClient.Milestone.Create(_context.RepositoryOwner, _context.RepositoryName, newMilestone);
for (int i = 0; i < 2; i++)
{
int k = i + 1;
var newIssue = new NewIssue("A test issue " + k) { Body = "A new unassigned issue " + k };
var newLabel = new NewLabel("test label " + k, "FFFFF" + k);
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
var issueUpdate = new IssueUpdate { Milestone = milestone.Number };
issueUpdate.AddLabel(label.Name);
var updated = await _issuesClient.Update(_context.RepositoryOwner, _context.RepositoryName, issue.Number, issueUpdate);
Assert.NotNull(updated);
}
var startOptions = new ApiOptions
{
PageCount = 1,
PageSize = 1,
StartPage = 1
};
var firstPage = await _issuesLabelsClient.GetAllForMilestone(_context.Repository.Id, milestone.Number, startOptions);
var skipStartOptions = new ApiOptions
{
PageSize = 1,
PageCount = 1,
StartPage = 2
};
var secondPage = await _issuesLabelsClient.GetAllForMilestone(_context.Repository.Id, milestone.Number, skipStartOptions);
Assert.Equal(1, firstPage.Count);
Assert.Equal(1, secondPage.Count);
Assert.NotEqual(firstPage.First().Color, secondPage.First().Color);
}
[IntegrationTest]
public async Task CanRetrieveIssueLabelByName()
{
// Create a label, then fetch it back by name and verify the round trip.
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
Assert.NotNull(label);
var issueLabelLookupByName = await _issuesLabelsClient.Get(_context.RepositoryOwner, _context.RepositoryName, label.Name);
// The label returned by Get should match what Create reported.
Assert.Equal(label.Name, issueLabelLookupByName.Name);
Assert.Equal(label.Color, issueLabelLookupByName.Color);
}
[IntegrationTest]
public async Task CanRetrieveIssueLabelByNameWithRepositoryId()
{
// Same round trip as CanRetrieveIssueLabelByName, but through the
// repository-id overloads of Create and Get.
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
Assert.NotNull(label);
var issueLabelLookupByName = await _issuesLabelsClient.Get(_context.Repository.Id, label.Name);
Assert.Equal(label.Name, issueLabelLookupByName.Name);
Assert.Equal(label.Color, issueLabelLookupByName.Color);
}
[IntegrationTest]
public async Task CanDeleteIssueLabelByName()
{
// Create a label and confirm it is retrievable by name.
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
Assert.NotNull(label);
var issueLabelLookupByName = await _issuesLabelsClient.Get(_context.RepositoryOwner, _context.RepositoryName, label.Name);
Assert.Equal(label.Name, issueLabelLookupByName.Name);
Assert.Equal(label.Color, issueLabelLookupByName.Color);
// After deletion, fetching the same name should fail with NotFound.
await _issuesLabelsClient.Delete(_context.RepositoryOwner, _context.RepositoryName, label.Name);
await Assert.ThrowsAsync<NotFoundException>(() => _issuesLabelsClient.Get(_context.RepositoryOwner, _context.RepositoryName, label.Name));
}
[IntegrationTest]
public async Task CanDeleteIssueLabelByNameWithRepositoryId()
{
    // Create a label and confirm it is retrievable by name.
    var newLabel = new NewLabel("test label 1b", "FFFFFF");
    var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
    Assert.NotNull(label);

    var issueLabelLookupByName = await _issuesLabelsClient.Get(_context.Repository.Id, label.Name);
    Assert.Equal(label.Name, issueLabelLookupByName.Name);
    Assert.Equal(label.Color, issueLabelLookupByName.Color);

    // Delete and re-fetch through the repository-id overloads as well: this is
    // the "...WithRepositoryId" variant, but it previously used the owner/name
    // overloads for Delete/Get, leaving the id code path untested.
    await _issuesLabelsClient.Delete(_context.Repository.Id, label.Name);
    await Assert.ThrowsAsync<NotFoundException>(() => _issuesLabelsClient.Get(_context.Repository.Id, label.Name));
}
[IntegrationTest]
public async Task CanAddToIssue()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
Assert.NotNull(label);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
Assert.NotNull(issue);
await _issuesLabelsClient.AddToIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, new[] { label.Name });
var labels = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.NotEmpty(labels);
Assert.Equal(label.Name, labels[0].Name);
Assert.Equal(label.Color, labels[0].Color);
}
[IntegrationTest]
public async Task CanAddToIssueWithRepositoryId()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
Assert.NotNull(label);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
Assert.NotNull(issue);
await _issuesLabelsClient.AddToIssue(_context.Repository.Id, issue.Number, new[] { label.Name });
var labels = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.NotEmpty(labels);
Assert.Equal(label.Name, labels[0].Name);
Assert.Equal(label.Color, labels[0].Color);
}
[IntegrationTest]
public async Task CanRemoveAllFromIssue()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
Assert.NotNull(label);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
Assert.NotNull(issue);
await _issuesLabelsClient.AddToIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, new[] { label.Name });
await _issuesLabelsClient.RemoveAllFromIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
var labels = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.Empty(labels);
}
[IntegrationTest]
public async Task CanRemoveAllFromIssueWithRepositoryId()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
Assert.NotNull(label);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
Assert.NotNull(issue);
await _issuesLabelsClient.AddToIssue(_context.Repository.Id, issue.Number, new[] { label.Name });
await _issuesLabelsClient.RemoveAllFromIssue(_context.Repository.Id, issue.Number);
var labels = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.Empty(labels);
}
[IntegrationTest]
public async Task CanRemoveFromIssue()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel);
Assert.NotNull(label);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
Assert.NotNull(issue);
await _issuesLabelsClient.AddToIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, new[] { label.Name });
await _issuesLabelsClient.RemoveFromIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, label.Name);
var labels = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.Empty(labels);
}
[IntegrationTest]
public async Task CanRemoveFromIssueWithRepositoryId()
{
var newIssue = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel = new NewLabel("test label 1b", "FFFFFF");
var label = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel);
Assert.NotNull(label);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue);
Assert.NotNull(issue);
await _issuesLabelsClient.AddToIssue(_context.Repository.Id, issue.Number, new[] { label.Name });
await _issuesLabelsClient.RemoveFromIssue(_context.Repository.Id, issue.Number, label.Name);
var labels = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.Empty(labels);
}
[IntegrationTest]
public async Task CanReplaceAllForIssue()
{
var newIssue1 = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel1 = new NewLabel("test label 1b", "FFFFFF");
var newLabel2 = new NewLabel("test label 1a", "FFFFFF");
var label1 = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel1);
Assert.NotNull(label1);
var label2 = await _issuesLabelsClient.Create(_context.RepositoryOwner, _context.RepositoryName, newLabel2);
Assert.NotNull(label2);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue1);
Assert.NotNull(issue);
await _issuesLabelsClient.AddToIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, new[] { label1.Name });
await _issuesLabelsClient.ReplaceAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number, new[] { label2.Name });
var labels = await _issuesLabelsClient.GetAllForIssue(_context.RepositoryOwner, _context.RepositoryName, issue.Number);
Assert.NotEmpty(labels);
Assert.Equal(label2.Name, labels[0].Name);
Assert.Equal(label2.Color, labels[0].Color);
}
[IntegrationTest]
public async Task CanReplaceAllForIssueWithRepositoryId()
{
var newIssue1 = new NewIssue("A test issue") { Body = "A new unassigned issue" };
var newLabel1 = new NewLabel("test label 1b", "FFFFFF");
var newLabel2 = new NewLabel("test label 1a", "FFFFFF");
var label1 = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel1);
Assert.NotNull(label1);
var label2 = await _issuesLabelsClient.Create(_context.Repository.Id, newLabel2);
Assert.NotNull(label2);
var issue = await _issuesClient.Create(_context.RepositoryOwner, _context.RepositoryName, newIssue1);
Assert.NotNull(issue);
await _issuesLabelsClient.AddToIssue(_context.Repository.Id, issue.Number, new[] { label1.Name });
await _issuesLabelsClient.ReplaceAllForIssue(_context.Repository.Id, issue.Number, new[] { label2.Name });
var labels = await _issuesLabelsClient.GetAllForIssue(_context.Repository.Id, issue.Number);
Assert.NotEmpty(labels);
Assert.Equal(label2.Name, labels[0].Name);
Assert.Equal(label2.Color, labels[0].Color);
}
// Tear down the temporary repository context created for these tests.
public void Dispose()
{
_context.Dispose();
}
}
| mit |
liangjg/openmc | src/tallies/filter_mesh.cpp | 3158 | #include "openmc/tallies/filter_mesh.h"
#include <cstddef>

#include <fmt/core.h>
#include <gsl/gsl>

#include "openmc/capi.h"
#include "openmc/constants.h"
#include "openmc/error.h"
#include "openmc/mesh.h"
#include "openmc/xml_interface.h"
namespace openmc {
// Read the filter's mesh specification from its XML node. The "bins" entry
// must contain exactly one mesh ID, which is resolved through
// model::mesh_map to an index into model::meshes; any other input is fatal.
void
MeshFilter::from_xml(pugi::xml_node node)
{
auto bins_ = get_node_array<int32_t>(node, "bins");
if (bins_.size() != 1) {
fatal_error("Only one mesh can be specified per " + type()
+ " mesh filter.");
}
auto id = bins_[0];
auto search = model::mesh_map.find(id);
if (search != model::mesh_map.end()) {
// Stores the mesh index and caches the bin count (see set_mesh).
set_mesh(search->second);
} else{
fatal_error(fmt::format(
"Could not find mesh {} specified on tally filter.", id));
}
}
// Determine which mesh bin(s) the particle scores to. For non-tracklength
// estimators only the particle's current position matters (at most one bin);
// for track-length estimators every bin crossed by the track contributes,
// with bins and weights filled in by the mesh's bins_crossed.
void
MeshFilter::get_all_bins(const Particle& p, TallyEstimator estimator, FilterMatch& match)
const
{
if (estimator != TallyEstimator::TRACKLENGTH) {
auto bin = model::meshes[mesh_]->get_bin(p.r());
// A negative bin indicates the position is not inside the mesh.
if (bin >= 0) {
match.bins_.push_back(bin);
match.weights_.push_back(1.0);
}
} else {
model::meshes[mesh_]->bins_crossed(p, match.bins_, match.weights_);
}
}
// Write this filter to the statepoint file: base filter data plus the ID of
// the associated mesh (stored under "bins").
void
MeshFilter::to_statepoint(hid_t filter_group) const
{
Filter::to_statepoint(filter_group);
write_dataset(filter_group, "bins", model::meshes[mesh_]->id_);
}
// Human-readable label for one filter bin; delegates to the mesh.
// Uses at() so an out-of-range mesh index throws rather than being UB.
std::string
MeshFilter::text_label(int bin) const
{
auto& mesh = *model::meshes.at(mesh_);
return mesh.bin_label(bin);
}
// Point this filter at a new mesh (index into model::meshes) and refresh the
// cached number of filter bins accordingly.
void
MeshFilter::set_mesh(int32_t mesh)
{
mesh_ = mesh;
n_bins_ = model::meshes[mesh_]->n_bins();
}
//==============================================================================
// C-API functions
//==============================================================================
// C API: retrieve the mesh index associated with the mesh filter at position
// `index` in the tally-filter array. On success, writes the mesh index
// through `index_mesh` and returns 0; otherwise returns an OPENMC_E_* code.
extern "C" int
openmc_mesh_filter_get_mesh(int32_t index, int32_t* index_mesh)
{
if (!index_mesh) {
set_errmsg("Mesh index argument is a null pointer.");
return OPENMC_E_INVALID_ARGUMENT;
}
// Make sure this is a valid index to an allocated filter.
if (int err = verify_filter(index)) return err;
// Get a pointer to the filter and downcast.
const auto& filt_base = model::tally_filters[index].get();
auto* filt = dynamic_cast<MeshFilter*>(filt_base);
// Check the filter type (dynamic_cast yields null for non-mesh filters).
if (!filt) {
set_errmsg("Tried to get mesh on a non-mesh filter.");
return OPENMC_E_INVALID_TYPE;
}
// Output the mesh.
*index_mesh = filt->mesh();
return 0;
}
// C API: associate the mesh filter at position `index` in the tally-filter
// array with the mesh at position `index_mesh` in model::meshes. Returns 0
// on success or an OPENMC_E_* error code.
extern "C" int
openmc_mesh_filter_set_mesh(int32_t index, int32_t index_mesh)
{
  // Make sure this is a valid index to an allocated filter.
  if (int err = verify_filter(index)) return err;

  // Get a pointer to the filter and downcast.
  const auto& filt_base = model::tally_filters[index].get();
  auto* filt = dynamic_cast<MeshFilter*>(filt_base);

  // Check the filter type (dynamic_cast yields null for non-mesh filters).
  if (!filt) {
    set_errmsg("Tried to set mesh on a non-mesh filter.");
    return OPENMC_E_INVALID_TYPE;
  }

  // Check the mesh index. Cast to an unsigned type only after rejecting
  // negative values, so the comparison against size() is well-defined and
  // free of signed/unsigned-mismatch warnings.
  if (index_mesh < 0
      || static_cast<std::size_t>(index_mesh) >= model::meshes.size()) {
    set_errmsg("Index in 'meshes' array is out of bounds.");
    return OPENMC_E_OUT_OF_BOUNDS;
  }

  // Update the filter.
  filt->set_mesh(index_mesh);
  return 0;
}
} // namespace openmc
| mit |
kenfdev/angular-xeditable | docs/demos/bsdate/test.js | 1624 | describe('bsdate', function() {
beforeEach(function() {
browser().navigateTo(mainUrl);
});
it('should show editor and submit new value', function() {
var s = '[ng-controller="BsdateCtrl"] ';
expect(element(s+'a[editable-bsdate]').css('display')).not().toBe('none');
expect(element(s+'a[editable-bsdate]').text()).toMatch('15/05/1984');
element(s+'a[editable-bsdate]').click();
element(s+'form .input-group-btn button[type="button"]').click();
expect(element(s+'a[editable-bsdate]').css('display')).toBe('none');
expect(element(s+'form[editable-form="$form"]').count()).toBe(1);
expect(element(s+'form input[type="text"]:visible').count()).toBe(1);
expect(element(s+'form input[type="text"]').val()).toBe('15-May-1984');
expect(element(s+'form .editable-buttons button[type="submit"]:visible').count()).toBe(1);
expect(element(s+'form .editable-buttons button[type="button"]:visible').count()).toBe(1);
expect(element(s+'ul.dropdown-menu:visible').count()).toBe(1);
expect(element(s+'form table button.btn-info span').text()).toMatch('15');
//set 29 april
element(s+'form table > tbody > tr:eq(0) > td:eq(1) > button').click();
expect(element(s+'ul.dropdown-menu:visible').count()).toBe(0);
expect(element(s+'form input[type="text"]').val()).toBe('29-April-1984');
//submit
element(s+'form button[type="submit"]').click();
expect(element(s+'a[editable-bsdate]').css('display')).not().toBe('none');
expect(element(s+'a[editable-bsdate]').text()).toMatch('29/04/1984');
expect(element(s+'form').count()).toBe(0);
});
}); | mit |
daltomi/oxygine-framework | examples/Game/part5/proj.android/src/org/oxygine/GamePart5/MainActivity.java | 128 | package org.oxygine.GamePart5;
import org.oxygine.lib.OxygineActivity;
/**
 * Launcher activity for the GamePart5 example.
 *
 * <p>Intentionally empty: all behavior is inherited from
 * {@link OxygineActivity}; this subclass only gives the example a
 * project-local activity class.
 */
public class MainActivity extends OxygineActivity
{
}
zloirock/core-js | packages/core-js/actual/reflect/own-keys.js | 81 | var parent = require('../../stable/reflect/own-keys');
module.exports = parent;
| mit |
lincoln131/rubyspanishxlsx | Portable/Ruby2.3.0/lib/ruby/2.3.0/irb/extend-command.rb | 9949 | # frozen_string_literal: false
#
# irb/extend-command.rb - irb extend command
# $Release Version: 0.9.6$
# $Revision: 53141 $
# by Keiju ISHITSUKA(keiju@ruby-lang.org)
#
# --
#
#
#
module IRB # :nodoc:
# Installs the default irb extensions command bundle.
module ExtendCommandBundle
EXCB = ExtendCommandBundle # :nodoc:
# See #install_alias_method.
NO_OVERRIDE = 0
# See #install_alias_method.
OVERRIDE_PRIVATE_ONLY = 0x01
# See #install_alias_method.
OVERRIDE_ALL = 0x02
# Quits the current irb context.
#
# +ret+ is the optional signal or message to send to Context#exit
# (defaults to 0).
#
# Same as <code>IRB.CurrentContext.exit</code>.
def irb_exit(ret = 0)
irb_context.exit(ret)
end
# Returns the current irb Context (the same object as
# <code>IRB.CurrentContext</code>).
#
# Modifying the configuration is achieved by sending a message to IRB.conf.
def irb_context
IRB.CurrentContext
end
@ALIASES = [
[:context, :irb_context, NO_OVERRIDE],
[:conf, :irb_context, NO_OVERRIDE],
[:irb_quit, :irb_exit, OVERRIDE_PRIVATE_ONLY],
[:exit, :irb_exit, OVERRIDE_PRIVATE_ONLY],
[:quit, :irb_exit, OVERRIDE_PRIVATE_ONLY],
]
@EXTEND_COMMANDS = [
[:irb_current_working_workspace, :CurrentWorkingWorkspace, "irb/cmd/chws",
[:irb_print_working_workspace, OVERRIDE_ALL],
[:irb_cwws, OVERRIDE_ALL],
[:irb_pwws, OVERRIDE_ALL],
[:cwws, NO_OVERRIDE],
[:pwws, NO_OVERRIDE],
[:irb_current_working_binding, OVERRIDE_ALL],
[:irb_print_working_binding, OVERRIDE_ALL],
[:irb_cwb, OVERRIDE_ALL],
[:irb_pwb, OVERRIDE_ALL],
],
[:irb_change_workspace, :ChangeWorkspace, "irb/cmd/chws",
[:irb_chws, OVERRIDE_ALL],
[:irb_cws, OVERRIDE_ALL],
[:chws, NO_OVERRIDE],
[:cws, NO_OVERRIDE],
[:irb_change_binding, OVERRIDE_ALL],
[:irb_cb, OVERRIDE_ALL],
[:cb, NO_OVERRIDE]],
[:irb_workspaces, :Workspaces, "irb/cmd/pushws",
[:workspaces, NO_OVERRIDE],
[:irb_bindings, OVERRIDE_ALL],
[:bindings, NO_OVERRIDE]],
[:irb_push_workspace, :PushWorkspace, "irb/cmd/pushws",
[:irb_pushws, OVERRIDE_ALL],
[:pushws, NO_OVERRIDE],
[:irb_push_binding, OVERRIDE_ALL],
[:irb_pushb, OVERRIDE_ALL],
[:pushb, NO_OVERRIDE]],
[:irb_pop_workspace, :PopWorkspace, "irb/cmd/pushws",
[:irb_popws, OVERRIDE_ALL],
[:popws, NO_OVERRIDE],
[:irb_pop_binding, OVERRIDE_ALL],
[:irb_popb, OVERRIDE_ALL],
[:popb, NO_OVERRIDE]],
[:irb_load, :Load, "irb/cmd/load"],
[:irb_require, :Require, "irb/cmd/load"],
[:irb_source, :Source, "irb/cmd/load",
[:source, NO_OVERRIDE]],
[:irb, :IrbCommand, "irb/cmd/subirb"],
[:irb_jobs, :Jobs, "irb/cmd/subirb",
[:jobs, NO_OVERRIDE]],
[:irb_fg, :Foreground, "irb/cmd/subirb",
[:fg, NO_OVERRIDE]],
[:irb_kill, :Kill, "irb/cmd/subirb",
[:kill, OVERRIDE_PRIVATE_ONLY]],
[:irb_help, :Help, "irb/cmd/help",
[:help, NO_OVERRIDE]],
]
# Installs the default irb commands:
#
# +irb_current_working_workspace+:: Context#main
# +irb_change_workspace+:: Context#change_workspace
# +irb_workspaces+:: Context#workspaces
# +irb_push_workspace+:: Context#push_workspace
# +irb_pop_workspace+:: Context#pop_workspace
# +irb_load+:: #irb_load
# +irb_require+:: #irb_require
# +irb_source+:: IrbLoader#source_file
# +irb+:: IRB.irb
# +irb_jobs+:: JobManager
# +irb_fg+:: JobManager#switch
# +irb_kill+:: JobManager#kill
# +irb_help+:: IRB@Command+line+options
def self.install_extend_commands
for args in @EXTEND_COMMANDS
def_extend_command(*args)
end
end
# Evaluate the given +cmd_name+ on the given +cmd_class+ Class.
#
# Will also define any given +aliases+ for the method.
#
# The optional +load_file+ parameter will be required within the method
# definition.
def self.def_extend_command(cmd_name, cmd_class, load_file = nil, *aliases)
case cmd_class
when Symbol
cmd_class = cmd_class.id2name
when String
when Class
cmd_class = cmd_class.name
end
if load_file
line = __LINE__; eval %[
def #{cmd_name}(*opts, &b)
require "#{load_file}"
arity = ExtendCommand::#{cmd_class}.instance_method(:execute).arity
args = (1..(arity < 0 ? ~arity : arity)).map {|i| "arg" + i.to_s }
args << "*opts" if arity < 0
args << "&block"
args = args.join(", ")
line = __LINE__; eval %[
def #{cmd_name}(\#{args})
ExtendCommand::#{cmd_class}.execute(irb_context, \#{args})
end
], nil, __FILE__, line
send :#{cmd_name}, *opts, &b
end
], nil, __FILE__, line
else
line = __LINE__; eval %[
def #{cmd_name}(*opts, &b)
ExtendCommand::#{cmd_class}.execute(irb_context, *opts, &b)
end
], nil, __FILE__, line
end
for ali, flag in aliases
@ALIASES.push [ali, cmd_name, flag]
end
end
# Installs alias methods for the default irb commands, see
# ::install_extend_commands.
def install_alias_method(to, from, override = NO_OVERRIDE)
to = to.id2name unless to.kind_of?(String)
from = from.id2name unless from.kind_of?(String)
if override == OVERRIDE_ALL or
(override == OVERRIDE_PRIVATE_ONLY) && !respond_to?(to) or
(override == NO_OVERRIDE) && !respond_to?(to, true)
target = self
(class << self; self; end).instance_eval{
if target.respond_to?(to, true) &&
!target.respond_to?(EXCB.irb_original_method_name(to), true)
alias_method(EXCB.irb_original_method_name(to), to)
end
alias_method to, from
}
else
print "irb: warn: can't alias #{to} from #{from}.\n"
end
end
def self.irb_original_method_name(method_name) # :nodoc:
"irb_" + method_name + "_org"
end
# Installs alias methods for the default irb commands on the given object
# using #install_alias_method.
def self.extend_object(obj)
unless (class << obj; ancestors; end).include?(EXCB)
super
for ali, com, flg in @ALIASES
obj.install_alias_method(ali, com, flg)
end
end
end
install_extend_commands
end
# Extends methods for the Context module
module ContextExtender
CE = ContextExtender # :nodoc:
@EXTEND_COMMANDS = [
[:eval_history=, "irb/ext/history.rb"],
[:use_tracer=, "irb/ext/tracer.rb"],
[:math_mode=, "irb/ext/math-mode.rb"],
[:use_loader=, "irb/ext/use-loader.rb"],
[:save_history=, "irb/ext/save-history.rb"],
]
# Installs the default context extensions as irb commands:
#
# Context#eval_history=:: +irb/ext/history.rb+
# Context#use_tracer=:: +irb/ext/tracer.rb+
# Context#math_mode=:: +irb/ext/math-mode.rb+
# Context#use_loader=:: +irb/ext/use-loader.rb+
# Context#save_history=:: +irb/ext/save-history.rb+
def self.install_extend_commands
for args in @EXTEND_COMMANDS
def_extend_command(*args)
end
end
# Evaluate the given +command+ from the given +load_file+ on the Context
# module.
#
# Will also define any given +aliases+ for the method.
def self.def_extend_command(cmd_name, load_file, *aliases)
line = __LINE__; Context.module_eval %[
def #{cmd_name}(*opts, &b)
Context.module_eval {remove_method(:#{cmd_name})}
require "#{load_file}"
send :#{cmd_name}, *opts, &b
end
for ali in aliases
alias_method ali, cmd_name
end
], __FILE__, line
end
CE.install_extend_commands
end
# A convenience module for extending Ruby methods.
module MethodExtender
# Extends the given +base_method+ with a prefix call to the given
# +extend_method+.
def def_pre_proc(base_method, extend_method)
base_method = base_method.to_s
extend_method = extend_method.to_s
alias_name = new_alias_name(base_method)
module_eval %[
alias_method alias_name, base_method
def #{base_method}(*opts)
send :#{extend_method}, *opts
send :#{alias_name}, *opts
end
]
end
# Extends the given +base_method+ with a postfix call to the given
# +extend_method+.
def def_post_proc(base_method, extend_method)
base_method = base_method.to_s
extend_method = extend_method.to_s
alias_name = new_alias_name(base_method)
module_eval %[
alias_method alias_name, base_method
def #{base_method}(*opts)
send :#{alias_name}, *opts
send :#{extend_method}, *opts
end
]
end
# Returns a unique method name to use as an alias for the given +name+.
#
# Usually returns <code>#{prefix}#{name}#{postfix}<num></code>, example:
#
# new_alias_name('foo') #=> __alias_of__foo__
# def bar; end
# new_alias_name('bar') #=> __alias_of__bar__2
def new_alias_name(name, prefix = "__alias_of__", postfix = "__")
base_name = "#{prefix}#{name}#{postfix}"
all_methods = instance_methods(true) + private_instance_methods(true)
same_methods = all_methods.grep(/^#{Regexp.quote(base_name)}[0-9]*$/)
return base_name if same_methods.empty?
no = same_methods.size
while !same_methods.include?(alias_name = base_name + no)
no += 1
end
alias_name
end
end
end
| mit |
abdllhbyrktr/Urho3D | Source/Urho3D/Graphics/Direct3D9/D3D9TextureCube.cpp | 20984 | //
// Copyright (c) 2008-2017 the Urho3D project.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
#include "../../Precompiled.h"
#include "../../Core/Context.h"
#include "../../Core/Profiler.h"
#include "../../Graphics/Graphics.h"
#include "../../Graphics/GraphicsEvents.h"
#include "../../Graphics/GraphicsImpl.h"
#include "../../Graphics/Renderer.h"
#include "../../Graphics/TextureCube.h"
#include "../../IO/FileSystem.h"
#include "../../IO/Log.h"
#include "../../Resource/ResourceCache.h"
#include "../../Resource/XMLFile.h"
#include "../../DebugNew.h"
#ifdef _MSC_VER
#pragma warning(disable:4355)
#endif
namespace Urho3D
{
void TextureCube::OnDeviceLost()
{
    // Usages above TEXTURE_STATIC are created in the D3D9 default pool (see
    // Create()) and must release their GPU object when the device is lost.
    const bool livesInDefaultPool = usage_ > TEXTURE_STATIC;
    if (livesInDefaultPool)
        Release();
}
void TextureCube::OnDeviceReset()
{
    // Restore GPU-side data after a D3D9 device reset. Default-pool textures
    // (usage above static) were released in OnDeviceLost() and must be
    // recreated; act also when there is no GPU object yet, or data was
    // deferred by a SetData() call made while the device was lost.
    if (usage_ > TEXTURE_STATIC || !object_.ptr_ || dataPending_)
    {
        // If has a resource file, reload through the resource cache. Otherwise just recreate.
        ResourceCache* cache = GetSubsystem<ResourceCache>();
        if (cache->Exists(GetName()))
            dataLost_ = !cache->ReloadResource(this);
        if (!object_.ptr_)
        {
            // Recreated without pixel data; flag so users know contents were lost.
            Create();
            dataLost_ = true;
        }
    }
    dataPending_ = false;
}
void TextureCube::Release()
{
    // Unbind from any texture unit still referencing this texture before the
    // underlying D3D9 object is destroyed.
    if (graphics_)
    {
        for (unsigned unit = 0; unit < MAX_TEXTURE_UNITS; ++unit)
        {
            const bool boundToUnit = graphics_->GetTexture(unit) == this;
            if (boundToUnit)
                graphics_->SetTexture(unit, 0);
        }
    }
    // Release the per-face rendertarget surfaces, when present.
    for (unsigned face = 0; face < MAX_CUBEMAP_FACES; ++face)
    {
        if (renderSurfaces_[face])
            renderSurfaces_[face]->Release();
    }
    URHO3D_SAFE_RELEASE(object_.ptr_);
    // Reset bookkeeping so a future Create()/resolve starts from a clean state.
    levelsDirty_ = false;
    resolveDirty_ = false;
}
// Upload a rectangular region of CPU-side pixel data into one mip level of one
// face of the cube texture. For X8R8G8B8 / A8R8G8B8 target formats the tightly
// packed RGB / RGBA source rows are swizzled into the little-endian BGR(X/A)
// memory order D3D9 expects; other formats are copied row-by-row unchanged.
// Returns false on invalid arguments or lock failure. If the device is lost,
// the upload is deferred (dataPending_) and true is returned.
bool TextureCube::SetData(CubeMapFace face, unsigned level, int x, int y, int width, int height, const void* data)
{
    URHO3D_PROFILE(SetTextureData);
    if (!object_.ptr_)
    {
        URHO3D_LOGERROR("No texture created, can not set data");
        return false;
    }
    if (!data)
    {
        URHO3D_LOGERROR("Null source for setting data");
        return false;
    }
    if (level >= levels_)
    {
        URHO3D_LOGERROR("Illegal mip level for setting data");
        return false;
    }
    if (graphics_->IsDeviceLost())
    {
        URHO3D_LOGWARNING("Texture data assignment while device is lost");
        dataPending_ = true;
        return true;
    }
    // Block-compressed formats require 4-pixel-aligned update origins.
    if (IsCompressed())
    {
        x &= ~3;
        y &= ~3;
    }
    int levelWidth = GetLevelWidth(level);
    int levelHeight = GetLevelHeight(level);
    if (x < 0 || x + width > levelWidth || y < 0 || y + height > levelHeight || width <= 0 || height <= 0)
    {
        URHO3D_LOGERROR("Illegal dimensions for setting data");
        return false;
    }
    D3DLOCKED_RECT d3dLockedRect;
    RECT d3dRect;
    d3dRect.left = x;
    d3dRect.top = y;
    d3dRect.right = x + width;
    d3dRect.bottom = y + height;
    // A full level-0 update of a dynamic/rendertarget texture may discard the
    // previous contents for a faster lock; the whole surface is locked then
    // (null rect), so pBits addresses (0,0) in both cases.
    DWORD flags = 0;
    if (level == 0 && x == 0 && y == 0 && width == levelWidth && height == levelHeight && usage_ > TEXTURE_STATIC)
        flags |= D3DLOCK_DISCARD;
    HRESULT hr = ((IDirect3DCubeTexture9*)object_.ptr_)->LockRect((D3DCUBEMAP_FACES)face, level, &d3dLockedRect,
        (flags & D3DLOCK_DISCARD) ? 0 : &d3dRect, flags);
    if (FAILED(hr))
    {
        URHO3D_LOGD3DERROR("Could not lock texture", hr);
        return false;
    }
    // For compressed formats the copy loop below iterates block rows.
    if (IsCompressed())
    {
        height = (height + 3) >> 2;
        y >>= 2;  // NOTE(review): y is not referenced after this point
    }
    unsigned char* src = (unsigned char*)data;
    unsigned rowSize = GetRowDataSize(width);
    // GetRowDataSize() returns CPU-side (source) data size, so need to convert for X8R8G8B8
    if (format_ == D3DFMT_X8R8G8B8)
        rowSize = rowSize / 3 * 4;
    // Perform conversion from RGB / RGBA as necessary
    switch (format_)
    {
    default:
        // CPU and GPU layouts match: copy rows, honoring the surface pitch.
        for (int i = 0; i < height; ++i)
        {
            unsigned char* dest = (unsigned char*)d3dLockedRect.pBits + i * d3dLockedRect.Pitch;
            memcpy(dest, src, rowSize);
            src += rowSize;
        }
        break;
    case D3DFMT_X8R8G8B8:
        // RGB source -> BGRX destination (padding byte forced to 255.)
        for (int i = 0; i < height; ++i)
        {
            unsigned char* dest = (unsigned char*)d3dLockedRect.pBits + i * d3dLockedRect.Pitch;
            for (int j = 0; j < width; ++j)
            {
                *dest++ = src[2];
                *dest++ = src[1];
                *dest++ = src[0];
                *dest++ = 255;
                src += 3;
            }
        }
        break;
    case D3DFMT_A8R8G8B8:
        // RGBA source -> BGRA destination.
        for (int i = 0; i < height; ++i)
        {
            unsigned char* dest = (unsigned char*)d3dLockedRect.pBits + i * d3dLockedRect.Pitch;
            for (int j = 0; j < width; ++j)
            {
                *dest++ = src[2];
                *dest++ = src[1];
                *dest++ = src[0];
                *dest++ = src[3];
                src += 4;
            }
        }
        break;
    }
    ((IDirect3DCubeTexture9*)object_.ptr_)->UnlockRect((D3DCUBEMAP_FACES)face, level);
    return true;
}
bool TextureCube::SetData(CubeMapFace face, Deserializer& source)
{
    // Decode the stream into a temporary Image, then defer to the
    // image-based overload for validation and upload.
    SharedPtr<Image> decoded(new Image(context_));
    return decoded->Load(source) ? SetData(face, decoded) : false;
}
// Load a complete cube face from an Image. Face 0 (FACE_POSITIVE_X) must be
// loaded first: it creates the GPU texture and establishes the size/format the
// remaining faces are validated against. Uncompressed images may have leading
// mip levels skipped per the renderer's texture quality setting; compressed
// images are uploaded as-is, or decompressed to RGBA on the CPU when the GPU
// does not support the compressed format.
bool TextureCube::SetData(CubeMapFace face, Image* image, bool useAlpha)
{
    if (!image)
    {
        URHO3D_LOGERROR("Null image, can not load texture");
        return false;
    }
    // Use a shared ptr for managing the temporary mip images created during this function
    SharedPtr<Image> mipImage;
    unsigned memoryUse = 0;
    int quality = QUALITY_HIGH;
    Renderer* renderer = GetSubsystem<Renderer>();
    if (renderer)
        quality = renderer->GetTextureQuality();
    if (!image->IsCompressed())
    {
        unsigned char* levelData = image->GetData();
        int levelWidth = image->GetWidth();
        int levelHeight = image->GetHeight();
        unsigned components = image->GetComponents();
        unsigned format = 0;
        // Cube faces must be square.
        if (levelWidth != levelHeight)
        {
            URHO3D_LOGERROR("Cube texture width not equal to height");
            return false;
        }
        // Discard unnecessary mip levels
        for (unsigned i = 0; i < mipsToSkip_[quality]; ++i)
        {
            mipImage = image->GetNextLevel(); image = mipImage;
            levelData = image->GetData();
            levelWidth = image->GetWidth();
            levelHeight = image->GetHeight();
        }
        // Choose the GPU format from the source component count.
        switch (components)
        {
        case 1:
            format = useAlpha ? Graphics::GetAlphaFormat() : Graphics::GetLuminanceFormat();
            break;
        case 2:
            format = Graphics::GetLuminanceAlphaFormat();
            break;
        case 3:
            format = Graphics::GetRGBFormat();
            break;
        case 4:
            format = Graphics::GetRGBAFormat();
            break;
        default:
            assert(false);  // Should never reach here
            break;
        }
        // Create the texture when face 0 is being loaded, check that rest of the faces are same size & format
        if (!face)
        {
            // If image was previously compressed, reset number of requested levels to avoid error if level count is too high for new size
            if (IsCompressed() && requestedLevels_ > 1)
                requestedLevels_ = 0;
            SetSize(levelWidth, format);
        }
        else
        {
            if (!object_.ptr_)
            {
                URHO3D_LOGERROR("Cube texture face 0 must be loaded first");
                return false;
            }
            if (levelWidth != width_ || format != format_)
            {
                URHO3D_LOGERROR("Cube texture face does not match size or format of face 0");
                return false;
            }
        }
        // Upload each mip level, generating the next level from the previous image.
        for (unsigned i = 0; i < levels_; ++i)
        {
            SetData(face, i, 0, 0, levelWidth, levelHeight, levelData);
            memoryUse += levelWidth * levelHeight * components;
            if (i < levels_ - 1)
            {
                mipImage = image->GetNextLevel(); image = mipImage;
                levelData = image->GetData();
                levelWidth = image->GetWidth();
                levelHeight = image->GetHeight();
            }
        }
    }
    else
    {
        int width = image->GetWidth();
        int height = image->GetHeight();
        unsigned levels = image->GetNumCompressedLevels();
        unsigned format = graphics_->GetFormat(image->GetCompressedFormat());
        bool needDecompress = false;
        if (width != height)
        {
            URHO3D_LOGERROR("Cube texture width not equal to height");
            return false;
        }
        // Zero format means the GPU cannot sample this compressed format;
        // decompress to RGBA on the CPU instead.
        if (!format)
        {
            format = Graphics::GetRGBAFormat();
            needDecompress = true;
        }
        // Skip mips per quality, but never below one 4x4 compression block.
        unsigned mipsToSkip = mipsToSkip_[quality];
        if (mipsToSkip >= levels)
            mipsToSkip = levels - 1;
        while (mipsToSkip && (width / (1 << mipsToSkip) < 4 || height / (1 << mipsToSkip) < 4))
            --mipsToSkip;
        width /= (1 << mipsToSkip);
        height /= (1 << mipsToSkip);
        // Create the texture when face 0 is being loaded, assume rest of the faces are same size & format
        if (!face)
        {
            SetNumLevels(Max((levels - mipsToSkip), 1U));
            SetSize(width, format);
        }
        else
        {
            if (!object_.ptr_)
            {
                URHO3D_LOGERROR("Cube texture face 0 must be loaded first");
                return false;
            }
            if (width != width_ || format != format_)
            {
                URHO3D_LOGERROR("Cube texture face does not match size or format of face 0");
                return false;
            }
        }
        for (unsigned i = 0; i < levels_ && i < levels - mipsToSkip; ++i)
        {
            CompressedLevel level = image->GetCompressedLevel(i + mipsToSkip);
            if (!needDecompress)
            {
                SetData(face, i, 0, 0, level.width_, level.height_, level.data_);
                memoryUse += level.rows_ * level.rowSize_;
            }
            else
            {
                // Decompress this level to a temporary RGBA buffer for upload.
                unsigned char* rgbaData = new unsigned char[level.width_ * level.height_ * 4];
                level.Decompress(rgbaData);
                SetData(face, i, 0, 0, level.width_, level.height_, rgbaData);
                memoryUse += level.width_ * level.height_ * 4;
                delete[] rgbaData;
            }
        }
    }
    // Track estimated CPU-side memory use across all six faces.
    faceMemoryUse_[face] = memoryUse;
    unsigned totalMemoryUse = sizeof(TextureCube);
    for (unsigned i = 0; i < MAX_CUBEMAP_FACES; ++i)
        totalMemoryUse += faceMemoryUse_[i];
    SetMemoryUse(totalMemoryUse);
    return true;
}
// Read back one mip level of one face into tightly packed CPU memory,
// converting X8R8G8B8 / A8R8G8B8 GPU layouts back to RGB / RGBA. Rendertarget
// faces can only return level 0 and are copied through a system-memory
// offscreen surface via GetRenderTargetData(). Returns false on error or when
// the device is lost.
bool TextureCube::GetData(CubeMapFace face, unsigned level, void* dest) const
{
    if (!object_.ptr_)
    {
        URHO3D_LOGERROR("No texture created, can not get data");
        return false;
    }
    if (!dest)
    {
        URHO3D_LOGERROR("Null destination for getting data");
        return false;
    }
    if (level >= levels_)
    {
        URHO3D_LOGERROR("Illegal mip level for getting data");
        return false;
    }
    if (graphics_->IsDeviceLost())
    {
        URHO3D_LOGWARNING("Getting texture data while device is lost");
        return false;
    }
    // Make sure a pending multisample resolve has completed before reading.
    if (resolveDirty_)
        graphics_->ResolveToTexture(const_cast<TextureCube*>(this));
    int levelWidth = GetLevelWidth(level);
    int levelHeight = GetLevelHeight(level);
    D3DLOCKED_RECT d3dLockedRect;
    RECT d3dRect;
    d3dRect.left = 0;
    d3dRect.top = 0;
    d3dRect.right = levelWidth;
    d3dRect.bottom = levelHeight;
    IDirect3DSurface9* offscreenSurface = 0;
    // Need to use a offscreen surface & GetRenderTargetData() for rendertargets
    if (renderSurfaces_[face])
    {
        if (level != 0)
        {
            URHO3D_LOGERROR("Can only get mip level 0 data from a rendertarget");
            return false;
        }
        // If multisampled, must copy the surface of the resolve texture instead of the multisampled surface
        IDirect3DSurface9* resolveSurface = 0;
        if (multiSample_ > 1)
        {
            HRESULT hr = ((IDirect3DCubeTexture9*)object_.ptr_)->GetCubeMapSurface((D3DCUBEMAP_FACES)face, 0,
                (IDirect3DSurface9**)&resolveSurface);
            if (FAILED(hr))
            {
                URHO3D_LOGD3DERROR("Could not get surface of the resolve texture", hr);
                URHO3D_SAFE_RELEASE(resolveSurface);
                return false;
            }
        }
        IDirect3DDevice9* device = graphics_->GetImpl()->GetDevice();
        HRESULT hr = device->CreateOffscreenPlainSurface((UINT)width_, (UINT)height_, (D3DFORMAT)format_, D3DPOOL_SYSTEMMEM, &offscreenSurface, 0);
        if (FAILED(hr))
        {
            URHO3D_LOGD3DERROR("Could not create surface for getting rendertarget data", hr);
            URHO3D_SAFE_RELEASE(offscreenSurface);
            URHO3D_SAFE_RELEASE(resolveSurface);
            return false;
        }
        if (resolveSurface)
            hr = device->GetRenderTargetData(resolveSurface, offscreenSurface);
        else
            hr = device->GetRenderTargetData((IDirect3DSurface9*)renderSurfaces_[face]->GetSurface(), offscreenSurface);
        URHO3D_SAFE_RELEASE(resolveSurface);
        if (FAILED(hr))
        {
            URHO3D_LOGD3DERROR("Could not get rendertarget data", hr);
            URHO3D_SAFE_RELEASE(offscreenSurface);
            return false;
        }
        // BUGFIX: capture the LockRect() result so failure is detected with the
        // actual error code; previously the stale HRESULT of
        // GetRenderTargetData() was logged here.
        hr = offscreenSurface->LockRect(&d3dLockedRect, &d3dRect, D3DLOCK_READONLY);
        if (FAILED(hr))
        {
            URHO3D_LOGD3DERROR("Could not lock surface for getting rendertarget data", hr);
            URHO3D_SAFE_RELEASE(offscreenSurface);
            return false;
        }
    }
    else
    {
        // Non-rendertarget textures can be locked directly for reading.
        HRESULT hr = ((IDirect3DCubeTexture9*)object_.ptr_)->LockRect((D3DCUBEMAP_FACES)face, level, &d3dLockedRect, &d3dRect, D3DLOCK_READONLY);
        if (FAILED(hr))
        {
            URHO3D_LOGD3DERROR("Could not lock texture", hr);
            return false;
        }
    }
    // Compressed formats are copied in block rows.
    int height = levelHeight;
    if (IsCompressed())
        height = (height + 3) >> 2;
    unsigned char* destPtr = (unsigned char*)dest;
    unsigned rowSize = GetRowDataSize(levelWidth);
    // GetRowDataSize() returns CPU-side (destination) data size, so need to convert for X8R8G8B8
    if (format_ == D3DFMT_X8R8G8B8)
        rowSize = rowSize / 3 * 4;
    // Perform conversion to RGB / RGBA as necessary
    switch (format_)
    {
    default:
        for (int i = 0; i < height; ++i)
        {
            unsigned char* src = (unsigned char*)d3dLockedRect.pBits + i * d3dLockedRect.Pitch;
            memcpy(destPtr, src, rowSize);
            destPtr += rowSize;
        }
        break;
    case D3DFMT_X8R8G8B8:
        // BGRX source -> RGB destination (padding byte skipped.)
        for (int i = 0; i < height; ++i)
        {
            unsigned char* src = (unsigned char*)d3dLockedRect.pBits + i * d3dLockedRect.Pitch;
            for (int j = 0; j < levelWidth; ++j)
            {
                destPtr[2] = *src++;
                destPtr[1] = *src++;
                destPtr[0] = *src++;
                ++src;
                destPtr += 3;
            }
        }
        break;
    case D3DFMT_A8R8G8B8:
        // BGRA source -> RGBA destination.
        for (int i = 0; i < height; ++i)
        {
            unsigned char* src = (unsigned char*)d3dLockedRect.pBits + i * d3dLockedRect.Pitch;
            for (int j = 0; j < levelWidth; ++j)
            {
                destPtr[2] = *src++;
                destPtr[1] = *src++;
                destPtr[0] = *src++;
                destPtr[3] = *src++;
                destPtr += 4;
            }
        }
        break;
    }
    if (offscreenSurface)
        offscreenSurface->UnlockRect();
    else
        ((IDirect3DCubeTexture9*)object_.ptr_)->UnlockRect((D3DCUBEMAP_FACES)face, level);
    URHO3D_SAFE_RELEASE(offscreenSurface);
    return true;
}
bool TextureCube::Create()
{
Release();
if (!graphics_ || !width_ || !height_)
return false;
if (graphics_->IsDeviceLost())
{
URHO3D_LOGWARNING("Texture creation while device is lost");
return true;
}
GraphicsImpl* impl = graphics_->GetImpl();
unsigned pool = usage_ > TEXTURE_STATIC ? D3DPOOL_DEFAULT : D3DPOOL_MANAGED;
unsigned d3dUsage = 0;
switch (usage_)
{
case TEXTURE_DYNAMIC:
d3dUsage |= D3DUSAGE_DYNAMIC;
break;
case TEXTURE_RENDERTARGET:
d3dUsage |= D3DUSAGE_RENDERTARGET;
if (requestedLevels_ != 1)
{
// Check mipmap autogeneration support
if (impl->CheckFormatSupport((D3DFORMAT)format_, D3DUSAGE_AUTOGENMIPMAP, D3DRTYPE_TEXTURE))
{
requestedLevels_ = 0;
d3dUsage |= D3DUSAGE_AUTOGENMIPMAP;
}
else
requestedLevels_ = 1;
}
break;
default:
break;
}
if (multiSample_ > 1)
{
// Fall back to non-multisampled if unsupported multisampling mode
GraphicsImpl* impl = graphics_->GetImpl();
if (!impl->CheckMultiSampleSupport((D3DFORMAT)format_, multiSample_))
{
multiSample_ = 1;
autoResolve_ = false;
}
}
IDirect3DDevice9* device = graphics_->GetImpl()->GetDevice();
HRESULT hr = device->CreateCubeTexture(
(UINT)width_,
requestedLevels_,
d3dUsage,
(D3DFORMAT)format_,
(D3DPOOL)pool,
(IDirect3DCubeTexture9**)&object_.ptr_,
0);
if (FAILED(hr))
{
URHO3D_LOGD3DERROR("Could not create cube texture", hr);
URHO3D_SAFE_RELEASE(object_.ptr_);
return false;
}
levels_ = ((IDirect3DCubeTexture9*)object_.ptr_)->GetLevelCount();
if (usage_ == TEXTURE_RENDERTARGET)
{
for (unsigned i = 0; i < MAX_CUBEMAP_FACES; ++i)
{
if (multiSample_ > 1)
{
// Create the multisampled face rendertarget if necessary
HRESULT hr = device->CreateRenderTarget(
(UINT)width_,
(UINT)height_,
(D3DFORMAT)format_,
(D3DMULTISAMPLE_TYPE)multiSample_,
0,
FALSE,
(IDirect3DSurface9**)&renderSurfaces_[i]->surface_,
0);
if (FAILED(hr))
{
URHO3D_LOGD3DERROR("Could not create multisampled rendertarget surface", hr);
URHO3D_SAFE_RELEASE(renderSurfaces_[i]->surface_);
return false;
}
}
else
{
hr = ((IDirect3DCubeTexture9*)object_.ptr_)->GetCubeMapSurface((D3DCUBEMAP_FACES)i, 0,
(IDirect3DSurface9**)&renderSurfaces_[i]->surface_);
if (FAILED(hr))
{
URHO3D_LOGD3DERROR("Could not get rendertarget surface", hr);
URHO3D_SAFE_RELEASE(renderSurfaces_[i]->surface_);
return false;
}
}
}
}
return true;
}
}
| mit |