code stringlengths 1 25.8M | language stringclasses 18 values | source stringclasses 4 values | repo stringclasses 78 values | path stringlengths 0 268 |
|---|---|---|---|---|
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from synnefo.db import models
from snf_django.lib.api import faults
from synnefo.api.util import get_image_dict, get_vm
from synnefo.plankton import backend
from synnefo.cyclades_settings import cyclades_services, BASE_HOST
from synnefo.lib import join_urls
from synnefo.lib.services import get_service_path
def get_volume(user_id, volume_id, for_update=False,
               non_deleted=False,
               exception=faults.ItemNotFound):
    """Return the Volume with id `volume_id` owned by `user_id`.

    When `for_update` is True the row is locked with SELECT ... FOR UPDATE.
    When `non_deleted` is True a deleted volume is rejected with BadRequest.
    A missing volume raises `exception` (default faults.ItemNotFound).
    """
    try:
        vid = int(volume_id)
    except (TypeError, ValueError):
        raise faults.BadRequest("Invalid volume id: %s" % volume_id)
    queryset = models.Volume.objects
    if for_update:
        queryset = queryset.select_for_update()
    try:
        volume = queryset.get(id=vid, userid=user_id)
    except models.Volume.DoesNotExist:
        raise exception("Volume %s not found" % vid)
    if non_deleted and volume.deleted:
        raise faults.BadRequest("Volume '%s' has been deleted." % vid)
    return volume
def get_volume_type(volume_type_id, for_update=False, include_deleted=False,
                    exception=faults.ItemNotFound):
    """Return the VolumeType with id `volume_type_id`.

    Deleted volume types are excluded unless `include_deleted` is True.
    When `for_update` is True the row is locked with SELECT ... FOR UPDATE.

    Raises:
        faults.BadRequest: if `volume_type_id` is not a valid integer.
        exception: (default faults.ItemNotFound) if no matching type exists.
    """
    vtypes = models.VolumeType.objects
    if not include_deleted:
        vtypes = vtypes.filter(deleted=False)
    if for_update:
        vtypes = vtypes.select_for_update()
    try:
        vtype_id = int(volume_type_id)
    except (TypeError, ValueError):
        # Fixed copy-paste error: the message previously read "Invalid
        # volume id", which is misleading for a volume *type* lookup.
        raise faults.BadRequest("Invalid volume type id: %s" % volume_type_id)
    try:
        return vtypes.get(id=vtype_id)
    except models.VolumeType.DoesNotExist:
        raise exception("Volume type %s not found" % vtype_id)
def get_snapshot(user_id, snapshot_id, exception=faults.ItemNotFound):
    """Return the snapshot `snapshot_id` of `user_id` from Plankton.

    A missing snapshot raises `exception` (default faults.ItemNotFound).
    """
    try:
        with backend.PlanktonBackend(user_id) as pithos:
            snapshot = pithos.get_snapshot(snapshot_id)
    except faults.ItemNotFound:
        raise exception("Snapshot %s not found" % snapshot_id)
    return snapshot
def get_image(user_id, image_id, exception=faults.ItemNotFound):
    """Return the image `image_id` of `user_id` as a dict.

    A missing image raises `exception` (default faults.ItemNotFound).
    """
    try:
        image = get_image_dict(image_id, user_id)
    except faults.ItemNotFound:
        raise exception("Image %s not found" % image_id)
    else:
        return image
def get_server(user_id, server_id, for_update=False, non_deleted=False,
               exception=faults.ItemNotFound):
    """Return the (non-suspended) server `server_id` owned by `user_id`.

    Raises:
        faults.BadRequest: if `server_id` is not a valid integer.
        exception: (default faults.ItemNotFound) if the server is missing.
    """
    try:
        sid = int(server_id)
    except (TypeError, ValueError):
        raise faults.BadRequest("Invalid server id: %s" % server_id)
    try:
        return get_vm(sid, user_id, for_update=for_update,
                      non_deleted=non_deleted, non_suspended=True)
    except faults.ItemNotFound:
        raise exception("Server %s not found" % sid)
# Public base URL of the Cyclades "volume" (block storage) service, built from
# the deployment host and the registered service path for API version v2.0.
VOLUME_URL = \
    join_urls(BASE_HOST,
              get_service_path(cyclades_services, "volume", version="v2.0"))
# Collection endpoints used to build the "self"/"bookmark" links below.
VOLUMES_URL = join_urls(VOLUME_URL, "volumes/")
SNAPSHOTS_URL = join_urls(VOLUME_URL, "snapshots/")
def volume_to_links(volume_id):
    """Return the API "links" list (self/bookmark) for a volume id."""
    target = join_urls(VOLUMES_URL, str(volume_id))
    links = []
    for rel in ("self", "bookmark"):
        links.append({"rel": rel, "href": target})
    return links
def snapshot_to_links(snapshot_id):
    """Return the API "links" list (self/bookmark) for a snapshot id."""
    target = join_urls(SNAPSHOTS_URL, str(snapshot_id))
    links = []
    for rel in ("self", "bookmark"):
        links.append({"rel": rel, "href": target})
    return links
def update_snapshot_state(snapshot_id, user_id, state):
    """Update the state of a snapshot in Pithos.

    Use PithosBackend in order to update the state of the snapshots in
    Pithos DB.
    """
    with backend.PlanktonBackend(user_id) as pithos:
        return pithos.update_snapshot_state(snapshot_id, state=state)
import guestfs
import sys
import time
import json
import logging
from .FactoryUtils import launch_inspect_and_mount, qemu_convert_cmd, subprocess_check_output
from .ApplicationConfiguration import ApplicationConfiguration
from .PersistentImageManager import PersistentImageManager
from .BaseImage import BaseImage
from oz.ozutil import copyfile_sparse
class BaseImageImporter(object):
    """Imports an existing local disk image file into Image Factory as a
    BaseImage, synthesizing a minimal TDL template from guestfs inspection."""

    def __init__(self, image_file=None):
        """
        @param image_file The name of a local file to be imported as a BaseImage
        """
        self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
        self.image_file = image_file

    def _distro_to_tdl(self, i_distro, i_major_version, i_minor_version):
        """Map guestfs distro inspection data to a (TDL OS name, TDL OS
        version) pair; raises for distros we cannot import."""
        if i_distro in ['centos', 'rhel', 'scientificlinux']:
            return ("RHEL-%d" % (i_major_version), "%d" % (i_minor_version))
        elif i_distro == 'fedora':
            return ("Fedora", "%d" % (i_major_version))
        elif i_distro == 'ubuntu':
            return ("Ubuntu", "%d.%02d" % (i_major_version, i_minor_version))
        elif i_distro == 'debian':
            return ("Debian", "%d" % (i_major_version))
        else:
            raise Exception("Unsupported distro for import: %s" % (i_distro))

    def _inspect(self):
        """Inspect self.image_file with libguestfs and return the
        (TDL OS name, TDL OS version) pair for its installed OS."""
        g = launch_inspect_and_mount(self.image_file, readonly=True)
        try:
            roots = g.inspect_os()
            if not roots:
                # Previously this raised a bare IndexError on roots[0].
                raise Exception("No operating system found in image file %s" %
                                self.image_file)
            os_root = roots[0]
            i_type = g.inspect_get_type(os_root)
            i_name = g.inspect_get_product_name(os_root)
            i_distro = g.inspect_get_distro(os_root)
            i_major_version = g.inspect_get_major_version(os_root)
            i_minor_version = g.inspect_get_minor_version(os_root)
            ins_res = "guestfs inspection result - type: %s - name: %s - distro: %s - major version: %s - minor version: %s" % \
                (i_type, i_name, i_distro, i_major_version, i_minor_version)
            self.log.debug(ins_res)
            if i_type != "linux":
                raise Exception("Can only import Linux distros into Factory at the moment")
            return self._distro_to_tdl(i_distro, i_major_version, i_minor_version)
        finally:
            # Always release the libguestfs appliance; the original leaked it.
            g.close()

    def do_import(self):
        """
        Import file as a base_image and return the resulting BaseImage object

        Raises an Exception if the image contains no recognizable Linux OS or
        if the conversion to the final on-disk format fails.
        """
        (tdl_os_name, tdl_os_version) = self._inspect()
        ftime = time.strftime("%Y-%m-%d--%H:%M:%S", time.localtime())
        tname = "%s-%s-import-%s" % (tdl_os_name, tdl_os_version, ftime)
        tdl_template = """<template>
  <name>%s</name>
  <os>
    <name>%s</name>
    <version>%s</version>
    <arch>x86_64</arch>
    <install type='url'>
      <url>http://foo.com/imported/image/do/not/use/url</url>
    </install>
  </os>
  <description>image imported on %s</description>
</template>
""" % (tname, tdl_os_name, tdl_os_version, ftime)
        pim = PersistentImageManager.default_manager()
        base_image = BaseImage()
        pim.add_image(base_image)
        base_image.template = tdl_template
        # The input image can be in any format that libguestfs understands
        # Here we convert it to qcow2 - If it is already in qcow2 this is benign
        # and in some cases can tidy up and serialize it
        self.log.debug("Converting and saving input file %s to final data location %s" %
                       (self.image_file, base_image.data))
        cmd = qemu_convert_cmd(self.image_file, base_image.data)
        (stdout, stderr, retcode) = subprocess_check_output(cmd)
        # NOTE(review): assuming subprocess_check_output does not itself raise
        # on failure (it returns retcode); a failed convert previously produced
        # a silently-corrupt COMPLETE image.
        if retcode:
            raise Exception("qemu-img convert failed (rc=%s): %s" % (retcode, stderr))
        base_image.status = "COMPLETE"
        base_image.percent_complete = 100
        pim.save_image(base_image)
        return base_image
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\DependencyInjection\Loader\Configurator;
use Symfony\Bundle\SecurityBundle\Security\UserAuthenticator;
use Symfony\Component\DependencyInjection\ServiceLocator;
use Symfony\Component\Security\Http\Authentication\AuthenticatorManager;
use Symfony\Component\Security\Http\Authentication\UserAuthenticatorInterface;
use Symfony\Component\Security\Http\Authenticator\FormLoginAuthenticator;
use Symfony\Component\Security\Http\Authenticator\HttpBasicAuthenticator;
use Symfony\Component\Security\Http\Authenticator\JsonLoginAuthenticator;
use Symfony\Component\Security\Http\Authenticator\RemoteUserAuthenticator;
use Symfony\Component\Security\Http\Authenticator\X509Authenticator;
use Symfony\Component\Security\Http\Event\CheckPassportEvent;
use Symfony\Component\Security\Http\EventListener\CheckCredentialsListener;
use Symfony\Component\Security\Http\EventListener\LoginThrottlingListener;
use Symfony\Component\Security\Http\EventListener\PasswordMigratingListener;
use Symfony\Component\Security\Http\EventListener\SessionStrategyListener;
use Symfony\Component\Security\Http\EventListener\UserCheckerListener;
use Symfony\Component\Security\Http\EventListener\UserProviderListener;
use Symfony\Component\Security\Http\Firewall\AuthenticatorManagerListener;
// Registers the authenticator-based security services: the central
// AuthenticatorManager, its firewall listener, the passport event listeners,
// and the abstract prototypes for each built-in authenticator. Abstract
// definitions and abstract_arg() placeholders are concretized per firewall
// by the SecurityBundle extension.
return static function (ContainerConfigurator $container) {
    $container->services()
        // Manager
        ->set('security.authenticator.manager', AuthenticatorManager::class)
            ->abstract()
            ->args([
                abstract_arg('authenticators'),
                service('security.token_storage'),
                service('event_dispatcher'),
                abstract_arg('provider key'),
                service('logger')->nullOnInvalid(),
                param('security.authentication.manager.erase_credentials'),
                param('.security.authentication.expose_security_errors'),
                abstract_arg('required badges'),
            ])
            ->tag('monolog.logger', ['channel' => 'security'])

        // Starts empty; one manager per firewall is added elsewhere.
        ->set('security.authenticator.managers_locator', ServiceLocator::class)
            ->args([[]])

        // Programmatic login entry point (UserAuthenticatorInterface).
        ->set('security.user_authenticator', UserAuthenticator::class)
            ->args([
                service('security.firewall.map'),
                service('security.authenticator.managers_locator'),
                service('request_stack'),
            ])
        ->alias(UserAuthenticatorInterface::class, 'security.user_authenticator')

        ->set('security.firewall.authenticator', AuthenticatorManagerListener::class)
            ->abstract()
            ->args([
                abstract_arg('authenticator manager'),
            ])

        // Listeners
        ->set('security.listener.check_authenticator_credentials', CheckCredentialsListener::class)
            ->args([
                service('security.password_hasher_factory'),
            ])
            ->tag('kernel.event_subscriber')

        ->set('security.listener.user_provider', UserProviderListener::class)
            ->args([
                service('security.user_providers'),
            ])
            ->tag('kernel.event_listener', ['event' => CheckPassportEvent::class, 'priority' => 1024, 'method' => 'checkPassport'])

        ->set('security.listener.user_provider.abstract', UserProviderListener::class)
            ->abstract()
            ->args([
                abstract_arg('user provider'),
            ])

        ->set('security.listener.password_migrating', PasswordMigratingListener::class)
            ->args([
                service('security.password_hasher_factory'),
            ])
            ->tag('kernel.event_subscriber')

        ->set('security.listener.user_checker', UserCheckerListener::class)
            ->abstract()
            ->args([
                abstract_arg('user checker'),
            ])

        ->set('security.listener.session', SessionStrategyListener::class)
            ->abstract()
            ->args([
                service('security.authentication.session_strategy'),
            ])

        ->set('security.listener.login_throttling', LoginThrottlingListener::class)
            ->abstract()
            ->args([
                service('request_stack'),
                abstract_arg('request rate limiter'),
            ])

        // Authenticators
        ->set('security.authenticator.http_basic', HttpBasicAuthenticator::class)
            ->abstract()
            ->args([
                abstract_arg('realm name'),
                abstract_arg('user provider'),
                service('logger')->nullOnInvalid(),
            ])
            ->tag('monolog.logger', ['channel' => 'security'])

        ->set('security.authenticator.form_login', FormLoginAuthenticator::class)
            ->abstract()
            ->args([
                service('security.http_utils'),
                abstract_arg('user provider'),
                abstract_arg('authentication success handler'),
                abstract_arg('authentication failure handler'),
                abstract_arg('options'),
            ])

        ->set('security.authenticator.json_login', JsonLoginAuthenticator::class)
            ->abstract()
            ->args([
                service('security.http_utils'),
                abstract_arg('user provider'),
                abstract_arg('authentication success handler'),
                abstract_arg('authentication failure handler'),
                abstract_arg('options'),
                service('property_accessor')->nullOnInvalid(),
            ])
            ->call('setTranslator', [service('translator')->ignoreOnInvalid()])

        ->set('security.authenticator.x509', X509Authenticator::class)
            ->abstract()
            ->args([
                abstract_arg('user provider'),
                service('security.token_storage'),
                abstract_arg('firewall name'),
                abstract_arg('user key'),
                abstract_arg('credentials key'),
                service('logger')->nullOnInvalid(),
                abstract_arg('credentials user identifier'),
            ])
            ->tag('monolog.logger', ['channel' => 'security'])

        ->set('security.authenticator.remote_user', RemoteUserAuthenticator::class)
            ->abstract()
            ->args([
                abstract_arg('user provider'),
                service('security.token_storage'),
                abstract_arg('firewall name'),
                abstract_arg('user key'),
                service('logger')->nullOnInvalid(),
            ])
            ->tag('monolog.logger', ['channel' => 'security'])
    ;
};
package kotlinx.coroutines.rx3
import kotlinx.coroutines.testing.*
import io.reactivex.rxjava3.core.*
import io.reactivex.rxjava3.disposables.*
import io.reactivex.rxjava3.plugins.*
import io.reactivex.rxjava3.schedulers.*
import kotlinx.coroutines.*
import kotlinx.coroutines.sync.*
import org.junit.*
import org.junit.Test
import java.lang.Runnable
import java.util.concurrent.*
import java.util.concurrent.atomic.AtomicReference
import kotlin.coroutines.*
import kotlin.test.*
/**
 * Tests for the interop between [CoroutineDispatcher] and RxJava 3 [Scheduler]
 * ([asCoroutineDispatcher] / [asScheduler]): threading, delays, cancellation,
 * worker disposal, scheduler shutdown, and RxJavaPlugins hook integration.
 */
class SchedulerTest : TestBase() {
    @Before
    fun setup() {
        ignoreLostThreads("RxCachedThreadScheduler-", "RxCachedWorkerPoolEvictor-", "RxSchedulerPurge-")
    }

    @Test
    fun testIoScheduler(): Unit = runTest {
        expect(1)
        val mainThread = Thread.currentThread()
        withContext(Schedulers.io().asCoroutineDispatcher()) {
            val t1 = Thread.currentThread()
            assertNotSame(t1, mainThread)
            expect(2)
            delay(100)
            val t2 = Thread.currentThread()
            assertNotSame(t2, mainThread)
            expect(3)
        }
        finish(4)
    }

    /** Tests [toString] implementations of [CoroutineDispatcher.asScheduler] and its [Scheduler.Worker]. */
    @Test
    fun testSchedulerToString() {
        val name = "Dispatchers.Default"
        val scheduler = Dispatchers.Default.asScheduler()
        assertContains(scheduler.toString(), name)
        val worker = scheduler.createWorker()
        val activeWorkerName = worker.toString()
        assertContains(worker.toString(), name)
        worker.dispose()
        val disposedWorkerName = worker.toString()
        assertNotEquals(activeWorkerName, disposedWorkerName)
    }

    // Runs `action` against a scheduler backed by a fresh fixed thread pool and
    // rethrows any error that was reported through the RxJavaPlugins error handler.
    private fun runSchedulerTest(nThreads: Int = 1, action: (Scheduler) -> Unit) {
        val future = CompletableFuture<Unit>()
        try {
            newFixedThreadPoolContext(nThreads, "test").use { dispatcher ->
                RxJavaPlugins.setErrorHandler {
                    if (!future.completeExceptionally(it)) {
                        handleUndeliverableException(it, dispatcher)
                    }
                }
                action(dispatcher.asScheduler())
            }
        } finally {
            RxJavaPlugins.setErrorHandler(null)
        }
        future.complete(Unit)
        future.getNow(Unit) // rethrow any encountered errors
    }

    // Checks that both the delayed and the undelayed scheduling paths run off the
    // caller thread, and on a consistent worker thread.
    private fun ensureSeparateThread(schedule: (Runnable, Long, TimeUnit) -> Unit, scheduleNoDelay: (Runnable) -> Unit) {
        val mainThread = Thread.currentThread()
        val cdl1 = CountDownLatch(1)
        val cdl2 = CountDownLatch(1)
        expect(1)
        val thread = AtomicReference<Thread?>(null)
        fun checkThread() {
            val current = Thread.currentThread()
            thread.getAndSet(current)?.let { assertEquals(it, current) }
        }
        schedule({
            assertNotSame(mainThread, Thread.currentThread())
            checkThread()
            cdl2.countDown()
        }, 300, TimeUnit.MILLISECONDS)
        scheduleNoDelay {
            expect(2)
            checkThread()
            assertNotSame(mainThread, Thread.currentThread())
            cdl1.countDown()
        }
        cdl1.await()
        cdl2.await()
        finish(3)
    }

    /**
     * Tests [Scheduler.scheduleDirect] for [CoroutineDispatcher.asScheduler] on a single-threaded dispatcher.
     */
    @Test
    fun testSingleThreadedDispatcherDirect(): Unit = runSchedulerTest(1) {
        ensureSeparateThread(it::scheduleDirect, it::scheduleDirect)
    }

    /**
     * Tests [Scheduler.Worker.schedule] for [CoroutineDispatcher.asScheduler] running its tasks on the correct thread.
     */
    @Test
    fun testSingleThreadedWorker(): Unit = runSchedulerTest(1) {
        val worker = it.createWorker()
        ensureSeparateThread(worker::schedule, worker::schedule)
    }

    private fun checkCancelling(schedule: (Runnable, Long, TimeUnit) -> Disposable) {
        // cancel the task before it has a chance to run.
        val handle1 = schedule({
            throw IllegalStateException("should have been successfully cancelled")
        }, 10_000, TimeUnit.MILLISECONDS)
        handle1.dispose()
        // cancel the task after it started running.
        val cdl1 = CountDownLatch(1)
        val cdl2 = CountDownLatch(1)
        val handle2 = schedule({
            cdl1.countDown()
            cdl2.await()
            if (Thread.interrupted())
                throw IllegalStateException("cancelling the task should not interrupt the thread")
        }, 100, TimeUnit.MILLISECONDS)
        cdl1.await()
        handle2.dispose()
        cdl2.countDown()
    }

    /**
     * Test cancelling [Scheduler.scheduleDirect] for [CoroutineDispatcher.asScheduler].
     */
    @Test
    fun testCancellingDirect(): Unit = runSchedulerTest {
        checkCancelling(it::scheduleDirect)
    }

    /**
     * Test cancelling [Scheduler.Worker.schedule] for [CoroutineDispatcher.asScheduler].
     */
    @Test
    fun testCancellingWorker(): Unit = runSchedulerTest {
        val worker = it.createWorker()
        checkCancelling(worker::schedule)
    }

    /**
     * Test shutting down [CoroutineDispatcher.asScheduler].
     */
    @Test
    fun testShuttingDown() {
        val n = 5
        runSchedulerTest(nThreads = n) { scheduler ->
            val cdl1 = CountDownLatch(n)
            val cdl2 = CountDownLatch(1)
            val cdl3 = CountDownLatch(n)
            repeat(n) {
                scheduler.scheduleDirect {
                    cdl1.countDown()
                    try {
                        cdl2.await()
                    } catch (e: InterruptedException) {
                        // this is the expected outcome
                        cdl3.countDown()
                    }
                }
            }
            cdl1.await()
            scheduler.shutdown()
            if (!cdl3.await(1, TimeUnit.SECONDS)) {
                cdl2.countDown()
                error("the tasks were not cancelled when the scheduler was shut down")
            }
        }
    }

    /** Tests that there are no uncaught exceptions if [Disposable.dispose] on a worker happens when tasks are present. */
    @Test
    fun testDisposingWorker() = runTest {
        val dispatcher = currentDispatcher() as CoroutineDispatcher
        val scheduler = dispatcher.asScheduler()
        val worker = scheduler.createWorker()
        yield() // so that the worker starts waiting on the channel
        assertFalse(worker.isDisposed)
        worker.dispose()
        assertTrue(worker.isDisposed)
    }

    /** Tests trying to use a [Scheduler.Worker]/[Scheduler] after [Scheduler.Worker.dispose]/[Scheduler.shutdown]. */
    @Test
    fun testSchedulingAfterDisposing() = runSchedulerTest {
        expect(1)
        val worker = it.createWorker()
        // use CDL to ensure that the worker has properly initialized
        val cdl1 = CountDownLatch(1)
        setScheduler(2, 3)
        val disposable1 = worker.schedule {
            cdl1.countDown()
        }
        cdl1.await()
        expect(4)
        assertFalse(disposable1.isDisposed)
        setScheduler(6, -1)
        // check that the worker automatically disposes of the tasks after being disposed
        assertFalse(worker.isDisposed)
        worker.dispose()
        assertTrue(worker.isDisposed)
        expect(5)
        val disposable2 = worker.schedule {
            expectUnreached()
        }
        assertTrue(disposable2.isDisposed)
        setScheduler(7, 8)
        // ensure that the scheduler still works
        val cdl2 = CountDownLatch(1)
        val disposable3 = it.scheduleDirect {
            cdl2.countDown()
        }
        cdl2.await()
        expect(9)
        assertFalse(disposable3.isDisposed)
        // check that the scheduler automatically disposes of the tasks after being shut down
        it.shutdown()
        setScheduler(10, -1)
        val disposable4 = it.scheduleDirect {
            expectUnreached()
        }
        assertTrue(disposable4.isDisposed)
        RxJavaPlugins.setScheduleHandler(null)
        finish(11)
    }

    @Test
    fun testSchedulerWithNoDelay(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableWithNoDelay(scheduler::scheduleDirect)
    }

    @Test
    fun testSchedulerWorkerWithNoDelay(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableWithNoDelay(scheduler.createWorker()::schedule)
    }

    private suspend fun testRunnableWithNoDelay(block: RxSchedulerBlockNoDelay) {
        expect(1)
        suspendCancellableCoroutine<Unit> {
            block(Runnable {
                expect(2)
                it.resume(Unit)
            })
        }
        yield()
        finish(3)
    }

    @Test
    fun testSchedulerWithDelay(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableWithDelay(scheduler::scheduleDirect, 300)
    }

    @Test
    fun testSchedulerWorkerWithDelay(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableWithDelay(scheduler.createWorker()::schedule, 300)
    }

    @Test
    fun testSchedulerWithZeroDelay(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableWithDelay(scheduler::scheduleDirect)
    }

    @Test
    fun testSchedulerWorkerWithZeroDelay(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableWithDelay(scheduler.createWorker()::schedule)
    }

    private suspend fun testRunnableWithDelay(block: RxSchedulerBlockWithDelay, delayMillis: Long = 0) {
        expect(1)
        suspendCancellableCoroutine<Unit> {
            block({
                expect(2)
                it.resume(Unit)
            }, delayMillis, TimeUnit.MILLISECONDS)
        }
        finish(3)
    }

    @Test
    fun testAsSchedulerWithNegativeDelay(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableWithDelay(scheduler::scheduleDirect, -1)
    }

    @Test
    fun testAsSchedulerWorkerWithNegativeDelay(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableWithDelay(scheduler.createWorker()::schedule, -1)
    }

    @Test
    fun testSchedulerImmediateDispose(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableImmediateDispose(scheduler::scheduleDirect)
    }

    @Test
    fun testSchedulerWorkerImmediateDispose(): Unit = runTest {
        val scheduler = (currentDispatcher() as CoroutineDispatcher).asScheduler()
        testRunnableImmediateDispose(scheduler.createWorker()::schedule)
    }

    // A task disposed immediately after submission must never run.
    private fun testRunnableImmediateDispose(block: RxSchedulerBlockNoDelay) {
        val disposable = block {
            expectUnreached()
        }
        disposable.dispose()
    }

    @Test
    fun testConvertDispatcherToOriginalScheduler(): Unit = runTest {
        val originalScheduler = Schedulers.io()
        val dispatcher = originalScheduler.asCoroutineDispatcher()
        val scheduler = dispatcher.asScheduler()
        assertSame(originalScheduler, scheduler)
    }

    @Test
    fun testConvertSchedulerToOriginalDispatcher(): Unit = runTest {
        val originalDispatcher = currentDispatcher() as CoroutineDispatcher
        val scheduler = originalDispatcher.asScheduler()
        val dispatcher = scheduler.asCoroutineDispatcher()
        assertSame(originalDispatcher, dispatcher)
    }

    @Test
    fun testSchedulerExpectRxPluginsCall(): Unit = runTest {
        val dispatcher = currentDispatcher() as CoroutineDispatcher
        val scheduler = dispatcher.asScheduler()
        testRunnableExpectRxPluginsCall(scheduler::scheduleDirect)
    }

    @Test
    fun testSchedulerWorkerExpectRxPluginsCall(): Unit = runTest {
        val dispatcher = currentDispatcher() as CoroutineDispatcher
        val scheduler = dispatcher.asScheduler()
        testRunnableExpectRxPluginsCall(scheduler.createWorker()::schedule)
    }

    private suspend fun testRunnableExpectRxPluginsCall(block: RxSchedulerBlockNoDelay) {
        expect(1)
        setScheduler(2, 4)
        suspendCancellableCoroutine<Unit> {
            block(Runnable {
                expect(5)
                it.resume(Unit)
            })
            expect(3)
        }
        RxJavaPlugins.setScheduleHandler(null)
        finish(6)
    }

    @Test
    fun testSchedulerExpectRxPluginsCallWithDelay(): Unit = runTest {
        val dispatcher = currentDispatcher() as CoroutineDispatcher
        val scheduler = dispatcher.asScheduler()
        testRunnableExpectRxPluginsCallDelay(scheduler::scheduleDirect)
    }

    @Test
    fun testSchedulerWorkerExpectRxPluginsCallWithDelay(): Unit = runTest {
        val dispatcher = currentDispatcher() as CoroutineDispatcher
        val scheduler = dispatcher.asScheduler()
        val worker = scheduler.createWorker()
        testRunnableExpectRxPluginsCallDelay(worker::schedule)
    }

    private suspend fun testRunnableExpectRxPluginsCallDelay(block: RxSchedulerBlockWithDelay) {
        expect(1)
        setScheduler(2, 4)
        suspendCancellableCoroutine<Unit> {
            block({
                expect(5)
                it.resume(Unit)
            }, 10, TimeUnit.MILLISECONDS)
            expect(3)
        }
        RxJavaPlugins.setScheduleHandler(null)
        finish(6)
    }

    // Installs an RxJavaPlugins schedule handler that checks the expect() counter
    // both when a task is wrapped and when it actually runs (-1 = never runs).
    private fun setScheduler(expectedCountOnSchedule: Int, expectCountOnRun: Int) {
        RxJavaPlugins.setScheduleHandler {
            expect(expectedCountOnSchedule)
            Runnable {
                expect(expectCountOnRun)
                it.run()
            }
        }
    }

    /**
     * Tests that [Scheduler.Worker] runs all work sequentially.
     */
    @Test
    fun testWorkerSequentialOrdering() = runTest {
        expect(1)
        val scheduler = Dispatchers.Default.asScheduler()
        val worker = scheduler.createWorker()
        val iterations = 100
        for (i in 0..iterations) {
            worker.schedule {
                expect(2 + i)
            }
        }
        suspendCoroutine<Unit> {
            worker.schedule {
                it.resume(Unit)
            }
        }
        finish((iterations + 2) + 1)
    }

    /**
     * Test that ensures that delays are actually respected (tasks scheduled sooner in the future run before tasks scheduled later,
     * even when the later task is submitted before the earlier one)
     */
    @Test
    fun testSchedulerRespectsDelays(): Unit = runTest {
        val scheduler = Dispatchers.Default.asScheduler()
        testRunnableRespectsDelays(scheduler::scheduleDirect)
    }

    @Test
    fun testSchedulerWorkerRespectsDelays(): Unit = runTest {
        val scheduler = Dispatchers.Default.asScheduler()
        testRunnableRespectsDelays(scheduler.createWorker()::schedule)
    }

    private suspend fun testRunnableRespectsDelays(block: RxSchedulerBlockWithDelay) {
        expect(1)
        val semaphore = Semaphore(2, 2)
        block({
            expect(3)
            semaphore.release()
        }, 100, TimeUnit.MILLISECONDS)
        block({
            expect(2)
            semaphore.release()
        }, 1, TimeUnit.MILLISECONDS)
        semaphore.acquire()
        semaphore.acquire()
        finish(4)
    }

    /**
     * Tests that cancelling a runnable in one worker doesn't affect work in another scheduler.
     *
     * This is part of expected behavior documented.
     */
    @Test
    fun testMultipleWorkerCancellation(): Unit = runTest {
        expect(1)
        val dispatcher = currentDispatcher() as CoroutineDispatcher
        val scheduler = dispatcher.asScheduler()
        suspendCancellableCoroutine<Unit> {
            val workerOne = scheduler.createWorker()
            workerOne.schedule({
                expect(3)
                it.resume(Unit)
            }, 50, TimeUnit.MILLISECONDS)
            val workerTwo = scheduler.createWorker()
            workerTwo.schedule({
                expectUnreached()
            }, 1000, TimeUnit.MILLISECONDS)
            workerTwo.dispose()
            expect(2)
        }
        finish(4)
    }
}
/** Shape of the undelayed scheduling entry points ([Scheduler.scheduleDirect] / [Scheduler.Worker.schedule]). */
typealias RxSchedulerBlockNoDelay = (Runnable) -> Disposable
/** Shape of the delayed scheduling entry points (runnable, delay, unit). */
typealias RxSchedulerBlockWithDelay = (Runnable, Long, TimeUnit) -> Disposable
export default (()=>{
useLayoutEffect({
"useLayoutEffect": ()=>{}
}["useLayoutEffect"]);
useEffect({
"useEffect": ()=>{}
}["useEffect"]);
const onClick = useCallback({
"useCallback[onClick]": ()=>[]
}["useCallback[onClick]"]);
const computed = useMemo({
"useMemo[computed]": ()=>{}
}["useMemo[computed]"]);
}); | javascript | github | https://github.com/vercel/next.js | crates/next-custom-transforms/tests/fixture/debug-fn-name/export-default-expr/output.js |
import {
Debug,
noop,
Performance,
PerformanceHooks,
sys,
System,
timestamp,
tryGetNativePerformanceHooks,
} from "./_namespaces/ts.js";
/** Performance measurements for the compiler. */

// NOTE: declared global is injected by ts-perf to monitor profiler marks to generate heap snapshots.
declare let onProfilerEvent: ((eventName: string) => void) | undefined;

// Native performance hooks, fetched lazily on the first call to enable().
let perfHooks: PerformanceHooks | undefined;
// when set, indicates the implementation of `Performance` to use for user timing.
// when unset, indicates user timing is unavailable or disabled.
let performanceImpl: Performance | undefined;
/** @internal */
export interface Timer {
    // Begin a (possibly nested) timed section.
    enter(): void;
    // End a section; the outermost exit records the measurement.
    exit(): void;
}
/**
 * Creates a {@link Timer} for the given measure/marks, or the shared no-op
 * timer when `condition` is false.
 *
 * @internal
 */
export function createTimerIf(condition: boolean, measureName: string, startMarkName: string, endMarkName: string): Timer {
    if (!condition) {
        return nullTimer;
    }
    return createTimer(measureName, startMarkName, endMarkName);
}
/**
 * Creates a reentrant {@link Timer}: only the outermost enter/exit pair emits
 * the start/end marks and records the measurement.
 *
 * @internal
 */
export function createTimer(measureName: string, startMarkName: string, endMarkName: string): Timer {
    // Depth of nested enter() calls.
    let depth = 0;
    const enter = () => {
        depth++;
        if (depth === 1) {
            mark(startMarkName);
        }
    };
    const exit = () => {
        depth--;
        if (depth === 0) {
            mark(endMarkName);
            measure(measureName, startMarkName, endMarkName);
        }
        else if (depth < 0) {
            Debug.fail("enter/exit count does not match.");
        }
    };
    return { enter, exit };
}
/** A timer that does nothing; used when timing is disabled. @internal */
export const nullTimer: Timer = { enter: noop, exit: noop };

// Whether user-timing collection is active (toggled by enable()/disable()).
let enabled = false;
// Baseline used by measure() when no explicit start mark is given.
let timeorigin = timestamp();
// markName -> timestamp of the most recent mark with that name.
const marks = new Map<string, number>();
// markName -> number of times the mark has been hit.
const counts = new Map<string, number>();
// measureName -> accumulated duration across all measurements with that name.
const durations = new Map<string, number>();
/**
* Marks a performance event.
*
* @param markName The name of the mark.
*
* @internal
*/
export function mark(markName: string): void {
if (enabled) {
const count = counts.get(markName) ?? 0;
counts.set(markName, count + 1);
marks.set(markName, timestamp());
performanceImpl?.mark(markName);
if (typeof onProfilerEvent === "function") {
onProfilerEvent(markName);
}
}
}
/**
 * Adds a performance measurement with the specified name.
 *
 * @param measureName The name of the performance measurement.
 * @param startMarkName The name of the starting mark. If not supplied, the point at which the
 *      profiler was enabled is used.
 * @param endMarkName The name of the ending mark. If not supplied, the current timestamp is
 *      used.
 *
 * @internal
 */
export function measure(measureName: string, startMarkName?: string, endMarkName?: string): void {
    if (!enabled) {
        return;
    }
    let endTime = endMarkName !== undefined ? marks.get(endMarkName) : undefined;
    if (endTime === undefined) endTime = timestamp();
    let startTime = startMarkName !== undefined ? marks.get(startMarkName) : undefined;
    if (startTime === undefined) startTime = timeorigin;
    // Accumulate into any prior measurements with the same name.
    durations.set(measureName, (durations.get(measureName) || 0) + (endTime - startTime));
    performanceImpl?.measure(measureName, startMarkName, endMarkName);
}
/**
* Gets the number of times a marker was encountered.
*
* @param markName The name of the mark.
*
* @internal
*/
export function getCount(markName: string): number {
return counts.get(markName) || 0;
}
/**
* Gets the total duration of all measurements with the supplied name.
*
* @param measureName The name of the measure whose durations should be accumulated.
*
* @internal
*/
export function getDuration(measureName: string): number {
return durations.get(measureName) || 0;
}
/**
* Iterate over each measure, performing some action
*
* @param cb The action to perform for each measure
*
* @internal
*/
export function forEachMeasure(cb: (measureName: string, duration: number) => void): void {
durations.forEach((duration, measureName) => cb(measureName, duration));
}
/** Invokes `cb` once for every recorded mark name. @internal */
export function forEachMark(cb: (markName: string) => void): void {
    for (const markName of marks.keys()) {
        cb(markName);
    }
}
/** Clears one named measure, or all measures when no name is given. @internal */
export function clearMeasures(name?: string): void {
    if (name === undefined) {
        durations.clear();
    }
    else {
        durations.delete(name);
    }
    performanceImpl?.clearMeasures(name);
}
/** Clears one named mark (and its count), or all marks when no name is given. @internal */
export function clearMarks(name?: string): void {
    if (name === undefined) {
        counts.clear();
        marks.clear();
    }
    else {
        counts.delete(name);
        marks.delete(name);
    }
    performanceImpl?.clearMarks(name);
}
/**
 * Indicates whether performance measurement collection is currently enabled.
 *
 * @internal
 */
export function isEnabled(): boolean {
    return enabled;
}
/**
 * Enables (and resets) performance measurements for the compiler.
 * Always returns `true`.
 *
 * @internal
 */
export function enable(system: System = sys): boolean {
    // Added the explicit `: boolean` return type for consistency with the
    // other exported functions in this file.
    if (!enabled) {
        enabled = true;
        perfHooks ||= tryGetNativePerformanceHooks();
        if (perfHooks?.performance) {
            timeorigin = perfHooks.performance.timeOrigin;
            // NodeJS's Web Performance API is currently slower than expected, but we'd still like
            // to be able to leverage native trace events when node is run with either `--cpu-prof`
            // or `--prof`, if we're running with our own `--generateCpuProfile` flag, or when
            // running in debug mode (since its possible to generate a cpu profile while debugging).
            if (perfHooks.shouldWriteNativeEvents || system?.cpuProfilingEnabled?.() || system?.debugMode) {
                performanceImpl = perfHooks.performance;
            }
        }
    }
    return true;
}
/**
* Disables performance measurements for the compiler.
*
* @internal
*/
export function disable(): void {
if (enabled) {
marks.clear();
counts.clear();
durations.clear();
performanceImpl = undefined;
enabled = false;
}
} | typescript | github | https://github.com/microsoft/TypeScript | src/compiler/performance.ts |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013-2014, Epic Games, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: zabbix_hostmacro
short_description: Create/update/delete Zabbix host macros
description:
- manages Zabbix host macros, it can create, update or delete them.
version_added: "2.0"
author:
- "Cove (@cove)"
- Dean Hailin Song (!UNKNOWN)
requirements:
- "python >= 2.6"
- "zabbix-api >= 0.5.3"
options:
host_name:
description:
- Name of the host.
required: true
macro_name:
description:
- Name of the host macro without the enclosing curly braces and the leading dollar sign.
required: true
macro_value:
description:
- Value of the host macro.
required: true
state:
description:
- State of the macro.
- On C(present), it will create if macro does not exist or update the macro if the associated data is different.
- On C(absent) will remove a macro if it exists.
required: false
choices: ['present', 'absent']
default: "present"
force:
description:
- Only updates an existing macro if set to C(yes).
default: 'yes'
type: bool
version_added: 2.5
extends_documentation_fragment:
- zabbix
'''
EXAMPLES = '''
- name: Create a new host macro or update an existing macro's value
local_action:
module: zabbix_hostmacro
server_url: http://monitor.example.com
login_user: username
login_password: password
host_name: ExampleHost
macro_name: EXAMPLE.MACRO
macro_value: Example value
state: present
'''
import atexit
import traceback
try:
from zabbix_api import ZabbixAPI
HAS_ZABBIX_API = True
except ImportError:
ZBX_IMP_ERR = traceback.format_exc()
HAS_ZABBIX_API = False
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
class HostMacro(object):
    """Thin wrapper around the Zabbix API for managing host-level user macros."""

    def __init__(self, module, zbx):
        self._module = module
        self._zapi = zbx

    # get host id by host name
    def get_host_id(self, host_name):
        """Return the host id for *host_name*, failing the module if absent."""
        try:
            host_list = self._zapi.host.get({'output': 'extend', 'filter': {'host': host_name}})
            if len(host_list) < 1:
                self._module.fail_json(msg="Host not found: %s" % host_name)
            else:
                host_id = host_list[0]['hostid']
                return host_id
        except Exception as e:
            self._module.fail_json(msg="Failed to get the host %s id: %s." % (host_name, e))

    # get host macro
    def get_host_macro(self, macro_name, host_id):
        """Return the macro object for *macro_name* on *host_id*, or None if it does not exist."""
        try:
            # FIX: dropped the bogus "selectSteps": "extend" parameter; it is not
            # part of the usermacro.get API (it belongs to other Zabbix methods)
            # and was silently ignored or rejected depending on server version.
            host_macro_list = self._zapi.usermacro.get(
                {"output": "extend", 'hostids': [host_id], 'filter': {'macro': '{$' + macro_name + '}'}})
            if len(host_macro_list) > 0:
                return host_macro_list[0]
            return None
        except Exception as e:
            self._module.fail_json(msg="Failed to get host macro %s: %s" % (macro_name, e))

    # create host macro
    def create_host_macro(self, macro_name, macro_value, host_id):
        """Create the macro on the host and exit the module reporting the change."""
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.create({'hostid': host_id, 'macro': '{$' + macro_name + '}', 'value': macro_value})
            self._module.exit_json(changed=True, result="Successfully added host macro %s" % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to create host macro %s: %s" % (macro_name, e))

    # update host macro
    def update_host_macro(self, host_macro_obj, macro_name, macro_value):
        """Update the macro's value, exiting unchanged when it already matches."""
        host_macro_id = host_macro_obj['hostmacroid']
        if host_macro_obj['macro'] == '{$' + macro_name + '}' and host_macro_obj['value'] == macro_value:
            self._module.exit_json(changed=False, result="Host macro %s already up to date" % macro_name)
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.update({'hostmacroid': host_macro_id, 'value': macro_value})
            self._module.exit_json(changed=True, result="Successfully updated host macro %s" % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to update host macro %s: %s" % (macro_name, e))

    # delete host macro
    def delete_host_macro(self, host_macro_obj, macro_name):
        """Delete the macro from the host and exit the module reporting the change."""
        host_macro_id = host_macro_obj['hostmacroid']
        try:
            if self._module.check_mode:
                self._module.exit_json(changed=True)
            self._zapi.usermacro.delete([host_macro_id])
            self._module.exit_json(changed=True, result="Successfully deleted host macro %s" % macro_name)
        except Exception as e:
            self._module.fail_json(msg="Failed to delete host macro %s: %s" % (macro_name, e))
def main():
    """Module entry point: parse arguments, log in to Zabbix and reconcile the host macro."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(type='str', required=True, aliases=['url']),
            login_user=dict(type='str', required=True),
            login_password=dict(type='str', required=True, no_log=True),
            http_login_user=dict(type='str', required=False, default=None),
            http_login_password=dict(type='str', required=False, default=None, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
            host_name=dict(type='str', required=True),
            macro_name=dict(type='str', required=True),
            macro_value=dict(type='str', required=True),
            state=dict(default="present", choices=['present', 'absent']),
            timeout=dict(type='int', default=10),
            force=dict(type='bool', default=True)
        ),
        supports_check_mode=True
    )

    # Fail early with install guidance when the zabbix-api package is missing.
    if not HAS_ZABBIX_API:
        module.fail_json(msg=missing_required_lib('zabbix-api', url='https://pypi.org/project/zabbix-api/'), exception=ZBX_IMP_ERR)

    server_url = module.params['server_url']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    http_login_user = module.params['http_login_user']
    http_login_password = module.params['http_login_password']
    validate_certs = module.params['validate_certs']
    host_name = module.params['host_name']
    macro_name = (module.params['macro_name'])
    macro_value = module.params['macro_value']
    state = module.params['state']
    timeout = module.params['timeout']
    force = module.params['force']

    # Zabbix macro names are upper-case by convention. For context macros
    # ({$NAME:context}) only the part before the first colon is upper-cased;
    # the context part keeps its original case.
    if ':' in macro_name:
        macro_name = ':'.join([macro_name.split(':')[0].upper(), ':'.join(macro_name.split(':')[1:])])
    else:
        macro_name = macro_name.upper()

    zbx = None
    # login to zabbix
    try:
        zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password,
                        validate_certs=validate_certs)
        zbx.login(login_user, login_password)
        # Log out at interpreter exit (exit_json/fail_json call sys.exit).
        atexit.register(zbx.logout)
    except Exception as e:
        module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)

    host_macro_class_obj = HostMacro(module, zbx)

    if host_name:
        host_id = host_macro_class_obj.get_host_id(host_name)
        host_macro_obj = host_macro_class_obj.get_host_macro(macro_name, host_id)

    # Each HostMacro method below terminates the module via exit_json/fail_json.
    if state == 'absent':
        if not host_macro_obj:
            module.exit_json(changed=False, msg="Host Macro %s does not exist" % macro_name)
        else:
            # delete a macro
            host_macro_class_obj.delete_host_macro(host_macro_obj, macro_name)
    else:
        if not host_macro_obj:
            # create host macro
            host_macro_class_obj.create_host_macro(macro_name, macro_value, host_id)
        elif force:
            # update host macro
            host_macro_class_obj.update_host_macro(host_macro_obj, macro_name, macro_value)
        else:
            module.exit_json(changed=False, result="Host macro %s already exists and force is set to no" % macro_name)


if __name__ == '__main__':
    main()
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# Author: echel0n <sickrage.tv@gmail.com>
# URL: http://www.github.com/sickragetv/sickrage/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
from __future__ import unicode_literals
import os.path
import sys
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import unittest
from tests import SiCKRAGETestCase
import certifi
import requests
import sickbeard.providers as providers
from sickrage.helper.exceptions import ex
# Empty test-case container; one test method per provider is attached below.
class SNI_Tests(SiCKRAGETestCase): pass


# Probe a provider's URL over HTTPS; only report SSL errors that could be
# SNI-related, swallow everything else (timeouts, plain network failures).
def test_sni(self, provider):
    try:
        requests.head(provider.url, verify=certifi.where(), timeout=5)
    except requests.exceptions.Timeout:
        # Slow or unreachable trackers are not SNI failures; ignore.
        pass
    except requests.exceptions.SSLError as error:
        # SSL3_GET_SERVER_CERTIFICATE indicates a CA-bundle problem, not SNI,
        # so only print other SSL errors.
        if 'SSL3_GET_SERVER_CERTIFICATE' not in ex(error):
            print(error)
    except Exception:
        # Other network errors are irrelevant to this test.
        pass


# Dynamically generate one test method per provider; the default argument
# binds each provider at definition time (avoids the late-binding pitfall).
for provider in providers.sortedProviderList():
    setattr(SNI_Tests, 'test_%s' % provider.name, lambda self, x=provider: test_sni(self, x))
if __name__ == "__main__":
    # Banner, then run all dynamically generated SNI tests via unittest.
    print("==================")
    print("STARTING - SSL TESTS")
    print("==================")
    print("######################################################################")
    unittest.main()
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) Camptocamp SA
# Author: Arnaud WÃŒst
#
#
# This file is part of the c2c_report_tools module.
#
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
{
    # OpenERP addon manifest for the c2c reporting tools library.
    "name" : "c2c Reporting Tools. A library that provide a new way to create clean reports efficiently",
    "version" : "5.0",
    "author" : "Camptocamp",
    "category" : "Generic Modules/Reporting",
    # User-visible description shown in the module list (kept verbatim).
    "description": """ This module offer a growing collection of objects to create simple and advanced reports in a new way of doing.
You can create powerful reports with a few lines of python code and nothing else. (no sxw, rml or xml)
This module follow multiple goals:
- To accelerate report creation by creating reusable pieces of code (one line of code to create standard header and footer)
- To accelerate report generation (processing) by getting ride of uncecessary parsing and transformations (direct python to pdf generation)
- To improve reporting capabilities by getting ride of uncomplete parsers and limited middle technologies
- To make reports designs more uniform
For exemples of use, have a look at c2c_planning_management. Our first module based on this tool.
""",
    "website": "http://www.camptocamp.com",
    # Pure-python library: no module dependencies and no XML data files.
    "depends" : [],
    "init_xml" : [
    ],
    "data" : [
    ],
    "active": False,
    "installable": True
}
# Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2005-2008 The Regents of The University of Michigan
# Copyright (c) 2011 Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
# Rick Strong
# Andreas Hansson
import sys
from m5.defines import buildEnv
from m5.params import *
from m5.proxy import *
from Bus import CoherentBus
from InstTracer import InstTracer
from ExeTracer import ExeTracer
from MemObject import MemObject
# Tracer used when a CPU does not specify one explicitly.
default_tracer = ExeTracer()

# Pull in the TLB and interrupt-controller models matching the build-time
# target ISA; exactly one branch is active per build.
if buildEnv['TARGET_ISA'] == 'alpha':
    from AlphaTLB import AlphaDTB, AlphaITB
    from AlphaInterrupts import AlphaInterrupts
elif buildEnv['TARGET_ISA'] == 'sparc':
    from SparcTLB import SparcTLB
    from SparcInterrupts import SparcInterrupts
elif buildEnv['TARGET_ISA'] == 'x86':
    from X86TLB import X86TLB
    from X86LocalApic import X86LocalApic
elif buildEnv['TARGET_ISA'] == 'mips':
    from MipsTLB import MipsTLB
    from MipsInterrupts import MipsInterrupts
elif buildEnv['TARGET_ISA'] == 'arm':
    from ArmTLB import ArmTLB
    from ArmInterrupts import ArmInterrupts
elif buildEnv['TARGET_ISA'] == 'power':
    from PowerTLB import PowerTLB
    from PowerInterrupts import PowerInterrupts
class BaseCPU(MemObject):
    # Abstract base for every gem5 CPU model: common simulation parameters
    # plus the ISA-specific TLBs and interrupt controller chosen at build time.
    type = 'BaseCPU'
    abstract = True

    system = Param.System(Parent.any, "system object")
    cpu_id = Param.Int(-1, "CPU identifier")
    numThreads = Param.Unsigned(1, "number of HW thread contexts")

    function_trace = Param.Bool(False, "Enable function trace")
    function_trace_start = Param.Tick(0, "Tick to start function trace")

    checker = Param.BaseCPU(NULL, "checker CPU")

    do_checkpoint_insts = Param.Bool(True,
        "enable checkpoint pseudo instructions")
    do_statistics_insts = Param.Bool(True,
        "enable statistics pseudo instructions")

    profile = Param.Latency('0ns', "trace the kernel stack")
    do_quiesce = Param.Bool(True, "enable quiesce instructions")

    workload = VectorParam.Process([], "processes to run")

    # Instantiate TLB and interrupt-controller parameters for the target ISA.
    if buildEnv['TARGET_ISA'] == 'sparc':
        dtb = Param.SparcTLB(SparcTLB(), "Data TLB")
        itb = Param.SparcTLB(SparcTLB(), "Instruction TLB")
        interrupts = Param.SparcInterrupts(
                NULL, "Interrupt Controller")
    elif buildEnv['TARGET_ISA'] == 'alpha':
        dtb = Param.AlphaTLB(AlphaDTB(), "Data TLB")
        itb = Param.AlphaTLB(AlphaITB(), "Instruction TLB")
        interrupts = Param.AlphaInterrupts(
                NULL, "Interrupt Controller")
    elif buildEnv['TARGET_ISA'] == 'x86':
        dtb = Param.X86TLB(X86TLB(), "Data TLB")
        itb = Param.X86TLB(X86TLB(), "Instruction TLB")
        interrupts = Param.X86LocalApic(NULL, "Interrupt Controller")
    elif buildEnv['TARGET_ISA'] == 'mips':
        dtb = Param.MipsTLB(MipsTLB(), "Data TLB")
        itb = Param.MipsTLB(MipsTLB(), "Instruction TLB")
        interrupts = Param.MipsInterrupts(
                NULL, "Interrupt Controller")
    elif buildEnv['TARGET_ISA'] == 'arm':
        dtb = Param.ArmTLB(ArmTLB(), "Data TLB")
        itb = Param.ArmTLB(ArmTLB(), "Instruction TLB")
        interrupts = Param.ArmInterrupts(
                NULL, "Interrupt Controller")
    elif buildEnv['TARGET_ISA'] == 'power':
        UnifiedTLB = Param.Bool(True, "Is this a Unified TLB?")
        dtb = Param.PowerTLB(PowerTLB(), "Data TLB")
        itb = Param.PowerTLB(PowerTLB(), "Instruction TLB")
        interrupts = Param.PowerInterrupts(
                NULL, "Interrupt Controller")
    else:
        print "Don't know what TLB to use for ISA %s" % \
            buildEnv['TARGET_ISA']
        sys.exit(1)

    # Simulation termination / progress-reporting knobs.
    max_insts_all_threads = Param.Counter(0,
        "terminate when all threads have reached this inst count")
    max_insts_any_thread = Param.Counter(0,
        "terminate when any thread reaches this inst count")
    max_loads_all_threads = Param.Counter(0,
        "terminate when all threads have reached this load count")
    max_loads_any_thread = Param.Counter(0,
        "terminate when any thread reaches this load count")
    progress_interval = Param.Frequency('0Hz',
        "frequency to print out the progress message")

    defer_registration = Param.Bool(False,
        "defer registration with system (for sampling)")

    tracer = Param.InstTracer(default_tracer, "Instruction tracer")

    # Memory-system ports plus bookkeeping of which port names are routed
    # through caches vs. directly to an uncached bus.
    icache_port = MasterPort("Instruction Port")
    dcache_port = MasterPort("Data Port")
    _cached_ports = ['icache_port', 'dcache_port']
    # x86/ARM hardware page-table walkers issue their own memory requests.
    if buildEnv['TARGET_ISA'] in ['x86', 'arm']:
        _cached_ports += ["itb.walker.port", "dtb.walker.port"]

    _uncached_slave_ports = []
    _uncached_master_ports = []
    if buildEnv['TARGET_ISA'] == 'x86':
        _uncached_slave_ports += ["interrupts.pio", "interrupts.int_slave"]
        _uncached_master_ports += ["interrupts.int_master"]

    def createInterruptController(self):
        # Instantiate the ISA-appropriate interrupt controller on this CPU.
        if buildEnv['TARGET_ISA'] == 'sparc':
            self.interrupts = SparcInterrupts()
        elif buildEnv['TARGET_ISA'] == 'alpha':
            self.interrupts = AlphaInterrupts()
        elif buildEnv['TARGET_ISA'] == 'x86':
            _localApic = X86LocalApic(pio_addr=0x2000000000000000)
            self.interrupts = _localApic
        elif buildEnv['TARGET_ISA'] == 'mips':
            self.interrupts = MipsInterrupts()
        elif buildEnv['TARGET_ISA'] == 'arm':
            self.interrupts = ArmInterrupts()
        elif buildEnv['TARGET_ISA'] == 'power':
            self.interrupts = PowerInterrupts()
        else:
            print "Don't know what Interrupt Controller to use for ISA %s" % \
                buildEnv['TARGET_ISA']
            sys.exit(1)

    def connectCachedPorts(self, bus):
        # Attach every cached port (dotted attribute paths) to the bus.
        for p in self._cached_ports:
            exec('self.%s = bus.slave' % p)

    def connectUncachedPorts(self, bus):
        # Uncached slave ports attach to bus master ports and vice versa.
        for p in self._uncached_slave_ports:
            exec('self.%s = bus.master' % p)
        for p in self._uncached_master_ports:
            exec('self.%s = bus.slave' % p)

    def connectAllPorts(self, cached_bus, uncached_bus = None):
        # Connect cached and uncached ports; both default to the same bus.
        self.connectCachedPorts(cached_bus)
        if not uncached_bus:
            uncached_bus = cached_bus
        self.connectUncachedPorts(uncached_bus)

    def addPrivateSplitL1Caches(self, ic, dc, iwc = None, dwc = None):
        # Give this CPU private split L1 I/D caches (plus optional TLB-walker
        # caches); afterwards _cached_ports names the caches' memory sides.
        self.icache = ic
        self.dcache = dc
        self.icache_port = ic.cpu_side
        self.dcache_port = dc.cpu_side
        self._cached_ports = ['icache.mem_side', 'dcache.mem_side']
        if buildEnv['TARGET_ISA'] in ['x86', 'arm']:
            if iwc and dwc:
                self.itb_walker_cache = iwc
                self.dtb_walker_cache = dwc
                self.itb.walker.port = iwc.cpu_side
                self.dtb.walker.port = dwc.cpu_side
                self._cached_ports += ["itb_walker_cache.mem_side", \
                    "dtb_walker_cache.mem_side"]
            else:
                self._cached_ports += ["itb.walker.port", "dtb.walker.port"]

            # Checker doesn't need its own tlb caches because it does
            # functional accesses only
            if self.checker != NULL:
                self._cached_ports += ["checker.itb.walker.port", \
                    "checker.dtb.walker.port"]

    def addTwoLevelCacheHierarchy(self, ic, dc, l2c, iwc = None, dwc = None):
        # Private L1s behind a private L2, reached through a local crossbar.
        self.addPrivateSplitL1Caches(ic, dc, iwc, dwc)
        self.toL2Bus = CoherentBus()
        self.connectCachedPorts(self.toL2Bus)
        self.l2cache = l2c
        self.toL2Bus.master = self.l2cache.cpu_side
        self._cached_ports = ['l2cache.mem_side']

    def addCheckerCpu(self):
        # Hook for models that support a checker CPU; default is a no-op.
        pass
#
# Copyright 2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
import socket
import os
def _get_sock_fd(addr, port, server):
    """
    Get the file descriptor for the socket.
    As a client, block on connect, dup the socket descriptor.
    As a server, block on accept, dup the client descriptor.
    @param addr the ip address string
    @param port the tcp port number
    @param server true for server mode, false for client mode
    @return the file descriptor number
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    if server:
        # Allow fast rebinding of the port across restarts (TIME_WAIT).
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((addr, port))
        sock.listen(1)
        clientsock, address = sock.accept()
        fd = os.dup(clientsock.fileno())
        # Explicitly close both socket objects instead of relying on GC:
        # the dup'd descriptor stays open, and the listening port is released.
        clientsock.close()
        sock.close()
        return fd
    else:
        sock.connect((addr, port))
        fd = os.dup(sock.fileno())
        # The dup'd descriptor keeps the connection alive after this close.
        sock.close()
        return fd
class tcp_source(gr.hier_block2):
    """Hier block that streams items read from a TCP socket."""
    def __init__(self, itemsize, addr, port, server=True):
        """
        @param itemsize size in bytes of one stream item
        @param addr ip address to bind (server) or connect to (client)
        @param port tcp port number
        @param server true to accept a connection, false to connect out
        """
        # init hier block: no stream inputs, one output of itemsize
        gr.hier_block2.__init__(
            self, 'tcp_source',
            gr.io_signature(0, 0, 0),
            gr.io_signature(1, 1, itemsize),
        )
        # Blocks here until the TCP connection is established.
        fd = _get_sock_fd(addr, port, server)
        self.connect(gr.file_descriptor_source(itemsize, fd), self)
class tcp_sink(gr.hier_block2):
    """Hier block that writes stream items to a TCP socket."""
    def __init__(self, itemsize, addr, port, server=False):
        """
        @param itemsize size in bytes of one stream item
        @param addr ip address to bind (server) or connect to (client)
        @param port tcp port number
        @param server true to accept a connection, false to connect out
        """
        # init hier block: one stream input of itemsize, no outputs
        gr.hier_block2.__init__(
            self, 'tcp_sink',
            gr.io_signature(1, 1, itemsize),
            gr.io_signature(0, 0, 0),
        )
        # Blocks here until the TCP connection is established.
        fd = _get_sock_fd(addr, port, server)
        self.connect(self, gr.file_descriptor_sink(itemsize, fd))
# Copyright: (c) 2016-2018, Matt Davis <mdavis@ansible.com>
# Copyright: (c) 2018, Sam Doran <sdoran@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import annotations
import secrets
import time
from datetime import datetime, timedelta, timezone
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.module_utils.common.text.converters import to_native, to_text
from ansible.module_utils.common.validation import check_type_list, check_type_str
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display
display = Display()
class TimedOutException(Exception):
    """Raised when an operation does not succeed within its allotted timeout."""
    pass
class ActionModule(ActionBase):
    """Reboot a host with a distribution-appropriate command and wait for it to return."""

    TRANSFERS_FILES = False
    # Task keywords accepted by this action; anything else is rejected.
    _VALID_ARGS = frozenset((
        'boot_time_command',
        'connect_timeout',
        'msg',
        'post_reboot_delay',
        'pre_reboot_delay',
        'reboot_command',
        'reboot_timeout',
        'search_paths',
        'test_command',
    ))

    # Defaults applied when the task does not override the option.
    DEFAULT_REBOOT_TIMEOUT = 600
    DEFAULT_CONNECT_TIMEOUT = None
    DEFAULT_PRE_REBOOT_DELAY = 0
    DEFAULT_POST_REBOOT_DELAY = 0
    DEFAULT_TEST_COMMAND = 'whoami'
    DEFAULT_BOOT_TIME_COMMAND = 'cat /proc/sys/kernel/random/boot_id'
    DEFAULT_REBOOT_MESSAGE = 'Reboot initiated by Ansible'
    DEFAULT_SHUTDOWN_COMMAND = 'shutdown'
    DEFAULT_SHUTDOWN_COMMAND_ARGS = '-r {delay_min} "{message}"'
    DEFAULT_SUDOABLE = True

    # Option name -> version it was deprecated in (currently none).
    DEPRECATED_ARGS = {}  # type: dict[str, str]

    # The maps below hold per-platform overrides, looked up by distribution
    # name+version, then distribution name, then OS family (see
    # _get_value_from_facts); missing keys fall back to the DEFAULT_* value.
    BOOT_TIME_COMMANDS = {
        'freebsd': '/sbin/sysctl kern.boottime',
        'openbsd': '/sbin/sysctl kern.boottime',
        'macosx': 'who -b',
        'solaris': 'who -b',
        'sunos': 'who -b',
        'vmkernel': 'grep booted /var/log/vmksummary.log | tail -n 1',
        'aix': 'who -b',
    }

    SHUTDOWN_COMMANDS = {
        'alpine': 'reboot',
        'vmkernel': 'reboot',
    }

    SHUTDOWN_COMMAND_ARGS = {
        'alpine': '',
        'void': '-r +{delay_min} "{message}"',
        'freebsd': '-r +{delay_sec}s "{message}"',
        'linux': DEFAULT_SHUTDOWN_COMMAND_ARGS,
        'macosx': '-r +{delay_min} "{message}"',
        'openbsd': '-r +{delay_min} "{message}"',
        'solaris': '-y -g {delay_sec} -i 6 "{message}"',
        'sunos': '-y -g {delay_sec} -i 6 "{message}"',
        'vmkernel': '-d {delay_sec}',
        'aix': '-Fr',
    }

    TEST_COMMANDS = {
        'solaris': 'who',
        'vmkernel': 'who',
    }
def __init__(self, *args, **kwargs):
super(ActionModule, self).__init__(*args, **kwargs)
    @property
    def pre_reboot_delay(self):
        # Seconds to wait before issuing the reboot command (never negative).
        return self._check_delay('pre_reboot_delay', self.DEFAULT_PRE_REBOOT_DELAY)

    @property
    def post_reboot_delay(self):
        # Seconds to wait after the host is back before the verify step.
        return self._check_delay('post_reboot_delay', self.DEFAULT_POST_REBOOT_DELAY)
def _check_delay(self, key, default):
"""Ensure that the value is positive or zero"""
value = int(self._task.args.get(key, self._task.args.get(key + '_sec', default)))
if value < 0:
value = 0
return value
def _get_value_from_facts(self, variable_name, distribution, default_value):
"""Get dist+version specific args first, then distribution, then family, lastly use default"""
attr = getattr(self, variable_name)
value = attr.get(
distribution['name'] + distribution['version'],
attr.get(
distribution['name'],
attr.get(
distribution['family'],
getattr(self, default_value))))
return value
    def get_shutdown_command_args(self, distribution):
        """Return the argument string for the shutdown command.

        If the task supplied ``reboot_command``, everything after the first
        space is used verbatim; otherwise the distribution-specific template
        is formatted with the configured delay and message.
        """
        reboot_command = self._task.args.get('reboot_command')
        if reboot_command is not None:
            try:
                reboot_command = check_type_str(reboot_command, allow_conversion=False)
            except TypeError as e:
                raise AnsibleError("Invalid value given for 'reboot_command': %s." % to_native(e))

            # No args were provided
            try:
                return reboot_command.split(' ', 1)[1]
            except IndexError:
                return ''
        else:
            args = self._get_value_from_facts('SHUTDOWN_COMMAND_ARGS', distribution, 'DEFAULT_SHUTDOWN_COMMAND_ARGS')

            # Convert seconds to minutes. If less than 60, set it to 0.
            delay_min = self.pre_reboot_delay // 60
            reboot_message = self._task.args.get('msg', self.DEFAULT_REBOOT_MESSAGE)
            return args.format(delay_sec=self.pre_reboot_delay, delay_min=delay_min, message=reboot_message)
    def get_distribution(self, task_vars):
        """Run the minimal setup module and return distribution facts.

        Returns a dict with ``name`` (lower-cased distribution), ``version``
        (major version as text) and ``family`` (lower-cased OS family), used
        to select distribution-specific commands.
        """
        # FIXME: only execute the module if we don't already have the facts we need
        distribution = {}
        display.debug('{action}: running setup module to get distribution'.format(action=self._task.action))
        module_output = self._execute_module(
            task_vars=task_vars,
            module_name='ansible.legacy.setup',
            module_args={'gather_subset': 'min'})
        try:
            if module_output.get('failed', False):
                raise AnsibleError('Failed to determine system distribution. {0}, {1}'.format(
                    to_native(module_output['module_stdout']).strip(),
                    to_native(module_output['module_stderr']).strip()))
            distribution['name'] = module_output['ansible_facts']['ansible_distribution'].lower()
            distribution['version'] = to_text(module_output['ansible_facts']['ansible_distribution_version'].split('.')[0])
            distribution['family'] = to_text(module_output['ansible_facts']['ansible_os_family'].lower())
            display.debug("{action}: distribution: {dist}".format(action=self._task.action, dist=distribution))
            return distribution
        except KeyError as ke:
            raise AnsibleError('Failed to get distribution information. Missing "{0}" in output.'.format(ke.args[0]))
    def get_shutdown_command(self, task_vars, distribution):
        """Return the absolute path of the shutdown binary to run.

        Honors a user-supplied ``reboot_command``; an already-absolute path
        is returned as-is, otherwise the binary is located with the find
        module across ``search_paths``.
        """
        reboot_command = self._task.args.get('reboot_command')
        if reboot_command is not None:
            try:
                reboot_command = check_type_str(reboot_command, allow_conversion=False)
            except TypeError as e:
                raise AnsibleError("Invalid value given for 'reboot_command': %s." % to_native(e))
            shutdown_bin = reboot_command.split(' ', 1)[0]
        else:
            shutdown_bin = self._get_value_from_facts('SHUTDOWN_COMMANDS', distribution, 'DEFAULT_SHUTDOWN_COMMAND')

        if shutdown_bin[0] == '/':
            # Absolute path given; trust it without searching.
            return shutdown_bin
        else:
            default_search_paths = ['/sbin', '/bin', '/usr/sbin', '/usr/bin', '/usr/local/sbin']
            search_paths = self._task.args.get('search_paths', default_search_paths)

            try:
                # Convert bare strings to a list
                search_paths = check_type_list(search_paths)
            except TypeError:
                err_msg = "'search_paths' must be a string or flat list of strings, got {0}"
                raise AnsibleError(err_msg.format(search_paths))

            display.debug('{action}: running find module looking in {paths} to get path for "{command}"'.format(
                action=self._task.action,
                command=shutdown_bin,
                paths=search_paths))

            find_result = self._execute_module(
                task_vars=task_vars,
                # prevent collection search by calling with ansible.legacy (still allows library/ override of find)
                module_name='ansible.legacy.find',
                module_args={
                    'paths': search_paths,
                    'patterns': [shutdown_bin],
                    'file_type': 'any'
                }
            )

            full_path = [x['path'] for x in find_result['files']]
            if not full_path:
                raise AnsibleError('Unable to find command "{0}" in search paths: {1}'.format(shutdown_bin, search_paths))
            return full_path[0]
def deprecated_args(self):
for arg, version in self.DEPRECATED_ARGS.items():
if self._task.args.get(arg) is not None:
display.warning("Since Ansible {version}, {arg} is no longer a valid option for {action}".format(
version=version,
arg=arg,
action=self._task.action))
    def get_system_boot_time(self, distribution):
        """Return an opaque string identifying the host's current boot.

        A change in this value (e.g. a new /proc boot_id) is how we detect
        that the reboot actually happened.
        """
        boot_time_command = self._get_value_from_facts('BOOT_TIME_COMMANDS', distribution, 'DEFAULT_BOOT_TIME_COMMAND')

        # A task-supplied command overrides the distribution default.
        if self._task.args.get('boot_time_command'):
            boot_time_command = self._task.args.get('boot_time_command')

            try:
                check_type_str(boot_time_command, allow_conversion=False)
            except TypeError as e:
                raise AnsibleError("Invalid value given for 'boot_time_command': %s." % to_native(e))

        display.debug("{action}: getting boot time with command: '{command}'".format(action=self._task.action, command=boot_time_command))
        command_result = self._low_level_execute_command(boot_time_command, sudoable=self.DEFAULT_SUDOABLE)

        if command_result['rc'] != 0:
            stdout = command_result['stdout']
            stderr = command_result['stderr']
            raise AnsibleError("{action}: failed to get host boot time info, rc: {rc}, stdout: {out}, stderr: {err}".format(
                action=self._task.action,
                rc=command_result['rc'],
                out=to_native(stdout),
                err=to_native(stderr)))

        display.debug("{action}: last boot time: {boot}".format(action=self._task.action, boot=command_result['stdout'].strip()))
        return command_result['stdout'].strip()
def check_boot_time(self, distribution, previous_boot_time):
display.vvv("{action}: attempting to get system boot time".format(action=self._task.action))
connect_timeout = self._task.args.get('connect_timeout', self._task.args.get('connect_timeout_sec', self.DEFAULT_CONNECT_TIMEOUT))
# override connection timeout from defaults to the custom value
if connect_timeout:
try:
display.debug("{action}: setting connect_timeout to {value}".format(action=self._task.action, value=connect_timeout))
self._connection.set_option("connection_timeout", connect_timeout)
except AnsibleError:
try:
self._connection.set_option("timeout", connect_timeout)
except (AnsibleError, AttributeError):
display.warning("Connection plugin does not allow the connection timeout to be overridden")
self._connection.reset()
# try and get boot time
try:
current_boot_time = self.get_system_boot_time(distribution)
except Exception as e:
raise e
# FreeBSD returns an empty string immediately before reboot so adding a length
# check to prevent prematurely assuming system has rebooted
if len(current_boot_time) == 0 or current_boot_time == previous_boot_time:
raise ValueError("boot time has not changed")
    def run_test_command(self, distribution, **kwargs):
        """Run the post-reboot verification command and raise if it fails."""
        test_command = self._task.args.get('test_command', self._get_value_from_facts('TEST_COMMANDS', distribution, 'DEFAULT_TEST_COMMAND'))
        display.vvv("{action}: attempting post-reboot test command".format(action=self._task.action))
        display.debug("{action}: attempting post-reboot test command '{command}'".format(action=self._task.action, command=test_command))
        try:
            command_result = self._low_level_execute_command(test_command, sudoable=self.DEFAULT_SUDOABLE)
        except Exception:
            # may need to reset the connection in case another reboot occurred
            # which has invalidated our connection
            try:
                self._connection.reset()
            except AttributeError:
                # Not all connection plugins implement reset().
                pass
            raise

        if command_result['rc'] != 0:
            msg = 'Test command failed: {err} {out}'.format(
                err=to_native(command_result['stderr']),
                out=to_native(command_result['stdout']))
            raise RuntimeError(msg)

        display.vvv("{action}: system successfully rebooted".format(action=self._task.action))
def do_until_success_or_timeout(self, action, reboot_timeout, action_desc, distribution, action_kwargs=None):
max_end_time = datetime.now(timezone.utc) + timedelta(seconds=reboot_timeout)
if action_kwargs is None:
action_kwargs = {}
fail_count = 0
max_fail_sleep = 12
last_error_msg = ''
while datetime.now(timezone.utc) < max_end_time:
try:
action(distribution=distribution, **action_kwargs)
if action_desc:
display.debug('{action}: {desc} success'.format(action=self._task.action, desc=action_desc))
return
except Exception as e:
if isinstance(e, AnsibleConnectionFailure):
try:
self._connection.reset()
except AnsibleConnectionFailure:
pass
# Use exponential backoff with a max timeout, plus a little bit of randomness
random_int = secrets.randbelow(1000) / 1000
fail_sleep = 2 ** fail_count + random_int
if fail_sleep > max_fail_sleep:
fail_sleep = max_fail_sleep + random_int
if action_desc:
try:
error = to_text(e).splitlines()[-1]
except IndexError as e:
error = to_text(e)
last_error_msg = f"{self._task.action}: {action_desc} fail '{error}'"
msg = f"{last_error_msg}, retrying in {fail_sleep:.4f} seconds..."
display.debug(msg)
display.vvv(msg)
fail_count += 1
time.sleep(fail_sleep)
if last_error_msg:
msg = f"Last error message before the timeout exception - {last_error_msg}"
display.debug(msg)
display.vvv(msg)
raise TimedOutException('Timed out waiting for {desc} (timeout={timeout})'.format(desc=action_desc, timeout=reboot_timeout))
    def perform_reboot(self, task_vars, distribution):
        """Issue the reboot command on the remote host.

        Returns a dict with 'start' (UTC timestamp taken right after the
        command is issued) and 'failed'; on failure also 'rebooted' and 'msg'.
        """
        result = {}
        reboot_result = {}
        shutdown_command = self.get_shutdown_command(task_vars, distribution)
        shutdown_command_args = self.get_shutdown_command_args(distribution)
        reboot_command = '{0} {1}'.format(shutdown_command, shutdown_command_args)
        try:
            display.vvv("{action}: rebooting server...".format(action=self._task.action))
            display.debug("{action}: rebooting server with command '{command}'".format(action=self._task.action, command=reboot_command))
            reboot_result = self._low_level_execute_command(reboot_command, sudoable=self.DEFAULT_SUDOABLE)
        except AnsibleConnectionFailure as e:
            # If the connection is closed too quickly due to the system being shutdown, carry on
            display.debug('{action}: AnsibleConnectionFailure caught and handled: {error}'.format(action=self._task.action, error=to_text(e)))
            # Treat a dropped connection as a successful reboot kickoff.
            reboot_result['rc'] = 0
        result['start'] = datetime.now(timezone.utc)
        if reboot_result['rc'] != 0:
            result['failed'] = True
            result['rebooted'] = False
            result['msg'] = "Reboot command failed. Error was: '{stdout}, {stderr}'".format(
                stdout=to_native(reboot_result['stdout'].strip()),
                stderr=to_native(reboot_result['stderr'].strip()))
            return result
        result['failed'] = False
        return result
    def validate_reboot(self, distribution, original_connection_timeout=None, action_kwargs=None):
        """Wait for the host to come back: poll boot time, restore the original
        connection timeout, then run the post-reboot test command.

        Returns a result dict; on timeout sets 'failed' with the timeout message.
        """
        display.vvv('{action}: validating reboot'.format(action=self._task.action))
        result = {}
        try:
            # keep on checking system boot_time with short connection responses
            reboot_timeout = int(self._task.args.get('reboot_timeout', self._task.args.get('reboot_timeout_sec', self.DEFAULT_REBOOT_TIMEOUT)))
            self.do_until_success_or_timeout(
                action=self.check_boot_time,
                action_desc="last boot time check",
                reboot_timeout=reboot_timeout,
                distribution=distribution,
                action_kwargs=action_kwargs)
            # Get the connect_timeout set on the connection to compare to the original
            try:
                connect_timeout = self._connection.get_option('connection_timeout')
            except KeyError:
                try:
                    connect_timeout = self._connection.get_option('timeout')
                except KeyError:
                    pass
            # NOTE: this 'else' belongs to the outer try, so the restore logic
            # below only runs when the 'connection_timeout' option exists; a
            # value fetched via the fallback 'timeout' option is not restored.
            else:
                if original_connection_timeout != connect_timeout:
                    try:
                        display.debug("{action}: setting connect_timeout/timeout back to original value of {value}".format(action=self._task.action,
                                                                                                                           value=original_connection_timeout))
                        try:
                            self._connection.set_option("connection_timeout", original_connection_timeout)
                        except AnsibleError:
                            try:
                                self._connection.set_option("timeout", original_connection_timeout)
                            except AnsibleError:
                                raise
                        # reset the connection to clear the custom connection timeout
                        self._connection.reset()
                    except (AnsibleError, AttributeError) as e:
                        # Failing to restore the timeout is non-fatal; just log it.
                        display.debug("{action}: failed to reset connection_timeout back to default: {error}".format(action=self._task.action,
                                                                                                                    error=to_text(e)))
            # finally run test command to ensure everything is working
            # FUTURE: add a stability check (system must remain up for N seconds) to deal with self-multi-reboot updates
            self.do_until_success_or_timeout(
                action=self.run_test_command,
                action_desc="post-reboot test command",
                reboot_timeout=reboot_timeout,
                distribution=distribution,
                action_kwargs=action_kwargs)
            result['rebooted'] = True
            result['changed'] = True
        except TimedOutException as toex:
            # The host did reboot (or at least went away); mark failed with
            # the timeout message. 'rebooted' stays True.
            result['failed'] = True
            result['rebooted'] = True
            result['msg'] = to_text(toex)
            return result
        return result
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
# If running with local connection, fail so we don't reboot ourselves
if self._connection.transport == 'local':
msg = 'Running {0} with local connection would reboot the control node.'.format(self._task.action)
return {'changed': False, 'elapsed': 0, 'rebooted': False, 'failed': True, 'msg': msg}
if self._task.check_mode:
return {'changed': True, 'elapsed': 0, 'rebooted': True}
if task_vars is None:
task_vars = {}
self.deprecated_args()
result = super(ActionModule, self).run(tmp, task_vars)
if result.get('skipped', False) or result.get('failed', False):
return result
distribution = self.get_distribution(task_vars)
# Get current boot time
try:
previous_boot_time = self.get_system_boot_time(distribution)
except Exception as e:
result['failed'] = True
result['reboot'] = False
result['msg'] = to_text(e)
return result
# Get the original connection_timeout option var so it can be reset after
original_connection_timeout = None
display.debug("{action}: saving original connect_timeout of {timeout}".format(action=self._task.action, timeout=original_connection_timeout))
try:
original_connection_timeout = self._connection.get_option('connection_timeout')
except KeyError:
try:
original_connection_timeout = self._connection.get_option('timeout')
except KeyError:
display.debug("{action}: connect_timeout connection option has not been set".format(action=self._task.action))
# Initiate reboot
reboot_result = self.perform_reboot(task_vars, distribution)
if reboot_result['failed']:
result = reboot_result
elapsed = datetime.now(timezone.utc) - reboot_result['start']
result['elapsed'] = elapsed.seconds
return result
if self.post_reboot_delay != 0:
display.debug("{action}: waiting an additional {delay} seconds".format(action=self._task.action, delay=self.post_reboot_delay))
display.vvv("{action}: waiting an additional {delay} seconds".format(action=self._task.action, delay=self.post_reboot_delay))
time.sleep(self.post_reboot_delay)
# Make sure reboot was successful
result = self.validate_reboot(distribution, original_connection_timeout, action_kwargs={'previous_boot_time': previous_boot_time})
elapsed = datetime.now(timezone.utc) - reboot_result['start']
result['elapsed'] = elapsed.seconds
return result | python | github | https://github.com/ansible/ansible | lib/ansible/plugins/action/reboot.py |
"""
Copyright (C) 2015, Jaguar Land Rover
This program is licensed under the terms and conditions of the
Mozilla Public License, version 2.0. The full text of the
Mozilla Public License is at https://www.mozilla.org/MPL/2.0/
Maintainer: Rudolf Streif (rstreif@jaguarlandrover.com)
Author: Anson Fan(afan1@jaguarlandrover.com)
"""
from django.contrib import admin
from django.core.urlresolvers import reverse
from dynamicagents.models import Agent, UpdateDA, RetryDA
import logging
logger = logging.getLogger('rvi.dynamicagents')
class RetryInline(admin.TabularInline):
    """
    A Retry is associated with an Update. We use this Inline to show
    all Retries of an Update on the Update's detail page.
    """
    model = RetryDA
    readonly_fields = ('ret_start_da', 'ret_finish_da', 'ret_timeout_da', 'ret_status_da', 'get_log')
    def has_add_permission(self, request):
        # Retries are created by the update workflow, never by hand in the admin.
        return False
    def get_log(self, obj):
        """
        Returns a link to the SOTA log table view filtered for the log
        entries that belong to this Retry object.
        """
        url = reverse('admin:dblog_sotalog_changelist')
        return "<a href='{0}?retry__id__exact={1}'>Messages</a>".format(url, obj.pk)
    # BUGFIX: allow_tags tells this (pre-1.9 style, cf. django.core.urlresolvers
    # import) Django admin to render the returned HTML instead of escaping it;
    # without it the link appears as literal markup.
    get_log.allow_tags = True
    get_log.short_description = "Log"
class UpdateAdmin(admin.ModelAdmin):
    """
    Administration view for Updates. Provides admin actions to start,
    abort and terminate software-update campaigns on vehicles.
    """
    fieldsets = [
        (None, {'fields': [('upd_vehicle_da', 'upd_package_da')]}),
        ('Update Information', {'fields': [('upd_status_da', 'upd_expiration', 'upd_retries_da')]}),
    ]
    inlines = [RetryInline]
    def get_readonly_fields(self, request, obj=None):
        """Lock identity/status fields on existing updates; only status on the add form."""
        if obj:
            return ['upd_vehicle_da', 'upd_package_da', 'upd_current_da', 'upd_status_da']
        else:
            return ['upd_status_da']
    def start_update(self, request, updates):
        """Admin action: start each selected update and report how many began."""
        updates_started = 0
        for update in updates:
            retry = update.start()
            # start() returns None when the update could not be started.
            if retry is not None:
                logger.info('Started update: %s', retry)
                updates_started += 1
        # Pluralize the user-facing message (0 or >1 -> "Updates were").
        if (updates_started == 0 ) or (updates_started > 1):
            mbit = "%s Updates were" % updates_started
        else:
            mbit = "1 Update was"
        self.message_user(request, "%s successfully started." % mbit)
    start_update.short_description = "Start selected updates"
    def terminate_agent(self, request, updates):
        """Admin action: request termination of the agents of the selected updates."""
        for update in updates:
            retry = update.terminate()
        self.message_user(request, "Agents terminating")
    terminate_agent.short_description = "Terminate selected agents"
    def abort_update(self, request, updates):
        """Admin action: abort each selected update and report how many were aborted."""
        updates_aborted = 0
        for update in updates:
            retry = update.abort()
            if retry is not None:
                logger.info('Aborted update: %s', retry)
                updates_aborted += 1
        if (updates_aborted == 0 ) or (updates_aborted > 1):
            mbit = "%s Updates were" % updates_aborted
        else:
            mbit = "1 Update was"
        self.message_user(request, "%s successfully aborted." % mbit)
    abort_update.short_description = "Abort selected updates"
    list_display = ('update_name', 'upd_vehicle_da', 'upd_package_da', 'upd_status_da', 'retry_count', 'not_expired')
    list_filter = ['upd_status_da']
    # NOTE(review): these lookups use 'upd_vehicle'/'upd_package' while the form
    # fields above use the '_da' suffix — verify they match the model's fields.
    search_fields = ['upd_vehicle__veh_name', 'upd_package__pac_name']
    actions = [start_update, abort_update, terminate_agent]
class AgentAdmin(admin.ModelAdmin):
    """
    Administration view for Agents.
    """
    # Columns shown in the agent change-list page.
    list_display = ('pac_name_da', 'pac_version_da')
admin.site.register(Agent, AgentAdmin)
admin.site.register(UpdateDA, UpdateAdmin) | unknown | codeparrot/codeparrot-clean | ||
"""
InaSAFE Disaster risk assessment tool developed by AusAid -
**QGIS plugin implementation.**
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
.. note:: This source code was copied from the 'postgis viewer' application
with original authors:
Copyright (c) 2010 by Ivan Mincik, ivan.mincik@gista.sk
Copyright (c) 2011 German Carrillo, geotux_tuxman@linuxmail.org
"""
__author__ = 'tim@linfiniti.com'
__version__ = '0.5.0'
__revision__ = '$Format:%H$'
__date__ = '10/01/2011'
__copyright__ = ('Copyright (c) 2010 by Ivan Mincik, ivan.mincik@gista.sk and '
'Copyright (c) 2011 German Carrillo, '
'geotux_tuxman@linuxmail.org')
import qgis
from PyQt4.QtCore import QObject
from qgis.core import QgsMapLayerRegistry
class QgisInterface(QObject):
    """Class to expose qgis objects and functionalities to plugins.
    This class is here for enabling us to run unit tests only,
    so most methods are simply stubs.
    """
    def __init__(self, canvas):
        """Constructor.

        :param canvas: the QgsMapCanvas instance returned by mapCanvas().
        """
        QObject.__init__(self)
        self.canvas = canvas
    def zoomFull(self):
        """Zoom to the map full extent"""
        pass
    def zoomToPrevious(self):
        """Zoom to previous view extent"""
        pass
    def zoomToNext(self):
        """Zoom to next view extent"""
        pass
    def zoomToActiveLayer(self):
        """Zoom to extent of active layer"""
        pass
    def addVectorLayer(self, vectorLayerPath, baseName, providerKey):
        """Add a vector layer"""
        pass
    def addRasterLayer(self, rasterLayerPath, baseName):
        """Add a raster layer given a raster layer file name"""
        pass
    def activeLayer(self):
        """Get pointer to the active layer (layer selected in the legend)"""
        myLayers = QgsMapLayerRegistry.instance().mapLayers()
        # Returns an arbitrary layer (first one in registry iteration order),
        # or None when the registry is empty — good enough for the test stub.
        for myItem in myLayers:
            return myLayers[myItem]
    def addToolBarIcon(self, qAction):
        """Add an icon to the plugins toolbar"""
        pass
    def removeToolBarIcon(self, qAction):
        """Remove an action (icon) from the plugin toolbar"""
        pass
    def addToolBar(self, name):
        """Add toolbar with specified name"""
        pass
    def mapCanvas(self):
        """Return a pointer to the map canvas"""
        return self.canvas
    def mainWindow(self):
        """Return a pointer to the main window
        In case of QGIS it returns an instance of QgisApp
        """
        # Stub: no main window exists in the test harness.
        pass
    def addDockWidget(self, area, dockwidget):
        """ Add a dock widget to the main window """
pass | unknown | codeparrot/codeparrot-clean | ||
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import paddle.fluid.core as core
from paddle.fluid.framework import Program
from paddle.fluid.executor import global_scope
class Float16Transpiler:
    def transpile(self, program, place, scope=None):
        '''
        Transpile the program desc and cast the weights to float16 data type to
        enable float16 inference.
        Since the operator in a program desc will automatically choose the
        right compute kernel to run based on the data type of the input tensor.
        We actually don't need to change the program desc to run in float16 mode.
        However, in this way, users who are used to feeding and fetching tensors
        of float32 data type when running typical inference may find it confusing
        and difficult to run inference in float16 mode as they need to convert
        input data to float16 dtype and then convert the results back to float32
        dtype to match the rest of code.
        So this function appends cast ops to the program desc where necessary so
        that users are able to run inference in float16 mode while providing input
        tensor (feed_holder) of float data type and obtaining output tensor
        (fetch_holder) of float data type.
        Moreover, it is desired that when we have the scope and program desc to run
        inference in float32 mode, we can use a single API to do the necessary
        modification and then user can run float16 inference on the fly. To make
        this happen, this function also create new parameters in the scope to have the
        converted float16 weights and change the operators in program desc to use
        these new parameters.
        :param program: program to transpile
        :type program: Program
        :param place: inference place
        :type place: Place
        :param scope: inference scope
        :type scope: Scope
        '''
        if not isinstance(program, Program):
            raise TypeError("program should be as Program type")
        if not isinstance(place, core.CPUPlace) and not isinstance(
                place, core.CUDAPlace):
            raise TypeError("place should be as CPUPlace/CUDAPlace type")
        if scope is None:
            scope = global_scope()
        if not isinstance(scope, core.Scope):
            raise TypeError("scope should be as Scope type or None")
        self.scope = scope
        self.place = place
        self.block = program.block(0)
        self.input_map = {}  # maps old input var names -> replacement names
        # Order matters: feed/fetch casts first, then weight conversion, then
        # rewiring inputs, and finally dropping vars nothing references anymore.
        self._modify_feed_fetch()
        self._convert_param_to_float16()
        self._adjust_input(skip=True)
        self._remove_unused_var()
        # TODO(luotao): use clone() method to flush the program.desc in force,
        # since some large program.desc will not be flushed immediately.
        # And a better solution will be considered later.
        program = program.clone()
    # ====================== private transpiler functions =====================
    def _adjust_input(self, skip=False):
        '''
        Change the input variable name in operators.
        When we are in the process of modifying a program desc, we usually
        replace some variables with some other variables, where we create
        a dictionary input_map to record the one-to-one correspondence
        between each old variable and the new one.
        After that, this function will search all the operators that use the
        old variables and change the info in op to use the new variables. There
        maybe some exceptions to this rule when we are using the float16 transpiler
        and insert cast ops to cast float32 variable to float16 one. After we
        insert the cast op to cast var_1 to var_1_fp16, we don't want to change
        the input of cast op to var_1_fp16 after using this function.
        :param skip: when True, leave cast ops untouched (see above)
        '''
        skip_ops = {"cast"}
        for i in range(len(self.block.ops)):
            current_op = self.block.ops[i]
            if skip and current_op.type in skip_ops:
                continue
            for input_arg in current_op.input_arg_names:
                if input_arg in self.input_map:
                    current_op.rename_input(input_arg,
                                            self.input_map[input_arg])
    def _remove_unused_var(self):
        '''
        remove unused varibles in program
        '''
        args = []
        for i in range(len(self.block.ops)):
            current_op = self.block.ops[i]
            args += current_op.input_arg_names
            args += current_op.output_arg_names
        args = list(set(args))  # unique the input and output arguments
        # NOTE(review): this removes entries while iterating vars.keys() —
        # fine on Python 2 where keys() is a list; would raise on Python 3.
        for var in self.block.vars.keys():
            if var not in args:
                self.block.remove_var(var)
    def _modify_feed_fetch(self):
        '''
        Modify feed fetch op/vars for float16 inference.
        For each feed op:
        feed_op->feed_target_var
        Change it to:
        feed_op->feed_target_var->cast_op(from other dtype to float16)->tmp_var
        For each fetch op:
        fetch_target_var->fetch_op
        Change it to:
        tmp_var->cast_op(from float16 to other dtype)->fetch_target_var->fetch_op
        :return: None
        '''
        def find_op(var):
            # It is possible that var.op is not up to date after some
            # modifications to program desc. Here we force to make it up to date.
            var.op = None
            for op in self.block.ops:
                if var.name in op.output_arg_names:
                    var.op = op
                    break
            if var.op is None:
                raise ValueError("The target variable must have an "
                                 "associated operator that generates it.")
        i = 0
        while i < len(self.block.ops):
            cur_op = self.block.ops[i]
            if cur_op.type == "feed":
                var_name = cur_op.output("Out")[0]
                tmp_var_name = var_name + ".fp16"
                var = self.block.vars[var_name]
                # fp16 shadow of the feed target; the cast op fills it in.
                tmp_var = self.block.create_var(
                    name=tmp_var_name.encode('ascii'),
                    type=var.type,
                    dtype=core.VarDesc.VarType.FP16,
                    shape=var.shape,
                    persistable=var.persistable)
                self.block.insert_op(
                    i + 1,
                    type="cast",
                    inputs={"X": var},
                    outputs={"Out": tmp_var},
                    attrs={
                        'in_dtype': int(var.dtype),
                        'out_dtype': int(tmp_var.dtype)
                    })
                self.input_map[var_name] = tmp_var_name
                # Skip over the cast op we just inserted.
                i = i + 1
            elif cur_op.type == "fetch":
                var_name = cur_op.input("X")[0]
                tmp_var_name = var_name + ".fp16"
                var = self.block.vars[var_name]
                tmp_var = self.block.create_var(
                    name=tmp_var_name.encode('ascii'),
                    type=var.type,
                    dtype=core.VarDesc.VarType.FP16,
                    shape=var.shape,
                    persistable=var.persistable)
                # Redirect the producer of the fetch target to the fp16 var,
                # then cast back to the original dtype right before the fetch.
                find_op(var)
                var.op.rename_output(var_name, tmp_var_name)
                self.block.insert_op(
                    i,
                    type="cast",
                    inputs={"X": tmp_var},
                    outputs={"Out": var},
                    attrs={
                        'in_dtype': int(tmp_var.dtype),
                        'out_dtype': int(var.dtype)
                    })
                # Skip over the inserted cast op.
                i = i + 1
            i = i + 1
    def _convert_param_to_float16(self):
        def _get_no_fp16_conversion_var_names():
            '''
            Get the set of input variable names that shouldn't be converted to float16.
            When we want to run inference in float16 mode, most parameters need to be
            firstly converted to float16. However, there are some parameters that
            shouldn't be converted to float16 because the corresponding operator
            requires float32 parameters even in float16 mode (when the input data is
            of float16 data type). Currently, the only operator that has this exclusion
            is the batch norm op.
            :return: set of input variable names
            :type var_names: set
            '''
            op_names = {'batch_norm'}
            var_names = []
            for op in self.block.ops:
                if op.type in op_names:
                    var_names += op.input_arg_names
            return set(var_names)
        def _should_be_converted(var):
            # Only persistable (weight) vars, excluding batch-norm inputs and
            # the feed/fetch holder variables.
            return var.persistable and \
                var.name not in self.no_conversion_vars and \
                var.type != core.VarDesc.VarType.FEED_MINIBATCH and \
                var.type != core.VarDesc.VarType.FETCH_LIST
        self.no_conversion_vars = _get_no_fp16_conversion_var_names()
        conversion_var_list = filter(_should_be_converted,
                                     self.block.vars.values())
        for var in conversion_var_list:
            fp16_var_name = var.name + ".fp16"
            fp16_var = self.block.create_parameter(
                name=fp16_var_name.encode('ascii'),
                type=var.type,
                dtype=core.VarDesc.VarType.FP16,
                shape=var.shape)
            # cast the data in the tensor of the original var to float16
            # data type and store it in the tensor of the new float16 var
            self.scope.var(fp16_var_name)
            fp16_tensor = self.scope.find_var(fp16_var_name).get_tensor()
            tensor = np.array(self.scope.find_var(var.name).get_tensor())
            # After the old tensor data is converted to np.float16, view(np.uint16)
            # is used so that the internal memory of the numpy array will be
            # reinterpreted to be of np.uint16 data type, which is binded to fluid
            # float16 data type via the help of pybind in tensor_py.h.
            fp16_tensor.set(
                tensor.astype(np.float16).view(np.uint16), self.place)
            # old var will be replaced by the fp16 var in program desc
            self.input_map[var.name] = fp16_var_name
self.block.remove_var(var.name) | unknown | codeparrot/codeparrot-clean | ||
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
from ilastik.applets.base.appletSerializer import AppletSerializer, getOrCreateGroup, deleteIfPresent
import h5py
import numpy
import os
from watershed_segmentor import WatershedSegmentor
class PreprocessingSerializer( AppletSerializer ):
    """Saves/restores the preprocessing applet's state (sigma, filter,
    watershed settings and the computed watershed graph) in the project file."""
    def __init__(self, preprocessingTopLevelOperator, *args, **kwargs):
        super(PreprocessingSerializer, self).__init__(*args, **kwargs)
        self._o = preprocessingTopLevelOperator
        # This serializer can also run in headless (no-GUI) mode.
        self.caresOfHeadless = True
    def _serializeToHdf5(self, topGroup, hdf5File, projectFilePath):
        """Write the last *valid* preprocessing settings and graph to the project group."""
        preproc = topGroup
        for opPre in self._o.innerOperators:
            mst = opPre._prepData[0]
            if mst is not None:
                #The values to be saved for sigma and filter are the
                #values of the last valid preprocess
                #!These may differ from the current settings!
                deleteIfPresent(preproc, "sigma")
                deleteIfPresent(preproc, "filter")
                deleteIfPresent(preproc, "watershed_source")
                deleteIfPresent(preproc, "invert_watershed_source")
                deleteIfPresent(preproc, "graph")
                preproc.create_dataset("sigma",data= opPre.initialSigma)
                preproc.create_dataset("filter",data= opPre.initialFilter)
                ws_source = str(opPre.WatershedSource.value)
                assert isinstance( ws_source, str ), "WatershedSource was {}, but it should be a string.".format( ws_source )
                preproc.create_dataset("watershed_source", data=ws_source)
                preproc.create_dataset("invert_watershed_source", data=opPre.InvertWatershedSource.value)
                preprocgraph = getOrCreateGroup(preproc, "graph")
                mst.saveH5G(preprocgraph)
            # Everything persisted; clear the dirty flag.
            opPre._unsavedData = False
    def _deserializeFromHdf5(self, topGroup, groupVersion, hdf5File, projectFilePath,headless = False):
        """Restore preprocessing settings and the watershed graph from the project group.

        Falls back to an external graph file ("graphfile") if the graph is not
        embedded; in headless mode a missing file is a hard error.
        """
        assert "sigma" in topGroup.keys()
        assert "filter" in topGroup.keys()
        sigma = topGroup["sigma"].value
        sfilter = topGroup["filter"].value
        try:
            watershed_source = str(topGroup["watershed_source"].value)
            invert_watershed_source = bool(topGroup["invert_watershed_source"].value)
        except KeyError:
            # Older project files don't store these settings.
            watershed_source = None
            invert_watershed_source = False
        if "graph" in topGroup.keys():
            graphgroup = topGroup["graph"]
        else:
            assert "graphfile" in topGroup.keys()
            #feature: load preprocessed graph from file
            filePath = topGroup["graphfile"].value
            if not os.path.exists(filePath):
                if headless:
                    raise RuntimeError("Could not find data at " + filePath)
                # In GUI mode, ask the user to locate the missing file.
                filePath = self.repairFile(filePath,"*.h5")
            graphgroup = h5py.File(filePath,"r")["graph"]
        for opPre in self._o.innerOperators:
            opPre.initialSigma = sigma
            opPre.Sigma.setValue(sigma)
            if watershed_source:
                opPre.WatershedSource.setValue( watershed_source )
                opPre.InvertWatershedSource.setValue( invert_watershed_source )
            opPre.initialFilter = sfilter
            opPre.Filter.setValue(sfilter)
            mst = WatershedSegmentor(h5file=graphgroup)
            opPre._prepData = numpy.array([mst])
            # Restored data is clean and must not be recomputed/overwritten.
            opPre._dirty = False
            opPre.applet.writeprotected = True
            opPre.PreprocessedData.setDirty()
            opPre.enableDownstream(True)
    def isDirty(self):
        """True if any inner operator holds preprocessing results not yet saved."""
        for opPre in self._o.innerOperators:
            if opPre._unsavedData:
                return True
        return False
    # unload() below exists only to satisfy the AppletSerializer interface.
def unload(self):
pass | unknown | codeparrot/codeparrot-clean | ||
/* Copyright 2015 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Hash table on the 4-byte prefixes of static dictionary words. */
#ifndef BROTLI_ENC_DICTIONARY_HASH_H_
#define BROTLI_ENC_DICTIONARY_HASH_H_
#include "../common/platform.h"
#include "../common/static_init.h"
#if (BROTLI_STATIC_INIT != BROTLI_STATIC_INIT_NONE)
#include "../common/dictionary.h"
#endif
#if defined(__cplusplus) || defined(c_plusplus)
extern "C" {
#endif
/* Bucket is (Hash14 * 2 + length_lt_8); in other words we reserve 2 buckets
for each hash - one for shorter words and one for longer words. */
#define BROTLI_ENC_NUM_HASH_BUCKETS 32768
#if (BROTLI_STATIC_INIT != BROTLI_STATIC_INIT_NONE)
BROTLI_BOOL BROTLI_INTERNAL BrotliEncoderInitDictionaryHash(
const BrotliDictionary* dictionary, uint16_t* words, uint8_t* lengths);
BROTLI_INTERNAL extern BROTLI_MODEL("small") uint16_t
kStaticDictionaryHashWords[BROTLI_ENC_NUM_HASH_BUCKETS];
BROTLI_INTERNAL extern BROTLI_MODEL("small") uint8_t
kStaticDictionaryHashLengths[BROTLI_ENC_NUM_HASH_BUCKETS];
#else
BROTLI_INTERNAL extern const BROTLI_MODEL("small") uint16_t
kStaticDictionaryHashWords[BROTLI_ENC_NUM_HASH_BUCKETS];
BROTLI_INTERNAL extern const BROTLI_MODEL("small") uint8_t
kStaticDictionaryHashLengths[BROTLI_ENC_NUM_HASH_BUCKETS];
#endif
#if defined(__cplusplus) || defined(c_plusplus)
} /* extern "C" */
#endif
#endif /* BROTLI_ENC_DICTIONARY_HASH_H_ */ | c | github | https://github.com/nodejs/node | deps/brotli/c/enc/dictionary_hash.h |
The `rustc_sanitizers` crate contains the source code that provides support for
the [sanitizers](https://github.com/google/sanitizers) to the Rust compiler. | unknown | github | https://github.com/rust-lang/rust | compiler/rustc_sanitizers/README.md |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=R0201
import glob
import logging
import os.path
import subprocess
import sys
from devil.android import device_errors
from devil.android.valgrind_tools import base_tool
from pylib.constants import DIR_SOURCE_ROOT
def SetChromeTimeoutScale(device, scale):
  """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
  path = '/data/local/tmp/chrome_timeout_scale'
  use_default = not scale or scale == 1.0
  if use_default:
    # 1.0 is the default scale, so for None/0.0/1.0 just remove the override
    # file instead of writing it.
    device.RunShellCommand('rm %s' % path)
    return
  device.WriteFile(path, '%f' % scale, as_root=True)
class AddressSanitizerTool(base_tool.BaseTool):
  """AddressSanitizer tool."""
  # On-device wrapper that sets up the ASan runtime for wrapped binaries.
  WRAPPER_NAME = '/system/bin/asanwrapper'
  # Disable memcmp overlap check. There are blobs (gl drivers)
  # on some android devices that use memcmp on overlapping regions,
  # nothing we can do about that.
  EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
  def __init__(self, device):
    super(AddressSanitizerTool, self).__init__()
    self._device = device
  @classmethod
  def CopyFiles(cls, device):
    """Copies ASan tools to the device."""
    # Expect exactly one clang runtime lib for arm-android in the checkout.
    libs = glob.glob(os.path.join(DIR_SOURCE_ROOT,
                                  'third_party/llvm-build/Release+Asserts/',
                                  'lib/clang/*/lib/linux/',
                                  'libclang_rt.asan-arm-android.so'))
    assert len(libs) == 1
    subprocess.call(
        [os.path.join(
            DIR_SOURCE_ROOT,
            'tools/android/asan/third_party/asan_device_setup.sh'),
         '--device', str(device),
         '--lib', libs[0],
         '--extra-options', AddressSanitizerTool.EXTRA_OPTIONS])
    # asan_device_setup.sh reboots the device; wait for it to come back.
    device.WaitUntilFullyBooted()
  def GetTestWrapper(self):
    # Prefix prepended to the test command line.
    return AddressSanitizerTool.WRAPPER_NAME
  def GetUtilWrapper(self):
    """Returns the wrapper for utilities, such as forwarder.
    AddressSanitizer wrapper must be added to all instrumented binaries,
    including forwarder and the like. This can be removed if such binaries
    were built without instrumentation. """
    return self.GetTestWrapper()
  def SetupEnvironment(self):
    """Enables root (best effort) and applies the ASan timeout scale."""
    try:
      self._device.EnableRoot()
    except device_errors.CommandFailedError as e:
      # Try to set the timeout scale anyway.
      # TODO(jbudorick) Handle this exception appropriately after interface
      # conversions are finished.
      logging.error(str(e))
    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
  def CleanUpEnvironment(self):
    # Restore the default timeout scale (removes the override file).
    SetChromeTimeoutScale(self._device, None)
  def GetTimeoutScale(self):
    # Very slow startup.
    return 20.0
class ValgrindTool(base_tool.BaseTool):
  """Base abstract class for Valgrind tools."""
  VG_DIR = '/data/local/tmp/valgrind'
  VGLOGS_DIR = '/data/local/tmp/vglogs'
  def __init__(self, device):
    super(ValgrindTool, self).__init__()
    self._device = device
    # exactly 31 chars, SystemProperties::PROP_NAME_MAX
    self._wrap_properties = ['wrap.com.google.android.apps.ch',
                             'wrap.org.chromium.native_test']
  @classmethod
  def CopyFiles(cls, device):
    """Copies Valgrind tools to the device."""
    device.RunShellCommand(
        'rm -r %s; mkdir %s' % (ValgrindTool.VG_DIR, ValgrindTool.VG_DIR))
    device.RunShellCommand(
        'rm -r %s; mkdir %s' % (ValgrindTool.VGLOGS_DIR,
                                ValgrindTool.VGLOGS_DIR))
    files = cls.GetFilesForTool()
    # BUGFIX: PushChangedFiles takes a list of (host_path, device_path)
    # tuples; the previous code wrapped a generator expression in a list,
    # passing a single generator object instead of the tuples.
    device.PushChangedFiles(
        [(os.path.join(DIR_SOURCE_ROOT, f),
          os.path.join(ValgrindTool.VG_DIR, os.path.basename(f)))
         for f in files])
  def SetupEnvironment(self):
    """Sets up device environment."""
    self._device.RunShellCommand('chmod 777 /data/local/tmp')
    self._device.RunShellCommand('setenforce 0')
    # Route instrumented processes through the tool wrapper via wrap.* props.
    for prop in self._wrap_properties:
      self._device.RunShellCommand(
          'setprop %s "logwrapper %s"' % (prop, self.GetTestWrapper()))
    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
  def CleanUpEnvironment(self):
    """Cleans up device environment."""
    for prop in self._wrap_properties:
      self._device.RunShellCommand('setprop %s ""' % (prop,))
    SetChromeTimeoutScale(self._device, None)
  @staticmethod
  def GetFilesForTool():
    """Returns a list of file names for the tool."""
    raise NotImplementedError()
  def NeedsDebugInfo(self):
    """Whether this tool requires debug info.
    Returns:
      True if this tool can not work with stripped binaries.
    """
    return True
class MemcheckTool(ValgrindTool):
  """Valgrind Memcheck flavor of ValgrindTool."""
  def __init__(self, device):
    super(MemcheckTool, self).__init__(device)
  @staticmethod
  def GetFilesForTool():
    """Host-relative paths of the wrapper script and suppression files."""
    files = [
        'tools/valgrind/android/vg-chrome-wrapper.sh',
        'tools/valgrind/memcheck/suppressions.txt',
        'tools/valgrind/memcheck/suppressions_android.txt',
    ]
    return files
  def GetTestWrapper(self):
    """On-device command prefix prepended to the test command line."""
    return '/'.join([ValgrindTool.VG_DIR, 'vg-chrome-wrapper.sh'])
  def GetTimeoutScale(self):
    """Multiplier applied to timeout values under Memcheck."""
    return 30
class TSanTool(ValgrindTool):
  """ThreadSanitizer tool. See http://code.google.com/p/data-race-test ."""
  def __init__(self, device):
    super(TSanTool, self).__init__(device)
  @staticmethod
  def GetFilesForTool():
    """Host-relative paths of the TSan wrapper, suppressions and ignores."""
    files = [
        'tools/valgrind/android/vg-chrome-wrapper-tsan.sh',
        'tools/valgrind/tsan/suppressions.txt',
        'tools/valgrind/tsan/suppressions_android.txt',
        'tools/valgrind/tsan/ignores.txt',
    ]
    return files
  def GetTestWrapper(self):
    """On-device command prefix prepended to the test command line."""
    return '/'.join([ValgrindTool.VG_DIR, 'vg-chrome-wrapper-tsan.sh'])
  def GetTimeoutScale(self):
    """Multiplier applied to timeout values under TSan."""
    return 30.0
# Maps command-line tool names to the classes implementing them.  The
# "-renderer" variants intentionally reuse the same tool class;
# AddressSanitizerTool is defined earlier in this file.
TOOL_REGISTRY = {
'memcheck': MemcheckTool,
'memcheck-renderer': MemcheckTool,
'tsan': TSanTool,
'tsan-renderer': TSanTool,
'asan': AddressSanitizerTool,
}
def CreateTool(tool_name, device):
"""Creates a tool with the specified tool name.
Args:
tool_name: Name of the tool to create.
device: A DeviceUtils instance.
Returns:
A tool for the specified tool_name.
"""
# No tool requested: fall back to the no-op BaseTool.
if not tool_name:
return base_tool.BaseTool()
ctor = TOOL_REGISTRY.get(tool_name)
if ctor:
return ctor(device)
else:
# Unknown name: list the supported tools and abort the script.
print 'Unknown tool %s, available tools: %s' % (
tool_name, ', '.join(sorted(TOOL_REGISTRY.keys()))
sys.exit(1)
def PushFilesForTool(tool_name, device):
"""Pushes the files required for |tool_name| to |device|.
Args:
tool_name: Name of the tool to create.
device: A DeviceUtils instance.
"""
# Nothing to push when no tool is in use.
if not tool_name:
return
clazz = TOOL_REGISTRY.get(tool_name)
if clazz:
clazz.CopyFiles(device)
else:
# Unknown name: list the supported tools and abort the script.
print 'Unknown tool %s, available tools: %s' % (
tool_name, ', '.join(sorted(TOOL_REGISTRY.keys()))
sys.exit(1) | unknown | codeparrot/codeparrot-clean | ||
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const {
JAVASCRIPT_MODULE_TYPE_AUTO,
JAVASCRIPT_MODULE_TYPE_DYNAMIC,
JAVASCRIPT_MODULE_TYPE_ESM
} = require("./ModuleTypeConstants");
const ConstDependency = require("./dependencies/ConstDependency");
const ExportsInfoDependency = require("./dependencies/ExportsInfoDependency");
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./Dependency").DependencyLocation} DependencyLocation */
/** @typedef {import("./javascript/JavascriptParser")} JavascriptParser */
/** @typedef {import("./javascript/JavascriptParser").Range} Range */
const PLUGIN_NAME = "ExportsInfoApiPlugin";
class ExportsInfoApiPlugin {
	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap(
			PLUGIN_NAME,
			(compilation, { normalModuleFactory }) => {
				compilation.dependencyTemplates.set(
					ExportsInfoDependency,
					new ExportsInfoDependency.Template()
				);
				/**
				 * Attaches the `__webpack_exports_info__` handlers to a parser.
				 * @param {JavascriptParser} parser the parser
				 * @returns {void}
				 */
				const attachHandlers = (parser) => {
					// `__webpack_exports_info__.<export...>.<property>`: all members
					// but the last select the export, the last one is the queried
					// property.
					parser.hooks.expressionMemberChain
						.for("__webpack_exports_info__")
						.tap(PLUGIN_NAME, (expr, members) => {
							const range = /** @type {Range} */ (expr.range);
							const dep =
								members.length < 2
									? new ExportsInfoDependency(range, null, members[0])
									: new ExportsInfoDependency(
											range,
											members.slice(0, -1),
											members[members.length - 1]
									  );
							dep.loc = /** @type {DependencyLocation} */ (expr.loc);
							parser.state.module.addDependency(dep);
							return true;
						});
					// A bare `__webpack_exports_info__` expression evaluates to `true`.
					parser.hooks.expression
						.for("__webpack_exports_info__")
						.tap(PLUGIN_NAME, (expr) => {
							const dep = new ConstDependency(
								"true",
								/** @type {Range} */ (expr.range)
							);
							dep.loc = /** @type {DependencyLocation} */ (expr.loc);
							parser.state.module.addPresentationalDependency(dep);
							return true;
						});
				};
				// Register for every JavaScript module type.
				for (const moduleType of [
					JAVASCRIPT_MODULE_TYPE_AUTO,
					JAVASCRIPT_MODULE_TYPE_DYNAMIC,
					JAVASCRIPT_MODULE_TYPE_ESM
				]) {
					normalModuleFactory.hooks.parser
						.for(moduleType)
						.tap(PLUGIN_NAME, attachHandlers);
				}
			}
		);
	}
}
module.exports = ExportsInfoApiPlugin; | javascript | github | https://github.com/webpack/webpack | lib/ExportsInfoApiPlugin.js |
# The Hazard Library
# Copyright (C) 2012-2016 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy
from openquake.hazardlib.source.complex_fault import (ComplexFaultSource,
_float_ruptures)
from openquake.hazardlib.geo import Line, Point
from openquake.hazardlib.geo.surface.simple_fault import SimpleFaultSurface
from openquake.hazardlib.scalerel.peer import PeerMSR
from openquake.hazardlib.mfd import EvenlyDiscretizedMFD
from openquake.hazardlib.tom import PoissonTOM
from openquake.hazardlib.tests.source import simple_fault_test
from openquake.hazardlib.tests.source import \
_complex_fault_test_data as test_data
from openquake.hazardlib.tests import assert_pickleable
class ComplexFaultSourceSimpleGeometryIterRupturesTestCase(
        simple_fault_test.SimpleFaultIterRupturesTestCase):
    """Checks that complex fault sources of simple geometry behave
    exactly the same as simple fault sources of the same geometry."""

    def _make_source(self, *args, **kwargs):
        base = super(ComplexFaultSourceSimpleGeometryIterRupturesTestCase,
                     self)._make_source(*args, **kwargs)
        # Rebuild the simple fault surface and take its first and last
        # mesh rows as the top/bottom edges of the complex geometry.
        surface = SimpleFaultSurface.from_fault_data(
            base.fault_trace, base.upper_seismogenic_depth,
            base.lower_seismogenic_depth, base.dip,
            base.rupture_mesh_spacing
        )
        mesh = surface.get_mesh()
        edges = [Line(list(mesh[0:1])), Line(list(mesh[-1:]))]
        cfs = ComplexFaultSource(
            base.source_id, base.name, base.tectonic_region_type,
            base.mfd, base.rupture_mesh_spacing,
            base.magnitude_scaling_relationship, base.rupture_aspect_ratio,
            base.temporal_occurrence_model, edges, base.rake
        )
        assert_pickleable(cfs)
        return cfs
class ComplexFaultSourceIterRupturesTestCase(
simple_fault_test._BaseFaultSourceTestCase):
"""Rupture-enumeration tests for complex fault geometries.  Each test
method's leading comment derives the expected rupture count and
per-rupture occurrence probability from the fault mesh and the MFD."""
def _make_source(self, mfd, aspect_ratio, rupture_mesh_spacing, edges):
"""Builds a ComplexFaultSource from nested edge coordinate lists."""
source_id = name = 'test-source'
trt = self.TRT
rake = self.RAKE
tom = self.TOM
magnitude_scaling_relationship = PeerMSR()
rupture_aspect_ratio = aspect_ratio
edges = [Line([Point(*coords) for coords in edge])
for edge in edges]
cfs = ComplexFaultSource(
source_id, name, trt, mfd, rupture_mesh_spacing,
magnitude_scaling_relationship, rupture_aspect_ratio, tom,
edges, rake
)
assert_pickleable(cfs)
return cfs
def test_1(self):
# Complex fault source equivalent to Simple fault source defined
# by only the top and bottom edges. That is the complex fault surface
# is equivalent to a simple fault surface defined in the following way:
# fault_trace = [Point(0.0,0.0,0.0),
# Point(0.0,0.0359728811758,0.0),
# Point(0.0190775080917,0.0550503815181,0.0),
# Point(0.03974514139,0.0723925718855,0.0)]
# upper_seismo_depth = 0.0
# lower_seismo_depth = 4.2426406871192848
# dip = 45.0
# mesh_spacing = 1.0
# Being a regular surface and with points in the top and bottom edges
# with a spacing that is a multiple of the given mesh spacing, the
# expected mesh spacing is uniform and equal to the mesh_spacing given
# in the constructor, that is 1 km. Each mesh cell has an area of
# 1 squared km.
# In this case the fmd contains only one magnitude (3.5),
# and this originates ruptures with area equal to 0.3162277660168 km**2
# (according to PeerTestMagAreaScalingRel area = 10**(3.5-4))
# given an aspect ratio of 1, the rupture dimensions are:
# rup_length = sqrt(0.31622776601683794 * 1) = 0.56234132519034907
# rup_width = 0.56234132519034907
# Rupture length corresponds therefore to two nodes along length, and
# two nodes along width provides the closest area value, so each
# rupture extends for two nodes along length and 2 nodes along width.
# Given 11 nodes along length, and 7 along width, and assuming the
# rupture offset to be equal to mesh_spacing, the total number of
# ruptures along length is 10 and along width is 6. So the total number
# is 60. the rate associated to a magnitude 3.5 from the truncated GR
# (with bin width = 1.0) is 10**(0.5 -3.0) - 10**(0.5-4.0) =
# 0.0028460498941515417 the rate associated to each rupture is
# 0.0028460498941515417 / 60 = 4.7434164902525696e-05
# for each rupture the probability of one or more occurrences is
# 1-exp(- 4.7434164902525696e-05 * 50.0) = 0.0023688979672850108
source = self._make_source(test_data.TEST1_MFD,
test_data.TEST1_RUPTURE_ASPECT_RATIO,
test_data.TEST1_MESH_SPACING,
test_data.TEST1_EDGES)
self._test_ruptures(test_data.TEST1_RUPTURES, source)
def test_2(self):
# Complex fault source equivalent to Simple fault source defined by
# top, bottom and intermediate edges. That is the complex fault surface
# is equivalent to a simple fault surface defined in the following way:
# fault_trace = [Point(0.0,0.0,0.0),
# Point(0.0,0.0359728811758,0.0),
# Point(0.0190775080917,0.0550503815181,0.0),
# Point(0.03974514139,0.0723925718855,0.0)]
# upper_seismo_depth = 0.0
# lower_seismo_depth = 4.2426406871192848
# dip = 45.0
# mesh_spacing = 1.0
# Being a regular surface and with points in the top and bottom edges
# with a spacing that is a multiple of the given mesh spacing, the
# expected mesh spacing is uniform and equal to the mesh_spacing given
# in the constructor, that is 1 km. Each mesh cell has an area of
# 1 squared km.
# In this case the fmd contains only one magnitude (3.5), and this
# originates ruptures with area equal to 0.31622776601683794 km**2
# (according to PeerTestMagAreaScalingRel area = 10**(3.5-4))
# given an aspect ratio of 1, the rupture dimensions are:
# rup_length = sqrt(0.31622776601683794 * 1) = 0.56234132519034907
# rup_width = 0.56234132519034907
# Rupture length corresponds therefore to two nodes along length, and
# two nodes along width provides the closest area value, so each
# rupture extends for two nodes along length and 2 nodes along width.
# Given 11 nodes along length, and 7 along width, and assuming the
# rupture offset to be equal to mesh_spacing, the total number
# of ruptures along length is 10 and along width is 6. So the total
# number is 60. the rate associated to a magnitude 3.5 from the
# truncated GR (with bin width = 1.0) is 10**(0.5 -3.0) - 10**(0.5-4.0)
# = 0.0028460498941515417. the rate associated to each rupture is
# 0.0028460498941515417 / 60 = 4.7434164902525696e-05
# for each rupture the probability of one or more occurrences is
# 1-exp(- 4.7434164902525696e-05 * 50.0) = 0.0023688979672850108
source = self._make_source(test_data.TEST2_MFD,
test_data.TEST2_RUPTURE_ASPECT_RATIO,
test_data.TEST2_MESH_SPACING,
test_data.TEST2_EDGES)
self._test_ruptures(test_data.TEST2_RUPTURES, source)
def test_3(self):
# Complex fault source equivalent to Simple fault source defined by
# top, bottom and intermediate edges. That is the complex fault surface
# is equivalent to a simple fault surface defined in the following way:
# fault_trace = [Point(0.0,0.0,0.0),
# Point(0.0,0.0359728811758,0.0),
# Point(0.0190775080917,0.0550503815181,0.0),
# Point(0.03974514139,0.0723925718855,0.0)]
# upper_seismo_depth = 0.0
# lower_seismo_depth = 4.2426406871192848
# dip = 45.0
# mesh_spacing = 1.0
# Being a regular surface and with points in the top and bottom edges
# with a spacing that is a multiple of the given mesh spacing, the
# expected mesh spacing is uniform and equal to the mesh_spacing given
# in the constructor, that is 1 km. Each mesh cell has an area of
# 1 squared km.
# In this case the fmd contains only one magnitude (6.5), and this
# originates ruptures with area equal to 316.22776601683796 km**2
# (according to PeerTestMagAreaScalingRel area = 10**(6.5-4))
# assuming an aspect ratio of 1.0, the rupture dimensions are:
# rup_length = sqrt(316.22776601683796 * 1.0) = 17.782794100389228
# rup_width = 17.782794100389228
# rupture dimensions are clipped to fault dimensions In this case each
# rupture extends for 11 nodes along length and 7 nodes along width.
# The total number of ruptures is 1. the rate associated to a magnitude
# 6.5 from the truncated GR (bin width = 1) is
# 10**(0.5 - 6.0) - 10**(0.5 - 7.0) = 2.8460498941515413e-06
# the rate associated to each rupture is
# 2.8460498941515413e-06 / 1 = 2.8460498941515413e-06
# for each rupture the probability of one or more occurrences is
# 1-exp(- 2.8460498941515413e-06 * 50.0) = 0.00014229237018781316
source = self._make_source(test_data.TEST3_MFD,
test_data.TEST3_RUPTURE_ASPECT_RATIO,
test_data.TEST3_MESH_SPACING,
test_data.TEST3_EDGES)
self._test_ruptures(test_data.TEST3_RUPTURES, source)
def test_4(self):
# test 4 (Complex fault with top, bottom and intermediate edges with
# variable length)
# top edge length = 3 km
# intermediate edge = 6 km
# bottom edge = 9 km
# the spacing between edges along depth is of 1 km. Average length is
# 6 km. Assuming a mesh spacing = 2 km, the number of points per edge
# is 6 / 2 + 1 = 4. Consequently, top edge has a spacing of 1km,
# intermediate edge of 2 km, and bottom edge 3km. each cell area is
# a vertical trapezoid.
# cells area in the first row is ((1 + 2) / 2) * 1) = 1.5 km**2
# cells area in the second row is ((2 + 3) / 2 * 1) = 2.5 km**2
# In this case the fmd contains only one magnitude (4.0),
# and this originates ruptures with area equal to 1 km**2 (according to
# PeerTestMagAreaScalingRel area = 10**(4.0-4)). assuming an aspect
# ratio of 1.0, the rupture dimensions are:
# rup_length = sqrt(1.0 * 1.0) = 1.0
# rup_width = 1.0
#
# With these setting, 3 ruptures will be generated in the first row,
# and 3 ruptures in the second row. so the expected total number
# of rupture is 6. each rupture consists of 4 points.
#
# the rate associated to a magnitude 4.0 from the truncated GR (bin
# width = 0.1) is 10**(0.5 - 3.95) - 10**(0.5 - 4.05) = 7.29750961e-5
# the rate associated to each rupture is therefore 7.29750961e-5 / 6
# = 1.216251602e-05
source = self._make_source(test_data.TEST4_MFD,
test_data.TEST4_RUPTURE_ASPECT_RATIO,
test_data.TEST4_MESH_SPACING,
test_data.TEST4_EDGES)
self._test_ruptures(test_data.TEST4_RUPTURES, source)
class ComplexFaultSourceRupEnclPolyTestCase(
        simple_fault_test.SimpleFaultRupEncPolyTestCase):
    """Runs the simple-fault rupture-enclosing-polygon tests against a
    complex fault source of the same (planar) geometry."""

    def _make_source(self, mfd, aspect_ratio, fault_trace, dip):
        simple = super(ComplexFaultSourceRupEnclPolyTestCase,
                       self)._make_source(mfd, aspect_ratio, fault_trace, dip)
        # Project the fault trace down-dip to build equivalent top and
        # bottom edges for the complex geometry.
        vdist_top = simple.upper_seismogenic_depth
        vdist_bottom = simple.lower_seismogenic_depth
        hdist_top = vdist_top / numpy.tan(numpy.radians(dip))
        hdist_bottom = vdist_bottom / numpy.tan(numpy.radians(dip))
        strike = fault_trace[0].azimuth(fault_trace[-1])
        azimuth = (strike + 90.0) % 360
        top_edge = Line([point.point_at(hdist_top, vdist_top, azimuth)
                         for point in fault_trace.points])
        bottom_edge = Line([point.point_at(hdist_bottom, vdist_bottom,
                                           azimuth)
                            for point in fault_trace.points])
        return ComplexFaultSource(
            simple.source_id, simple.name, simple.tectonic_region_type,
            simple.mfd, simple.rupture_mesh_spacing,
            simple.magnitude_scaling_relationship,
            simple.rupture_aspect_ratio,
            simple.temporal_occurrence_model,
            [top_edge, bottom_edge], simple.rake
        )
class FloatRupturesTestCase(unittest.TestCase):
    """Unit tests for the rupture-floating helper ``_float_ruptures``."""

    @staticmethod
    def _float(rupture_area, rupture_length, cell_area, cell_length):
        # Convenience wrapper: accepts nested lists and converts them to
        # the float arrays _float_ruptures() expects.
        return _float_ruptures(rupture_area, rupture_length,
                               numpy.array(cell_area, dtype=float),
                               numpy.array(cell_length, dtype=float))

    def test_reshaping_along_length(self):
        ones = [[1, 1, 1], [1, 1, 1]]
        slices = self._float(3.1, 1.0, ones, ones)
        self.assertEqual(slices, [(slice(0, 3), slice(0, 3)),
                                  (slice(0, 3), slice(1, 4))])
        # A slightly larger area leaves room for only one placement.
        slices = self._float(4.2, 1.0, ones, ones)
        self.assertEqual(slices, [(slice(0, 3), slice(0, 3))])

    def test_reshaping_along_width(self):
        slices = self._float(13.0, 12.0,
                             [[4, 4], [4, 4], [2, 2]],
                             [[2, 2], [2, 2], [2, 2]])
        self.assertEqual(slices, [(slice(0, 3), slice(0, 3)),
                                  (slice(1, 4), slice(0, 3))])

    def test_varying_width(self):
        slices = self._float(2.1, 1.0,
                             [[1, 1, 1], [1, 0.1, 1], [1, 0.1, 1]],
                             [[1, 1, 1], [1, 1, 1], [1, 1, 1]])
        # Six placements: top left/middle/right then bottom left/middle/
        # right; the narrow middle column forces a deeper middle slice.
        self.assertEqual(slices, [(slice(0, 3), slice(0, 2)),
                                  (slice(0, 4), slice(1, 3)),
                                  (slice(0, 3), slice(2, 4)),
                                  (slice(1, 4), slice(0, 2)),
                                  (slice(1, 4), slice(1, 3)),
                                  (slice(1, 4), slice(2, 4))])
class ModifyComplexFaultGeometryTestCase(unittest.TestCase):
"""Tests for in-place geometry modification of a ComplexFaultSource
via modify_set_geometry()."""
def setUp(self):
# A two-edge (top/bottom) fault used as the starting geometry.
top_edge_1 = Line([Point(30.0, 30.0, 1.0), Point(31.0, 30.0, 1.0)])
bottom_edge_1 = Line([Point(29.7, 29.9, 30.0),
Point(31.3, 29.9, 32.0)])
self.edges = [top_edge_1, bottom_edge_1]
self.mfd = EvenlyDiscretizedMFD(7.0, 0.1, [1.0])
self.aspect = 1.0
self.spacing = 5.0
self.rake = 90.
def _make_source(self, edges):
"""Builds a ComplexFaultSource around the given edges."""
source_id = name = 'test-source'
trt = "Subduction Interface"
tom = PoissonTOM(50.0)
magnitude_scaling_relationship = PeerMSR()
cfs = ComplexFaultSource(
source_id, name, trt, self.mfd, self.spacing,
magnitude_scaling_relationship, self.aspect, tom,
edges, self.rake
)
return cfs
def test_modify_geometry(self):
fault = self._make_source(self.edges)
# Modify the edges
top_edge_2 = Line([Point(29.9, 30.0, 2.0), Point(31.1, 30.0, 2.1)])
bottom_edge_2 = Line([Point(29.6, 29.9, 29.0),
Point(31.4, 29.9, 33.0)])
fault.modify_set_geometry([top_edge_2, bottom_edge_2], self.spacing)
# Expected coordinates: the source must now carry exactly the new
# edges, point for point.
exp_lons_top = [29.9, 31.1]
exp_lats_top = [30.0, 30.0]
exp_depths_top = [2.0, 2.1]
exp_lons_bot = [29.6, 31.4]
exp_lats_bot = [29.9, 29.9]
exp_depths_bot = [29.0, 33.0]
for iloc in range(len(fault.edges[0])):
self.assertAlmostEqual(fault.edges[0].points[iloc].longitude,
exp_lons_top[iloc])
self.assertAlmostEqual(fault.edges[0].points[iloc].latitude,
exp_lats_top[iloc])
self.assertAlmostEqual(fault.edges[0].points[iloc].depth,
exp_depths_top[iloc])
for iloc in range(len(fault.edges[1])):
self.assertAlmostEqual(fault.edges[1].points[iloc].longitude,
exp_lons_bot[iloc])
self.assertAlmostEqual(fault.edges[1].points[iloc].latitude,
exp_lats_bot[iloc])
exp_depths_bot[iloc]) | unknown | codeparrot/codeparrot-clean | ||
/*
* Copyright 2010-2024 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.fir.test.cases.generated.cases.components.typeProvider;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.util.KtTestUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.kotlin.analysis.api.fir.test.configurators.AnalysisApiFirTestConfiguratorFactory;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfiguratorFactoryData;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiTestConfigurator;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.TestModuleKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.FrontendKind;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisSessionMode;
import org.jetbrains.kotlin.analysis.test.framework.test.configurators.AnalysisApiMode;
import org.jetbrains.kotlin.analysis.api.impl.base.test.cases.components.typeProvider.AbstractDefaultTypeTest;
import org.jetbrains.kotlin.test.TestMetadata;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.regex.Pattern;
/** This class is generated by {@link org.jetbrains.kotlin.generators.tests.analysis.api.GenerateAnalysisApiTestsKt}. DO NOT MODIFY MANUALLY */
@SuppressWarnings("all")
@TestMetadata("analysis/analysis-api/testData/components/typeProvider/defaultType")
@TestDataPath("$PROJECT_ROOT")
public class FirIdeDependentAnalysisScriptSourceModuleDefaultTypeTestGenerated extends AbstractDefaultTypeTest {
// Runs the default-type test data with the FIR frontend, a script-source
// module, a dependent analysis session and IDE analysis mode.
@NotNull
@Override
public AnalysisApiTestConfigurator getConfigurator() {
return AnalysisApiFirTestConfiguratorFactory.INSTANCE.createConfigurator(
new AnalysisApiTestConfiguratorFactoryData(
FrontendKind.Fir,
TestModuleKind.ScriptSource,
AnalysisSessionMode.Dependent,
AnalysisApiMode.Ide
)
);
}
// Guards against test data files that have no corresponding generated
// test method (fails until the suite is regenerated).
@Test
public void testAllFilesPresentInDefaultType() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("analysis/analysis-api/testData/components/typeProvider/defaultType"), Pattern.compile("^(.+)\\.kts$"), null, true);
}
@Test
@TestMetadata("classScript.kts")
public void testClassScript() {
runTest("analysis/analysis-api/testData/components/typeProvider/defaultType/classScript.kts");
}
@Test
@TestMetadata("nestedClassScript.kts")
public void testNestedClassScript() {
runTest("analysis/analysis-api/testData/components/typeProvider/defaultType/nestedClassScript.kts");
}
@Test
@TestMetadata("nestedClassScriptWithParameters.kts")
public void testNestedClassScriptWithParameters() {
runTest("analysis/analysis-api/testData/components/typeProvider/defaultType/nestedClassScriptWithParameters.kts");
}
@Test
@TestMetadata("scriptAnonymousObject.kts")
public void testScriptAnonymousObject() {
runTest("analysis/analysis-api/testData/components/typeProvider/defaultType/scriptAnonymousObject.kts");
}
@Test
@TestMetadata("scriptLocalObject.kts")
public void testScriptLocalObject() {
runTest("analysis/analysis-api/testData/components/typeProvider/defaultType/scriptLocalObject.kts");
}
} | java | github | https://github.com/JetBrains/kotlin | analysis/analysis-api-fir/tests-gen/org/jetbrains/kotlin/analysis/api/fir/test/cases/generated/cases/components/typeProvider/FirIdeDependentAnalysisScriptSourceModuleDefaultTypeTestGenerated.java |
#!/usr/bin/env python
# $Id$
# pyftpdlib is released under the MIT license, reproduced below:
# ======================================================================
# Copyright (C) 2007-2012 Giampaolo Rodola' <g.rodola@gmail.com>
#
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# ======================================================================
"""
A FTP server banning clients in case of commands flood.
If client sends more than 300 requests per-second it will be
disconnected and won't be able to re-connect for 1 hour.
"""
from pyftpdlib.ftpserver import FTPHandler, FTPServer, DummyAuthorizer, CallEvery
class AntiFloodHandler(FTPHandler):
    """FTP handler banning clients that flood the server with commands.

    A client issuing more than ``cmds_per_second`` commands within one
    second is disconnected and its IP refused for ``ban_for`` seconds.
    """

    cmds_per_second = 300  # max number of cmds per second
    ban_for = 60 * 60      # ban duration in seconds (1 hour)
    # Class-level on purpose: the ban list is shared by all connections.
    banned_ips = []

    def __init__(self, *args, **kwargs):
        super(AntiFloodHandler, self).__init__(*args, **kwargs)
        self.processed_cmds = 0
        # Check the command rate once per second.
        self.pcmds_callback = CallEvery(1, self.check_processed_cmds)

    def handle(self):
        # Called when a client connects; refuse it while it is banned.
        if self.remote_ip in self.banned_ips:
            self.respond('550 you are banned')
            self.close()
        else:
            super(AntiFloodHandler, self).handle()

    def check_processed_cmds(self):
        # Called every second; ban the peer if it exceeded the allowed
        # rate, otherwise restart the per-second counter.
        if self.processed_cmds > self.cmds_per_second:
            self.ban(self.remote_ip)
        else:
            self.processed_cmds = 0

    def process_command(self, *args, **kwargs):
        # Count every received command before processing it.
        self.processed_cmds += 1
        super(AntiFloodHandler, self).process_command(*args, **kwargs)

    def ban(self, ip):
        # Ban ip and schedule its un-ban once the ban period expires.
        if ip not in self.banned_ips:
            # Imported lazily: only needed once a flood is detected.
            from pyftpdlib.ftpserver import CallLater
            self.log('banned IP %s for command flooding' % ip)
            self.respond('550 you are banned for %s seconds' % self.ban_for)
            self.close()
            self.banned_ips.append(ip)
            # BUGFIX: the un-ban was never scheduled, which made every
            # ban permanent despite the advertised 1-hour duration.
            CallLater(self.ban_for, self.unban, ip)

    def unban(self, ip):
        # Un-ban ip; a no-op if it is not currently banned.
        try:
            self.banned_ips.remove(ip)
        except ValueError:
            pass
        else:
            self.log('unbanning IP %s' % ip)

    def close(self):
        super(AntiFloodHandler, self).close()
        # Stop the per-second rate check for this (now closed) session.
        if not self.pcmds_callback.cancelled:
            self.pcmds_callback.cancel()
def main():
    """Starts an FTP server on port 21 using the anti-flood handler."""
    authorizer = DummyAuthorizer()
    authorizer.add_user('user', '12345', '.', perm='elradfmw')
    authorizer.add_anonymous('.')
    handler_class = AntiFloodHandler
    handler_class.authorizer = authorizer
    server = FTPServer(('', 21), handler_class)
    server.serve_forever(timeout=1)
if __name__ == '__main__':
main() | unknown | codeparrot/codeparrot-clean | ||
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
from .common import InfoExtractor
import re
class ToypicsIE(InfoExtractor):
    IE_DESC = 'Toypics user profile'
    _VALID_URL = r'https?://videos\.toypics\.net/view/(?P<id>[0-9]+)/.*'
    _TEST = {
        'url': 'http://videos.toypics.net/view/514/chancebulged,-2-1/',
        'md5': '16e806ad6d6f58079d210fe30985e08b',
        'info_dict': {
            'id': '514',
            'ext': 'mp4',
            'title': 'Chance-Bulge\'d, 2',
            'age_limit': 18,
            'uploader': 'kidsune',
        }
    }

    def _real_extract(self, url):
        video_id = re.match(self._VALID_URL, url).group('id')
        webpage = self._download_webpage(url, video_id)
        # Scrape the flash video URL, the page title and the uploader.
        return {
            'id': video_id,
            'url': self._html_search_regex(
                r'src:\s+"(http://static[0-9]+\.toypics\.net/flvideo/[^"]+)"',
                webpage, 'video URL'),
            'title': self._html_search_regex(
                r'<title>Toypics - ([^<]+)</title>', webpage, 'title'),
            'uploader': self._html_search_regex(
                r'toypics.net/([^/"]+)" class="user-name">', webpage,
                'username'),
            'age_limit': 18,
        }
class ToypicsUserIE(InfoExtractor):
IE_DESC = 'Toypics user profile'
_VALID_URL = r'http://videos\.toypics\.net/(?P<username>[^/?]+)(?:$|[?#])'
_TEST = {
'url': 'http://videos.toypics.net/Mikey',
'info_dict': {
'id': 'Mikey',
},
'playlist_mincount': 19,
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
username = mobj.group('username')
profile_page = self._download_webpage(
url, username, note='Retrieving profile page')
video_count = int(self._search_regex(
r'public/">Public Videos \(([0-9]+)\)</a></li>', profile_page,
'video count'))
PAGE_SIZE = 8
urls = []
page_count = (video_count + PAGE_SIZE + 1) // PAGE_SIZE
for n in range(1, page_count + 1):
lpage_url = url + '/public/%d' % n
lpage = self._download_webpage(
lpage_url, username,
note='Downloading page %d/%d' % (n, page_count))
urls.extend(
re.findall(
r'<p class="video-entry-title">\s+<a href="(https?://videos.toypics.net/view/[^"]+)">',
lpage))
return {
'_type': 'playlist',
'id': username,
'entries': [{
'_type': 'url',
'url': eurl,
'ie_key': 'Toypics',
} for eurl in urls]
} | unknown | codeparrot/codeparrot-clean | ||
/*
* Copyright 2010-2022 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.analysis.api.impl.base.test.cases.types
import org.jetbrains.kotlin.analysis.api.KaSession
import org.jetbrains.kotlin.analysis.api.types.KaType
import org.jetbrains.kotlin.analysis.test.framework.projectStructure.KtTestModule
import org.jetbrains.kotlin.psi.KtFile
import org.jetbrains.kotlin.test.directives.model.DirectivesContainer
import org.jetbrains.kotlin.test.directives.model.SimpleDirectivesContainer
import org.jetbrains.kotlin.test.directives.model.singleValue
import org.jetbrains.kotlin.test.services.TestServices
import org.jetbrains.kotlin.util.capitalizeDecapitalize.capitalizeAsciiOnly
abstract class AbstractBuiltInTypeTest : AbstractTypeTest() {
    override val additionalDirectives: List<DirectivesContainer>
        get() = super.additionalDirectives + listOf(Directives)

    override fun getType(analysisSession: KaSession, ktFile: KtFile, module: KtTestModule, testServices: TestServices): KaType {
        with(analysisSession) {
            val builtInTypeName = module.testModule.directives.singleValue(Directives.BUILTIN_TYPE_NAME)
            val accessorName = "get" + builtInTypeName.capitalizeAsciiOnly()
            // Fail with a descriptive message instead of an opaque NPE when the
            // directive names an unknown (or ambiguous) built-in type accessor.
            val typeMethod = builtinTypes::class.java.methods.singleOrNull { it.name == accessorName }
                ?: error("No unique '$accessorName' accessor found on builtinTypes for built-in type '$builtInTypeName'")
            typeMethod.isAccessible = true
            return typeMethod.invoke(builtinTypes) as KaType
        }
    }

    object Directives : SimpleDirectivesContainer() {
        val BUILTIN_TYPE_NAME by stringDirective("name of built in type")
    }
} | kotlin | github | https://github.com/JetBrains/kotlin | analysis/analysis-api-impl-base/testFixtures/org/jetbrains/kotlin/analysis/api/impl/base/test/cases/types/AbstractBuiltInTypeTest.kt |
"""
This is the Django template system.
How it works:
The Lexer.tokenize() function converts a template string (i.e., a string containing
markup with custom template tags) to tokens, which can be either plain text
(TOKEN_TEXT), variables (TOKEN_VAR) or block statements (TOKEN_BLOCK).
The Parser() class takes a list of tokens in its constructor, and its parse()
method returns a compiled template -- which is, under the hood, a list of
Node objects.
Each Node is responsible for creating some sort of output -- e.g. simple text
(TextNode), variable values in a given context (VariableNode), results of basic
logic (IfNode), results of looping (ForNode), or anything else. The core Node
types are TextNode, VariableNode, IfNode and ForNode, but plugin modules can
define their own custom node types.
Each Node has a render() method, which takes a Context and returns a string of
the rendered node. For example, the render() method of a Variable Node returns
the variable's value as a string. The render() method of a ForNode returns the
rendered output of whatever was inside the loop, recursively.
The Template class is a convenient wrapper that takes care of template
compilation and rendering.
Usage:
The only thing you should ever use directly in this file is the Template class.
Create a compiled template object with a template_string, then call render()
with a context. In the compilation stage, the TemplateSyntaxError exception
will be raised if the template doesn't have proper syntax.
Sample code:
>>> from django import template
>>> s = u'<html>{% if test %}<h1>{{ varvalue }}</h1>{% endif %}</html>'
>>> t = template.Template(s)
(t is now a compiled template, and its render() method can be called multiple
times with multiple contexts)
>>> c = template.Context({'test':True, 'varvalue': 'Hello'})
>>> t.render(c)
u'<html><h1>Hello</h1></html>'
>>> c = template.Context({'test':False, 'varvalue': 'Hello'})
>>> t.render(c)
u'<html></html>'
"""
from __future__ import unicode_literals
import re
import warnings
from functools import partial
from importlib import import_module
from inspect import getargspec, getcallargs
from django.apps import apps
from django.template.context import ( # NOQA: imported for backwards compatibility
BaseContext, Context, ContextPopException, RequestContext,
)
from django.utils import lru_cache, six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import (
force_str, force_text, python_2_unicode_compatible,
)
from django.utils.formats import localize
from django.utils.html import conditional_escape
from django.utils.itercompat import is_iterable
from django.utils.module_loading import module_has_submodule
from django.utils.safestring import (
EscapeData, SafeData, mark_for_escaping, mark_safe,
)
from django.utils.text import (
get_text_list, smart_split, unescape_string_literal,
)
from django.utils.timezone import template_localtime
from django.utils.translation import pgettext_lazy, ugettext_lazy
# Token types emitted by the Lexer. The raw integer values are compared
# directly in Parser.parse() for a small speed win.
TOKEN_TEXT = 0
TOKEN_VAR = 1
TOKEN_BLOCK = 2
TOKEN_COMMENT = 3
# Human-readable names for the token types, used by Token.__str__.
TOKEN_MAPPING = {
    TOKEN_TEXT: 'Text',
    TOKEN_VAR: 'Var',
    TOKEN_BLOCK: 'Block',
    TOKEN_COMMENT: 'Comment',
}
# template syntax constants
FILTER_SEPARATOR = '|'
FILTER_ARGUMENT_SEPARATOR = ':'
VARIABLE_ATTRIBUTE_SEPARATOR = '.'
BLOCK_TAG_START = '{%'
BLOCK_TAG_END = '%}'
VARIABLE_TAG_START = '{{'
VARIABLE_TAG_END = '}}'
COMMENT_TAG_START = '{#'
COMMENT_TAG_END = '#}'
# Comments containing this marker are intended for translators and are
# preserved during message extraction.
TRANSLATOR_COMMENT_MARK = 'Translators'
SINGLE_BRACE_START = '{'
SINGLE_BRACE_END = '}'
ALLOWED_VARIABLE_CHARS = ('abcdefghijklmnopqrstuvwxyz'
                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.')
# what to report as the origin for templates that come from non-loader sources
# (e.g. strings)
UNKNOWN_SOURCE = '<unknown source>'
# match a variable or block tag and capture the entire tag, including start/end
# delimiters
tag_re = (re.compile('(%s.*?%s|%s.*?%s|%s.*?%s)' %
          (re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
           re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
           re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END))))
# global dictionary of libraries that have been loaded using get_library
libraries = {}
# global list of libraries to load by default for a new parser
builtins = []
class TemplateSyntaxError(Exception):
    """Raised (or returned by Parser.error) when a template is malformed."""
    pass
class TemplateDoesNotExist(Exception):
    """Raised when a named template cannot be located by any loader."""
    pass
class TemplateEncodingError(Exception):
    """Raised when a template string is not unicode and not valid UTF-8."""
    pass
@python_2_unicode_compatible
class VariableDoesNotExist(Exception):
    """Raised when a Variable fails to resolve against a given context."""
    def __init__(self, msg, params=()):
        # msg is a %-format string; params are interpolated lazily in
        # __str__ so that failed lookups cost nothing unless displayed.
        self.msg = msg
        self.params = params
    def __str__(self):
        return self.msg % tuple(force_text(p, errors='replace') for p in self.params)
class InvalidTemplateLibrary(Exception):
    """Raised when a tag/filter library is registered or used incorrectly."""
    pass
class Origin(object):
    """Abstract record of where a template's source text came from."""
    def __init__(self, name):
        self.name = name
    def reload(self):
        # Subclasses re-read and return the template source text.
        raise NotImplementedError('subclasses of Origin must provide a reload() method')
    def __str__(self):
        return self.name
class StringOrigin(Origin):
    """Origin for templates compiled directly from a string (no loader)."""
    def __init__(self, source):
        super(StringOrigin, self).__init__(UNKNOWN_SOURCE)
        self.source = source
    def reload(self):
        # "Reloading" a string template just returns the original string.
        return self.source
class Template(object):
    """
    A compiled template: wraps the NodeList produced from template_string
    and knows how to render it against a Context.
    """

    def __init__(self, template_string, origin=None, name=None, engine=None):
        try:
            template_string = force_text(template_string)
        except UnicodeDecodeError:
            raise TemplateEncodingError(
                "Templates can only be constructed "
                "from unicode or UTF-8 strings.")
        # If Template is instantiated directly rather than from an Engine and
        # exactly one Django template engine is configured, use that engine.
        # This is required to preserve backwards-compatibility for direct use
        # e.g. Template('...').render(Context({...}))
        if engine is None:
            from .engine import Engine
            engine = Engine.get_default()
        if engine.debug and origin is None:
            origin = StringOrigin(template_string)
        self.nodelist = engine.compile_string(template_string, origin)
        self.name = name
        self.origin = origin
        self.engine = engine

    def __iter__(self):
        # Flatten the nodelist: each node yields itself (and any subnodes).
        for node in self.nodelist:
            for subnode in node:
                yield subnode

    def _render(self, context):
        return self.nodelist.render(context)

    def render(self, context):
        """Display stage -- can be called many times."""
        context.render_context.push()
        try:
            if context.template is not None:
                # Already bound to a template (nested render): render as-is.
                return self._render(context)
            with context.bind_template(self):
                return self._render(context)
        finally:
            context.render_context.pop()
class Token(object):
    """One lexed chunk of a template: text, variable, block tag or comment."""

    def __init__(self, token_type, contents):
        # token_type must be TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK or
        # TOKEN_COMMENT.
        self.token_type = token_type
        self.contents = contents
        # Filled in by the Lexer after construction.
        self.lineno = None

    def __str__(self):
        return '<%s token: "%s...">' % (
            TOKEN_MAPPING[self.token_type],
            self.contents[:20].replace('\n', ''))

    def split_contents(self):
        """Split contents on whitespace, keeping translation-marked and
        quoted chunks (e.g. _("two words")) together as single pieces."""
        pieces = []
        bit_iter = iter(smart_split(self.contents))
        for piece in bit_iter:
            # Re-join pieces of a translation-marked literal such as
            # _("hello world"), which smart_split breaks on spaces.
            if piece.startswith('_("') or piece.startswith("_('"):
                sentinel = piece[2] + ')'
                trans_parts = [piece]
                while not piece.endswith(sentinel):
                    piece = next(bit_iter)
                    trans_parts.append(piece)
                piece = ' '.join(trans_parts)
            pieces.append(piece)
        return pieces
class Lexer(object):
    """
    Splits a raw template string into a list of Token objects.

    Tracks line numbers for error reporting and handles {% verbatim %}
    sections, inside which tag markup is emitted as literal text.
    """
    def __init__(self, template_string, origin):
        self.template_string = template_string
        self.origin = origin
        self.lineno = 1
        # When truthy, holds the 'endverbatim...' tag name that closes the
        # current verbatim section.
        self.verbatim = False
    def tokenize(self):
        """
        Return a list of tokens from a given template_string.
        """
        in_tag = False
        result = []
        # tag_re captures its delimiters, so re.split() alternates strictly
        # between literal text and tag markup.
        for bit in tag_re.split(self.template_string):
            if bit:
                result.append(self.create_token(bit, in_tag))
            in_tag = not in_tag
        return result
    def create_token(self, token_string, in_tag):
        """
        Convert the given token string into a new Token object and return it.
        If in_tag is True, we are processing something that matched a tag,
        otherwise it should be treated as a literal string.
        """
        if in_tag and token_string.startswith(BLOCK_TAG_START):
            # The [2:-2] ranges below strip off *_TAG_START and *_TAG_END.
            # We could do len(BLOCK_TAG_START) to be more "correct", but we've
            # hard-coded the 2s here for performance. And it's not like
            # the TAG_START values are going to change anytime, anyway.
            block_content = token_string[2:-2].strip()
            if self.verbatim and block_content == self.verbatim:
                self.verbatim = False
        if in_tag and not self.verbatim:
            if token_string.startswith(VARIABLE_TAG_START):
                token = Token(TOKEN_VAR, token_string[2:-2].strip())
            elif token_string.startswith(BLOCK_TAG_START):
                if block_content[:9] in ('verbatim', 'verbatim '):
                    self.verbatim = 'end%s' % block_content
                token = Token(TOKEN_BLOCK, block_content)
            elif token_string.startswith(COMMENT_TAG_START):
                content = ''
                # Only keep the comment's content when it carries a
                # translator-facing note. Bug fix: str.find() returns -1
                # when the marker is absent, and -1 is truthy, so the
                # previous bare truthiness test kept the content of *every*
                # comment (the marker can never be at index 0 because the
                # string starts with '{#').
                if token_string.find(TRANSLATOR_COMMENT_MARK) != -1:
                    content = token_string[2:-2].strip()
                token = Token(TOKEN_COMMENT, content)
        else:
            token = Token(TOKEN_TEXT, token_string)
        token.lineno = self.lineno
        self.lineno += token_string.count('\n')
        return token
class Parser(object):
    """
    Consumes the token stream produced by a Lexer and compiles it into a
    NodeList of Node instances, dispatching block tags to their registered
    compilation functions.
    """
    def __init__(self, tokens):
        # tokens: list of Token objects, consumed front-to-back.
        self.tokens = tokens
        self.tags = {}
        self.filters = {}
        # Preload the globally-registered default libraries.
        for lib in builtins:
            self.add_library(lib)
    def parse(self, parse_until=None):
        """
        Parse tokens into a NodeList, stopping (and pushing the terminating
        token back onto the stream) when a block tag named in ``parse_until``
        is reached.
        """
        if parse_until is None:
            parse_until = []
        nodelist = self.create_nodelist()
        while self.tokens:
            token = self.next_token()
            # Use the raw values here for TOKEN_* for a tiny performance boost.
            if token.token_type == 0:  # TOKEN_TEXT
                self.extend_nodelist(nodelist, TextNode(token.contents), token)
            elif token.token_type == 1:  # TOKEN_VAR
                if not token.contents:
                    self.empty_variable(token)
                try:
                    filter_expression = self.compile_filter(token.contents)
                except TemplateSyntaxError as e:
                    # Subclass hook may swallow the error by returning truthy.
                    if not self.compile_filter_error(token, e):
                        raise
                var_node = self.create_variable_node(filter_expression)
                self.extend_nodelist(nodelist, var_node, token)
            elif token.token_type == 2:  # TOKEN_BLOCK
                try:
                    command = token.contents.split()[0]
                except IndexError:
                    self.empty_block_tag(token)
                if command in parse_until:
                    # put token back on token list so calling
                    # code knows why it terminated
                    self.prepend_token(token)
                    return nodelist
                # execute callback function for this tag and append
                # resulting node
                self.enter_command(command, token)
                try:
                    compile_func = self.tags[command]
                except KeyError:
                    self.invalid_block_tag(token, command, parse_until)
                try:
                    compiled_result = compile_func(self, token)
                except TemplateSyntaxError as e:
                    if not self.compile_function_error(token, e):
                        raise
                self.extend_nodelist(nodelist, compiled_result, token)
                self.exit_command()
        if parse_until:
            # Ran out of tokens without seeing a closing tag.
            self.unclosed_block_tag(parse_until)
        return nodelist
    def skip_past(self, endtag):
        # Discard tokens until the named closing block tag is found.
        while self.tokens:
            token = self.next_token()
            if token.token_type == TOKEN_BLOCK and token.contents == endtag:
                return
        self.unclosed_block_tag([endtag])
    def create_variable_node(self, filter_expression):
        return VariableNode(filter_expression)
    def create_nodelist(self):
        return NodeList()
    def extend_nodelist(self, nodelist, node, token):
        # Enforce must_be_first (e.g. {% extends %}): only text nodes may
        # precede such a node.
        if node.must_be_first and nodelist:
            try:
                if nodelist.contains_nontext:
                    raise AttributeError
            except AttributeError:
                raise TemplateSyntaxError("%r must be the first tag "
                                          "in the template." % node)
        if isinstance(nodelist, NodeList) and not isinstance(node, TextNode):
            nodelist.contains_nontext = True
        nodelist.append(node)
    def enter_command(self, command, token):
        # Hook for subclasses (e.g. a debugging parser); no-op here.
        pass
    def exit_command(self):
        # Hook for subclasses; no-op here.
        pass
    def error(self, token, msg):
        # Note: returns (rather than raises) the exception so callers can
        # decide how to raise it; subclasses may attach debug info.
        return TemplateSyntaxError(msg)
    def empty_variable(self, token):
        raise self.error(token, "Empty variable tag")
    def empty_block_tag(self, token):
        raise self.error(token, "Empty block tag")
    def invalid_block_tag(self, token, command, parse_until=None):
        if parse_until:
            raise self.error(token, "Invalid block tag: '%s', expected %s" %
                (command, get_text_list(["'%s'" % p for p in parse_until])))
        raise self.error(token, "Invalid block tag: '%s'" % command)
    def unclosed_block_tag(self, parse_until):
        raise self.error(None, "Unclosed tags: %s " % ', '.join(parse_until))
    def compile_filter_error(self, token, e):
        # Hook: return truthy to suppress a filter compilation error.
        pass
    def compile_function_error(self, token, e):
        # Hook: return truthy to suppress a tag compilation error.
        pass
    def next_token(self):
        return self.tokens.pop(0)
    def prepend_token(self, token):
        self.tokens.insert(0, token)
    def delete_first_token(self):
        del self.tokens[0]
    def add_library(self, lib):
        # Merge a Library's registered tags and filters into this parser.
        self.tags.update(lib.tags)
        self.filters.update(lib.filters)
    def compile_filter(self, token):
        """
        Convenient wrapper for FilterExpression
        """
        return FilterExpression(token, self)
    def find_filter(self, filter_name):
        if filter_name in self.filters:
            return self.filters[filter_name]
        else:
            raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name)
class TokenParser(object):
    """
    Subclass this and implement the top() method to parse a template line.
    When instantiating the parser, pass in the line from the Django template
    parser.
    The parser's "tagname" instance-variable stores the name of the tag that
    the filter was called with.
    """
    def __init__(self, subject):
        # subject: the full tag line being parsed.
        self.subject = subject
        # pointer: current scan position within subject.
        self.pointer = 0
        # backout: stack of previous pointer positions, for back().
        self.backout = []
        # The first word of the line is always the tag name.
        self.tagname = self.tag()
    def top(self):
        """
        Overload this method to do the actual parsing and return the result.
        """
        raise NotImplementedError('subclasses of Tokenparser must provide a top() method')
    def more(self):
        """
        Returns True if there is more stuff in the tag.
        """
        return self.pointer < len(self.subject)
    def back(self):
        """
        Undoes the last microparser. Use this for lookahead and backtracking.
        """
        if not len(self.backout):
            raise TemplateSyntaxError("back called without some previous "
                                      "parsing")
        self.pointer = self.backout.pop()
    def tag(self):
        """
        A microparser that just returns the next tag from the line.
        """
        subject = self.subject
        i = self.pointer
        if i >= len(subject):
            raise TemplateSyntaxError("expected another tag, found "
                                      "end of string: %s" % subject)
        p = i
        # Scan the tag word itself...
        while i < len(subject) and subject[i] not in (' ', '\t'):
            i += 1
        s = subject[p:i]
        # ...then skip any trailing whitespace.
        while i < len(subject) and subject[i] in (' ', '\t'):
            i += 1
        # Record the old pointer so back() can undo this microparse.
        self.backout.append(self.pointer)
        self.pointer = i
        return s
    def value(self):
        """
        A microparser that parses for a value: some string constant or
        variable name.
        """
        subject = self.subject
        i = self.pointer
        # Note: the closure parameters deliberately shadow the enclosing
        # subject/i; it operates purely on its arguments.
        def next_space_index(subject, i):
            """
            Increment pointer until a real space (i.e. a space not within
            quotes) is encountered
            """
            while i < len(subject) and subject[i] not in (' ', '\t'):
                if subject[i] in ('"', "'"):
                    c = subject[i]
                    i += 1
                    while i < len(subject) and subject[i] != c:
                        i += 1
                    if i >= len(subject):
                        raise TemplateSyntaxError("Searching for value. "
                            "Unexpected end of string in column %d: %s" %
                            (i, subject))
                i += 1
            return i
        if i >= len(subject):
            raise TemplateSyntaxError("Searching for value. Expected another "
                                      "value but found end of string: %s" %
                                      subject)
        if subject[i] in ('"', "'"):
            # Quoted value: scan to the matching close quote.
            p = i
            i += 1
            while i < len(subject) and subject[i] != subject[p]:
                i += 1
            if i >= len(subject):
                raise TemplateSyntaxError("Searching for value. Unexpected "
                                          "end of string in column %d: %s" %
                                          (i, subject))
            i += 1
            # Continue parsing until next "real" space,
            # so that filters are also included
            i = next_space_index(subject, i)
            res = subject[p:i]
            while i < len(subject) and subject[i] in (' ', '\t'):
                i += 1
            self.backout.append(self.pointer)
            self.pointer = i
            return res
        else:
            # Bare value: scan to the next unquoted space.
            p = i
            i = next_space_index(subject, i)
            s = subject[p:i]
            while i < len(subject) and subject[i] in (' ', '\t'):
                i += 1
            self.backout.append(self.pointer)
            self.pointer = i
            return s
# This only matches constant *strings* (things in quotes or marked for
# translation). Numbers are treated as variables for implementation reasons
# (so that they retain their type when passed to filters).
constant_string = r"""
(?:%(i18n_open)s%(strdq)s%(i18n_close)s|
%(i18n_open)s%(strsq)s%(i18n_close)s|
%(strdq)s|
%(strsq)s)
""" % {
    'strdq': r'"[^"\\]*(?:\\.[^"\\]*)*"',  # double-quoted string
    'strsq': r"'[^'\\]*(?:\\.[^'\\]*)*'",  # single-quoted string
    'i18n_open': re.escape("_("),
    'i18n_close': re.escape(")"),
}
constant_string = constant_string.replace("\n", "")
# Matches the leading variable/constant of a token plus each |filter:arg
# segment; FilterExpression walks it with re.finditer().
filter_raw_string = r"""
^(?P<constant>%(constant)s)|
^(?P<var>[%(var_chars)s]+|%(num)s)|
(?:\s*%(filter_sep)s\s*
    (?P<filter_name>\w+)
    (?:%(arg_sep)s
        (?:
            (?P<constant_arg>%(constant)s)|
            (?P<var_arg>[%(var_chars)s]+|%(num)s)
        )
    )?
)""" % {
    'constant': constant_string,
    'num': r'[-+\.]?\d[\d\.e]*',
    # Raw string: the previous "\w\." relied on Python passing unknown
    # escapes through unchanged, which emits a DeprecationWarning (and a
    # SyntaxWarning on modern Python). r"\w\." has the identical value.
    'var_chars': r"\w\.",
    'filter_sep': re.escape(FILTER_SEPARATOR),
    'arg_sep': re.escape(FILTER_ARGUMENT_SEPARATOR),
}
filter_re = re.compile(filter_raw_string, re.UNICODE | re.VERBOSE)
class FilterExpression(object):
    """
    Parses a variable token and its optional filters (all as a single string),
    and return a list of tuples of the filter name and arguments.
    Sample::
        >>> token = 'variable|default:"Default value"|date:"Y-m-d"'
        >>> p = Parser('')
        >>> fe = FilterExpression(token, p)
        >>> len(fe.filters)
        2
        >>> fe.var
        <Variable: 'variable'>
    """
    def __init__(self, token, parser):
        self.token = token
        matches = filter_re.finditer(token)
        var_obj = None
        filters = []
        upto = 0
        for match in matches:
            start = match.start()
            # Any gap between consecutive matches means there were
            # characters the grammar could not parse.
            if upto != start:
                raise TemplateSyntaxError("Could not parse some characters: "
                                          "%s|%s|%s" %
                                          (token[:upto], token[upto:start],
                                           token[start:]))
            if var_obj is None:
                # The first match is the variable itself (or a string/number
                # constant, which is resolved eagerly against an empty dict).
                var, constant = match.group("var", "constant")
                if constant:
                    try:
                        var_obj = Variable(constant).resolve({})
                    except VariableDoesNotExist:
                        var_obj = None
                elif var is None:
                    raise TemplateSyntaxError("Could not find variable at "
                                              "start of %s." % token)
                else:
                    var_obj = Variable(var)
            else:
                # Subsequent matches are |filter[:arg] segments.
                filter_name = match.group("filter_name")
                args = []
                constant_arg, var_arg = match.group("constant_arg", "var_arg")
                if constant_arg:
                    # (False, value): constant, resolved once at parse time.
                    args.append((False, Variable(constant_arg).resolve({})))
                elif var_arg:
                    # (True, Variable): resolved against the context at
                    # render time.
                    args.append((True, Variable(var_arg)))
                filter_func = parser.find_filter(filter_name)
                self.args_check(filter_name, filter_func, args)
                filters.append((filter_func, args))
            upto = match.end()
        if upto != len(token):
            raise TemplateSyntaxError("Could not parse the remainder: '%s' "
                                      "from '%s'" % (token[upto:], token))
        self.filters = filters
        self.var = var_obj
    def resolve(self, context, ignore_failures=False):
        """
        Resolve the variable against ``context`` and apply each filter in
        turn, honoring the engine's string_if_invalid for failed lookups.
        """
        if isinstance(self.var, Variable):
            try:
                obj = self.var.resolve(context)
            except VariableDoesNotExist:
                if ignore_failures:
                    obj = None
                else:
                    string_if_invalid = context.template.engine.string_if_invalid
                    if string_if_invalid:
                        # '%s' in string_if_invalid is replaced by the
                        # failing variable's name.
                        if '%s' in string_if_invalid:
                            return string_if_invalid % self.var
                        else:
                            return string_if_invalid
                    else:
                        obj = string_if_invalid
        else:
            # The "variable" was a constant, already resolved at parse time.
            obj = self.var
        for func, args in self.filters:
            arg_vals = []
            for lookup, arg in args:
                if not lookup:
                    arg_vals.append(mark_safe(arg))
                else:
                    arg_vals.append(arg.resolve(context))
            if getattr(func, 'expects_localtime', False):
                obj = template_localtime(obj, context.use_tz)
            if getattr(func, 'needs_autoescape', False):
                new_obj = func(obj, autoescape=context.autoescape, *arg_vals)
            else:
                new_obj = func(obj, *arg_vals)
            # Propagate safe/escape markers through "is_safe" filters.
            if getattr(func, 'is_safe', False) and isinstance(obj, SafeData):
                obj = mark_safe(new_obj)
            elif isinstance(obj, EscapeData):
                obj = mark_for_escaping(new_obj)
            else:
                obj = new_obj
        return obj
    def args_check(name, func, provided):
        # Validate that the provided args satisfy func's signature; the
        # filter's input value implicitly supplies the first argument.
        provided = list(provided)
        # First argument, filter input, is implied.
        plen = len(provided) + 1
        # Check to see if a decorator is providing the real function.
        func = getattr(func, '_decorated_function', func)
        args, varargs, varkw, defaults = getargspec(func)
        alen = len(args)
        dlen = len(defaults or [])
        # Not enough OR Too many
        if plen < (alen - dlen) or plen > alen:
            raise TemplateSyntaxError("%s requires %d arguments, %d provided" %
                                      (name, alen - dlen, plen))
        return True
    args_check = staticmethod(args_check)
    def __str__(self):
        return self.token
def resolve_variable(path, context):
    """
    Return the resolved variable, which may contain attribute syntax, within
    the given context.

    Deprecated; use the Variable class instead.
    """
    warnings.warn(
        "resolve_variable() is deprecated. Use django.template."
        "Variable(path).resolve(context) instead",
        RemovedInDjango20Warning, stacklevel=2)
    return Variable(path).resolve(context)
class Variable(object):
    """
    A template variable, resolvable against a given context. The variable may
    be a hard-coded string (if it begins and ends with single or double quote
    marks)::
        >>> c = {'article': {'section':u'News'}}
        >>> Variable('article.section').resolve(c)
        u'News'
        >>> Variable('article').resolve(c)
        {'section': u'News'}
        >>> class AClass: pass
        >>> c = AClass()
        >>> c.article = AClass()
        >>> c.article.section = u'News'
    (The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
    """
    def __init__(self, var):
        # var: raw token text, e.g. "article.section", '"literal"' or "2.5".
        self.var = var
        self.literal = None
        self.lookups = None
        self.translate = False
        self.message_context = None
        if not isinstance(var, six.string_types):
            raise TypeError(
                "Variable must be a string or number, got %s" % type(var))
        try:
            # First try to treat this variable as a number.
            #
            # Note that this could cause an OverflowError here that we're not
            # catching. Since this should only happen at compile time, that's
            # probably OK.
            self.literal = float(var)
            # So it's a float... is it an int? If the original value contained a
            # dot or an "e" then it was a float, not an int.
            if '.' not in var and 'e' not in var.lower():
                self.literal = int(self.literal)
            # "2." is invalid
            if var.endswith('.'):
                raise ValueError
        except ValueError:
            # A ValueError means that the variable isn't a number.
            if var.startswith('_(') and var.endswith(')'):
                # The result of the lookup should be translated at rendering
                # time.
                self.translate = True
                var = var[2:-1]
            # If it's wrapped with quotes (single or double), then
            # we're also dealing with a literal.
            try:
                self.literal = mark_safe(unescape_string_literal(var))
            except ValueError:
                # Otherwise we'll set self.lookups so that resolve() knows we're
                # dealing with a bonafide variable
                if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_':
                    raise TemplateSyntaxError("Variables and attributes may "
                                              "not begin with underscores: '%s'" %
                                              var)
                self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR))
    def resolve(self, context):
        """Resolve this variable against a given context."""
        if self.lookups is not None:
            # We're dealing with a variable that needs to be resolved
            value = self._resolve_lookup(context)
        else:
            # We're dealing with a literal, so it's already been "resolved"
            value = self.literal
        if self.translate:
            if self.message_context:
                return pgettext_lazy(self.message_context, value)
            else:
                return ugettext_lazy(value)
        return value
    def __repr__(self):
        return "<%s: %r>" % (self.__class__.__name__, self.var)
    def __str__(self):
        return self.var
    def _resolve_lookup(self, context):
        """
        Performs resolution of a real variable (i.e. not a literal) against the
        given context.
        As indicated by the method's name, this method is an implementation
        detail and shouldn't be called by external code. Use Variable.resolve()
        instead.
        """
        current = context
        try:  # catch-all for silent variable failures
            for bit in self.lookups:
                try:  # dictionary lookup
                    current = current[bit]
                # ValueError/IndexError are for numpy.array lookup on
                # numpy < 1.9 and 1.9+ respectively
                except (TypeError, AttributeError, KeyError, ValueError, IndexError):
                    try:  # attribute lookup
                        # Don't return class attributes if the class is the context:
                        if isinstance(current, BaseContext) and getattr(type(current), bit):
                            raise AttributeError
                        current = getattr(current, bit)
                    except (TypeError, AttributeError) as e:
                        # Reraise an AttributeError raised by a @property
                        if (isinstance(e, AttributeError) and
                                not isinstance(current, BaseContext) and bit in dir(current)):
                            raise
                        try:  # list-index lookup
                            current = current[int(bit)]
                        except (IndexError,  # list index out of range
                                ValueError,  # invalid literal for int()
                                KeyError,  # current is a dict without `int(bit)` key
                                TypeError):  # unsubscriptable object
                            raise VariableDoesNotExist("Failed lookup for key "
                                                       "[%s] in %r",
                                                       (bit, current))  # missing attribute
                if callable(current):
                    if getattr(current, 'do_not_call_in_templates', False):
                        pass
                    elif getattr(current, 'alters_data', False):
                        # Refuse to call data-altering methods; substitute
                        # the engine's invalid-variable placeholder.
                        current = context.template.engine.string_if_invalid
                    else:
                        try:  # method call (assuming no args required)
                            current = current()
                        except TypeError:
                            try:
                                getcallargs(current)
                            except TypeError:  # arguments *were* required
                                current = context.template.engine.string_if_invalid  # invalid method call
                            else:
                                raise
        except Exception as e:
            if getattr(e, 'silent_variable_failure', False):
                current = context.template.engine.string_if_invalid
            else:
                raise
        return current
class Node(object):
    """Base class for all compiled template nodes."""

    # Set this to True for nodes that must be first in the template (although
    # they can be preceded by text nodes.
    must_be_first = False
    child_nodelists = ('nodelist',)

    def render(self, context):
        """
        Return the node rendered as a string.
        """
        pass

    def __iter__(self):
        yield self

    def get_nodes_by_type(self, nodetype):
        """
        Return a list of all nodes (within this node and its nodelist)
        of the given type
        """
        found = [self] if isinstance(self, nodetype) else []
        for list_attr in self.child_nodelists:
            child_list = getattr(self, list_attr, None)
            if child_list:
                found.extend(child_list.get_nodes_by_type(nodetype))
        return found
class NodeList(list):
    """A list of Nodes that renders itself into a single safe string."""

    # Set to True the first time a non-TextNode is inserted by
    # extend_nodelist().
    contains_nontext = False

    def render(self, context):
        rendered = []
        for item in self:
            # Non-Node entries (plain strings) are passed through as-is.
            piece = self.render_node(item, context) if isinstance(item, Node) else item
            rendered.append(force_text(piece))
        return mark_safe(''.join(rendered))

    def get_nodes_by_type(self, nodetype):
        """Return a list of all nodes of the given type."""
        found = []
        for item in self:
            found.extend(item.get_nodes_by_type(nodetype))
        return found

    def render_node(self, node, context):
        return node.render(context)
class TextNode(Node):
    """Holds a literal chunk of template text between tags."""
    def __init__(self, s):
        self.s = s
    def __repr__(self):
        return force_str("<Text Node: '%s'>" % self.s[:25], 'ascii',
                         errors='replace')
    def render(self, context):
        # Literal text renders as-is; the context is unused.
        return self.s
def render_value_in_context(value, context):
    """
    Converts any value to a string to become part of a rendered template. This
    means escaping, if required, and conversion to a unicode object. If value
    is a string, it is expected to have already been translated.
    """
    value = template_localtime(value, use_tz=context.use_tz)
    value = force_text(localize(value, use_l10n=context.use_l10n))
    # Escape unless the value is already marked safe; values explicitly
    # marked for escaping are escaped even when autoescape is off.
    needs_escape = ((context.autoescape and not isinstance(value, SafeData))
                    or isinstance(value, EscapeData))
    if needs_escape:
        return conditional_escape(value)
    return value
class VariableNode(Node):
    """Renders a {{ variable }} expression (with filters) into the output."""
    def __init__(self, filter_expression):
        # filter_expression: a compiled FilterExpression.
        self.filter_expression = filter_expression
    def __repr__(self):
        return "<Variable Node: %s>" % self.filter_expression
    def render(self, context):
        try:
            output = self.filter_expression.resolve(context)
        except UnicodeDecodeError:
            # Unicode conversion can fail sometimes for reasons out of our
            # control (e.g. exception rendering). In that case, we fail
            # quietly.
            return ''
        return render_value_in_context(output, context)
# Regex for token keyword arguments
kwarg_re = re.compile(r"(?:(\w+)=)?(.+)")


def token_kwargs(bits, parser, support_legacy=False):
    """
    A utility method for parsing token keyword arguments.

    ``bits`` is a list containing the remainder of the token (split by
    spaces) that is to be checked for arguments; valid arguments are removed
    from the list in place. If ``support_legacy`` is ``True``, the legacy
    format ``1 as foo`` is accepted in addition to the standard ``foo=1``
    format.

    Returns a dictionary mapping argument names to compiled filter
    expressions. Not all remaining ``bits`` need to be keyword arguments:
    the dictionary is returned as soon as an invalid argument format is
    reached.
    """
    if not bits:
        return {}
    first_match = kwarg_re.match(bits[0])
    # Truthy when the first bit looks like "name=value"; decides which of
    # the two syntaxes governs the whole run of arguments.
    kwarg_format = first_match and first_match.group(1)
    if not kwarg_format:
        if not support_legacy:
            return {}
        if len(bits) < 3 or bits[1] != 'as':
            return {}
    kwargs = {}
    while bits:
        if kwarg_format:
            match = kwarg_re.match(bits[0])
            if not (match and match.group(1)):
                break
            key, value = match.groups()
            del bits[:1]
        else:
            # Legacy "<value> as <name>" triple.
            if len(bits) < 3 or bits[1] != 'as':
                break
            key, value = bits[2], bits[0]
            del bits[:3]
        kwargs[key] = parser.compile_filter(value)
        if bits and not kwarg_format:
            # Legacy triples are joined by "and".
            if bits[0] != 'and':
                break
            del bits[:1]
    return kwargs
def parse_bits(parser, bits, params, varargs, varkw, defaults,
               takes_context, name):
    """
    Parses bits for template tag helpers (simple_tag, inclusion_tag and
    assignment_tag), in particular by detecting syntax errors and by
    extracting positional and keyword arguments.
    """
    if takes_context:
        # The decorated function's first parameter must literally be named
        # 'context'; it is stripped from the expected parameter list.
        if params[0] != 'context':
            raise TemplateSyntaxError(
                "'%s' is decorated with takes_context=True so it must "
                "have a first argument of 'context'" % name)
        params = params[1:]
    args = []
    kwargs = {}
    # Expected positional parameters not yet matched by a bit.
    unhandled_params = list(params)
    for bit in bits:
        # Try to extract a potential kwarg from this bit.
        kwarg = token_kwargs([bit], parser)
        if kwarg:
            param, value = list(kwarg.items())[0]
            if param not in params and varkw is None:
                # An unexpected keyword argument was supplied.
                raise TemplateSyntaxError(
                    "'%s' received unexpected keyword argument '%s'" %
                    (name, param))
            if param in kwargs:
                raise TemplateSyntaxError(
                    "'%s' received multiple values for keyword argument '%s'" %
                    (name, param))
            kwargs[str(param)] = value
            if param in unhandled_params:
                # Keyword syntax was used for a positional arg: consume it.
                unhandled_params.remove(param)
            continue
        # Positional argument.
        if kwargs:
            raise TemplateSyntaxError(
                "'%s' received some positional argument(s) after some "
                "keyword argument(s)" % name)
        args.append(parser.compile_filter(bit))
        try:
            # Consume from the list of expected positional arguments.
            unhandled_params.pop(0)
        except IndexError:
            if varargs is None:
                raise TemplateSyntaxError(
                    "'%s' received too many positional arguments" %
                    name)
    if defaults is not None:
        # Consider the last n params handled, where n is the number of
        # defaults.
        unhandled_params = unhandled_params[:-len(defaults)]
    if unhandled_params:
        # Some positional arguments were not supplied.
        raise TemplateSyntaxError(
            "'%s' did not receive value(s) for the argument(s): %s" %
            (name, ", ".join("'%s'" % p for p in unhandled_params)))
    return args, kwargs
def generic_tag_compiler(parser, token, params, varargs, varkw, defaults,
                         name, takes_context, node_class):
    """
    Returns a template.Node subclass.
    """
    # Drop the tag name itself; everything after it is the tag's arguments.
    bits = token.split_contents()[1:]
    args, kwargs = parse_bits(parser, bits, params, varargs, varkw,
                              defaults, takes_context, name)
    return node_class(takes_context, args, kwargs)
class TagHelperNode(Node):
    """
    Base class for tag helper nodes such as SimpleNode, InclusionNode and
    AssignmentNode. Manages the positional and keyword arguments to be passed
    to the decorated function.
    """

    def __init__(self, takes_context, args, kwargs):
        self.takes_context = takes_context
        self.args = args
        self.kwargs = kwargs

    def get_resolved_arguments(self, context):
        """Resolve stored filter expressions against the given context."""
        positional = [expr.resolve(context) for expr in self.args]
        if self.takes_context:
            # The context itself is always the first positional argument.
            positional.insert(0, context)
        keyword = {key: expr.resolve(context)
                   for key, expr in self.kwargs.items()}
        return positional, keyword
class Library(object):
def __init__(self):
self.filters = {}
self.tags = {}
def tag(self, name=None, compile_function=None):
if name is None and compile_function is None:
# @register.tag()
return self.tag_function
elif name is not None and compile_function is None:
if callable(name):
# @register.tag
return self.tag_function(name)
else:
# @register.tag('somename') or @register.tag(name='somename')
def dec(func):
return self.tag(name, func)
return dec
elif name is not None and compile_function is not None:
# register.tag('somename', somefunc)
self.tags[name] = compile_function
return compile_function
else:
raise InvalidTemplateLibrary("Unsupported arguments to "
"Library.tag: (%r, %r)", (name, compile_function))
def tag_function(self, func):
self.tags[getattr(func, "_decorated_function", func).__name__] = func
return func
def filter(self, name=None, filter_func=None, **flags):
if name is None and filter_func is None:
# @register.filter()
def dec(func):
return self.filter_function(func, **flags)
return dec
elif name is not None and filter_func is None:
if callable(name):
# @register.filter
return self.filter_function(name, **flags)
else:
# @register.filter('somename') or @register.filter(name='somename')
def dec(func):
return self.filter(name, func, **flags)
return dec
elif name is not None and filter_func is not None:
# register.filter('somename', somefunc)
self.filters[name] = filter_func
for attr in ('expects_localtime', 'is_safe', 'needs_autoescape'):
if attr in flags:
value = flags[attr]
# set the flag on the filter for FilterExpression.resolve
setattr(filter_func, attr, value)
# set the flag on the innermost decorated function
# for decorators that need it e.g. stringfilter
if hasattr(filter_func, "_decorated_function"):
setattr(filter_func._decorated_function, attr, value)
filter_func._filter_name = name
return filter_func
else:
raise InvalidTemplateLibrary("Unsupported arguments to "
"Library.filter: (%r, %r)", (name, filter_func))
def filter_function(self, func, **flags):
name = getattr(func, "_decorated_function", func).__name__
return self.filter(name, func, **flags)
def simple_tag(self, func=None, takes_context=None, name=None):
def dec(func):
params, varargs, varkw, defaults = getargspec(func)
class SimpleNode(TagHelperNode):
def render(self, context):
resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
return func(*resolved_args, **resolved_kwargs)
function_name = (name or
getattr(func, '_decorated_function', func).__name__)
compile_func = partial(generic_tag_compiler,
params=params, varargs=varargs, varkw=varkw,
defaults=defaults, name=function_name,
takes_context=takes_context, node_class=SimpleNode)
compile_func.__doc__ = func.__doc__
self.tag(function_name, compile_func)
return func
if func is None:
# @register.simple_tag(...)
return dec
elif callable(func):
# @register.simple_tag
return dec(func)
else:
raise TemplateSyntaxError("Invalid arguments provided to simple_tag")
    def assignment_tag(self, func=None, takes_context=None, name=None):
        """Register a tag that stores its result in a context variable.

        Template usage is ``{% tagname args... as varname %}``; the return
        value of *func* is assigned to ``varname`` and the tag itself
        renders as the empty string.  Usable bare or with arguments, like
        ``simple_tag``.
        """
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)

            class AssignmentNode(TagHelperNode):
                def __init__(self, takes_context, args, kwargs, target_var):
                    super(AssignmentNode, self).__init__(takes_context, args, kwargs)
                    self.target_var = target_var

                def render(self, context):
                    resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
                    context[self.target_var] = func(*resolved_args, **resolved_kwargs)
                    return ''

            function_name = (name or
                getattr(func, '_decorated_function', func).__name__)

            def compile_func(parser, token):
                # The token must end with "... as <target_var>".
                bits = token.split_contents()[1:]
                if len(bits) < 2 or bits[-2] != 'as':
                    raise TemplateSyntaxError(
                        "'%s' tag takes at least 2 arguments and the "
                        "second last argument must be 'as'" % function_name)
                target_var = bits[-1]
                bits = bits[:-2]
                args, kwargs = parse_bits(parser, bits, params,
                    varargs, varkw, defaults, takes_context, function_name)
                return AssignmentNode(takes_context, args, kwargs, target_var)

            compile_func.__doc__ = func.__doc__
            self.tag(function_name, compile_func)
            return func

        if func is None:
            # @register.assignment_tag(...)
            return dec
        elif callable(func):
            # @register.assignment_tag
            return dec(func)
        else:
            raise TemplateSyntaxError("Invalid arguments provided to assignment_tag")
    def inclusion_tag(self, file_name, takes_context=False, name=None):
        """Register a tag that renders *file_name* with a context dict.

        *func* must return a dict, which becomes the context used to render
        the given template (or list of candidate templates).
        """
        def dec(func):
            params, varargs, varkw, defaults = getargspec(func)

            class InclusionNode(TagHelperNode):
                def render(self, context):
                    """
                    Renders the specified template and context. Caches the
                    template object in render_context to avoid reparsing and
                    loading when used in a for loop.
                    """
                    resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
                    _dict = func(*resolved_args, **resolved_kwargs)
                    t = context.render_context.get(self)
                    if t is None:
                        # file_name may be a Template, an object wrapping
                        # one, an iterable of candidate names, or a name.
                        if isinstance(file_name, Template):
                            t = file_name
                        elif isinstance(getattr(file_name, 'template', None), Template):
                            t = file_name.template
                        elif not isinstance(file_name, six.string_types) and is_iterable(file_name):
                            t = context.template.engine.select_template(file_name)
                        else:
                            t = context.template.engine.get_template(file_name)
                        context.render_context[self] = t
                    new_context = context.new(_dict)
                    # Copy across the CSRF token, if present, because
                    # inclusion tags are often used for forms, and we need
                    # instructions for using CSRF protection to be as simple
                    # as possible.
                    csrf_token = context.get('csrf_token', None)
                    if csrf_token is not None:
                        new_context['csrf_token'] = csrf_token
                    return t.render(new_context)

            function_name = (name or
                getattr(func, '_decorated_function', func).__name__)
            compile_func = partial(generic_tag_compiler,
                params=params, varargs=varargs, varkw=varkw,
                defaults=defaults, name=function_name,
                takes_context=takes_context, node_class=InclusionNode)
            compile_func.__doc__ = func.__doc__
            self.tag(function_name, compile_func)
            return func
        return dec
def is_library_missing(name):
    """Check if library that failed to load cannot be found under any
    templatetags directory or does exist but fails to import.

    Non-existing condition is checked recursively for each subpackage in
    cases like <appdir>/templatetags/subpackage/package/module.py.
    """
    # Every name is prefixed with some template root, so a '.' separator
    # is guaranteed to be present.
    parent, _, leaf = name.rpartition('.')
    try:
        return not module_has_submodule(import_module(parent), leaf)
    except ImportError:
        # The parent itself failed to import; recurse one level up.
        return is_library_missing(parent)
def import_library(taglib_module):
    """
    Load a template tag library module.

    Verifies that the library contains a 'register' attribute, and
    returns that attribute as the representation of the library.
    """
    try:
        mod = import_module(taglib_module)
    except ImportError as e:
        # A taglib submodule that simply does not exist is not an error;
        # a submodule that exists but raises on import must be reported.
        if is_library_missing(taglib_module):
            return None
        raise InvalidTemplateLibrary("ImportError raised loading %s: %s" %
                                     (taglib_module, e))
    if not hasattr(mod, 'register'):
        raise InvalidTemplateLibrary("Template library %s does not have "
                                     "a variable named 'register'" %
                                     taglib_module)
    return mod.register
@lru_cache.lru_cache()
def get_templatetags_modules():
    """
    Return the list of all available template tag modules.

    Caches the result for faster access.
    """
    # Django's own tags plus one candidate per installed application.
    candidates = ['django.templatetags']
    candidates.extend('%s.templatetags' % app_config.name
                      for app_config in apps.get_app_configs())
    found = []
    for candidate in candidates:
        try:
            import_module(candidate)
        except ImportError:
            pass
        else:
            found.append(candidate)
    return found
def get_library(library_name):
    """
    Load the template library module with the given name.

    If the library is not already loaded, loop over all templatetags
    modules to locate it.  {% load somelib %} and {% load someotherlib %}
    loop twice; subsequent loads of the same library in the same process
    grab the cached module from ``libraries``.
    """
    cached = libraries.get(library_name, None)
    if cached:
        return cached
    attempted = []
    for module in get_templatetags_modules():
        taglib_module = '%s.%s' % (module, library_name)
        attempted.append(taglib_module)
        lib = import_library(taglib_module)
        if lib:
            libraries[library_name] = lib
            return lib
    raise InvalidTemplateLibrary("Template library %s not found, "
                                 "tried %s" %
                                 (library_name,
                                  ','.join(attempted)))
def add_to_builtins(module):
    # Register a library so its tags/filters are available in every
    # template without an explicit {% load %}.
    builtins.append(import_library(module))
add_to_builtins('django.template.defaulttags')
add_to_builtins('django.template.defaultfilters')
add_to_builtins('django.template.loader_tags')
import numpy as np
import copy
import random
import deepchem
class TicTacToeEnvironment(deepchem.rl.Environment):
    """Play tic-tac-toe against a randomly acting opponent.

    The state is a single 3x3x2 float32 array: each square holds a 2-vector
    that is X, O, or EMPTY.  The agent always plays X; actions 0-8 map to
    squares in row-major order.
    """

    X = np.array([1.0, 0.0])
    O = np.array([0.0, 1.0])
    EMPTY = np.array([0.0, 0.0])

    # Rewards returned by step().
    ILLEGAL_MOVE_PENALTY = -3.0
    LOSS_PENALTY = -3.0
    NOT_LOSS = 0.1
    DRAW_REWARD = 5.0
    WIN_REWARD = 10.0

    def __init__(self):
        super(TicTacToeEnvironment, self).__init__([(3, 3, 2)], 9)
        self.reset()

    def reset(self):
        """Clear the board; the opponent plays the first move half the time."""
        self._terminated = False
        self._state = [np.zeros(shape=(3, 3, 2), dtype=np.float32)]
        # Randomize who goes first
        if random.randint(0, 1) == 1:
            move = self.get_O_move()
            self._state[0][move[0]][move[1]] = TicTacToeEnvironment.O

    def step(self, action):
        """Place X at *action* (0-8, row-major), then let O reply.

        Returns the reward for the transition and sets ``_terminated`` when
        the game ends (win, loss, draw, or illegal move).
        """
        # Copy so previous states handed out to callers are not mutated.
        self._state = copy.deepcopy(self._state)
        row = action // 3
        col = action % 3

        # Illegal move -- the square is not empty
        if not np.all(self._state[0][row][col] == TicTacToeEnvironment.EMPTY):
            self._terminated = True
            return TicTacToeEnvironment.ILLEGAL_MOVE_PENALTY

        # Move X
        self._state[0][row][col] = TicTacToeEnvironment.X

        # Did X win?
        if self.check_winner(TicTacToeEnvironment.X):
            self._terminated = True
            return TicTacToeEnvironment.WIN_REWARD

        if self.game_over():
            self._terminated = True
            return TicTacToeEnvironment.DRAW_REWARD

        move = self.get_O_move()
        self._state[0][move[0]][move[1]] = TicTacToeEnvironment.O

        # Did O win?
        if self.check_winner(TicTacToeEnvironment.O):
            self._terminated = True
            return TicTacToeEnvironment.LOSS_PENALTY

        if self.game_over():
            self._terminated = True
            return TicTacToeEnvironment.DRAW_REWARD
        return TicTacToeEnvironment.NOT_LOSS

    def get_O_move(self):
        """Return a uniformly random empty square as a (row, col) tuple."""
        empty_squares = []
        for row in range(3):
            for col in range(3):
                if np.all(self._state[0][row][col] == TicTacToeEnvironment.EMPTY):
                    empty_squares.append((row, col))
        return random.choice(empty_squares)

    def check_winner(self, player):
        """Return True if *player* owns a full row, column, or diagonal."""
        board = self._state[0]
        for i in range(3):
            row = np.sum(board[i, :], axis=0)
            if np.all(row == player * 3):
                return True
            # BUG FIX: the original used board[:][i], but [:] is a no-op
            # copy, so that expression is just row i again and column wins
            # were never detected.  A proper column slice is board[:, i].
            col = np.sum(board[:, i], axis=0)
            if np.all(col == player * 3):
                return True

        diag1 = board[0][0] + board[1][1] + board[2][2]
        if np.all(diag1 == player * 3):
            return True
        diag2 = board[0][2] + board[1][1] + board[2][0]
        if np.all(diag2 == player * 3):
            return True
        return False

    def game_over(self):
        """Return True when no empty square remains (board is full)."""
        for i in range(3):
            for j in range(3):
                if np.all(self._state[0][i][j] == TicTacToeEnvironment.EMPTY):
                    return False
        return True

    def display(self):
        """Return an ASCII rendering of the board ('_', 'X', 'O')."""
        state = self._state[0]
        s = ""
        for row in range(3):
            for col in range(3):
                if np.all(state[row][col] == TicTacToeEnvironment.EMPTY):
                    s += "_"
                if np.all(state[row][col] == TicTacToeEnvironment.X):
                    s += "X"
                if np.all(state[row][col] == TicTacToeEnvironment.O):
                    s += "O"
            s += "\n"
        return s
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Capability",
"description": "A grouping and boundary mechanism developers can use to isolate access to the IPC layer.\n\n It controls application windows' and webviews' fine grained access\n to the Tauri core, application, or plugin commands.\n If a webview or its window is not matching any capability then it has no access to the IPC layer at all.\n\n This can be done to create groups of windows, based on their required system access, which can reduce\n impact of frontend vulnerabilities in less privileged windows.\n Windows can be added to a capability by exact name (e.g. `main-window`) or glob patterns like `*` or `admin-*`.\n A Window can have none, one, or multiple associated capabilities.\n\n ## Example\n\n ```json\n {\n \"identifier\": \"main-user-files-write\",\n \"description\": \"This capability allows the `main` window on macOS and Windows access to `filesystem` write related commands and `dialog` commands to enable programmatic access to files selected by the user.\",\n \"windows\": [\n \"main\"\n ],\n \"permissions\": [\n \"core:default\",\n \"dialog:open\",\n {\n \"identifier\": \"fs:allow-write-text-file\",\n \"allow\": [{ \"path\": \"$HOME/test.txt\" }]\n },\n ],\n \"platforms\": [\"macOS\",\"windows\"]\n }\n ```",
"type": "object",
"required": [
"identifier",
"permissions"
],
"properties": {
"identifier": {
"description": "Identifier of the capability.\n\n ## Example\n\n `main-user-files-write`",
"type": "string"
},
"description": {
"description": "Description of what the capability is intended to allow on associated windows.\n\n It should contain a description of what the grouped permissions should allow.\n\n ## Example\n\n This capability allows the `main` window access to `filesystem` write related\n commands and `dialog` commands to enable programmatic access to files selected by the user.",
"default": "",
"type": "string"
},
"remote": {
"description": "Configure remote URLs that can use the capability permissions.\n\n This setting is optional and defaults to not being set, as our\n default use case is that the content is served from our local application.\n\n :::caution\n Make sure you understand the security implications of providing remote\n sources with local system access.\n :::\n\n ## Example\n\n ```json\n {\n \"urls\": [\"https://*.mydomain.dev\"]\n }\n ```",
"anyOf": [
{
"$ref": "#/definitions/CapabilityRemote"
},
{
"type": "null"
}
]
},
"local": {
"description": "Whether this capability is enabled for local app URLs or not. Defaults to `true`.",
"default": true,
"type": "boolean"
},
"windows": {
"description": "List of windows that are affected by this capability. Can be a glob pattern.\n\n If a window label matches any of the patterns in this list,\n the capability will be enabled on all the webviews of that window,\n regardless of the value of [`Self::webviews`].\n\n On multiwebview windows, prefer specifying [`Self::webviews`] and omitting [`Self::windows`]\n for a fine grained access control.\n\n ## Example\n\n `[\"main\"]`",
"type": "array",
"items": {
"type": "string"
}
},
"webviews": {
"description": "List of webviews that are affected by this capability. Can be a glob pattern.\n\n The capability will be enabled on all the webviews\n whose label matches any of the patterns in this list,\n regardless of whether the webview's window label matches a pattern in [`Self::windows`].\n\n ## Example\n\n `[\"sub-webview-one\", \"sub-webview-two\"]`",
"type": "array",
"items": {
"type": "string"
}
},
"permissions": {
"description": "List of permissions attached to this capability.\n\n Must include the plugin name as prefix in the form of `${plugin-name}:${permission-name}`.\n For commands directly implemented in the application itself only `${permission-name}`\n is required.\n\n ## Example\n\n ```json\n [\n \"core:default\",\n \"shell:allow-open\",\n \"dialog:open\",\n {\n \"identifier\": \"fs:allow-write-text-file\",\n \"allow\": [{ \"path\": \"$HOME/test.txt\" }]\n }\n ]\n ```",
"type": "array",
"items": {
"$ref": "#/definitions/PermissionEntry"
},
"uniqueItems": true
},
"platforms": {
"description": "Limit which target platforms this capability applies to.\n\n By default all platforms are targeted.\n\n ## Example\n\n `[\"macOS\",\"windows\"]`",
"type": [
"array",
"null"
],
"items": {
"$ref": "#/definitions/Target"
}
}
},
"definitions": {
"CapabilityRemote": {
"description": "Configuration for remote URLs that are associated with the capability.",
"type": "object",
"required": [
"urls"
],
"properties": {
"urls": {
"description": "Remote domains this capability refers to using the [URLPattern standard](https://urlpattern.spec.whatwg.org/).\n\n ## Examples\n\n - \"https://*.mydomain.dev\": allows subdomains of mydomain.dev\n - \"https://mydomain.dev/api/*\": allows any subpath of mydomain.dev/api",
"type": "array",
"items": {
"type": "string"
}
}
}
},
"PermissionEntry": {
"description": "An entry for a permission value in a [`Capability`] can be either a raw permission [`Identifier`]\n or an object that references a permission and extends its scope.",
"anyOf": [
{
"description": "Reference a permission or permission set by identifier.",
"allOf": [
{
"$ref": "#/definitions/Identifier"
}
]
},
{
"description": "Reference a permission or permission set by identifier and extends its scope.",
"type": "object",
"required": [
"identifier"
],
"properties": {
"identifier": {
"description": "Identifier of the permission or permission set.",
"allOf": [
{
"$ref": "#/definitions/Identifier"
}
]
},
"allow": {
"description": "Data that defines what is allowed by the scope.",
"type": [
"array",
"null"
],
"items": {
"$ref": "#/definitions/Value"
}
},
"deny": {
"description": "Data that defines what is denied by the scope. This should be prioritized by validation logic.",
"type": [
"array",
"null"
],
"items": {
"$ref": "#/definitions/Value"
}
}
}
}
]
},
"Identifier": {
"type": "string"
},
"Value": {
"description": "All supported ACL values.",
"anyOf": [
{
"description": "Represents a null JSON value.",
"type": "null"
},
{
"description": "Represents a [`bool`].",
"type": "boolean"
},
{
"description": "Represents a valid ACL [`Number`].",
"allOf": [
{
"$ref": "#/definitions/Number"
}
]
},
{
"description": "Represents a [`String`].",
"type": "string"
},
{
"description": "Represents a list of other [`Value`]s.",
"type": "array",
"items": {
"$ref": "#/definitions/Value"
}
},
{
"description": "Represents a map of [`String`] keys to [`Value`]s.",
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/Value"
}
}
]
},
"Number": {
"description": "A valid ACL number.",
"anyOf": [
{
"description": "Represents an [`i64`].",
"type": "integer",
"format": "int64"
},
{
"description": "Represents a [`f64`].",
"type": "number",
"format": "double"
}
]
},
"Target": {
"description": "Platform target.",
"oneOf": [
{
"description": "MacOS.",
"type": "string",
"enum": [
"macOS"
]
},
{
"description": "Windows.",
"type": "string",
"enum": [
"windows"
]
},
{
"description": "Linux.",
"type": "string",
"enum": [
"linux"
]
},
{
"description": "Android.",
"type": "string",
"enum": [
"android"
]
},
{
"description": "iOS.",
"type": "string",
"enum": [
"iOS"
]
}
]
}
}
}
"""
===================================================
Label Propagation digits: Demonstrating performance
===================================================
This example demonstrates the power of semisupervised learning by
training a Label Spreading model to classify handwritten digits
with sets of very few labels.
The handwritten digit dataset has 1797 total points. The model will
be trained using all points, but only 30 will be labeled. Results
in the form of a confusion matrix and a series of metrics over each
class will be very good.
At the end, the top 10 most uncertain predictions will be shown.
"""
print(__doc__)
# Authors: Clay Woolam <clay@woolam.org>
# Licence: BSD
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from sklearn import datasets
from sklearn.semi_supervised import label_propagation
from sklearn.metrics import confusion_matrix, classification_report
digits = datasets.load_digits()
rng = np.random.RandomState(0)
indices = np.arange(len(digits.data))
rng.shuffle(indices)
X = digits.data[indices[:330]]
y = digits.target[indices[:330]]
images = digits.images[indices[:330]]
n_total_samples = len(y)
n_labeled_points = 30
indices = np.arange(n_total_samples)
unlabeled_set = indices[n_labeled_points:]
# shuffle everything around
y_train = np.copy(y)
y_train[unlabeled_set] = -1
###############################################################################
# Learn with LabelSpreading
lp_model = label_propagation.LabelSpreading(gamma=0.25, max_iter=5)
lp_model.fit(X, y_train)
predicted_labels = lp_model.transduction_[unlabeled_set]
true_labels = y[unlabeled_set]
cm = confusion_matrix(true_labels, predicted_labels, labels=lp_model.classes_)
print("Label Spreading model: %d labeled & %d unlabeled points (%d total)" %
(n_labeled_points, n_total_samples - n_labeled_points, n_total_samples))
print(classification_report(true_labels, predicted_labels))
print("Confusion matrix")
print(cm)
# calculate uncertainty values for each transduced distribution
pred_entropies = stats.distributions.entropy(lp_model.label_distributions_.T)
# pick the top 10 most uncertain labels
uncertainty_index = np.argsort(pred_entropies)[-10:]
###############################################################################
# plot
f = plt.figure(figsize=(7, 5))
for index, image_index in enumerate(uncertainty_index):
image = images[image_index]
sub = f.add_subplot(2, 5, index + 1)
sub.imshow(image, cmap=plt.cm.gray_r)
plt.xticks([])
plt.yticks([])
sub.set_title('predict: %i\ntrue: %i' % (
lp_model.transduction_[image_index], y[image_index]))
f.suptitle('Learning with small amount of labeled data')
plt.show() | unknown | codeparrot/codeparrot-clean | ||
/* module.c - the module itself
*
* Copyright (C) 2004-2010 Gerhard Häring <gh@ghaering.de>
*
* This file is part of pysqlite.
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
* 3. This notice may not be removed or altered from any source distribution.
*/
#ifndef Py_BUILD_CORE_BUILTIN
# define Py_BUILD_CORE_MODULE 1
#endif
#include "connection.h"
#include "statement.h"
#include "cursor.h"
#include "prepare_protocol.h"
#include "microprotocols.h"
#include "row.h"
#include "blob.h"
#include "util.h"
#if SQLITE_VERSION_NUMBER < 3015002
#error "SQLite 3.15.2 or higher required"
#endif
#define clinic_state() (pysqlite_get_state(module))
#include "clinic/module.c.h"
#undef clinic_state
/*[clinic input]
module _sqlite3
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=81e330492d57488e]*/
/*
* We create 'clinic/_sqlite3.connect.c.h' in connection.c, in order to
* keep the signatures of sqlite3.Connection.__init__ and
* sqlite3.connect() synchronised.
*/
#include "clinic/_sqlite3.connect.c.h"
/*
 * sqlite3.connect() implementation.
 *
 * Forwards all arguments to the connection factory: the value of the
 * "factory" keyword argument if given, else the module's Connection type.
 * The signature is kept in sync with Connection.__init__ via the shared
 * clinic header (see clinic/_sqlite3.connect.c.h above).
 */
static PyObject *
pysqlite_connect(PyObject *module, PyObject *const *args, Py_ssize_t nargsf,
                 PyObject *kwnames)
{
    pysqlite_state *state = pysqlite_get_state(module);
    PyObject *factory = (PyObject *)state->ConnectionType;

    /* connect() accepts at most one positional argument (the database). */
    Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
    if (nargs > 1) {
        PyErr_Format(PyExc_TypeError,
            "connect() takes at most 1 positional arguments (%zd given)", nargs);
        return NULL;
    }
    if (kwnames != NULL) {
        /* Scan keyword names for "factory"; its value is at args[nargs+i]. */
        for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(kwnames); i++) {
            PyObject *item = PyTuple_GET_ITEM(kwnames, i); // borrowed ref.
            if (PyUnicode_EqualToUTF8(item, "factory")) {
                factory = args[nargs + i];
                break;
            }
        }
    }

    /* Delegate construction; kwnames (including "factory") pass through. */
    return PyObject_Vectorcall(factory, args, nargsf, kwnames);
}
/*[clinic input]
_sqlite3.complete_statement as pysqlite_complete_statement

    statement: str

Checks if a string contains a complete SQL statement.
[clinic start generated code]*/

static PyObject *
pysqlite_complete_statement_impl(PyObject *module, const char *statement)
/*[clinic end generated code: output=e55f1ff1952df558 input=ac45d257375bb828]*/
{
    /* PyBool_FromLong() returns a new reference to the shared True/False
     * singletons, exactly like the explicit Py_NewRef() branches did. */
    return PyBool_FromLong(sqlite3_complete(statement));
}
/*[clinic input]
_sqlite3.register_adapter as pysqlite_register_adapter

    type: object(type='PyTypeObject *')
    adapter as caster: object
    /

Register a function to adapt Python objects to SQLite values.
[clinic start generated code]*/

static PyObject *
pysqlite_register_adapter_impl(PyObject *module, PyTypeObject *type,
                               PyObject *caster)
/*[clinic end generated code: output=a287e8db18e8af23 input=29a5e0f213030242]*/
{
    int rc;
    /* Fetch the module state once; the original looked it up twice
     * (once in the fast-path branch below and once unconditionally). */
    pysqlite_state *state = pysqlite_get_state(module);

    /* a basic type is adapted; there's a performance optimization if that's
     * not the case (99 % of all usages) */
    if (type == &PyLong_Type || type == &PyFloat_Type
            || type == &PyUnicode_Type || type == &PyByteArray_Type) {
        state->BaseTypeAdapted = 1;
    }

    PyObject *protocol = (PyObject *)state->PrepareProtocolType;
    rc = pysqlite_microprotocols_add(state, type, protocol, caster);
    if (rc == -1) {
        return NULL;
    }

    Py_RETURN_NONE;
}
/*[clinic input]
_sqlite3.register_converter as pysqlite_register_converter

    typename as orig_name: unicode
    converter as callable: object
    /

Register a function to convert SQLite values to Python objects.
[clinic start generated code]*/

static PyObject *
pysqlite_register_converter_impl(PyObject *module, PyObject *orig_name,
                                 PyObject *callable)
/*[clinic end generated code: output=a2f2bfeed7230062 input=159a444971b40378]*/
{
    PyObject* name = NULL;
    PyObject* retval = NULL;

    /* convert the name to upper case: converter lookup keys are stored
     * upper-cased so matching is effectively case-insensitive */
    pysqlite_state *state = pysqlite_get_state(module);
    name = PyObject_CallMethodNoArgs(orig_name, state->str_upper);
    if (!name) {
        goto error;
    }

    if (PyDict_SetItem(state->converters, name, callable) != 0) {
        goto error;
    }

    retval = Py_NewRef(Py_None);
error:
    /* 'name' is a temporary; drop it on both success and failure paths. */
    Py_XDECREF(name);
    return retval;
}
/*[clinic input]
_sqlite3.enable_callback_tracebacks as pysqlite_enable_callback_trace

    enable: int
    /

Enable or disable callback functions throwing errors to stderr.
[clinic start generated code]*/

static PyObject *
pysqlite_enable_callback_trace_impl(PyObject *module, int enable)
/*[clinic end generated code: output=4ff1d051c698f194 input=cb79d3581eb77c40]*/
{
    /* The flag lives in per-module state and is consulted by the
     * user-function/callback error handlers. */
    pysqlite_state *state = pysqlite_get_state(module);
    state->enable_callback_tracebacks = enable;

    Py_RETURN_NONE;
}
/*[clinic input]
_sqlite3.adapt as pysqlite_adapt

    obj: object
    proto: object(c_default='(PyObject *)clinic_state()->PrepareProtocolType') = PrepareProtocolType
    alt: object = NULL
    /

Adapt given object to given protocol.
[clinic start generated code]*/

static PyObject *
pysqlite_adapt_impl(PyObject *module, PyObject *obj, PyObject *proto,
                    PyObject *alt)
/*[clinic end generated code: output=0c3927c5fcd23dd9 input=a53dc9993e81e15f]*/
{
    /* Thin wrapper: delegate to the microprotocols machinery, which tries
     * registered adapters, then the protocol itself, then __conform__. */
    pysqlite_state *state = pysqlite_get_state(module);
    return pysqlite_microprotocols_adapt(state, obj, proto, alt);
}
/* Create the converters dict in module state and expose it as the module
 * attribute 'converters'.  Returns 0 on success, -1 on error. */
static int converters_init(PyObject* module)
{
    pysqlite_state *state = pysqlite_get_state(module);
    state->converters = PyDict_New();
    if (state->converters == NULL) {
        return -1;
    }

    return PyModule_AddObjectRef(module, "converters", state->converters);
}
/* Cache a reference to functools.lru_cache in module state.
 * Returns 0 on success, -1 (with an exception set) on import failure. */
static int
load_functools_lru_cache(PyObject *module)
{
    pysqlite_state *state = pysqlite_get_state(module);
    state->lru_cache = PyImport_ImportModuleAttrString("functools", "lru_cache");
    if (state->lru_cache == NULL) {
        return -1;
    }

    return 0;
}
/* Module-level method table.  All entries are Argument Clinic generated
 * except connect(), which uses a hand-written vectorcall wrapper so it can
 * forward everything to a user-supplied factory. */
static PyMethodDef module_methods[] = {
    PYSQLITE_ADAPT_METHODDEF
    PYSQLITE_COMPLETE_STATEMENT_METHODDEF
    {"connect", _PyCFunction_CAST(pysqlite_connect), METH_FASTCALL|METH_KEYWORDS, pysqlite_connect__doc__},
    PYSQLITE_ENABLE_CALLBACK_TRACE_METHODDEF
    PYSQLITE_REGISTER_ADAPTER_METHODDEF
    PYSQLITE_REGISTER_CONVERTER_METHODDEF
    {NULL, NULL}
};
/* SQLite C API result codes. See also:
 * - https://www.sqlite.org/c3ref/c_abort_rollback.html
 *
 * Note: the SQLite changelogs rarely mention new result codes, so in order to
 * keep the 'error_codes' table in sync with SQLite, we must manually inspect
 * sqlite3.h for every release.
 *
 * We keep the SQLITE_VERSION_NUMBER checks in order to easilyature declutter the
 * code when we adjust the SQLite version requirement.
 */
/* Name/value pairs; the table is NULL-terminated and consumed by
 * add_error_constants() and pysqlite_error_name(). */
static const struct {
    const char *name;
    long value;
} error_codes[] = {
#define DECLARE_ERROR_CODE(code) {#code, code}
    // Primary result code list
    DECLARE_ERROR_CODE(SQLITE_ABORT),
    DECLARE_ERROR_CODE(SQLITE_AUTH),
    DECLARE_ERROR_CODE(SQLITE_BUSY),
    DECLARE_ERROR_CODE(SQLITE_CANTOPEN),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT),
    DECLARE_ERROR_CODE(SQLITE_CORRUPT),
    DECLARE_ERROR_CODE(SQLITE_DONE),
    DECLARE_ERROR_CODE(SQLITE_EMPTY),
    DECLARE_ERROR_CODE(SQLITE_ERROR),
    DECLARE_ERROR_CODE(SQLITE_FORMAT),
    DECLARE_ERROR_CODE(SQLITE_FULL),
    DECLARE_ERROR_CODE(SQLITE_INTERNAL),
    DECLARE_ERROR_CODE(SQLITE_INTERRUPT),
    DECLARE_ERROR_CODE(SQLITE_IOERR),
    DECLARE_ERROR_CODE(SQLITE_LOCKED),
    DECLARE_ERROR_CODE(SQLITE_MISMATCH),
    DECLARE_ERROR_CODE(SQLITE_MISUSE),
    DECLARE_ERROR_CODE(SQLITE_NOLFS),
    DECLARE_ERROR_CODE(SQLITE_NOMEM),
    DECLARE_ERROR_CODE(SQLITE_NOTADB),
    DECLARE_ERROR_CODE(SQLITE_NOTFOUND),
    DECLARE_ERROR_CODE(SQLITE_OK),
    DECLARE_ERROR_CODE(SQLITE_PERM),
    DECLARE_ERROR_CODE(SQLITE_PROTOCOL),
    DECLARE_ERROR_CODE(SQLITE_RANGE),
    DECLARE_ERROR_CODE(SQLITE_READONLY),
    DECLARE_ERROR_CODE(SQLITE_ROW),
    DECLARE_ERROR_CODE(SQLITE_SCHEMA),
    DECLARE_ERROR_CODE(SQLITE_TOOBIG),
    DECLARE_ERROR_CODE(SQLITE_NOTICE),
    DECLARE_ERROR_CODE(SQLITE_WARNING),
    // Extended result code list
    DECLARE_ERROR_CODE(SQLITE_ABORT_ROLLBACK),
    DECLARE_ERROR_CODE(SQLITE_BUSY_RECOVERY),
    DECLARE_ERROR_CODE(SQLITE_CANTOPEN_FULLPATH),
    DECLARE_ERROR_CODE(SQLITE_CANTOPEN_ISDIR),
    DECLARE_ERROR_CODE(SQLITE_CANTOPEN_NOTEMPDIR),
    DECLARE_ERROR_CODE(SQLITE_CORRUPT_VTAB),
    DECLARE_ERROR_CODE(SQLITE_IOERR_ACCESS),
    DECLARE_ERROR_CODE(SQLITE_IOERR_BLOCKED),
    DECLARE_ERROR_CODE(SQLITE_IOERR_CHECKRESERVEDLOCK),
    DECLARE_ERROR_CODE(SQLITE_IOERR_CLOSE),
    DECLARE_ERROR_CODE(SQLITE_IOERR_DELETE),
    DECLARE_ERROR_CODE(SQLITE_IOERR_DELETE_NOENT),
    DECLARE_ERROR_CODE(SQLITE_IOERR_DIR_CLOSE),
    DECLARE_ERROR_CODE(SQLITE_IOERR_DIR_FSYNC),
    DECLARE_ERROR_CODE(SQLITE_IOERR_FSTAT),
    DECLARE_ERROR_CODE(SQLITE_IOERR_FSYNC),
    DECLARE_ERROR_CODE(SQLITE_IOERR_LOCK),
    DECLARE_ERROR_CODE(SQLITE_IOERR_NOMEM),
    DECLARE_ERROR_CODE(SQLITE_IOERR_RDLOCK),
    DECLARE_ERROR_CODE(SQLITE_IOERR_READ),
    DECLARE_ERROR_CODE(SQLITE_IOERR_SEEK),
    DECLARE_ERROR_CODE(SQLITE_IOERR_SHMLOCK),
    DECLARE_ERROR_CODE(SQLITE_IOERR_SHMMAP),
    DECLARE_ERROR_CODE(SQLITE_IOERR_SHMOPEN),
    DECLARE_ERROR_CODE(SQLITE_IOERR_SHMSIZE),
    DECLARE_ERROR_CODE(SQLITE_IOERR_SHORT_READ),
    DECLARE_ERROR_CODE(SQLITE_IOERR_TRUNCATE),
    DECLARE_ERROR_CODE(SQLITE_IOERR_UNLOCK),
    DECLARE_ERROR_CODE(SQLITE_IOERR_WRITE),
    DECLARE_ERROR_CODE(SQLITE_LOCKED_SHAREDCACHE),
    DECLARE_ERROR_CODE(SQLITE_READONLY_CANTLOCK),
    DECLARE_ERROR_CODE(SQLITE_READONLY_RECOVERY),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_CHECK),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_COMMITHOOK),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_FOREIGNKEY),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_FUNCTION),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_NOTNULL),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_PRIMARYKEY),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_TRIGGER),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_UNIQUE),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_VTAB),
    DECLARE_ERROR_CODE(SQLITE_READONLY_ROLLBACK),
    DECLARE_ERROR_CODE(SQLITE_IOERR_MMAP),
    DECLARE_ERROR_CODE(SQLITE_NOTICE_RECOVER_ROLLBACK),
    DECLARE_ERROR_CODE(SQLITE_NOTICE_RECOVER_WAL),
    DECLARE_ERROR_CODE(SQLITE_BUSY_SNAPSHOT),
    DECLARE_ERROR_CODE(SQLITE_IOERR_GETTEMPPATH),
    DECLARE_ERROR_CODE(SQLITE_WARNING_AUTOINDEX),
    DECLARE_ERROR_CODE(SQLITE_CANTOPEN_CONVPATH),
    DECLARE_ERROR_CODE(SQLITE_IOERR_CONVPATH),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_ROWID),
    DECLARE_ERROR_CODE(SQLITE_READONLY_DBMOVED),
    DECLARE_ERROR_CODE(SQLITE_AUTH_USER),
    DECLARE_ERROR_CODE(SQLITE_IOERR_VNODE),
    DECLARE_ERROR_CODE(SQLITE_IOERR_AUTH),
    DECLARE_ERROR_CODE(SQLITE_OK_LOAD_PERMANENTLY),
#if SQLITE_VERSION_NUMBER >= 3021000
    DECLARE_ERROR_CODE(SQLITE_IOERR_BEGIN_ATOMIC),
    DECLARE_ERROR_CODE(SQLITE_IOERR_COMMIT_ATOMIC),
    DECLARE_ERROR_CODE(SQLITE_IOERR_ROLLBACK_ATOMIC),
#endif
#if SQLITE_VERSION_NUMBER >= 3022000
    DECLARE_ERROR_CODE(SQLITE_ERROR_MISSING_COLLSEQ),
    DECLARE_ERROR_CODE(SQLITE_ERROR_RETRY),
    DECLARE_ERROR_CODE(SQLITE_READONLY_CANTINIT),
    DECLARE_ERROR_CODE(SQLITE_READONLY_DIRECTORY),
#endif
#if SQLITE_VERSION_NUMBER >= 3024000
    DECLARE_ERROR_CODE(SQLITE_CORRUPT_SEQUENCE),
    DECLARE_ERROR_CODE(SQLITE_LOCKED_VTAB),
#endif
#if SQLITE_VERSION_NUMBER >= 3025000
    DECLARE_ERROR_CODE(SQLITE_CANTOPEN_DIRTYWAL),
    DECLARE_ERROR_CODE(SQLITE_ERROR_SNAPSHOT),
#endif
#if SQLITE_VERSION_NUMBER >= 3031000
    DECLARE_ERROR_CODE(SQLITE_CANTOPEN_SYMLINK),
    DECLARE_ERROR_CODE(SQLITE_CONSTRAINT_PINNED),
    DECLARE_ERROR_CODE(SQLITE_OK_SYMLINK),
#endif
#if SQLITE_VERSION_NUMBER >= 3032000
    DECLARE_ERROR_CODE(SQLITE_BUSY_TIMEOUT),
    DECLARE_ERROR_CODE(SQLITE_CORRUPT_INDEX),
    DECLARE_ERROR_CODE(SQLITE_IOERR_DATA),
#endif
#if SQLITE_VERSION_NUMBER >= 3034000
    DECLARE_ERROR_CODE(SQLITE_IOERR_CORRUPTFS),
#endif
#undef DECLARE_ERROR_CODE
    {NULL, 0},
};
/* Expose every entry of the 'error_codes' table as an integer constant on
 * 'module'.  Returns 0 on success, -1 (with an exception set) on failure. */
static int
add_error_constants(PyObject *module)
{
    int i = 0;
    while (error_codes[i].name != NULL) {
        if (PyModule_AddIntConstant(module, error_codes[i].name,
                                    error_codes[i].value) < 0) {
            return -1;
        }
        i++;
    }
    return 0;
}
/* Map an SQLite result code to its symbolic name ("SQLITE_OK", ...).
 * Returns NULL when 'rc' is not present in the 'error_codes' table. */
const char *
pysqlite_error_name(int rc)
{
    const char *found = NULL;
    for (int i = 0; found == NULL && error_codes[i].name != NULL; i++) {
        if (error_codes[i].value == rc) {
            found = error_codes[i].name;
        }
    }
    return found;
}
/* Build the SQLITE_KEYWORDS tuple from SQLite's keyword list and attach it
 * to the module.  Requires SQLite 3.24+ (sqlite3_keyword_count API); on
 * older versions this is a no-op.  Returns 0 on success, -1 on error. */
static int
add_keyword_tuple(PyObject *module)
{
#if SQLITE_VERSION_NUMBER >= 3024000
    int count = sqlite3_keyword_count();
    PyObject *keywords = PyTuple_New(count);
    if (keywords == NULL) {
        return -1;
    }
    for (int i = 0; i < count; i++) {
        const char *keyword;
        int size;
        /* 'keyword' is not NUL-terminated; 'size' gives its length. */
        int result = sqlite3_keyword_name(i, &keyword, &size);
        if (result != SQLITE_OK) {
            pysqlite_state *state = pysqlite_get_state(module);
            set_error_from_code(state, result);
            goto error;
        }
        PyObject *kwd = PyUnicode_FromStringAndSize(keyword, size);
        if (!kwd) {
            goto error;
        }
        PyTuple_SET_ITEM(keywords, i, kwd);
    }
    /* PyModule_Add steals the reference to 'keywords'. */
    return PyModule_Add(module, "SQLITE_KEYWORDS", keywords);

error:
    Py_DECREF(keywords);
    return -1;
#else
    return 0;
#endif
}
/* Register the module's integer constants: parse flags, authorizer action
 * codes, run-time limit categories, and sqlite3_db_config() options.
 * Returns 0 on success, -1 on error. */
static int
add_integer_constants(PyObject *module) {
#define ADD_INT(ival)                                           \
    do {                                                        \
        if (PyModule_AddIntConstant(module, #ival, ival) < 0) { \
            return -1;                                          \
        }                                                       \
    } while (0);                                                \

    /* detect_types flags for connect(). */
    ADD_INT(PARSE_DECLTYPES);
    ADD_INT(PARSE_COLNAMES);
    /* Authorizer return values and action codes. */
    ADD_INT(SQLITE_DENY);
    ADD_INT(SQLITE_IGNORE);
    ADD_INT(SQLITE_CREATE_INDEX);
    ADD_INT(SQLITE_CREATE_TABLE);
    ADD_INT(SQLITE_CREATE_TEMP_INDEX);
    ADD_INT(SQLITE_CREATE_TEMP_TABLE);
    ADD_INT(SQLITE_CREATE_TEMP_TRIGGER);
    ADD_INT(SQLITE_CREATE_TEMP_VIEW);
    ADD_INT(SQLITE_CREATE_TRIGGER);
    ADD_INT(SQLITE_CREATE_VIEW);
    ADD_INT(SQLITE_DELETE);
    ADD_INT(SQLITE_DROP_INDEX);
    ADD_INT(SQLITE_DROP_TABLE);
    ADD_INT(SQLITE_DROP_TEMP_INDEX);
    ADD_INT(SQLITE_DROP_TEMP_TABLE);
    ADD_INT(SQLITE_DROP_TEMP_TRIGGER);
    ADD_INT(SQLITE_DROP_TEMP_VIEW);
    ADD_INT(SQLITE_DROP_TRIGGER);
    ADD_INT(SQLITE_DROP_VIEW);
    ADD_INT(SQLITE_INSERT);
    ADD_INT(SQLITE_PRAGMA);
    ADD_INT(SQLITE_READ);
    ADD_INT(SQLITE_SELECT);
    ADD_INT(SQLITE_TRANSACTION);
    ADD_INT(SQLITE_UPDATE);
    ADD_INT(SQLITE_ATTACH);
    ADD_INT(SQLITE_DETACH);
    ADD_INT(SQLITE_ALTER_TABLE);
    ADD_INT(SQLITE_REINDEX);
    ADD_INT(SQLITE_ANALYZE);
    ADD_INT(SQLITE_CREATE_VTABLE);
    ADD_INT(SQLITE_DROP_VTABLE);
    ADD_INT(SQLITE_FUNCTION);
    ADD_INT(SQLITE_SAVEPOINT);
    ADD_INT(SQLITE_RECURSIVE);
    // Run-time limit categories
    ADD_INT(SQLITE_LIMIT_LENGTH);
    ADD_INT(SQLITE_LIMIT_SQL_LENGTH);
    ADD_INT(SQLITE_LIMIT_COLUMN);
    ADD_INT(SQLITE_LIMIT_EXPR_DEPTH);
    ADD_INT(SQLITE_LIMIT_COMPOUND_SELECT);
    ADD_INT(SQLITE_LIMIT_VDBE_OP);
    ADD_INT(SQLITE_LIMIT_FUNCTION_ARG);
    ADD_INT(SQLITE_LIMIT_ATTACHED);
    ADD_INT(SQLITE_LIMIT_LIKE_PATTERN_LENGTH);
    ADD_INT(SQLITE_LIMIT_VARIABLE_NUMBER);
    ADD_INT(SQLITE_LIMIT_TRIGGER_DEPTH);
    ADD_INT(SQLITE_LIMIT_WORKER_THREADS);

    /*
     * Database connection configuration options.
     * See https://www.sqlite.org/c3ref/c_dbconfig_defensive.html
     */
    ADD_INT(SQLITE_DBCONFIG_ENABLE_FKEY);
    ADD_INT(SQLITE_DBCONFIG_ENABLE_TRIGGER);
    ADD_INT(SQLITE_DBCONFIG_ENABLE_FTS3_TOKENIZER);
    ADD_INT(SQLITE_DBCONFIG_ENABLE_LOAD_EXTENSION);
#if SQLITE_VERSION_NUMBER >= 3016000
    ADD_INT(SQLITE_DBCONFIG_NO_CKPT_ON_CLOSE);
#endif
#if SQLITE_VERSION_NUMBER >= 3020000
    ADD_INT(SQLITE_DBCONFIG_ENABLE_QPSG);
#endif
#if SQLITE_VERSION_NUMBER >= 3022000
    ADD_INT(SQLITE_DBCONFIG_TRIGGER_EQP);
#endif
#if SQLITE_VERSION_NUMBER >= 3024000
    ADD_INT(SQLITE_DBCONFIG_RESET_DATABASE);
#endif
#if SQLITE_VERSION_NUMBER >= 3026000
    ADD_INT(SQLITE_DBCONFIG_DEFENSIVE);
#endif
#if SQLITE_VERSION_NUMBER >= 3028000
    ADD_INT(SQLITE_DBCONFIG_WRITABLE_SCHEMA);
#endif
#if SQLITE_VERSION_NUMBER >= 3029000
    ADD_INT(SQLITE_DBCONFIG_DQS_DDL);
    ADD_INT(SQLITE_DBCONFIG_DQS_DML);
    ADD_INT(SQLITE_DBCONFIG_LEGACY_ALTER_TABLE);
#endif
#if SQLITE_VERSION_NUMBER >= 3030000
    ADD_INT(SQLITE_DBCONFIG_ENABLE_VIEW);
#endif
#if SQLITE_VERSION_NUMBER >= 3031000
    ADD_INT(SQLITE_DBCONFIG_LEGACY_FILE_FORMAT);
    ADD_INT(SQLITE_DBCONFIG_TRUSTED_SCHEMA);
#endif
#undef ADD_INT
    return 0;
}
/* Convert SQLite default threading mode (as set by the compile-time constant
 * SQLITE_THREADSAFE) to the corresponding DB-API 2.0 (PEP 249) threadsafety
 * level. */
static int
get_threadsafety(pysqlite_state *state)
{
    /* Mapping from SQLite mode to DB-API level:
     *   0: single-thread — threads may not share the module        -> 0
     *   1: serialized    — share module, connections, and cursors  -> 3
     *   2: multi-thread  — share the module, but not connections   -> 1
     */
    static const int dbapi_level[] = {0, 3, 1};
    int mode = sqlite3_threadsafe();
    if (0 <= mode && mode <= 2) {
        return dbapi_level[mode];
    }
    PyErr_Format(state->InterfaceError,
                 "Unable to interpret SQLite threadsafety mode. Got %d, "
                 "expected 0, 1, or 2", mode);
    return -1;
}
// GC traversal slot (m_traverse): visit every PyObject owned by the
// per-module state so the cycle collector can account for them.
static int
module_traverse(PyObject *module, visitproc visit, void *arg)
{
    pysqlite_state *state = pysqlite_get_state(module);

    // Exceptions
    Py_VISIT(state->DataError);
    Py_VISIT(state->DatabaseError);
    Py_VISIT(state->Error);
    Py_VISIT(state->IntegrityError);
    Py_VISIT(state->InterfaceError);
    Py_VISIT(state->InternalError);
    Py_VISIT(state->NotSupportedError);
    Py_VISIT(state->OperationalError);
    Py_VISIT(state->ProgrammingError);
    Py_VISIT(state->Warning);

    // Types
    Py_VISIT(state->BlobType);
    Py_VISIT(state->ConnectionType);
    Py_VISIT(state->CursorType);
    Py_VISIT(state->PrepareProtocolType);
    Py_VISIT(state->RowType);
    Py_VISIT(state->StatementType);

    // Misc
    Py_VISIT(state->converters);
    Py_VISIT(state->lru_cache);
    Py_VISIT(state->psyco_adapters);

    return 0;
}
// GC clear slot (m_clear): drop every reference held by the per-module
// state.  Interned strings are cleared here too (they are not visited in
// module_traverse since, being strings, they cannot participate in cycles).
static int
module_clear(PyObject *module)
{
    pysqlite_state *state = pysqlite_get_state(module);

    // Exceptions
    Py_CLEAR(state->DataError);
    Py_CLEAR(state->DatabaseError);
    Py_CLEAR(state->Error);
    Py_CLEAR(state->IntegrityError);
    Py_CLEAR(state->InterfaceError);
    Py_CLEAR(state->InternalError);
    Py_CLEAR(state->NotSupportedError);
    Py_CLEAR(state->OperationalError);
    Py_CLEAR(state->ProgrammingError);
    Py_CLEAR(state->Warning);

    // Types
    Py_CLEAR(state->BlobType);
    Py_CLEAR(state->ConnectionType);
    Py_CLEAR(state->CursorType);
    Py_CLEAR(state->PrepareProtocolType);
    Py_CLEAR(state->RowType);
    Py_CLEAR(state->StatementType);

    // Misc
    Py_CLEAR(state->converters);
    Py_CLEAR(state->lru_cache);
    Py_CLEAR(state->psyco_adapters);

    // Interned strings
    Py_CLEAR(state->str___adapt__);
    Py_CLEAR(state->str___conform__);
    Py_CLEAR(state->str_executescript);
    Py_CLEAR(state->str_finalize);
    Py_CLEAR(state->str_inverse);
    Py_CLEAR(state->str_step);
    Py_CLEAR(state->str_upper);
    Py_CLEAR(state->str_value);

    return 0;
}
static void
module_free(void *module)
{
(void)module_clear((PyObject *)module);
}
// Add a type object to the module; jumps to the enclosing function's
// `error` label on failure.
#define ADD_TYPE(module, type)                 \
do {                                           \
    if (PyModule_AddType(module, type) < 0) {  \
        goto error;                            \
    }                                          \
} while (0)

// Create exception `exc` derived from `base`, store it in the module state,
// and expose it on the module; jumps to `error` on failure.
#define ADD_EXCEPTION(module, state, exc, base)                         \
do {                                                                    \
    state->exc = PyErr_NewException(MODULE_NAME "." #exc, base, NULL);  \
    if (state->exc == NULL) {                                           \
        goto error;                                                     \
    }                                                                   \
    ADD_TYPE(module, (PyTypeObject *)state->exc);                       \
} while (0)

// Intern `string` and store it in the module state's str_<string> slot;
// jumps to `error` on failure.
#define ADD_INTERNED(state, string)                      \
do {                                                     \
    PyObject *tmp = PyUnicode_InternFromString(#string); \
    if (tmp == NULL) {                                   \
        goto error;                                      \
    }                                                    \
    state->str_ ## string = tmp;                         \
} while (0)
// Py_mod_exec slot: populate the module at import time.  Sets up the heap
// types, the DB-API exception hierarchy, interned strings, and module
// constants; then initializes the microprotocols layer and default
// converters.  Returns 0 on success, -1 with an exception set on failure.
static int
module_exec(PyObject *module)
{
    // Refuse to import against an SQLite older than the supported minimum.
    if (sqlite3_libversion_number() < 3015002) {
        PyErr_SetString(PyExc_ImportError, MODULE_NAME ": SQLite 3.15.2 or higher required");
        return -1;
    }

    int rc = sqlite3_initialize();
    if (rc != SQLITE_OK) {
        PyErr_SetString(PyExc_ImportError, sqlite3_errstr(rc));
        return -1;
    }

    // Create all heap types and stash them in the module state.
    if ((pysqlite_row_setup_types(module) < 0) ||
        (pysqlite_cursor_setup_types(module) < 0) ||
        (pysqlite_connection_setup_types(module) < 0) ||
        (pysqlite_statement_setup_types(module) < 0) ||
        (pysqlite_prepare_protocol_setup_types(module) < 0) ||
        (pysqlite_blob_setup_types(module) < 0)
       ) {
        goto error;
    }

    pysqlite_state *state = pysqlite_get_state(module);

    // Expose the public types (Statement is intentionally not exposed).
    ADD_TYPE(module, state->BlobType);
    ADD_TYPE(module, state->ConnectionType);
    ADD_TYPE(module, state->CursorType);
    ADD_TYPE(module, state->PrepareProtocolType);
    ADD_TYPE(module, state->RowType);

    /*** Create DB-API Exception hierarchy */
    ADD_EXCEPTION(module, state, Error, PyExc_Exception);
    ADD_EXCEPTION(module, state, Warning, PyExc_Exception);

    /* Error subclasses */
    ADD_EXCEPTION(module, state, InterfaceError, state->Error);
    ADD_EXCEPTION(module, state, DatabaseError, state->Error);

    /* DatabaseError subclasses */
    ADD_EXCEPTION(module, state, InternalError, state->DatabaseError);
    ADD_EXCEPTION(module, state, OperationalError, state->DatabaseError);
    ADD_EXCEPTION(module, state, ProgrammingError, state->DatabaseError);
    ADD_EXCEPTION(module, state, IntegrityError, state->DatabaseError);
    ADD_EXCEPTION(module, state, DataError, state->DatabaseError);
    ADD_EXCEPTION(module, state, NotSupportedError, state->DatabaseError);

    /* Add interned strings */
    ADD_INTERNED(state, __adapt__);
    ADD_INTERNED(state, __conform__);
    ADD_INTERNED(state, executescript);
    ADD_INTERNED(state, finalize);
    ADD_INTERNED(state, inverse);
    ADD_INTERNED(state, step);
    ADD_INTERNED(state, upper);
    ADD_INTERNED(state, value);

    /* Set error constants */
    if (add_error_constants(module) < 0) {
        goto error;
    }

    /* Set integer constants */
    if (add_integer_constants(module) < 0) {
        goto error;
    }

    if (add_keyword_tuple(module) < 0) {
        goto error;
    }

    if (PyModule_AddStringConstant(module, "sqlite_version", sqlite3_libversion())) {
        goto error;
    }

    if (PyModule_AddIntMacro(module, LEGACY_TRANSACTION_CONTROL) < 0) {
        goto error;
    }

    // Publish the DB-API 2.0 threadsafety level for this build of SQLite.
    int threadsafety = get_threadsafety(state);
    if (threadsafety < 0) {
        goto error;
    }
    if (PyModule_AddIntConstant(module, "threadsafety", threadsafety) < 0) {
        goto error;
    }

    /* initialize microprotocols layer */
    if (pysqlite_microprotocols_init(module) < 0) {
        goto error;
    }

    /* initialize the default converters */
    if (converters_init(module) < 0) {
        goto error;
    }

    if (load_functools_lru_cache(module) < 0) {
        goto error;
    }

    return 0;

error:
    // The exception is already set; partially-initialized state is released
    // via the module's m_clear/m_free slots when the module is destroyed.
    return -1;
}
// Multi-phase init slots: module_exec populates the module; the module
// supports per-interpreter GIL and runs without the GIL on free-threaded
// builds.
static struct PyModuleDef_Slot module_slots[] = {
    {Py_mod_exec, module_exec},
    {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED},
    {Py_mod_gil, Py_MOD_GIL_NOT_USED},
    {0, NULL},
};

// Module definition; m_size > 0 gives each module instance its own
// pysqlite_state.
struct PyModuleDef _sqlite3module = {
    .m_base = PyModuleDef_HEAD_INIT,
    .m_name = "_sqlite3",
    .m_size = sizeof(pysqlite_state),
    .m_methods = module_methods,
    .m_slots = module_slots,
    .m_traverse = module_traverse,
    .m_clear = module_clear,
    .m_free = module_free,
};

// Entry point: hand the definition to the multi-phase init machinery.
PyMODINIT_FUNC
PyInit__sqlite3(void)
{
    return PyModuleDef_Init(&_sqlite3module);
} | c | github | https://github.com/python/cpython | Modules/_sqlite/module.c
# -*- coding: utf-8 -*-
# Copyright 2012-2013 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""
This module contains "quick" tests that are run on a demo database
without any fixture. You can run only these tests by issuing::
$ python manage.py test cbss.QuickTest
$ django-admin.py test --settings=lino_welfare.projects.eupen.settings.demo cbss.QuickTest
"""
from builtins import str
import datetime
import logging
logger = logging.getLogger(__name__)
#~ from django.utils import unittest
#~ from django.test.client import Client
from django.conf import settings
from django.core.exceptions import ValidationError
from lino.utils.djangotest import TestCase
from lino_welfare.modlib.cbss import models as cbss
from lino.utils import IncompleteDate
from lino.utils.instantiator import create_and_get
from lino.api import rt
NOW = datetime.datetime(2015, 5, 11, 18, 31, 1)
class QuickTest(TestCase):
    """Exercise the CBSS request classes end-to-end against a demo site.

    Builds SSDN request XML for IdentifyPersonRequest, RetrieveTIGroupsRequest
    and ManageAccessRequest and compares it against expected documents.  No
    actual CBSS connection is made: ``execute_request`` is called with
    ``simulate_response``.
    """

    # The site cache (wsdl stubs) is needed by these tests, so do build it.
    never_build_site_cache = False
    fixtures = 'sectors purposes democfg'.split()

    def test01(self):
        # print("20180502 test_cbss.test01()")
        settings.SITE.startup()  # create cache/wsdl files
        root = create_and_get(settings.SITE.user_model, username='root')
        luc = create_and_get(
            rt.models.pcsw.Client, first_name='Luc', last_name='Saffre')

        # First IdentifyPersonRequest
        # Create an IPR with NISS just to have the XML validated.
        req = cbss.IdentifyPersonRequest(
            national_id="70100853190", user=root, person=luc)
        # Without a birth date the request must not validate.
        try:
            req.full_clean()
            self.fail('Expected ValidationError "birth_date cannot be blank."')
        except ValidationError:
            pass
        req.birth_date = IncompleteDate(1938, 6, 1)
        try:
            req.validate_request()
        except Warning as e:
            self.assertEqual(str(e), "")
            pass
        # An incomplete (year-only) birth date is acceptable.
        req.birth_date = IncompleteDate(1938, 0, 0)
        req.validate_request()
        req.execute_request(simulate_response='Foo', now=NOW)
        expected = """\
<ssdn:SSDNRequest xmlns:ssdn="http://www.ksz-bcss.fgov.be/XSD/SSDN/Service">
<ssdn:RequestContext>
<ssdn:AuthorizedUser>
<ssdn:UserID>00901234567</ssdn:UserID>
<ssdn:Email>info@example.com</ssdn:Email>
<ssdn:OrgUnit>0123456789</ssdn:OrgUnit>
<ssdn:MatrixID>17</ssdn:MatrixID>
<ssdn:MatrixSubID>1</ssdn:MatrixSubID>
</ssdn:AuthorizedUser>
<ssdn:Message>
<ssdn:Reference>IdentifyPersonRequest # 1</ssdn:Reference>
<ssdn:TimeRequest>20150511T183101</ssdn:TimeRequest>
</ssdn:Message>
</ssdn:RequestContext>
<ssdn:ServiceRequest>
<ssdn:ServiceId>OCMWCPASIdentifyPerson</ssdn:ServiceId>
<ssdn:Version>20050930</ssdn:Version>
<ipr:IdentifyPersonRequest xmlns:ipr="http://www.ksz-bcss.fgov.be/XSD/SSDN/OCMW_CPAS/IdentifyPerson">
<ipr:SearchCriteria>
<ipr:SSIN>70100853190</ipr:SSIN>
<ipr:PhoneticCriteria>
<ipr:LastName></ipr:LastName>
<ipr:FirstName></ipr:FirstName>
<ipr:MiddleName></ipr:MiddleName>
<ipr:BirthDate>1938-00-00</ipr:BirthDate>
</ipr:PhoneticCriteria>
</ipr:SearchCriteria>
<ipr:VerificationData>
<ipr:PersonData>
<ipr:LastName></ipr:LastName>
<ipr:FirstName></ipr:FirstName>
<ipr:MiddleName></ipr:MiddleName>
<ipr:BirthDate>1938-00-00</ipr:BirthDate>
</ipr:PersonData>
</ipr:VerificationData>
</ipr:IdentifyPersonRequest>
</ssdn:ServiceRequest>
</ssdn:SSDNRequest>"""
        self.assertEquivalent(expected, req.request_xml)
        ##
        req = cbss.IdentifyPersonRequest(
            last_name="MUSTERMANN",
            birth_date=IncompleteDate(1938, 0, 0))
        req.validate_request()

        # Create another one, this time a name search.
        # This time we also inspect the generated XML.
        req = cbss.IdentifyPersonRequest(
            user=root, person=luc,
            last_name="MUSTERMANN",
            first_name="Max",
            birth_date=IncompleteDate(1938, 6, 1))
        req.validate_request()
        req.execute_request(simulate_response='Foo', now=NOW)
        expected = """\
<ssdn:SSDNRequest xmlns:ssdn="http://www.ksz-bcss.fgov.be/XSD/SSDN/Service">
<ssdn:RequestContext>
<ssdn:AuthorizedUser>
<ssdn:UserID>00901234567</ssdn:UserID>
<ssdn:Email>info@example.com</ssdn:Email>
<ssdn:OrgUnit>0123456789</ssdn:OrgUnit>
<ssdn:MatrixID>17</ssdn:MatrixID>
<ssdn:MatrixSubID>1</ssdn:MatrixSubID>
</ssdn:AuthorizedUser>
<ssdn:Message>
<ssdn:Reference>IdentifyPersonRequest # 2</ssdn:Reference>
<ssdn:TimeRequest>20150511T183101</ssdn:TimeRequest>
</ssdn:Message>
</ssdn:RequestContext>
<ssdn:ServiceRequest>
<ssdn:ServiceId>OCMWCPASIdentifyPerson</ssdn:ServiceId>
<ssdn:Version>20050930</ssdn:Version>
<ipr:IdentifyPersonRequest xmlns:ipr="http://www.ksz-bcss.fgov.be/XSD/SSDN/OCMW_CPAS/IdentifyPerson">
<ipr:SearchCriteria>
<ipr:PhoneticCriteria>
<ipr:LastName>MUSTERMANN</ipr:LastName>
<ipr:FirstName>Max</ipr:FirstName>
<ipr:MiddleName></ipr:MiddleName>
<ipr:BirthDate>1938-06-01</ipr:BirthDate>
</ipr:PhoneticCriteria>
</ipr:SearchCriteria>
</ipr:IdentifyPersonRequest>
</ssdn:ServiceRequest>
</ssdn:SSDNRequest>"""
        self.assertEquivalent(expected, req.request_xml)
        if settings.SITE.plugins.cbss.cbss_environment != 'test':
            # Skip live tests unless we are in test environment.
            # Otherwise we would have to build /media/cache/wsdl files
            return

        # Execute a RetrieveTIGroupsRequest.
        req = cbss.RetrieveTIGroupsRequest(
            user=root, person=luc,
            national_id='12345678901', language='fr')
        # Try it without environment and see the XML.
        # Note that NewStyleRequests have no validate_request method.
        req.execute_request(simulate_response='Foo', now=NOW)
        expected = ""
        self.assertEquivalent(expected, req.request_xml)

        # Now a ManageAccessRequest
        today = datetime.date(2012, 5, 24)
        kw = dict()
        # dossier in onderzoek voor een maximale periode van twee maanden
        kw.update(purpose_id=1)
        kw.update(national_id='68060105329')
        kw.update(user=root)
        kw.update(person=luc)
        kw.update(start_date=today)
        kw.update(end_date=today)
        kw.update(action=cbss.ManageActions.REGISTER)
        kw.update(query_register=cbss.QueryRegisters.SECONDARY)
        #~ kw.update(id_card_no=)
        kw.update(last_name='SAFFRE')
        kw.update(first_name='LUC JOHANNES')
        kw.update(birth_date=IncompleteDate(1968, 6, 1))
        req = cbss.ManageAccessRequest(**kw)
        req.execute_request(simulate_response='Foo', now=NOW)
        expected = """<ssdn:SSDNRequest xmlns:ssdn="http://www.ksz-bcss.fgov.be/XSD/SSDN/Service">
<ssdn:RequestContext>
<ssdn:AuthorizedUser>
<ssdn:UserID>00901234567</ssdn:UserID>
<ssdn:Email>info@example.com</ssdn:Email>
<ssdn:OrgUnit>0123456789</ssdn:OrgUnit>
<ssdn:MatrixID>17</ssdn:MatrixID>
<ssdn:MatrixSubID>1</ssdn:MatrixSubID>
</ssdn:AuthorizedUser>
<ssdn:Message>
<ssdn:Reference>ManageAccessRequest # 1</ssdn:Reference>
<ssdn:TimeRequest>20150511T183101</ssdn:TimeRequest>
</ssdn:Message>
</ssdn:RequestContext>
<ssdn:ServiceRequest>
<ssdn:ServiceId>OCMWCPASManageAccess</ssdn:ServiceId>
<ssdn:Version>20050930</ssdn:Version>
<mar:ManageAccessRequest xmlns:mar="http://www.ksz-bcss.fgov.be/XSD/SSDN/OCMW_CPAS/ManageAccess">
<mar:SSIN>68060105329</mar:SSIN>
<mar:Purpose>10</mar:Purpose>
<mar:Period>
<common:StartDate xmlns:common="http://www.ksz-bcss.fgov.be/XSD/SSDN/Common">2012-05-24</common:StartDate>
<common:EndDate xmlns:common="http://www.ksz-bcss.fgov.be/XSD/SSDN/Common">2012-05-24</common:EndDate>
</mar:Period>
<mar:Action>REGISTER</mar:Action>
<mar:Sector>17</mar:Sector>
<mar:QueryRegister>SECONDARY</mar:QueryRegister>
<mar:ProofOfAuthentication>
<mar:PersonData>
<mar:LastName>SAFFRE</mar:LastName>
<mar:FirstName>LUC JOHANNES</mar:FirstName>
<mar:BirthDate>1968-06-01</mar:BirthDate>
</mar:PersonData>
</mar:ProofOfAuthentication>
</mar:ManageAccessRequest>
</ssdn:ServiceRequest>
</ssdn:SSDNRequest>
"""
        self.assertEquivalent(expected, req.request_xml) | unknown | codeparrot/codeparrot-clean | |
"""
kombu.transport.pyro
======================
Pyro transport.
Requires the :mod:`Pyro4` library to be installed.
"""
from __future__ import absolute_import
import sys
from kombu.five import reraise
from kombu.utils import cached_property
from . import virtual
try:
import Pyro4 as pyro
from Pyro4.errors import NamingError
except ImportError: # pragma: no cover
pyro = NamingError = None # noqa
DEFAULT_PORT = 9090
E_LOOKUP = """\
Unable to locate pyro nameserver {0.virtual_host} on host {0.hostname}\
"""
class Channel(virtual.Channel):
    """Pyro channel: delegates all queue operations to a remote
    shared-queue object obtained from the connection."""

    def queues(self):
        """Return the names of all queues known to the remote server."""
        return self.shared_queues.get_queue_names()

    def _new_queue(self, queue, **kwargs):
        """Declare *queue* remotely unless it already exists."""
        if queue in self.queues():
            return
        self.shared_queues.new_queue(queue)

    def _get(self, queue, timeout=None):
        """Pop and return the next message from *queue*."""
        return self.shared_queues._get(self._queue_for(queue))

    def _queue_for(self, queue):
        """Ensure *queue* exists remotely, then return its name."""
        self._new_queue(queue)
        return queue

    def _put(self, queue, message, **kwargs):
        """Append *message* to *queue*, creating the queue if needed."""
        self.shared_queues._put(self._queue_for(queue), message)

    def _size(self, queue):
        """Return the number of messages currently in *queue*."""
        return self.shared_queues._size(queue)

    def _delete(self, queue, *args):
        """Remove *queue* from the remote server."""
        self.shared_queues._delete(queue)

    def _purge(self, queue):
        """Discard all messages in *queue*; return the removed count."""
        return self.shared_queues._purge(queue)

    def after_reply_message_received(self, queue):
        """No-op: reply queues require no cleanup for this transport."""

    @cached_property
    def shared_queues(self):
        """Proxy to the remote shared-queue object, cached per channel."""
        return self.connection.shared_queues
class Transport(virtual.Transport):
    """Virtual transport that stores queues in a remote Pyro object.

    The Pyro name server is looked up on the broker host, and the object
    registered under the connection's virtual host name provides the
    shared queues.
    """

    Channel = Channel

    #: memory backend state is global.
    state = virtual.BrokerState()

    default_port = DEFAULT_PORT

    driver_type = driver_name = 'pyro'

    def _open(self):
        # Locate the Pyro name server on the broker host and resolve the
        # shared-queue object registered under the virtual host name.
        conninfo = self.client
        pyro.config.HMAC_KEY = conninfo.virtual_host
        try:
            nameserver = pyro.locateNS(host=conninfo.hostname,
                                       port=self.default_port)
            # name of registered pyro object
            uri = nameserver.lookup(conninfo.virtual_host)
            return pyro.Proxy(uri)
        except NamingError:
            # Re-raise with a friendlier message, keeping the original
            # traceback attached.
            reraise(NamingError, NamingError(E_LOOKUP.format(conninfo)),
                    sys.exc_info()[2])

    def driver_version(self):
        return pyro.__version__

    @cached_property
    def shared_queues(self):
        # Remote proxy shared by all channels of this connection.
        return self._open() | unknown | codeparrot/codeparrot-clean | |
#!/usr/bin/python -u
#
# this tests the DTD validation with the XmlTextReader interface
#
import sys
import glob
import string
import libxml2
try:
import StringIO
str_io = StringIO.StringIO
except:
import io
str_io = io.StringIO
# Memory debug specific
libxml2.debugMemory(1)
# Error output accumulated by the registered error handler (see callback()),
# and the exact output expected from validating the known-bad files.
err=""
expect="""../../test/valid/rss.xml:177: element rss: validity error : Element rss does not carry attribute version
</rss>
^
../../test/valid/xlink.xml:450: element termdef: validity error : ID dt-arc already defined
<p><termdef id="dt-arc" term="Arc">An <ter
^
../../test/valid/xlink.xml:530: validity error : attribute def line 199 references an unknown ID "dt-xlg"
^
"""
def callback(ctx, str):
    """libxml2 error handler: append each reported message to global ``err``."""
    global err
    err += "%s" % (str)
libxml2.registerErrorHandler(callback, "")

# Validate every file under test/valid/, skipping files that are known to
# produce validity errors (those are checked via the `expect` string instead).
valid_files = glob.glob("../../test/valid/*.x*")
valid_files.sort()
for file in valid_files:
    if file.find("t8") != -1:
        continue
    if file == "../../test/valid/rss.xml":
        continue
    if file == "../../test/valid/xlink.xml":
        continue
    reader = libxml2.newTextReaderFilename(file)
    #print "%s:" % (file)
    reader.SetParserProp(libxml2.PARSER_VALIDATE, 1)
    ret = reader.Read()
    # Drain the document; Read() returns 1 while nodes remain, 0 at EOF,
    # and a negative value on error.
    while ret == 1:
        ret = reader.Read()
    if ret != 0:
        print("Error parsing and validating %s" % (file))
        #sys.exit(1)
if err != expect:
    print(err)
#
# another separate test based on Stephane Bidoul one
#
# test2: internal entity substitution with DTD validation; the expected
# output lists "nodetype,name" per Read() step.
s = """
<!DOCTYPE test [
<!ELEMENT test (x,b)>
<!ELEMENT x (c)>
<!ELEMENT b (#PCDATA)>
<!ELEMENT c (#PCDATA)>
<!ENTITY x "<x><c>xxx</c></x>">
]>
<test>
&x;
<b>bbb</b>
</test>
"""
expect="""10,test
1,test
14,#text
1,x
1,c
3,#text
15,c
15,x
14,#text
1,b
3,#text
15,b
14,#text
15,test
"""
res=""
err=""
input = libxml2.inputBuffer(str_io(s))
reader = input.newTextReader("test2")
reader.SetParserProp(libxml2.PARSER_LOADDTD,1)
reader.SetParserProp(libxml2.PARSER_DEFAULTATTRS,1)
reader.SetParserProp(libxml2.PARSER_SUBST_ENTITIES,1)
reader.SetParserProp(libxml2.PARSER_VALIDATE,1)
while reader.Read() == 1:
    res = res + "%s,%s\n" % (reader.NodeType(),reader.Name())
if res != expect:
    print("test2 failed: unexpected output")
    print(res)
    sys.exit(1)
if err != "":
    print("test2 failed: validation error found")
    print(err)
    sys.exit(1)
#
# Another test for external entity parsing and validation
#
# test3 data: the external entity "tst.ent" is served from memory by
# myResolver() below.
s = """<!DOCTYPE test [
<!ELEMENT test (x)>
<!ELEMENT x (#PCDATA)>
<!ENTITY e SYSTEM "tst.ent">
]>
<test>
&e;
</test>
"""
tst_ent = """<x>hello</x>"""
expect="""10 test
1 test
14 #text
1 x
3 #text
15 x
14 #text
15 test
"""
res=""
def myResolver(URL, ID, ctxt):
    """Entity loader: serve ``tst.ent`` from memory, defer everything else."""
    if URL != "tst.ent":
        return None
    return str_io(tst_ent)
# test3: run the external-entity document through a validating reader,
# resolving tst.ent via myResolver.
libxml2.setEntityLoader(myResolver)
input = libxml2.inputBuffer(str_io(s))
reader = input.newTextReader("test3")
reader.SetParserProp(libxml2.PARSER_LOADDTD,1)
reader.SetParserProp(libxml2.PARSER_DEFAULTATTRS,1)
reader.SetParserProp(libxml2.PARSER_SUBST_ENTITIES,1)
reader.SetParserProp(libxml2.PARSER_VALIDATE,1)
while reader.Read() == 1:
    res = res + "%s %s\n" % (reader.NodeType(),reader.Name())
if res != expect:
    print("test3 failed: unexpected output")
    print(res)
    sys.exit(1)
if err != "":
    print("test3 failed: validation error found")
    print(err)
    sys.exit(1)
#
# Another test for recursive entity parsing, validation, and replacement of
# entities, making sure the entity ref node doesn't show up in that case
#
s = """<!DOCTYPE test [
<!ELEMENT test (x, x)>
<!ELEMENT x (y)>
<!ELEMENT y (#PCDATA)>
<!ENTITY x "<x>&y;</x>">
<!ENTITY y "<y>yyy</y>">
]>
<test>
&x;
&x;
</test>"""
expect="""10 test 0
1 test 0
14 #text 1
1 x 1
1 y 2
3 #text 3
15 y 2
15 x 1
14 #text 1
1 x 1
1 y 2
3 #text 3
15 y 2
15 x 1
14 #text 1
15 test 0
"""
res=""
err=""
input = libxml2.inputBuffer(str_io(s))
reader = input.newTextReader("test4")
reader.SetParserProp(libxml2.PARSER_LOADDTD,1)
reader.SetParserProp(libxml2.PARSER_DEFAULTATTRS,1)
reader.SetParserProp(libxml2.PARSER_SUBST_ENTITIES,1)
reader.SetParserProp(libxml2.PARSER_VALIDATE,1)
while reader.Read() == 1:
    res = res + "%s %s %d\n" % (reader.NodeType(),reader.Name(),reader.Depth())
if res != expect:
    print("test4 failed: unexpected output")
    print(res)
    sys.exit(1)
if err != "":
    print("test4 failed: validation error found")
    print(err)
    sys.exit(1)
#
# The same test but without entity substitution this time
#
# test5: without PARSER_SUBST_ENTITIES the entity reference nodes
# (node type 5) appear in the stream instead of their replacement trees.
s = """<!DOCTYPE test [
<!ELEMENT test (x, x)>
<!ELEMENT x (y)>
<!ELEMENT y (#PCDATA)>
<!ENTITY x "<x>&y;</x>">
<!ENTITY y "<y>yyy</y>">
]>
<test>
&x;
&x;
</test>"""
expect="""10 test 0
1 test 0
14 #text 1
5 x 1
14 #text 1
5 x 1
14 #text 1
15 test 0
"""
res=""
err=""
input = libxml2.inputBuffer(str_io(s))
reader = input.newTextReader("test5")
reader.SetParserProp(libxml2.PARSER_VALIDATE,1)
while reader.Read() == 1:
    res = res + "%s %s %d\n" % (reader.NodeType(),reader.Name(),reader.Depth())
if res != expect:
    print("test5 failed: unexpected output")
    print(res)
if err != "":
    print("test5 failed: validation error found")
    print(err)
#
# cleanup
#
del input
del reader

# Memory debug specific
libxml2.cleanupParser()
if libxml2.debugMemory(1) == 0:
    print("OK")
else:
    print("Memory leak %d bytes" % (libxml2.debugMemory(1)))
    libxml2.dumpMemory() | unknown | codeparrot/codeparrot-clean | |
import { flushSync } from 'svelte';
import { ok, test } from '../../test';

// Binding inside an each block: checking the first checkbox must update the
// bound value so the rendered count goes from 0 to 1.
export default test({
	// run in client and hydrate modes only (no SSR-only pass)
	mode: ['client', 'hydrate'],

	html: `
<input type="checkbox">
<input type="checkbox">
<input type="checkbox">
0
`,

	test({ assert, target, window }) {
		const input = target.querySelector('input');
		ok(input);

		// Simulate the user checking the first box; 'change' must bubble for
		// the binding's delegated listener to fire.
		input.checked = true;
		input.dispatchEvent(new window.Event('change', { bubbles: true }));
		flushSync();

		assert.htmlEqual(
			target.innerHTML,
			`
<input type="checkbox">
<input type="checkbox">
<input type="checkbox">
1
`
		);
	}
}); | javascript | github | https://github.com/sveltejs/svelte | packages/svelte/tests/runtime-legacy/samples/binding-store-each/_config.js
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest for the website_mail glue module.
{
    'name': 'Website Mail',
    'category': 'Hidden',
    'summary': 'Website Module for Mail',
    'version': '0.1',
    'description': """Glue module holding mail improvements for website.""",
    'author': 'OpenERP SA',
    'depends': ['website', 'mail', 'email_template'],
    'data': [
        'views/snippets.xml',
        'views/website_mail.xml',
        'views/website_email_designer.xml',
        'views/email_template_view.xml',
        'data/mail_groups.xml',
        'security/website_mail.xml',
    ],
    'qweb': [
        'static/src/xml/website_mail.xml'
    ],
    'installable': True,
    # auto_install: activated automatically when all 'depends' are installed.
    'auto_install': True,
} | unknown | codeparrot/codeparrot-clean | |
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package terraform
import (
"maps"
"path/filepath"
"slices"
"sort"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty-debug/ctydebug"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/configs"
"github.com/hashicorp/terraform/internal/configs/configschema"
"github.com/hashicorp/terraform/internal/lang/marks"
"github.com/hashicorp/terraform/internal/plans"
"github.com/hashicorp/terraform/internal/providers"
testing_provider "github.com/hashicorp/terraform/internal/providers/testing"
"github.com/hashicorp/terraform/internal/states"
"github.com/hashicorp/terraform/internal/tfdiags"
)
func TestContextPlan_actions(t *testing.T) {
testActionSchema := providers.ActionSchema{
ConfigSchema: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Optional: true,
},
},
},
}
writeOnlyActionSchema := providers.ActionSchema{
ConfigSchema: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Optional: true,
WriteOnly: true,
},
},
},
}
// Action schema with nested blocks used for tests exercising block handling.
nestedActionSchema := providers.ActionSchema{
ConfigSchema: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"top_attr": {
Type: cty.String,
Optional: true,
},
},
BlockTypes: map[string]*configschema.NestedBlock{
"settings": {
Nesting: configschema.NestingSingle,
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"name": {
Type: cty.String,
Required: true,
},
},
BlockTypes: map[string]*configschema.NestedBlock{
"rule": {
Nesting: configschema.NestingList,
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"value": {
Type: cty.String,
Required: true,
},
},
},
},
},
},
},
"settings_list": {
Nesting: configschema.NestingList,
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"id": {
Type: cty.String,
Required: true,
},
},
},
},
},
},
}
for topic, tcs := range map[string]map[string]struct {
toBeImplemented bool
module map[string]string
buildState func(*states.SyncState)
planActionFn func(*testing.T, providers.PlanActionRequest) providers.PlanActionResponse
planResourceFn func(*testing.T, providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse
readResourceFn func(*testing.T, providers.ReadResourceRequest) providers.ReadResourceResponse
planOpts *PlanOpts
expectPlanActionCalled bool
// Some tests can produce race-conditions in the error messages, so we
// have two ways of checking the diagnostics. Use expectValidateDiagnostics
// by default, if there is a race condition and you want to allow multiple
// versions, please use assertValidateDiagnostics.
expectValidateDiagnostics func(m *configs.Config) tfdiags.Diagnostics
assertValidateDiagnostics func(*testing.T, tfdiags.Diagnostics)
expectPlanDiagnostics func(m *configs.Config) tfdiags.Diagnostics
assertPlanDiagnostics func(*testing.T, tfdiags.Diagnostics)
assertPlan func(*testing.T, *plans.Plan)
}{
// ======== BASIC ========
// Fundamental behavior of actions
// ======== BASIC ========
"basics": {
"unreferenced": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
`,
},
expectPlanActionCalled: false,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 0 {
t.Fatalf("expected no actions in plan, got %d", len(p.Changes.ActionInvocations))
}
if p.Applyable {
t.Fatalf("should not be able to apply this plan")
}
},
},
"query run": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create, after_update]
actions = [action.test_action.hello]
}
}
}
`,
"main.tfquery.hcl": `
list "test_resource" "test1" {
provider = "test"
config {
filter = {
attr = "foo"
}
}
}
`,
},
expectPlanActionCalled: false,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
Query: true,
},
},
"query run, action references resource": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
config {
attr = resource.test_object.a.name
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create, after_update]
actions = [action.test_action.hello]
}
}
}
`,
"main.tfquery.hcl": `
list "test_resource" "test1" {
provider = "test"
config {
filter = {
attr = "foo"
}
}
}
`,
},
expectPlanActionCalled: false,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
Query: true,
},
},
"invalid config": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
config {
unknown_attr = "value"
}
}`,
},
expectPlanActionCalled: false,
expectValidateDiagnostics: func(m *configs.Config) (diags tfdiags.Diagnostics) {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Unsupported argument",
Detail: `An argument named "unknown_attr" is not expected here.`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 4, Column: 5, Byte: 47},
End: hcl.Pos{Line: 4, Column: 17, Byte: 59},
},
})
},
},
"actions can't be accessed in resources": {
module: map[string]string{
"main.tf": `
action "test_action" "my_action" {
config {
attr = "value"
}
}
resource "test_object" "a" {
name = action.test_action.my_action.attr
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.my_action]
}
}
}
`,
},
expectValidateDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(
&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid reference",
Detail: "Actions can not be referenced in this context. They can only be referenced from within a resource's lifecycle actions list.",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 8, Column: 10, Byte: 110},
End: hcl.Pos{Line: 8, Column: 40, Byte: 138},
},
})
},
},
"actions can't be accessed in outputs": {
module: map[string]string{
"main.tf": `
action "test_action" "my_action" {
config {
attr = "value"
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.my_action]
}
}
}
output "my_output" {
value = action.test_action.my_action.attr
}
output "my_output2" {
value = action.test_action.my_action
}
`,
},
expectValidateDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(
&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid reference",
Detail: "Actions can not be referenced in this context. They can only be referenced from within a resource's lifecycle actions list.",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 21, Column: 13, Byte: 327},
End: hcl.Pos{Line: 21, Column: 43, Byte: 355},
},
}).Append(
&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid reference",
Detail: "Actions can not be referenced in this context. They can only be referenced from within a resource's lifecycle actions list.",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 17, Column: 13, Byte: 258},
End: hcl.Pos{Line: 17, Column: 43, Byte: 286},
},
},
)
},
},
"destroy run": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create, after_update]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
planOpts: SimplePlanOpts(plans.DestroyMode, InputValues{}),
},
"non-default provider namespace": {
module: map[string]string{
"main.tf": `
terraform {
required_providers {
ecosystem = {
source = "danielmschmidt/ecosystem"
}
}
}
action "ecosystem" "hello" {}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.ecosystem.hello]
}
}
}
`,
},
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action in plan, got %d", len(p.Changes.ActionInvocations))
}
action := p.Changes.ActionInvocations[0]
if action.Addr.String() != "action.ecosystem.hello" {
t.Fatalf("expected action address to be 'action.ecosystem.hello', got '%s'", action.Addr)
}
at, ok := action.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", action.ActionTrigger)
}
if !at.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("other_object.a")) {
t.Fatalf("expected action to have triggering resource address 'other_object.a', but it is %s", at.TriggeringResourceAddr)
}
if action.ProviderAddr.Provider.Namespace != "danielmschmidt" {
t.Fatalf("expected action to have the namespace 'danielmschmidt', got '%s'", action.ProviderAddr.Provider.Namespace)
}
},
},
},
// ======== TRIGGERING ========
// action_trigger behavior
// ======== TRIGGERING ========
"triggering": {
"before_create triggered": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action in plan, got %d", len(p.Changes.ActionInvocations))
}
action := p.Changes.ActionInvocations[0]
if action.Addr.String() != "action.test_action.hello" {
t.Fatalf("expected action address to be 'action.test_action.hello', got '%s'", action.Addr)
}
at, ok := action.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", action.ActionTrigger)
}
if !at.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("test_object.a")) {
t.Fatalf("expected action to have a triggering resource address 'test_object.a', got '%s'", at.TriggeringResourceAddr)
}
if at.ActionTriggerBlockIndex != 0 {
t.Fatalf("expected action to have a triggering block index of 0, got %d", at.ActionTriggerBlockIndex)
}
if at.TriggerEvent() != configs.BeforeCreate {
t.Fatalf("expected action to have a triggering event of 'before_create', got '%s'", at.TriggerEvent())
}
if at.ActionsListIndex != 0 {
t.Fatalf("expected action to have a actions list index of 0, got %d", at.ActionsListIndex)
}
if action.ProviderAddr.Provider != addrs.NewDefaultProvider("test") {
t.Fatalf("expected action to have a provider address of 'provider[\"registry.terraform.io/hashicorp/test\"]', got '%s'", action.ProviderAddr)
}
},
},
"after_create triggered": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action in plan, got %d", len(p.Changes.ActionInvocations))
}
action := p.Changes.ActionInvocations[0]
if action.Addr.String() != "action.test_action.hello" {
t.Fatalf("expected action address to be 'action.test_action.hello', got '%s'", action.Addr)
}
    // TODO: Test that the action's triggering resource address is set correctly
},
},
"before_update triggered - on create": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_update]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
},
"after_update triggered - on create": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_update]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
},
"before_update triggered - on update": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_update]
actions = [action.test_action.hello]
}
}
}
`,
},
buildState: func(s *states.SyncState) {
addr := mustResourceInstanceAddr("test_object.a")
s.SetResourceInstanceCurrent(addr, &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"name":"previous_run"}`),
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
expectPlanActionCalled: true,
},
"after_update triggered - on update": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_update]
actions = [action.test_action.hello]
}
}
}
`,
},
buildState: func(s *states.SyncState) {
addr := mustResourceInstanceAddr("test_object.a")
s.SetResourceInstanceCurrent(addr, &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"name":"previous_run"}`),
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
expectPlanActionCalled: true,
},
"before_update triggered - on replace": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_update]
actions = [action.test_action.hello]
}
}
}
`,
},
buildState: func(s *states.SyncState) {
addr := mustResourceInstanceAddr("test_object.a")
s.SetResourceInstanceCurrent(addr, &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"name":"previous_run"}`),
Status: states.ObjectTainted,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
expectPlanActionCalled: false,
},
"after_update triggered - on replace": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_update]
actions = [action.test_action.hello]
}
}
}
`,
},
buildState: func(s *states.SyncState) {
addr := mustResourceInstanceAddr("test_object.a")
s.SetResourceInstanceCurrent(addr, &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"name":"previous_run"}`),
Status: states.ObjectTainted,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
expectPlanActionCalled: false,
},
"failing actions cancel next ones": {
module: map[string]string{
"main.tf": `
action "test_action" "failure" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.failure, action.test_action.failure]
}
action_trigger {
events = [before_create]
actions = [action.test_action.failure]
}
}
}
`,
},
planActionFn: func(_ *testing.T, _ providers.PlanActionRequest) providers.PlanActionResponse {
t.Helper()
return providers.PlanActionResponse{
Diagnostics: tfdiags.Diagnostics{
tfdiags.Sourceless(tfdiags.Error, "Planning failed", "Test case simulates an error while planning"),
},
}
},
expectPlanActionCalled: true,
// We only expect a single diagnostic here, the other should not have been called because the first one failed.
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(
&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Failed to plan action",
Detail: "Planning failed: Test case simulates an error while planning",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 7, Column: 8, Byte: 147},
End: hcl.Pos{Line: 7, Column: 46, Byte: 173},
},
},
)
},
},
"actions with warnings don't cancel": {
module: map[string]string{
"main.tf": `
action "test_action" "failure" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.failure, action.test_action.failure]
}
action_trigger {
events = [before_create]
actions = [action.test_action.failure]
}
}
}
`,
},
planActionFn: func(t *testing.T, par providers.PlanActionRequest) providers.PlanActionResponse {
return providers.PlanActionResponse{
Diagnostics: tfdiags.Diagnostics{
tfdiags.Sourceless(tfdiags.Warning, "Warning during planning", "Test case simulates a warning while planning"),
},
}
},
expectPlanActionCalled: true,
   // Warnings do not cancel subsequent actions, so all three planned action invocations run and each reports its warning diagnostic.
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(
&hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Warnings when planning action",
Detail: "Warning during planning: Test case simulates a warning while planning",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 7, Column: 8, Byte: 147},
End: hcl.Pos{Line: 7, Column: 46, Byte: 173},
},
},
&hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Warnings when planning action",
Detail: "Warning during planning: Test case simulates a warning while planning",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 7, Column: 48, Byte: 175},
End: hcl.Pos{Line: 7, Column: 76, Byte: 201},
},
},
&hcl.Diagnostic{
Severity: hcl.DiagWarning,
Summary: "Warnings when planning action",
Detail: "Warning during planning: Test case simulates a warning while planning",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 11, Column: 8, Byte: 278},
End: hcl.Pos{Line: 11, Column: 46, Byte: 304},
},
},
)
},
},
"splat is not supported": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
count = 42
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello[*]]
}
}
}
`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid action expression",
Detail: "Unexpected expression found in action_triggers.actions.",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 9, Column: 18, Byte: 159},
End: hcl.Pos{Line: 9, Column: 47, Byte: 186},
},
})
},
},
"multiple events triggering in same action trigger": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [
before_create, // should trigger
after_create, // should trigger
before_update // should be ignored
]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected 2 action in plan, got %d", len(p.Changes.ActionInvocations))
}
triggeredEvents := []configs.ActionTriggerEvent{}
for _, action := range p.Changes.ActionInvocations {
at, ok := action.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", action.ActionTrigger)
}
triggeredEvents = append(triggeredEvents, at.ActionTriggerEvent)
}
slices.Sort(triggeredEvents)
if diff := cmp.Diff([]configs.ActionTriggerEvent{configs.BeforeCreate, configs.AfterCreate}, triggeredEvents); diff != "" {
t.Errorf("wrong result\n%s", diff)
}
},
},
"multiple events triggered together": {
module: map[string]string{
"main.tf": `
action "test_action" "one" {}
action "test_action" "two" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create, after_create, before_update, after_update]
actions = [action.test_action.one, action.test_action.two]
}
}
}
`,
},
expectPlanActionCalled: true,
},
"multiple events triggering in multiple action trigger": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
// should trigger
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
// should trigger
action_trigger {
events = [after_create]
actions = [action.test_action.hello]
}
// should be ignored
action_trigger {
events = [before_update]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected 2 action in plan, got %d", len(p.Changes.ActionInvocations))
}
triggeredEvents := []configs.ActionTriggerEvent{}
for _, action := range p.Changes.ActionInvocations {
at, ok := action.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", action.ActionTrigger)
}
triggeredEvents = append(triggeredEvents, at.ActionTriggerEvent)
}
slices.Sort(triggeredEvents)
if diff := cmp.Diff([]configs.ActionTriggerEvent{configs.BeforeCreate, configs.AfterCreate}, triggeredEvents); diff != "" {
t.Errorf("wrong result\n%s", diff)
}
},
},
},
// ======== EXPANSION ========
// action expansion behavior (count & for_each)
// ======== EXPANSION ========
"expansion": {
"action for_each": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
for_each = toset(["a", "b"])
config {
attr = "value-${each.key}"
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello["a"], action.test_action.hello["b"]]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected 2 action in plan, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{}
for _, action := range p.Changes.ActionInvocations {
actionAddrs = append(actionAddrs, action.Addr.String())
}
slices.Sort(actionAddrs)
if !slices.Equal(actionAddrs, []string{
"action.test_action.hello[\"a\"]",
"action.test_action.hello[\"b\"]",
}) {
t.Fatalf("expected action addresses to be 'action.test_action.hello[\"a\"]' and 'action.test_action.hello[\"b\"]', got %v", actionAddrs)
}
for _, ai := range p.Changes.ActionInvocations {
at, ok := ai.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", ai.ActionTrigger)
}
if !at.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("test_object.a")) {
t.Fatalf("expected action to have triggering resource address 'test_object.a', but it is %s", at.TriggeringResourceAddr)
}
}
},
},
"action count": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
count = 2
config {
attr = "value-${count.index}"
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello[0], action.test_action.hello[1]]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected 2 action in plan, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{}
for _, action := range p.Changes.ActionInvocations {
actionAddrs = append(actionAddrs, action.Addr.String())
}
slices.Sort(actionAddrs)
if !slices.Equal(actionAddrs, []string{
"action.test_action.hello[0]",
"action.test_action.hello[1]",
}) {
t.Fatalf("expected action addresses to be 'action.test_action.hello[0]' and 'action.test_action.hello[1]', got %v", actionAddrs)
}
for _, ai := range p.Changes.ActionInvocations {
at, ok := ai.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", ai.ActionTrigger)
}
if !at.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("test_object.a")) {
t.Fatalf("expected action to have triggering resource address 'test_object.a', but it is %s", at.TriggeringResourceAddr)
}
}
},
},
"action for_each invalid access": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
for_each = toset(["a", "b"])
config {
attr = "value-${each.key}"
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello["c"]]
}
}
}
`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) (diags tfdiags.Diagnostics) {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Reference to non-existent action instance",
Detail: "Action instance was not found in the current context.",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 13, Column: 18, Byte: 224},
End: hcl.Pos{Line: 13, Column: 49, Byte: 253},
},
})
},
},
"action count invalid access": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
count = 2
config {
attr = "value-${count.index}"
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello[2]]
}
}
}
`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) (diags tfdiags.Diagnostics) {
return diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Reference to non-existent action instance",
Detail: "Action instance was not found in the current context.",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 13, Column: 18, Byte: 208},
End: hcl.Pos{Line: 13, Column: 47, Byte: 235},
},
})
},
},
"expanded resource - unexpanded action": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
count = 2
name = "test-${count.index}"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected 2 action in plan, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{}
for _, action := range p.Changes.ActionInvocations {
actionAddrs = append(actionAddrs, action.Addr.String())
}
slices.Sort(actionAddrs)
if !slices.Equal(actionAddrs, []string{
"action.test_action.hello",
"action.test_action.hello",
}) {
t.Fatalf("expected action addresses to be 'action.test_action.hello' and 'action.test_action.hello', got %v", actionAddrs)
}
actionTriggers := []plans.LifecycleActionTrigger{}
for _, ai := range p.Changes.ActionInvocations {
at, ok := ai.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", ai.ActionTrigger)
}
actionTriggers = append(actionTriggers, *at)
}
if !actionTriggers[0].TriggeringResourceAddr.Resource.Resource.Equal(actionTriggers[1].TriggeringResourceAddr.Resource.Resource) {
t.Fatalf("expected both actions to have the same triggering resource address, but got %s and %s", actionTriggers[0].TriggeringResourceAddr, actionTriggers[1].TriggeringResourceAddr)
}
if actionTriggers[0].TriggeringResourceAddr.Resource.Key == actionTriggers[1].TriggeringResourceAddr.Resource.Key {
t.Fatalf("expected both actions to have different triggering resource instance keys, but got the same %s", actionTriggers[0].TriggeringResourceAddr.Resource.Key)
}
},
},
"expanded resource - expanded action": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
count = 2
config {
attr = "value-${count.index}"
}
}
resource "test_object" "a" {
count = 2
name = "test-${count.index}"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello[count.index]]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected 2 action in plan, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{}
for _, action := range p.Changes.ActionInvocations {
actionAddrs = append(actionAddrs, action.Addr.String())
}
slices.Sort(actionAddrs)
if !slices.Equal(actionAddrs, []string{
"action.test_action.hello[0]",
"action.test_action.hello[1]",
}) {
t.Fatalf("expected action addresses to be 'action.test_action.hello[0]' and 'action.test_action.hello[1]', got %v", actionAddrs)
}
actionTriggers := []plans.LifecycleActionTrigger{}
for _, ai := range p.Changes.ActionInvocations {
at, ok := ai.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", ai.ActionTrigger)
}
actionTriggers = append(actionTriggers, *at)
}
if !actionTriggers[0].TriggeringResourceAddr.Resource.Resource.Equal(actionTriggers[1].TriggeringResourceAddr.Resource.Resource) {
t.Fatalf("expected both actions to have the same triggering resource address, but got %s and %s", actionTriggers[0].TriggeringResourceAddr, actionTriggers[1].TriggeringResourceAddr)
}
if actionTriggers[0].TriggeringResourceAddr.Resource.Key == actionTriggers[1].TriggeringResourceAddr.Resource.Key {
t.Fatalf("expected both actions to have different triggering resource instance keys, but got the same %s", actionTriggers[0].TriggeringResourceAddr.Resource.Key)
}
},
},
  // Since destroying a node leaves no reference to an action in config, we try
  // to provoke an error by simply removing a resource instance from the configuration.
"destroying expanded node": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
count = 2
lifecycle {
action_trigger {
events = [before_create, after_update]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
buildState: func(s *states.SyncState) {
s.SetResourceInstanceCurrent(mustResourceInstanceAddr("test_object.a[0]"), &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{}`),
Status: states.ObjectReady,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
s.SetResourceInstanceCurrent(mustResourceInstanceAddr("test_object.a[1]"), &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{}`),
Status: states.ObjectReady,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
s.SetResourceInstanceCurrent(mustResourceInstanceAddr("test_object.a[2]"), &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{}`),
Status: states.ObjectReady,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
},
},
// ======== CONFIG ========
// action config behavior (secrets, write_only, dependencies)
// ======== CONFIG ========
"config": {
"transitive dependencies": {
module: map[string]string{
"main.tf": `
resource "test_object" "a" {
name = "a"
}
action "test_action" "hello" {
config {
attr = test_object.a.name
}
}
resource "test_object" "b" {
name = "b"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
},
"expanded transitive dependencies": {
module: map[string]string{
"main.tf": `
resource "test_object" "a" {
name = "a"
}
resource "test_object" "b" {
name = "b"
}
action "test_action" "hello_a" {
config {
attr = test_object.a.name
}
}
action "test_action" "hello_b" {
config {
attr = test_object.a.name
}
}
resource "test_object" "c" {
name = "c"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello_a]
}
}
}
resource "test_object" "d" {
name = "d"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello_b]
}
}
}
resource "test_object" "e" {
name = "e"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello_a, action.test_action.hello_b]
}
}
}
`,
},
expectPlanActionCalled: true,
},
"action config with after_create dependency to triggering resource": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
config {
attr = test_object.a.name
}
}
resource "test_object" "a" {
name = "test_name"
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected one action in plan, got %d", len(p.Changes.ActionInvocations))
}
if p.Changes.ActionInvocations[0].ActionTrigger.TriggerEvent() != configs.AfterCreate {
t.Fatalf("expected trigger event to be of type AfterCreate, got: %v", p.Changes.ActionInvocations[0].ActionTrigger)
}
if p.Changes.ActionInvocations[0].Addr.Action.String() != "action.test_action.hello" {
t.Fatalf("expected action to equal 'action.test_action.hello', got '%s'", p.Changes.ActionInvocations[0].Addr)
}
decode, err := p.Changes.ActionInvocations[0].ConfigValue.Decode(cty.Object(map[string]cty.Type{"attr": cty.String}))
if err != nil {
t.Fatal(err)
}
if decode.GetAttr("attr").AsString() != "test_name" {
t.Fatalf("expected action config field 'attr' to have value 'test_name', got '%s'", decode.GetAttr("attr").AsString())
}
},
},
"action config refers to before triggering resource leads to validation error": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
config {
attr = test_object.a.name
}
}
resource "test_object" "a" {
name = "test_name"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true, // The cycle only appears in the apply graph
assertPlanDiagnostics: func(t *testing.T, diags tfdiags.Diagnostics) {
if !diags.HasErrors() {
t.Fatalf("expected diagnostics to have errors, but it does not")
}
if len(diags) != 1 {
t.Fatalf("expected diagnostics to have 1 error, but it has %d", len(diags))
}
// We expect the diagnostic to be about a cycle
if !strings.Contains(diags[0].Description().Summary, "Cycle") {
t.Fatalf("expected diagnostic summary to contain 'Cycle', got '%s'", diags[0].Description().Summary)
}
// We expect the action node to be part of the cycle
if !strings.Contains(diags[0].Description().Summary, "action.test_action.hello") {
t.Fatalf("expected diagnostic summary to contain 'action.test_action.hello', got '%s'", diags[0].Description().Summary)
}
// We expect the resource node to be part of the cycle
if !strings.Contains(diags[0].Description().Summary, "test_object.a") {
t.Fatalf("expected diagnostic summary to contain 'test_object.a', got '%s'", diags[0].Description().Summary)
}
},
},
"secret values": {
module: map[string]string{
"main.tf": `
variable "secret" {
type = string
sensitive = true
}
action "test_action" "hello" {
config {
attr = var.secret
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.NormalMode,
SetVariables: InputValues{
"secret": &InputValue{
Value: cty.StringVal("secret"),
SourceType: ValueFromCLIArg,
}},
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action in plan, got %d", len(p.Changes.ActionInvocations))
}
action := p.Changes.ActionInvocations[0]
ac, err := action.Decode(&testActionSchema)
if err != nil {
t.Fatalf("expected action to decode successfully, but got error: %v", err)
}
if !marks.Has(ac.ConfigValue.GetAttr("attr"), marks.Sensitive) {
t.Fatalf("expected attribute 'attr' to be marked as sensitive")
}
},
},
"ephemeral values": {
module: map[string]string{
"main.tf": `
variable "secret" {
type = string
ephemeral = true
}
action "test_action" "hello" {
config {
attr = var.secret
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.NormalMode,
SetVariables: InputValues{
"secret": &InputValue{
Value: cty.StringVal("secret"),
SourceType: ValueFromCLIArg,
}},
},
expectPlanActionCalled: false,
assertValidateDiagnostics: func(t *testing.T, diags tfdiags.Diagnostics) {
if len(diags) != 1 {
t.Fatalf("expected exactly 1 diagnostic but had %d", len(diags))
}
if diags[0].Severity() != tfdiags.Error {
t.Error("expected error diagnostic")
}
if diags[0].Description().Summary != "Invalid use of ephemeral value" {
t.Errorf("expected diagnostics to be because of ephemeral values but was %s", diags[0].Description().Summary)
}
},
},
"write-only attributes": {
module: map[string]string{
"main.tf": `
variable "attr" {
type = string
ephemeral = true
}
resource "test_object" "resource" {
name = "hello"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action_wo.hello]
}
}
}
action "test_action_wo" "hello" {
config {
attr = var.attr
}
}
`,
},
planOpts: SimplePlanOpts(plans.NormalMode, InputValues{
"attr": {
Value: cty.StringVal("wo-plan"),
},
}),
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&writeOnlyActionSchema)
if err != nil {
t.Fatal(err)
}
if !ai.ConfigValue.GetAttr("attr").IsNull() {
t.Fatal("should have converted ephemeral value to null in the plan")
}
},
},
"action config nested single + list blocks": {
module: map[string]string{
"main.tf": `
action "test_nested" "with_blocks" {
config {
top_attr = "top"
settings {
name = "primary"
rule {
value = "r1"
}
rule {
value = "r2"
}
}
}
}
resource "test_object" "a" {
name = "object"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_nested.with_blocks]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action invocation, got %d", len(p.Changes.ActionInvocations))
}
ais := p.Changes.ActionInvocations[0]
decoded, err := ais.Decode(&nestedActionSchema)
if err != nil {
t.Fatalf("error decoding nested action: %s", err)
}
cv := decoded.ConfigValue
if cv.GetAttr("top_attr").AsString() != "top" {
t.Fatalf("expected top_attr = top, got %s", cv.GetAttr("top_attr").GoString())
}
settings := cv.GetAttr("settings")
if !settings.Type().IsObjectType() {
t.Fatalf("expected settings object, got %s", settings.Type().FriendlyName())
}
if settings.GetAttr("name").AsString() != "primary" {
t.Fatalf("expected settings.name = primary, got %s", settings.GetAttr("name").GoString())
}
rules := settings.GetAttr("rule")
if !rules.Type().IsListType() || rules.LengthInt() != 2 {
t.Fatalf("expected 2 rule blocks, got type %s length %d", rules.Type().FriendlyName(), rules.LengthInt())
}
first := rules.Index(cty.NumberIntVal(0)).GetAttr("value").AsString()
second := rules.Index(cty.NumberIntVal(1)).GetAttr("value").AsString()
if first != "r1" || second != "r2" {
t.Fatalf("expected rule values r1,r2 got %s,%s", first, second)
}
},
},
"action config top-level list block": {
module: map[string]string{
"main.tf": `
action "test_nested" "with_list" {
config {
settings_list {
id = "one"
}
settings_list {
id = "two"
}
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_nested.with_list]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action invocation, got %d", len(p.Changes.ActionInvocations))
}
ais := p.Changes.ActionInvocations[0]
decoded, err := ais.Decode(&nestedActionSchema)
if err != nil {
t.Fatalf("error decoding nested action: %s", err)
}
cv := decoded.ConfigValue
if !cv.GetAttr("top_attr").IsNull() {
t.Fatalf("expected top_attr to be null, got %s", cv.GetAttr("top_attr").GoString())
}
sl := cv.GetAttr("settings_list")
if !sl.Type().IsListType() || sl.LengthInt() != 2 {
t.Fatalf("expected 2 settings_list blocks, got type %s length %d", sl.Type().FriendlyName(), sl.LengthInt())
}
first := sl.Index(cty.NumberIntVal(0)).GetAttr("id").AsString()
second := sl.Index(cty.NumberIntVal(1)).GetAttr("id").AsString()
if first != "one" || second != "two" {
t.Fatalf("expected ids one,two got %s,%s", first, second)
}
},
},
},
// ======== MODULES ========
// actions within modules
// ======== MODULES ========
"modules": {
"triggered within module": {
module: map[string]string{
"main.tf": `
module "mod" {
source = "./mod"
}
`,
"mod/mod.tf": `
action "test_action" "hello" {}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action in plan, got %d", len(p.Changes.ActionInvocations))
}
action := p.Changes.ActionInvocations[0]
if action.Addr.String() != "module.mod.action.test_action.hello" {
t.Fatalf("expected action address to be 'module.mod.action.test_action.hello', got '%s'", action.Addr)
}
at, ok := action.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", action.ActionTrigger)
}
if !at.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("module.mod.other_object.a")) {
t.Fatalf("expected action to have triggering resource address 'module.mod.other_object.a', but it is %s", at.TriggeringResourceAddr)
}
if at.ActionTriggerBlockIndex != 0 {
t.Fatalf("expected action to have a triggering block index of 0, got %d", at.ActionTriggerBlockIndex)
}
if at.TriggerEvent() != configs.BeforeCreate {
t.Fatalf("expected action to have a triggering event of 'before_create', got '%s'", at.TriggerEvent())
}
if at.ActionsListIndex != 0 {
t.Fatalf("expected action to have a actions list index of 0, got %d", at.ActionsListIndex)
}
if action.ProviderAddr.Provider != addrs.NewDefaultProvider("test") {
t.Fatalf("expected action to have a provider address of 'provider[\"registry.terraform.io/hashicorp/test\"]', got '%s'", action.ProviderAddr)
}
},
},
"triggered within module instance": {
module: map[string]string{
"main.tf": `
module "mod" {
count = 2
source = "./mod"
}
`,
"mod/mod.tf": `
action "test_action" "hello" {}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected 1 action in plan, got %d", len(p.Changes.ActionInvocations))
}
// We know we are run within two child modules, so we can just sort by the triggering resource address
slices.SortFunc(p.Changes.ActionInvocations, func(a, b *plans.ActionInvocationInstanceSrc) int {
at, ok := a.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", a.ActionTrigger)
}
bt, ok := b.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", b.ActionTrigger)
}
if at.TriggeringResourceAddr.String() < bt.TriggeringResourceAddr.String() {
return -1
} else {
return 1
}
})
action := p.Changes.ActionInvocations[0]
if action.Addr.String() != "module.mod[0].action.test_action.hello" {
t.Fatalf("expected action address to be 'module.mod[0].action.test_action.hello', got '%s'", action.Addr)
}
at := action.ActionTrigger.(*plans.LifecycleActionTrigger)
if !at.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("module.mod[0].other_object.a")) {
t.Fatalf("expected action to have triggering resource address 'module.mod[0].other_object.a', but it is %s", at.TriggeringResourceAddr)
}
if at.ActionTriggerBlockIndex != 0 {
t.Fatalf("expected action to have a triggering block index of 0, got %d", at.ActionTriggerBlockIndex)
}
if at.TriggerEvent() != configs.BeforeCreate {
t.Fatalf("expected action to have a triggering event of 'before_create', got '%s'", at.TriggerEvent())
}
if at.ActionsListIndex != 0 {
t.Fatalf("expected action to have a actions list index of 0, got %d", at.ActionsListIndex)
}
if action.ProviderAddr.Provider != addrs.NewDefaultProvider("test") {
t.Fatalf("expected action to have a provider address of 'provider[\"registry.terraform.io/hashicorp/test\"]', got '%s'", action.ProviderAddr)
}
action2 := p.Changes.ActionInvocations[1]
if action2.Addr.String() != "module.mod[1].action.test_action.hello" {
t.Fatalf("expected action address to be 'module.mod[1].action.test_action.hello', got '%s'", action2.Addr)
}
a2t := action2.ActionTrigger.(*plans.LifecycleActionTrigger)
if !a2t.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("module.mod[1].other_object.a")) {
t.Fatalf("expected action to have triggering resource address 'module.mod[1].other_object.a', but it is %s", a2t.TriggeringResourceAddr)
}
},
},
"not triggered if module is count=0": {
module: map[string]string{
"main.tf": `
module "mod" {
count = 0
source = "./mod"
}
`,
"mod/mod.tf": `
action "test_action" "hello" {}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
},
"not triggered if for_each is empty": {
module: map[string]string{
"main.tf": `
module "mod" {
for_each = toset([])
source = "./mod"
}
`,
"mod/mod.tf": `
action "test_action" "hello" {}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
},
"action declaration in module if module is count=0": {
module: map[string]string{
"main.tf": `
module "mod" {
count = 0
source = "./mod"
}
`,
"mod/mod.tf": `
action "test_action" "hello" {}
`,
},
expectPlanActionCalled: false,
},
"action declaration in module if for_each is empty": {
module: map[string]string{
"main.tf": `
module "mod" {
for_each = toset([])
source = "./mod"
}
`,
"mod/mod.tf": `
action "test_action" "hello" {}
`,
},
expectPlanActionCalled: false,
},
"provider is within module": {
module: map[string]string{
"main.tf": `
module "mod" {
source = "./mod"
}
`,
"mod/mod.tf": `
provider "test" {
alias = "inthemodule"
}
action "test_action" "hello" {
provider = test.inthemodule
}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action in plan, got %d", len(p.Changes.ActionInvocations))
}
action := p.Changes.ActionInvocations[0]
if action.Addr.String() != "module.mod.action.test_action.hello" {
t.Fatalf("expected action address to be 'module.mod.action.test_action.hello', got '%s'", action.Addr)
}
at, ok := action.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a lifecycle action trigger, got %T", action.ActionTrigger)
}
if !at.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("module.mod.other_object.a")) {
t.Fatalf("expected action to have triggering resource address 'module.mod.other_object.a', but it is %s", at.TriggeringResourceAddr)
}
if action.ProviderAddr.Module.String() != "module.mod" {
t.Fatalf("expected action to have a provider module address of 'module.mod' got '%s'", action.ProviderAddr.Module.String())
}
if action.ProviderAddr.Alias != "inthemodule" {
t.Fatalf("expected action to have a provider alias of 'inthemodule', got '%s'", action.ProviderAddr.Alias)
}
},
},
},
// ======== PROVIDER ========
// provider meta-argument
// ======== PROVIDER ========
"provider": {
"aliased provider": {
module: map[string]string{
"main.tf": `
provider "test" {
alias = "aliased"
}
action "test_action" "hello" {
provider = test.aliased
}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected 1 action in plan, got %d", len(p.Changes.ActionInvocations))
}
action := p.Changes.ActionInvocations[0]
if action.Addr.String() != "action.test_action.hello" {
t.Fatalf("expected action address to be 'action.test_action.hello', got '%s'", action.Addr)
}
at, ok := action.ActionTrigger.(*plans.LifecycleActionTrigger)
if !ok {
t.Fatalf("expected action trigger to be a LifecycleActionTrigger, got %T", action.ActionTrigger)
}
if !at.TriggeringResourceAddr.Equal(mustResourceInstanceAddr("other_object.a")) {
t.Fatalf("expected action to have triggering resource address 'other_object.a', but it is %s", at.TriggeringResourceAddr)
}
if action.ProviderAddr.Alias != "aliased" {
t.Fatalf("expected action to have a provider alias of 'aliased', got '%s'", action.ProviderAddr.Alias)
}
},
},
},
// ======== DEFERRING ========
// Deferred actions (partial expansion / provider deferring)
// ======== DEFERRING ========
"deferring": {
"provider deferring action while not allowed": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: false,
},
planActionFn: func(*testing.T, providers.PlanActionRequest) providers.PlanActionResponse {
return providers.PlanActionResponse{
Deferred: &providers.Deferred{
Reason: providers.DeferredReasonAbsentPrereq,
},
}
},
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{
tfdiags.Sourceless(
tfdiags.Error,
"Provider deferred changes when Terraform did not allow deferrals",
`The provider signaled a deferred action for "action.test_action.hello", but in this context deferrals are disabled. This is a bug in the provider, please file an issue with the provider developers.`,
),
}
},
},
"provider deferring action": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: true, // actions should ignore this setting
},
planActionFn: func(*testing.T, providers.PlanActionRequest) providers.PlanActionResponse {
return providers.PlanActionResponse{
Deferred: &providers.Deferred{
Reason: providers.DeferredReasonAbsentPrereq,
},
}
},
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{
tfdiags.Sourceless(
tfdiags.Error,
"Provider deferred changes when Terraform did not allow deferrals",
`The provider signaled a deferred action for "action.test_action.hello", but in this context deferrals are disabled. This is a bug in the provider, please file an issue with the provider developers.`,
),
}
},
},
"deferred after actions defer following actions": {
module: map[string]string{
"main.tf": `
// Using this provider to have another provider type for an easier assertion
terraform {
required_providers {
ecosystem = {
source = "danielmschmidt/ecosystem"
}
}
}
action "test_action" "hello" {}
action "ecosystem" "world" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_action.hello, action.ecosystem.world]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: true,
},
planActionFn: func(t *testing.T, r providers.PlanActionRequest) providers.PlanActionResponse {
if r.ActionType == "ecosystem" {
t.Fatalf("expected second action to not be planned, but it was planned")
}
return providers.PlanActionResponse{
Deferred: &providers.Deferred{
Reason: providers.DeferredReasonAbsentPrereq,
},
}
},
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
// for now, it's just an error for any deferrals but when
// this gets implemented we should check that all the
// actions are deferred even though only one of them
// was actually marked as deferred.
return tfdiags.Diagnostics{
tfdiags.Sourceless(
tfdiags.Error,
"Provider deferred changes when Terraform did not allow deferrals",
`The provider signaled a deferred action for "action.test_action.hello", but in this context deferrals are disabled. This is a bug in the provider, please file an issue with the provider developers.`,
),
}
},
},
"deferred before actions defer following actions and resource": {
module: map[string]string{
"main.tf": `
// Using this provider to have another provider type for an easier assertion
terraform {
required_providers {
ecosystem = {
source = "danielmschmidt/ecosystem"
}
}
}
action "test_action" "hello" {}
action "ecosystem" "world" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
action_trigger {
events = [after_create]
actions = [action.ecosystem.world]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: true,
},
planActionFn: func(t *testing.T, r providers.PlanActionRequest) providers.PlanActionResponse {
if r.ActionType == "ecosystem" {
t.Fatalf("expected second action to not be planned, but it was planned")
}
return providers.PlanActionResponse{
Deferred: &providers.Deferred{
Reason: providers.DeferredReasonAbsentPrereq,
},
}
},
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
// for now, it's just an error for any deferrals but when
// this gets implemented we should check that all the
// actions are deferred even though only one of them
// was actually marked as deferred.
return tfdiags.Diagnostics{
tfdiags.Sourceless(
tfdiags.Error,
"Provider deferred changes when Terraform did not allow deferrals",
`The provider signaled a deferred action for "action.test_action.hello", but in this context deferrals are disabled. This is a bug in the provider, please file an issue with the provider developers.`,
),
}
},
},
"deferred resources also defer the actions they trigger": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
action_trigger {
events = [after_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: true,
},
planResourceFn: func(_ *testing.T, req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
PlannedPrivate: req.PriorPrivate,
Diagnostics: nil,
Deferred: &providers.Deferred{
Reason: providers.DeferredReasonAbsentPrereq,
},
}
},
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 0 {
t.Fatalf("expected 0 actions in plan, got %d", len(p.Changes.ActionInvocations))
}
if len(p.DeferredActionInvocations) != 2 {
t.Fatalf("expected 2 deferred actions in plan, got %d", len(p.DeferredActionInvocations))
}
sort.Slice(p.DeferredActionInvocations, func(i, j int) bool {
return p.DeferredActionInvocations[i].ActionInvocationInstanceSrc.Addr.String() < p.DeferredActionInvocations[j].ActionInvocationInstanceSrc.Addr.String()
})
firstDeferredActionInvocation := p.DeferredActionInvocations[0]
if firstDeferredActionInvocation.DeferredReason != providers.DeferredReasonDeferredPrereq {
t.Fatalf("expected deferred action to be deferred due to deferred prereq, but got %s", firstDeferredActionInvocation.DeferredReason)
}
if firstDeferredActionInvocation.ActionInvocationInstanceSrc.ActionTrigger.(*plans.LifecycleActionTrigger).TriggeringResourceAddr.String() != "test_object.a" {
t.Fatalf("expected deferred action to be triggered by test_object.a, but got %s", firstDeferredActionInvocation.ActionInvocationInstanceSrc.ActionTrigger.(*plans.LifecycleActionTrigger).TriggeringResourceAddr.String())
}
if firstDeferredActionInvocation.ActionInvocationInstanceSrc.Addr.String() != "action.test_action.hello" {
t.Fatalf("expected deferred action to be triggered by action.test_action.hello, but got %s", firstDeferredActionInvocation.ActionInvocationInstanceSrc.Addr.String())
}
secondDeferredActionInvocation := p.DeferredActionInvocations[1]
if secondDeferredActionInvocation.DeferredReason != providers.DeferredReasonDeferredPrereq {
t.Fatalf("expected second deferred action to be deferred due to deferred prereq, but got %s", secondDeferredActionInvocation.DeferredReason)
}
if secondDeferredActionInvocation.ActionInvocationInstanceSrc.ActionTrigger.(*plans.LifecycleActionTrigger).TriggeringResourceAddr.String() != "test_object.a" {
t.Fatalf("expected second deferred action to be triggered by test_object.a, but got %s", secondDeferredActionInvocation.ActionInvocationInstanceSrc.ActionTrigger.(*plans.LifecycleActionTrigger).TriggeringResourceAddr.String())
}
if secondDeferredActionInvocation.ActionInvocationInstanceSrc.Addr.String() != "action.test_action.hello" {
t.Fatalf("expected second deferred action to be triggered by action.test_action.hello, but got %s", secondDeferredActionInvocation.ActionInvocationInstanceSrc.Addr.String())
}
if len(p.DeferredResources) != 1 {
t.Fatalf("expected 1 resource to be deferred, got %d", len(p.DeferredResources))
}
deferredResource := p.DeferredResources[0]
if deferredResource.ChangeSrc.Addr.String() != "test_object.a" {
t.Fatalf("Expected resource %s to be deferred, but it was not", deferredResource.ChangeSrc.Addr)
}
if deferredResource.DeferredReason != providers.DeferredReasonAbsentPrereq {
t.Fatalf("Expected deferred reason to be absent prereq, got %s", deferredResource.DeferredReason)
}
},
},
"action expansion with unknown instances": {
module: map[string]string{
"main.tf": `
variable "each" {
type = set(string)
}
action "test_action" "hello" {
for_each = var.each
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello["a"]]
}
}
}
`,
},
expectPlanActionCalled: false,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: true,
SetVariables: InputValues{
"each": &InputValue{
Value: cty.UnknownVal(cty.Set(cty.String)),
SourceType: ValueFromCLIArg,
},
},
},
assertPlanDiagnostics: func(t *testing.T, diagnostics tfdiags.Diagnostics) {
if len(diagnostics) != 1 {
t.Fatal("wrong number of diagnostics")
}
if diagnostics[0].Severity() != tfdiags.Error {
t.Error("expected error severity")
}
if diagnostics[0].Description().Summary != "Invalid for_each argument" {
t.Errorf("expected for_each argument to be source of error but was %s", diagnostics[0].Description().Summary)
}
},
},
"action with unknown module expansion": {
// We have an unknown module expansion (for_each over an unknown value). The
// action and its triggering resource both live inside the (currently
// un-expanded) module instances. Since we cannot expand the module yet, the
// action invocation must be deferred.
module: map[string]string{
"main.tf": `
variable "mods" {
type = set(string)
}
module "mod" {
source = "./mod"
for_each = var.mods
}
`,
"mod/mod.tf": `
action "test_action" "hello" {
config {
attr = "static"
}
}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: true,
SetVariables: InputValues{
"mods": &InputValue{
Value: cty.UnknownVal(cty.Set(cty.String)),
SourceType: ValueFromCLIArg,
},
},
},
assertPlan: func(t *testing.T, p *plans.Plan) {
// No concrete action invocations can be produced yet.
if got := len(p.Changes.ActionInvocations); got != 0 {
t.Fatalf("expected 0 planned action invocations, got %d", got)
}
if got := len(p.DeferredActionInvocations); got != 1 {
t.Fatalf("expected 1 deferred action invocations, got %d", got)
}
ac, err := p.DeferredActionInvocations[0].Decode(&testActionSchema)
if err != nil {
t.Fatalf("error decoding action invocation: %s", err)
}
if ac.DeferredReason != providers.DeferredReasonInstanceCountUnknown {
t.Fatalf("expected DeferredReasonInstanceCountUnknown, got %s", ac.DeferredReason)
}
if ac.ActionInvocationInstance.ConfigValue.GetAttr("attr").AsString() != "static" {
t.Fatalf("expected attr to be static, got %s", ac.ActionInvocationInstance.ConfigValue.GetAttr("attr").AsString())
}
},
},
"action with unknown module expansion and unknown instances": {
// Here both the module expansion and the action for_each expansion are unknown.
// The action is referenced (with a specific key) inside the module so we should
// get a single deferred action invocation for that specific (yet still
// unresolved) instance address.
module: map[string]string{
"main.tf": `
variable "mods" {
type = set(string)
}
variable "actions" {
type = set(string)
}
module "mod" {
source = "./mod"
for_each = var.mods
actions = var.actions
}
`,
"mod/mod.tf": `
variable "actions" {
type = set(string)
}
action "test_action" "hello" {
// Unknown for_each inside the module instance.
for_each = var.actions
}
resource "other_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
// We reference a specific (yet unknown) action instance key.
actions = [action.test_action.hello["a"]]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: true,
SetVariables: InputValues{
"mods": &InputValue{
Value: cty.UnknownVal(cty.Set(cty.String)),
SourceType: ValueFromCLIArg,
},
"actions": &InputValue{
Value: cty.UnknownVal(cty.Set(cty.String)),
SourceType: ValueFromCLIArg,
},
},
},
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 0 {
t.Fatalf("expected 0 planned action invocations, got %d", len(p.Changes.ActionInvocations))
}
if len(p.DeferredActionInvocations) != 1 {
t.Fatalf("expected 1 deferred partial action invocations, got %d", len(p.DeferredActionInvocations))
}
ac, err := p.DeferredActionInvocations[0].Decode(&testActionSchema)
if err != nil {
t.Fatalf("error decoding action invocation: %s", err)
}
if ac.DeferredReason != providers.DeferredReasonInstanceCountUnknown {
t.Fatalf("expected deferred reason to be DeferredReasonInstanceCountUnknown, got %s", ac.DeferredReason)
}
if !ac.ActionInvocationInstance.ConfigValue.IsNull() {
t.Fatalf("expected config value to be null")
}
},
},
"deferring resource dependencies should defer action": {
module: map[string]string{
"main.tf": `
resource "test_object" "origin" {
name = "origin"
}
action "test_action" "hello" {
config {
attr = test_object.origin.name
}
}
resource "test_object" "a" {
name = "a"
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
DeferralAllowed: true,
},
planResourceFn: func(t *testing.T, req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
if req.Config.GetAttr("name").AsString() == "origin" {
return providers.PlanResourceChangeResponse{
Deferred: &providers.Deferred{
Reason: providers.DeferredReasonAbsentPrereq,
},
}
}
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
PlannedPrivate: req.PriorPrivate,
PlannedIdentity: req.PriorIdentity,
}
},
assertPlanDiagnostics: func(t *testing.T, diagnostics tfdiags.Diagnostics) {
if len(diagnostics) != 1 {
t.Fatal("wrong number of diagnostics")
}
if diagnostics[0].Severity() != tfdiags.Error {
t.Error("expected error diagnostics")
}
if diagnostics[0].Description().Summary != "Invalid action deferral" {
t.Errorf("expected deferral to be source of error was %s", diagnostics[0].Description().Summary)
}
},
},
},
// ======== INVOKE ========
// -invoke flag
// ======== INVOKE ========
"invoke": {
"simple action invoke": {
module: map[string]string{
"main.tf": `
action "test_action" "one" {
config {
attr = "one"
}
}
action "test_action" "two" {
config {
attr = "two"
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsActionInstance{
Action: addrs.ActionInstance{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
Key: addrs.NoKey,
},
},
},
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("one"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("action.test_action.one")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
},
},
"action invoke in module": {
module: map[string]string{
"mod/main.tf": `
action "test_action" "one" {
config {
attr = "one"
}
}
action "test_action" "two" {
config {
attr = "two"
}
}
`,
"main.tf": `
module "mod" {
source = "./mod"
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsActionInstance{
Module: addrs.RootModuleInstance.Child("mod", addrs.NoKey),
Action: addrs.ActionInstance{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
Key: addrs.NoKey,
},
},
},
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("one"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("module.mod.action.test_action.one")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
},
},
"action invoke in expanded module": {
module: map[string]string{
"mod/main.tf": `
action "test_action" "one" {
config {
attr = "one"
}
}
action "test_action" "two" {
config {
attr = "two"
}
}
`,
"main.tf": `
module "mod" {
count = 2
source = "./mod"
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsActionInstance{
Module: addrs.RootModuleInstance.Child("mod", addrs.IntKey(1)),
Action: addrs.ActionInstance{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
Key: addrs.NoKey,
},
},
},
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("one"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("module.mod[1].action.test_action.one")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
},
},
"action invoke with count (all)": {
module: map[string]string{
"main.tf": `
action "test_action" "one" {
count = 2
config {
attr = "${count.index}"
}
}
action "test_action" "two" {
count = 2
config {
attr = "two"
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsAction{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
},
},
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 2 {
t.Fatalf("expected exactly two invocations, and found %d", len(plan.Changes.ActionInvocations))
}
sort.Slice(plan.Changes.ActionInvocations, func(i, j int) bool {
return plan.Changes.ActionInvocations[i].Addr.Less(plan.Changes.ActionInvocations[j].Addr)
})
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("0"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("action.test_action.one[0]")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
ais = plan.Changes.ActionInvocations[1]
ai, err = ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected = cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("1"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("action.test_action.one[1]")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
},
},
"action invoke with count (instance)": {
module: map[string]string{
"main.tf": `
action "test_action" "one" {
count = 2
config {
attr = "${count.index}"
}
}
action "test_action" "two" {
count = 2
config {
attr = "two"
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsActionInstance{
Action: addrs.ActionInstance{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
Key: addrs.IntKey(0),
},
},
},
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("0"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("action.test_action.one[0]")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
},
},
"invoke action with reference": {
module: map[string]string{
"main.tf": `
resource "test_object" "a" {
name = "hello"
}
action "test_action" "one" {
config {
attr = test_object.a.name
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsAction{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
},
},
},
expectPlanActionCalled: true,
buildState: func(state *states.SyncState) {
state.SetResourceInstanceCurrent(mustResourceInstanceAddr("test_object.a"), &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"name":"hello"}`),
Status: states.ObjectReady,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("hello"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("action.test_action.one")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
},
},
"invoke action with reference (drift)": {
module: map[string]string{
"main.tf": `
resource "test_object" "a" {
name = "hello"
}
action "test_action" "one" {
config {
attr = test_object.a.name
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsAction{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
},
},
},
expectPlanActionCalled: true,
buildState: func(state *states.SyncState) {
state.SetResourceInstanceCurrent(mustResourceInstanceAddr("test_object.a"), &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"name":"hello"}`),
Status: states.ObjectReady,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("drifted value"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("action.test_action.one")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
},
readResourceFn: func(t *testing.T, request providers.ReadResourceRequest) providers.ReadResourceResponse {
return providers.ReadResourceResponse{
NewState: cty.ObjectVal(map[string]cty.Value{
"name": cty.StringVal("drifted value"),
}),
}
},
},
"invoke action with reference (drift, no refresh)": {
module: map[string]string{
"main.tf": `
resource "test_object" "a" {
name = "hello"
}
action "test_action" "one" {
config {
attr = test_object.a.name
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
SkipRefresh: true,
ActionTargets: []addrs.Targetable{
addrs.AbsAction{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
},
},
},
expectPlanActionCalled: true,
buildState: func(state *states.SyncState) {
state.SetResourceInstanceCurrent(mustResourceInstanceAddr("test_object.a"), &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"name":"hello"}`),
Status: states.ObjectReady,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("hello"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("action.test_action.one")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
},
readResourceFn: func(t *testing.T, request providers.ReadResourceRequest) providers.ReadResourceResponse {
return providers.ReadResourceResponse{
NewState: cty.ObjectVal(map[string]cty.Value{
"name": cty.StringVal("drifted value"),
}),
}
},
},
"invoke action with partially applied configuration": {
module: map[string]string{
"main.tf": `
resource "test_object" "a" {
name = "hello"
}
action "test_action" "one" {
config {
attr = test_object.a.name
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsAction{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
},
},
},
expectPlanActionCalled: false,
assertPlanDiagnostics: func(t *testing.T, diagnostics tfdiags.Diagnostics) {
if len(diagnostics) != 1 {
t.Errorf("expected exactly one diagnostic but got %d", len(diagnostics))
}
if diagnostics[0].Description().Summary != "Partially applied configuration" {
t.Errorf("wrong diagnostic: %s", diagnostics[0].Description().Summary)
}
},
},
"non-referenced resource isn't refreshed during invoke": {
module: map[string]string{
"main.tf": `
resource "test_object" "a" {
name = "hello"
}
action "test_action" "one" {
config {
attr = "world"
}
}
`,
},
planOpts: &PlanOpts{
Mode: plans.RefreshOnlyMode,
ActionTargets: []addrs.Targetable{
addrs.AbsAction{
Action: addrs.Action{
Type: "test_action",
Name: "one",
},
},
},
},
expectPlanActionCalled: true,
buildState: func(state *states.SyncState) {
state.SetResourceInstanceCurrent(mustResourceInstanceAddr("test_object.a"), &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"name":"hello"}`),
Status: states.ObjectReady,
}, mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`))
},
assertPlan: func(t *testing.T, plan *plans.Plan) {
if len(plan.Changes.ActionInvocations) != 1 {
t.Fatalf("expected exactly one invocation, and found %d", len(plan.Changes.ActionInvocations))
}
ais := plan.Changes.ActionInvocations[0]
ai, err := ais.Decode(&testActionSchema)
if err != nil {
t.Fatal(err)
}
if _, ok := ai.ActionTrigger.(*plans.InvokeActionTrigger); !ok {
t.Fatalf("expected invoke action trigger type but was %T", ai.ActionTrigger)
}
expected := cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("world"),
})
if diff := cmp.Diff(ai.ConfigValue, expected, ctydebug.CmpOptions); len(diff) > 0 {
t.Fatalf("wrong value in plan: %s", diff)
}
if !ai.Addr.Equal(mustActionInstanceAddr("action.test_action.one")) {
t.Fatalf("wrong address in plan: %s", ai.Addr)
}
if len(plan.DriftedResources) > 0 {
t.Fatalf("shouldn't have refreshed any resources")
}
},
readResourceFn: func(t *testing.T, request providers.ReadResourceRequest) (resp providers.ReadResourceResponse) {
t.Fatalf("should not have tried to refresh any resources")
return
},
},
},
// ======== CONDITIONS ========
// Tests for the `condition` attribute of `action_trigger` blocks:
// which references are allowed, and when a condition gates invocation.
// ======== CONDITIONS ========
"conditions": {
"boolean condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
action "test_action" "bye" {}
resource "test_object" "foo" {
name = "foo"
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
condition = test_object.foo.name == "foo"
actions = [action.test_action.hello, action.test_action.world]
}
action_trigger {
events = [after_create]
condition = test_object.foo.name == "bye"
actions = [action.test_action.bye]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected 2 actions in plan, got %d", len(p.Changes.ActionInvocations))
}
invokedActionAddrs := []string{}
for _, action := range p.Changes.ActionInvocations {
invokedActionAddrs = append(invokedActionAddrs, action.Addr.String())
}
slices.Sort(invokedActionAddrs)
expectedActions := []string{
"action.test_action.hello",
"action.test_action.world",
}
if !cmp.Equal(expectedActions, invokedActionAddrs) {
t.Fatalf("expected actions: %v, got %v", expectedActions, invokedActionAddrs)
}
},
},
"unknown condition": {
module: map[string]string{
"main.tf": `
variable "cond" {
type = string
}
action "test_action" "hello" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
condition = var.cond == "foo"
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
SetVariables: InputValues{
"cond": &InputValue{
Value: cty.UnknownVal(cty.String),
SourceType: ValueFromCaller,
},
},
},
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Condition must be known",
Detail: "The condition expression resulted in an unknown value, but it must be a known boolean value.",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 19, Byte: 184},
End: hcl.Pos{Line: 10, Column: 36, Byte: 201},
},
})
},
},
"non-boolean condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "foo" {
name = "foo"
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
condition = test_object.foo.name
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect value type",
Detail: "Invalid expression value: a bool is required.",
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 19, Byte: 194},
End: hcl.Pos{Line: 10, Column: 39, Byte: 214},
},
})
},
},
"using self in before_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
name = "foo"
lifecycle {
action_trigger {
events = [before_create]
condition = self.name == "foo"
actions = [action.test_action.hello]
}
action_trigger {
events = [after_update]
condition = self.name == "bar"
actions = [action.test_action.world]
}
}
}
`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Self reference not allowed",
Detail: `The condition expression cannot reference "self".`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 9, Column: 19, Byte: 193},
End: hcl.Pos{Line: 9, Column: 37, Byte: 211},
},
})
},
},
"using self in after_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
name = "foo"
lifecycle {
action_trigger {
events = [after_create]
condition = self.name == "foo"
actions = [action.test_action.hello]
}
action_trigger {
events = [after_update]
condition = self.name == "bar"
actions = [action.test_action.world]
}
}
}
`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
// We only expect one diagnostic, as the other condition is valid
return tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Self reference not allowed",
Detail: `The condition expression cannot reference "self".`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 9, Column: 19, Byte: 192},
End: hcl.Pos{Line: 9, Column: 37, Byte: 210},
},
})
},
},
"referencing triggering resource in before_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
name = "foo"
lifecycle {
action_trigger {
events = [before_create]
condition = test_object.a.name == "foo"
actions = [action.test_action.hello]
}
action_trigger {
events = [before_update]
condition = test_object.a.name == "bar"
actions = [action.test_action.world]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlanDiagnostics: func(t *testing.T, diags tfdiags.Diagnostics) {
if !diags.HasErrors() {
t.Errorf("expected errors, got none")
}
err := diags.Err().Error()
if !strings.Contains(err, "Cycle:") || !strings.Contains(err, "action.test_action.hello") || !strings.Contains(err, "test_object.a") {
t.Fatalf("Expected '[Error] Cycle: action.test_action.hello (instance), test_object.a', got '%s'", err)
}
},
},
"referencing triggering resource in after_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
name = "foo"
lifecycle {
action_trigger {
events = [after_create]
condition = test_object.a.name == "foo"
actions = [action.test_action.hello]
}
action_trigger {
events = [after_update]
condition = test_object.a.name == "bar"
actions = [action.test_action.world]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Errorf("expected 1 action invocation, got %d", len(p.Changes.ActionInvocations))
}
},
},
"using each in before_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
for_each = toset(["foo", "bar"])
name = each.key
lifecycle {
action_trigger {
events = [before_create]
condition = each.key == "foo"
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Each reference not allowed",
Detail: `The condition expression cannot reference "each" if the action is run before the resource is applied.`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 19, Byte: 231},
End: hcl.Pos{Line: 10, Column: 36, Byte: 248},
},
}).Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Each reference not allowed",
Detail: `The condition expression cannot reference "each" if the action is run before the resource is applied.`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 19, Byte: 231},
End: hcl.Pos{Line: 10, Column: 36, Byte: 248},
},
})
},
},
"using each in after_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
for_each = toset(["foo", "bar"])
name = each.key
lifecycle {
action_trigger {
events = [after_create]
condition = each.key == "foo"
actions = [action.test_action.hello]
}
action_trigger {
events = [after_update]
condition = each.key == "bar"
actions = [action.test_action.world]
}
}
}`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Errorf("Expected 1 action invocations, got %d", len(p.Changes.ActionInvocations))
}
if p.Changes.ActionInvocations[0].Addr.String() != "action.test_action.hello" {
t.Errorf("Expected action 'action.test_action.hello', got %s", p.Changes.ActionInvocations[0].Addr.String())
}
},
},
"using count.index in before_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
count = 3
name = "item-${count.index}"
lifecycle {
action_trigger {
events = [before_create]
condition = count.index == 1
actions = [action.test_action.hello]
}
action_trigger {
events = [before_update]
condition = count.index == 2
actions = [action.test_action.world]
}
}
}`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Count reference not allowed",
Detail: `The condition expression cannot reference "count" if the action is run before the resource is applied.`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 21, Byte: 210},
End: hcl.Pos{Line: 10, Column: 37, Byte: 226},
},
}).Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Count reference not allowed",
Detail: `The condition expression cannot reference "count" if the action is run before the resource is applied.`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 21, Byte: 210},
End: hcl.Pos{Line: 10, Column: 37, Byte: 226},
},
}).Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Count reference not allowed",
Detail: `The condition expression cannot reference "count" if the action is run before the resource is applied.`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 21, Byte: 210},
End: hcl.Pos{Line: 10, Column: 37, Byte: 226},
},
})
},
},
"using count.index in after_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
count = 3
name = "item-${count.index}"
lifecycle {
action_trigger {
events = [after_create]
condition = count.index == 1
actions = [action.test_action.hello]
}
action_trigger {
events = [after_update]
condition = count.index == 2
actions = [action.test_action.world]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Errorf("Expected 1 action invocation, got %d", len(p.Changes.ActionInvocations))
}
if p.Changes.ActionInvocations[0].Addr.String() != "action.test_action.hello" {
t.Errorf("Expected action invocation %q, got %q", "action.test_action.hello", p.Changes.ActionInvocations[0].Addr.String())
}
},
},
"using each.value in before_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
for_each = {"foo" = "value1", "bar" = "value2"}
name = each.value
lifecycle {
action_trigger {
events = [before_create]
condition = each.value == "value1"
actions = [action.test_action.hello]
}
action_trigger {
events = [before_update]
condition = each.value == "value2"
actions = [action.test_action.world]
}
}
}
`,
},
expectPlanActionCalled: false,
expectPlanDiagnostics: func(m *configs.Config) tfdiags.Diagnostics {
return tfdiags.Diagnostics{}.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Each reference not allowed",
Detail: `The condition expression cannot reference "each" if the action is run before the resource is applied.`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 21, Byte: 260},
End: hcl.Pos{Line: 10, Column: 43, Byte: 282},
},
}).Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Each reference not allowed",
Detail: `The condition expression cannot reference "each" if the action is run before the resource is applied.`,
Subject: &hcl.Range{
Filename: filepath.Join(m.Module.SourceDir, "main.tf"),
Start: hcl.Pos{Line: 10, Column: 21, Byte: 260},
End: hcl.Pos{Line: 10, Column: 43, Byte: 282},
},
})
},
},
"using each.value in after_* condition": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "world" {}
resource "test_object" "a" {
for_each = {"foo" = "value1", "bar" = "value2"}
name = each.value
lifecycle {
action_trigger {
events = [after_create]
condition = each.value == "value1"
actions = [action.test_action.hello]
}
action_trigger {
events = [after_update]
condition = each.value == "value2"
actions = [action.test_action.world]
}
}
}
`,
},
expectPlanActionCalled: true,
assertPlan: func(t *testing.T, p *plans.Plan) {
if len(p.Changes.ActionInvocations) != 1 {
t.Errorf("Expected 1 action invocations, got %d", len(p.Changes.ActionInvocations))
}
if p.Changes.ActionInvocations[0].Addr.String() != "action.test_action.hello" {
t.Errorf("Expected action 'action.test_action.hello', got %s", p.Changes.ActionInvocations[0].Addr.String())
}
},
},
},
// ======== TARGETING ========
// Tests for how action triggers interact with resource targeting
// (the -target flag): targeted resources' actions run, others don't.
// ======== TARGETING ========
"targeting": {
"targeted run": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "there" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
action_trigger {
events = [after_create]
actions = [action.test_action.there]
}
}
}
action "test_action" "general" {}
action "test_action" "kenobi" {}
resource "test_object" "b" {
lifecycle {
action_trigger {
events = [before_create, after_update]
actions = [action.test_action.general]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
// We only target resource a
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(addrs.ManagedResourceMode, "test_object", "a"),
},
},
// There is a warning related to targeting that we will just ignore
assertPlanDiagnostics: func(t *testing.T, d tfdiags.Diagnostics) {
if d.HasErrors() {
t.Fatalf("expected no errors, got %s", d.Err().Error())
}
},
assertPlan: func(t *testing.T, p *plans.Plan) {
// Validate we are targeting resource a out of paranoia
if len(p.Changes.Resources) != 1 {
t.Fatalf("expected plan to have 1 resource change, got %d", len(p.Changes.Resources))
}
if p.Changes.Resources[0].Addr.String() != "test_object.a" {
t.Fatalf("expected plan to target resource 'test_object.a', got %s", p.Changes.Resources[0].Addr.String())
}
// Ensure the actions for test_object.a are planned
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected plan to have 2 action invocations, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{
p.Changes.ActionInvocations[0].Addr.String(),
p.Changes.ActionInvocations[1].Addr.String(),
}
slices.Sort(actionAddrs)
if actionAddrs[0] != "action.test_action.hello" || actionAddrs[1] != "action.test_action.there" {
t.Fatalf("expected action addresses to be ['action.test_action.hello', 'action.test_action.there'], got %v", actionAddrs)
}
},
},
"targeted run with ancestor that has actions": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
action "test_action" "there" {}
resource "test_object" "origin" {
name = "origin"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
resource "test_object" "a" {
name = test_object.origin.name
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_action.there]
}
}
}
action "test_action" "general" {}
action "test_action" "kenobi" {}
resource "test_object" "b" {
lifecycle {
action_trigger {
events = [before_create, after_update]
actions = [action.test_action.general]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
// We only target resource a
Targets: []addrs.Targetable{
mustResourceInstanceAddr("test_object.a"),
},
},
// There is a warning related to targeting that we will just ignore
assertPlanDiagnostics: func(t *testing.T, d tfdiags.Diagnostics) {
if d.HasErrors() {
t.Fatalf("expected no errors, got %s", d.Err().Error())
}
},
assertPlan: func(t *testing.T, p *plans.Plan) {
// Validate we are targeting resource a out of paranoia
if len(p.Changes.Resources) != 2 {
t.Fatalf("expected plan to have 2 resource changes, got %d", len(p.Changes.Resources))
}
resourceAddrs := []string{
p.Changes.Resources[0].Addr.String(),
p.Changes.Resources[1].Addr.String(),
}
slices.Sort(resourceAddrs)
if resourceAddrs[0] != "test_object.a" || resourceAddrs[1] != "test_object.origin" {
t.Fatalf("expected resource addresses to be ['test_object.a', 'test_object.origin'], got %v", resourceAddrs)
}
// Ensure the actions for test_object.a are planned
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected plan to have 2 action invocations, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{
p.Changes.ActionInvocations[0].Addr.String(),
p.Changes.ActionInvocations[1].Addr.String(),
}
slices.Sort(actionAddrs)
if actionAddrs[0] != "action.test_action.hello" || actionAddrs[1] != "action.test_action.there" {
t.Fatalf("expected action addresses to be ['action.test_action.hello', 'action.test_action.there'], got %v", actionAddrs)
}
},
},
"targeted run with expansion": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {
count = 3
}
action "test_action" "there" {
count = 3
}
resource "test_object" "a" {
count = 3
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello[count.index]]
}
action_trigger {
events = [after_create]
actions = [action.test_action.there[count.index]]
}
}
}
action "test_action" "general" {}
action "test_action" "kenobi" {}
resource "test_object" "b" {
lifecycle {
action_trigger {
events = [before_create, after_update]
actions = [action.test_action.general]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
// We only target resource a
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(addrs.ManagedResourceMode, "test_object", "a").Instance(addrs.IntKey(2)),
},
},
// There is a warning related to targeting that we will just ignore
assertPlanDiagnostics: func(t *testing.T, d tfdiags.Diagnostics) {
if d.HasErrors() {
t.Fatalf("expected no errors, got %s", d.Err().Error())
}
},
assertPlan: func(t *testing.T, p *plans.Plan) {
// Validate we are targeting resource a out of paranoia
if len(p.Changes.Resources) != 1 {
t.Fatalf("expected plan to have 1 resource change, got %d", len(p.Changes.Resources))
}
if p.Changes.Resources[0].Addr.String() != "test_object.a[2]" {
t.Fatalf("expected plan to target resource 'test_object.a[2]', got %s", p.Changes.Resources[0].Addr.String())
}
// Ensure the actions for test_object.a are planned
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected plan to have 2 action invocations, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{
p.Changes.ActionInvocations[0].Addr.String(),
p.Changes.ActionInvocations[1].Addr.String(),
}
slices.Sort(actionAddrs)
if actionAddrs[0] != "action.test_action.hello[2]" || actionAddrs[1] != "action.test_action.there[2]" {
t.Fatalf("expected action addresses to be ['action.test_action.hello[2]', 'action.test_action.there[2]'], got %v", actionAddrs)
}
},
},
"targeted run with resource reference": {
module: map[string]string{
"main.tf": `
resource "test_object" "source" {}
action "test_action" "hello" {
config {
attr = test_object.source.name
}
}
action "test_action" "there" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
action_trigger {
events = [after_create]
actions = [action.test_action.there]
}
}
}
action "test_action" "general" {}
action "test_action" "kenobi" {}
resource "test_object" "b" {
lifecycle {
action_trigger {
events = [before_create, after_update]
actions = [action.test_action.general]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
// We only target resource a
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(addrs.ManagedResourceMode, "test_object", "a"),
},
},
// There is a warning related to targeting that we will just ignore
assertPlanDiagnostics: func(t *testing.T, d tfdiags.Diagnostics) {
if d.HasErrors() {
t.Fatalf("expected no errors, got %s", d.Err().Error())
}
},
assertPlan: func(t *testing.T, p *plans.Plan) {
// Validate we are targeting resource a out of paranoia
if len(p.Changes.Resources) != 2 {
t.Fatalf("expected plan to have 2 resource changes, got %d", len(p.Changes.Resources))
}
resourceAddrs := []string{
p.Changes.Resources[0].Addr.String(),
p.Changes.Resources[1].Addr.String(),
}
slices.Sort(resourceAddrs)
if resourceAddrs[0] != "test_object.a" || resourceAddrs[1] != "test_object.source" {
t.Fatalf("expected resource addresses to be ['test_object.a', 'test_object.source'], got %v", resourceAddrs)
}
// Ensure the actions for test_object.a are planned
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected plan to have 2 action invocations, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{
p.Changes.ActionInvocations[0].Addr.String(),
p.Changes.ActionInvocations[1].Addr.String(),
}
slices.Sort(actionAddrs)
if actionAddrs[0] != "action.test_action.hello" || actionAddrs[1] != "action.test_action.there" {
t.Fatalf("expected action addresses to be ['action.test_action.hello', 'action.test_action.there'], got %v", actionAddrs)
}
},
},
"targeted run with condition referencing another resource": {
module: map[string]string{
"main.tf": `
resource "test_object" "source" {
name = "source"
}
action "test_action" "hello" {
config {
attr = test_object.source.name
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [before_create]
condition = test_object.source.name == "source"
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
// Only target resource a
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(addrs.ManagedResourceMode, "test_object", "a"),
},
},
assertPlanDiagnostics: func(t *testing.T, d tfdiags.Diagnostics) {
if d.HasErrors() {
t.Fatalf("expected no errors, got %s", d.Err().Error())
}
},
assertPlan: func(t *testing.T, p *plans.Plan) {
// Only resource a should be planned
if len(p.Changes.Resources) != 2 {
t.Fatalf("expected plan to have 2 resource changes, got %d", len(p.Changes.Resources))
}
resourceAddrs := []string{p.Changes.Resources[0].Addr.String(), p.Changes.Resources[1].Addr.String()}
slices.Sort(resourceAddrs)
if resourceAddrs[0] != "test_object.a" || resourceAddrs[1] != "test_object.source" {
t.Fatalf("expected resource addresses to be ['test_object.a', 'test_object.source'], got %v", resourceAddrs)
}
// Only one action invocation for resource a
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected plan to have 1 action invocation, got %d", len(p.Changes.ActionInvocations))
}
if p.Changes.ActionInvocations[0].Addr.String() != "action.test_action.hello" {
t.Fatalf("expected action address to be 'action.test_action.hello', got '%s'", p.Changes.ActionInvocations[0].Addr)
}
},
},
"targeted run with action referencing another resource that also triggers actions": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "source" {
name = "source"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
action "test_action" "there" {
config {
attr = test_object.source.name
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_action.there]
}
}
}
resource "test_object" "b" {
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
// Only target resource a
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(addrs.ManagedResourceMode, "test_object", "a"),
},
},
assertPlanDiagnostics: func(t *testing.T, d tfdiags.Diagnostics) {
if d.HasErrors() {
t.Fatalf("expected no errors, got %s", d.Err().Error())
}
},
assertPlan: func(t *testing.T, p *plans.Plan) {
// Should plan for resource a and its dependency source, but not b
if len(p.Changes.Resources) != 2 {
t.Fatalf("expected plan to have 2 resource changes, got %d", len(p.Changes.Resources))
}
resourceAddrs := []string{
p.Changes.Resources[0].Addr.String(),
p.Changes.Resources[1].Addr.String(),
}
slices.Sort(resourceAddrs)
if resourceAddrs[0] != "test_object.a" || resourceAddrs[1] != "test_object.source" {
t.Fatalf("expected resource addresses to be ['test_object.a', 'test_object.source'], got %v", resourceAddrs)
}
// Should plan both actions for resource a
if len(p.Changes.ActionInvocations) != 2 {
t.Fatalf("expected plan to have 2 action invocations, got %d", len(p.Changes.ActionInvocations))
}
actionAddrs := []string{
p.Changes.ActionInvocations[0].Addr.String(),
p.Changes.ActionInvocations[1].Addr.String(),
}
slices.Sort(actionAddrs)
if actionAddrs[0] != "action.test_action.hello" || actionAddrs[1] != "action.test_action.there" {
t.Fatalf("expected action addresses to be ['action.test_action.hello', 'action.test_action.there'], got %v", actionAddrs)
}
},
},
"targeted run with not-triggered action referencing another resource that also triggers actions": {
module: map[string]string{
"main.tf": `
action "test_action" "hello" {}
resource "test_object" "source" {
name = "source"
lifecycle {
action_trigger {
events = [before_create]
actions = [action.test_action.hello]
}
}
}
action "test_action" "there" {
config {
attr = test_object.source.name
}
}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_update]
actions = [action.test_action.there]
}
}
}
resource "test_object" "b" {
lifecycle {
action_trigger {
events = [before_update]
actions = [action.test_action.hello]
}
}
}
`,
},
expectPlanActionCalled: true,
planOpts: &PlanOpts{
Mode: plans.NormalMode,
// Only target resource a
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(addrs.ManagedResourceMode, "test_object", "a"),
},
},
assertPlanDiagnostics: func(t *testing.T, d tfdiags.Diagnostics) {
if d.HasErrors() {
t.Fatalf("expected no errors, got %s", d.Err().Error())
}
},
assertPlan: func(t *testing.T, p *plans.Plan) {
// Should plan for resource a and its dependency source, but not b
if len(p.Changes.Resources) != 2 {
t.Fatalf("expected plan to have 2 resource changes, got %d", len(p.Changes.Resources))
}
resourceAddrs := []string{
p.Changes.Resources[0].Addr.String(),
p.Changes.Resources[1].Addr.String(),
}
slices.Sort(resourceAddrs)
if resourceAddrs[0] != "test_object.a" || resourceAddrs[1] != "test_object.source" {
t.Fatalf("expected resource addresses to be ['test_object.a', 'test_object.source'], got %v", resourceAddrs)
}
// Should plan only the before_create action of the dependant resource
if len(p.Changes.ActionInvocations) != 1 {
t.Fatalf("expected plan to have 1 action invocation, got %d", len(p.Changes.ActionInvocations))
}
if p.Changes.ActionInvocations[0].Addr.String() != "action.test_action.hello" {
t.Fatalf("expected action addresses to be 'action.test_action.hello', got %q", p.Changes.ActionInvocations[0].Addr.String())
}
},
},
},
} {
t.Run(topic, func(t *testing.T) {
for name, tc := range tcs {
t.Run(name, func(t *testing.T) {
if tc.toBeImplemented {
t.Skip("Test not implemented yet")
}
opts := SimplePlanOpts(plans.NormalMode, InputValues{})
if tc.planOpts != nil {
opts = tc.planOpts
}
configOpts := []configs.Option{}
if opts.Query {
configOpts = append(configOpts, configs.MatchQueryFiles())
}
m := testModuleInline(t, tc.module, configOpts...)
p := &testing_provider.MockProvider{
GetProviderSchemaResponse: &providers.GetProviderSchemaResponse{
Actions: map[string]providers.ActionSchema{
"test_action": testActionSchema,
"test_action_wo": writeOnlyActionSchema,
"test_nested": nestedActionSchema,
},
ResourceTypes: map[string]providers.Schema{
"test_object": {
Body: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"name": {
Type: cty.String,
Optional: true,
},
},
},
},
},
ListResourceTypes: map[string]providers.Schema{
"test_resource": {
Body: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"data": {
Type: cty.DynamicPseudoType,
Computed: true,
},
},
BlockTypes: map[string]*configschema.NestedBlock{
"config": {
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"filter": {
Required: true,
NestedType: &configschema.Object{
Nesting: configschema.NestingSingle,
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Required: true,
},
},
},
},
},
},
Nesting: configschema.NestingSingle,
},
},
},
},
},
},
ListResourceFn: func(req providers.ListResourceRequest) providers.ListResourceResponse {
resp := []cty.Value{}
ret := req.Config.AsValueMap()
maps.Copy(ret, map[string]cty.Value{
"data": cty.TupleVal(resp),
})
return providers.ListResourceResponse{Result: cty.ObjectVal(ret)}
},
}
other := &testing_provider.MockProvider{
GetProviderSchemaResponse: &providers.GetProviderSchemaResponse{
ResourceTypes: map[string]providers.Schema{
"other_object": {
Body: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"name": {
Type: cty.String,
Optional: true,
},
},
},
},
},
},
}
ecosystem := &testing_provider.MockProvider{
GetProviderSchemaResponse: &providers.GetProviderSchemaResponse{
Actions: map[string]providers.ActionSchema{
"ecosystem": {
ConfigSchema: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Optional: true,
},
},
},
},
},
},
}
if tc.planActionFn != nil {
p.PlanActionFn = func(r providers.PlanActionRequest) providers.PlanActionResponse {
return tc.planActionFn(t, r)
}
}
if tc.planResourceFn != nil {
p.PlanResourceChangeFn = func(r providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return tc.planResourceFn(t, r)
}
}
if tc.readResourceFn != nil {
p.ReadResourceFn = func(r providers.ReadResourceRequest) providers.ReadResourceResponse {
return tc.readResourceFn(t, r)
}
}
ctx := testContext2(t, &ContextOpts{
Providers: map[addrs.Provider]providers.Factory{
// The providers never actually going to get called here, we should
// catch the error long before anything happens.
addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
addrs.NewDefaultProvider("other"): testProviderFuncFixed(other),
{
Type: "ecosystem",
Namespace: "danielmschmidt",
Hostname: addrs.DefaultProviderRegistryHost,
}: testProviderFuncFixed(ecosystem),
},
})
diags := ctx.Validate(m, &ValidateOpts{
Query: opts.Query,
})
if tc.expectValidateDiagnostics != nil {
tfdiags.AssertDiagnosticsMatch(t, diags, tc.expectValidateDiagnostics(m))
} else if tc.assertValidateDiagnostics != nil {
tc.assertValidateDiagnostics(t, diags)
} else {
tfdiags.AssertNoDiagnostics(t, diags)
}
if diags.HasErrors() {
return
}
var prevRunState *states.State
if tc.buildState != nil {
prevRunState = states.BuildState(tc.buildState)
}
plan, diags := ctx.Plan(m, prevRunState, opts)
if tc.expectPlanDiagnostics != nil {
tfdiags.AssertDiagnosticsMatch(t, diags, tc.expectPlanDiagnostics(m))
} else if tc.assertPlanDiagnostics != nil {
tc.assertPlanDiagnostics(t, diags)
} else {
tfdiags.AssertNoDiagnostics(t, diags)
}
if tc.expectPlanActionCalled && !p.PlanActionCalled {
t.Errorf("expected plan action to be called, but it was not")
} else if !tc.expectPlanActionCalled && p.PlanActionCalled {
t.Errorf("expected plan action to not be called, but it was")
}
if tc.assertPlan != nil {
tc.assertPlan(t, plan)
}
})
}
})
}
}
func TestContextPlan_validateActionInTriggerExists(t *testing.T) {
	// This validation occurs during TransformConfig: the action referenced
	// in the lifecycle action_trigger block is never declared, so planning
	// must fail with a reference error.
	cfg := `
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_action.hello]
}
}
}
`
	mod := testModuleInline(t, map[string]string{"main.tf": cfg})
	provider := simpleMockProvider()
	tfCtx := testContext2(t, &ContextOpts{
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(provider),
		},
	})

	_, diags := tfCtx.Plan(mod, nil, DefaultPlanOpts)
	if !diags.HasErrors() {
		t.Fatal("expected errors, got success!")
	}

	wantErr := "action_trigger actions references non-existent action: The lifecycle action_trigger actions list contains a reference to the action \"action.test_action.hello\" that does not exist in the configuration of this module."
	if gotErr := diags.Err().Error(); gotErr != wantErr {
		t.Fatalf("wrong error!, got %q, expected %q", gotErr, wantErr)
	}
}
func TestContextPlan_validateActionInTriggerExistsWithSimilarAction(t *testing.T) {
	// this validation occurs during TransformConfig
	// The trigger references "hello_world" while the declared action is
	// "hello_word" — deliberately one edit apart, to exercise the
	// did-you-mean suggestion appended to the diagnostic.
	module := `
action "test_action" "hello_word" {}
resource "test_object" "a" {
lifecycle {
action_trigger {
events = [after_create]
actions = [action.test_action.hello_world]
}
}
}
`
	m := testModuleInline(t, map[string]string{"main.tf": module})
	p := simpleMockProvider()
	ctx := testContext2(t, &ContextOpts{
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})

	_, diags := ctx.Plan(m, nil, DefaultPlanOpts)
	if !diags.HasErrors() {
		t.Fatal("expected errors, got success!")
	}

	// Exact match on the full diagnostic, including the suggestion text.
	expectedErr := "action_trigger actions references non-existent action: The lifecycle action_trigger actions list contains a reference to the action \"action.test_action.hello_world\" that does not exist in the configuration of this module. Did you mean \"action.test_action.hello_word\"?"
	if diags.Err().Error() != expectedErr {
		t.Fatalf("wrong error!, got %q, expected %q", diags.Err().Error(), expectedErr)
	}
} | go | github | https://github.com/hashicorp/terraform | internal/terraform/context_plan_actions_test.go |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Creates three article models, each related to django.contrib.sites
    # in a different way: two ForeignKeys and one ManyToManyField.

    dependencies = [
        ('sites', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='CustomArticle',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=50)),
                # FK with a non-default, descriptive field name.
                ('places_this_article_should_appear', models.ForeignKey('sites.Site', models.CASCADE)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='ExclusiveArticle',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=50)),
                # One article belongs to exactly one site.
                ('site', models.ForeignKey('sites.Site', models.CASCADE)),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='SyndicatedArticle',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=50)),
                # One article may appear on many sites.
                ('sites', models.ManyToManyField('sites.Site')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model,),
        ),
] | unknown | codeparrot/codeparrot-clean | ||
# Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
import nova.cert.rpcapi
from nova import exception
from nova import network
from nova.openstack.common.gettextutils import _
# URL alias under which this extension is exposed.
ALIAS = "os-certificates"
# Policy-check helper for the v3 certificates extension actions.
authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS)
def _translate_certificate_view(certificate, private_key=None):
return {
'data': certificate,
'private_key': private_key,
}
class CertificatesController(object):
    """The x509 Certificates API controller for the OpenStack API."""

    def __init__(self):
        super(CertificatesController, self).__init__()
        self.network_api = network.API()
        self.cert_rpcapi = nova.cert.rpcapi.CertAPI()

    @extensions.expected_errors((404, 501))
    def show(self, req, id):
        """Return certificate information.

        Only the root CA certificate can be fetched; any other id yields
        HTTP 501, and a missing CA file yields HTTP 404.
        """
        ctx = req.environ['nova.context']
        authorize(ctx, action='show')
        if id != 'root':
            raise webob.exc.HTTPNotImplemented(
                explanation=_("Only root certificate can be retrieved."))
        try:
            cert = self.cert_rpcapi.fetch_ca(ctx, project_id=ctx.project_id)
        except exception.CryptoCAFileNotFound as exc:
            raise webob.exc.HTTPNotFound(explanation=exc.format_message())
        return {'certificate': _translate_certificate_view(cert)}

    @extensions.expected_errors(())
    @wsgi.response(201)
    def create(self, req, body=None):
        """Create an x509 certificate for the requesting user/project."""
        ctx = req.environ['nova.context']
        authorize(ctx, action='create')
        pk, cert = self.cert_rpcapi.generate_x509_cert(
            ctx, user_id=ctx.user_id, project_id=ctx.project_id)
        return {'certificate': _translate_certificate_view(cert, pk)}
class Certificates(extensions.V3APIExtensionBase):
    """Certificates support."""

    name = "Certificates"
    alias = ALIAS
    version = 1

    def get_resources(self):
        """Expose the os-certificates top-level resource."""
        controller = CertificatesController()
        return [extensions.ResourceExtension('os-certificates',
                                             controller,
                                             member_actions={})]
def get_controller_extensions(self):
return [] | unknown | codeparrot/codeparrot-clean | ||
import wx
import time
from scipy import linalg as la
from matplotlib.path import Path
from matplotlib.backends.backend_wxagg import FigureCanvasWxAgg as FigureCanvas
import matplotlib.pyplot as pl
import matplotlib.patches as patches
import numpy as np
import sys
from os import path, getenv
PPRZ_SRC = getenv("PAPARAZZI_SRC", path.normpath(path.join(path.dirname(path.abspath(__file__)), '../../../../')))
sys.path.append(PPRZ_SRC + "/sw/lib/python")
sys.path.append(PPRZ_SRC + "/sw/ext/pprzlink/lib/v1.0/python")
from pprzlink.ivy import IvyMessagesInterface
from pprzlink.message import PprzMessage
from settings_xml_parse import PaparazziACSettings
# Initial window size (pixels) of the GVF visualization frame.
WIDTH = 800
HEIGHT = 800
class GVFFrame(wx.Frame):
    # wx window that visualizes an aircraft's Guidance Vector Field: it
    # subscribes to Ivy telemetry, keeps the latest vehicle state and
    # desired trajectory, and redraws the map every 100 ms.
    def __init__(self, ac_id):
        # ac_id: Paparazzi aircraft id whose telemetry is monitored.
        wx.Frame.__init__(self, id=-1, parent=None, \
            name=u'GVF', size=wx.Size(WIDTH, HEIGHT), \
            style=wx.DEFAULT_FRAME_STYLE, title=u'Guidance Vector Field')

        # Vehicle variables (course/yaw in radians, XY in meters)
        self.ac_id = ac_id
        self.course = 0
        self.yaw = 0
        self.XY = np.array([0, 0])

        # Desired trajectory
        self.timer_traj = 0 # We do not update the traj every time we receive a msg
        self.timer_traj_lim = 7 # (7+1) * 0.25secs
        self.s = 0
        self.kn = 0
        self.ke = 0
        self.map_gvf = map2d(np.array([0, 0]), 150000)
        self.traj = None

        # Frame: embed the matplotlib figure and schedule periodic redraws
        self.canvas = FigureCanvas(self, -1, self.map_gvf.fig)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self.redraw_timer = wx.Timer(self)
        self.Bind(wx.EVT_TIMER, self.OnRedrawTimer, self.redraw_timer)
        self.redraw_timer.Start(100)

        # Ivy: subscribe to telemetry and resolve the gvf_ke / gvf_kn
        # setting indexes so DL_VALUE messages can be decoded.
        self.interface = IvyMessagesInterface("GVF")
        self.interface.subscribe(self.message_recv)
        settings = PaparazziACSettings(ac_id)
        self.ke_index = None
        self.kn_index = None
        self.indexes_are_good = 0
        self.list_of_indexes = ['gvf_ke', 'gvf_kn']

        for setting_ in self.list_of_indexes:
            try:
                index = settings.name_lookup[setting_].index
                if setting_ == 'gvf_ke':
                    self.ke_index = index
                if setting_ == 'gvf_kn':
                    self.kn_index = index
                self.indexes_are_good = self.indexes_are_good + 1
            except Exception as e:
                print(e)
                print(setting_ + " setting not found, \
have you forgotten gvf.xml in your settings?")

    def message_recv(self, ac_id, msg):
        # Ivy callback: update vehicle state and (rate-limited) the
        # desired trajectory from the monitored aircraft's messages.
        if int(ac_id) == self.ac_id:
            if msg.name == 'GPS':
                # GPS course is reported in decidegrees -> radians.
                self.course = int(msg.get_field(3))*np.pi/1800
            if msg.name == 'NAVIGATION':
                self.XY[0] = float(msg.get_field(2))
                self.XY[1] = float(msg.get_field(3))
            if msg.name == 'ATTITUDE':
                self.yaw = float(msg.get_field(1))
            if msg.name == 'DL_VALUE' and \
                    self.indexes_are_good == len(self.list_of_indexes):
                # Gain updates: recompute the field with the new gain.
                if int(msg.get_field(0)) == int(self.ke_index):
                    self.ke = float(msg.get_field(1))
                    if self.traj is not None:
                        self.traj.vector_field(self.traj.XYoff, \
                            self.map_gvf.area, self.s, self.kn, self.ke)
                if int(msg.get_field(0)) == int(self.kn_index):
                    self.kn = float(msg.get_field(1))
                    if self.traj is not None:
                        self.traj.vector_field(self.traj.XYoff, \
                            self.map_gvf.area, self.s, self.kn, self.ke)
            if msg.name == 'GVF':
                # Field 1 selects the trajectory type; field 3 carries the
                # comma-separated trajectory parameters.
                self.gvf_error = float(msg.get_field(0))
                # Straight line
                if int(msg.get_field(1)) == 0 \
                        and self.timer_traj == self.timer_traj_lim:
                    self.s = int(msg.get_field(2))
                    param = [float(x) for x in msg.get_field(3).split(',')]
                    a = param[0]
                    b = param[1]
                    c = param[2]
                    self.traj = traj_line(np.array([-100,100]), a, b, c)
                    self.traj.vector_field(self.traj.XYoff, self.map_gvf.area, \
                        self.s, self.kn, self.ke)
                # Ellipse
                if int(msg.get_field(1)) == 1 \
                        and self.timer_traj == self.timer_traj_lim:
                    self.s = int(msg.get_field(2))
                    param = [float(x) for x in msg.get_field(3).split(',')]
                    ex = param[0]
                    ey = param[1]
                    ea = param[2]
                    eb = param[3]
                    ealpha = param[4]
                    self.traj = traj_ellipse(np.array([ex, ey]), ealpha, ea, eb)
                    self.traj.vector_field(self.traj.XYoff, \
                        self.map_gvf.area, self.s, self.kn, self.ke)
                # Sin
                if int(msg.get_field(1)) == 2 \
                        and self.timer_traj == self.timer_traj_lim:
                    self.s = int(msg.get_field(2))
                    param = [float(x) for x in msg.get_field(3).split(',')]
                    a = param[0]
                    b = param[1]
                    alpha = param[2]
                    w = param[3]
                    off = param[4]
                    A = param[5]
                    self.traj = traj_sin(np.array([-100, 100]), a, b, alpha, \
                        w, off, A)
                    self.traj.vector_field(self.traj.XYoff, \
                        self.map_gvf.area, self.s, self.kn, self.ke)
                # Rate limiter: rebuild the trajectory only once every
                # (timer_traj_lim + 1) GVF messages.
                self.timer_traj = self.timer_traj + 1
                if self.timer_traj > self.timer_traj_lim:
                    self.timer_traj = 0

    def draw_gvf(self, XY, yaw, course):
        # Redraw only once a trajectory has been received.
        if self.traj is not None:
            self.map_gvf.draw(XY, yaw, course, self.traj)

    def OnClose(self, event):
        # Shut the Ivy bus down cleanly before destroying the window.
        self.interface.shutdown()
        self.Destroy()

    def OnRedrawTimer(self, event):
        self.draw_gvf(self.XY, self.yaw, self.course)
        self.canvas.draw()
class map2d:
    # Matplotlib-based 2D map: draws the desired trajectory, the guidance
    # vector field and the vehicle marker.
    def __init__(self, XYoff, area):
        # XYoff: map center [m]; area: displayed map area [m^2]
        # (the half-width of the view is 0.5*sqrt(area)).
        self.XYoff = XYoff
        self.area = area
        self.fig, self.ax = pl.subplots()
        self.ax.set_xlabel('South [m]')
        self.ax.set_ylabel('West [m]')
        self.ax.set_title('2D Map')
        self.ax.annotate('HOME', xy = (0, 0))
        self.ax.set_xlim(XYoff[0]-0.5*np.sqrt(area), XYoff[0]+0.5*np.sqrt(area))
        self.ax.set_ylim(XYoff[1]-0.5*np.sqrt(area), XYoff[1]+0.5*np.sqrt(area))
        self.ax.axis('equal')

    def vehicle_patch(self, XY, yaw):
        # Triangle patch representing the vehicle, rotated by yaw (rad).
        Rot = np.array([[np.cos(yaw), np.sin(yaw)],[-np.sin(yaw), np.cos(yaw)]])

        apex = 45*np.pi/180 # apex angle of the triangle (45 deg, in rad)
        # Triangle size scales with the map area.
        b = np.sqrt(2*(self.area/2000) / np.sin(apex))
        a = b*np.sin(apex/2)
        h = b*np.cos(apex/2)

        z1 = np.array([a/2, -h*0.3])
        z2 = np.array([-a/2, -h*0.3])
        z3 = np.array([0, h*0.6])

        z1 = Rot.dot(z1)
        z2 = Rot.dot(z2)
        z3 = Rot.dot(z3)

        verts = [(XY[0]+z1[0], XY[1]+z1[1]), \
                 (XY[0]+z2[0], XY[1]+z2[1]), \
                 (XY[0]+z3[0], XY[1]+z3[1]), \
                 (0, 0)]

        codes = [Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY]
        path = Path(verts, codes)

        return patches.PathPatch(path, facecolor='red', lw=2)

    def draw(self, XY, yaw, course, traj):
        # Full redraw: trajectory line, quiver field, vehicle patch,
        # course arrow and annotations.
        self.ax.clear()
        self.ax.plot(traj.traj_points[0, :], traj.traj_points[1, :])
        self.ax.quiver(traj.mapgrad_X, traj.mapgrad_Y, \
                traj.mapgrad_U, traj.mapgrad_V, color='Teal', \
                pivot='mid', width=0.002)
        self.ax.add_patch(self.vehicle_patch(XY, yaw)) # In radians
        apex = 45*np.pi/180 # same apex angle as vehicle_patch (rad)
        b = np.sqrt(2*(self.area/2000) / np.sin(apex))
        h = b*np.cos(apex/2)
        # Arrow showing the ground course (GPS), distinct from yaw.
        self.ax.arrow(XY[0], XY[1], \
                h*np.sin(course), h*np.cos(course),\
                head_width=5, head_length=10, fc='k', ec='k')
        self.ax.annotate('HOME', xy = (0, 0))
        if isinstance(traj, traj_ellipse):
            self.ax.annotate('ELLIPSE', xy = (traj.XYoff[0], traj.XYoff[1]))
            self.ax.plot(0, 0, 'kx', ms=10, mew=2)
            self.ax.plot(traj.XYoff[0], traj.XYoff[1], 'kx', ms=10, mew=2)
        elif isinstance(traj, traj_sin):
            self.ax.annotate('SIN', xy = (traj.XYoff[0], traj.XYoff[1]))
            self.ax.plot(0, 0, 'kx', ms=10, mew=2)
            self.ax.plot(traj.XYoff[0], traj.XYoff[1], 'kx', ms=10, mew=2)
        elif isinstance(traj, traj_line):
            self.ax.annotate('LINE', xy = (traj.XYoff[0], traj.XYoff[1]))
            self.ax.plot(0, 0, 'kx', ms=10, mew=2)
            self.ax.plot(traj.XYoff[0], traj.XYoff[1], 'kx', ms=10, mew=2)
        self.ax.set_xlabel('South [m]')
        self.ax.set_ylabel('West [m]')
        self.ax.set_title('2D Map')
        self.ax.set_xlim(self.XYoff[0]-0.5*np.sqrt(self.area), \
                self.XYoff[0]+0.5*np.sqrt(self.area))
        self.ax.set_ylim(self.XYoff[1]-0.5*np.sqrt(self.area), \
                self.XYoff[1]+0.5*np.sqrt(self.area))
        self.ax.axis('equal')
        self.ax.grid()
class traj_line:
    """Straight-line trajectory through (a, b) with heading alpha, plus
    its guidance vector field sampled on a 30x30 grid.

    Fixes vs. previous revision: removed a dead sampling loop in
    __init__ (it computed a value and discarded it) and the zero
    initialization of traj_points that was immediately overwritten;
    implemented param_point, which was a stub returning None.
    """

    def float_range(self, start, end, step):
        """Yield floats from start to end (inclusive) in steps of step.

        Kept for interface compatibility with the other traj_* classes.
        """
        while start <= end:
            yield start
            start += step

    def __init__(self, Xminmax, a, b, alpha):
        """Xminmax: [min, max] parameter window used by param_point;
        (a, b): a point on the line; alpha: line heading (rad).
        """
        self.XYoff = np.array([a, b])
        self.Xminmax = Xminmax
        self.a, self.b, self.alpha = a, b, alpha
        self.mapgrad_X = []
        self.mapgrad_Y = []
        self.mapgrad_U = []
        self.mapgrad_V = []
        # Sample the line over a fixed +/-200 m window for plotting.
        xtr = np.linspace(-200, 200, 400)
        xl = xtr*np.sin(self.alpha) + a
        yl = xtr*np.cos(self.alpha) + b
        self.traj_points = np.vstack((xl, yl))

    def param_point(self, t):
        """Return the (x, y) point at parameter t in [0, 1], with the
        line parameter spanning Xminmax.
        """
        x = (self.Xminmax[1] - self.Xminmax[0])*t + self.Xminmax[0]
        return np.array([x*np.sin(self.alpha) + self.a,
                         x*np.cos(self.alpha) + self.b])

    def vector_field(self, XYoff, area, s, kn, ke):
        """Populate mapgrad_* with the normalized guidance field.

        XYoff: grid center; area: map area [m^2]; s: direction sign;
        kn: unused (kept for interface parity); ke: convergence gain.
        """
        self.mapgrad_X, self.mapgrad_Y = np.mgrid[XYoff[0]-0.5*np.sqrt(area):
                                                  XYoff[0]+0.5*np.sqrt(area):30j,
                                                  XYoff[1]-0.5*np.sqrt(area):
                                                  XYoff[1]+0.5*np.sqrt(area):30j]
        # Unit normal of the line and the tangent (rotated by s).
        nx = -np.cos(self.alpha)
        ny = np.sin(self.alpha)
        tx = s*ny
        ty = -s*nx
        ke = 1e-2*ke  # scale the gain down for display purposes
        # Signed distance error to the line.
        e = (self.mapgrad_X-self.a)*nx + (self.mapgrad_Y-self.b)*ny
        self.mapgrad_U = tx - ke*e*nx
        self.mapgrad_V = ty - ke*e*ny
        norm = np.sqrt(self.mapgrad_U**2 + self.mapgrad_V**2)
        self.mapgrad_U = self.mapgrad_U/norm
        self.mapgrad_V = self.mapgrad_V/norm
class traj_ellipse:
    """Ellipse trajectory with semi-axes a, b, rotated by rot and
    centered at XYoff, plus its guidance vector field."""

    def float_range(self, start, end, step):
        """Yield floats from start while <= end, stepping by step."""
        current = start
        while current <= end:
            yield current
            current += step

    def __init__(self, XYoff, rot, a, b):
        self.XYoff = XYoff
        self.a, self.b = a, b
        self.rot = rot
        self.traj_points = np.zeros((2, 200))
        self.mapgrad_X = []
        self.mapgrad_Y = []
        self.mapgrad_U = []
        self.mapgrad_V = []
        # Sample 200 points along the full parameter range [0, 1).
        for col, t in enumerate(self.float_range(0, 1, 0.005)):
            self.traj_points[:, col] = self.param_point(t)

    def param_point(self, t):
        """Return the ellipse point at parameter t in [0, 1]."""
        angle = 2*np.pi*t
        ca, sa = np.cos(angle), np.sin(angle)
        cr, sr = np.cos(-self.rot), np.sin(-self.rot)
        return self.XYoff + np.array([self.a*ca*cr - self.b*sa*sr,
                                      self.a*ca*sr + self.b*sa*cr])

    def vector_field(self, XYoff, area, s, kn, ke):
        """Fill mapgrad_* with the normalized field on a 30x30 grid.

        XYoff: grid center; area: map area; s: circulation direction;
        kn: unused here; ke: convergence gain.
        """
        half = 0.5*np.sqrt(area)
        self.mapgrad_X, self.mapgrad_Y = np.mgrid[
            XYoff[0]-half:XYoff[0]+half:30j,
            XYoff[1]-half:XYoff[1]+half:30j]
        # Coordinates in the ellipse-aligned frame.
        dx = self.mapgrad_X - self.XYoff[0]
        dy = self.mapgrad_Y - self.XYoff[1]
        Xel = dx*np.cos(self.rot) - dy*np.sin(self.rot)
        Yel = dx*np.sin(self.rot) + dy*np.cos(self.rot)
        # Gradient of the implicit ellipse function (normal direction).
        nx = 2*Xel*np.cos(self.rot)/self.a**2 + 2*Yel*np.sin(self.rot)/self.b**2
        ny = -2*Xel*np.sin(self.rot)/self.a**2 + 2*Yel*np.cos(self.rot)/self.b**2
        tx, ty = s*ny, -s*nx
        # Level-set error: zero on the ellipse.
        e = (Xel/self.a)**2 + (Yel/self.b)**2 - 1
        u = tx - ke*e*nx
        v = ty - ke*e*ny
        norm = np.sqrt(u**2 + v**2)
        self.mapgrad_U = u/norm
        self.mapgrad_V = v/norm
class traj_sin:
    # Sinusoid trajectory: sine of amplitude A, spatial frequency w and
    # phase off, rotated by alpha and passing through (a, b).
    def float_range(self, start, end, step):
        # Inclusive float range generator (matches the other traj_*).
        while start <= end:
            yield start
            start += step

    def __init__(self, Xminmax, a, b, alpha, w, off, A):
        self.XYoff = np.array([a, b])
        self.Xminmax = Xminmax
        self.a, self.b, self.alpha, self.w, self.off, self.A = \
            a, b, alpha, w, off, A
        self.traj_points = np.zeros((2, 200))
        self.mapgrad_X = []
        self.mapgrad_Y = []
        self.mapgrad_U = []
        self.mapgrad_V = []

        # NOTE(review): this loop computes x and discards it — looks like
        # dead code left over from a parametric sampling approach.
        i = 0
        for t in self.float_range(0, 1, 0.005):
            x = (self.Xminmax[1]-self.Xminmax[0])*t + self.Xminmax[0]
            i = i + 1

        # Actual sampling: sine in local coords, then rotate + translate.
        xtr = np.linspace(-200, 200, 400)
        ytr = self.A*np.sin(self.w*xtr + self.off)
        xsin = -xtr*np.sin(self.alpha) + ytr*np.cos(self.alpha) + a
        ysin = xtr*np.cos(self.alpha) + ytr*np.sin(self.alpha) + b
        self.traj_points = np.vstack((xsin, ysin))

    def param_point(self, t):
        # NOTE(review): stub — returns None and is unused by the drawer.
        i = 0

    def vector_field(self, XYoff, area, s, kn, ke):
        # Populate mapgrad_* with the normalized field on a 30x30 grid.
        self.mapgrad_X, self.mapgrad_Y = np.mgrid[XYoff[0]-0.5*np.sqrt(area):\
                XYoff[0]+0.5*np.sqrt(area):30j, \
                XYoff[1]-0.5*np.sqrt(area):\
                XYoff[1]+0.5*np.sqrt(area):30j]

        # Grid points expressed in the sinusoid's local frame.
        xs = (self.mapgrad_X-self.XYoff[0])*np.sin(self.alpha) \
            - (self.mapgrad_Y-self.XYoff[1])*np.cos(self.alpha)
        ys = -(self.mapgrad_X-self.XYoff[0])*np.cos(self.alpha) \
            - (self.mapgrad_Y-self.XYoff[1])*np.sin(self.alpha)

        ang = self.w*xs + self.off

        # Normal of the level set; tangent is its s-signed rotation.
        nx = -np.cos(self.alpha) - \
            self.A*self.w*np.cos(ang)*np.sin(self.alpha)
        ny = -np.sin(self.alpha) + \
            self.A*self.w*np.cos(ang)*np.cos(self.alpha)

        tx = s*ny
        ty = -s*nx

        ke = 1e-2*ke  # scale the gain down for display purposes
        e = ys - self.A*np.sin(ang)

        self.mapgrad_U = tx -ke*e*nx
        self.mapgrad_V = ty -ke*e*ny

        norm = np.sqrt(self.mapgrad_U**2 + self.mapgrad_V**2)

        self.mapgrad_U = self.mapgrad_U/norm
self.mapgrad_V = self.mapgrad_V/norm | unknown | codeparrot/codeparrot-clean | ||
const {mutate} = require('shared-runtime');

// Compiler fixture: the IIFE assigns `x` into the captured variable `y`,
// so after the call `y` aliases `x`. The subsequent `mutate(y)` must
// therefore be modeled as potentially mutating `x` as well.
// NOTE(review): comments can appear in fixture snapshot output — confirm
// snapshots are regenerated if this lands in the fixtures directory.
function component(a) {
  let x = {a};
  let y = {};
  (function () {
    y = x;
  })();
  mutate(y);
  return y;
}
export const FIXTURE_ENTRYPOINT = {
fn: component,
params: ['foo'],
}; | javascript | github | https://github.com/facebook/react | compiler/packages/babel-plugin-react-compiler/src/__tests__/fixtures/compiler/capturing-func-simple-alias-iife.js |
from sympy.core.assumptions import StdFactKB
from sympy.core import S, Pow
from sympy.core.expr import AtomicExpr
from sympy.core.compatibility import range
from sympy import diff as df, sqrt, ImmutableMatrix as Matrix
from sympy.vector.coordsysrect import CoordSysCartesian
from sympy.vector.basisdependent import BasisDependent, \
BasisDependentAdd, BasisDependentMul, BasisDependentZero
from sympy.vector.dyadic import BaseDyadic, Dyadic, DyadicAdd
from sympy.core.compatibility import u
class Vector(BasisDependent):
    """
    Super class for all Vector classes.
    Ideally, neither this class nor any of its subclasses should be
    instantiated by the user.
    """

    is_Vector = True
    _op_priority = 12.0

    @property
    def components(self):
        """
        Returns the components of this vector in the form of a
        Python dictionary mapping BaseVector instances to the
        corresponding measure numbers.

        Examples
        ========

        >>> from sympy.vector import CoordSysCartesian
        >>> C = CoordSysCartesian('C')
        >>> v = 3*C.i + 4*C.j + 5*C.k
        >>> v.components
        {C.i: 3, C.j: 4, C.k: 5}

        """
        # The '_components' attribute is defined according to the
        # subclass of Vector the instance belongs to.
        return self._components

    def magnitude(self):
        """
        Returns the magnitude of this vector.
        """
        # '&' is overloaded as the dot product (see __and__ below).
        return sqrt(self & self)

    def normalize(self):
        """
        Returns the normalized version of this vector.
        """
        return self / self.magnitude()

    def dot(self, other):
        """
        Returns the dot product of this Vector, either with another
        Vector, or a Dyadic, or a Del operator.
        If 'other' is a Vector, returns the dot product scalar (Sympy
        expression).
        If 'other' is a Dyadic, the dot product is returned as a Vector.
        If 'other' is an instance of Del, returns the directional
        derivate operator as a Python function. If this function is
        applied to a scalar expression, it returns the directional
        derivative of the scalar field wrt this Vector.

        Parameters
        ==========

        other: Vector/Dyadic/Del
            The Vector or Dyadic we are dotting with, or a Del operator .

        Examples
        ========

        >>> from sympy.vector import CoordSysCartesian
        >>> C = CoordSysCartesian('C')
        >>> C.i.dot(C.j)
        0
        >>> C.i & C.i
        1
        >>> v = 3*C.i + 4*C.j + 5*C.k
        >>> v.dot(C.k)
        5
        >>> (C.i & C.delop)(C.x*C.y*C.z)
        C.y*C.z
        >>> d = C.i.outer(C.i)
        >>> C.i.dot(d)
        C.i

        """
        from sympy.vector.functions import express
        # Check special cases
        if isinstance(other, Dyadic):
            # vector . dyadic -> vector: dot self with each dyadic term's
            # left factor and keep the right factor.
            if isinstance(self, VectorZero):
                return Vector.zero
            outvec = Vector.zero
            for k, v in other.components.items():
                vect_dot = k.args[0].dot(self)
                outvec += vect_dot * v * k.args[1]
            return outvec
        # Local import to avoid a circular module dependency.
        from sympy.vector.deloperator import Del
        if not isinstance(other, Vector) and not isinstance(other, Del):
            raise TypeError(str(other) + " is not a vector, dyadic or " +
                            "del operator")

        # Check if the other is a del operator
        if isinstance(other, Del):
            def directional_derivative(field):
                # Directional derivative: sum of component * d(field)/dxi
                # in the Del operator's own coordinate system.
                field = express(field, other.system, variables = True)
                out = self.dot(other._i) * df(field, other._x)
                out += self.dot(other._j) * df(field, other._y)
                out += self.dot(other._k) * df(field, other._z)
                if out == 0 and isinstance(field, Vector):
                    out = Vector.zero
                return out
            return directional_derivative

        if isinstance(self, VectorZero) or isinstance(other, VectorZero):
            return S(0)

        # Express both vectors in other's system and sum the products
        # of matching base-vector components.
        v1 = express(self, other._sys)
        v2 = express(other, other._sys)
        dotproduct = S(0)
        for x in other._sys.base_vectors():
            dotproduct += (v1.components.get(x, 0) *
                           v2.components.get(x, 0))

        return dotproduct

    def __and__(self, other):
        return self.dot(other)

    __and__.__doc__ = dot.__doc__

    def cross(self, other):
        """
        Returns the cross product of this Vector with another Vector or
        Dyadic instance.
        The cross product is a Vector, if 'other' is a Vector. If 'other'
        is a Dyadic, this returns a Dyadic instance.

        Parameters
        ==========

        other: Vector/Dyadic
            The Vector or Dyadic we are crossing with.

        Examples
        ========

        >>> from sympy.vector import CoordSysCartesian
        >>> C = CoordSysCartesian('C')
        >>> C.i.cross(C.j)
        C.k
        >>> C.i ^ C.i
        0
        >>> v = 3*C.i + 4*C.j + 5*C.k
        >>> v ^ C.i
        5*C.j + (-4)*C.k
        >>> d = C.i.outer(C.i)
        >>> C.j.cross(d)
        (-1)*(C.k|C.i)

        """
        # Check special cases
        if isinstance(other, Dyadic):
            # vector x dyadic -> dyadic: cross with each term's left
            # factor, keep the right factor.
            if isinstance(self, VectorZero):
                return Dyadic.zero
            outdyad = Dyadic.zero
            for k, v in other.components.items():
                cross_product = self.cross(k.args[0])
                outer = cross_product.outer(k.args[1])
                outdyad += v * outer
            return outdyad
        elif not isinstance(other, Vector):
            raise TypeError(str(other) + " is not a vector")
        elif (isinstance(self, VectorZero) or
                isinstance(other, VectorZero)):
            return Vector.zero

        # Compute cross product
        def _det(mat):
            """This is needed as a little method for to find the determinant
            of a list in python.
            SymPy's Matrix won't take in Vector, so need a custom function.
            The user shouldn't be calling this.
            """
            return (mat[0][0] * (mat[1][1] * mat[2][2] - mat[1][2] *
                    mat[2][1])
                    + mat[0][1] * (mat[1][2] * mat[2][0] - mat[1][0] *
                    mat[2][2]) + mat[0][2] * (mat[1][0] * mat[2][1] -
                    mat[1][1] * mat[2][0]))

        # Cofactor expansion of |i j k; self; vect| per coordinate system.
        outvec = Vector.zero
        for system, vect in other.separate().items():
            tempi = system.i
            tempj = system.j
            tempk = system.k
            tempm = [[tempi, tempj, tempk],
                     [self & tempi, self & tempj, self & tempk],
                     [vect & tempi, vect & tempj, vect & tempk]]
            outvec += _det(tempm)

        return outvec

    def __xor__(self, other):
        return self.cross(other)

    __xor__.__doc__ = cross.__doc__

    def outer(self, other):
        """
        Returns the outer product of this vector with another, in the
        form of a Dyadic instance.

        Parameters
        ==========

        other : Vector
            The Vector with respect to which the outer product is to
            be computed.

        Examples
        ========

        >>> from sympy.vector import CoordSysCartesian
        >>> N = CoordSysCartesian('N')
        >>> N.i.outer(N.j)
        (N.i|N.j)

        """
        # Handle the special cases
        if not isinstance(other, Vector):
            raise TypeError("Invalid operand for outer product")
        elif (isinstance(self, VectorZero) or
                isinstance(other, VectorZero)):
            return Dyadic.zero

        # Iterate over components of both the vectors to generate
        # the required Dyadic instance
        args = []
        for k1, v1 in self.components.items():
            for k2, v2 in other.components.items():
                args.append((v1*v2) * BaseDyadic(k1, k2))

        return DyadicAdd(*args)

    def __or__(self, other):
        return self.outer(other)

    __or__.__doc__ = outer.__doc__

    def to_matrix(self, system):
        """
        Returns the matrix form of this vector with respect to the
        specified coordinate system.

        Parameters
        ==========

        system : CoordSysCartesian
            The system wrt which the matrix form is to be computed

        Examples
        ========

        >>> from sympy.vector import CoordSysCartesian
        >>> C = CoordSysCartesian('C')
        >>> from sympy.abc import a, b, c
        >>> v = a*C.i + b*C.j + c*C.k
        >>> v.to_matrix(C)
        Matrix([
        [a],
        [b],
        [c]])

        """
        return Matrix([self.dot(unit_vec) for unit_vec in
                       system.base_vectors()])

    def separate(self):
        """
        The constituents of this vector in different coordinate systems,
        as per its definition.

        Returns a dict mapping each CoordSysCartesian to the corresponding
        constituent Vector.

        Examples
        ========

        >>> from sympy.vector import CoordSysCartesian
        >>> R1 = CoordSysCartesian('R1')
        >>> R2 = CoordSysCartesian('R2')
        >>> v = R1.i + R2.i
        >>> v.separate() == {R1: R1.i, R2: R2.i}
        True

        """
        parts = {}
        for vect, measure in self.components.items():
            # Accumulate each base vector's contribution per system.
            parts[vect.system] = (parts.get(vect.system, Vector.zero) +
                                  vect*measure)
        return parts
class BaseVector(Vector, AtomicExpr):
    """
    Class to denote a base vector.

    Parameters
    ==========

    name : str
        Name by which this base vector is printed.
    index : int
        0, 1 or 2 — the axis of the base vector within its system.
    system : CoordSysCartesian
        The coordinate system this base vector belongs to.
    pretty_str : str
        Form used by the pretty printer.
    latex_str : str
        Form used by the LaTeX printer.
    """

    def __new__(cls, name, index, system, pretty_str, latex_str):
        # Verify arguments
        if index not in range(0, 3):
            raise ValueError("index must be 0, 1 or 2")
        if not isinstance(name, str):
            raise TypeError("name must be a valid string")
        if not isinstance(system, CoordSysCartesian):
            raise TypeError("system should be a CoordSysCartesian")
        # Initialize an object
        obj = super(BaseVector, cls).__new__(cls, S(index),
                                             system)
        # Assign important attributes
        obj._base_instance = obj
        obj._components = {obj: S(1)}
        obj._measure_number = S(1)
        obj._name = name
        obj._pretty_form = u(pretty_str)
        obj._latex_form = latex_str
        obj._system = system
        # Base vectors commute with scalars under multiplication.
        obj._assumptions = StdFactKB({'commutative': True})
        # This attr is used for re-expression to one of the systems
        # involved in the definition of the Vector. Applies to
        # VectorMul and VectorAdd too.
        obj._sys = system

        return obj

    @property
    def system(self):
        """ The coordinate system this base vector belongs to. """
        return self._system

    def __str__(self, printer=None):
        return self._name

    __repr__ = __str__
    _sympystr = __str__
class VectorAdd(BasisDependentAdd, Vector):
    """
    Class to denote sum of Vector instances.
    """

    def __new__(cls, *args, **options):
        return BasisDependentAdd.__new__(cls, *args, **options)

    def __str__(self, printer=None):
        # Render one term per base-vector component, grouped by
        # coordinate system (systems ordered by name, base vectors in
        # each system's canonical i, j, k order).
        terms = []
        for system, vect in sorted(self.separate().items(),
                                   key=lambda pair: pair[0].__str__()):
            for base in system.base_vectors():
                if base in vect.components:
                    terms.append((self.components[base]*base).__str__(printer))
        return " + ".join(terms)

    __repr__ = __str__
    _sympystr = __str__
class VectorMul(BasisDependentMul, Vector):
    """
    Class to denote products of scalars and BaseVectors.
    """

    def __new__(cls, *args, **options):
        return BasisDependentMul.__new__(cls, *args, **options)

    @property
    def base_vector(self):
        """ The BaseVector involved in the product. """
        return self._base_instance

    @property
    def measure_number(self):
        """ The scalar expression involved in the definition of
        this VectorMul.
        """
        return self._measure_number
class VectorZero(BasisDependentZero, Vector):
    """
    Class to denote a zero vector
    """

    _op_priority = 12.1
    _pretty_form = u('0')
    # Raw string: the backslashes are LaTeX markup. The previous
    # non-raw literal relied on '\m' and '\h' being invalid escapes
    # passed through verbatim, which warns on modern Python; the raw
    # string has the identical value.
    _latex_form = r'\mathbf{\hat{0}}'

    def __new__(cls):
        obj = BasisDependentZero.__new__(cls)
        return obj
def _vect_div(one, other):
    """Helper for division involving vectors.

    A vector may only be divided by a nonzero scalar; every other
    combination raises.
    """
    one_is_vector = isinstance(one, Vector)
    if one_is_vector and isinstance(other, Vector):
        raise TypeError("Cannot divide two vectors")
    if not one_is_vector:
        raise TypeError("Invalid division involving a vector")
    if other == S.Zero:
        raise ValueError("Cannot divide a vector by zero")
    return VectorMul(one, Pow(other, S.NegativeOne))
# Assign the class-level hooks that the BasisDependent machinery uses to
# construct results of vector arithmetic.
Vector._expr_type = Vector
Vector._mul_func = VectorMul
Vector._add_func = VectorAdd
Vector._zero_func = VectorZero
Vector._base_func = BaseVector
Vector._div_helper = _vect_div
# Singleton zero vector shared by all expressions.
Vector.zero = VectorZero()
"""
Audio Tag
---------
This implements a Liquid-style audio tag for Pelican,
based on the pelican video plugin [1]_
Syntax
------
{% audio url/to/audio [url/to/audio] [/url/to/audio] %}
Example
-------
{% audio http://example.tld/foo.mp3 http://example.tld/foo.ogg %}
Output
------
<audio controls><source src="http://example.tld/foo.mp3" type="audio/mpeg"><source src="http://example.tld/foo.ogg" type="audio/ogg">Your browser does not support the audio element.</audio>
[1] https://github.com/getpelican/pelican-plugins/blob/master/liquid_tags/video.py
"""
import os
import re
from .mdx_liquid_tags import LiquidTags
SYNTAX = "{% audio url/to/audio [url/to/audio] [/url/to/audio] %}"
# One to three whitespace-separated sources, each either an absolute
# path ("/...") or an http(s) URL; optional sources match as None.
AUDIO = re.compile(r'(/\S+|https?:\S+)(?:\s+(/\S+|https?:\S+))?(?:\s+(/\S+|https?:\S+))?')
# Maps a file extension to the MIME type emitted in the <source> element.
AUDIO_TYPEDICT = {'.mp3': 'audio/mpeg',
                  '.ogg': 'audio/ogg',
                  '.oga': 'audio/ogg',
                  '.opus': 'audio/ogg',
                  '.wav': 'audio/wav',
                  '.mp4': 'audio/mp4'}
def create_html(markup):
    """Build an HTML5 <audio> element from the tag markup.

    Parameters
    ----------
    markup : str
        The tag body: one to three audio file paths/URLs.

    Returns
    -------
    str
        An ``<audio controls>`` element with one ``<source>`` per file
        and fallback text for browsers without audio support.

    Raises
    ------
    ValueError
        If the markup does not match the expected syntax, or a file has
        an unrecognized extension.
    """
    match = AUDIO.search(markup)
    if not match:
        # Previously a failed match fell through to `return audio_out`,
        # raising an UnboundLocalError; report the syntax problem with
        # the ValueError that was clearly intended.
        raise ValueError("Error processing input, "
                         "expected syntax: {0}".format(SYNTAX))
    audio_out = '<audio controls>'
    # Optional regex groups are None when absent; keep only real sources.
    for audio_file in (g for g in match.groups() if g):
        ext = os.path.splitext(audio_file)[1]
        if ext not in AUDIO_TYPEDICT:
            raise ValueError("Unrecognized audio extension: "
                             "{0}".format(ext))
        # One <source> per file so the browser can pick a supported format.
        audio_out += '<source src="{}" type="{}">'.format(
            audio_file, AUDIO_TYPEDICT[ext])
    audio_out += 'Your browser does not support the audio element.'
    audio_out += '</audio>'
    return audio_out
@LiquidTags.register('audio')
def audio(preprocessor, tag, markup):
    """Liquid tag entry point: render ``{% audio ... %}`` markup to HTML."""
    return create_html(markup)
# ---------------------------------------------------
# This import allows image tag to be a Pelican plugin
from liquid_tags import register | unknown | codeparrot/codeparrot-clean | ||
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's --gtest_list_tests flag.
A user can ask Google Test to list all tests by specifying the
--gtest_list_tests flag. This script tests such functionality
by invoking googletest-list-tests-unittest_ (a program written with
Google Test) with different command line flags.
"""
import re
from googletest.test import gtest_test_utils
# Constants.
# The command line flag for enabling/disabling listing all tests.
LIST_TESTS_FLAG = 'gtest_list_tests'
# Path to the googletest-list-tests-unittest_ program, resolved at import
# time via the shared test-utils helper.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
    'googletest-list-tests-unittest_'
)
# The expected output when running googletest-list-tests-unittest_ with
# --gtest_list_tests
EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(
r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
Abc\.
Xyz
Def
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
TypedTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
TypedTest/1\. # TypeParam = int\s*\*( __ptr64)?
TestA
TestB
TypedTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
My/TypeParamTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
My/TypeParamTest/1\. # TypeParam = int\s*\*( __ptr64)?
TestA
TestB
My/TypeParamTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
MyInstantiation/ValueParamTest\.
TestA/0 # GetParam\(\) = one line
TestA/1 # GetParam\(\) = two\\nlines
TestA/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
TestB/0 # GetParam\(\) = one line
TestB/1 # GetParam\(\) = two\\nlines
TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
"""
)
# The expected output when running googletest-list-tests-unittest_ with
# --gtest_list_tests and --gtest_filter=Foo*.
EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(
r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
"""
)
# Utilities.
def Run(args):
  """Runs googletest-list-tests-unittest_ and returns the list of tests printed."""
  # capture_stderr=False: only stdout is collected into .output.
  return gtest_test_utils.Subprocess(
      [EXE_PATH] + args, capture_stderr=False
  ).output
# The unit test.
# The unit test.
class GTestListTestsUnitTest(gtest_test_utils.TestCase):
  """Tests using the --gtest_list_tests flag to list all tests."""
  def RunAndVerify(self, flag_value, expected_output_re, other_flag):
    """Run googletest-list-tests-unittest_ and verify the output.
    Runs googletest-list-tests-unittest_ and verifies that it prints
    the correct tests.
    Args:
      flag_value: value of the --gtest_list_tests flag; None if the flag
        should not be present.
      expected_output_re: regular expression that matches the expected output
        after running command; None means the output must NOT look like a
        test listing.
      other_flag: a different flag to be passed to command along with
        gtest_list_tests; None if the flag should not be present.
    """
    # Translate flag_value into the actual command-line flag and a
    # human-readable description used in assertion messages.
    if flag_value is None:
      flag = ''
      flag_expression = 'not set'
    elif flag_value == '0':
      flag = '--%s=0' % LIST_TESTS_FLAG
      flag_expression = '0'
    else:
      flag = '--%s' % LIST_TESTS_FLAG
      flag_expression = '1'
    args = [flag]
    if other_flag is not None:
      args += [other_flag]
    output = Run(args)
    if expected_output_re:
      # Listing was requested: stdout must match the expected listing.
      self.assertTrue(
          expected_output_re.match(output),
          'when %s is %s, the output of "%s" is "%s",\n'
          'which does not match regex "%s"'
          % (
              LIST_TESTS_FLAG,
              flag_expression,
              ' '.join(args),
              output,
              expected_output_re.pattern,
          ),
      )
    else:
      # Listing was NOT requested: stdout must not look like a listing.
      self.assertTrue(
          not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
          'when %s is %s, the output of "%s" is "%s"'
          % (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output),
      )
  def testDefaultBehavior(self):
    """Tests the behavior of the default mode."""
    self.RunAndVerify(flag_value=None, expected_output_re=None, other_flag=None)
  def testFlag(self):
    """Tests using the --gtest_list_tests flag."""
    self.RunAndVerify(flag_value='0', expected_output_re=None, other_flag=None)
    self.RunAndVerify(
        flag_value='1',
        expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
        other_flag=None,
    )
  def testOverrideNonFilterFlags(self):
    """Tests that --gtest_list_tests overrides the non-filter flags."""
    self.RunAndVerify(
        flag_value='1',
        expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
        other_flag='--gtest_break_on_failure',
    )
  def testWithFilterFlags(self):
    """Tests that --gtest_list_tests takes into account the filter flags.
    Tests that --gtest_list_tests takes into account the
    --gtest_filter flag.
    """
    self.RunAndVerify(
        flag_value='1',
        expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
        other_flag='--gtest_filter=Foo*',
    )
if __name__ == '__main__':
  gtest_test_utils.Main()
/*
* Copyright 2002-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans.factory.aot;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import org.junit.jupiter.api.Test;
import org.springframework.util.ReflectionUtils;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link AutowiredArgumentsCodeGenerator}.
*
* @author Phillip Webb
* @author Stephane Nicoll
*/
class AutowiredArgumentsCodeGeneratorTests {
	@Test
	void generateCodeWhenNoArguments() {
		Method method = ReflectionUtils.findMethod(UnambiguousMethods.class, "zero");
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				UnambiguousMethods.class, method);
		// A zero-arg method produces an empty argument list.
		assertThat(generator.generateCode(method.getParameterTypes())).hasToString("");
	}
	@Test
	void generatedCodeWhenSingleArgument() {
		Method method = ReflectionUtils.findMethod(UnambiguousMethods.class, "one",
				String.class);
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				UnambiguousMethods.class, method);
		assertThat(generator.generateCode(method.getParameterTypes()))
				.hasToString("args.get(0)");
	}
	@Test
	void generateCodeWhenMultipleArguments() {
		Method method = ReflectionUtils.findMethod(UnambiguousMethods.class, "three",
				String.class, Integer.class, Boolean.class);
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				UnambiguousMethods.class, method);
		assertThat(generator.generateCode(method.getParameterTypes()))
				.hasToString("args.get(0), args.get(1), args.get(2)");
	}
	@Test
	void generateCodeWhenMultipleArgumentsWithOffset() {
		// Inner (non-static) class constructors receive the enclosing instance
		// as an implicit first parameter, hence the offset of 1.
		Constructor<?> constructor = Outer.Nested.class.getDeclaredConstructors()[0];
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				Outer.Nested.class, constructor);
		assertThat(generator.generateCode(constructor.getParameterTypes(), 1))
				.hasToString("args.get(1), args.get(2)");
	}
	@Test
	void generateCodeWhenAmbiguousConstructor() throws Exception {
		// Overloads with the same arity force explicit type arguments in the
		// generated args.get(...) calls.
		Constructor<?> constructor = AmbiguousConstructors.class
				.getDeclaredConstructor(String.class, Integer.class);
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				AmbiguousConstructors.class, constructor);
		assertThat(generator.generateCode(constructor.getParameterTypes())).hasToString(
				"args.get(0, java.lang.String.class), args.get(1, java.lang.Integer.class)");
	}
	@Test
	void generateCodeWhenUnambiguousConstructor() throws Exception {
		Constructor<?> constructor = UnambiguousConstructors.class
				.getDeclaredConstructor(String.class, Integer.class);
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				UnambiguousConstructors.class, constructor);
		assertThat(generator.generateCode(constructor.getParameterTypes()))
				.hasToString("args.get(0), args.get(1)");
	}
	@Test
	void generateCodeWhenAmbiguousMethod() {
		Method method = ReflectionUtils.findMethod(AmbiguousMethods.class, "two",
				String.class, Integer.class);
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				AmbiguousMethods.class, method);
		assertThat(generator.generateCode(method.getParameterTypes())).hasToString(
				"args.get(0, java.lang.String.class), args.get(1, java.lang.Integer.class)");
	}
	@Test
	void generateCodeWhenAmbiguousSubclassMethod() {
		// The method itself is unambiguous on the superclass, but the target
		// class adds a same-arity overload, so types must be made explicit.
		Method method = ReflectionUtils.findMethod(UnambiguousMethods.class, "two",
				String.class, Integer.class);
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				AmbiguousSubclassMethods.class, method);
		assertThat(generator.generateCode(method.getParameterTypes())).hasToString(
				"args.get(0, java.lang.String.class), args.get(1, java.lang.Integer.class)");
	}
	@Test
	void generateCodeWhenUnambiguousMethod() {
		Method method = ReflectionUtils.findMethod(UnambiguousMethods.class, "two",
				String.class, Integer.class);
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				UnambiguousMethods.class, method);
		assertThat(generator.generateCode(method.getParameterTypes()))
				.hasToString("args.get(0), args.get(1)");
	}
	@Test
	void generateCodeWithCustomArgVariable() {
		Method method = ReflectionUtils.findMethod(UnambiguousMethods.class, "one",
				String.class);
		AutowiredArgumentsCodeGenerator generator = new AutowiredArgumentsCodeGenerator(
				UnambiguousMethods.class, method);
		assertThat(generator.generateCode(method.getParameterTypes(), 0, "objs"))
				.hasToString("objs.get(0)");
	}
	// Fixture: inner (non-static) class whose constructor has an implicit
	// enclosing-instance parameter.
	static class Outer {
		class Nested {
			Nested(String a, Integer b) {
			}
		}
	}
	// Fixture: one method per arity, so each is resolvable without types.
	static class UnambiguousMethods {
		void zero() {
		}
		void one(String a) {
		}
		void two(String a, Integer b) {
		}
		void three(String a, Integer b, Boolean c) {
		}
	}
	// Fixture: two same-arity overloads differing only in parameter order.
	static class AmbiguousMethods {
		void two(String a, Integer b) {
		}
		void two(Integer b, String a) {
		}
	}
	// Fixture: subclass that introduces an overload clashing with the
	// superclass method of the same name and arity.
	static class AmbiguousSubclassMethods extends UnambiguousMethods {
		void two(Integer a, String b) {
		}
	}
	// Fixture: one constructor per arity.
	static class UnambiguousConstructors {
		UnambiguousConstructors() {
		}
		UnambiguousConstructors(String a) {
		}
		UnambiguousConstructors(String a, Integer b) {
		}
	}
	// Fixture: two same-arity constructors differing only in parameter order.
	static class AmbiguousConstructors {
		AmbiguousConstructors(String a, Integer b) {
		}
		AmbiguousConstructors(Integer b, String a) {
		}
	}
}
##
# Copyright 2012-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild easyblock for installing Java.
@author: Jens Timmerman (Ghent University)
"""
from easybuild.easyblocks.generic.packedbinary import PackedBinary
class EB_Java(PackedBinary):
    """Support for installing Java as a packed binary file (.tar.gz).
    Uses the PackedBinary easyblock and sets some extra environment paths.
    """
    def make_module_extra(self):
        """
        Set $JAVA_HOME to the installation directory, on top of the module
        text generated by PackedBinary.
        """
        txt = PackedBinary.make_module_extra(self)
        # Java tooling conventionally locates the JDK/JRE via $JAVA_HOME.
        txt += self.module_generator.set_environment('JAVA_HOME', self.installdir)
        return txt
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none,
parse_iso8601,
)
class TriluliluIE(InfoExtractor):
    """Extractor for trilulilu.ro video and audio pages."""
    _VALID_URL = r'https?://(?:(?:www|m)\.)?trilulilu\.ro/(?:[^/]+/)?(?P<id>[^/#\?]+)'
    _TESTS = [{
        'url': 'http://www.trilulilu.ro/big-buck-bunny-1',
        'md5': '68da087b676a6196a413549212f60cc6',
        'info_dict': {
            'id': 'ae2899e124140b',
            'ext': 'mp4',
            'title': 'Big Buck Bunny',
            'description': ':) pentru copilul din noi',
            'uploader_id': 'chipy',
            'upload_date': '20120304',
            'timestamp': 1330830647,
            'uploader': 'chipy',
            'view_count': int,
            'like_count': int,
            'comment_count': int,
        },
    }, {
        'url': 'http://www.trilulilu.ro/adena-ft-morreti-inocenta',
        'md5': '929dfb8729dc71750463af88bbbbf4a4',
        'info_dict': {
            'id': 'f299710e3c91c5',
            'ext': 'mp4',
            'title': 'Adena ft. Morreti - Inocenta',
            'description': 'pop music',
            'uploader_id': 'VEVOmixt',
            'upload_date': '20151204',
            'uploader': 'VEVOmixt',
            'timestamp': 1449187937,
            'view_count': int,
            'like_count': int,
            'comment_count': int,
        },
    }]

    def _real_extract(self, url):
        display_id = self._match_id(url)
        media_info = self._download_json('http://m.trilulilu.ro/%s?format=json' % display_id, display_id)

        # Map API error flags to user-facing failures; adult content is
        # still extractable but gets an age limit.
        age_limit = 0
        errors = media_info.get('errors', {})
        if errors.get('friends'):
            raise ExtractorError('This video is private.', expected=True)
        elif errors.get('geoblock'):
            raise ExtractorError('This video is not available in your country.', expected=True)
        elif errors.get('xxx_unlogged'):
            age_limit = 18

        media_class = media_info.get('class')
        if media_class not in ('video', 'audio'):
            raise ExtractorError('not a video or an audio')

        user = media_info.get('user', {})

        thumbnail = media_info.get('cover_url')
        if thumbnail:
            # BUG FIX: str.format returns a new string (strings are
            # immutable); the previous code discarded the result, leaving
            # any {width}/{height} placeholders in the URL unexpanded.
            thumbnail = thumbnail.format(width='1600', height='1200')

        # TODO: get correct ext for audio files
        stream_type = media_info.get('stream_type')
        formats = [{
            'url': media_info['href'],
            'ext': stream_type,
        }]
        if media_info.get('is_hd'):
            formats.append({
                'format_id': 'hd',
                'url': media_info['hrefhd'],
                'ext': stream_type,
            })
        if media_class == 'audio':
            formats[0]['vcodec'] = 'none'
        else:
            formats[0]['format_id'] = 'sd'

        return {
            'id': media_info['identifier'].split('|')[1],
            'display_id': display_id,
            'formats': formats,
            'title': media_info['title'],
            'description': media_info.get('description'),
            'thumbnail': thumbnail,
            'uploader_id': user.get('username'),
            'uploader': user.get('fullname'),
            'timestamp': parse_iso8601(media_info.get('published'), ' '),
            'duration': int_or_none(media_info.get('duration')),
            'view_count': int_or_none(media_info.get('count_views')),
            'like_count': int_or_none(media_info.get('count_likes')),
            'comment_count': int_or_none(media_info.get('count_comments')),
            'age_limit': age_limit,
        }
/*
* Copyright 2012-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.configurationprocessor.fieldvalues.javac;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
/**
* Reflection based access to {@code com.sun.source.tree.ExpressionTree}.
*
* @author Phillip Webb
* @author Stephane Nicoll
*/
class ExpressionTree extends ReflectionWrapper {
	// Reflective handles to the com.sun.source tree types, resolved once at
	// construction so each accessor below is a cheap invoke.
	private final Class<?> literalTreeType = findClass("com.sun.source.tree.LiteralTree");
	private final Method literalValueMethod = findMethod(this.literalTreeType, "getValue");
	private final Class<?> methodInvocationTreeType = findClass("com.sun.source.tree.MethodInvocationTree");
	private final Method methodInvocationArgumentsMethod = findMethod(this.methodInvocationTreeType, "getArguments");
	private final Class<?> memberSelectTreeType = findClass("com.sun.source.tree.MemberSelectTree");
	private final Method memberSelectTreeExpressionMethod = findMethod(this.memberSelectTreeType, "getExpression");
	private final Method memberSelectTreeIdentifierMethod = findMethod(this.memberSelectTreeType, "getIdentifier");
	private final Class<?> newArrayTreeType = findClass("com.sun.source.tree.NewArrayTree");
	private final Method arrayValueMethod = findMethod(this.newArrayTreeType, "getInitializers");
	ExpressionTree(Object instance) {
		super("com.sun.source.tree.ExpressionTree", instance);
	}
	// Name of the tree node kind, via the underlying enum's toString().
	String getKind() throws Exception {
		return findMethod("getKind").invoke(getInstance()).toString();
	}
	// Value of a literal node, or null when this node is not a LiteralTree.
	Object getLiteralValue() throws Exception {
		if (this.literalTreeType.isAssignableFrom(getInstance().getClass())) {
			return this.literalValueMethod.invoke(getInstance());
		}
		return null;
	}
	// Literal value of the single argument of a method invocation (e.g. a
	// factory call); null when the node is not a one-argument invocation.
	Object getFactoryValue() throws Exception {
		if (this.methodInvocationTreeType.isAssignableFrom(getInstance().getClass())) {
			List<?> arguments = (List<?>) this.methodInvocationArgumentsMethod.invoke(getInstance());
			if (arguments.size() == 1) {
				return new ExpressionTree(arguments.get(0)).getLiteralValue();
			}
		}
		return null;
	}
	// For a member select ("expression.identifier"), returns both parts;
	// null when the node is not a MemberSelectTree or a part is missing.
	Member getSelectedMember() throws Exception {
		if (this.memberSelectTreeType.isAssignableFrom(getInstance().getClass())) {
			String expression = this.memberSelectTreeExpressionMethod.invoke(getInstance()).toString();
			String identifier = this.memberSelectTreeIdentifierMethod.invoke(getInstance()).toString();
			if (expression != null && identifier != null) {
				return new Member(expression, identifier);
			}
		}
		return null;
	}
	// Element expressions of an array creation; empty list when there are no
	// initializers, null when the node is not a NewArrayTree.
	List<? extends ExpressionTree> getArrayExpression() throws Exception {
		if (this.newArrayTreeType.isAssignableFrom(getInstance().getClass())) {
			List<?> elements = (List<?>) this.arrayValueMethod.invoke(getInstance());
			List<ExpressionTree> result = new ArrayList<>();
			if (elements == null) {
				return result;
			}
			for (Object element : elements) {
				result.add(new ExpressionTree(element));
			}
			return result;
		}
		return null;
	}
	// Simple holder for the two parts of a member-select expression.
	record Member(String expression, String identifier) {
	}
}
##########################################################################
#
# Copyright (c) 2010, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import sys, os
from IECore import *
from math import pow
class TestEXRWriter(unittest.TestCase):
	"""Tests writing OpenEXR images through IECore's EXRImageWriter."""
	def __verifyImageRGB( self, imgNew, imgOrig, maxError = 0.002 ):
		"""Assert imgNew has imgOrig's channels and matches it within maxError."""
		self.assertEqual( type(imgNew), ImagePrimitive )
		if "R" in imgOrig :
			self.assert_( "R" in imgNew )
		if "G" in imgOrig :
			self.assert_( "G" in imgNew )
		if "B" in imgOrig :
			self.assert_( "B" in imgNew )
		if "A" in imgOrig :
			self.assert_( "A" in imgNew )
		if "Y" in imgOrig :
			self.assert_( "Y" in imgNew )
		op = ImageDiffOp()
		res = op(
			imageA = imgNew,
			imageB = imgOrig,
			maxError = maxError,
			skipMissingChannels = True
		)
		self.failIf( res.value )
	def __makeFloatImage( self, dataWindow, displayWindow, withAlpha = False, dataType = FloatVectorData ) :
		"""Build a test image: R ramps left-to-right, G top-to-bottom, B zero,
		optional constant alpha of 0.5."""
		img = ImagePrimitive( dataWindow, displayWindow )
		w = dataWindow.max.x - dataWindow.min.x + 1
		h = dataWindow.max.y - dataWindow.min.y + 1
		area = w * h
		R = dataType( area )
		G = dataType( area )
		B = dataType( area )
		if withAlpha:
			A = dataType( area )
		offset = 0
		for y in range( 0, h ) :
			for x in range( 0, w ) :
				R[offset] = float(x) / (w - 1)
				G[offset] = float(y) / (h - 1)
				B[offset] = 0.0
				if withAlpha:
					A[offset] = 0.5
				offset = offset + 1
		img["R"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Vertex, R )
		img["G"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Vertex, G )
		img["B"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Vertex, B )
		if withAlpha:
			img["A"] = PrimitiveVariable( PrimitiveVariable.Interpolation.Vertex, A )
		return img
	def testWrite( self ) :
		"""Round-trip a generated float image through write and read."""
		displayWindow = Box2i(
			V2i( 0, 0 ),
			V2i( 99, 99 )
		)
		dataWindow = displayWindow
		for dataType in [ FloatVectorData ] :
			# setUp/tearDown are invoked manually per data type so each
			# iteration starts without a stale output file.
			self.setUp()
			imgOrig = self.__makeFloatImage( dataWindow, displayWindow, dataType = dataType )
			w = Writer.create( imgOrig, "test/IECore/data/exrFiles/output.exr" )
			self.assertEqual( type(w), EXRImageWriter )
			w.write()
			self.assert_( os.path.exists( "test/IECore/data/exrFiles/output.exr" ) )
			# Now we've written the image, verify the rgb
			r = Reader.create( "test/IECore/data/exrFiles/output.exr" )
			imgNew = r.read()
			self.assertEqual( type(imgNew['R'].data), FloatVectorData )
			# NOTE(review): argument order here is (imgOrig, imgNew), the
			# reverse of other call sites — harmless while the comparison is
			# symmetric, but worth confirming.
			self.__verifyImageRGB( imgOrig, imgNew )
			self.tearDown()
	def testColorConversion(self):
		"""Write a read-back EXR and check the result is byte-equal."""
		r = Reader.create( "test/IECore/data/exrFiles/ramp.exr" )
		imgOrig = r.read()
		self.assertEqual( type(imgOrig), ImagePrimitive )
		w = Writer.create( imgOrig, "test/IECore/data/exrFiles/output.exr" )
		self.assertEqual( type(w), EXRImageWriter )
		w.write()
		w = None
		r = Reader.create( "test/IECore/data/exrFiles/output.exr" )
		imgNew = r.read()
		self.assertEqual( type(imgNew), ImagePrimitive )
		self.assertEqual( imgOrig, imgNew )
	def testWriteIncomplete( self ) :
		"""Writing must fail when channel data does not fill the data window."""
		displayWindow = Box2i(
			V2i( 0, 0 ),
			V2i( 99, 99 )
		)
		dataWindow = displayWindow
		imgOrig = self.__makeFloatImage( dataWindow, displayWindow )
		# We don't have enough data to fill this dataWindow
		imgOrig.dataWindow = Box2i(
			V2i( 0, 0 ),
			V2i( 199, 199 )
		)
		self.failIf( imgOrig.arePrimitiveVariablesValid() )
		w = Writer.create( imgOrig, "test/IECore/data/exrFiles/output.exr" )
		self.assertEqual( type(w), EXRImageWriter )
		self.assertRaises( RuntimeError, w.write )
		self.failIf( os.path.exists( "test/IECore/data/exrFiles/output.exr" ) )
	def testWindowWrite( self ) :
		"""Round-trip an image whose display window exceeds its data window."""
		dataWindow = Box2i(
			V2i( 0, 0 ),
			V2i( 99, 99 )
		)
		imgOrig = self.__makeFloatImage( dataWindow, dataWindow )
		imgOrig.displayWindow = Box2i(
			V2i( -20, -20 ),
			V2i( 199, 199 )
		)
		w = Writer.create( imgOrig, "test/IECore/data/exrFiles/output.exr" )
		self.assertEqual( type(w), EXRImageWriter )
		w.write()
		self.assert_( os.path.exists( "test/IECore/data/exrFiles/output.exr" ) )
		r = Reader.create( "test/IECore/data/exrFiles/output.exr" )
		imgNew = r.read()
		self.__verifyImageRGB( imgNew, imgOrig )
	def testOversizeDataWindow( self ) :
		"""Round-trip an image whose data window exceeds its display window,
		comparing against a stored expected result."""
		r = Reader.create( "test/IECore/data/exrFiles/oversizeDataWindow.exr" )
		img = r.read()
		w = Writer.create( img, "test/IECore/data/exrFiles/output.exr" )
		self.assertEqual( type(w), EXRImageWriter )
		w.write()
		r = Reader.create( "test/IECore/data/exrFiles/output.exr" )
		imgNew = r.read()
		r = Reader.create( "test/IECore/data/expectedResults/oversizeDataWindow.dpx" )
		imgExpected = r.read()
		self.__verifyImageRGB( imgNew, imgExpected )
	def testCompressionParameter( self ):
		"""The 'compression' parameter accepts the 'zip' preset on both a
		created writer and a default-constructed one."""
		r = Reader.create( "test/IECore/data/exrFiles/oversizeDataWindow.exr" )
		img = r.read()
		w = Writer.create( img, "test/IECore/data/exrFiles/output.exr" )
		w['compression'].setValue( w['compression'].getPresets()['zip'] )
		w.write()
		w = EXRImageWriter()
		w['compression'].setValue( w['compression'].getPresets()['zip'] )
	def testBlindDataToHeader( self ) :
		"""Supported blind-data types survive a write/read round trip as EXR
		header attributes; unsupported vector types are silently dropped."""
		displayWindow = Box2i(
			V2i( 0, 0 ),
			V2i( 9, 9 )
		)
		dataWindow = displayWindow
		headerValues = {
			"one": IntData( 1 ),
			"two": FloatData( 2 ),
			"three": DoubleData( 3 ),
			"four" : {
				"five": V2fData( V2f(5) ),
				"six": V2iData( V2i(6) ),
				"seven": V3fData( V3f(7) ),
				"eight": V3iData( V3i(8) ),
				"nine": {
					"ten": Box2iData( Box2i( V2i(0), V2i(10) ) ),
					"eleven": Box2fData( Box2f( V2f(0), V2f(11) ) ),
					"twelve": M33fData( M33f(12) ),
					"thirteen": M44fData( M44f(13) ),
				},
				"fourteen": StringData( "fourteen" ),
			}
		}
		imgOrig = self.__makeFloatImage( dataWindow, dataWindow )
		imgOrig.blindData().update( headerValues.copy() )
		# now add some unsupported types
		imgOrig.blindData()['notSupported1'] = FloatVectorData( [ 1,2,3] )
		imgOrig.blindData()['four']['notSupported2'] = DoubleVectorData( [1,2,3] )
		w = Writer.create( imgOrig, "test/IECore/data/exrFiles/output.exr" )
		self.assertEqual( type(w), EXRImageWriter )
		w.write()
		self.assert_( os.path.exists( "test/IECore/data/exrFiles/output.exr" ) )
		r = Reader.create( "test/IECore/data/exrFiles/output.exr" )
		imgNew = r.read()
		imgBlindData = imgNew.blindData()
		# eliminate default header info that comes on EXR..
		del imgBlindData['screenWindowCenter']
		del imgBlindData['displayWindow']
		del imgBlindData['dataWindow']
		del imgBlindData['pixelAspectRatio']
		del imgBlindData['screenWindowWidth']
		self.assertEqual( imgBlindData, CompoundData( headerValues ) )
	def setUp( self ) :
		"""Remove any stale output file before each test."""
		if os.path.isfile( "test/IECore/data/exrFiles/output.exr") :
			os.remove( "test/IECore/data/exrFiles/output.exr" )
	def tearDown( self ) :
		"""Remove the output file produced by a test."""
		if os.path.isfile( "test/IECore/data/exrFiles/output.exr") :
			os.remove( "test/IECore/data/exrFiles/output.exr" )
if __name__ == "__main__":
	unittest.main()
from __future__ import division
import sys
from optparse import OptionParser
from itertools import islice
import gzip
from collections import defaultdict
from math import floor, sqrt, log10
import datetime
from utils import *
from cluster import Cluster, LogLine
from leaf import Leaf
def get_clusters(
        dataset_iterator,
        batch_size,
        skip_count,
        threshold,
        MIN_SAMPLES_FOR_SPLIT):
    """Incrementally cluster log lines.

    Each line (with more than ``skip_count`` tokens) is added to every
    existing cluster it matches at the given similarity ``threshold``;
    if none match, a new cluster is started. After every ``batch_size``
    lines, leafs that have grown beyond ``MIN_SAMPLES_FOR_SPLIT`` are
    split. Returns the list of clusters.
    """
    clusters = []
    batch_count = 0
    for log_line in dataset_iterator:
        tokens = log_line.text.split()
        if len(tokens) > skip_count:
            matched = False
            for cluster in clusters:
                if cluster.check_for_match(tokens, threshold, skip_count):
                    cluster.add_to_leaf(log_line, threshold, skip_count)
                    matched = True
            if not matched:
                # Nothing similar enough exists yet: start a new cluster.
                clusters.append(Cluster(Leaf(log_line)))
        batch_count += 1
        if batch_count > batch_size:
            # End of batch: split any leaf that has grown too large.
            for cluster in clusters:
                if cluster.get_num_lines() > MIN_SAMPLES_FOR_SPLIT:
                    cluster.split_leaf(
                        MIN_SAMPLES_FOR_SPLIT,
                        skip_count,
                        min_word_pos_entropy=.0001,
                        min_percent=.1)
            batch_count = 0
    return clusters
def main():
    """Parse command-line options, cluster the log file and print the groups.

    Reads a (possibly gzip-compressed) log file whose lines start with a
    numeric timestamp, clusters the lines with get_clusters(), and prints
    every cluster's groups with their text.
    """
    parser = OptionParser()
    parser.add_option("-f", "--file", dest="filename",
                      help="Log file to read")
    parser.add_option("--skip_count", dest='skip_count', type=int, default=0,
                      help="Number of tokens to skip at start of the line")
    parser.add_option(
        "--threshold",
        dest='threshold',
        type=float,
        default=0.75,
        help="Threshold for cosine similarity")
    parser.add_option(
        "-l",
        '--num_lines',
        dest='num_lines',
        type=int,
        default=-1,
        help="Number of lines to read from log file (default:-1 Whole file)")
    parser.add_option(
        '--min_samples_for_split',
        dest='min_samples_for_split',
        type=int,
        default=25)
    parser.add_option(
        '--batch_size',
        dest='batch_size',
        type=int,
        default=5000)
    (options, args) = parser.parse_args()

    # Fail with a usage message instead of an AttributeError when -f is missing.
    if not options.filename:
        parser.error("a log file must be supplied with -f/--file")

    num_msgs = options.batch_size
    skip_count = options.skip_count
    threshold = options.threshold
    MIN_SAMPLES_FOR_SPLIT = options.min_samples_for_split

    # gzip.open transparently decompresses; both handles are line-readable.
    if options.filename.endswith('.gz'):
        fIn = gzip.open(options.filename)
    else:
        fIn = open(options.filename)

    def dataset_iterator(fIn, num_lines=options.num_lines):
        """Yield LogLine(timestamp, text) records, up to num_lines (-1 = all)."""
        lines_read = 0
        while num_lines == -1 or lines_read < num_lines:
            lines_read += 1
            line = fIn.readline()
            if len(line) == 0:
                break
            # First whitespace-separated token is a numeric timestamp; the
            # remainder of the line is the message body.  Malformed lines
            # raise, exactly as the previous `except: raise` wrapper did.
            ls = line.split(' ', 1)
            ts = float(ls[0])
            yield LogLine(ts, ls[1].rstrip())

    try:
        clusters = get_clusters(
            dataset_iterator(fIn),
            num_msgs,
            skip_count,
            threshold,
            MIN_SAMPLES_FOR_SPLIT)
    finally:
        # The iterator may not exhaust the file; close the handle explicitly
        # instead of leaking it.
        fIn.close()

    index = 0
    for cluster in clusters:
        index = cluster.print_groups(index, include_text=True)
if __name__ == "__main__":
main() | unknown | codeparrot/codeparrot-clean | ||
import { prettyDOM } from "@testing-library/react";
export default function getHtml(container: HTMLElement) {
return prettyDOM(container, undefined, {
highlight: false,
});
} | typescript | github | https://github.com/remix-run/react-router | packages/react-router/__tests__/utils/getHtml.ts |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-26 14:35
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Schema migration: introduces FlowPathRecentMessage, which records
    # messages observed travelling along a flow segment (from_uuid -> to_uuid).

    dependencies = [
        ('flows', '0097_interrupt_runs_for_archived_flows'),
    ]

    operations = [
        migrations.CreateModel(
            name='FlowPathRecentMessage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('from_uuid', models.UUIDField(help_text='Which flow node they came from')),
                ('to_uuid', models.UUIDField(help_text='Which flow node they went to')),
                ('text', models.CharField(max_length=640)),
                ('created_on', models.DateTimeField(help_text='When the message arrived')),
                # Deleting a run cascades to its recent messages.
                ('run', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recent_messages', to='flows.FlowRun')),
            ],
        ),
    ]
/* Copyright 2006-2008 Joaquin M Lopez Munoz.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*
* See http://www.boost.org/libs/flyweight for library home page.
*/
#ifndef BOOST_FLYWEIGHT_KEY_VALUE_FWD_HPP
#define BOOST_FLYWEIGHT_KEY_VALUE_FWD_HPP
#if defined(_MSC_VER)
#pragma once
#endif
namespace boost{
namespace flyweights{
struct no_key_from_value;
template<typename Key,typename Value,typename KeyFromValue=no_key_from_value>
struct key_value;
} /* namespace flyweights */
} /* namespace boost */
#endif | unknown | github | https://github.com/mysql/mysql-server | extra/boost/boost_1_87_0/boost/flyweight/key_value_fwd.hpp |
import logging
log = logging.getLogger(__name__)
from nose.tools import eq_
from numpy import linspace
from numpy.testing import assert_array_almost_equal
from unittest import main
from spacq.interface.units import Quantity
from spacq.tests.tool.box import AssertHandler, DeviceServerTestCase
from ... import awg5014b
class AWG5014BTest(DeviceServerTestCase):
    # Integration tests for the Tektronix AWG5014B arbitrary waveform
    # generator driver; they require a device reachable through
    # DeviceServerTestCase.

    def obtain_device(self):
        # Resolve the concrete AWG5014B implementation for this test run.
        return DeviceServerTestCase.obtain_device(self, impl=awg5014b.AWG5014B,
                manufacturer='Tektronix', model='AWG5014B')

    def testMarkerValues(self):
        """
        Set the various marker values.
        """
        awg = self.obtain_device()
        awg.reset()

        awg.channels[1].markers[1].delay = Quantity(1, 'ns')
        awg.channels[1].markers[1].high = Quantity(0.5, 'V')
        awg.channels[1].markers[2].delay = Quantity(0.1, 'ns')
        awg.channels[2].markers[1].low = Quantity(-100, 'mV')

        # Explicitly-set values come back in SI units; untouched markers
        # report the post-reset defaults asserted below (delay 0, high 1,
        # low 0).
        eq_(awg.channels[1].markers[1].delay.value, 1e-9)
        eq_(awg.channels[1].markers[2].delay.value, 0.1e-9)
        eq_(awg.channels[2].markers[1].delay.value, 0)
        eq_(awg.channels[2].markers[2].delay.value, 0)
        eq_(awg.channels[1].markers[1].high.value, 0.5)
        eq_(awg.channels[1].markers[2].high.value, 1)
        eq_(awg.channels[2].markers[1].high.value, 1)
        eq_(awg.channels[2].markers[2].high.value, 1)
        eq_(awg.channels[1].markers[1].low.value, 0)
        eq_(awg.channels[1].markers[2].low.value, 0)
        eq_(awg.channels[2].markers[1].low.value, -0.1)
        eq_(awg.channels[2].markers[2].low.value, 0)

    def testScenario(self):
        """
        Run through a simple scenario.

        Note: Verification should also be done manually based on the AWG output.
        """
        log = AssertHandler()

        awg = self.obtain_device()
        awg.reset()

        assert not awg.enabled

        # Setup
        existing_waveforms = awg.waveform_names

        data1 = linspace(-1.0, 1.0, 21)
        data2 = linspace(1.0, -1.0, 21)

        log.flush()
        # Marker 3 data is expected to be dropped with a warning, not to fail.
        awg.channels[1].set_waveform(data1, {
            1: ([1, 1, 1, 0, 0] * len(data1))[:len(data1)],
            2: ([0, 0, 0, 1, 1] * len(data1))[:len(data1)],
            3: [1, 2, 3, 4],
        })
        log.assert_logged('warning', 'marker 3 ignored: \[1, 2, 3, 4\]')

        awg.channels[2].set_waveform(data2, name='Test 2')

        awg.sampling_rate = Quantity(200, 'MHz')

        awg.channels[1].enabled = True
        awg.channels[1].amplitude = Quantity(0.8, 'V')
        awg.channels[2].enabled = True
        awg.channels[2].amplitude = Quantity(0.4, 'V')
        awg.channels[3].waveform_name = 'Test 2'
        awg.channels[3].enabled = True
        awg.channels[4].waveform_name = 'Channel 1'
        # Clearing channel 3's waveform; it should then report '' below.
        del awg.channels[3].waveform_name

        awg.run_mode = 'triggered'
        awg.enabled = True

        # Verify
        eq_(awg.sampling_rate.value, 2e8)
        eq_(awg.waveform_names, existing_waveforms + ['Channel 1', 'Test 2'])

        assert_array_almost_equal(awg.get_waveform('Channel 1'), data1, 4)
        eq_(awg.channels[1].amplitude.value, 0.8)
        assert_array_almost_equal(awg.get_waveform('Test 2'), data2, 4)
        eq_(awg.channels[2].amplitude.value, 0.4)

        for ch in [1, 2]:
            eq_(awg.channels[ch].enabled, True)
        for ch in [3, 4]:
            eq_(awg.channels[ch].enabled, False)

        for ch in [1, 4]:
            eq_(awg.channels[ch].waveform_name, 'Channel 1')
        eq_(awg.channels[2].waveform_name, 'Test 2')
        eq_(awg.channels[3].waveform_name, '')

        eq_(awg.run_mode, 'triggered')

        assert awg.waiting_for_trigger
        assert awg.enabled

        # In triggered mode a single trigger leaves the device armed again.
        awg.trigger()

        assert awg.waiting_for_trigger
        assert awg.enabled

        # Continuous mode no longer waits for triggers.
        awg.run_mode = 'continuous'

        assert not awg.waiting_for_trigger
        assert awg.enabled
if __name__ == '__main__':
main() | unknown | codeparrot/codeparrot-clean | ||
from langchain_core.messages import AIMessage, AIMessageChunk, HumanMessage
from langchain_core.messages import content as types
def test_convert_to_v1_from_bedrock() -> None:
message = AIMessage(
[
{"type": "thinking", "thinking": "foo", "signature": "foo_signature"},
{"type": "text", "text": "Let's call a tool."},
{
"type": "tool_use",
"id": "abc_123",
"name": "get_weather",
"input": {"location": "San Francisco"},
},
{
"type": "text",
"text": "It's sunny.",
"citations": [
{
"type": "search_result_location",
"cited_text": "The weather is sunny.",
"source": "source_123",
"title": "Document Title",
"search_result_index": 1,
"start_block_index": 0,
"end_block_index": 2,
},
{"bar": "baz"},
],
},
{"type": "something_else", "foo": "bar"},
],
tool_calls=[
{
"type": "tool_call",
"id": "abc_123",
"name": "get_weather",
"args": {"location": "San Francisco"},
},
{
"type": "tool_call",
"id": "abc_234",
"name": "another_tool",
"args": {"arg_1": "value_1"},
},
],
response_metadata={
"model_provider": "bedrock",
"model_name": "us.anthropic.claude-sonnet-4-20250514-v1:0",
},
)
expected_content: list[types.ContentBlock] = [
{
"type": "reasoning",
"reasoning": "foo",
"extras": {"signature": "foo_signature"},
},
{"type": "text", "text": "Let's call a tool."},
{
"type": "tool_call",
"id": "abc_123",
"name": "get_weather",
"args": {"location": "San Francisco"},
},
{
"type": "text",
"text": "It's sunny.",
"annotations": [
{
"type": "citation",
"title": "Document Title",
"cited_text": "The weather is sunny.",
"extras": {
"source": "source_123",
"search_result_index": 1,
"start_block_index": 0,
"end_block_index": 2,
},
},
{"type": "non_standard_annotation", "value": {"bar": "baz"}},
],
},
{
"type": "non_standard",
"value": {"type": "something_else", "foo": "bar"},
},
{
"type": "tool_call",
"id": "abc_234",
"name": "another_tool",
"args": {"arg_1": "value_1"},
},
]
assert message.content_blocks == expected_content
# Check no mutation
assert message.content != expected_content
# Test with a non-Anthropic message
message = AIMessage(
[
{"type": "text", "text": "Let's call a tool."},
{"type": "something_else", "foo": "bar"},
],
tool_calls=[
{
"type": "tool_call",
"id": "abc_123",
"name": "get_weather",
"args": {"location": "San Francisco"},
}
],
response_metadata={"model_provider": "bedrock"},
)
expected_content = [
{"type": "text", "text": "Let's call a tool."},
{
"type": "non_standard",
"value": {"type": "something_else", "foo": "bar"},
},
{
"type": "tool_call",
"id": "abc_123",
"name": "get_weather",
"args": {"location": "San Francisco"},
},
]
assert message.content_blocks == expected_content
def test_convert_to_v1_from_bedrock_chunk() -> None:
chunks = [
AIMessageChunk(
content=[{"text": "Looking ", "type": "text", "index": 0}],
response_metadata={"model_provider": "bedrock"},
),
AIMessageChunk(
content=[{"text": "now.", "type": "text", "index": 0}],
response_metadata={"model_provider": "bedrock"},
),
AIMessageChunk(
content=[
{
"type": "tool_use",
"name": "get_weather",
"input": {},
"id": "toolu_abc123",
"index": 1,
}
],
tool_call_chunks=[
{
"type": "tool_call_chunk",
"name": "get_weather",
"args": "",
"id": "toolu_abc123",
"index": 1,
}
],
response_metadata={"model_provider": "bedrock"},
),
AIMessageChunk(
content=[{"type": "input_json_delta", "partial_json": "", "index": 1}],
tool_call_chunks=[
{
"name": None,
"args": "",
"id": None,
"index": 1,
"type": "tool_call_chunk",
}
],
response_metadata={"model_provider": "bedrock"},
),
AIMessageChunk(
content=[
{"type": "input_json_delta", "partial_json": '{"loca', "index": 1}
],
tool_call_chunks=[
{
"name": None,
"args": '{"loca',
"id": None,
"index": 1,
"type": "tool_call_chunk",
}
],
response_metadata={"model_provider": "bedrock"},
),
AIMessageChunk(
content=[
{"type": "input_json_delta", "partial_json": 'tion": "San ', "index": 1}
],
tool_call_chunks=[
{
"name": None,
"args": 'tion": "San ',
"id": None,
"index": 1,
"type": "tool_call_chunk",
}
],
response_metadata={"model_provider": "bedrock"},
),
AIMessageChunk(
content=[
{"type": "input_json_delta", "partial_json": 'Francisco"}', "index": 1}
],
tool_call_chunks=[
{
"name": None,
"args": 'Francisco"}',
"id": None,
"index": 1,
"type": "tool_call_chunk",
}
],
response_metadata={"model_provider": "bedrock"},
),
]
expected_contents: list[types.ContentBlock] = [
{"type": "text", "text": "Looking ", "index": 0},
{"type": "text", "text": "now.", "index": 0},
{
"type": "tool_call_chunk",
"name": "get_weather",
"args": "",
"id": "toolu_abc123",
"index": 1,
},
{"name": None, "args": "", "id": None, "index": 1, "type": "tool_call_chunk"},
{
"name": None,
"args": '{"loca',
"id": None,
"index": 1,
"type": "tool_call_chunk",
},
{
"name": None,
"args": 'tion": "San ',
"id": None,
"index": 1,
"type": "tool_call_chunk",
},
{
"name": None,
"args": 'Francisco"}',
"id": None,
"index": 1,
"type": "tool_call_chunk",
},
]
for chunk, expected in zip(chunks, expected_contents, strict=False):
assert chunk.content_blocks == [expected]
full: AIMessageChunk | None = None
for chunk in chunks:
full = chunk if full is None else full + chunk
assert isinstance(full, AIMessageChunk)
expected_content = [
{"type": "text", "text": "Looking now.", "index": 0},
{
"type": "tool_use",
"name": "get_weather",
"partial_json": '{"location": "San Francisco"}',
"input": {},
"id": "toolu_abc123",
"index": 1,
},
]
assert full.content == expected_content
expected_content_blocks = [
{"type": "text", "text": "Looking now.", "index": 0},
{
"type": "tool_call_chunk",
"name": "get_weather",
"args": '{"location": "San Francisco"}',
"id": "toolu_abc123",
"index": 1,
},
]
assert full.content_blocks == expected_content_blocks
def test_convert_to_v1_from_bedrock_input() -> None:
message = HumanMessage(
[
{"type": "text", "text": "foo"},
{
"type": "document",
"source": {
"type": "base64",
"data": "<base64 data>",
"media_type": "application/pdf",
},
},
{
"type": "document",
"source": {
"type": "url",
"url": "<document url>",
},
},
{
"type": "document",
"source": {
"type": "content",
"content": [
{"type": "text", "text": "The grass is green"},
{"type": "text", "text": "The sky is blue"},
],
},
"citations": {"enabled": True},
},
{
"type": "document",
"source": {
"type": "text",
"data": "<plain text data>",
"media_type": "text/plain",
},
},
{
"type": "image",
"source": {
"type": "base64",
"media_type": "image/jpeg",
"data": "<base64 image data>",
},
},
{
"type": "image",
"source": {
"type": "url",
"url": "<image url>",
},
},
{
"type": "image",
"source": {
"type": "file",
"file_id": "<image file id>",
},
},
{
"type": "document",
"source": {"type": "file", "file_id": "<pdf file id>"},
},
]
)
expected: list[types.ContentBlock] = [
{"type": "text", "text": "foo"},
{
"type": "file",
"base64": "<base64 data>",
"mime_type": "application/pdf",
},
{
"type": "file",
"url": "<document url>",
},
{
"type": "non_standard",
"value": {
"type": "document",
"source": {
"type": "content",
"content": [
{"type": "text", "text": "The grass is green"},
{"type": "text", "text": "The sky is blue"},
],
},
"citations": {"enabled": True},
},
},
{
"type": "text-plain",
"text": "<plain text data>",
"mime_type": "text/plain",
},
{
"type": "image",
"base64": "<base64 image data>",
"mime_type": "image/jpeg",
},
{
"type": "image",
"url": "<image url>",
},
{
"type": "image",
"id": "<image file id>",
},
{
"type": "file",
"id": "<pdf file id>",
},
]
assert message.content_blocks == expected | python | github | https://github.com/langchain-ai/langchain | libs/core/tests/unit_tests/messages/block_translators/test_bedrock.py |
########################################################################
#
# File Name: TreeWalker.py
#
# Documentation: http://docs.4suite.com/4DOM/TreeWalker.py.html
#
"""
Tree Walker from DOM Level 2. Allows multi-directional iteration over nodes.
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
from NodeFilter import NodeFilter
from xml.dom import NoModificationAllowedErr
from xml.dom import NotSupportedErr
class TreeWalker:
def __init__(self, root, whatToShow, filter, expandEntityReferences):
self.__dict__['__root'] = root
self.__dict__['__whatToShow'] = whatToShow
self.__dict__['__filter'] = filter
self.__dict__['__expandEntityReferences'] = expandEntityReferences
self.__dict__['__currentNode'] = root
### Attribute Access Methods -- xxx.attr ###
def __getattr__(self, name):
attrFunc = self._readComputedAttrs.get(name)
if attrFunc:
return attrFunc(self)
def __setattr__(self, name, value):
#Make sure attribute is not read-only
if name in self.__class__._readOnlyAttrs:
raise NoModificationAllowedErr()
#If it's computed execute that function
attrFunc = self.__class__._writeComputedAttrs.get(name)
if attrFunc:
attrFunc(self, value)
#Otherwise, just set the attribute
else:
self.__dict__[name] = value
### Attribute Methods -- xxx._get_attr() ###
def _get_root(self):
return self.__dict__['__root']
def _get_filter(self):
return self.__dict__['__filter']
def _get_whatToShow(self):
return self.__dict__['__whatToShow']
def _get_expandEntityReferences(self):
return self.__dict__['__expandEntityReferences']
def _get_currentNode(self):
return self.__dict__['__currentNode']
def _set_currentNode(self, value):
if value == None:
raise NotSupportedErr()
self.__dict__['__currentNode'] = value
### Methods ###
def parentNode(self):
next_node = None
if self.__dict__['__currentNode'] != self.__dict__['__root']:
next_node = self.__dict__['__currentNode']._get_parentNode()
while next_node and next_node != self.__dict__['__root'] \
and not (self.__checkWhatToShow(next_node) \
and self.__checkFilter(next_node) == NodeFilter.FILTER_ACCEPT):
next_node = next_node._get_parentNode()
if next_node:
self.__dict__['__currentNode'] = next_node
return next_node
def firstChild(self):
next_node = None
if self.__checkFilter(self.__dict__['__currentNode']) != NodeFilter.FILTER_REJECT:
next_node = self.__dict__['__currentNode']._get_firstChild()
while next_node and not (self.__checkWhatToShow(next_node) \
and self.__checkFilter(next_node) == NodeFilter.FILTER_ACCEPT):
next_node = next_node._get_nextSibling()
if next_node:
self.__dict__['__currentNode'] = next_node
return next_node
def lastChild(self):
next_node = None
if self.__checkFilter(self.__dict__['__currentNode']) != NodeFilter.FILTER_REJECT:
next_node = self.__dict__['__currentNode']._get_lastChild()
while next_node and not (self.__checkWhatToShow(next_node) \
and self.__checkFilter(next_node) == NodeFilter.FILTER_ACCEPT):
next_node = next_node._get_previousSibling()
if next_node:
self.__dict__['__currentNode'] = next_node
return next_node
def previousSibling(self):
prev_node = None
if self.__dict__['__currentNode'] != self.__root:
prev_node = self.__dict__['__currentNode']._get_previousSibling()
while prev_node and not (self.__checkWhatToShow(prev_node) \
and self.__checkFilter(prev_node) == NodeFilter.FILTER_ACCEPT):
prev_node = prev_node._get_previousSibling()
if prev_node:
self.__dict__['__currentNode'] = prev_node
return prev_node
def nextSibling(self):
next_node = None
if self.__dict__['__currentNode'] != self.__root:
next_node = self.__dict__['__currentNode']._get_nextSibling()
while next_node and not (self.__checkWhatToShow(next_node) and self.__checkFilter(next_node) == NodeFilter.FILTER_ACCEPT):
next_node = next_node._get_nextSibling()
if next_node:
self.__dict__['__currentNode'] = next_node
return next_node
def nextNode(self):
next_node = self.__advance()
while next_node and not (self.__checkWhatToShow(next_node) and self.__checkFilter(next_node) == NodeFilter.FILTER_ACCEPT):
next_node = self.__advance()
return next_node
def previousNode(self):
prev_node = self.__regress()
while prev_node and not (self.__checkWhatToShow(prev_node) and self.__checkFilter(prev_node) == NodeFilter.FILTER_ACCEPT):
prev_node = self.__regress()
return prev_node
def __advance(self):
if self.firstChild():
return self.__dict__['__currentNode']
if self.nextSibling():
return self.__dict__['__currentNode']
if self.parentNode():
return self.nextSibling()
return None
def __regress(self):
if self.previousSibling():
self.lastChild()
return self.__dict__['__currentNode']
if self.parentNode():
return self.__dict__['__currentNode']
return None
def __checkWhatToShow(self, node):
show_bit = 1 << (node._get_nodeType() - 1)
return self.__dict__['__whatToShow'] & show_bit
def __checkFilter(self, node):
if self.__dict__['__filter']:
return self.__dict__['__filter'].acceptNode(node)
else:
return NodeFilter.FILTER_ACCEPT
### Attribute Access Mappings ###
_readComputedAttrs = {'root':_get_root,
'whatToShow':_get_whatToShow,
'filter':_get_filter,
'expandEntityReferences':_get_expandEntityReferences,
'currentNode':_get_currentNode
}
_writeComputedAttrs = {'currentNode': _set_currentNode
}
# Create the read-only list of attributes
_readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
_readComputedAttrs.keys()) | unknown | codeparrot/codeparrot-clean | ||
<?php
namespace Illuminate\Support;
use ArrayIterator;
use Illuminate\Contracts\Support\ValidatedData;
use Illuminate\Support\Traits\Dumpable;
use Illuminate\Support\Traits\InteractsWithData;
use Traversable;
class ValidatedInput implements ValidatedData
{
use Dumpable, InteractsWithData;
/**
* The underlying input.
*
* @var array
*/
protected $input;
/**
* Create a new validated input container.
*
* @param array $input
*/
public function __construct(array $input)
{
$this->input = $input;
}
/**
* Merge the validated input with the given array of additional data.
*
* @param array $items
* @return static
*/
public function merge(array $items)
{
return new static(array_merge($this->all(), $items));
}
/**
* Get the raw, underlying input array.
*
* @param mixed $keys
* @return array
*/
public function all($keys = null)
{
if (! $keys) {
return $this->input;
}
$input = [];
foreach (is_array($keys) ? $keys : func_get_args() as $key) {
Arr::set($input, $key, Arr::get($this->input, $key));
}
return $input;
}
/**
* Retrieve data from the instance.
*
* @param string|null $key
* @param mixed $default
* @return mixed
*/
protected function data($key = null, $default = null)
{
return $this->input($key, $default);
}
/**
* Get the keys for all of the input.
*
* @return array
*/
public function keys()
{
return array_keys($this->input());
}
/**
* Retrieve an input item from the validated inputs.
*
* @param string|null $key
* @param mixed $default
* @return mixed
*/
public function input($key = null, $default = null)
{
return data_get(
$this->all(), $key, $default
);
}
/**
* Dump the items.
*
* @param mixed ...$keys
* @return $this
*/
public function dump(...$keys)
{
dump(count($keys) > 0 ? $this->only($keys) : $this->all());
return $this;
}
/**
* Get the instance as an array.
*
* @return array
*/
public function toArray()
{
return $this->all();
}
/**
* Dynamically access input data.
*
* @param string $name
* @return mixed
*/
public function __get($name)
{
return $this->input($name);
}
/**
* Dynamically set input data.
*
* @param string $name
* @param mixed $value
* @return mixed
*/
public function __set($name, $value)
{
$this->input[$name] = $value;
}
/**
* Determine if an input item is set.
*
* @param string $name
* @return bool
*/
public function __isset($name)
{
return $this->exists($name);
}
/**
* Remove an input item.
*
* @param string $name
* @return void
*/
public function __unset($name)
{
unset($this->input[$name]);
}
/**
* Determine if an item exists at an offset.
*
* @param mixed $key
* @return bool
*/
public function offsetExists($key): bool
{
return $this->exists($key);
}
/**
* Get an item at a given offset.
*
* @param mixed $key
* @return mixed
*/
public function offsetGet($key): mixed
{
return $this->input($key);
}
/**
* Set the item at a given offset.
*
* @param mixed $key
* @param mixed $value
* @return void
*/
public function offsetSet($key, $value): void
{
if (is_null($key)) {
$this->input[] = $value;
} else {
$this->input[$key] = $value;
}
}
/**
* Unset the item at a given offset.
*
* @param string $key
* @return void
*/
public function offsetUnset($key): void
{
unset($this->input[$key]);
}
/**
* Get an iterator for the input.
*
* @return \ArrayIterator
*/
public function getIterator(): Traversable
{
return new ArrayIterator($this->input);
}
} | php | github | https://github.com/laravel/framework | src/Illuminate/Support/ValidatedInput.php |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.microsoft.azure.log.wasb_task_handler`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbTaskHandler # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.microsoft.azure.log.wasb_task_handler`.",
DeprecationWarning,
stacklevel=2,
) | unknown | codeparrot/codeparrot-clean | ||
from insights.parsers.redhat_release import RedhatRelease
from insights.tests import context_wrap
REDHAT_RELEASE1 = """
Red Hat Enterprise Linux Server release 6.7 (Santiago)
""".strip()
REDHAT_RELEASE2 = """
Red Hat Enterprise Linux Server release 7.2 (Maipo)
""".strip()
RHVH_RHV40 = """
Red Hat Enterprise Linux release 7.3
""".strip()
RHEVH_RHEV35 = """
Red Hat Enterprise Virtualization Hypervisor release 6.7 (20160219.0.el6ev)
""".strip()
FEDORA = """
Fedora release 23 (Twenty Three)
""".strip()
def test_rhe6():
    """RHEL Server 6.7 (Santiago) release string parses correctly."""
    parsed = RedhatRelease(context_wrap(REDHAT_RELEASE1))
    assert parsed.raw == REDHAT_RELEASE1
    assert (parsed.major, parsed.minor) == (6, 7)
    assert parsed.version == "6.7"
    assert parsed.is_rhel
    assert parsed.product == "Red Hat Enterprise Linux Server"
def test_rhe7():
    """RHEL Server 7.2 (Maipo) release string parses correctly."""
    parsed = RedhatRelease(context_wrap(REDHAT_RELEASE2))
    assert parsed.raw == REDHAT_RELEASE2
    assert (parsed.major, parsed.minor) == (7, 2)
    assert parsed.version == "7.2"
    assert parsed.is_rhel
    assert parsed.product == "Red Hat Enterprise Linux Server"
def test_rhevh35():
    """RHEV Hypervisor 6.7 parses, and is not reported as plain RHEL."""
    parsed = RedhatRelease(context_wrap(RHEVH_RHEV35))
    assert parsed.raw == RHEVH_RHEV35
    assert (parsed.major, parsed.minor) == (6, 7)
    assert parsed.version == "6.7"
    assert not parsed.is_rhel
    assert parsed.product == "Red Hat Enterprise Virtualization Hypervisor"
def test_rhvh40():
    """RHV 4.0 hosts report a plain RHEL 7.3 release string."""
    parsed = RedhatRelease(context_wrap(RHVH_RHV40))
    assert parsed.raw == RHVH_RHV40
    assert (parsed.major, parsed.minor) == (7, 3)
    assert parsed.version == "7.3"
    assert parsed.is_rhel
    assert parsed.product == "Red Hat Enterprise Linux"
def test_fedora23():
    """Fedora has no minor version and is not RHEL."""
    parsed = RedhatRelease(context_wrap(FEDORA))
    assert parsed.raw == FEDORA
    assert parsed.major == 23
    assert parsed.minor is None
    assert parsed.version == "23"
    assert not parsed.is_rhel
    assert parsed.product == "Fedora"
# -*- coding: us-ascii -*-
# frozen_string_literal: false
# $RoughId: extconf.rb,v 1.3 2001/08/14 19:54:51 knu Exp $
# $Id$

require "mkmf"
require File.expand_path("../../digest_conf", __FILE__)

# The init stub is always compiled; digest_conf decides whether to use the
# bundled MD5 implementation or an external one.
$objs = [ "md5init.#{$OBJEXT}" ]

digest_conf("md5")

have_header("sys/cdefs.h")

# Load the base "digest" extension before digest/md5 itself.
$preload = %w[digest]

create_makefile("digest/md5")
# -*- coding: utf-8 -*-
"""
Created on Sun Jan 06 09:50:54 2013

Author: Josef Perktold

Example script: fit local-linear kernel regressions to several univariate
data-generating processes and plot the estimated means in a 2x2 grid.
"""
from __future__ import print_function

if __name__ == '__main__':

    import numpy as np
    import matplotlib.pyplot as plt

    from statsmodels.nonparametric.api import KernelReg
    import statsmodels.sandbox.nonparametric.dgp_examples as dgp

    # A random seed is drawn but immediately overridden with a fixed one so
    # the example is reproducible; the drawn value is discarded.
    seed = np.random.randint(999999)
    seed = 430973
    print(seed)
    np.random.seed(seed)

    # Four univariate example DGPs; each instance generates (x, y) samples.
    funcs = [dgp.UnivariateFanGijbels1(),
             dgp.UnivariateFanGijbels2(),
             dgp.UnivariateFanGijbels1EU(),
             #dgp.UnivariateFanGijbels2(distr_x=stats.uniform(-2, 4))
             dgp.UnivariateFunc1()
             ]

    res = []
    fig = plt.figure()
    for i,func in enumerate(funcs):
        #f = func()
        f = func
        # Local-linear ('ll') kernel regression with least-squares
        # cross-validated bandwidth on a single continuous regressor.
        model = KernelReg(endog=[f.y], exog=[f.x], reg_type='ll',
                          var_type='c', bw='cv_ls')
        mean, mfx = model.fit()
        ax = fig.add_subplot(2, 2, i+1)
        # Plot the DGP (data + true mean) and overlay the estimated mean.
        f.plot(ax=ax)
        ax.plot(f.x, mean, color='r', lw=2, label='est. mean')
        ax.legend(loc='upper left')
        res.append((model, mean, mfx))

    fig.suptitle('Kernel Regression')
    fig.show()
import type { Theme } from '../../theme'
import defaultTheme from '../default-theme'
import type { UserConfig } from './types'
/**
 * Build a legacy-compatible `UserConfig` backed by the given CSS `Theme`.
 * Defaults come from `defaultTheme`; namespaces that exist in the CSS theme
 * (color, text, shadow, ...) are resolved from it lazily via `theme(...)`.
 */
export function createCompatConfig(cssTheme: Theme): UserConfig {
  return {
    theme: {
      ...defaultTheme,

      // In the defaultTheme config, the `colors` key is not a function but a
      // shallow object. We don't want to define the color namespace unless it
      // is in the CSS theme so here we explicitly overwrite the defaultTheme
      // and only allow colors from the CSS theme.
      colors: ({ theme }) => theme('color', {}),

      extend: {
        // Each entry maps a CSS-theme namespace onto its legacy config name.
        fontSize: ({ theme }) => ({
          ...theme('text', {}),
        }),

        boxShadow: ({ theme }) => ({
          ...theme('shadow', {}),
        }),

        animation: ({ theme }) => ({
          ...theme('animate', {}),
        }),

        aspectRatio: ({ theme }) => ({
          ...theme('aspect', {}),
        }),

        borderRadius: ({ theme }) => ({
          ...theme('radius', {}),
        }),

        screens: ({ theme }) => ({
          ...theme('breakpoint', {}),
        }),

        letterSpacing: ({ theme }) => ({
          ...theme('tracking', {}),
        }),

        lineHeight: ({ theme }) => ({
          ...theme('leading', {}),
        }),

        // `?? null` keeps the key present (with no value) when the CSS theme
        // does not define the corresponding default variable.
        transitionDuration: {
          DEFAULT: cssTheme.get(['--default-transition-duration']) ?? null,
        },

        transitionTimingFunction: {
          DEFAULT: cssTheme.get(['--default-transition-timing-function']) ?? null,
        },

        maxWidth: ({ theme }) => ({
          ...theme('container', {}),
        }),
      },
    },
  }
}
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Main entry point for the NaCl SDK buildbot.
The entry point used to be build_sdk.py itself, but we want
to be able to simplify build_sdk (for example separating out
the test code into test_sdk) and change its default behaviour
while being able to separately control excactly what the bots
run.
"""
import buildbot_common
import os
import subprocess
import sys
from buildbot_common import Run
from build_paths import SRC_DIR, SDK_SRC_DIR, SCRIPT_DIR
import getos
def StepRunUnittests():
  """Run the SDK unit test suite as a buildbot step."""
  buildbot_common.BuildStep('Run unittests')

  # Our tests shouldn't be using the proxy; they should all be connecting to
  # localhost. Some slaves can't route HTTP traffic through the proxy to
  # localhost (we get 504 gateway errors), so we clear it here.
  env = dict(os.environ)
  env.pop('http_proxy', None)
  Run([sys.executable, 'test_all.py'], env=env, cwd=SDK_SRC_DIR)
def StepBuildSDK():
  """Invoke build_sdk.py, working around the Windows path-length limit."""
  is_win = getos.GetPlatform() == 'win'

  # Windows has a path length limit of 255 characters, after joining cwd with a
  # relative path. Use subst before building to keep the path lengths short.
  if is_win:
    subst_drive = 'S:'
    root_dir = os.path.dirname(SRC_DIR)
    new_root_dir = subst_drive + '\\'
    subprocess.check_call(['subst', subst_drive, root_dir])
    new_script_dir = os.path.join(new_root_dir,
                                  os.path.relpath(SCRIPT_DIR, root_dir))
  else:
    new_script_dir = SCRIPT_DIR

  try:
    Run([sys.executable, 'build_sdk.py'], cwd=new_script_dir)
  finally:
    # Always release the substituted drive, even if the build step fails.
    if is_win:
      subprocess.check_call(['subst', '/D', subst_drive])
def StepTestSDK():
  """Run test_sdk.py; on Linux, under a single xvfb-run virtual display."""
  if getos.GetPlatform() == 'linux':
    # Run all of test_sdk.py under xvfb-run; it's startup time leaves something
    # to be desired, so only start it up once.
    # We also need to make sure that there are at least 24 bits per pixel.
    # https://code.google.com/p/chromium/issues/detail?id=316687
    prefix = [
      'xvfb-run',
      '--auto-servernum',
      '--server-args', '-screen 0 1024x768x24'
    ]
  else:
    prefix = []
  Run(prefix + [sys.executable, 'test_sdk.py'], cwd=SCRIPT_DIR)
def main():
  """Buildbot entry point: unit-test, build, and (maybe) integration-test."""
  StepRunUnittests()
  StepBuildSDK()
  # Skip the testing phase if we are running on a build-only bots.
  if not buildbot_common.IsBuildOnlyBot():
    StepTestSDK()

  return 0
try:
sys.exit(main())
except KeyboardInterrupt:
buildbot_common.ErrorExit('buildbot_run: interrupted') | unknown | codeparrot/codeparrot-clean | ||
## API Report File for "extended_template_diagnostic_name_api"
> Do not edit this file. It is a report generated by [API Extractor](https://api-extractor.com/).
```ts
// @public
export enum ExtendedTemplateDiagnosticName {
// (undocumented)
CONTROL_FLOW_PREVENTING_CONTENT_PROJECTION = "controlFlowPreventingContentProjection",
// (undocumented)
DEFER_TRIGGER_MISCONFIGURATION = "deferTriggerMisconfiguration",
// (undocumented)
INTERPOLATED_SIGNAL_NOT_INVOKED = "interpolatedSignalNotInvoked",
// (undocumented)
INVALID_BANANA_IN_BOX = "invalidBananaInBox",
// (undocumented)
MISSING_CONTROL_FLOW_DIRECTIVE = "missingControlFlowDirective",
// (undocumented)
MISSING_NGFOROF_LET = "missingNgForOfLet",
// (undocumented)
MISSING_STRUCTURAL_DIRECTIVE = "missingStructuralDirective",
// (undocumented)
NULLISH_COALESCING_NOT_NULLABLE = "nullishCoalescingNotNullable",
// (undocumented)
OPTIONAL_CHAIN_NOT_NULLABLE = "optionalChainNotNullable",
// (undocumented)
SKIP_HYDRATION_NOT_STATIC = "skipHydrationNotStatic",
// (undocumented)
SUFFIX_NOT_SUPPORTED = "suffixNotSupported",
// (undocumented)
TEXT_ATTRIBUTE_NOT_BINDING = "textAttributeNotBinding",
// (undocumented)
UNINVOKED_FUNCTION_IN_EVENT_BINDING = "uninvokedFunctionInEventBinding",
// (undocumented)
UNINVOKED_FUNCTION_IN_TEXT_INTERPOLATION = "uninvokedFunctionInTextInterpolation",
// (undocumented)
UNINVOKED_TRACK_FUNCTION = "uninvokedTrackFunction",
// (undocumented)
UNPARENTHESIZED_NULLISH_COALESCING = "unparenthesizedNullishCoalescing",
// (undocumented)
UNUSED_LET_DECLARATION = "unusedLetDeclaration",
// (undocumented)
UNUSED_STANDALONE_IMPORTS = "unusedStandaloneImports"
}
// (No @packageDocumentation comment for this package)
``` | unknown | github | https://github.com/angular/angular | goldens/public-api/compiler-cli/extended_template_diagnostic_name.api.md |
# Locate the pyWolf native module relative to this script and put it on
# sys.path before importing it.
# NOTE: this file uses Python 2 print statements and PySide (Qt4); it will
# not run under Python 3.
import sys, os
# get path of script
_script_path = os.path.realpath(__file__)
_script_dir = os.path.dirname(_script_path)
pyWolfPath = _script_dir
if sys.platform == "linux" or sys.platform == "linux2":
    print "Linux not tested yet"
elif sys.platform == "darwin":
    print "OS X not tested yet"
elif sys.platform == "win32":
    # Windows: the pyWolf binaries live in the x64 Debug output directory.
    pyWolfPath = pyWolfPath + "\\..\\..\\..\\..\\bin\\x64\\Debug\\Win32\\"
if pyWolfPath != "" and (not pyWolfPath in sys.path):
    sys.path.append(pyWolfPath)
import ctypes, threading, pyWolf
from PySide import QtGui, QtCore
from PySide.QtGui import *
from PySide.QtCore import *
# Default window dimensions in pixels.
screen_width = 800
screen_height = 600
class scene(QWidget):
    """Qt widget hosting a pyWolf (Vulkan) scene that draws a colored quad
    from a vertex buffer plus an index buffer (sample 04_index_buffer).

    The engine's lifecycle callbacks (init/load/update/render) are wired to
    methods of this widget; the blocking engine loop runs on a worker thread
    started in showEvent().  NOTE: Python 2 / PySide (Qt4) code.
    """

    def __init__(self, pContentPath, pLogPath, pAppName, parent = None):
        # pContentPath: engine content directory (shaders, assets)
        # pLogPath: directory for the engine's log output
        # pAppName: application name passed to the engine
        super(scene, self).__init__(parent)
        self.__exiting = False
        self._game = pyWolf.framework.w_game(pContentPath, pLogPath, pAppName)
        # Wire the engine lifecycle callbacks to this widget's methods.
        self._game.set_pre_init_callback(self.pre_init)
        self._game.set_post_init_callback(self.post_init)
        self._game.set_load_callback(self.load)
        self._game.set_update_callback(self.update)
        self._game.set_pre_render_callback(self.pre_render)
        self._game.set_post_render_callback(self.post_render)
        # Graphics objects: constructed empty here, initialized in load().
        self._gDevice = None
        self._viewport = pyWolf.graphics.w_viewport()
        self._viewport_scissor = pyWolf.graphics.w_viewport_scissor()
        self._draw_command_buffers = pyWolf.graphics.w_command_buffers()
        self._draw_render_pass = pyWolf.graphics.w_render_pass()
        self._draw_fence = pyWolf.graphics.w_fences()
        self._draw_semaphore = pyWolf.graphics.w_semaphore()
        self._shader = pyWolf.graphics.w_shader()
        self._pipeline = pyWolf.graphics.w_pipeline()
        self._mesh = pyWolf.graphics.w_mesh()
        _config = pyWolf.graphics.w_graphics_device_manager_configs()
        _config.debug_gpu = False
        self._game.set_graphics_device_manager_configs(_config)

    def pre_init(self):
        """Engine callback: before graphics-device initialization."""
        print "pre_init"

    def post_init(self):
        """Engine callback: after initialization; grabs the first GPU."""
        #get main graphics device
        self._gDevice = self._game.get_graphics_device(0)
        print self._gDevice.get_info()
        print "post_init"

    def load(self):
        """Engine callback: create all GPU resources (render pass, sync
        objects, command buffers, shaders, pipeline, quad mesh).

        Exits the process on any engine error code (truthy _hr).
        """
        #initialize viewport (x is left at its default — TODO confirm intended)
        self._viewport.y = 0
        self._viewport.width = screen_width
        self._viewport.height = screen_height
        self._viewport.minDepth = 0
        self._viewport.maxDepth = 1
        #initialize scissor of viewport
        self._viewport_scissor.offset.x = 0
        self._viewport_scissor.offset.y = 0
        self._viewport_scissor.extent.width = screen_width
        self._viewport_scissor.extent.height = screen_height
        #load render pass which contains frame buffers
        _render_pass_attachments = []
        _output_window = self._gDevice.output_presentation_window
        for _iter in _output_window.swap_chain_image_views:
            # COLOR  #DEPTH
            _render_pass_attachments.append([_iter, _output_window.depth_buffer_image_view])
        _hr = self._draw_render_pass.load(self._gDevice, self._viewport, self._viewport_scissor, _render_pass_attachments)
        if _hr:
            print "Error on loading render pass"
            sys.exit(1)
        #create one semaphore for drawing
        _hr = self._draw_semaphore.initialize(self._gDevice)
        if _hr:
            print "Error on initializing semaphore"
            sys.exit(1)
        #create one fence for drawing
        _hr = self._draw_fence.initialize(self._gDevice, 1)
        if _hr:
            print "Error on initializing fence(s)"
            sys.exit(1)
        #create the primary draw command buffers, one per swap-chain image
        number_of_swap_chains = self._gDevice.get_number_of_swap_chains()
        _hr = self._draw_command_buffers.load(self._gDevice, number_of_swap_chains, pyWolf.graphics.w_command_buffer_level.PRIMARY)
        if _hr:
            print "Error on initializing draw command buffer(s)"
            sys.exit(1)
        #loading vertex shader (shaders shared with sample 03_vertex_buffer)
        _content_path_dir = _script_dir + "/../../03_vertex_buffer/src/content/"
        _hr = self._shader.load(self._gDevice, _content_path_dir + "shaders/shader.vert.spv", pyWolf.graphics.w_shader_stage_flag_bits.VERTEX_SHADER, "main")
        if _hr:
            print "Error on loading vertex shader"
            sys.exit(1)
        #loading fragment shader
        _hr = self._shader.load(self._gDevice, _content_path_dir + "shaders/shader.frag.spv", pyWolf.graphics.w_shader_stage_flag_bits.FRAGMENT_SHADER, "main")
        if _hr:
            print "Error on loading fragment shader"
            sys.exit(1)
        #just we need vertex position color
        _vba = pyWolf.graphics.w_vertex_binding_attributes(pyWolf.graphics.w_vertex_declaration.VERTEX_POSITION_COLOR)
        self._mesh.set_vertex_binding_attributes(_vba);
        #loading pipeline cache
        _pipeline_cache_name = "pipeline_cache";
        _hr = self._pipeline.create_pipeline_cache(self._gDevice, _pipeline_cache_name)
        if _hr:
            # Non-fatal: the pipeline can still be created without a cache.
            print "Error on creating pipeline cache"
        #create pipeline
        _hr = self._pipeline.load(self._gDevice, _vba, pyWolf.graphics.w_primitive_topology.TRIANGLE_LIST, self._draw_render_pass, self._shader, [self._viewport], [ self._viewport_scissor ], _pipeline_cache_name)
        if _hr:
            print "Error on creating pipeline"
            sys.exit(1)
        #++++++++++++++++++++++++++++++++++++++++++++++++++++
        #The following codes have been added for this project
        #++++++++++++++++++++++++++++++++++++++++++++++++++++
        # Quad: four vertices, each position(xyz) followed by color(rgba),
        # drawn as two triangles via the index buffer below.
        _vertex_data = [
            -0.7, -0.7, 0.0,     #pos0
            1.0, 0.0, 0.0, 1.0,  #color0
            -0.7, 0.7, 0.0,      #pos1
            1.0, 1.0, 1.0, 1.0,  #color1
            0.7, 0.7, 0.0,       #pos2
            0.0, 1.0, 0.0, 1.0,  #color2
            0.7, -0.7, 0.0,      #pos3
            0.0, 0.0, 0.0, 1.0   #color3
        ]
        _index_data = [ 0,1,3,3,1,2 ]
        #create mesh
        _hr = self._mesh.load(self._gDevice, _vertex_data, _index_data)
        if _hr:
            print "Error on loading mesh"
            sys.exit(1)
        #++++++++++++++++++++++++++++++++++++++++++++++++++++
        #++++++++++++++++++++++++++++++++++++++++++++++++++++
        _hr = self.build_command_buffers()
        if _hr:
            print "Error on building draw command buffer(s)"
            sys.exit(1)
        print "scene loaded successfully"

    def build_command_buffers(self):
        """Record the draw commands (render pass, pipeline bind, mesh draw)
        into every command buffer.

        Returns the engine result code (falsy on success).
        """
        _hr = pyWolf.W_PASSED
        _size = self._draw_command_buffers.get_commands_size()
        for i in xrange(_size):
            _cmd = self._draw_command_buffers.get_command_at(i)
            _hr = self._draw_command_buffers.begin(i)
            if _hr:
                print "Error on begining command buffer: " + str(i)
                break
            self._draw_render_pass.begin(i, _cmd, pyWolf.system.w_color.CORNFLOWER_BLUE(), 1.0, 0)
            #place your draw code
            self._pipeline.bind(_cmd)
            self._mesh.draw(_cmd, None, 0, False)
            self._draw_render_pass.end(_cmd)
            _hr = self._draw_command_buffers.end(i)
            if _hr:
                print "Error on ending command buffer: " + str(i)
                break
        return _hr

    def update(self, pGameTime):
        """Engine callback: per-frame update; logs the current FPS."""
        print "fps: " + str(pGameTime.get_frames_per_second())

    def pre_render(self, pGameTime):
        """Engine callback: submit this frame's pre-recorded command buffer
        to the graphics queue, then block on the draw fence."""
        _output_window = self._gDevice.output_presentation_window
        _frame_index = _output_window.swap_chain_image_index
        _wait_dst_stage_mask = [ pyWolf.graphics.w_pipeline_stage_flag_bits.COLOR_ATTACHMENT_OUTPUT_BIT ]
        _wait_semaphores = [ _output_window.swap_chain_image_is_available_semaphore ]
        _signal_semaphores = [ _output_window.rendering_done_semaphore ]
        _cmd = self._draw_command_buffers.get_command_at(_frame_index)
        _cmd_buffers = [_cmd]
        #reset draw fence
        self._draw_fence.reset()
        _hr = self._gDevice.submit(_cmd_buffers, self._gDevice.graphics_queue, _wait_dst_stage_mask, _wait_semaphores, _signal_semaphores, self._draw_fence)
        if _hr:
            print "Error on submit to graphics device"
            return
        _hr = self._draw_fence.wait()
        if _hr:
            print "Error on waiting for draw fence"
            return

    def post_render(self, pSuccessfullyRendered):
        """Engine callback: after presentation; logs failed frames."""
        if pSuccessfullyRendered == False:
            print "Rendered Unsuccessfully"

    def run(self):
        """Blocking engine loop; runs on a worker thread (see showEvent).

        Loops until closeEvent() sets the exit flag, then releases all
        engine resources before returning.
        """
        #run game
        _window_info = pyWolf.system.w_window_info()
        _window_info.width = self.width()
        _window_info.height = self.height()
        _window_info.v_sync_enable = False
        _window_info.is_full_screen = False
        _window_info.swap_chain_format = 44 # BGRA8Unorm in VULKAN
        _window_info.cpu_access_swap_chain_buffer = False
        # get window handle
        pycobject_hwnd = self.winId()
        #convert window handle as HWND to unsigned integer pointer for c++
        ctypes.pythonapi.PyCObject_AsVoidPtr.restype = ctypes.c_void_p
        ctypes.pythonapi.PyCObject_AsVoidPtr.argtypes = [ctypes.py_object]
        int_hwnd = ctypes.pythonapi.PyCObject_AsVoidPtr(pycobject_hwnd)
        _window_info.set_win_id(int_hwnd)
        #initialize game
        _map_info = (0, _window_info)
        while True:
            if self.__exiting:
                self.release()
                break
            self._game.run(_map_info)
        print "Game exited"

    def showEvent(self, event):
        """Qt show hook: start the engine loop on a background thread."""
        #run in another thread
        threading.Thread(target=self.run).start()
        event.accept()

    def closeEvent(self, event):
        """Qt close hook: signal the engine loop to exit and clean up."""
        self.__exiting = True
        event.accept()

    def release(self):
        """Release all engine/GPU resources and drop the references."""
        self._draw_fence.release()
        self._draw_fence = None
        self._draw_semaphore.release()
        self._draw_semaphore = None
        self._draw_command_buffers.release()
        self._draw_command_buffers = None
        self._draw_render_pass.release()
        self._draw_render_pass = None
        self._shader.release()
        self._shader = None
        self._pipeline.release()
        self._pipeline = None
        self._mesh.release()
        self._mesh = None
        self._game.release()
        self._game = None
        self._gDevice = None
        self._viewport = None
        self._viewport_scissor = None
# Script entry point: create the Qt application and show the scene widget.
if __name__ == '__main__':
    app = QApplication(sys.argv)
    # Fix: bind the instance to a distinct name.  The original assigned the
    # instance to `scene`, shadowing (and losing) the `scene` class itself.
    window = scene(pyWolfPath + "..\\..\\..\\..\\content\\",
                   pyWolfPath,
                   "py_04_index_buffer")
    window.resize(screen_width, screen_height)
    window.setWindowTitle('Wolf.Engine')
    window.show()
    sys.exit(app.exec_())
"""Supporting definitions for the Python regression tests."""
if __name__ != 'test.support':
raise ImportError('support must be imported from the test package')
import annotationlib
import contextlib
import functools
import inspect
import logging
import _opcode
import os
import re
import stat
import sys
import sysconfig
import textwrap
import time
import types
import unittest
import warnings
__all__ = [
# globals
"PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast",
# exceptions
"Error", "TestFailed", "TestDidNotRun", "ResourceDenied",
# io
"record_original_stdout", "get_original_stdout", "captured_stdout",
"captured_stdin", "captured_stderr", "captured_output",
# unittest
"is_resource_enabled", "get_resource_value", "requires", "requires_resource",
"requires_freebsd_version",
"requires_gil_enabled", "requires_linux_version", "requires_mac_ver",
"check_syntax_error",
"requires_gzip", "requires_bz2", "requires_lzma", "requires_zstd",
"bigmemtest", "bigaddrspacetest", "cpython_only", "get_attribute",
"requires_IEEE_754", "requires_zlib",
"has_fork_support", "requires_fork",
"has_subprocess_support", "requires_subprocess",
"has_socket_support", "requires_working_socket",
"has_remote_subprocess_debugging", "requires_remote_subprocess_debugging",
"anticipate_failure", "load_package_tests", "detect_api_mismatch",
"check__all__", "skip_if_buggy_ucrt_strfptime",
"check_disallow_instantiation", "check_sanitizer", "skip_if_sanitizer",
"requires_limited_api", "requires_specialization", "thread_unsafe",
"skip_if_unlimited_stack_size",
# sys
"MS_WINDOWS", "is_jython", "is_android", "is_emscripten", "is_wasi",
"is_apple_mobile", "check_impl_detail", "unix_shell", "setswitchinterval",
"support_remote_exec_only",
# os
"get_pagesize",
# network
"open_urlresource",
# processes
"reap_children",
# miscellaneous
"run_with_locale", "swap_item", "findfile", "infinite_recursion",
"swap_attr", "Matcher", "set_memlimit", "SuppressCrashReport", "sortdict",
"run_with_tz", "PGO", "missing_compiler_executable",
"ALWAYS_EQ", "NEVER_EQ", "LARGEST", "SMALLEST",
"LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT",
"Py_DEBUG", "exceeds_recursion_limit", "skip_on_s390x",
"requires_jit_enabled",
"requires_jit_disabled",
"force_not_colorized",
"force_not_colorized_test_class",
"make_clean_env",
"BrokenIter",
"in_systemd_nspawn_sync_suppressed",
"run_no_yield_async_fn", "run_yielding_async_fn", "async_yield",
"reset_code", "on_github_actions"
]
# Timeout in seconds for tests using a network server listening on the network
# local loopback interface like 127.0.0.1.
#
# The timeout is long enough to prevent test failure: it takes into account
# that the client and the server can run in different threads or even different
# processes.
#
# The timeout should be long enough for connect(), recv() and send() methods
# of socket.socket.
LOOPBACK_TIMEOUT = 10.0

# Timeout in seconds for network requests going to the internet. The timeout is
# short enough to prevent a test to wait for too long if the internet request
# is blocked for whatever reason.
#
# Usually, a timeout using INTERNET_TIMEOUT should not mark a test as failed,
# but skip the test instead: see transient_internet().
INTERNET_TIMEOUT = 60.0

# Timeout in seconds to mark a test as failed if the test takes "too long".
#
# The timeout value depends on the regrtest --timeout command line option.
#
# If a test using SHORT_TIMEOUT starts to fail randomly on slow buildbots, use
# LONG_TIMEOUT instead.
SHORT_TIMEOUT = 30.0

# Timeout in seconds to detect when a test hangs.
#
# It is long enough to reduce the risk of test failure on the slowest Python
# buildbots. It should not be used to mark a test as failed if the test takes
# "too long". The timeout value depends on the regrtest --timeout command line
# option.
LONG_TIMEOUT = 5 * 60.0

# TEST_HOME_DIR refers to the top level directory of the "test" package
# that contains Python's regression test suite
TEST_SUPPORT_DIR = os.path.dirname(os.path.abspath(__file__))  # .../Lib/test/support
TEST_HOME_DIR = os.path.dirname(TEST_SUPPORT_DIR)              # .../Lib/test
STDLIB_DIR = os.path.dirname(TEST_HOME_DIR)                    # .../Lib
REPO_ROOT = os.path.dirname(STDLIB_DIR)                        # checkout root
# Root of the small exception hierarchy used by the regression-test machinery.
class Error(Exception):
    """Base class for regression test exceptions."""
class TestFailed(Error):
    """Raised when a test fails.

    *stats* optionally carries the run's statistics object alongside the
    message; str() of the exception is the bare message.
    """

    def __init__(self, msg, *args, stats=None):
        super().__init__(msg, *args)
        self.msg = msg
        self.stats = stats

    def __str__(self):
        return self.msg
class TestFailedWithDetails(TestFailed):
    """Test failed; additionally records the error and failure lists."""
    def __init__(self, msg, errors, failures, stats):
        # errors/failures: the collected unittest error/failure entries.
        self.errors = errors
        self.failures = failures
        super().__init__(msg, errors, failures, stats=stats)
class TestDidNotRun(Error):
    """Test did not run any subtests."""

class ResourceDenied(unittest.SkipTest):
    """Test skipped because it requested a disallowed resource.

    This is raised when a test calls requires() for a resource that
    has not been enabled.  It is used to distinguish between expected
    and unexpected skips.
    """
def anticipate_failure(condition):
    """Return a decorator marking a test that is known to be broken.

    When *condition* is true the test is wrapped with
    unittest.expectedFailure; otherwise it is returned untouched.  Any use
    of this decorator should have a comment identifying the associated
    tracker issue.
    """
    return unittest.expectedFailure if condition else (lambda f: f)
def load_package_tests(pkg_dir, loader, standard_tests, pattern):
    """Generic load_tests implementation for simple test packages.

    Most packages can implement load_tests using this function as follows:

       def load_tests(*args):
           return load_package_tests(os.path.dirname(__file__), *args)
    """
    discovered = loader.discover(
        start_dir=pkg_dir,
        top_level_dir=STDLIB_DIR,
        pattern=pattern if pattern is not None else "test*",
    )
    standard_tests.addTests(discovered)
    return standard_tests
def get_attribute(obj, name):
    """Get an attribute, raising SkipTest if AttributeError is raised."""
    try:
        return getattr(obj, name)
    except AttributeError:
        # Missing optional API: skip the test instead of erroring.
        raise unittest.SkipTest("object %r has no attribute %r" % (obj, name))
# Module-level flags; the regrtest driver reconfigures these at startup.
verbose = 1              # Flag set to 0 by regrtest.py
use_resources = None     # Flag set to {} by regrtest.py
max_memuse = 0           # Disable bigmem tests (they will still be run with
                         # small sizes, to make sure they work.)
real_max_memuse = 0      # set alongside max_memuse — presumably the raw
                         # configured limit; TODO confirm against set_memlimit
junit_xml_list = None    # list of testsuite XML elements
failfast = False         # set by regrtest; presumably mirrors unittest failfast
# _original_stdout is meant to hold stdout at the time regrtest began.
# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
# The point is to have some flavor of stdout the user can actually see.
_original_stdout = None

def record_original_stdout(stdout):
    """Remember *stdout* as the stream active when the run started."""
    global _original_stdout
    _original_stdout = stdout

def get_original_stdout():
    """Return the recorded startup stdout, falling back to sys.stdout."""
    return _original_stdout or sys.stdout
def _force_run(path, func, *args):
try:
return func(*args)
except FileNotFoundError as err:
# chmod() won't fix a missing file.
if verbose >= 2:
print('%s: %s' % (err.__class__.__name__, err))
raise
except OSError as err:
if verbose >= 2:
print('%s: %s' % (err.__class__.__name__, err))
print('re-run %s%r' % (func.__name__, args))
os.chmod(path, stat.S_IRWXU)
return func(*args)
# Check whether a gui is actually available
def _is_gui_available():
    """Probe (once) whether a GUI can be used; caches result and reason
    as attributes on the function object itself."""
    # Cached after the first call: the probe below is expensive.
    if hasattr(_is_gui_available, 'result'):
        return _is_gui_available.result
    import platform
    reason = None
    if sys.platform.startswith('win') and platform.win32_is_iot():
        reason = "gui is not available on Windows IoT Core"
    elif sys.platform.startswith('win'):
        # if Python is running as a service (such as the buildbot service),
        # gui interaction may be disallowed
        import ctypes
        import ctypes.wintypes
        UOI_FLAGS = 1
        WSF_VISIBLE = 0x0001

        class USEROBJECTFLAGS(ctypes.Structure):
            _fields_ = [("fInherit", ctypes.wintypes.BOOL),
                        ("fReserved", ctypes.wintypes.BOOL),
                        ("dwFlags", ctypes.wintypes.DWORD)]

        dll = ctypes.windll.user32
        h = dll.GetProcessWindowStation()
        if not h:
            raise ctypes.WinError()
        uof = USEROBJECTFLAGS()
        needed = ctypes.wintypes.DWORD()
        res = dll.GetUserObjectInformationW(h,
                                            UOI_FLAGS,
                                            ctypes.byref(uof),
                                            ctypes.sizeof(uof),
                                            ctypes.byref(needed))
        if not res:
            raise ctypes.WinError()
        if not bool(uof.dwFlags & WSF_VISIBLE):
            reason = "gui not available (WSF_VISIBLE flag not set)"
    elif sys.platform == 'darwin':
        # The Aqua Tk implementations on OS X can abort the process if
        # being called in an environment where a window server connection
        # cannot be made, for instance when invoked by a buildbot or ssh
        # process not running under the same user id as the current console
        # user.  To avoid that, raise an exception if the window manager
        # connection is not available.
        import subprocess
        try:
            rc = subprocess.run(["launchctl", "managername"],
                                capture_output=True, check=True)
            managername = rc.stdout.decode("utf-8").strip()
        except subprocess.CalledProcessError:
            reason = "unable to detect macOS launchd job manager"
        else:
            if managername != "Aqua":
                reason = f"{managername=} -- can only run in a macOS GUI session"

    # check on every platform whether tkinter can actually do anything
    if not reason:
        try:
            from tkinter import Tk
            root = Tk()
            root.withdraw()
            root.update()
            root.destroy()
        except Exception as e:
            err_string = str(e)
            if len(err_string) > 50:
                # Keep the skip reason short and readable.
                err_string = err_string[:50] + ' [...]'
            reason = 'Tk unavailable due to {}: {}'.format(type(e).__name__,
                                                           err_string)

    # Cache the result and the human-readable reason on the function.
    _is_gui_available.reason = reason
    _is_gui_available.result = not reason

    return _is_gui_available.result
def is_resource_enabled(resource):
    """Test whether a resource is enabled.

    Known resources are set by regrtest.py.  If not running under
    regrtest.py, all resources are assumed enabled unless use_resources
    has been set.
    """
    if use_resources is None:
        return True
    return resource in use_resources
def get_resource_value(resource):
    """Return the configured value for *resource*, or None.

    Fix: the previous docstring was copy-pasted from is_resource_enabled()
    and wrongly described this function as a boolean test.  Resources are
    configured by regrtest.py; outside regrtest there is no configuration
    and None is returned for every resource.
    """
    if use_resources is None:
        return None
    return use_resources.get(resource)
def requires(resource, msg=None):
    """Raise ResourceDenied if the specified resource is not available."""
    if not is_resource_enabled(resource):
        raise ResourceDenied(msg if msg is not None
                             else "Use of the %r resource not enabled" % resource)
    # Network-flavored resources additionally need socket support.
    if resource in {"network", "urlfetch"} and not has_socket_support:
        raise ResourceDenied("No socket support")
    if resource == 'gui' and not _is_gui_available():
        raise ResourceDenied(_is_gui_available.reason)
def _get_kernel_version(sysname="Linux"):
import platform
if platform.system() != sysname:
return None
version_txt = platform.release().split('-', 1)[0]
try:
return tuple(map(int, version_txt.split('.')))
except ValueError:
return None
def _requires_unix_version(sysname, min_version):
"""Decorator raising SkipTest if the OS is `sysname` and the version is less
than `min_version`.
For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if
the FreeBSD version is less than 7.2.
"""
import platform
min_version_txt = '.'.join(map(str, min_version))
version_txt = platform.release().split('-', 1)[0]
if platform.system() == sysname:
try:
version = tuple(map(int, version_txt.split('.')))
except ValueError:
skip = False
else:
skip = version < min_version
else:
skip = False
return unittest.skipIf(
skip,
f"{sysname} version {min_version_txt} or higher required, not "
f"{version_txt}"
)
def requires_freebsd_version(*min_version):
    """Skip decorator for FreeBSD versions older than *min_version*.

    For example, @requires_freebsd_version(7, 2) raises SkipTest if the
    FreeBSD version is less than 7.2.
    """
    return _requires_unix_version('FreeBSD', min_version)
def requires_linux_version(*min_version):
    """Skip decorator for Linux kernels older than *min_version*.

    For example, @requires_linux_version(2, 6, 32) raises SkipTest if the
    Linux version is less than 2.6.32.
    """
    return _requires_unix_version('Linux', min_version)
def requires_mac_ver(*min_version):
    """Decorator raising SkipTest on macOS older than *min_version*.

    For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X
    version is lesser than 10.5.  On other platforms the test runs
    unconditionally.
    """
    def decorate(func):
        @functools.wraps(func)
        def checked(*args, **kwargs):
            if sys.platform == 'darwin':
                import platform
                version_txt = platform.mac_ver()[0]
                try:
                    current = tuple(map(int, version_txt.split('.')))
                except ValueError:
                    # Unparseable version string: run the test anyway.
                    pass
                else:
                    if current < min_version:
                        min_version_txt = '.'.join(map(str, min_version))
                        raise unittest.SkipTest(
                            "Mac OS X %s or higher required, not %s"
                            % (min_version_txt, version_txt))
            return func(*args, **kwargs)
        checked.min_version = min_version
        return checked
    return decorate
def thread_unsafe(reason):
    """Mark a test as not thread safe.  When the test runner is run with
    --parallel-threads=N, the test will be run in a single thread.

    Usable both bare (@thread_unsafe) and with a reason string
    (@thread_unsafe("why")).
    """
    def decorator(test_item):
        test_item.__unittest_thread_unsafe__ = True
        # the reason is not currently used
        test_item.__unittest_thread_unsafe__why__ = reason
        return test_item

    if isinstance(reason, types.FunctionType):
        # Bare usage: *reason* is actually the decorated function.
        test_item, reason = reason, ''
        return decorator(test_item)
    return decorator
def skip_if_buildbot(reason=None):
    """Decorator raising SkipTest if running on a buildbot."""
    import getpass
    reason = reason or 'not suitable for buildbots'
    try:
        is_buildbot = getpass.getuser().lower() == 'buildbot'
    except (KeyError, OSError) as err:
        # No resolvable user name: assume we are not on a buildbot.
        logging.getLogger(__name__).warning('getpass.getuser() failed %s.', err, exc_info=err)
        is_buildbot = False
    return unittest.skipIf(is_buildbot, reason)
def check_sanitizer(*, address=False, memory=False, ub=False, thread=False,
                    function=True):
    """Returns True if Python is compiled with sanitizer support"""
    if not (address or memory or ub or thread):
        raise ValueError('At least one of address, memory, ub or thread must be True')

    cflags = sysconfig.get_config_var('CFLAGS') or ''
    config_args = sysconfig.get_config_var('CONFIG_ARGS') or ''

    def _built_with(flag, configure_opt=None):
        # A sanitizer is active if either the compiler flag or the
        # corresponding ./configure option was used for this build.
        if flag in cflags:
            return True
        return configure_opt is not None and configure_opt in config_args

    return (
        (memory and _built_with('-fsanitize=memory',
                                '--with-memory-sanitizer')) or
        (address and _built_with('-fsanitize=address',
                                 '--with-address-sanitizer')) or
        (ub and _built_with('-fsanitize=undefined',
                            '--with-undefined-behavior-sanitizer')) or
        (thread and _built_with('-fsanitize=thread',
                                '--with-thread-sanitizer')) or
        (function and _built_with('-fsanitize=function'))
    )
def skip_if_sanitizer(reason=None, *, address=False, memory=False, ub=False, thread=False):
    """Decorator raising SkipTest if running with a sanitizer active."""
    active = check_sanitizer(address=address, memory=memory, ub=ub, thread=thread)
    return unittest.skipIf(active, reason or 'not working with sanitizers active')
# gh-89363: True if fork() can hang if Python is built with Address Sanitizer
# (ASAN): libasan race condition, dead lock in pthread_create().
# Evaluated once at import time from the build's compiler/configure flags.
HAVE_ASAN_FORK_BUG = check_sanitizer(address=True)
def set_sanitizer_env_var(env, option):
    """Append *option* to every sanitizer options variable in *env*.

    Each of ASAN/MSAN/UBSAN/TSAN_OPTIONS is created if absent; otherwise
    the option is appended using the ':' separator.
    """
    for name in ('ASAN_OPTIONS', 'MSAN_OPTIONS', 'UBSAN_OPTIONS', 'TSAN_OPTIONS'):
        if name in env:
            env[name] = f'{env[name]}:{option}'
        else:
            env[name] = option
def system_must_validate_cert(f):
    """Skip the test on TLS certificate validation failures."""
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        try:
            f(*args, **kwargs)
        except OSError as exc:
            if "CERTIFICATE_VERIFY_FAILED" not in str(exc):
                raise
            # Missing CA certificates on this system: skip, don't fail.
            raise unittest.SkipTest("system does not contain "
                                    "necessary certificates")
    return wrapper
# A constant likely larger than the underlying OS pipe buffer size, to
# make writes blocking.
# Windows limit seems to be around 512 B, and many Unix kernels have a
# 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure.
# (see issue #17835 for a discussion of this number).
# The "+ 1" nudges the size just past a power-of-two buffer boundary.
PIPE_MAX_SIZE = 4 * 1024 * 1024 + 1

# A constant likely larger than the underlying OS socket buffer size, to make
# writes blocking.
# The socket buffer sizes can usually be tuned system-wide (e.g. through sysctl
# on Linux), or on a per-socket basis (SO_SNDBUF/SO_RCVBUF). See issue #18643
# for a discussion of this number.
SOCK_MAX_SIZE = 16 * 1024 * 1024 + 1

# decorator for skipping tests on non-IEEE 754 platforms
requires_IEEE_754 = unittest.skipUnless(
    float.__getformat__("double").startswith("IEEE"),
    "test requires IEEE 754 doubles")
def requires_zlib(reason='requires zlib'):
    """Skip decorator for tests that need the zlib module."""
    try:
        import zlib
        available = True
    except ImportError:
        available = False
    return unittest.skipUnless(available, reason)
def requires_gzip(reason='requires gzip'):
    """Skip decorator for tests that need the gzip module."""
    try:
        import gzip
        available = True
    except ImportError:
        available = False
    return unittest.skipUnless(available, reason)
def requires_bz2(reason='requires bz2'):
    """Skip decorator for tests that need the bz2 module."""
    try:
        import bz2
        available = True
    except ImportError:
        available = False
    return unittest.skipUnless(available, reason)
def requires_lzma(reason='requires lzma'):
    """Skip decorator for tests that need the lzma module."""
    try:
        import lzma
        available = True
    except ImportError:
        available = False
    return unittest.skipUnless(available, reason)
def requires_zstd(reason='requires zstd'):
    """Skip decorator for tests that need compression.zstd (3.14+)."""
    try:
        from compression import zstd
        available = True
    except ImportError:
        available = False
    return unittest.skipUnless(available, reason)
def has_no_debug_ranges():
    """Return True if code objects are built without debug ranges.

    Raises SkipTest when the _testcapi extension module is unavailable.
    """
    try:
        import _testcapi
    except ImportError:
        # Fix: the skip message previously said "_testinternalcapi required",
        # but it is _testcapi that this helper actually imports and uses.
        raise unittest.SkipTest("_testcapi required")
    return not _testcapi.config_get('code_debug_ranges')
def requires_debug_ranges(reason='requires co_positions / debug_ranges'):
    """Skip decorator for tests that need co_positions / debug ranges."""
    try:
        skip = has_no_debug_ranges()
    except unittest.SkipTest as exc:
        # Probe itself unavailable: skip, reusing the probe's message.
        skip = True
        reason = exc.args[0] if exc.args else reason
    return unittest.skipIf(skip, reason)
# Platform identification flags.
MS_WINDOWS = (sys.platform == 'win32')

# Is not actually used in tests, but is kept for compatibility.
is_jython = sys.platform.startswith('java')

is_android = sys.platform == "android"
def skip_android_selinux(name):
    """Skip decorator for operations that Android's SELinux policy blocks."""
    reason = f"Android blocks {name} with SELinux"
    return unittest.skipIf(sys.platform == "android", reason)
# Path of a Bourne-compatible shell, or None on platforms without one.
if sys.platform not in {"win32", "vxworks", "ios", "tvos", "watchos"}:
    unix_shell = '/system/bin/sh' if is_android else '/bin/sh'
else:
    unix_shell = None

# wasm32-emscripten and -wasi are POSIX-like but do not
# have subprocess or fork support.
is_emscripten = sys.platform == "emscripten"
is_wasi = sys.platform == "wasi"

# Use is_wasm32 as a generic check for WebAssembly platforms.
is_wasm32 = is_emscripten or is_wasi
def skip_emscripten_stack_overflow():
    """Skip decorator for tests that exhaust the stack on Emscripten."""
    return unittest.skipIf(is_emscripten, "Exhausts stack on Emscripten")

def skip_wasi_stack_overflow():
    """Skip decorator for tests that exhaust the stack on WASI."""
    return unittest.skipIf(is_wasi, "Exhausts stack on WASI")

is_apple_mobile = sys.platform in {"ios", "tvos", "watchos"}
is_apple = is_apple_mobile or sys.platform == "darwin"

# True when os.fork() exists AND is safe/usable on this platform.
has_fork_support = hasattr(os, "fork") and not (
    # WASM and Apple mobile platforms do not support subprocesses.
    is_emscripten
    or is_wasi
    or is_apple_mobile
    # Although Android supports fork, it's unsafe to call it from Python because
    # all Android apps are multi-threaded.
    or is_android
)
def requires_fork():
    """Skip decorator for tests that need a working os.fork()."""
    return unittest.skipUnless(has_fork_support, "requires working os.fork()")

has_subprocess_support = not (
    # WASM and Apple mobile platforms do not support subprocesses.
    is_emscripten
    or is_wasi
    or is_apple_mobile
    # Although Android supports subprocesses, they're almost never useful in
    # practice (see PEP 738). And most of the tests that use them are calling
    # sys.executable, which won't work when Python is embedded in an Android app.
    or is_android
)

def requires_subprocess():
    """Used for subprocess, os.spawn calls, fd inheritance"""
    return unittest.skipUnless(has_subprocess_support, "requires subprocess support")

# Emscripten's socket emulation and WASI sockets have limitations.
has_socket_support = not (
    is_emscripten
    or is_wasi
)
def requires_working_socket(*, module=False):
    """Skip tests or modules that require working sockets

    Can be used as a function/class decorator or to skip an entire module.
    """
    msg = "requires socket support"
    if not module:
        return unittest.skipUnless(has_socket_support, msg)
    if not has_socket_support:
        raise unittest.SkipTest(msg)
@functools.cache
def has_remote_subprocess_debugging():
    """Check if we have permissions to debug subprocesses remotely.

    Returns True if we have permissions, False if we don't.

    Checks for:
    - Platform support (Linux, macOS, Windows only)
    - On Linux: process_vm_readv support
    - _remote_debugging module availability
    - Actual subprocess debugging permissions (e.g., macOS entitlements)

    Result is cached (functools.cache), so the probe runs at most once.
    """
    # Check platform support
    if sys.platform not in ("linux", "darwin", "win32"):
        return False

    try:
        import _remote_debugging
    except ImportError:
        return False

    # On Linux, check for process_vm_readv support
    if sys.platform == "linux":
        if not getattr(_remote_debugging, "PROCESS_VM_READV_SUPPORTED", False):
            return False

    # First check if we can read our own process
    if not _remote_debugging.is_python_process(os.getpid()):
        return False

    # Check subprocess access - debugging child processes may require
    # additional permissions depending on platform security settings
    import socket
    import subprocess

    # Create a socket for child to signal readiness
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(("127.0.0.1", 0))
    server.listen(1)
    port = server.getsockname()[1]

    # Child connects to signal it's ready, then waits for parent to close.
    # NOTE: the embedded source must stay unindented — it is exec'd via -c.
    child_code = f"""
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(("127.0.0.1", {port}))
s.recv(1) # Wait for parent to signal done
"""
    proc = subprocess.Popen(
        [sys.executable, "-c", child_code],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    try:
        server.settimeout(5.0)
        conn, _ = server.accept()
        # Child is ready, test if we can probe it
        result = _remote_debugging.is_python_process(proc.pid)
        # Check if subprocess is still alive after probing
        if proc.poll() is not None:
            return False
        conn.close() # Signal child to exit
        return result
    except (socket.timeout, OSError):
        return False
    finally:
        # Always reap the child and release the listening socket.
        server.close()
        proc.kill()
        proc.wait()
def requires_remote_subprocess_debugging():
    """Skip tests that require remote subprocess debugging permissions.

    This also implies subprocess support, so there is no need to combine
    it with @requires_subprocess().
    """
    if not has_subprocess_support:
        return unittest.skip("requires subprocess support")
    permitted = has_remote_subprocess_debugging()
    return unittest.skipUnless(
        permitted,
        "requires remote subprocess debugging permissions"
    )
# Does strftime() support glibc extension like '%4Y'?
has_strftime_extensions = False
if sys.platform != "win32":
    # bpo-47037: Windows debug builds crash with "Debug Assertion Failed"
    # Probe at import time: a supporting libc expands '%4Y', an unsupporting
    # one returns the format string unchanged.
    try:
        has_strftime_extensions = time.strftime("%4Y") != "%4Y"
    except ValueError:
        pass

# Define the URL of a dedicated HTTP server for the network tests.
# The URL must use clear-text HTTP: no redirection to encrypted HTTPS.
TEST_HTTP_URL = "http://www.pythontest.net"

# Set by libregrtest/main.py so we can skip tests that are not
# useful for PGO
PGO = False

# Set by libregrtest/main.py if we are running the extended (time consuming)
# PGO task. If this is True, PGO is also True.
PGO_EXTENDED = False

# TEST_DATA_DIR is used as a target download location for remote resources
TEST_DATA_DIR = os.path.join(TEST_HOME_DIR, "data")
def darwin_malloc_err_warning(test_name):
    """Print a banner telling the user that loud errors from macOS libc's
    malloc are expected for *test_name*.  No-op on other platforms."""
    if sys.platform != 'darwin':
        return

    import shutil
    banner = ' NOTICE '
    detail = (f'{test_name} may generate "malloc can\'t allocate region"\n'
              'warnings on macOS systems. This behavior is known. Do not\n'
              'report a bug unless tests are also failing.\n'
              'See https://github.com/python/cpython/issues/85100')
    width, _ = shutil.get_terminal_size()
    print(banner.center(width, '-'))
    print(detail)
    print('-' * width)
def findfile(filename, subdir=None):
    """Try to find a file on sys.path or in the test directory.  If it is not
    found the argument passed to the function is returned (this does not
    necessarily signal failure; could still be the legitimate path).
    Setting *subdir* indicates a relative path to use to find the file
    rather than looking directly in the path directories.
    """
    # Absolute paths are returned untouched.
    if os.path.isabs(filename):
        return filename
    if subdir is not None:
        filename = os.path.join(subdir, filename)
    for directory in [TEST_HOME_DIR] + sys.path:
        candidate = os.path.join(directory, filename)
        if os.path.exists(candidate):
            return candidate
    return filename
def sortdict(dict):
    "Like repr(dict), but in sorted order."
    # Render each key/value pair with repr(), ordered by key.
    rendered = ("%r: %r" % pair for pair in sorted(dict.items()))
    return "{%s}" % ", ".join(rendered)
def run_code(code: str, extra_names: dict[str, object] | None = None) -> dict[str, object]:
"""Run a piece of code after dedenting it, and return its global namespace."""
ns = {}
if extra_names:
ns.update(extra_names)
exec(textwrap.dedent(code), ns)
return ns
def check_syntax_error(testcase, statement, errtext='', *, lineno=None, offset=None):
    """Assert that compiling *statement* raises SyntaxError matching *errtext*,
    optionally checking the reported line number and offset."""
    with testcase.assertRaisesRegex(SyntaxError, errtext) as caught:
        compile(statement, '<test string>', 'exec')
    exc = caught.exception
    testcase.assertIsNotNone(exc.lineno)
    if lineno is not None:
        testcase.assertEqual(exc.lineno, lineno)
    testcase.assertIsNotNone(exc.offset)
    if offset is not None:
        testcase.assertEqual(exc.offset, offset)
def open_urlresource(url, *args, **kw):
    """Open the test resource at *url*, downloading it into TEST_DATA_DIR
    on first use.  Extra args/kwargs are passed to open(); the optional
    'check' kwarg is a predicate validating an already-open file."""
    import urllib.request, urllib.parse
    from .os_helper import unlink
    try:
        import gzip
    except ImportError:
        gzip = None

    check = kw.pop('check', None)

    filename = urllib.parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!

    fn = os.path.join(TEST_DATA_DIR, filename)

    def check_valid_file(fn):
        # Returns an open file when valid, None (implicitly) when not.
        f = open(fn, *args, **kw)
        if check is None:
            return f
        elif check(f):
            f.seek(0)
            return f
        f.close()

    # Reuse a previously downloaded copy if it passes validation.
    if os.path.exists(fn):
        f = check_valid_file(fn)
        if f is not None:
            return f
        unlink(fn)

    # Verify the requirement before downloading the file
    requires('urlfetch')

    if verbose:
        print('\tfetching %s ...' % url, file=get_original_stdout())
    opener = urllib.request.build_opener()
    if gzip:
        opener.addheaders.append(('Accept-Encoding', 'gzip'))
    f = opener.open(url, timeout=INTERNET_TIMEOUT)
    if gzip and f.headers.get('Content-Encoding') == 'gzip':
        f = gzip.GzipFile(fileobj=f)
    try:
        # Stream the body to disk in chunks.
        with open(fn, "wb") as out:
            s = f.read()
            while s:
                out.write(s)
                s = f.read()
    finally:
        f.close()

    # Validate the freshly downloaded copy.
    f = check_valid_file(fn)
    if f is not None:
        return f
    raise TestFailed('invalid resource %r' % fn)
@contextlib.contextmanager
def captured_output(stream_name):
    """Context manager used by captured_stdout/stdin/stderr that temporarily
    replaces the sys stream *stream_name* with a StringIO, yielding it."""
    import io
    saved = getattr(sys, stream_name)
    setattr(sys, stream_name, io.StringIO())
    try:
        yield getattr(sys, stream_name)
    finally:
        setattr(sys, stream_name, saved)
def captured_stdout():
    """Capture sys.stdout as a StringIO:

       with captured_stdout() as stdout:
           print("hello")
       self.assertEqual(stdout.getvalue(), "hello\\n")
    """
    return captured_output("stdout")
def captured_stderr():
    """Capture sys.stderr as a StringIO:

       with captured_stderr() as stderr:
           print("hello", file=sys.stderr)
       self.assertEqual(stderr.getvalue(), "hello\\n")
    """
    return captured_output("stderr")
def captured_stdin():
    """Capture sys.stdin as a StringIO:

       with captured_stdin() as stdin:
           stdin.write('hello\\n')
           stdin.seek(0)
           # call test code that consumes from sys.stdin
           captured = input()
       self.assertEqual(captured, "hello")
    """
    return captured_output("stdin")
def gc_collect():
    """Force as many objects as possible to be collected.

    In non-CPython implementations of Python, this is needed because timely
    deallocation is not guaranteed by the garbage collector.  (Even in CPython
    this can be the case in case of reference cycles.)  This means that __del__
    methods may be called later than expected and weakrefs may remain alive for
    longer than expected.  This function tries its best to force all garbage
    objects to disappear.
    """
    import gc
    # Several passes are needed to clear cascading finalizer garbage.
    for _ in range(3):
        gc.collect()
@contextlib.contextmanager
def disable_gc():
    """Context manager that disables the cyclic garbage collector for the
    duration of the block, restoring the previous state afterwards."""
    import gc
    was_enabled = gc.isenabled()
    gc.disable()
    try:
        yield
    finally:
        if was_enabled:
            gc.enable()
@contextlib.contextmanager
def gc_threshold(*args):
    """Context manager temporarily setting the GC collection thresholds."""
    import gc
    saved = gc.get_threshold()
    gc.set_threshold(*args)
    try:
        yield
    finally:
        gc.set_threshold(*saved)
def python_is_optimized():
    """Find if Python was built with optimizations."""
    cflags = sysconfig.get_config_var('PY_CFLAGS') or ''
    # The last -O flag on the command line wins.
    last_opt = ""
    for token in cflags.split():
        if token.startswith('-O'):
            last_opt = token
    # gcc treats -Og as "no optimization" for this purpose.
    if sysconfig.get_config_var("CC") == "gcc":
        unoptimized = ('', '-O0', '-Og')
    else:
        unoptimized = ('', '-O0')
    return last_opt not in unoptimized
def check_cflags_pgo():
    """Return True if Python was built with ./configure --enable-optimizations,
    i.e. with Profile Guided Optimization (PGO)."""
    cflags_nodist = sysconfig.get_config_var('PY_CFLAGS_NODIST') or ''
    pgo_options = [
        # GCC
        '-fprofile-use',
        # clang: -fprofile-instr-use=code.profclangd
        '-fprofile-instr-use',
        # ICC
        "-prof-use",
    ]
    extra_flag = sysconfig.get_config_var('PGO_PROF_USE_FLAG')
    if extra_flag:
        pgo_options.append(extra_flag)
    return any(flag in cflags_nodist for flag in pgo_options)
def check_bolt_optimized():
    """Return True if the build was configured with --enable-bolt."""
    # Always return false, if the platform is WASI,
    # because BOLT optimization does not support WASM binary.
    if is_wasi:
        return False
    return '--enable-bolt' in (sysconfig.get_config_var('CONFIG_ARGS') or '')
# True on free-threaded builds (configured with --disable-gil).
Py_GIL_DISABLED = bool(sysconfig.get_config_var('Py_GIL_DISABLED'))
def requires_gil_enabled(msg="needs the GIL enabled"):
    """Skip the decorated test when running on a free-threaded (no-GIL) build."""
    return unittest.skipIf(Py_GIL_DISABLED, msg)
def expected_failure_if_gil_disabled():
    """Mark the decorated test as an expected failure when the GIL is disabled;
    otherwise return the test unchanged."""
    if not Py_GIL_DISABLED:
        return lambda test_case: test_case
    return unittest.expectedFailure
# struct format strings describing the object header layout used by
# calcobjsize()/calcvobjsize(); the free-threaded build has a larger header.
if Py_GIL_DISABLED:
    _header = 'PHBBInP'
else:
    _header = 'nP'
_align = '0n'
# Variable-size objects carry an extra ob_size-like field.
_vheader = _header + 'n'
def calcobjsize(fmt):
    """Return the expected size of a fixed-size object whose payload has
    struct format *fmt*, including the object header."""
    import struct
    layout = _header + fmt + _align
    return struct.calcsize(layout)
def calcvobjsize(fmt):
    """Return the expected size of a variable-size object whose payload has
    struct format *fmt*, including the variable-size object header."""
    import struct
    layout = _vheader + fmt + _align
    return struct.calcsize(layout)
# Bit masks for type flags, compared against <type>.__flags__ below
# (see check_sizeof); values mirror the tp_flags bits.
_TPFLAGS_STATIC_BUILTIN = 1<<1
_TPFLAGS_DISALLOW_INSTANTIATION = 1<<7
_TPFLAGS_IMMUTABLETYPE = 1<<8
_TPFLAGS_HEAPTYPE = 1<<9
_TPFLAGS_BASETYPE = 1<<10
_TPFLAGS_READY = 1<<12
_TPFLAGS_READYING = 1<<13
_TPFLAGS_HAVE_GC = 1<<14
_TPFLAGS_BASE_EXC_SUBCLASS = 1<<30
_TPFLAGS_TYPE_SUBCLASS = 1<<31
def check_sizeof(test, o, size):
    """Assert that sys.getsizeof(o) equals *size*, adding the GC header
    size for GC-tracked objects.  Skips when _testinternalcapi is missing."""
    try:
        import _testinternalcapi
    except ImportError:
        raise unittest.SkipTest("_testinternalcapi required")
    actual = sys.getsizeof(o)
    # add GC header size
    if type(o) == type:
        gc_tracked = o.__flags__ & _TPFLAGS_HEAPTYPE
    else:
        gc_tracked = type(o).__flags__ & _TPFLAGS_HAVE_GC
    if gc_tracked:
        size += _testinternalcapi.SIZEOF_PYGC_HEAD
    test.assertEqual(actual, size,
                     'wrong size for %s: got %d, expected %d'
                     % (type(o), actual, size))
def subTests(arg_names, arg_values, /, *, _do_cleanups=False):
    """Run a test method repeatedly as subtests, once per parameter set.
    """
    unpack_single = False
    if isinstance(arg_names, str):
        arg_names = arg_names.replace(',', ' ').split()
        if len(arg_names) == 1:
            unpack_single = True
    arg_values = tuple(arg_values)

    def decorator(func):
        if isinstance(func, type):
            raise TypeError('subTests() can only decorate methods, not classes')

        @functools.wraps(func)
        def wrapper(self, /, *args, **kwargs):
            for values in arg_values:
                if unpack_single:
                    values = (values,)
                params = dict(zip(arg_names, values))
                with self.subTest(**params):
                    func(self, *args, **kwargs, **params)
                if _do_cleanups:
                    self.doCleanups()
        return wrapper
    return decorator
#=======================================================================
# Decorator/context manager for running a code in a different locale,
# correctly resetting it afterwards.
@contextlib.contextmanager
def run_with_locale(catstr, *locales):
    """Context manager running the body in the first available locale from
    *locales* for category *catstr* (e.g. 'LC_ALL'), restoring the original
    locale afterwards.  An empty string '' in *locales* means "any locale is
    acceptable"; without it, failure to switch raises SkipTest."""
    try:
        import locale
        category = getattr(locale, catstr)
        orig_locale = locale.setlocale(category)
    except AttributeError:
        # if the test author gives us an invalid category string
        raise
    except Exception:
        # cannot retrieve original locale, so do nothing
        locale = orig_locale = None
        if '' not in locales:
            raise unittest.SkipTest('no locales')
    else:
        # Try each requested locale until one can be activated.
        for loc in locales:
            try:
                locale.setlocale(category, loc)
                break
            except locale.Error:
                pass
        else:
            # None of the requested locales could be set.
            if '' not in locales:
                raise unittest.SkipTest(f'no locales {locales}')
    try:
        yield
    finally:
        # Only restore when both the module and the original were captured.
        if locale and orig_locale:
            locale.setlocale(category, orig_locale)
#=======================================================================
# Decorator for running a function in multiple locales (if they are
# available) and resetting the original locale afterwards.
def run_with_locales(catstr, *locales):
    """Decorator running the test once per available locale in *locales*
    (as subtests), restoring the original locale afterwards.  An empty
    string '' in *locales* enables a dry run with the current locale when
    no requested locale is available."""
    def deco(func):
        @functools.wraps(func)
        def wrapper(self, /, *args, **kwargs):
            # dry_run: fall back to the current locale if nothing else ran.
            dry_run = '' in locales
            try:
                import locale
                category = getattr(locale, catstr)
                orig_locale = locale.setlocale(category)
            except AttributeError:
                # if the test author gives us an invalid category string
                raise
            except Exception:
                # cannot retrieve original locale, so do nothing
                pass
            else:
                try:
                    for loc in locales:
                        with self.subTest(locale=loc):
                            try:
                                locale.setlocale(category, loc)
                            except locale.Error:
                                self.skipTest(f'no locale {loc!r}')
                            else:
                                # At least one real locale ran the test.
                                dry_run = False
                                func(self, *args, **kwargs)
                finally:
                    locale.setlocale(category, orig_locale)
            if dry_run:
                # no locales available, so just run the test
                # with the current locale
                with self.subTest(locale=None):
                    func(self, *args, **kwargs)
        return wrapper
    return deco
#=======================================================================
# Decorator for running a function in a specific timezone, correctly
# resetting it afterwards.
def run_with_tz(tz):
    """Decorator running the wrapped function with the TZ environment
    variable set to *tz* (and time.tzset() applied).

    The previous TZ value is restored — and tzset() re-applied — even if
    the function raises.  Raises unittest.SkipTest when time.tzset() is
    unavailable (e.g. on Windows).
    """
    def decorator(func):
        # Use functools.wraps instead of hand-copying __name__/__doc__ so
        # __qualname__, __module__, __dict__ and __wrapped__ are preserved,
        # consistently with the other decorators in this module.
        @functools.wraps(func)
        def inner(*args, **kwds):
            try:
                tzset = time.tzset
            except AttributeError:
                raise unittest.SkipTest("tzset required")
            # None means TZ was not set at all (delete it on restore).
            orig_tz = os.environ.get('TZ')
            os.environ['TZ'] = tz
            tzset()

            # now run the function, resetting the tz on exceptions
            try:
                return func(*args, **kwds)
            finally:
                if orig_tz is None:
                    del os.environ['TZ']
                else:
                    os.environ['TZ'] = orig_tz
                time.tzset()

        return inner
    return decorator
#=======================================================================
# Big-memory-test support. Separate from 'resources' because memory use
# should be configurable.
# Some handy shorthands. Note that these are used for byte-limits as well
# as size-limits, in the various bigmem tests
_1M = 1024*1024          # one mebibyte
_1G = 1024 * _1M         # one gibibyte
_2G = 2 * _1G
_4G = 4 * _1G

# Largest value representable in a Py_ssize_t; caps usable memory limits.
MAX_Py_ssize_t = sys.maxsize
def _parse_memlimit(limit: str) -> int:
    """Parse a memory-limit string such as '2g' or '512 Mb' into bytes.

    Raises ValueError for an unrecognized format.
    """
    units = {
        'k': 1024,
        'm': _1M,
        'g': _1G,
        't': 1024*_1G,
    }
    match = re.match(r'(\d+(?:\.\d+)?) (K|M|G|T)b?$', limit,
                     re.IGNORECASE | re.VERBOSE)
    if match is None:
        raise ValueError(f'Invalid memory limit: {limit!r}')
    amount, unit = match.group(1), match.group(2).lower()
    return int(float(amount) * units[unit])
def set_memlimit(limit: str) -> None:
    """Set the bigmem test limits (max_memuse / real_max_memuse) from a
    string like '4g'.  Raises ValueError for limits below ~2 GiB."""
    global max_memuse
    global real_max_memuse
    parsed = _parse_memlimit(limit)
    if parsed < _2G - 1:
        raise ValueError(f'Memory limit {limit!r} too low to be useful')
    real_max_memuse = parsed
    # Clamp to what a Py_ssize_t can address.
    max_memuse = min(parsed, MAX_Py_ssize_t)
class _MemoryWatchdog:
    """An object which periodically watches the process' memory consumption
    and prints it out.
    """

    def __init__(self):
        # Linux procfs statm entry for this process; start() degrades
        # gracefully when it cannot be opened (non-Linux platforms).
        self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
        self.started = False

    def start(self):
        """Launch the watchdog subprocess reading our statm via stdin."""
        try:
            f = open(self.procfile, 'r')
        except OSError as e:
            logging.getLogger(__name__).warning('/proc not available for stats: %s', e, exc_info=e)
            sys.stderr.flush()
            return

        import subprocess
        with f:
            watchdog_script = findfile("memory_watchdog.py")
            # The child inherits the statm file as its stdin.
            self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script],
                                                 stdin=f,
                                                 stderr=subprocess.DEVNULL)
        self.started = True

    def stop(self):
        """Terminate the watchdog subprocess, if it was started."""
        if self.started:
            self.mem_watchdog.terminate()
            self.mem_watchdog.wait()
def bigmemtest(size, memuse, dry_run=True):
    """Decorator for bigmem tests.

    'size' is a requested size for the test (in arbitrary, test-interpreted
    units.) 'memuse' is the number of bytes per unit for the test, or a good
    estimate of it. For example, a test that needs two byte buffers, of 4 GiB
    each, could be decorated with @bigmemtest(size=_4G, memuse=2).

    The 'size' argument is normally passed to the decorated test method as an
    extra argument. If 'dry_run' is true, the value passed to the test method
    may be less than the requested value. If 'dry_run' is false, it means the
    test doesn't support dummy runs when -M is not specified.
    """
    def decorator(f):
        def wrapper(self):
            size = wrapper.size
            memuse = wrapper.memuse
            if not real_max_memuse:
                # No -M limit configured: run a small dry run instead.
                maxsize = 5147
            else:
                maxsize = size

            if ((real_max_memuse or not dry_run)
                and real_max_memuse < maxsize * memuse):
                raise unittest.SkipTest(
                    "not enough memory: %.1fG minimum needed"
                    % (size * memuse / (1024 ** 3)))

            if real_max_memuse and verbose:
                print()
                print(" ... expected peak memory use: {peak:.1f}G"
                      .format(peak=size * memuse / (1024 ** 3)))
                # Only monitor memory on real (non-dry) runs with -v.
                watchdog = _MemoryWatchdog()
                watchdog.start()
            else:
                watchdog = None

            try:
                return f(self, maxsize)
            finally:
                if watchdog:
                    watchdog.stop()

        # Expose the requested parameters on the wrapper so callers
        # (and reruns) can inspect or adjust them.
        wrapper.size = size
        wrapper.memuse = memuse
        return wrapper
    return decorator
def bigaddrspacetest(f):
    """Decorator for tests that fill the address space."""
    def wrapper(self):
        # Enough configured memory: run the test as-is.
        if max_memuse >= MAX_Py_ssize_t:
            return f(self)
        if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31:
            raise unittest.SkipTest(
                "not enough memory: try a 32-bit build instead")
        raise unittest.SkipTest(
            "not enough memory: %.1fG minimum needed"
            % (MAX_Py_ssize_t / (1024 ** 3)))
    return wrapper
#=======================================================================
# unittest integration.
def _id(obj):
    """Identity decorator: return *obj* unchanged (the 'no skip' case)."""
    return obj
def requires_resource(resource):
    """Skip decorator unless the named regrtest resource is enabled."""
    if resource == 'gui' and not _is_gui_available():
        return unittest.skip(_is_gui_available.reason)
    if is_resource_enabled(resource):
        return _id
    return unittest.skip("resource {0!r} is not enabled".format(resource))
def cpython_only(test):
    """
    Decorator for tests only applicable on CPython.
    """
    decorate = impl_detail(cpython=True)
    return decorate(test)
def impl_detail(msg=None, **guards):
    """Skip decorator for implementation-detail tests; *guards* are
    interpreted as in check_impl_detail()."""
    if check_impl_detail(**guards):
        return _id
    if msg is None:
        guardnames, default = _parse_guards(guards)
        template = ("implementation detail not available on {0}" if default
                    else "implementation detail specific to {0}")
        msg = template.format(' or '.join(sorted(guardnames)))
    return unittest.skip(msg)
def _parse_guards(guards):
    # Returns a tuple ({platform_name: run_me}, default_value)
    if not guards:
        return ({'cpython': True}, False)
    values = list(guards.values())
    is_true = values[0]
    assert values == [is_true] * len(values)   # all True or all False
    return (guards, not is_true)
# Use the following check to guard CPython's implementation-specific tests --
# or to run them only on the implementation(s) guarded by the arguments.
def check_impl_detail(**guards):
    """This function returns True or False depending on the host platform.
    Examples:
       if check_impl_detail():               # only on CPython (default)
       if check_impl_detail(jython=True):    # only on Jython
       if check_impl_detail(cpython=False):  # everywhere except on CPython
    """
    parsed, default = _parse_guards(guards)
    return parsed.get(sys.implementation.name, default)
def no_tracing(func):
    """Decorator to temporarily turn off tracing for the duration of a test."""
    # Layer 1: disable any sys.settrace() trace function while running.
    trace_wrapper = func
    if hasattr(sys, 'gettrace'):
        @functools.wraps(func)
        def trace_wrapper(*args, **kwargs):
            original_trace = sys.gettrace()
            try:
                sys.settrace(None)
                return func(*args, **kwargs)
            finally:
                sys.settrace(original_trace)

    # Layer 2: also silence sys.monitoring coverage events when the test
    # suite is running under its coverage tool.
    coverage_wrapper = trace_wrapper
    if 'test.cov' in sys.modules:  # -Xpresite=test.cov used
        cov = sys.monitoring.COVERAGE_ID
        @functools.wraps(func)
        def coverage_wrapper(*args, **kwargs):
            original_events = sys.monitoring.get_events(cov)
            try:
                sys.monitoring.set_events(cov, 0)
                return trace_wrapper(*args, **kwargs)
            finally:
                sys.monitoring.set_events(cov, original_events)

    return coverage_wrapper
def no_rerun(reason):
    """Skip rerunning for a particular test.

    WARNING: Use this decorator with care; skipping rerunning makes it
    impossible to find reference leaks. Provide a clear reason for skipping the
    test using the 'reason' parameter.
    """
    def deco(func):
        assert not isinstance(func, type), func
        has_run = False
        def wrapper(self):
            nonlocal has_run
            if has_run:
                self.skipTest(reason)
            func(self)
            has_run = True
        return wrapper
    return deco
def refcount_test(test):
    """Decorator for tests which involve reference counting.

    The test only runs on CPython, and any trace function is unset during
    the test to avoid unexpected refcounts caused by the trace function.
    """
    return no_tracing(cpython_only(test))
def requires_limited_api(test):
    """Skip *test* unless the _testcapi and _testlimitedcapi modules exist."""
    try:
        import _testcapi  # noqa: F401
        import _testlimitedcapi  # noqa: F401
    except ImportError:
        skip = unittest.skip('needs _testcapi and _testlimitedcapi modules')
        return skip(test)
    return test
# Windows build doesn't support --disable-test-modules feature, so there's no
# 'TEST_MODULES' var in config; default to 'yes' in that case.
TEST_MODULES_ENABLED = (sysconfig.get_config_var('TEST_MODULES') or 'yes') == 'yes'
def requires_specialization(test):
    """Skip *test* unless adaptive bytecode specialization is enabled."""
    skip_unless = unittest.skipUnless(
        _opcode.ENABLE_SPECIALIZATION, "requires specialization")
    return skip_unless(test)
def reset_code(f: types.FunctionType) -> types.FunctionType:
    """Clear all specializations, local instrumentation, and JIT code for the given function."""
    # Rebuilding the code object discards per-code runtime state.
    fresh_code = f.__code__.replace()
    f.__code__ = fresh_code
    return f
# True when running under GitHub Actions CI.
on_github_actions = "GITHUB_ACTIONS" in os.environ
#=======================================================================
# Check for the presence of docstrings.
# Rather than trying to enumerate all the cases where docstrings may be
# disabled, we just check for that directly
def _check_docstrings():
    """Probe function whose docstring tells us if docstrings are enabled"""
# C-level docstrings are absent on non-Windows CPython builds configured
# without WITH_DOC_STRINGS.
MISSING_C_DOCSTRINGS = (check_impl_detail() and
                        sys.platform != 'win32' and
                        not sysconfig.get_config_var('WITH_DOC_STRINGS'))

# Python-level docstrings are stripped when running under -OO.
HAVE_PY_DOCSTRINGS = _check_docstrings.__doc__ is not None
HAVE_DOCSTRINGS = (HAVE_PY_DOCSTRINGS and not MISSING_C_DOCSTRINGS)

requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS,
                                          "test requires docstrings")
#=======================================================================
# Support for saving and restoring the imported modules.
def flush_std_streams():
    """Flush sys.stdout and sys.stderr, skipping streams that are None."""
    for stream in (sys.stdout, sys.stderr):
        if stream is not None:
            stream.flush()
def print_warning(msg):
    """Write each line of *msg* to the original stderr, prefixed with
    'Warning -- ', so it survives stream capture by tests."""
    # bpo-45410: Explicitly flush stdout to keep logs in order
    flush_std_streams()
    out = print_warning.orig_stderr
    for line in msg.splitlines():
        print(f"Warning -- {line}", file=out)
    out.flush()

# bpo-39983: Store the original sys.stderr at Python startup to be able to
# log warnings even if sys.stderr is captured temporarily by a test.
print_warning.orig_stderr = sys.stderr
# Flag used by saved_test_environment of test.libregrtest.save_env,
# to check if a test modified the environment. The flag should be set to False
# before running a new test.
#
# For example, threading_helper.threading_cleanup() sets the flag if the
# function fails to cleanup threads.
environment_altered = False
def reap_children():
    """Use this function at the end of test_main() whenever sub-processes
    are started.  This will help ensure that no extra children (zombies)
    stick around to hog resources and create problems when looking
    for refleaks.
    """
    global environment_altered

    # Need os.waitpid(-1, os.WNOHANG): Windows is not supported
    if not (hasattr(os, 'waitpid') and hasattr(os, 'WNOHANG')):
        return
    if not has_subprocess_support:
        return

    # Reap all our dead child processes so we don't leave zombies around.
    # These hog resources and might be causing some of the buildbots to die.
    while True:
        try:
            # Read the exit status of any child process which already completed
            pid, status = os.waitpid(-1, os.WNOHANG)
        except OSError:
            return
        if pid == 0:
            return
        print_warning(f"reap_children() reaped child process {pid}")
        environment_altered = True
@contextlib.contextmanager
def swap_attr(obj, attr, new_val):
    """Temporary swap out an attribute with a new object.

    Usage:
        with swap_attr(obj, "attr", 5):
            ...

    This will set obj.attr to 5 for the duration of the with: block,
    restoring the old value at the end of the block.  If `attr` doesn't
    exist on `obj`, it will be created and then deleted at the end of the
    block.

    The old value (or None if it doesn't exist) will be assigned to the
    target of the "as" clause, if there is one.
    """
    if not hasattr(obj, attr):
        # Attribute did not exist: create it, then remove it afterwards.
        setattr(obj, attr, new_val)
        try:
            yield
        finally:
            if hasattr(obj, attr):
                delattr(obj, attr)
    else:
        saved = getattr(obj, attr)
        setattr(obj, attr, new_val)
        try:
            yield saved
        finally:
            setattr(obj, attr, saved)
@contextlib.contextmanager
def swap_item(obj, item, new_val):
    """Temporary swap out an item with a new object.

    Usage:
        with swap_item(obj, "item", 5):
            ...

    This will set obj["item"] to 5 for the duration of the with: block,
    restoring the old value at the end of the block.  If `item` doesn't
    exist on `obj`, it will be created and then deleted at the end of the
    block.

    The old value (or None if it doesn't exist) will be assigned to the
    target of the "as" clause, if there is one.
    """
    if item not in obj:
        # Item did not exist: create it, then remove it afterwards.
        obj[item] = new_val
        try:
            yield
        finally:
            if item in obj:
                del obj[item]
    else:
        saved = obj[item]
        obj[item] = new_val
        try:
            yield saved
        finally:
            obj[item] = saved
def args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    settings in sys.flags and sys.warnoptions."""
    import subprocess
    # subprocess already knows how to serialize the interpreter flags.
    return subprocess._args_from_interpreter_flags()
def optim_args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    optimization settings in sys.flags."""
    import subprocess
    # Only the optimization-related flags, unlike args_from_interpreter_flags.
    return subprocess._optim_args_from_interpreter_flags()
class Matcher(object):

    # Keys whose string values are matched by substring instead of equality.
    _partial_matches = ('msg', 'message')

    def matches(self, d, **kwargs):
        """
        Try to match a single dict with the supplied arguments.

        Keys whose values are strings and which are in self._partial_matches
        will be checked for partial (i.e. substring) matches. You can extend
        this scheme to (for example) do regular expression matching, etc.
        """
        return all(self.match_value(k, d.get(k), v)
                   for k, v in kwargs.items())

    def match_value(self, k, dv, v):
        """
        Try to match a single stored value (dv) with a supplied value (v).
        """
        if type(v) != type(dv):
            return False
        if type(dv) is str and k in self._partial_matches:
            return v in dv
        return v == dv
# Cached result of the UCRT bug probe (None = not probed yet).
_buggy_ucrt = None

def skip_if_buggy_ucrt_strfptime(test):
    """
    Skip decorator for tests that use buggy strptime/strftime

    If the UCRT bugs are present time.localtime().tm_zone will be
    an empty string, otherwise we assume the UCRT bugs are fixed

    See bpo-37552 [Windows] strptime/strftime return invalid
    results with UCRT version 17763.615
    """
    import locale
    global _buggy_ucrt
    if _buggy_ucrt is None:
        _buggy_ucrt = (sys.platform == 'win32'
                       and locale.getencoding() == 'cp65001'
                       and time.localtime().tm_zone == '')
    if _buggy_ucrt:
        return unittest.skip("buggy MSVC UCRT strptime/strftime")(test)
    return test
class PythonSymlink:
    """Creates a symlink for the current Python executable"""
    def __init__(self, link=None):
        from .os_helper import TESTFN

        # Where the symlink will be created (defaults to TESTFN).
        self.link = link or os.path.abspath(TESTFN)
        # All links created, so __exit__ can remove them.
        self._linked = []
        self.real = os.path.realpath(sys.executable)
        # Extra (source, destination) pairs to link alongside (Windows DLLs).
        self._also_link = []
        # Environment for call_link(); only populated on Windows.
        self._env = None

        self._platform_specific()

    # _platform_specific is chosen at class-definition time by platform.
    if sys.platform == "win32":
        def _platform_specific(self):
            import glob
            import _winapi

            if os.path.lexists(self.real) and not os.path.exists(self.real):
                # App symlink appears to not exist, but we want the
                # real executable here anyway
                self.real = _winapi.GetModuleFileName(0)

            # The interpreter DLL (and vcruntime DLLs) must sit next to the
            # linked executable for it to start.
            dll = _winapi.GetModuleFileName(sys.dllhandle)
            src_dir = os.path.dirname(dll)
            dest_dir = os.path.dirname(self.link)
            self._also_link.append((
                dll,
                os.path.join(dest_dir, os.path.basename(dll))
            ))
            for runtime in glob.glob(os.path.join(glob.escape(src_dir), "vcruntime*.dll")):
                self._also_link.append((
                    runtime,
                    os.path.join(dest_dir, os.path.basename(runtime))
                ))

            self._env = {k.upper(): os.getenv(k) for k in os.environ}
            self._env["PYTHONHOME"] = os.path.dirname(self.real)
            if sysconfig.is_python_build():
                self._env["PYTHONPATH"] = STDLIB_DIR
    else:
        def _platform_specific(self):
            # Nothing extra needed on POSIX platforms.
            pass

    def __enter__(self):
        os.symlink(self.real, self.link)
        self._linked.append(self.link)
        for real, link in self._also_link:
            os.symlink(real, link)
            self._linked.append(link)
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # Best-effort cleanup of every link we created.
        for link in self._linked:
            try:
                os.remove(link)
            except IOError as ex:
                if verbose:
                    print("failed to clean up {}: {}".format(link, ex))

    def _call(self, python, args, env, returncode):
        """Run *python* with *args*; raise unless it exits with *returncode*.

        Returns the (stdout, stderr) bytes pair.
        """
        import subprocess
        cmd = [python, *args]
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, env=env)
        r = p.communicate()
        if p.returncode != returncode:
            if verbose:
                print(repr(r[0]))
                print(repr(r[1]), file=sys.stderr)
            raise RuntimeError(
                'unexpected return code: {0} (0x{0:08X})'.format(p.returncode))
        return r

    def call_real(self, *args, returncode=0):
        # Run the real executable with the inherited environment.
        return self._call(self.real, args, None, returncode)

    def call_link(self, *args, returncode=0):
        # Run via the symlink, with the platform-specific environment.
        return self._call(self.link, args, self._env, returncode)
def skip_if_pgo_task(test):
    """Skip decorator for tests not run in (non-extended) PGO task"""
    if not PGO or PGO_EXTENDED:
        return test
    return unittest.skip("Not run for (non-extended) PGO task")(test)
def skip_if_unlimited_stack_size(test):
    """Skip decorator for tests not run when an unlimited stack size is configured.

    Tests using support.infinite_recursion([...]) may otherwise run into
    an infinite loop, running until the memory on the system is filled and
    crashing due to OOM.

    See https://github.com/python/cpython/issues/143460.
    """
    if is_emscripten or is_wasi or os.name == "nt":
        return test
    import resource
    soft, hard = resource.getrlimit(resource.RLIMIT_STACK)
    unlimited = (soft == hard and soft in (-1, 0xFFFF_FFFF_FFFF_FFFF))
    return unittest.skipIf(unlimited, "Not run due to unlimited stack size")(test)
def detect_api_mismatch(ref_api, other_api, *, ignore=()):
    """Returns the set of items in ref_api not in other_api, except for a
    defined list of items to be ignored in this check.

    By default this skips private attributes beginning with '_' but
    includes all magic methods, i.e. those starting and ending in '__'.
    """
    missing = set(dir(ref_api)) - set(dir(other_api)) - set(ignore)
    return {name for name in missing
            if not name.startswith('_') or name.endswith('__')}
def check__all__(test_case, module, name_of_module=None, extra=(),
                 not_exported=()):
    """Assert that the __all__ variable of 'module' contains all public names.

    The module's public names (its API) are detected automatically based on
    whether they match the public name convention and were defined in
    'module'.

    The 'name_of_module' argument can specify (as a string or tuple thereof)
    what module(s) an API could be defined in order to be detected as a
    public API. One case for this is when 'module' imports part of its public
    API from other modules, possibly a C backend (like 'csv' and its '_csv').

    The 'extra' argument can be a set of names that wouldn't otherwise be
    automatically detected as "public", like objects without a proper
    '__module__' attribute. If provided, it will be added to the
    automatically detected ones.

    The 'not_exported' argument can be a set of names that must not be treated
    as part of the public API even though their names indicate otherwise.

    Usage:
        import bar
        import foo
        import unittest
        from test import support

        class MiscTestCase(unittest.TestCase):
            def test__all__(self):
                support.check__all__(self, foo)

        class OtherTestCase(unittest.TestCase):
            def test__all__(self):
                extra = {'BAR_CONST', 'FOO_CONST'}
                not_exported = {'baz'}  # Undocumented name.
                # bar imports part of its API from _bar.
                support.check__all__(self, bar, ('bar', '_bar'),
                                     extra=extra, not_exported=not_exported)
    """
    if name_of_module is None:
        name_of_module = (module.__name__, )
    elif isinstance(name_of_module, str):
        name_of_module = (name_of_module, )

    def is_public(name):
        # Underscore-prefixed and explicitly excluded names are never public.
        if name.startswith('_') or name in not_exported:
            return False
        obj = getattr(module, name)
        if getattr(obj, '__module__', None) in name_of_module:
            return True
        # Objects without __module__ count too, unless they are modules.
        return (not hasattr(obj, '__module__')
                and not isinstance(obj, types.ModuleType))

    expected = set(extra)
    expected.update(name for name in dir(module) if is_public(name))
    test_case.assertCountEqual(module.__all__, expected)
def suppress_msvcrt_asserts(verbose=False):
    """Route MSVC CRT errors away from GUI dialog boxes (no-op off Windows)."""
    try:
        import msvcrt
    except ImportError:
        return

    msvcrt.SetErrorMode(msvcrt.SEM_FAILCRITICALERRORS
                        | msvcrt.SEM_NOALIGNMENTFAULTEXCEPT
                        | msvcrt.SEM_NOGPFAULTERRORBOX
                        | msvcrt.SEM_NOOPENFILEERRORBOX)

    # CrtSetReportMode() is only available in debug build
    if not hasattr(msvcrt, 'CrtSetReportMode'):
        return
    for report_type in (msvcrt.CRT_WARN, msvcrt.CRT_ERROR, msvcrt.CRT_ASSERT):
        if verbose:
            msvcrt.CrtSetReportMode(report_type, msvcrt.CRTDBG_MODE_FILE)
            msvcrt.CrtSetReportFile(report_type, msvcrt.CRTDBG_FILE_STDERR)
        else:
            msvcrt.CrtSetReportMode(report_type, 0)
class SuppressCrashReport:
    """Try to prevent a crash report from popping up.

    On Windows, don't display the Windows Error Reporting dialog.  On UNIX,
    disable the creation of coredump file.
    """
    # Saved ErrorMode (Windows) or RLIMIT_CORE limits (UNIX); None means
    # nothing was changed and __exit__ should do nothing.
    old_value = None
    # Saved CRT report modes/files (Windows debug builds only).
    old_modes = None

    def __enter__(self):
        """On Windows, disable Windows Error Reporting dialogs using
        SetErrorMode() and CrtSetReportMode().

        On UNIX, try to save the previous core file size limit, then set
        soft limit to 0.
        """
        if sys.platform.startswith('win'):
            # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621.aspx
            try:
                import msvcrt
            except ImportError:
                return

            self.old_value = msvcrt.GetErrorMode()

            msvcrt.SetErrorMode(self.old_value | msvcrt.SEM_NOGPFAULTERRORBOX)

            # bpo-23314: Suppress assert dialogs in debug builds.
            # CrtSetReportMode() is only available in debug build.
            if hasattr(msvcrt, 'CrtSetReportMode'):
                self.old_modes = {}
                for report_type in [msvcrt.CRT_WARN,
                                    msvcrt.CRT_ERROR,
                                    msvcrt.CRT_ASSERT]:
                    old_mode = msvcrt.CrtSetReportMode(report_type,
                                                       msvcrt.CRTDBG_MODE_FILE)
                    old_file = msvcrt.CrtSetReportFile(report_type,
                                                       msvcrt.CRTDBG_FILE_STDERR)
                    self.old_modes[report_type] = old_mode, old_file
        else:
            try:
                import resource
                self.resource = resource
            except ImportError:
                self.resource = None
            if self.resource is not None:
                try:
                    # Disable coredumps: soft limit 0, hard limit unchanged.
                    self.old_value = self.resource.getrlimit(self.resource.RLIMIT_CORE)
                    self.resource.setrlimit(self.resource.RLIMIT_CORE,
                                            (0, self.old_value[1]))
                except (ValueError, OSError):
                    pass

            if sys.platform == 'darwin':
                import subprocess
                # Check if the 'Crash Reporter' on OSX was configured
                # in 'Developer' mode and warn that it will get triggered
                # when it is.
                #
                # This assumes that this context manager is used in tests
                # that might trigger the next manager.
                cmd = ['/usr/bin/defaults', 'read',
                       'com.apple.CrashReporter', 'DialogType']
                proc = subprocess.Popen(cmd,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
                with proc:
                    stdout = proc.communicate()[0]
                if stdout.strip() == b'developer':
                    print("this test triggers the Crash Reporter, "
                          "that is intentional", end='', flush=True)

        return self

    def __exit__(self, *ignore_exc):
        """Restore Windows ErrorMode or core file behavior to initial value."""
        if self.old_value is None:
            # __enter__ changed nothing (e.g. msvcrt missing, rlimit failed).
            return

        if sys.platform.startswith('win'):
            import msvcrt
            msvcrt.SetErrorMode(self.old_value)

            if self.old_modes:
                for report_type, (old_mode, old_file) in self.old_modes.items():
                    msvcrt.CrtSetReportMode(report_type, old_mode)
                    msvcrt.CrtSetReportFile(report_type, old_file)
        else:
            if self.resource is not None:
                try:
                    self.resource.setrlimit(self.resource.RLIMIT_CORE, self.old_value)
                except (ValueError, OSError):
                    pass
def patch(test_instance, object_to_patch, attr_name, new_value):
    """Override 'object_to_patch'.'attr_name' with 'new_value'.

    Also, add a cleanup procedure to 'test_instance' to restore
    'object_to_patch' value for 'attr_name'.
    The 'attr_name' should be a valid attribute for 'object_to_patch'.
    """
    # Fail fast (AttributeError) if the attribute does not exist at all.
    getattr(object_to_patch, attr_name)

    # Remember whether the attribute lives directly on the object or is
    # inherited; that decides whether cleanup restores or deletes it.
    try:
        original = object_to_patch.__dict__[attr_name]
    except (AttributeError, KeyError):
        stored_locally = False
        original = getattr(object_to_patch, attr_name, None)
    else:
        stored_locally = True

    def restore():
        if stored_locally:
            setattr(object_to_patch, attr_name, original)
        else:
            # The attribute was inherited: deleting the local override
            # re-exposes the inherited value.
            delattr(object_to_patch, attr_name)

    test_instance.addCleanup(restore)

    # Actually override the attribute.
    setattr(object_to_patch, attr_name, new_value)
@contextlib.contextmanager
def patch_list(orig):
    """Like unittest.mock.patch.dict, but for lists."""
    snapshot = list(orig)
    try:
        yield
    finally:
        # Restore in place so existing references to the list see it.
        orig[:] = snapshot
def run_in_subinterp(code):
    """
    Run code in a subinterpreter. Raise unittest.SkipTest if the tracemalloc
    module is enabled.
    """
    # tracemalloc is incompatible with subinterpreters; skip first.
    _check_tracemalloc()
    try:
        import _testcapi
    except ImportError:
        raise unittest.SkipTest("requires _testcapi")
    return _testcapi.run_in_subinterp(code)
def run_in_subinterp_with_config(code, *, own_gil=None, **config):
    """
    Run code in a subinterpreter. Raise unittest.SkipTest if the tracemalloc
    module is enabled.

    own_gil is a convenience alias for config['gil']: True -> 'own',
    False -> 'shared'.  Otherwise config['gil'] must be supplied, either
    as a string or as a legacy int (0/1/2).
    """
    _check_tracemalloc()
    try:
        import _testinternalcapi
    except ImportError:
        raise unittest.SkipTest("requires _testinternalcapi")
    if own_gil is not None:
        # own_gil and an explicit config['gil'] are mutually exclusive.
        assert 'gil' not in config, (own_gil, config)
        config['gil'] = 'own' if own_gil else 'shared'
    else:
        # config['gil'] is required here (KeyError if missing).
        gil = config['gil']
        # Map legacy int values to the string names.
        if gil == 0:
            config['gil'] = 'default'
        elif gil == 1:
            config['gil'] = 'shared'
        elif gil == 2:
            config['gil'] = 'own'
        elif not isinstance(gil, str):
            raise NotImplementedError(gil)
    config = types.SimpleNamespace(**config)
    return _testinternalcapi.run_in_subinterp_with_config(code, config)
def _check_tracemalloc():
# Issue #10915, #15751: PyGILState_*() functions don't work with
# sub-interpreters, the tracemalloc module uses these functions internally
try:
import tracemalloc
except ImportError:
pass
else:
if tracemalloc.is_tracing():
raise unittest.SkipTest("run_in_subinterp() cannot be used "
"if tracemalloc module is tracing "
"memory allocations")
def check_free_after_iterating(test, iter, cls, args=()):
    """Check that an instance of *cls* is freed right after iteration ends.

    *iter* is the iteration entry point (note: the parameter deliberately
    shadows the builtin of the same name so callers can substitute e.g.
    reversed).  The instance's __del__ calls next() on the exhausted
    iterator to check that this does not crash (Issue 26494).
    """
    done = False
    def wrapper():
        class A(cls):
            def __del__(self):
                nonlocal done
                done = True
                # Iterating from inside the finalizer must not crash.
                try:
                    next(it)
                except StopIteration:
                    pass

        it = iter(A(*args))
        # Issue 26494: Shouldn't crash
        test.assertRaises(StopIteration, next, it)

    wrapper()
    # The sequence should be deallocated just after the end of iterating
    gc_collect()
    test.assertTrue(done)
def missing_compiler_executable(cmd_names=()):
    """Check if the compiler components used to build the interpreter exist.

    Check for the existence of the compiler executables whose names are listed
    in 'cmd_names' or all the compiler executables when 'cmd_names' is empty
    and return the first missing executable or None when none is found
    missing.
    """
    # Note: was `cmd_names=[]` — a mutable default argument; an immutable
    # tuple is the safe, equivalent default (the truthiness tests below
    # behave identically).
    from setuptools._distutils import ccompiler, sysconfig
    from setuptools import errors
    import shutil

    compiler = ccompiler.new_compiler()
    sysconfig.customize_compiler(compiler)
    if compiler.compiler_type == "msvc":
        # MSVC has no executables, so check whether initialization succeeds.
        try:
            compiler.initialize()
        except errors.PlatformError:
            return "msvc"
    for name in compiler.executables:
        if cmd_names and name not in cmd_names:
            continue
        cmd = getattr(compiler, name)
        if cmd_names:
            # An explicitly requested executable must be configured.
            assert cmd is not None, \
                    "the '%s' executable is not configured" % name
        elif not cmd:
            continue
        if shutil.which(cmd[0]) is None:
            return cmd[0]
_old_android_emulator = None  # lazily-computed cache: True on emulator API < 24

def setswitchinterval(interval):
    """sys.setswitchinterval() wrapper that clamps *interval* on old
    Android emulators.

    Setting a very low gil interval on the Android emulator causes python
    to hang (issue #26939).
    """
    minimum_interval = 1e-4   # 100 us
    if is_android and interval < minimum_interval:
        global _old_android_emulator
        if _old_android_emulator is None:
            # Probe once and cache the result.
            import platform
            av = platform.android_ver()
            _old_android_emulator = av.is_emulator and av.api_level < 24
        if _old_android_emulator:
            interval = minimum_interval
    return sys.setswitchinterval(interval)
def get_pagesize():
    """Get size of a page in bytes."""
    # Try both POSIX spellings of the sysconf name; fall back to the
    # common 4 KiB page size when neither is available.
    for conf_name in ('SC_PAGESIZE', 'SC_PAGE_SIZE'):
        try:
            return os.sysconf(conf_name)
        except (ValueError, AttributeError):
            continue
    return 4096
@contextlib.contextmanager
def disable_faulthandler():
    """Temporarily disable faulthandler, re-enabling it on exit if it
    was enabled on entry."""
    import faulthandler

    # Use sys.__stderr__ instead of sys.stderr: regrtest replaces
    # sys.stderr with a StringIO (which has no file descriptor) when a
    # test is run with -W/--verbose3.
    stderr_fd = sys.__stderr__.fileno()
    was_enabled = faulthandler.is_enabled()
    faulthandler.disable()
    try:
        yield
    finally:
        if was_enabled:
            faulthandler.enable(file=stderr_fd, all_threads=True)
class SaveSignals:
    """
    Save and restore signal handlers.

    This class is only able to save/restore signal handlers registered
    by the Python signal module: see bpo-13285 for "external" signal
    handlers.
    """

    def __init__(self):
        import signal
        self.signal = signal
        self.signals = signal.valid_signals()
        # SIGKILL and SIGSTOP can be neither ignored nor caught, so
        # drop them (when the platform defines them at all).
        for signame in ('SIGKILL', 'SIGSTOP'):
            signum = getattr(signal, signame, None)
            if signum is not None:
                self.signals.remove(signum)
        self.handlers = {}

    def save(self):
        for signum in self.signals:
            handler = self.signal.getsignal(signum)
            # getsignal() returns None when the handler was installed
            # outside the Python signal module (and is neither SIG_DFL
            # nor SIG_IGN); such handlers cannot be restored, skip them.
            if handler is not None:
                self.handlers[signum] = handler

    def restore(self):
        for signum, handler in self.handlers.items():
            self.signal.signal(signum, handler)
def with_pymalloc():
    """Return True if Python is using the pymalloc allocator.

    Raises unittest.SkipTest when the _testcapi module is unavailable.
    """
    try:
        import _testcapi
    except ImportError:
        raise unittest.SkipTest("requires _testcapi")
    # pymalloc is not used by free-threaded (no-GIL) builds.
    return _testcapi.WITH_PYMALLOC and not Py_GIL_DISABLED
def with_mimalloc():
try:
import _testcapi
except ImportError:
raise unittest.SkipTest("requires _testcapi")
return _testcapi.WITH_MIMALLOC
class _ALWAYS_EQ:
    """Sentinel type whose instances compare equal to every object."""

    def __eq__(self, other):
        return True

    def __ne__(self, other):
        return False

# Shared sentinel instance.
ALWAYS_EQ = _ALWAYS_EQ()
class _NEVER_EQ:
    """Sentinel type whose instances compare unequal to every object."""

    def __eq__(self, other):
        return False

    def __ne__(self, other):
        return True

    def __hash__(self):
        # Constant hash: instances may still be used as dict/set members.
        return 1

# Shared sentinel instance.
NEVER_EQ = _NEVER_EQ()
@functools.total_ordering
class _LARGEST:
"""
Object that is greater than anything (except itself).
"""
def __eq__(self, other):
return isinstance(other, _LARGEST)
def __lt__(self, other):
return False
LARGEST = _LARGEST()
@functools.total_ordering
class _SMALLEST:
"""
Object that is less than anything (except itself).
"""
def __eq__(self, other):
return isinstance(other, _SMALLEST)
def __gt__(self, other):
return False
SMALLEST = _SMALLEST()
def maybe_get_event_loop_policy():
    """Return the global event loop policy if one is set, else return None."""
    import asyncio.events
    # NOTE(review): reads the private module attribute directly (instead of
    # asyncio.get_event_loop_policy()) to avoid creating a default policy
    # as a side effect — presumably matches this CPython version's internals.
    return asyncio.events._event_loop_policy
# Helpers for testing hashing.
NHASHBITS = sys.hash_info.width # number of bits in hash() result
# Only 32- and 64-bit hash widths are supported by these helpers.
assert NHASHBITS in (32, 64)

# Return mean and sdev of number of collisions when tossing nballs balls
# uniformly at random into nbins bins.  By definition, the number of
# collisions is the number of balls minus the number of occupied bins at
# the end.
def collision_stats(nbins, nballs):
    """Return (mean, sdev) of the collision count when `nballs` balls are
    tossed uniformly at random into `nbins` bins.

    A "collision" is a ball landing in an already-occupied bin, so the
    count is nballs minus the number of occupied bins at the end.

    mean: P(a given bin empty after k balls) = (1 - 1/n)**k, so the mean
    number of occupied bins is n - n*(1 - 1/n)**k and the mean collision
    count is k - (n - n*(1 - 1/n)**k).

    variance: n*(n-1)*(1-2/n)**k + meanempty*(1 - meanempty).

    Massive cancellation occurs in these formulas (e.g. 1 - 1/2**64
    rounds to 1.0 in binary floats), so the arithmetic is done with the
    `decimal` module at high precision rather than with floats.  Exact
    rational arithmetic would work too, but needs unbearably slow
    multi-million-bit math in context.
    """
    import decimal

    n, k = nbins, nballs
    with decimal.localcontext() as ctx:
        # About 2*bits(n) decimal digits: at least that many bits are
        # likely to cancel, so keep that many digits of slack.
        ctx.prec = max(n.bit_length() * 2, 30)

        dn = decimal.Decimal(n)
        prob_bin_empty = ((dn - 1) / dn) ** k
        mean_empty = n * prob_bin_empty
        mean_occupied = n - mean_empty
        mean_collisions = k - mean_occupied
        variance = (dn * (dn - 1) * ((dn - 2) / dn) ** k
                    + mean_empty * (1 - mean_empty))
        return float(mean_collisions), float(variance.sqrt())
class catch_unraisable_exception:
    """
    Context manager catching unraisable exception using sys.unraisablehook.

    Storing the exception value (cm.unraisable.exc_value) creates a reference
    cycle. The reference cycle is broken explicitly when the context manager
    exits.

    Storing the object (cm.unraisable.object) can resurrect it if it is set to
    an object which is being finalized. Exiting the context manager clears the
    stored object.

    Usage:
        with support.catch_unraisable_exception() as cm:
            # code creating an "unraisable exception"
            ...
            # check the unraisable exception: use cm.unraisable
            ...
        # cm.unraisable attribute no longer exists at this point
        # (to break a reference cycle)
    """

    def __init__(self):
        self.unraisable = None
        self._old_hook = None

    def _hook(self, unraisable):
        # Keep only the latest event.  Note the caveats above: holding
        # unraisable.object can resurrect a finalizing object, and
        # holding unraisable.exc_value creates a reference cycle.
        self.unraisable = unraisable

    def __enter__(self):
        self._old_hook = sys.unraisablehook
        sys.unraisablehook = self._hook
        return self

    def __exit__(self, *exc_info):
        sys.unraisablehook = self._old_hook
        # Drop the stored event to break the cycle / release the object.
        del self.unraisable
def wait_process(pid, *, exitcode, timeout=None):
    """
    Wait until process pid completes and check that the process exit code is
    exitcode.

    Raise an AssertionError if the process exit code is not equal to exitcode.

    If the process runs longer than timeout seconds (LONG_TIMEOUT by default),
    kill the process (if signal.SIGKILL is available) and raise an
    AssertionError. The timeout feature is not available on Windows.
    """
    if os.name != "nt":
        import signal

        if timeout is None:
            timeout = LONG_TIMEOUT

        start_time = time.monotonic()
        for _ in sleeping_retry(timeout, error=False):
            # Non-blocking poll: waitpid() returns (0, 0) while the child
            # is still running.
            pid2, status = os.waitpid(pid, os.WNOHANG)
            if pid2 != 0:
                break
            # retry: the process is still running
        else:
            # Timed out: kill and reap the child before failing.
            try:
                os.kill(pid, signal.SIGKILL)
                os.waitpid(pid, 0)
            except OSError:
                # Ignore errors like ChildProcessError or PermissionError
                pass

            dt = time.monotonic() - start_time
            raise AssertionError(f"process {pid} is still running "
                                 f"after {dt:.1f} seconds")
    else:
        # Windows implementation: don't support timeout :-(
        pid2, status = os.waitpid(pid, 0)

    exitcode2 = os.waitstatus_to_exitcode(status)
    if exitcode2 != exitcode:
        raise AssertionError(f"process {pid} exited with code {exitcode2}, "
                             f"but exit code {exitcode} is expected")

    # sanity check: it should not fail in practice
    if pid2 != pid:
        raise AssertionError(f"pid {pid2} != pid {pid}")
def skip_if_broken_multiprocessing_synchronize():
    """
    Skip tests if the multiprocessing.synchronize module is missing, if there
    is no available semaphore implementation, or if creating a lock raises an
    OSError (on Linux only).
    """
    from .import_helper import import_module

    # Skip tests if the _multiprocessing extension is missing.
    import_module('_multiprocessing')

    # Skip tests if there is no available semaphore implementation:
    # multiprocessing.synchronize requires _multiprocessing.SemLock.
    synchronize = import_module('multiprocessing.synchronize')

    if sys.platform == "linux":
        try:
            # bpo-38377: On Linux, creating a semaphore fails with OSError
            # if the current user does not have the permission to create
            # a file in /dev/shm/ directory.
            import multiprocessing
            synchronize.Lock(ctx=multiprocessing.get_context('fork'))
            # The explicit fork mp context is required in order for
            # TestResourceTracker.test_resource_tracker_reused to work.
            # synchronize creates a new multiprocessing.resource_tracker
            # process at module import time via the above call in that
            # scenario. Awkward. This enables gh-84559. No code involved
            # should have threads at that point so fork() should be safe.
        except OSError as exc:
            raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}")
def check_disallow_instantiation(testcase, tp, *args, **kwds):
"""
Check that given type cannot be instantiated using *args and **kwds.
See bpo-43916: Add Py_TPFLAGS_DISALLOW_INSTANTIATION type flag.
"""
mod = tp.__module__
name = tp.__name__
if mod != 'builtins':
qualname = f"{mod}.{name}"
else:
qualname = f"{name}"
msg = f"cannot create '{re.escape(qualname)}' instances"
testcase.assertRaisesRegex(TypeError, msg, tp, *args, **kwds)
testcase.assertRaisesRegex(TypeError, msg, tp.__new__, tp, *args, **kwds)
def get_recursion_depth():
    """Get the recursion depth of the caller function.

    In the __main__ module, at the module level, it should be 1.
    """
    try:
        import _testinternalcapi
        depth = _testinternalcapi.get_recursion_depth()
    except (ImportError, RecursionError):
        # Fallback: count frames with sys._getframe() + frame.f_back.
        try:
            frame = sys._getframe()
            depth = 0
            while frame is not None:
                depth += 1
                frame = frame.f_back
        finally:
            # Break any reference cycles.
            frame = None
    # Don't count the get_recursion_depth() frame itself.
    return max(depth - 1, 1)
def get_recursion_available():
    """Get the number of available frames before RecursionError.

    It depends on the current recursion depth of the caller function and
    sys.getrecursionlimit().
    """
    return sys.getrecursionlimit() - get_recursion_depth()
@contextlib.contextmanager
def set_recursion_limit(limit):
    """Temporarily change the recursion limit."""
    saved_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(limit)
    try:
        yield
    finally:
        sys.setrecursionlimit(saved_limit)
def infinite_recursion(max_depth=None):
    """Context manager raising the recursion limit *max_depth* frames
    above the caller's current depth, so recursion-handling code can be
    exercised without hitting the default limit."""
    if max_depth is None:
        # Pick a number large enough to cause problems, but not take too
        # long for code that can handle very deep recursion.
        max_depth = 20_000
    elif max_depth < 3:
        raise ValueError(f"max_depth must be at least 3, got {max_depth}")
    # -1: don't count the infinite_recursion() frame itself.
    current_depth = max(get_recursion_depth() - 1, 1)
    return set_recursion_limit(current_depth + max_depth)
def ignore_deprecations_from(module: str, *, like: str) -> object:
    """Install an "ignore" filter for DeprecationWarnings raised from
    *module* whose message matches the *like* regex.

    Returns an opaque token; pass it to clear_ignored_deprecations() to
    remove the filter again.
    """
    token = object()
    # Tag the pattern with the token id inside a no-op regex comment so
    # the filter can be identified (and removed) later.
    pattern = like + fr"(?#support{id(token)})"
    warnings.filterwarnings(
        "ignore",
        category=DeprecationWarning,
        module=module,
        message=pattern,
    )
    return token
def clear_ignored_deprecations(*tokens: object) -> None:
    """Remove the warning filters installed by ignore_deprecations_from().

    Each *token* identifies one filter via the "(?#support<id>)" tag
    embedded in the filter's message pattern.
    """
    if not tokens:
        raise ValueError("Provide token or tokens returned by ignore_deprecations_from")

    new_filters = []
    # NOTE(review): warnings._get_filters() is a private helper that returns
    # the live (per-interpreter) filter list — presumably matching this
    # CPython version; confirm if backporting.
    old_filters = warnings._get_filters()
    endswith = tuple(rf"(?#support{id(token)})" for token in tokens)
    for action, message, category, module, lineno in old_filters:
        if action == "ignore" and category is DeprecationWarning:
            if isinstance(message, re.Pattern):
                msg = message.pattern
            else:
                msg = message or ""
            # Drop filters tagged with any of the given tokens.
            if msg.endswith(endswith):
                continue
        new_filters.append((action, message, category, module, lineno))
    if old_filters != new_filters:
        # Mutate in place and notify the warnings machinery so its
        # internal caches are invalidated.
        old_filters[:] = new_filters
        warnings._filters_mutated()
# Skip a test if venv with pip is known to not work.
def requires_venv_with_pip():
    """Return a unittest skip decorator active when venv+pip cannot work."""
    # ensurepip needs zlib to open ZIP archives (.whl binary wheel packages).
    try:
        import zlib  # noqa: F401
    except ImportError:
        return unittest.skipIf(True, "venv: ensurepip requires zlib")

    # bpo-26610: pip/pep425tags.py requires ctypes.
    # gh-92820: setuptools/windows_support.py uses ctypes (setuptools 58.1).
    try:
        import ctypes
    except ImportError:
        ctypes = None
    return unittest.skipUnless(ctypes, 'venv: pip requires ctypes')
@functools.cache
def _findwheel(pkgname):
    """Try to find a wheel with the package specified as pkgname.

    If set, the wheels are searched for in WHEEL_PKG_DIR (see ensurepip).
    Otherwise, they are searched for in the test directory.
    """
    wheel_dir = sysconfig.get_config_var('WHEEL_PKG_DIR') or os.path.join(
        TEST_HOME_DIR, 'wheeldata',
    )
    filenames = os.listdir(wheel_dir)
    # Approximate "newest version first" by sorting names descending.
    filenames = sorted(filenames, reverse=True)
    for filename in filenames:
        # filename is like 'setuptools-{version}-py3-none-any.whl'
        if not filename.endswith(".whl"):
            continue
        prefix = pkgname + '-'
        if filename.startswith(prefix):
            return os.path.join(wheel_dir, filename)
    raise FileNotFoundError(f"No wheel for {pkgname} found in {wheel_dir}")
# Context manager that creates a virtual environment, install setuptools in it,
# and returns the paths to the venv directory and the python executable
@contextlib.contextmanager
def setup_venv_with_pip_setuptools(venv_dir):
    """Create a venv in a temporary directory, install setuptools into it
    from a local wheel, and yield the venv's python executable path."""
    import subprocess
    from .os_helper import temp_cwd

    def run_command(cmd):
        # Echo and stream output in verbose mode; otherwise capture it
        # (merged stdout+stderr) so a failure still raises cleanly.
        if verbose:
            import shlex
            print()
            print('Run:', ' '.join(map(shlex.quote, cmd)))
            subprocess.run(cmd, check=True)
        else:
            subprocess.run(cmd,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT,
                           check=True)

    with temp_cwd() as temp_dir:
        # Create virtual environment to get setuptools
        cmd = [sys.executable, '-X', 'dev', '-m', 'venv', venv_dir]
        run_command(cmd)

        venv = os.path.join(temp_dir, venv_dir)

        # Get the Python executable of the venv
        python_exe = os.path.basename(sys.executable)
        if sys.platform == 'win32':
            python = os.path.join(venv, 'Scripts', python_exe)
        else:
            python = os.path.join(venv, 'bin', python_exe)

        cmd = (python, '-X', 'dev',
               '-m', 'pip', 'install',
               _findwheel('setuptools'),
               )
        run_command(cmd)
        yield python
# True if Python is built with the Py_DEBUG macro defined: if
# Python is built in debug mode (./configure --with-pydebug).
# sys.gettotalrefcount() only exists in Py_DEBUG builds.
Py_DEBUG = hasattr(sys, 'gettotalrefcount')
def late_deletion(obj):
    """
    Keep a Python object alive as long as possible.

    Create a reference cycle and store the cycle in an object deleted late in
    Python finalization. Try to keep the object alive until the very last
    garbage collection.

    The function keeps a strong reference by design. It should be called in a
    subprocess to not mark a test as "leaking a reference".
    """
    # Late CPython finalization:
    # - finalize_interp_clear()
    # - _PyInterpreterState_Clear(): Clear PyInterpreterState members
    #   (ex: codec_search_path, before_forkers)
    # - clear os.register_at_fork() callbacks
    # - clear codecs.register() callbacks
    cycle = [obj]
    cycle.append(cycle)

    # Anchor the cycle in PyInterpreterState.codec_search_path, which is
    # cleared very late during interpreter finalization.
    import codecs
    def search_func(encoding):
        return None
    search_func.reference = cycle
    codecs.register(search_func)

    if hasattr(os, 'register_at_fork'):
        # Also anchor it in PyInterpreterState.before_forkers.
        def atfork_func():
            pass
        atfork_func.reference = cycle
        os.register_at_fork(before=atfork_func)
def busy_retry(timeout, err_msg=None, /, *, error=True):
    """
    Generator driving a busy retry loop: iterate until "break" stops it.

    After *timeout* seconds, raise an AssertionError if *error* is true,
    or just stop iterating if *error* is false.

    Example:

        for _ in support.busy_retry(support.SHORT_TIMEOUT):
            if check():
                break

    Example of error=False usage:

        for _ in support.busy_retry(support.SHORT_TIMEOUT, error=False):
            if check():
                break
        else:
            raise RuntimeError('my custom error')
    """
    if timeout <= 0:
        raise ValueError("timeout must be greater than zero")

    start = time.monotonic()
    deadline = start + timeout

    # Always yield at least once before checking the deadline.
    while True:
        yield
        if time.monotonic() >= deadline:
            break

    if error:
        dt = time.monotonic() - start
        msg = f"timeout ({dt:.1f} seconds)"
        if err_msg:
            msg = f"{msg}: {err_msg}"
        raise AssertionError(msg)
def sleeping_retry(timeout, err_msg=None, /,
                   *, init_delay=0.010, max_delay=1.0, error=True):
    """
    Retry-loop generator that sleeps with exponential backoff.

    Iterate until "break" stops the loop. Sleep at each iteration except
    the first; the delay doubles each time, capped at *max_delay* seconds.

    See busy_retry() documentation for the parameters usage.

    Example raising an exception after SHORT_TIMEOUT seconds:

        for _ in support.sleeping_retry(support.SHORT_TIMEOUT):
            if check():
                break

    Example of error=False usage:

        for _ in support.sleeping_retry(support.SHORT_TIMEOUT, error=False):
            if check():
                break
        else:
            raise RuntimeError('my custom error')
    """
    # Delegate timeout/error handling to busy_retry(); only add sleeping.
    delay = init_delay
    for _ in busy_retry(timeout, err_msg, error=error):
        yield

        time.sleep(delay)
        delay = min(2 * delay, max_delay)
class Stopwatch:
    """Context manager to roughly time a CPU-bound operation.

    Disables GC. Uses perf_counter, which is a clock with the highest
    available resolution. It is chosen even though it does include
    time elapsed during sleep and is system-wide, because the
    resolution of process_time is too coarse on Windows and
    process_time does not exist everywhere (for example, WASM).

    Note:
    - This *includes* time spent in other threads/processes.
    - Some systems only have a coarse resolution; check
      stopwatch.clock_info.resolution when using the results.

    Usage:

        with Stopwatch() as stopwatch:
            ...
        elapsed = stopwatch.seconds
        resolution = stopwatch.clock_info.resolution
    """

    def __enter__(self):
        get_time = time.perf_counter
        clock_info = time.get_clock_info('perf_counter')
        # Disable GC for the duration of the measurement to reduce noise.
        self.context = disable_gc()
        self.context.__enter__()
        self.get_time = get_time
        self.clock_info = clock_info
        # Read the clock last so setup cost isn't measured.
        self.start_time = get_time()
        return self

    def __exit__(self, *exc):
        # Read the clock first, then re-enable GC (even if reading fails).
        try:
            end_time = self.get_time()
        finally:
            result = self.context.__exit__(*exc)

        self.seconds = end_time - self.start_time
        return result
@contextlib.contextmanager
def adjust_int_max_str_digits(max_digits):
"""Temporarily change the integer string conversion length limit."""
current = sys.get_int_max_str_digits()
try:
sys.set_int_max_str_digits(max_digits)
yield
finally:
sys.set_int_max_str_digits(current)
def exceeds_recursion_limit():
    """Return a call depth comfortably above any default recursion limit,
    for use in recursion tests."""
    return 150_000
# Windows doesn't have os.uname() but it doesn't support s390x.
is_s390x = hasattr(os, 'uname') and os.uname().machine == 's390x'
skip_on_s390x = unittest.skipIf(is_s390x, 'skipped on s390x')

# True when Python is built with --with-trace-refs (sys.getobjects exists).
Py_TRACE_REFS = hasattr(sys, 'getobjects')

# NOTE(review): sys._jit only exists on recent CPython (3.14+) — presumably
# this file tracks that version; confirm before backporting.
_JIT_ENABLED = sys._jit.is_enabled()
requires_jit_enabled = unittest.skipUnless(_JIT_ENABLED, "requires JIT enabled")
requires_jit_disabled = unittest.skipIf(_JIT_ENABLED, "requires JIT disabled")
_BASE_COPY_SRC_DIR_IGNORED_NAMES = frozenset({
    # SRC_DIR/.git
    '.git',
    # ignore all __pycache__/ sub-directories
    '__pycache__',
})

# Ignore function for shutil.copytree() to copy the Python source code.
def copy_python_src_ignore(path, names):
    """Return the set of entries under *path* that copytree() must skip."""
    skip = _BASE_COPY_SRC_DIR_IGNORED_NAMES
    if os.path.basename(path) == 'Doc':
        # Doc/build/ and Doc/venv/ are generated artifacts.
        skip |= {'build', 'venv'}
    elif 'Modules' in names:
        # A 'Modules' entry marks the root of the source tree:
        # skip SRC_DIR/build/.
        skip |= {'build'}
    return skip
# XXX Move this to the inspect module?
def walk_class_hierarchy(top, *, topdown=True):
    """Yield (cls, subclasses) pairs over the class hierarchy rooted at *top*.

    Modeled on os.walk(): with topdown=True a class is yielded before its
    subclasses (and the caller may prune the yielded subclass list in
    place); with topdown=False it is yielded after them.
    """
    assert isinstance(top, type), repr(top)
    pending = [top]
    while pending:
        node = pending.pop()
        if isinstance(node, tuple):
            # A postponed (cls, subs) pair from bottom-up traversal.
            yield node
            continue
        subs = type(node).__subclasses__(node)
        if topdown:
            # Yield before subclass traversal if going top down.
            yield node, subs
        else:
            # Yield after subclass traversal if going bottom up.
            pending.append((node, subs))
        # Traverse into subclasses; reversed() keeps left-to-right order
        # when popping from the stack.
        for sub in reversed(subs):
            pending.append(sub)
def iter_builtin_types():
    """Yield the static builtin types (Py_TPFLAGS_STATIC_BUILTIN).

    Uses _testinternalcapi when available; otherwise makes a best-effort
    guess from type flags or the builtins namespace.
    """
    # First try the explicit route.
    try:
        import _testinternalcapi
    except ImportError:
        _testinternalcapi = None
    if _testinternalcapi is not None:
        yield from _testinternalcapi.get_static_builtin_types()
        return

    # Fall back to making a best-effort guess.
    if hasattr(object, '__flags__'):
        # Look for any type object with the Py_TPFLAGS_STATIC_BUILTIN flag set.
        import datetime  # noqa: F401
        seen = set()
        for cls, subs in walk_class_hierarchy(object):
            if cls in seen:
                continue
            seen.add(cls)
            if not (cls.__flags__ & _TPFLAGS_STATIC_BUILTIN):
                # Do not walk its subclasses.
                subs[:] = []
                continue
            yield cls
    else:
        # Fall back to a naive approach.
        # NOTE(review): assumes __builtins__ is a dict here (true in an
        # imported module; it is a module object in __main__).
        seen = set()
        for obj in __builtins__.values():
            if not isinstance(obj, type):
                continue
            cls = obj
            # XXX?
            if cls.__module__ != 'builtins':
                continue
            if cls == ExceptionGroup:
                # It's a heap type.
                continue
            if cls in seen:
                continue
            seen.add(cls)
            yield cls
# XXX Move this to the inspect module?
def iter_name_in_mro(cls, name):
"""Yield matching items found in base.__dict__ across the MRO.
The descriptor protocol is not invoked.
list(iter_name_in_mro(cls, name))[0] is roughly equivalent to
find_name_in_mro() in Objects/typeobject.c (AKA PyType_Lookup()).
inspect.getattr_static() is similar.
"""
# This can fail if "cls" is weird.
for base in inspect._static_getmro(cls):
# This can fail if "base" is weird.
ns = inspect._get_dunder_dict_of_class(base)
try:
obj = ns[name]
except KeyError:
continue
yield obj, base
# XXX Move this to the inspect module?
def find_name_in_mro(cls, name, default=inspect._sentinel):
    """Return the first (obj, base) pair found in the MRO for *name*.

    Returns (default, None) when the name is absent and a default was
    given; otherwise raises AttributeError.
    """
    for match in iter_name_in_mro(cls, name):
        return match
    if default is not inspect._sentinel:
        return default, None
    raise AttributeError(name)
# XXX The return value should always be exactly the same...
def identify_type_slot_wrappers():
try:
import _testinternalcapi
except ImportError:
_testinternalcapi = None
if _testinternalcapi is not None:
names = {n: None for n in _testinternalcapi.identify_type_slot_wrappers()}
return list(names)
else:
raise NotImplementedError
def iter_slot_wrappers(cls):
    """Yield (name, is_own) pairs for the slot wrappers of *cls*.

    is_own is true when the wrapper is defined directly on *cls*
    rather than inherited from a base class.
    """
    def is_slot_wrapper(name, value):
        # Classify `value`, sanity-checking with its repr either way.
        if not isinstance(value, types.WrapperDescriptorType):
            assert not repr(value).startswith('<slot wrapper '), (cls, name, value)
            return False
        assert repr(value).startswith('<slot wrapper '), (cls, name, value)
        assert callable(value), (cls, name, value)
        assert name.startswith('__') and name.endswith('__'), (cls, name, value)
        return True

    try:
        attrs = identify_type_slot_wrappers()
    except NotImplementedError:
        attrs = None
    if attrs is not None:
        # Preferred path: look each known slot name up in the MRO.
        for attr in sorted(attrs):
            obj, base = find_name_in_mro(cls, attr, None)
            if obj is not None and is_slot_wrapper(attr, obj):
                yield attr, base is cls
        return

    # Fall back to a naive best-effort approach.
    ns = vars(cls)
    unused = set(ns)
    for name in dir(cls):
        if name in ns:
            unused.remove(name)

        try:
            value = getattr(cls, name)
        except AttributeError:
            # It's as though it weren't in __dir__.
            assert name in ('__annotate__', '__annotations__', '__abstractmethods__'), (cls, name)
            if name in ns and is_slot_wrapper(name, ns[name]):
                unused.add(name)
            continue

        if not name.startswith('__') or not name.endswith('__'):
            assert not is_slot_wrapper(name, value), (cls, name, value)
        if not is_slot_wrapper(name, value):
            if name in ns:
                assert not is_slot_wrapper(name, ns[name]), (cls, name, value, ns[name])
        else:
            if name in ns:
                assert ns[name] is value, (cls, name, value, ns[name])
                yield name, True
            else:
                yield name, False

    for name in unused:
        value = ns[name]
        # BUG FIX: was `is_slot_wrapper(cls, name, value)` — three arguments
        # passed to the two-parameter helper, a guaranteed TypeError on
        # this path.
        if is_slot_wrapper(name, value):
            yield name, True
@contextlib.contextmanager
def force_color(color: bool):
    """Context manager forcing _colorize.can_colorize() to return *color*.

    Also scrubs the color-related environment variables and sets
    FORCE_COLOR/NO_COLOR so that subprocesses inherit the forced setting.
    """
    import _colorize
    from .os_helper import EnvironmentVarGuard

    with (
        swap_attr(_colorize, "can_colorize", lambda *, file=None: color),
        EnvironmentVarGuard() as env,
    ):
        env.unset("FORCE_COLOR", "NO_COLOR", "PYTHON_COLORS")
        env.set("FORCE_COLOR" if color else "NO_COLOR", "1")
        yield
def force_colorized(func):
    """Force the terminal to be colorized."""
    @functools.wraps(func)
    def colorized_wrapper(*args, **kwargs):
        with force_color(True):
            return func(*args, **kwargs)
    return colorized_wrapper
def force_not_colorized(func):
    """Force the terminal NOT to be colorized."""
    @functools.wraps(func)
    def uncolorized_wrapper(*args, **kwargs):
        with force_color(False):
            return func(*args, **kwargs)
    return uncolorized_wrapper
def force_colorized_test_class(cls):
    """Force the terminal to be colorized for the entire test class."""
    previous_setUpClass = cls.setUpClass

    @classmethod
    @functools.wraps(cls.setUpClass)
    def setUpClass_with_color(cls):
        # Keep colorization forced for the class's whole lifetime.
        cls.enterClassContext(force_color(True))
        previous_setUpClass()

    cls.setUpClass = setUpClass_with_color
    return cls
def force_not_colorized_test_class(cls):
    """Force the terminal NOT to be colorized for the entire test class."""
    previous_setUpClass = cls.setUpClass

    @classmethod
    @functools.wraps(cls.setUpClass)
    def setUpClass_without_color(cls):
        # Keep colorization disabled for the class's whole lifetime.
        cls.enterClassContext(force_color(False))
        previous_setUpClass()

    cls.setUpClass = setUpClass_without_color
    return cls
def make_clean_env() -> dict[str, str]:
    """Return a copy of os.environ without PYTHON* or color variables."""
    clean = {key: value for key, value in os.environ.items()
             if not key.startswith("PYTHON")}
    clean.pop("FORCE_COLOR", None)
    clean.pop("NO_COLOR", None)
    return clean
# Well-known Windows NTSTATUS crash codes.
WINDOWS_STATUS = {
    0xC0000005: "STATUS_ACCESS_VIOLATION",
    0xC00000FD: "STATUS_STACK_OVERFLOW",
    0xC000013A: "STATUS_CONTROL_C_EXIT",
}

def get_signal_name(exitcode):
    """Best-effort mapping from a process exit code to a signal/status name.

    Returns None when no mapping applies.
    """
    import signal

    def signal_name(signum):
        try:
            return signal.Signals(signum).name
        except ValueError:
            return None

    if exitcode < 0:
        # POSIX convention: a negative exit code is -signum.
        name = signal_name(-exitcode)
        if name is not None:
            return name

    if 128 < exitcode < 256:
        # Shell exit code (ex: WASI build): 128 + signum.
        name = signal_name(exitcode - 128)
        if name is not None:
            return name

    name = WINDOWS_STATUS.get(exitcode)
    if name is not None:
        return name

    # Format unknown Windows exit status as hexadecimal.
    if 0xC0000000 <= exitcode:
        return f"0x{exitcode:X}"

    return None
class BrokenIter:
    """Iterator whose protocol methods raise ZeroDivisionError on demand.

    Each flag makes the corresponding method raise (via a literal 1/0 so
    the traceback points at the failing method).  __next__ otherwise
    returns None and never raises StopIteration.
    """

    def __init__(self, init_raises=False, next_raises=False, iter_raises=False):
        if init_raises:
            1/0
        self.iter_raises = iter_raises
        self.next_raises = next_raises

    def __iter__(self):
        if self.iter_raises:
            1/0
        return self

    def __next__(self):
        if self.next_raises:
            1/0
def in_systemd_nspawn_sync_suppressed() -> bool:
    """
    Test whether the test suite is running in systemd-nspawn
    with ``--suppress-sync=true``.

    This can be used to skip tests that rely on ``fsync()`` calls
    and similar not being intercepted.
    """
    if not hasattr(os, "O_SYNC"):
        return False

    try:
        with open("/run/systemd/container", "rb") as fp:
            container = fp.read().rstrip()
    except FileNotFoundError:
        return False
    if container != b"systemd-nspawn":
        return False

    # Under --suppress-sync, opening with O_SYNC fails immediately with
    # EINVAL; any other outcome means syncing is not suppressed.
    import errno
    try:
        fd = os.open(__file__, os.O_RDONLY | os.O_SYNC)
    except OSError as err:
        return err.errno == errno.EINVAL
    os.close(fd)
    return False
def run_no_yield_async_fn(async_fn, /, *args, **kwargs):
    """Drive *async_fn* to completion, requiring it to finish without
    awaiting anything that yields; return its result."""
    coro = async_fn(*args, **kwargs)
    try:
        try:
            coro.send(None)
        except StopIteration as exc:
            return exc.value
        # The coroutine yielded instead of finishing in one step.
        raise AssertionError("coroutine did not complete")
    finally:
        coro.close()
@types.coroutine
def async_yield(v):
    """Awaitable that yields *v* once and evaluates to the value sent back."""
    sent = yield v
    return sent
def run_yielding_async_fn(async_fn, /, *args, **kwargs):
    """Drive *async_fn* to completion, discarding every value it yields."""
    coro = async_fn(*args, **kwargs)
    try:
        # Keep resuming the coroutine until it delivers its result
        # through StopIteration.
        while True:
            try:
                coro.send(None)
            except StopIteration as stop:
                return stop.value
    finally:
        coro.close()
def is_libssl_fips_mode():
    """Return True when the OpenSSL library backing _hashlib is in FIPS mode."""
    try:
        from _hashlib import get_fips_mode  # ask _hashopenssl.c
    except ImportError:
        # more of a maybe, unless we add this to the _ssl module.
        return False
    return bool(get_fips_mode())
def _supports_remote_attaching():
PROCESS_VM_READV_SUPPORTED = False
try:
from _remote_debugging import PROCESS_VM_READV_SUPPORTED
except ImportError:
pass
return PROCESS_VM_READV_SUPPORTED
def _support_remote_exec_only_impl():
    """Return a decorator: ``unittest.skip(...)`` when remote-exec tests
    cannot run on this interpreter/platform, otherwise the identity
    decorator ``_id``."""
    skip_reason = None
    if not sys.is_remote_debug_enabled():
        skip_reason = "Remote debugging is not enabled"
    elif sys.platform not in ("darwin", "linux", "win32"):
        skip_reason = "Test only runs on Linux, Windows and macOS"
    elif sys.platform == "linux" and not _supports_remote_attaching():
        skip_reason = "Test only runs on Linux with process_vm_readv support"
    if skip_reason is not None:
        return unittest.skip(skip_reason)
    return _id
def support_remote_exec_only(test):
    """Decorator: skip *test* unless remote exec/debugging is supported here."""
    return _support_remote_exec_only_impl()(test)
class EqualToForwardRef:
    """Helper to ease use of annotationlib.ForwardRef in tests.

    This checks only attributes that can be set using the constructor.
    """

    def __init__(self, arg, *, module=None, owner=None, is_class=False):
        self.__forward_arg__ = arg
        self.__forward_is_class__ = is_class
        self.__forward_module__ = module
        self.__owner__ = owner

    def __eq__(self, other):
        if not isinstance(other, (EqualToForwardRef, annotationlib.ForwardRef)):
            return NotImplemented
        # Compare exactly the constructor-settable attributes.
        attrs = (
            "__forward_arg__",
            "__forward_module__",
            "__forward_is_class__",
            "__owner__",
        )
        return all(getattr(self, name) == getattr(other, name) for name in attrs)

    def __repr__(self):
        pieces = [f"EqualToForwardRef({self.__forward_arg__!r}"]
        if self.__forward_module__ is not None:
            pieces.append(f", module={self.__forward_module__!r}")
        if self.__forward_is_class__:
            pieces.append(", is_class=True")
        if self.__owner__ is not None:
            pieces.append(f", owner={self.__owner__!r}")
        pieces.append(")")
        return "".join(pieces)
# Cached result of linked_to_musl(); None means "not computed yet".
_linked_to_musl = None
def linked_to_musl():
    """
    Report if the Python executable is linked to the musl C library.
    Return False if we don't think it is, or a version triple otherwise.
    """
    # The check can be relatively expensive, so the result is cached.
    global _linked_to_musl
    if _linked_to_musl is None:
        _linked_to_musl = _detect_musl()
    return _linked_to_musl
def _detect_musl():
    # Compute the uncached answer for linked_to_musl().
    # emscripten (at least as far as we're concerned) and wasi use musl,
    # but platform doesn't know how to get the version, so report zeros.
    if is_wasm32:
        return (0, 0, 0)
    # On all other non-linux platforms assume no musl.
    if sys.platform != 'linux':
        return False
    # On linux, depend on the platform module to do the check; new musl
    # platforms should add support in that module if possible.
    import platform
    lib, version = platform.libc_ver()
    if lib != 'musl':
        return False
    return tuple(int(part) for part in version.split('.'))
def control_characters_c0() -> list[str]:
    """Return the C0 control characters as one-character strings.

    C0 control characters are the byte range 0x00-0x1F, plus DEL (0x7F).
    """
    chars = [chr(code) for code in range(0x20)]
    chars.append("\x7f")
    return chars
#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test processing of feefilter messages."""
from decimal import Decimal
import time
from test_framework.messages import msg_feefilter
from test_framework.mininode import (
mininode_lock,
P2PInterface,
)
from test_framework.test_framework import BitcoinTestFramework
def hashToHex(hash):
    """Render an integer hash as a 64-character zero-padded hex string."""
    return f"{hash:064x}"
# Wait up to 60 secs to see if the testnode has received all the expected invs
def allInvsMatch(invsExpected, testnode):
    """Poll once per second (up to a minute) until the node's recorded tx
    invs match *invsExpected* (order-insensitive); False on timeout."""
    for _ in range(60):
        with mininode_lock:
            if sorted(testnode.txinvs) == sorted(invsExpected):
                return True
        time.sleep(1)
    return False
class TestP2PConn(P2PInterface):
    """P2P connection that records every transaction inv it receives."""

    def __init__(self):
        super().__init__()
        self.txinvs = []

    def on_inv(self, message):
        # inv type 1 == transaction; remember the announced hash as hex.
        for item in message.inv:
            if item.type == 1:
                self.txinvs.append(hashToHex(item.hash))

    def clear_invs(self):
        with mininode_lock:
            self.txinvs = []
class FeeFilterTest(BitcoinTestFramework):
    """Check that a peer's feefilter message limits which tx invs it is sent."""
    def set_test_params(self):
        self.num_nodes = 2
    def skip_test_if_missing_module(self):
        # Transactions are created via the node wallet, so a wallet build is required.
        self.skip_if_no_wallet()
    def run_test(self):
        """Exercise relay with no filter, a 15 sat/byte filter, and filter removal."""
        node1 = self.nodes[1]
        node0 = self.nodes[0]
        # Get out of IBD
        node1.generate(1)
        self.sync_blocks()
        self.nodes[0].add_p2p_connection(TestP2PConn())
        # Test that invs are received for all txs at feerate of 20 sat/byte
        node1.settxfee(Decimal("0.00020000"))
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1)
                 for x in range(3)]
        assert allInvsMatch(txids, self.nodes[0].p2p)
        self.nodes[0].p2p.clear_invs()
        # Set a filter of 15 sat/byte
        self.nodes[0].p2p.send_and_ping(msg_feefilter(15000))
        # Test that txs are still being received (paying 20 sat/byte)
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1)
                 for x in range(3)]
        assert allInvsMatch(txids, self.nodes[0].p2p)
        self.nodes[0].p2p.clear_invs()
        # Change tx fee rate to 10 sat/byte and test they are no longer
        # received
        node1.settxfee(Decimal("0.00010000"))
        [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
        self.sync_mempools()  # must be sure node 0 has received all txs
        # Send one transaction from node0 that should be received, so that we
        # we can sync the test on receipt (if node1's txs were relayed, they'd
        # be received by the time this node0 tx is received). This is
        # unfortunately reliant on the current relay behavior where we batch up
        # to 35 entries in an inv, which means that when this next transaction
        # is eligible for relay, the prior transactions from node1 are eligible
        # as well.
        node0.settxfee(Decimal("0.00020000"))
        txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
        assert allInvsMatch(txids, self.nodes[0].p2p)
        self.nodes[0].p2p.clear_invs()
        # Remove fee filter and check that txs are received again
        self.nodes[0].p2p.send_and_ping(msg_feefilter(0))
        txids = [node1.sendtoaddress(node1.getnewaddress(), 1)
                 for x in range(3)]
        assert allInvsMatch(txids, self.nodes[0].p2p)
        self.nodes[0].p2p.clear_invs()
# Run the functional test when invoked as a script.
if __name__ == '__main__':
    FeeFilterTest().main()
{{-- Tailwind CSS pagination view: compact previous/next controls on small
     screens, full result counts and numbered page links from the `sm`
     breakpoint up. Expects $paginator and $elements in scope. --}}
@if ($paginator->hasPages())
    <nav role="navigation" aria-label="{{ __('Pagination Navigation') }}">
        <div class="flex gap-2 items-center justify-between sm:hidden">
            @if ($paginator->onFirstPage())
                <span class="inline-flex items-center px-4 py-2 text-sm font-medium text-gray-600 bg-white border border-gray-300 cursor-not-allowed leading-5 rounded-md dark:text-gray-300 dark:bg-gray-700 dark:border-gray-600">
                    {!! __('pagination.previous') !!}
                </span>
            @else
                <a href="{{ $paginator->previousPageUrl() }}" rel="prev" class="inline-flex items-center px-4 py-2 text-sm font-medium text-gray-800 bg-white border border-gray-300 leading-5 rounded-md hover:text-gray-700 focus:outline-none focus:ring ring-gray-300 focus:border-blue-300 active:bg-gray-100 active:text-gray-800 transition ease-in-out duration-150 dark:bg-gray-800 dark:border-gray-600 dark:text-gray-200 dark:focus:border-blue-700 dark:active:bg-gray-700 dark:active:text-gray-300 hover:bg-gray-100 dark:hover:bg-gray-900 dark:hover:text-gray-200">
                    {!! __('pagination.previous') !!}
                </a>
            @endif
            @if ($paginator->hasMorePages())
                <a href="{{ $paginator->nextPageUrl() }}" rel="next" class="inline-flex items-center px-4 py-2 text-sm font-medium text-gray-800 bg-white border border-gray-300 leading-5 rounded-md hover:text-gray-700 focus:outline-none focus:ring ring-gray-300 focus:border-blue-300 active:bg-gray-100 active:text-gray-800 transition ease-in-out duration-150 dark:bg-gray-800 dark:border-gray-600 dark:text-gray-200 dark:focus:border-blue-700 dark:active:bg-gray-700 dark:active:text-gray-300 hover:bg-gray-100 dark:hover:bg-gray-900 dark:hover:text-gray-200">
                    {!! __('pagination.next') !!}
                </a>
            @else
                <span class="inline-flex items-center px-4 py-2 text-sm font-medium text-gray-600 bg-white border border-gray-300 cursor-not-allowed leading-5 rounded-md dark:text-gray-300 dark:bg-gray-700 dark:border-gray-600">
                    {!! __('pagination.next') !!}
                </span>
            @endif
        </div>
        <div class="hidden sm:flex-1 sm:flex sm:gap-2 sm:items-center sm:justify-between">
            <div>
                <p class="text-sm text-gray-700 leading-5 dark:text-gray-600">
                    {!! __('Showing') !!}
                    @if ($paginator->firstItem())
                        <span class="font-medium">{{ $paginator->firstItem() }}</span>
                        {!! __('to') !!}
                        <span class="font-medium">{{ $paginator->lastItem() }}</span>
                    @else
                        {{ $paginator->count() }}
                    @endif
                    {!! __('of') !!}
                    <span class="font-medium">{{ $paginator->total() }}</span>
                    {!! __('results') !!}
                </p>
            </div>
            <div>
                <span class="inline-flex rtl:flex-row-reverse shadow-sm rounded-md">
                    {{-- Previous Page Link --}}
                    @if ($paginator->onFirstPage())
                        <span aria-disabled="true" aria-label="{{ __('pagination.previous') }}">
                            <span class="inline-flex items-center px-2 py-2 text-sm font-medium text-gray-500 bg-white border border-gray-300 cursor-not-allowed rounded-l-md leading-5 dark:bg-gray-700 dark:border-gray-600 dark:text-gray-400" aria-hidden="true">
                                <svg class="w-5 h-5" fill="currentColor" viewBox="0 0 20 20">
                                    <path fill-rule="evenodd" d="M12.707 5.293a1 1 0 010 1.414L9.414 10l3.293 3.293a1 1 0 01-1.414 1.414l-4-4a1 1 0 010-1.414l4-4a1 1 0 011.414 0z" clip-rule="evenodd" />
                                </svg>
                            </span>
                        </span>
                    @else
                        <a href="{{ $paginator->previousPageUrl() }}" rel="prev" class="inline-flex items-center px-2 py-2 text-sm font-medium text-gray-500 bg-white border border-gray-300 rounded-l-md leading-5 hover:text-gray-400 focus:outline-none focus:ring ring-gray-300 focus:border-blue-300 active:bg-gray-100 active:text-gray-500 transition ease-in-out duration-150 dark:bg-gray-800 dark:border-gray-600 dark:active:bg-gray-700 dark:focus:border-blue-800 dark:text-gray-300 dark:hover:bg-gray-900 dark:hover:text-gray-300" aria-label="{{ __('pagination.previous') }}">
                            <svg class="w-5 h-5" fill="currentColor" viewBox="0 0 20 20">
                                <path fill-rule="evenodd" d="M12.707 5.293a1 1 0 010 1.414L9.414 10l3.293 3.293a1 1 0 01-1.414 1.414l-4-4a1 1 0 010-1.414l4-4a1 1 0 011.414 0z" clip-rule="evenodd" />
                            </svg>
                        </a>
                    @endif
                    {{-- Pagination Elements --}}
                    @foreach ($elements as $element)
                        {{-- "Three Dots" Separator --}}
                        @if (is_string($element))
                            <span aria-disabled="true">
                                <span class="inline-flex items-center px-4 py-2 -ml-px text-sm font-medium text-gray-700 bg-white border border-gray-300 cursor-default leading-5 dark:bg-gray-800 dark:border-gray-600 dark:text-gray-300">{{ $element }}</span>
                            </span>
                        @endif
                        {{-- Array Of Links --}}
                        @if (is_array($element))
                            @foreach ($element as $page => $url)
                                @if ($page == $paginator->currentPage())
                                    <span aria-current="page">
                                        <span class="inline-flex items-center px-4 py-2 -ml-px text-sm font-medium text-gray-700 bg-gray-200 border border-gray-300 cursor-default leading-5 dark:bg-gray-700 dark:border-gray-600 dark:text-gray-300">{{ $page }}</span>
                                    </span>
                                @else
                                    <a href="{{ $url }}" class="inline-flex items-center px-4 py-2 -ml-px text-sm font-medium text-gray-700 bg-white border border-gray-300 leading-5 hover:text-gray-700 focus:outline-none focus:ring ring-gray-300 focus:border-blue-300 active:bg-gray-100 active:text-gray-700 transition ease-in-out duration-150 dark:bg-gray-800 dark:border-gray-600 dark:text-gray-300 dark:hover:text-gray-300 dark:active:bg-gray-700 dark:focus:border-blue-800 hover:bg-gray-100 dark:hover:bg-gray-900" aria-label="{{ __('Go to page :page', ['page' => $page]) }}">
                                        {{ $page }}
                                    </a>
                                @endif
                            @endforeach
                        @endif
                    @endforeach
                    {{-- Next Page Link --}}
                    @if ($paginator->hasMorePages())
                        <a href="{{ $paginator->nextPageUrl() }}" rel="next" class="inline-flex items-center px-2 py-2 -ml-px text-sm font-medium text-gray-500 bg-white border border-gray-300 rounded-r-md leading-5 hover:text-gray-400 focus:outline-none focus:ring ring-gray-300 focus:border-blue-300 active:bg-gray-100 active:text-gray-500 transition ease-in-out duration-150 dark:bg-gray-800 dark:border-gray-600 dark:active:bg-gray-700 dark:focus:border-blue-800 dark:text-gray-300 dark:hover:bg-gray-900 dark:hover:text-gray-300" aria-label="{{ __('pagination.next') }}">
                            <svg class="w-5 h-5" fill="currentColor" viewBox="0 0 20 20">
                                <path fill-rule="evenodd" d="M7.293 14.707a1 1 0 010-1.414L10.586 10 7.293 6.707a1 1 0 011.414-1.414l4 4a1 1 0 010 1.414l-4 4a1 1 0 01-1.414 0z" clip-rule="evenodd" />
                            </svg>
                        </a>
                    @else
                        <span aria-disabled="true" aria-label="{{ __('pagination.next') }}">
                            <span class="inline-flex items-center px-2 py-2 -ml-px text-sm font-medium text-gray-500 bg-white border border-gray-300 cursor-not-allowed rounded-r-md leading-5 dark:bg-gray-700 dark:border-gray-600 dark:text-gray-400" aria-hidden="true">
                                <svg class="w-5 h-5" fill="currentColor" viewBox="0 0 20 20">
                                    <path fill-rule="evenodd" d="M7.293 14.707a1 1 0 010-1.414L10.586 10 7.293 6.707a1 1 0 011.414-1.414l4 4a1 1 0 010 1.414l-4 4a1 1 0 01-1.414 0z" clip-rule="evenodd" />
                                </svg>
                            </span>
                        </span>
                    @endif
                </span>
            </div>
        </div>
    </nav>
@endif
#!/usr/bin/env python
# Test whether a client sends a correct PUBLISH to a topic with QoS 0.
# The client should connect to port 1888 with keepalive=60, clean session set,
# and client id publish-qos0-test
# The test will send a CONNACK message to the client with rc=0. Upon receiving
# the CONNACK and verifying that rc=0, the client should send a PUBLISH message
# to topic "pub/qos0/test" with payload "message" and QoS=0. If rc!=0, the
# client should exit with an error.
# After sending the PUBLISH message, the client should send a DISCONNECT message.
import inspect
import os
import subprocess
import socket
import sys
import time
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
# Exit status: 1 (failure) unless the full packet exchange is observed.
rc = 1
keepalive = 60
# Packets we expect from / send to the client under test.
connect_packet = mosq_test.gen_connect("publish-qos0-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
publish_packet = mosq_test.gen_publish("pub/qos0/test", qos=0, payload="message")
disconnect_packet = mosq_test.gen_disconnect()
# Pretend to be the broker: listen on port 1888 for the client connection.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(10)
sock.bind(('', 1888))
sock.listen(5)
client_args = sys.argv[1:]
env = dict(os.environ)
env['LD_LIBRARY_PATH'] = '../../lib:../../lib/cpp'
# Prepend the local python bindings to any existing PYTHONPATH.
env['PYTHONPATH'] = '../../lib/python:' + env.get('PYTHONPATH', '')
client = subprocess.Popen(client_args, env=env)
try:
    (conn, address) = sock.accept()
    conn.settimeout(10)
    # CONNECT -> CONNACK -> PUBLISH -> DISCONNECT, in that order.
    if mosq_test.expect_packet(conn, "connect", connect_packet):
        conn.send(connack_packet)
        if mosq_test.expect_packet(conn, "publish", publish_packet):
            if mosq_test.expect_packet(conn, "disconnect", disconnect_packet):
                rc = 0
    conn.close()
finally:
    # Always reap the client and release the listening socket.
    client.terminate()
    client.wait()
    sock.close()
# sys.exit() instead of the site-injected exit() helper, which is not
# guaranteed to exist when the script runs without the site module.
sys.exit(rc)
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
from collections import namedtuple
from operator import attrgetter
from ..utils import ItemsCount
from .._compat import to_unicode
from ..nlp.stemmers import null_stemmer
# Pairs a sentence with its position in the document and its computed rating.
SentenceInfo = namedtuple("SentenceInfo", ("sentence", "order", "rating",))
class AbstractSummarizer(object):
    """Base class for summarization algorithms.

    Subclasses implement ``__call__(document, sentences_count)`` and can
    use ``_get_best_sentences`` to pick the top-rated sentences while
    preserving their original order in the document.
    """

    def __init__(self, stemmer=null_stemmer):
        if not callable(stemmer):
            raise ValueError("Stemmer has to be a callable object")
        self._stemmer = stemmer

    def __call__(self, document, sentences_count):
        # Typo fixed in the message: "overriden" -> "overridden".
        raise NotImplementedError("This method should be overridden in subclass")

    def stem_word(self, word):
        """Return the stem of *word* after unicode lower-casing."""
        return self._stemmer(self.normalize_word(word))

    @staticmethod
    def normalize_word(word):
        """Lower-case *word* as a unicode string."""
        return to_unicode(word).lower()

    @staticmethod
    def _get_best_sentences(sentences, count, rating, *args, **kwargs):
        """Return the *count* best-rated sentences in document order.

        *rating* is either a mapping from sentence to score, or a callable
        ``rating(sentence, *args, **kwargs)`` returning the score.
        """
        if isinstance(rating, dict):
            # A mapping cannot accept extra arguments.
            assert not args and not kwargs
            rate = rating.__getitem__
        else:
            rate = rating
        infos = (SentenceInfo(s, o, rate(s, *args, **kwargs))
            for o, s in enumerate(sentences))
        # sort sentences by rating in descending order
        infos = sorted(infos, key=attrgetter("rating"), reverse=True)
        # get `count` first best rated sentences
        if not isinstance(count, ItemsCount):
            count = ItemsCount(count)
        infos = count(infos)
        # sort sentences by their order in document
        infos = sorted(infos, key=attrgetter("order"))
        return tuple(i.sentence for i in infos)
import pytz
import logging
import requests
from dateutil.parser import parse
from datetime import datetime, timedelta
from django.utils import timezone
from website.app import init_app
from scripts.analytics.base import SummaryAnalytics
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
LOG_THRESHOLD = 11
class PreprintSummary(SummaryAnalytics):
    """Daily per-provider preprint counts, queried from the SHARE search API."""
    @property
    def collection_name(self):
        # Keen collection that receives these events.
        return 'preprint_summary'
    def get_events(self, date):
        """Return one count event per PreprintProvider for *date*."""
        super(PreprintSummary, self).get_events(date)
        from osf.models import PreprintProvider
        # Convert to a datetime at midnight for queries and the timestamp
        timestamp_datetime = datetime(date.year, date.month, date.day).replace(tzinfo=pytz.UTC)
        query_datetime = timestamp_datetime + timedelta(days=1)
        # Template query; the 'sources' match is filled in per provider below.
        elastic_query = {
            'query': {
                'bool': {
                    'must': [
                        {
                            'match': {
                                'type': 'preprint'
                            }
                        },
                        {
                            'match': {
                                'sources': None
                            }
                        }
                    ],
                    'filter': [
                        {
                            'range': {
                                'date': {
                                    'lte': '{}||/d'.format(query_datetime.strftime('%Y-%m-%d'))
                                }
                            }
                        }
                    ]
                }
            }
        }
        counts = []
        for preprint_provider in PreprintProvider.objects.all():
            # SHARE indexes the OSF provider under the short name 'OSF'.
            name = preprint_provider.name if preprint_provider.name != 'Open Science Framework' else 'OSF'
            elastic_query['query']['bool']['must'][1]['match']['sources'] = name
            resp = requests.post('https://share.osf.io/api/v2/search/creativeworks/_search', json=elastic_query).json()
            counts.append({
                'keen': {
                    'timestamp': timestamp_datetime.isoformat()
                },
                'provider': {
                    'name': preprint_provider.name,
                    'total': resp['hits']['total'],
                },
            })
            logger.info('{} Preprints counted for the provider {}'.format(resp['hits']['total'], preprint_provider.name))
        return counts
def get_class():
    """Entry point used by the analytics harness to discover this task."""
    return PreprintSummary
# Script entry point: collect and send events for the requested date.
if __name__ == '__main__':
    init_app()
    preprint_summary = PreprintSummary()
    args = preprint_summary.parse_args()
    yesterday = args.yesterday
    if yesterday:
        # Default to the previous calendar day.
        date = (timezone.now() - timedelta(days=1)).date()
    else:
        # NOTE(review): with neither --yesterday nor a date, this passes None
        # to get_events() — confirm the base class handles that.
        date = parse(args.date).date() if args.date else None
    events = preprint_summary.get_events(date)
    preprint_summary.send_events(events)
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
High-level abstraction of an EC2 server
"""
import boto
import boto.utils
from boto.compat import StringIO
from boto.mashups.iobject import IObject
from boto.pyami.config import Config, BotoConfigPath
from boto.mashups.interactive import interactive_shell
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty
import os
class ServerSet(list):
    """A list of Server objects that broadcasts attribute access.

    Reading an attribute returns the per-server values as a list.  If the
    attribute is callable on the servers, a mapper is returned instead so
    that ``serverset.method(args)`` calls the method on every server and
    returns the list of results.  Servers on which the attribute lookup
    fails contribute ``None`` in the corresponding position.
    """

    def __getattr__(self, name):
        results = []
        is_callable = False
        for server in self:
            try:
                val = getattr(server, name)
                if callable(val):
                    is_callable = True
                results.append(val)
            except Exception:
                # Was a bare ``except:``, which also swallowed
                # KeyboardInterrupt/SystemExit.  Keep positions aligned
                # with the servers by recording None for failures.
                results.append(None)
        if is_callable:
            self.map_list = results
            return self.map
        return results

    def map(self, *args):
        """Call every collected callable with *args*; return the results."""
        return [fn(*args) for fn in self.map_list]
class Server(Model):
@property
def ec2(self):
if self._ec2 is None:
self._ec2 = boto.connect_ec2()
return self._ec2
@classmethod
def Inventory(cls):
"""
Returns a list of Server instances, one for each Server object
persisted in the db
"""
l = ServerSet()
rs = cls.find()
for server in rs:
l.append(server)
return l
@classmethod
def Register(cls, name, instance_id, description=''):
s = cls()
s.name = name
s.instance_id = instance_id
s.description = description
s.save()
return s
def __init__(self, id=None, **kw):
super(Server, self).__init__(id, **kw)
self._reservation = None
self._instance = None
self._ssh_client = None
self._pkey = None
self._config = None
self._ec2 = None
name = StringProperty(unique=True, verbose_name="Name")
instance_id = StringProperty(verbose_name="Instance ID")
config_uri = StringProperty()
ami_id = StringProperty(verbose_name="AMI ID")
zone = StringProperty(verbose_name="Availability Zone")
security_group = StringProperty(verbose_name="Security Group", default="default")
key_name = StringProperty(verbose_name="Key Name")
elastic_ip = StringProperty(verbose_name="Elastic IP")
instance_type = StringProperty(verbose_name="Instance Type")
description = StringProperty(verbose_name="Description")
log = StringProperty()
def setReadOnly(self, value):
raise AttributeError
def getInstance(self):
if not self._instance:
if self.instance_id:
try:
rs = self.ec2.get_all_reservations([self.instance_id])
except:
return None
if len(rs) > 0:
self._reservation = rs[0]
self._instance = self._reservation.instances[0]
return self._instance
instance = property(getInstance, setReadOnly, None, 'The Instance for the server')
def getAMI(self):
if self.instance:
return self.instance.image_id
ami = property(getAMI, setReadOnly, None, 'The AMI for the server')
def getStatus(self):
if self.instance:
self.instance.update()
return self.instance.state
status = property(getStatus, setReadOnly, None,
'The status of the server')
def getHostname(self):
if self.instance:
return self.instance.public_dns_name
hostname = property(getHostname, setReadOnly, None,
'The public DNS name of the server')
def getPrivateHostname(self):
if self.instance:
return self.instance.private_dns_name
private_hostname = property(getPrivateHostname, setReadOnly, None,
'The private DNS name of the server')
def getLaunchTime(self):
if self.instance:
return self.instance.launch_time
launch_time = property(getLaunchTime, setReadOnly, None,
'The time the Server was started')
def getConsoleOutput(self):
if self.instance:
return self.instance.get_console_output()
console_output = property(getConsoleOutput, setReadOnly, None,
'Retrieve the console output for server')
def getGroups(self):
if self._reservation:
return self._reservation.groups
else:
return None
groups = property(getGroups, setReadOnly, None,
'The Security Groups controlling access to this server')
def getConfig(self):
if not self._config:
remote_file = BotoConfigPath
local_file = '%s.ini' % self.instance.id
self.get_file(remote_file, local_file)
self._config = Config(local_file)
return self._config
def setConfig(self, config):
local_file = '%s.ini' % self.instance.id
fp = open(local_file)
config.write(fp)
fp.close()
self.put_file(local_file, BotoConfigPath)
self._config = config
config = property(getConfig, setConfig, None,
'The instance data for this server')
def set_config(self, config):
"""
Set SDB based config
"""
self._config = config
self._config.dump_to_sdb("botoConfigs", self.id)
def load_config(self):
self._config = Config(do_load=False)
self._config.load_from_sdb("botoConfigs", self.id)
def stop(self):
if self.instance:
self.instance.stop()
def start(self):
self.stop()
ec2 = boto.connect_ec2()
ami = ec2.get_all_images(image_ids = [str(self.ami_id)])[0]
groups = ec2.get_all_security_groups(groupnames=[str(self.security_group)])
if not self._config:
self.load_config()
if not self._config.has_section("Credentials"):
self._config.add_section("Credentials")
self._config.set("Credentials", "aws_access_key_id", ec2.aws_access_key_id)
self._config.set("Credentials", "aws_secret_access_key", ec2.aws_secret_access_key)
if not self._config.has_section("Pyami"):
self._config.add_section("Pyami")
if self._manager.domain:
self._config.set('Pyami', 'server_sdb_domain', self._manager.domain.name)
self._config.set("Pyami", 'server_sdb_name', self.name)
cfg = StringIO()
self._config.write(cfg)
cfg = cfg.getvalue()
r = ami.run(min_count=1,
max_count=1,
key_name=self.key_name,
security_groups = groups,
instance_type = self.instance_type,
placement = self.zone,
user_data = cfg)
i = r.instances[0]
self.instance_id = i.id
self.put()
if self.elastic_ip:
ec2.associate_address(self.instance_id, self.elastic_ip)
def reboot(self):
if self.instance:
self.instance.reboot()
def get_ssh_client(self, key_file=None, host_key_file='~/.ssh/known_hosts',
uname='root'):
import paramiko
if not self.instance:
print('No instance yet!')
return
if not self._ssh_client:
if not key_file:
iobject = IObject()
key_file = iobject.get_filename('Path to OpenSSH Key file')
self._pkey = paramiko.RSAKey.from_private_key_file(key_file)
self._ssh_client = paramiko.SSHClient()
self._ssh_client.load_system_host_keys()
self._ssh_client.load_host_keys(os.path.expanduser(host_key_file))
self._ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self._ssh_client.connect(self.instance.public_dns_name,
username=uname, pkey=self._pkey)
return self._ssh_client
def get_file(self, remotepath, localpath):
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
sftp_client.get(remotepath, localpath)
def put_file(self, localpath, remotepath):
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
sftp_client.put(localpath, remotepath)
def listdir(self, remotepath):
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
return sftp_client.listdir(remotepath)
def shell(self, key_file=None):
ssh_client = self.get_ssh_client(key_file)
channel = ssh_client.invoke_shell()
interactive_shell(channel)
def bundle_image(self, prefix, key_file, cert_file, size):
print('bundling image...')
print('\tcopying cert and pk over to /mnt directory on server')
ssh_client = self.get_ssh_client()
sftp_client = ssh_client.open_sftp()
path, name = os.path.split(key_file)
remote_key_file = '/mnt/%s' % name
self.put_file(key_file, remote_key_file)
path, name = os.path.split(cert_file)
remote_cert_file = '/mnt/%s' % name
self.put_file(cert_file, remote_cert_file)
print('\tdeleting %s' % BotoConfigPath)
# delete the metadata.ini file if it exists
try:
sftp_client.remove(BotoConfigPath)
except:
pass
command = 'sudo ec2-bundle-vol '
command += '-c %s -k %s ' % (remote_cert_file, remote_key_file)
command += '-u %s ' % self._reservation.owner_id
command += '-p %s ' % prefix
command += '-s %d ' % size
command += '-d /mnt '
if self.instance.instance_type == 'm1.small' or self.instance_type == 'c1.medium':
command += '-r i386'
else:
command += '-r x86_64'
print('\t%s' % command)
t = ssh_client.exec_command(command)
response = t[1].read()
print('\t%s' % response)
print('\t%s' % t[2].read())
print('...complete!')
def upload_bundle(self, bucket, prefix):
print('uploading bundle...')
command = 'ec2-upload-bundle '
command += '-m /mnt/%s.manifest.xml ' % prefix
command += '-b %s ' % bucket
command += '-a %s ' % self.ec2.aws_access_key_id
command += '-s %s ' % self.ec2.aws_secret_access_key
print('\t%s' % command)
ssh_client = self.get_ssh_client()
t = ssh_client.exec_command(command)
response = t[1].read()
print('\t%s' % response)
print('\t%s' % t[2].read())
print('...complete!')
def create_image(self, bucket=None, prefix=None, key_file=None, cert_file=None, size=None):
iobject = IObject()
if not bucket:
bucket = iobject.get_string('Name of S3 bucket')
if not prefix:
prefix = iobject.get_string('Prefix for AMI file')
if not key_file:
key_file = iobject.get_filename('Path to RSA private key file')
if not cert_file:
cert_file = iobject.get_filename('Path to RSA public cert file')
if not size:
size = iobject.get_int('Size (in MB) of bundled image')
self.bundle_image(prefix, key_file, cert_file, size)
self.upload_bundle(bucket, prefix)
print('registering image...')
self.image_id = self.ec2.register_image('%s/%s.manifest.xml' % (bucket, prefix))
return self.image_id
def attach_volume(self, volume, device="/dev/sdp"):
"""
Attach an EBS volume to this server
:param volume: EBS Volume to attach
:type volume: boto.ec2.volume.Volume
:param device: Device to attach to (default to /dev/sdp)
:type device: string
"""
if hasattr(volume, "id"):
volume_id = volume.id
else:
volume_id = volume
return self.ec2.attach_volume(volume_id=volume_id, instance_id=self.instance_id, device=device)
def detach_volume(self, volume):
"""
Detach an EBS volume from this server
:param volume: EBS Volume to detach
:type volume: boto.ec2.volume.Volume
"""
if hasattr(volume, "id"):
volume_id = volume.id
else:
volume_id = volume
return self.ec2.detach_volume(volume_id=volume_id, instance_id=self.instance_id)
def install_package(self, package_name):
    """Install a package on the remote host with yum, over SSH."""
    print('installing %s...' % package_name)
    command = 'yum -y install %s' % package_name
    print('\t%s' % command)
    # exec_command returns (stdin, stdout, stderr); echo both output streams.
    channels = self.get_ssh_client().exec_command(command)
    stdout_data = channels[1].read()
    print('\t%s' % stdout_data)
    print('\t%s' % channels[2].read())
    print('...complete!')
---
name: triaging-issues
description: Triages GitHub issues by routing to oncall teams, applying labels, and closing questions. Use when processing new PyTorch issues or when asked to triage an issue.
hooks:
PreToolUse:
- matcher: "mcp__github__issue_write|mcp__github__update_issue"
hooks:
- type: command
command: "python3 \"$CLAUDE_PROJECT_DIR\"/.claude/skills/triaging-issues/scripts/validate_labels.py"
PostToolUse:
- matcher: "mcp__github__issue_write|mcp__github__update_issue|mcp__github__add_issue_comment|mcp__github__transfer_issue"
hooks:
- type: command
command: "python3 \"$CLAUDE_PROJECT_DIR\"/.claude/skills/triaging-issues/scripts/add_bot_triaged.py"
---
# PyTorch Issue Triage Skill
This skill helps triage GitHub issues by routing issues, applying labels, and leaving first-line responses.
## Contents
- [MCP Tools Available](#mcp-tools-available)
- [Labels You Must NEVER Add](#labels-you-must-never-add)
- [Issue Triage Steps](#issue-triage-for-each-issue)
- Step 0: Already Routed — SKIP
- Step 1: Question vs Bug/Feature
- Step 1.5: Needs Reproduction — External Files
- Step 1.6: Edge Cases & Numerical Accuracy
- Step 2: Transfer
- Step 2.5: PT2 Issues — Special Handling
- Step 3: Redirect to Secondary Oncall
- Step 4: Label the Issue
- Step 5: High Priority — REQUIRES HUMAN REVIEW
- Step 6: bot-triaged (automatic)
- Step 7: Mark Triaged
- [V1 Constraints](#v1-constraints)
**Labels reference:** See [labels.json](labels.json) for the full catalog of 305 labels suitable for triage. **ONLY apply labels that exist in this file.** Do not invent or guess label names. This file excludes CI triggers, test configs, release notes, and deprecated labels.
**PT2 triage guide:** See [pt2-triage-rubric.md](pt2-triage-rubric.md) for detailed labeling guidance when triaging PT2/torch.compile issues.
**Response templates:** See [templates.json](templates.json) for standard response messages.
---
## MCP Tools Available
Use these GitHub MCP tools for triage:
| Tool | Purpose |
|------|---------|
| `mcp__github__issue_read` | Get issue details, comments, and existing labels |
| `mcp__github__issue_write` | Apply labels or close issues |
| `mcp__github__add_issue_comment` | Add comment (only for redirecting questions) |
| `mcp__github__search_issues` | Find similar issues for context |
---
## Labels You Must NEVER Add
| Prefix/Category | Reason |
|-----------------|--------|
| Labels not in `labels.json` | Only apply labels that exist in the allowlist |
| `ciflow/*` | CI job triggers for PRs only |
| `test-config/*` | Test suite selectors for PRs only |
| `release notes: *` | Auto-assigned for release notes |
| `ci-*`, `ci:*` | CI infrastructure controls |
| `sev*` | Severity labels require human decision |
| `merge blocking` | Requires human decision |
| Any label containing "deprecated" | Obsolete |
**If blocked:** When a label is blocked by the hook, add ONLY `triage review` and stop. A human will handle it.
These rules are enforced by a PreToolUse hook that validates all labels against `labels.json`.
---
## Issue Triage (for each issue)
### 0) Already Routed — SKIP
**If an issue already has ANY `oncall:` label, SKIP IT entirely.** Do not:
- Add any labels
- Add `triaged`
- Leave comments
- Do any triage work
That issue belongs to the sub-oncall team. They own their queue.
### 1) Question vs Bug/Feature
- If it is a question (not a bug report or feature request): close and use the `redirect_to_forum` template from `templates.json`.
- If unclear whether it is a bug/feature vs a question: request additional information using the `request_more_info` template and stop.
### 1.5) Needs Reproduction — External Files
Check if the issue body contains links to external files that users would need to download to reproduce.
**Patterns to detect:**
- File attachments: `.zip`, `.pt`, `.pth`, `.pkl`, `.safetensors`, `.onnx`, `.bin` files
- External storage: Google Drive, Dropbox, OneDrive, Mega, WeTransfer links
- Model hubs: Hugging Face Hub links to model files
**Action:**
1. **Edit the issue body** to remove/redact the download links
- Replace with: `[Link removed - external file downloads are not permitted for security reasons]`
2. Add `needs reproduction` label
3. Use the `needs_reproduction` template from `templates.json` to request a self-contained reproduction
4. Do NOT add `triaged` — wait for the user to provide a reproducible example
### 1.6) Edge Cases & Numerical Accuracy
If the issue involves extremal values or numerical precision differences:
**Patterns to detect:**
- Values near `torch.finfo(dtype).max` or `torch.finfo(dtype).min`
- NaN/Inf appearing in outputs from valid (but extreme) inputs
- Differences between CPU and GPU results
- Precision differences between dtypes (e.g., fp32 vs fp16)
- Fuzzer-generated edge cases
**Action:**
1. Add `module: edge cases` label
2. If from a fuzzer, also add `topic: fuzzer`
3. Use the `numerical_accuracy` template from `templates.json` to link to the docs
4. If the issue is clearly expected behavior per the docs, close it with the template comment
### 2) Transfer (domain library or ExecuTorch)
If the issue belongs in another repo (vision/text/audio/RL/ExecuTorch/etc.), transfer the issue and **STOP**.
### 2.5) PT2 Issues — Special Handling
When triaging PT2 issues (torch.compile, dynamo, inductor), see [pt2-triage-rubric.md](pt2-triage-rubric.md) for detailed labeling decisions.
**Key differences from general triage:**
- For PT2 issues, you MAY apply `module:` labels (e.g., `module: dynamo`, `module: inductor`, `module: dynamic shapes`)
- Use the rubric to determine the correct component labels
- Only redirect to `oncall: cpu inductor` for MKLDNN-specific issues; otherwise keep in PT2 queue
### 3) Redirect to Secondary Oncall
**CRITICAL:** When redirecting issues to an oncall queue (with the exception of PT2), apply exactly one `oncall: ...` label and **STOP**. Do NOT:
- Add any `module:` labels
- Mark it `triaged`
- Do any further triage work
The sub-oncall team will handle their own triage. Your job is only to route it to them.
#### Oncall Redirect Labels
| Label | When to use |
|-------|-------------|
| `oncall: jit` | TorchScript issues |
| `oncall: distributed` | Distributed training (DDP, FSDP, RPC, c10d, DTensor, DeviceMesh, symmetric memory, context parallel, pipelining) |
| `oncall: export` | torch.export issues |
| `oncall: quantization` | Quantization issues |
| `oncall: mobile` | Mobile (iOS/Android), excludes ExecuTorch |
| `oncall: profiler` | Profiler issues (CPU, GPU, Kineto) |
| `oncall: visualization` | TensorBoard integration |
**Note:** `oncall: cpu inductor` is a sub-queue of PT2. For general triage, just use `oncall: pt2`.
### 4) Label the issue (if NOT transferred/redirected)
Only if the issue stays in the general queue:
- Add 1+ `module: ...` labels based on the affected area
- If feature request: add `feature` (or `function request` for a new function or new arguments/modes)
- If small improvement: add `enhancement`
### 5) High Priority — REQUIRES HUMAN REVIEW
**CRITICAL:** If you believe an issue is high priority, you MUST:
1. Add `triage review` label and do not add `triaged`
Do NOT directly add `high priority` without human confirmation.
High priority criteria:
- Crash / segfault / illegal memory access
- Silent correctness issue (wrong results without error)
- Regression from a prior version
- Internal assert failure
- Many users affected
- Core component or popular model impact
### 6) bot-triaged (automatic)
The `bot-triaged` label is automatically applied by a post-hook after any issue mutation. You do not need to add it manually.
### 7) Mark triaged
If not transferred/redirected and not flagged for review, add `triaged`.
---
## V1 Constraints
**DO NOT:**
- Close bug reports or feature requests automatically
- Close issues unless they are clear usage questions per Step 1
- Assign issues to users
- Add `high priority` directly without human confirmation
- Add module labels when redirecting to oncall
- Add comments to bug reports or feature requests, except a single info request when classification is unclear
**DO:**
- Close clear usage questions and point to discuss.pytorch.org (per step 1)
- Be conservative - when in doubt, add `triage review` for human attention
- Apply type labels (`feature`, `enhancement`, `function request`) when confident
- Add `triaged` label when classification is complete
**Note:** `bot-triaged` is automatically applied by a post-hook after any issue mutation. | unknown | github | https://github.com/pytorch/pytorch | .claude/skills/triaging-issues/SKILL.md |
# Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest as ut
import unittest_decorators as utx
import numpy as np
import espressomd
import tests_common
@utx.skipIfMissingFeatures(["MASS", "ROTATIONAL_INERTIA"])
class RotationalInertia(ut.TestCase):
    """Test rotational dynamics of a particle with anisotropic rotational
    inertia: conservation of the lab-frame angular momentum around the
    stable and unstable principal axes, and conservation of energy and
    angular-momentum magnitude during free rotation.
    """
    longMessage = True
    # Handle for espresso system
    system = espressomd.System(box_l=[1.0, 1.0, 1.0])
    system.cell_system.skin = 0
    # Particle's angular momentum (lab frame): initial and ongoing
    L_0_lab = np.zeros((3))
    L_lab = np.zeros((3))

    def L_body(self, part):
        """Return the body-frame angular momentum of particle ``part``."""
        return self.system.part[part].omega_body[:] * \
            self.system.part[part].rinertia[:]

    def set_L_0(self, part):
        """Store the current lab-frame angular momentum as the reference."""
        L_0_body = self.L_body(part)
        self.L_0_lab = tests_common.convert_vec_body_to_space(
            self.system, part, L_0_body)

    def set_L(self, part):
        """Refresh the current lab-frame angular momentum."""
        L_body = self.L_body(part)
        self.L_lab = tests_common.convert_vec_body_to_space(
            self.system, part, L_body)

    def _check_axis(self, part, axis_label, stable_axis, stable_omega, tol):
        """Integrate 1000 steps and verify angular-momentum conservation.

        The lab-frame angular momentum must stay at its initial value at
        every check. If ``stable_axis`` is not None, the omega component
        along that body axis must additionally stay at ``stable_omega``
        (motion around a stable principal axis); for the unstable axis the
        omega components themselves are not stationary, so pass None.

        :param part: particle id
        :param axis_label: axis name used in failure messages (e.g. 'J1')
        :param stable_axis: index of the stable body axis, or None
        :param stable_omega: expected omega component along the stable axis
        :param tol: absolute tolerance for all comparisons
        """
        self.set_L_0(part)
        for i in range(100):
            self.set_L(part)
            for k in range(3):
                self.assertAlmostEqual(
                    self.L_lab[k], self.L_0_lab[k], delta=tol,
                    msg='Inertial motion around stable axis {0}: Deviation '
                        'in angular momentum is too large. Step {1}, '
                        'coordinate {2}, expected {3}, got {4}'.format(
                            axis_label, i, k, self.L_0_lab[k], self.L_lab[k]))
            if stable_axis is not None:
                self.assertAlmostEqual(
                    self.system.part[part].omega_body[stable_axis],
                    stable_omega, delta=tol,
                    msg='Inertial motion around stable axis {0}: Deviation '
                        'in omega is too large. Step {1}, coordinate {2}, '
                        'expected {3}, got {4}'.format(
                            axis_label, i, stable_axis, stable_omega,
                            self.system.part[part].omega_body[stable_axis]))
            self.system.integrator.run(10)

    def test_stability(self):
        """Angular-momentum conservation around stable and unstable axes."""
        self.system.part.clear()
        self.system.part.add(
            pos=np.array([0.0, 0.0, 0.0]), id=0, rotation=(1, 1, 1))
        # Inertial motion around the stable and unstable axes
        tol = 4E-3
        # Anisotropic inertial moment. Stable axes correspond to J[1] and
        # J[2]. The unstable axis corresponds to J[0]. These values
        # relation is J[1] < J[0] < J[2].
        J = np.array([5, 0.5, 18.5])
        self.system.part[0].rinertia = J[:]

        # Validation of J[1] stability
        # ----------------------------
        self.system.time_step = 0.0006
        # Stable omega component should be larger than other components.
        stable_omega = 57.65
        self.system.part[0].omega_body = np.array([0.15, stable_omega, -0.043])
        self._check_axis(0, 'J1', 1, stable_omega, tol)

        # Validation of J[2] stability
        # ----------------------------
        self.system.time_step = 0.01
        # Stable omega component should be larger than other components.
        stable_omega = 3.2
        self.system.part[0].omega_body = np.array(
            [0.011, -0.043, stable_omega])
        self._check_axis(0, 'J2', 2, stable_omega, tol)

        # Validation of J[0]
        # ------------------
        self.system.time_step = 0.001
        # Unstable omega component should be larger than other components;
        # only the angular momentum is checked here, not omega itself.
        unstable_omega = 5.76
        self.system.part[0].omega_body = np.array(
            [unstable_omega, -0.043, 0.15])
        self._check_axis(0, 'J0', None, None, tol)

    def energy(self, p):
        """Rotational kinetic energy of particle handle ``p``."""
        return 0.5 * np.dot(p.rinertia, p.omega_body**2)

    def momentum(self, p):
        """Magnitude of the body-frame angular momentum of ``p``."""
        return np.linalg.norm(p.rinertia * p.omega_body)

    def test_energy_and_momentum_conservation(self):
        """Energy and angular-momentum magnitude stay constant during free
        rotation without a thermostat."""
        system = self.system
        system.part.clear()
        system.thermostat.turn_off()
        p = system.part.add(pos=(0, 0, 0), rinertia=(1.1, 1.3, 1.5),
                            rotation=(1, 1, 1), omega_body=(2, 1, 4))
        E0 = self.energy(p)
        m0 = self.momentum(p)
        system.time_step = 0.001
        for _ in range(1000):
            system.integrator.run(100)
            self.assertAlmostEqual(self.energy(p), E0, places=3)
            self.assertAlmostEqual(self.momentum(p), m0, places=3)
# Run the test suite when executed as a script.
if __name__ == '__main__':
    ut.main()
/* this file was generated by Tools/unicode/makeunicodedata.py 3.3 */
/* a list of unique character type descriptors */
const _PyUnicode_TypeRecord _PyUnicode_TypeRecords[] = {
{0, 0, 0, 0, 0, 0},
{0, 0, 0, 0, 0, 32},
{0, 0, 0, 0, 0, 48},
{0, 0, 0, 0, 0, 1056},
{0, 0, 0, 0, 0, 1024},
{0, 0, 0, 0, 0, 5120},
{0, 0, 0, 0, 0, 3590},
{0, 0, 0, 1, 1, 3590},
{0, 0, 0, 2, 2, 3590},
{0, 0, 0, 3, 3, 3590},
{0, 0, 0, 4, 4, 3590},
{0, 0, 0, 5, 5, 3590},
{0, 0, 0, 6, 6, 3590},
{0, 0, 0, 7, 7, 3590},
{0, 0, 0, 8, 8, 3590},
{0, 0, 0, 9, 9, 3590},
{0, 32, 0, 0, 0, 10113},
{0, 0, 0, 0, 0, 1536},
{-32, 0, -32, 0, 0, 9993},
{0, 0, 0, 0, 0, 9993},
{0, 0, 0, 0, 0, 4096},
{0, 0, 0, 0, 2, 3076},
{0, 0, 0, 0, 3, 3076},
{16777218, 17825792, 16777218, 0, 0, 26377},
{0, 0, 0, 0, 0, 5632},
{0, 0, 0, 0, 1, 3076},
{0, 0, 0, 0, 0, 3072},
{33554438, 18874371, 33554440, 0, 0, 26377},
{121, 0, 121, 0, 0, 9993},
{0, 1, 0, 0, 0, 10113},
{-1, 0, -1, 0, 0, 9993},
{16777228, 33554442, 16777228, 0, 0, 26497},
{-232, 0, -232, 0, 0, 9993},
{33554448, 18874381, 33554448, 0, 0, 26377},
{0, -121, 0, 0, 0, 10113},
{16777236, 17825810, 16777236, 0, 0, 26377},
{195, 0, 195, 0, 0, 9993},
{0, 210, 0, 0, 0, 10113},
{0, 206, 0, 0, 0, 10113},
{0, 205, 0, 0, 0, 10113},
{0, 79, 0, 0, 0, 10113},
{0, 202, 0, 0, 0, 10113},
{0, 203, 0, 0, 0, 10113},
{0, 207, 0, 0, 0, 10113},
{97, 0, 97, 0, 0, 9993},
{0, 211, 0, 0, 0, 10113},
{0, 209, 0, 0, 0, 10113},
{163, 0, 163, 0, 0, 9993},
{42561, 0, 42561, 0, 0, 9993},
{0, 213, 0, 0, 0, 10113},
{130, 0, 130, 0, 0, 9993},
{0, 214, 0, 0, 0, 10113},
{0, 218, 0, 0, 0, 10113},
{0, 217, 0, 0, 0, 10113},
{0, 219, 0, 0, 0, 10113},
{0, 0, 0, 0, 0, 1793},
{56, 0, 56, 0, 0, 9993},
{0, 2, 1, 0, 0, 10113},
{-1, 1, 0, 0, 0, 10049},
{-2, 0, -1, 0, 0, 9993},
{-79, 0, -79, 0, 0, 9993},
{33554456, 18874389, 33554456, 0, 0, 26377},
{0, -97, 0, 0, 0, 10113},
{0, -56, 0, 0, 0, 10113},
{0, -130, 0, 0, 0, 10113},
{0, 10795, 0, 0, 0, 10113},
{0, -163, 0, 0, 0, 10113},
{0, 10792, 0, 0, 0, 10113},
{10815, 0, 10815, 0, 0, 9993},
{0, -195, 0, 0, 0, 10113},
{0, 69, 0, 0, 0, 10113},
{0, 71, 0, 0, 0, 10113},
{10783, 0, 10783, 0, 0, 9993},
{10780, 0, 10780, 0, 0, 9993},
{10782, 0, 10782, 0, 0, 9993},
{-210, 0, -210, 0, 0, 9993},
{-206, 0, -206, 0, 0, 9993},
{-205, 0, -205, 0, 0, 9993},
{-202, 0, -202, 0, 0, 9993},
{-203, 0, -203, 0, 0, 9993},
{42319, 0, 42319, 0, 0, 9993},
{42315, 0, 42315, 0, 0, 9993},
{-207, 0, -207, 0, 0, 9993},
{42343, 0, 42343, 0, 0, 9993},
{42280, 0, 42280, 0, 0, 9993},
{42308, 0, 42308, 0, 0, 9993},
{-209, 0, -209, 0, 0, 9993},
{-211, 0, -211, 0, 0, 9993},
{10743, 0, 10743, 0, 0, 9993},
{42305, 0, 42305, 0, 0, 9993},
{10749, 0, 10749, 0, 0, 9993},
{-213, 0, -213, 0, 0, 9993},
{-214, 0, -214, 0, 0, 9993},
{10727, 0, 10727, 0, 0, 9993},
{-218, 0, -218, 0, 0, 9993},
{42307, 0, 42307, 0, 0, 9993},
{42282, 0, 42282, 0, 0, 9993},
{-69, 0, -69, 0, 0, 9993},
{-217, 0, -217, 0, 0, 9993},
{-71, 0, -71, 0, 0, 9993},
{-219, 0, -219, 0, 0, 9993},
{42261, 0, 42261, 0, 0, 9993},
{42258, 0, 42258, 0, 0, 9993},
{0, 0, 0, 0, 0, 14089},
{0, 0, 0, 0, 0, 5889},
{16777244, 17825818, 16777244, 0, 0, 30216},
{0, 0, 0, 0, 0, 13321},
{0, 116, 0, 0, 0, 10113},
{0, 38, 0, 0, 0, 10113},
{0, 37, 0, 0, 0, 10113},
{0, 64, 0, 0, 0, 10113},
{0, 63, 0, 0, 0, 10113},
{50331681, 19922973, 50331681, 0, 0, 26377},
{-38, 0, -38, 0, 0, 9993},
{-37, 0, -37, 0, 0, 9993},
{50331688, 19922980, 50331688, 0, 0, 26377},
{16777261, 17825835, 16777261, 0, 0, 26377},
{-64, 0, -64, 0, 0, 9993},
{-63, 0, -63, 0, 0, 9993},
{0, 8, 0, 0, 0, 10113},
{16777264, 17825838, 16777264, 0, 0, 26377},
{16777267, 17825841, 16777267, 0, 0, 26377},
{0, 0, 0, 0, 0, 10113},
{16777270, 17825844, 16777270, 0, 0, 26377},
{16777273, 17825847, 16777273, 0, 0, 26377},
{-8, 0, -8, 0, 0, 9993},
{16777276, 17825850, 16777276, 0, 0, 26377},
{16777279, 17825853, 16777279, 0, 0, 26377},
{7, 0, 7, 0, 0, 9993},
{-116, 0, -116, 0, 0, 9993},
{0, -60, 0, 0, 0, 10113},
{16777282, 17825856, 16777282, 0, 0, 26377},
{0, -7, 0, 0, 0, 10113},
{0, 80, 0, 0, 0, 10113},
{-80, 0, -80, 0, 0, 9993},
{0, 15, 0, 0, 0, 10113},
{-15, 0, -15, 0, 0, 9993},
{0, 48, 0, 0, 0, 10113},
{-48, 0, -48, 0, 0, 9993},
{33554502, 18874435, 33554504, 0, 0, 26377},
{0, 0, 0, 0, 0, 1537},
{0, 7264, 0, 0, 0, 10113},
{3008, 0, 0, 0, 0, 9993},
{0, 0, 0, 0, 1, 3588},
{0, 0, 0, 0, 2, 3588},
{0, 0, 0, 0, 3, 3588},
{0, 0, 0, 0, 4, 3588},
{0, 0, 0, 0, 5, 3588},
{0, 0, 0, 0, 6, 3588},
{0, 0, 0, 0, 7, 3588},
{0, 0, 0, 0, 8, 3588},
{0, 0, 0, 0, 9, 3588},
{16777292, 17825866, 16777292, 0, 0, 26497},
{16777295, 17825869, 16777295, 0, 0, 26497},
{16777298, 17825872, 16777298, 0, 0, 26497},
{16777301, 17825875, 16777301, 0, 0, 26497},
{16777304, 17825878, 16777304, 0, 0, 26497},
{16777307, 17825881, 16777307, 0, 0, 26497},
{16777310, 17825884, 16777310, 0, 0, 26497},
{16777313, 17825887, 16777313, 0, 0, 26497},
{16777316, 17825890, 16777316, 0, 0, 26497},
{16777319, 17825893, 16777319, 0, 0, 26497},
{16777322, 17825896, 16777322, 0, 0, 26497},
{16777325, 17825899, 16777325, 0, 0, 26497},
{16777328, 17825902, 16777328, 0, 0, 26497},
{16777331, 17825905, 16777331, 0, 0, 26497},
{16777334, 17825908, 16777334, 0, 0, 26497},
{16777337, 17825911, 16777337, 0, 0, 26497},
{16777340, 17825914, 16777340, 0, 0, 26497},
{16777343, 17825917, 16777343, 0, 0, 26497},
{16777346, 17825920, 16777346, 0, 0, 26497},
{16777349, 17825923, 16777349, 0, 0, 26497},
{16777352, 17825926, 16777352, 0, 0, 26497},
{16777355, 17825929, 16777355, 0, 0, 26497},
{16777358, 17825932, 16777358, 0, 0, 26497},
{16777361, 17825935, 16777361, 0, 0, 26497},
{16777364, 17825938, 16777364, 0, 0, 26497},
{16777367, 17825941, 16777367, 0, 0, 26497},
{16777370, 17825944, 16777370, 0, 0, 26497},
{16777373, 17825947, 16777373, 0, 0, 26497},
{16777376, 17825950, 16777376, 0, 0, 26497},
{16777379, 17825953, 16777379, 0, 0, 26497},
{16777382, 17825956, 16777382, 0, 0, 26497},
{16777385, 17825959, 16777385, 0, 0, 26497},
{16777388, 17825962, 16777388, 0, 0, 26497},
{16777391, 17825965, 16777391, 0, 0, 26497},
{16777394, 17825968, 16777394, 0, 0, 26497},
{16777397, 17825971, 16777397, 0, 0, 26497},
{16777400, 17825974, 16777400, 0, 0, 26497},
{16777403, 17825977, 16777403, 0, 0, 26497},
{16777406, 17825980, 16777406, 0, 0, 26497},
{16777409, 17825983, 16777409, 0, 0, 26497},
{16777412, 17825986, 16777412, 0, 0, 26497},
{16777415, 17825989, 16777415, 0, 0, 26497},
{16777418, 17825992, 16777418, 0, 0, 26497},
{16777421, 17825995, 16777421, 0, 0, 26497},
{16777424, 17825998, 16777424, 0, 0, 26497},
{16777427, 17826001, 16777427, 0, 0, 26497},
{16777430, 17826004, 16777430, 0, 0, 26497},
{16777433, 17826007, 16777433, 0, 0, 26497},
{16777436, 17826010, 16777436, 0, 0, 26497},
{16777439, 17826013, 16777439, 0, 0, 26497},
{16777442, 17826016, 16777442, 0, 0, 26497},
{16777445, 17826019, 16777445, 0, 0, 26497},
{16777448, 17826022, 16777448, 0, 0, 26497},
{16777451, 17826025, 16777451, 0, 0, 26497},
{16777454, 17826028, 16777454, 0, 0, 26497},
{16777457, 17826031, 16777457, 0, 0, 26497},
{16777460, 17826034, 16777460, 0, 0, 26497},
{16777463, 17826037, 16777463, 0, 0, 26497},
{16777466, 17826040, 16777466, 0, 0, 26497},
{16777469, 17826043, 16777469, 0, 0, 26497},
{16777472, 17826046, 16777472, 0, 0, 26497},
{16777475, 17826049, 16777475, 0, 0, 26497},
{16777478, 17826052, 16777478, 0, 0, 26497},
{16777481, 17826055, 16777481, 0, 0, 26497},
{16777484, 17826058, 16777484, 0, 0, 26497},
{16777487, 17826061, 16777487, 0, 0, 26497},
{16777490, 17826064, 16777490, 0, 0, 26497},
{16777493, 17826067, 16777493, 0, 0, 26497},
{16777496, 17826070, 16777496, 0, 0, 26497},
{16777499, 17826073, 16777499, 0, 0, 26497},
{16777502, 17826076, 16777502, 0, 0, 26497},
{16777505, 17826079, 16777505, 0, 0, 26497},
{16777508, 17826082, 16777508, 0, 0, 26497},
{16777511, 17826085, 16777511, 0, 0, 26497},
{16777514, 17826088, 16777514, 0, 0, 26497},
{16777517, 17826091, 16777517, 0, 0, 26497},
{16777520, 17826094, 16777520, 0, 0, 26497},
{16777523, 17826097, 16777523, 0, 0, 26497},
{16777526, 17826100, 16777526, 0, 0, 26497},
{16777529, 17826103, 16777529, 0, 0, 26497},
{16777532, 17826106, 16777532, 0, 0, 26497},
{16777535, 17826109, 16777535, 0, 0, 26497},
{16777538, 17826112, 16777538, 0, 0, 26497},
{16777541, 17826115, 16777541, 0, 0, 26497},
{16777544, 17826118, 16777544, 0, 0, 26497},
{16777547, 17826121, 16777547, 0, 0, 26497},
{16777550, 17826124, 16777550, 0, 0, 26377},
{16777553, 17826127, 16777553, 0, 0, 26377},
{16777556, 17826130, 16777556, 0, 0, 26377},
{16777559, 17826133, 16777559, 0, 0, 26377},
{16777562, 17826136, 16777562, 0, 0, 26377},
{16777565, 17826139, 16777565, 0, 0, 26377},
{0, 0, 0, 0, 0, 3840},
{0, 0, 0, 0, 0, 5888},
{16777568, 17826142, 16777568, 0, 0, 26377},
{16777571, 17826145, 16777571, 0, 0, 26377},
{16777574, 17826148, 16777574, 0, 0, 26377},
{16777577, 17826151, 16777577, 0, 0, 26377},
{16777580, 17826154, 16777580, 0, 0, 26377},
{16777583, 17826157, 16777583, 0, 0, 26377},
{16777586, 17826160, 16777586, 0, 0, 26377},
{16777589, 17826163, 16777589, 0, 0, 26377},
{16777592, 17826166, 16777592, 0, 0, 26377},
{0, -3008, 0, 0, 0, 10113},
{35332, 0, 35332, 0, 0, 9993},
{3814, 0, 3814, 0, 0, 9993},
{35384, 0, 35384, 0, 0, 9993},
{33554812, 18874745, 33554812, 0, 0, 26377},
{33554817, 18874750, 33554817, 0, 0, 26377},
{33554822, 18874755, 33554822, 0, 0, 26377},
{33554827, 18874760, 33554827, 0, 0, 26377},
{33554832, 18874765, 33554832, 0, 0, 26377},
{16777620, 17826194, 16777620, 0, 0, 26377},
{16777624, 18874773, 16777624, 0, 0, 26497},
{8, 0, 8, 0, 0, 9993},
{0, -8, 0, 0, 0, 10113},
{33554844, 18874777, 33554844, 0, 0, 26377},
{50332066, 19923358, 50332066, 0, 0, 26377},
{50332073, 19923365, 50332073, 0, 0, 26377},
{50332080, 19923372, 50332080, 0, 0, 26377},
{74, 0, 74, 0, 0, 9993},
{86, 0, 86, 0, 0, 9993},
{100, 0, 100, 0, 0, 9993},
{128, 0, 128, 0, 0, 9993},
{112, 0, 112, 0, 0, 9993},
{126, 0, 126, 0, 0, 9993},
{33554870, 18874803, 16777656, 0, 0, 26377},
{33554876, 18874809, 16777662, 0, 0, 26377},
{33554882, 18874815, 16777668, 0, 0, 26377},
{33554888, 18874821, 16777674, 0, 0, 26377},
{33554894, 18874827, 16777680, 0, 0, 26377},
{33554900, 18874833, 16777686, 0, 0, 26377},
{33554906, 18874839, 16777692, 0, 0, 26377},
{33554912, 18874845, 16777698, 0, 0, 26377},
{33554918, 18874851, 16777704, 0, 0, 26433},
{33554924, 18874857, 16777710, 0, 0, 26433},
{33554930, 18874863, 16777716, 0, 0, 26433},
{33554936, 18874869, 16777722, 0, 0, 26433},
{33554942, 18874875, 16777728, 0, 0, 26433},
{33554948, 18874881, 16777734, 0, 0, 26433},
{33554954, 18874887, 16777740, 0, 0, 26433},
{33554960, 18874893, 16777746, 0, 0, 26433},
{33554966, 18874899, 16777752, 0, 0, 26377},
{33554972, 18874905, 16777758, 0, 0, 26377},
{33554978, 18874911, 16777764, 0, 0, 26377},
{33554984, 18874917, 16777770, 0, 0, 26377},
{33554990, 18874923, 16777776, 0, 0, 26377},
{33554996, 18874929, 16777782, 0, 0, 26377},
{33555002, 18874935, 16777788, 0, 0, 26377},
{33555008, 18874941, 16777794, 0, 0, 26377},
{33555014, 18874947, 16777800, 0, 0, 26433},
{33555020, 18874953, 16777806, 0, 0, 26433},
{33555026, 18874959, 16777812, 0, 0, 26433},
{33555032, 18874965, 16777818, 0, 0, 26433},
{33555038, 18874971, 16777824, 0, 0, 26433},
{33555044, 18874977, 16777830, 0, 0, 26433},
{33555050, 18874983, 16777836, 0, 0, 26433},
{33555056, 18874989, 16777842, 0, 0, 26433},
{33555062, 18874995, 16777848, 0, 0, 26377},
{33555068, 18875001, 16777854, 0, 0, 26377},
{33555074, 18875007, 16777860, 0, 0, 26377},
{33555080, 18875013, 16777866, 0, 0, 26377},
{33555086, 18875019, 16777872, 0, 0, 26377},
{33555092, 18875025, 16777878, 0, 0, 26377},
{33555098, 18875031, 16777884, 0, 0, 26377},
{33555104, 18875037, 16777890, 0, 0, 26377},
{33555110, 18875043, 16777896, 0, 0, 26433},
{33555116, 18875049, 16777902, 0, 0, 26433},
{33555122, 18875055, 16777908, 0, 0, 26433},
{33555128, 18875061, 16777914, 0, 0, 26433},
{33555134, 18875067, 16777920, 0, 0, 26433},
{33555140, 18875073, 16777926, 0, 0, 26433},
{33555146, 18875079, 16777932, 0, 0, 26433},
{33555152, 18875085, 16777938, 0, 0, 26433},
{33555158, 18875091, 33555160, 0, 0, 26377},
{33555165, 18875098, 16777951, 0, 0, 26377},
{33555171, 18875104, 33555173, 0, 0, 26377},
{33555178, 18875111, 33555178, 0, 0, 26377},
{50332400, 19923692, 50332403, 0, 0, 26377},
{0, -74, 0, 0, 0, 10113},
{33555193, 18875126, 16777979, 0, 0, 26433},
{16777982, 17826556, 16777982, 0, 0, 26377},
{33555202, 18875135, 33555204, 0, 0, 26377},
{33555209, 18875142, 16777995, 0, 0, 26377},
{33555215, 18875148, 33555217, 0, 0, 26377},
{33555222, 18875155, 33555222, 0, 0, 26377},
{50332444, 19923736, 50332447, 0, 0, 26377},
{0, -86, 0, 0, 0, 10113},
{33555237, 18875170, 16778023, 0, 0, 26433},
{50332460, 19923752, 50332460, 0, 0, 26377},
{50332467, 19923759, 50332467, 0, 0, 26377},
{33555257, 18875190, 33555257, 0, 0, 26377},
{50332479, 19923771, 50332479, 0, 0, 26377},
{0, -100, 0, 0, 0, 10113},
{50332486, 19923778, 50332486, 0, 0, 26377},
{50332493, 19923785, 50332493, 0, 0, 26377},
{33555283, 18875216, 33555283, 0, 0, 26377},
{33555288, 18875221, 33555288, 0, 0, 26377},
{50332510, 19923802, 50332510, 0, 0, 26377},
{0, -112, 0, 0, 0, 10113},
{33555300, 18875233, 33555302, 0, 0, 26377},
{33555307, 18875240, 16778093, 0, 0, 26377},
{33555313, 18875246, 33555315, 0, 0, 26377},
{33555320, 18875253, 33555320, 0, 0, 26377},
{50332542, 19923834, 50332545, 0, 0, 26377},
{0, -128, 0, 0, 0, 10113},
{0, -126, 0, 0, 0, 10113},
{33555335, 18875268, 16778121, 0, 0, 26433},
{0, 0, 0, 0, 0, 4608},
{0, 0, 0, 0, 0, 3076},
{0, 0, 0, 0, 4, 3076},
{0, 0, 0, 0, 5, 3076},
{0, 0, 0, 0, 6, 3076},
{0, 0, 0, 0, 7, 3076},
{0, 0, 0, 0, 8, 3076},
{0, 0, 0, 0, 9, 3076},
{0, 0, 0, 0, 0, 1792},
{0, -7517, 0, 0, 0, 10113},
{0, -8383, 0, 0, 0, 10113},
{0, -8262, 0, 0, 0, 10113},
{0, 28, 0, 0, 0, 10113},
{-28, 0, -28, 0, 0, 9993},
{0, 16, 0, 0, 0, 12160},
{-16, 0, -16, 0, 0, 12040},
{0, 26, 0, 0, 0, 9344},
{-26, 0, -26, 0, 0, 9224},
{0, -10743, 0, 0, 0, 10113},
{0, -3814, 0, 0, 0, 10113},
{0, -10727, 0, 0, 0, 10113},
{-10795, 0, -10795, 0, 0, 9993},
{-10792, 0, -10792, 0, 0, 9993},
{0, -10780, 0, 0, 0, 10113},
{0, -10749, 0, 0, 0, 10113},
{0, -10783, 0, 0, 0, 10113},
{0, -10782, 0, 0, 0, 10113},
{0, -10815, 0, 0, 0, 10113},
{-7264, 0, -7264, 0, 0, 9993},
{0, 0, 0, 0, 0, 5121},
{0, 0, 0, 0, 0, 3841},
{0, -35332, 0, 0, 0, 10113},
{0, -42280, 0, 0, 0, 10113},
{48, 0, 48, 0, 0, 9993},
{0, -42308, 0, 0, 0, 10113},
{0, -42319, 0, 0, 0, 10113},
{0, -42315, 0, 0, 0, 10113},
{0, -42305, 0, 0, 0, 10113},
{0, -42258, 0, 0, 0, 10113},
{0, -42282, 0, 0, 0, 10113},
{0, -42261, 0, 0, 0, 10113},
{0, 928, 0, 0, 0, 10113},
{0, -48, 0, 0, 0, 10113},
{0, -42307, 0, 0, 0, 10113},
{0, -35384, 0, 0, 0, 10113},
{0, -42343, 0, 0, 0, 10113},
{0, -42561, 0, 0, 0, 10113},
{-928, 0, -928, 0, 0, 9993},
{16778124, 17826698, 16778124, 0, 0, 26377},
{16778127, 17826701, 16778127, 0, 0, 26377},
{16778130, 17826704, 16778130, 0, 0, 26377},
{16778133, 17826707, 16778133, 0, 0, 26377},
{16778136, 17826710, 16778136, 0, 0, 26377},
{16778139, 17826713, 16778139, 0, 0, 26377},
{16778142, 17826716, 16778142, 0, 0, 26377},
{16778145, 17826719, 16778145, 0, 0, 26377},
{16778148, 17826722, 16778148, 0, 0, 26377},
{16778151, 17826725, 16778151, 0, 0, 26377},
{16778154, 17826728, 16778154, 0, 0, 26377},
{16778157, 17826731, 16778157, 0, 0, 26377},
{16778160, 17826734, 16778160, 0, 0, 26377},
{16778163, 17826737, 16778163, 0, 0, 26377},
{16778166, 17826740, 16778166, 0, 0, 26377},
{16778169, 17826743, 16778169, 0, 0, 26377},
{16778172, 17826746, 16778172, 0, 0, 26377},
{16778175, 17826749, 16778175, 0, 0, 26377},
{16778178, 17826752, 16778178, 0, 0, 26377},
{16778181, 17826755, 16778181, 0, 0, 26377},
{16778184, 17826758, 16778184, 0, 0, 26377},
{16778187, 17826761, 16778187, 0, 0, 26377},
{16778190, 17826764, 16778190, 0, 0, 26377},
{16778193, 17826767, 16778193, 0, 0, 26377},
{16778196, 17826770, 16778196, 0, 0, 26377},
{16778199, 17826773, 16778199, 0, 0, 26377},
{16778202, 17826776, 16778202, 0, 0, 26377},
{16778205, 17826779, 16778205, 0, 0, 26377},
{16778208, 17826782, 16778208, 0, 0, 26377},
{16778211, 17826785, 16778211, 0, 0, 26377},
{16778214, 17826788, 16778214, 0, 0, 26377},
{16778217, 17826791, 16778217, 0, 0, 26377},
{16778220, 17826794, 16778220, 0, 0, 26377},
{16778223, 17826797, 16778223, 0, 0, 26377},
{16778226, 17826800, 16778226, 0, 0, 26377},
{16778229, 17826803, 16778229, 0, 0, 26377},
{16778232, 17826806, 16778232, 0, 0, 26377},
{16778235, 17826809, 16778235, 0, 0, 26377},
{16778238, 17826812, 16778238, 0, 0, 26377},
{16778241, 17826815, 16778241, 0, 0, 26377},
{16778244, 17826818, 16778244, 0, 0, 26377},
{16778247, 17826821, 16778247, 0, 0, 26377},
{16778250, 17826824, 16778250, 0, 0, 26377},
{16778253, 17826827, 16778253, 0, 0, 26377},
{16778256, 17826830, 16778256, 0, 0, 26377},
{16778259, 17826833, 16778259, 0, 0, 26377},
{16778262, 17826836, 16778262, 0, 0, 26377},
{16778265, 17826839, 16778265, 0, 0, 26377},
{16778268, 17826842, 16778268, 0, 0, 26377},
{16778271, 17826845, 16778271, 0, 0, 26377},
{16778274, 17826848, 16778274, 0, 0, 26377},
{16778277, 17826851, 16778277, 0, 0, 26377},
{16778280, 17826854, 16778280, 0, 0, 26377},
{16778283, 17826857, 16778283, 0, 0, 26377},
{16778286, 17826860, 16778286, 0, 0, 26377},
{16778289, 17826863, 16778289, 0, 0, 26377},
{16778292, 17826866, 16778292, 0, 0, 26377},
{16778295, 17826869, 16778295, 0, 0, 26377},
{16778298, 17826872, 16778298, 0, 0, 26377},
{16778301, 17826875, 16778301, 0, 0, 26377},
{16778304, 17826878, 16778304, 0, 0, 26377},
{16778307, 17826881, 16778307, 0, 0, 26377},
{16778310, 17826884, 16778310, 0, 0, 26377},
{16778313, 17826887, 16778313, 0, 0, 26377},
{16778316, 17826890, 16778316, 0, 0, 26377},
{16778319, 17826893, 16778319, 0, 0, 26377},
{16778322, 17826896, 16778322, 0, 0, 26377},
{16778325, 17826899, 16778325, 0, 0, 26377},
{16778328, 17826902, 16778328, 0, 0, 26377},
{16778331, 17826905, 16778331, 0, 0, 26377},
{16778334, 17826908, 16778334, 0, 0, 26377},
{16778337, 17826911, 16778337, 0, 0, 26377},
{16778340, 17826914, 16778340, 0, 0, 26377},
{16778343, 17826917, 16778343, 0, 0, 26377},
{16778346, 17826920, 16778346, 0, 0, 26377},
{16778349, 17826923, 16778349, 0, 0, 26377},
{16778352, 17826926, 16778352, 0, 0, 26377},
{16778355, 17826929, 16778355, 0, 0, 26377},
{16778358, 17826932, 16778358, 0, 0, 26377},
{16778361, 17826935, 16778361, 0, 0, 26377},
{33555581, 18875514, 33555583, 0, 0, 26377},
{33555588, 18875521, 33555590, 0, 0, 26377},
{33555595, 18875528, 33555597, 0, 0, 26377},
{50332819, 19924111, 50332822, 0, 0, 26377},
{50332829, 19924121, 50332832, 0, 0, 26377},
{33555622, 18875555, 33555624, 0, 0, 26377},
{33555629, 18875562, 33555631, 0, 0, 26377},
{33555636, 18875569, 33555638, 0, 0, 26377},
{33555643, 18875576, 33555645, 0, 0, 26377},
{33555650, 18875583, 33555652, 0, 0, 26377},
{33555657, 18875590, 33555659, 0, 0, 26377},
{33555664, 18875597, 33555666, 0, 0, 26377},
{0, 0, 0, 0, 0, 1025},
{0, 0, 0, 0, 0, 5633},
{0, 40, 0, 0, 0, 10113},
{-40, 0, -40, 0, 0, 9993},
{0, 39, 0, 0, 0, 10113},
{-39, 0, -39, 0, 0, 9993},
{0, 27, 0, 0, 0, 10113},
{-27, 0, -27, 0, 0, 9993},
{0, 34, 0, 0, 0, 10113},
{-34, 0, -34, 0, 0, 9993},
{0, 0, 0, 0, 0, 9344},
};
/* extended case mappings */
const Py_UCS4 _PyUnicode_ExtendedCase[] = {
181,
956,
924,
223,
115,
115,
83,
83,
83,
115,
105,
775,
304,
329,
700,
110,
700,
78,
383,
115,
83,
496,
106,
780,
74,
780,
837,
953,
921,
912,
953,
776,
769,
921,
776,
769,
944,
965,
776,
769,
933,
776,
769,
962,
963,
931,
976,
946,
914,
977,
952,
920,
981,
966,
934,
982,
960,
928,
1008,
954,
922,
1009,
961,
929,
1013,
949,
917,
1415,
1381,
1410,
1333,
1362,
1333,
1410,
43888,
5024,
5024,
43889,
5025,
5025,
43890,
5026,
5026,
43891,
5027,
5027,
43892,
5028,
5028,
43893,
5029,
5029,
43894,
5030,
5030,
43895,
5031,
5031,
43896,
5032,
5032,
43897,
5033,
5033,
43898,
5034,
5034,
43899,
5035,
5035,
43900,
5036,
5036,
43901,
5037,
5037,
43902,
5038,
5038,
43903,
5039,
5039,
43904,
5040,
5040,
43905,
5041,
5041,
43906,
5042,
5042,
43907,
5043,
5043,
43908,
5044,
5044,
43909,
5045,
5045,
43910,
5046,
5046,
43911,
5047,
5047,
43912,
5048,
5048,
43913,
5049,
5049,
43914,
5050,
5050,
43915,
5051,
5051,
43916,
5052,
5052,
43917,
5053,
5053,
43918,
5054,
5054,
43919,
5055,
5055,
43920,
5056,
5056,
43921,
5057,
5057,
43922,
5058,
5058,
43923,
5059,
5059,
43924,
5060,
5060,
43925,
5061,
5061,
43926,
5062,
5062,
43927,
5063,
5063,
43928,
5064,
5064,
43929,
5065,
5065,
43930,
5066,
5066,
43931,
5067,
5067,
43932,
5068,
5068,
43933,
5069,
5069,
43934,
5070,
5070,
43935,
5071,
5071,
43936,
5072,
5072,
43937,
5073,
5073,
43938,
5074,
5074,
43939,
5075,
5075,
43940,
5076,
5076,
43941,
5077,
5077,
43942,
5078,
5078,
43943,
5079,
5079,
43944,
5080,
5080,
43945,
5081,
5081,
43946,
5082,
5082,
43947,
5083,
5083,
43948,
5084,
5084,
43949,
5085,
5085,
43950,
5086,
5086,
43951,
5087,
5087,
43952,
5088,
5088,
43953,
5089,
5089,
43954,
5090,
5090,
43955,
5091,
5091,
43956,
5092,
5092,
43957,
5093,
5093,
43958,
5094,
5094,
43959,
5095,
5095,
43960,
5096,
5096,
43961,
5097,
5097,
43962,
5098,
5098,
43963,
5099,
5099,
43964,
5100,
5100,
43965,
5101,
5101,
43966,
5102,
5102,
43967,
5103,
5103,
5112,
5104,
5104,
5113,
5105,
5105,
5114,
5106,
5106,
5115,
5107,
5107,
5116,
5108,
5108,
5117,
5109,
5109,
5112,
5104,
5104,
5113,
5105,
5105,
5114,
5106,
5106,
5115,
5107,
5107,
5116,
5108,
5108,
5117,
5109,
5109,
7296,
1074,
1042,
7297,
1076,
1044,
7298,
1086,
1054,
7299,
1089,
1057,
7300,
1090,
1058,
7301,
1090,
1058,
7302,
1098,
1066,
7303,
1123,
1122,
7304,
42571,
42570,
7830,
104,
817,
72,
817,
7831,
116,
776,
84,
776,
7832,
119,
778,
87,
778,
7833,
121,
778,
89,
778,
7834,
97,
702,
65,
702,
7835,
7777,
7776,
223,
115,
115,
7838,
8016,
965,
787,
933,
787,
8018,
965,
787,
768,
933,
787,
768,
8020,
965,
787,
769,
933,
787,
769,
8022,
965,
787,
834,
933,
787,
834,
8064,
7936,
953,
7944,
921,
8072,
8065,
7937,
953,
7945,
921,
8073,
8066,
7938,
953,
7946,
921,
8074,
8067,
7939,
953,
7947,
921,
8075,
8068,
7940,
953,
7948,
921,
8076,
8069,
7941,
953,
7949,
921,
8077,
8070,
7942,
953,
7950,
921,
8078,
8071,
7943,
953,
7951,
921,
8079,
8064,
7936,
953,
7944,
921,
8072,
8065,
7937,
953,
7945,
921,
8073,
8066,
7938,
953,
7946,
921,
8074,
8067,
7939,
953,
7947,
921,
8075,
8068,
7940,
953,
7948,
921,
8076,
8069,
7941,
953,
7949,
921,
8077,
8070,
7942,
953,
7950,
921,
8078,
8071,
7943,
953,
7951,
921,
8079,
8080,
7968,
953,
7976,
921,
8088,
8081,
7969,
953,
7977,
921,
8089,
8082,
7970,
953,
7978,
921,
8090,
8083,
7971,
953,
7979,
921,
8091,
8084,
7972,
953,
7980,
921,
8092,
8085,
7973,
953,
7981,
921,
8093,
8086,
7974,
953,
7982,
921,
8094,
8087,
7975,
953,
7983,
921,
8095,
8080,
7968,
953,
7976,
921,
8088,
8081,
7969,
953,
7977,
921,
8089,
8082,
7970,
953,
7978,
921,
8090,
8083,
7971,
953,
7979,
921,
8091,
8084,
7972,
953,
7980,
921,
8092,
8085,
7973,
953,
7981,
921,
8093,
8086,
7974,
953,
7982,
921,
8094,
8087,
7975,
953,
7983,
921,
8095,
8096,
8032,
953,
8040,
921,
8104,
8097,
8033,
953,
8041,
921,
8105,
8098,
8034,
953,
8042,
921,
8106,
8099,
8035,
953,
8043,
921,
8107,
8100,
8036,
953,
8044,
921,
8108,
8101,
8037,
953,
8045,
921,
8109,
8102,
8038,
953,
8046,
921,
8110,
8103,
8039,
953,
8047,
921,
8111,
8096,
8032,
953,
8040,
921,
8104,
8097,
8033,
953,
8041,
921,
8105,
8098,
8034,
953,
8042,
921,
8106,
8099,
8035,
953,
8043,
921,
8107,
8100,
8036,
953,
8044,
921,
8108,
8101,
8037,
953,
8045,
921,
8109,
8102,
8038,
953,
8046,
921,
8110,
8103,
8039,
953,
8047,
921,
8111,
8114,
8048,
953,
8122,
921,
8122,
837,
8115,
945,
953,
913,
921,
8124,
8116,
940,
953,
902,
921,
902,
837,
8118,
945,
834,
913,
834,
8119,
945,
834,
953,
913,
834,
921,
913,
834,
837,
8115,
945,
953,
913,
921,
8124,
8126,
953,
921,
8130,
8052,
953,
8138,
921,
8138,
837,
8131,
951,
953,
919,
921,
8140,
8132,
942,
953,
905,
921,
905,
837,
8134,
951,
834,
919,
834,
8135,
951,
834,
953,
919,
834,
921,
919,
834,
837,
8131,
951,
953,
919,
921,
8140,
8146,
953,
776,
768,
921,
776,
768,
8147,
953,
776,
769,
921,
776,
769,
8150,
953,
834,
921,
834,
8151,
953,
776,
834,
921,
776,
834,
8162,
965,
776,
768,
933,
776,
768,
8163,
965,
776,
769,
933,
776,
769,
8164,
961,
787,
929,
787,
8166,
965,
834,
933,
834,
8167,
965,
776,
834,
933,
776,
834,
8178,
8060,
953,
8186,
921,
8186,
837,
8179,
969,
953,
937,
921,
8188,
8180,
974,
953,
911,
921,
911,
837,
8182,
969,
834,
937,
834,
8183,
969,
834,
953,
937,
834,
921,
937,
834,
837,
8179,
969,
953,
937,
921,
8188,
43888,
5024,
5024,
43889,
5025,
5025,
43890,
5026,
5026,
43891,
5027,
5027,
43892,
5028,
5028,
43893,
5029,
5029,
43894,
5030,
5030,
43895,
5031,
5031,
43896,
5032,
5032,
43897,
5033,
5033,
43898,
5034,
5034,
43899,
5035,
5035,
43900,
5036,
5036,
43901,
5037,
5037,
43902,
5038,
5038,
43903,
5039,
5039,
43904,
5040,
5040,
43905,
5041,
5041,
43906,
5042,
5042,
43907,
5043,
5043,
43908,
5044,
5044,
43909,
5045,
5045,
43910,
5046,
5046,
43911,
5047,
5047,
43912,
5048,
5048,
43913,
5049,
5049,
43914,
5050,
5050,
43915,
5051,
5051,
43916,
5052,
5052,
43917,
5053,
5053,
43918,
5054,
5054,
43919,
5055,
5055,
43920,
5056,
5056,
43921,
5057,
5057,
43922,
5058,
5058,
43923,
5059,
5059,
43924,
5060,
5060,
43925,
5061,
5061,
43926,
5062,
5062,
43927,
5063,
5063,
43928,
5064,
5064,
43929,
5065,
5065,
43930,
5066,
5066,
43931,
5067,
5067,
43932,
5068,
5068,
43933,
5069,
5069,
43934,
5070,
5070,
43935,
5071,
5071,
43936,
5072,
5072,
43937,
5073,
5073,
43938,
5074,
5074,
43939,
5075,
5075,
43940,
5076,
5076,
43941,
5077,
5077,
43942,
5078,
5078,
43943,
5079,
5079,
43944,
5080,
5080,
43945,
5081,
5081,
43946,
5082,
5082,
43947,
5083,
5083,
43948,
5084,
5084,
43949,
5085,
5085,
43950,
5086,
5086,
43951,
5087,
5087,
43952,
5088,
5088,
43953,
5089,
5089,
43954,
5090,
5090,
43955,
5091,
5091,
43956,
5092,
5092,
43957,
5093,
5093,
43958,
5094,
5094,
43959,
5095,
5095,
43960,
5096,
5096,
43961,
5097,
5097,
43962,
5098,
5098,
43963,
5099,
5099,
43964,
5100,
5100,
43965,
5101,
5101,
43966,
5102,
5102,
43967,
5103,
5103,
64256,
102,
102,
70,
70,
70,
102,
64257,
102,
105,
70,
73,
70,
105,
64258,
102,
108,
70,
76,
70,
108,
64259,
102,
102,
105,
70,
70,
73,
70,
102,
105,
64260,
102,
102,
108,
70,
70,
76,
70,
102,
108,
64261,
115,
116,
83,
84,
83,
116,
64262,
115,
116,
83,
84,
83,
116,
64275,
1396,
1398,
1348,
1350,
1348,
1398,
64276,
1396,
1381,
1348,
1333,
1348,
1381,
64277,
1396,
1387,
1348,
1339,
1348,
1387,
64278,
1406,
1398,
1358,
1350,
1358,
1398,
64279,
1396,
1389,
1348,
1341,
1348,
1389,
};
/* type indexes */
#define SHIFT 7
static const unsigned short index1[] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 34, 35, 36, 37,
38, 39, 34, 34, 34, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 64, 64, 64, 65, 66, 64,
64, 64, 64, 67, 68, 64, 64, 64, 64, 64, 64, 69, 64, 70, 71, 72, 73, 74,
75, 64, 76, 77, 78, 79, 80, 81, 82, 64, 64, 83, 84, 34, 34, 34, 34, 34,
34, 85, 34, 34, 34, 34, 34, 86, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 87, 88, 89, 90, 91, 92, 34, 93, 34, 34,
34, 94, 95, 34, 34, 34, 34, 34, 96, 34, 34, 34, 97, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 98, 99, 100, 34, 34, 34, 34, 34, 34, 101, 102, 34,
34, 34, 34, 34, 34, 34, 34, 103, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
104, 34, 34, 34, 92, 34, 34, 34, 34, 34, 34, 34, 34, 105, 34, 34, 34, 34,
106, 107, 34, 34, 34, 34, 34, 108, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 92, 34, 34, 34, 34, 34, 34, 109, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 110, 111, 34, 34, 34, 34, 34, 34,
34, 34, 34, 112, 34, 34, 34, 34, 113, 34, 34, 114, 115, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 116, 34, 34, 34,
34, 34, 34, 34, 34, 117, 34, 34, 118, 119, 120, 121, 122, 123, 124, 125,
126, 127, 128, 129, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 130, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 132, 133,
134, 135, 136, 137, 138, 34, 139, 140, 141, 142, 143, 144, 145, 146, 147,
148, 131, 149, 150, 151, 152, 153, 154, 155, 34, 34, 156, 157, 158, 159,
160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 131, 184, 185, 186,
187, 131, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199,
131, 200, 201, 202, 203, 34, 34, 34, 204, 34, 205, 206, 207, 34, 208,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 209, 34, 34, 34, 34, 34, 34, 34, 34, 210,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 146, 34, 34, 34, 34, 211,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 212, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 34, 34, 34, 34,
213, 214, 215, 216, 131, 131, 217, 131, 218, 219, 220, 221, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
222, 223, 224, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
225, 34, 34, 226, 34, 34, 227, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 228, 229, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 64,
230, 64, 64, 64, 231, 232, 233, 64, 234, 235, 236, 237, 238, 239, 131,
240, 241, 242, 243, 244, 245, 246, 247, 64, 64, 64, 64, 248, 249, 131,
131, 131, 131, 131, 131, 131, 131, 250, 131, 251, 252, 253, 131, 131,
254, 131, 131, 131, 255, 131, 256, 131, 257, 131, 258, 34, 259, 260, 131,
131, 131, 131, 131, 261, 262, 263, 131, 264, 265, 131, 131, 266, 267,
268, 269, 270, 131, 64, 271, 64, 64, 64, 64, 64, 272, 64, 273, 274, 275,
64, 64, 276, 277, 64, 278, 131, 131, 131, 131, 131, 131, 131, 131, 279,
280, 281, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 85,
282, 34, 283, 284, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 285,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 286, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 287, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 108, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 288, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 289, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 290,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 291, 34, 34, 34, 34, 292, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 34, 285, 34,
34, 293, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
294, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34,
34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 295, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 296, 131, 297, 298, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131,
131, 131, 131,
};
static const unsigned short index2[] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 2, 2, 2, 1, 3, 4, 4, 4, 4, 4, 4, 5, 4, 4, 4, 4, 4, 4, 5, 4,
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 5, 4, 4, 4, 4, 4, 4, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 4, 4, 4, 5, 17, 5, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 4, 4,
4, 4, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 4, 4, 4, 4, 4, 4, 4, 5, 4, 19, 4, 4,
20, 4, 5, 4, 4, 21, 22, 5, 23, 4, 24, 5, 25, 19, 4, 26, 26, 26, 4, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 4, 16, 16, 16, 16, 16, 16, 16, 27, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
4, 18, 18, 18, 18, 18, 18, 18, 28, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 31, 32, 29, 30, 29, 30, 29, 30, 19, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 33, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 34, 29, 30, 29, 30, 29, 30, 35, 36, 37, 29, 30, 29, 30, 38,
29, 30, 39, 39, 29, 30, 19, 40, 41, 42, 29, 30, 39, 43, 44, 45, 46, 29,
30, 47, 48, 45, 49, 50, 51, 29, 30, 29, 30, 29, 30, 52, 29, 30, 52, 19,
19, 29, 30, 52, 29, 30, 53, 53, 29, 30, 29, 30, 54, 29, 30, 19, 55, 29,
30, 19, 56, 55, 55, 55, 55, 57, 58, 59, 57, 58, 59, 57, 58, 59, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 60, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 61, 57, 58,
59, 29, 30, 62, 63, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 64, 19, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 19, 19, 19, 19, 19, 19, 65,
29, 30, 66, 67, 68, 68, 29, 30, 69, 70, 71, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 72, 73, 74, 75, 76, 19, 77, 77, 19, 78, 19, 79, 80, 19, 19,
19, 77, 81, 19, 82, 83, 84, 85, 19, 86, 87, 85, 88, 89, 19, 19, 87, 19,
90, 91, 19, 19, 92, 19, 19, 19, 19, 19, 19, 19, 93, 19, 19, 94, 19, 95,
94, 19, 19, 19, 96, 94, 97, 98, 98, 99, 19, 19, 19, 19, 19, 100, 19, 55,
55, 19, 19, 19, 19, 19, 19, 19, 101, 102, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 103, 103, 103, 103, 103, 103, 103,
103, 103, 104, 104, 104, 104, 104, 104, 104, 103, 103, 5, 5, 5, 5, 104,
104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 103, 103, 103, 103, 103, 5, 5, 5, 5, 5, 5, 5,
104, 5, 104, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 105, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 29, 30, 29, 30, 104, 5, 29, 30, 0, 0, 106, 50, 50, 50, 4, 107, 0,
0, 0, 0, 5, 5, 108, 24, 109, 109, 109, 0, 110, 0, 111, 111, 112, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 0, 16, 16,
16, 16, 16, 16, 16, 16, 16, 113, 114, 114, 114, 115, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 116, 18, 18, 18, 18, 18,
18, 18, 18, 18, 117, 118, 118, 119, 120, 121, 122, 122, 122, 123, 124,
125, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 126, 127, 128, 129, 130, 131, 4, 29, 30, 132,
29, 30, 19, 64, 64, 64, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133,
133, 133, 133, 133, 133, 133, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 134,
134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134,
134, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 4,
24, 24, 24, 24, 24, 5, 5, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 135, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 136, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 0, 137, 137, 137, 137, 137, 137,
137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137,
137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137,
137, 137, 137, 137, 0, 0, 104, 4, 4, 4, 4, 4, 5, 19, 138, 138, 138, 138,
138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138,
138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138,
138, 138, 138, 138, 138, 138, 139, 19, 4, 4, 0, 0, 4, 4, 4, 0, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 4, 24, 4, 24, 24, 4, 24, 24, 4, 24, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 55, 55,
55, 55, 4, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 20, 20, 20, 20, 20, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 4,
20, 4, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 4, 4, 4, 4, 55, 55, 24, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 4, 55, 24, 24, 24, 24, 24, 24, 24, 20, 4, 24, 24, 24, 24, 24, 24,
104, 104, 24, 24, 4, 24, 24, 24, 24, 55, 55, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 55, 55, 55, 4, 4, 55, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
0, 20, 55, 24, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24,
24, 24, 24, 24, 24, 24, 24, 24, 104, 104, 4, 4, 4, 4, 104, 0, 0, 24, 4,
4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 24, 24, 24, 24, 104, 24, 24, 24, 24, 24, 24, 24, 24,
24, 104, 24, 24, 24, 104, 24, 24, 24, 24, 24, 0, 0, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24, 24, 0, 0,
4, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 5, 55, 55, 55, 55, 55, 55, 55, 20, 20, 0, 0, 0, 0, 0, 24,
24, 24, 24, 24, 24, 24, 24, 24, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 20, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 17,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
24, 17, 24, 55, 17, 17, 17, 24, 24, 24, 24, 24, 24, 24, 24, 17, 17, 17,
17, 24, 17, 17, 55, 24, 24, 24, 24, 24, 24, 24, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 24, 24, 4, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4, 104,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 17, 17,
0, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 0, 0, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 0, 0, 0, 55, 55, 55, 55, 0, 0, 24,
55, 17, 17, 17, 24, 24, 24, 24, 0, 0, 17, 17, 0, 0, 17, 17, 24, 55, 0, 0,
0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 55, 55, 0, 55, 55, 55, 24, 24, 0, 0, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 55, 55, 4, 4, 26, 26, 26, 26, 26, 26, 4,
4, 55, 4, 24, 0, 0, 24, 24, 17, 0, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0,
55, 55, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 0,
55, 55, 0, 55, 55, 0, 0, 24, 0, 17, 17, 17, 24, 24, 0, 0, 0, 0, 24, 24,
0, 0, 24, 24, 24, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 0,
55, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 24, 24, 55,
55, 55, 24, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 24, 17, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55,
55, 55, 55, 55, 0, 55, 55, 0, 55, 55, 55, 55, 55, 0, 0, 24, 55, 17, 17,
17, 24, 24, 24, 24, 24, 0, 24, 24, 17, 0, 17, 17, 24, 0, 0, 55, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 24, 24, 0, 0, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 4, 4, 0, 0, 0, 0, 0, 0, 0, 55, 24, 24, 24, 24, 24,
24, 0, 24, 17, 17, 0, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 0, 0,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 0, 55, 55, 55,
55, 55, 0, 0, 24, 55, 17, 24, 17, 24, 24, 24, 24, 0, 0, 17, 17, 0, 0, 17,
17, 24, 0, 0, 0, 0, 0, 0, 0, 24, 24, 17, 0, 0, 0, 0, 55, 55, 0, 55, 55,
55, 24, 24, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4, 55, 26, 26, 26,
26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 55, 0, 55, 55, 55, 55, 55,
55, 0, 0, 0, 55, 55, 55, 0, 55, 55, 55, 55, 0, 0, 0, 55, 55, 0, 55, 0,
55, 55, 0, 0, 0, 55, 55, 0, 0, 0, 55, 55, 55, 0, 0, 0, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 17, 17, 24, 17, 17, 0, 0, 0,
17, 17, 17, 0, 17, 17, 17, 24, 0, 0, 55, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 26,
26, 26, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 24, 17, 17, 17, 24, 55,
55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 24, 55,
24, 24, 24, 17, 17, 17, 17, 0, 24, 24, 24, 0, 24, 24, 24, 24, 0, 0, 0, 0,
0, 0, 0, 24, 24, 0, 55, 55, 55, 0, 55, 55, 0, 0, 55, 55, 24, 24, 0, 0, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0, 4, 26, 26, 26, 26,
26, 26, 26, 4, 55, 24, 17, 17, 4, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55,
55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 0, 55, 55, 55, 55, 55, 0, 0, 24, 55, 17, 24, 17, 17, 17, 17, 17, 0,
24, 17, 17, 0, 17, 17, 24, 24, 0, 0, 0, 0, 0, 0, 0, 17, 17, 0, 0, 0, 0,
0, 55, 55, 55, 0, 55, 55, 24, 24, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 0, 55, 55, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 24, 17, 17,
55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
24, 24, 55, 17, 17, 17, 24, 24, 24, 24, 0, 17, 17, 17, 0, 17, 17, 17, 24,
55, 4, 0, 0, 0, 0, 55, 55, 55, 17, 26, 26, 26, 26, 26, 26, 26, 55, 55,
55, 24, 24, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 26, 26, 26, 26, 26,
26, 26, 26, 26, 4, 55, 55, 55, 55, 55, 55, 0, 24, 17, 17, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 0, 0,
55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 24, 0, 0, 0, 0, 17, 17, 17, 24, 24,
24, 0, 24, 0, 17, 17, 17, 17, 17, 17, 17, 17, 0, 0, 0, 0, 0, 0, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 0, 0, 17, 17, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 55, 140, 24, 24,
24, 24, 24, 24, 24, 0, 0, 0, 0, 4, 55, 55, 55, 55, 55, 55, 104, 24, 24,
24, 24, 24, 24, 24, 24, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 0, 55, 0, 55, 55, 55, 55, 55,
0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 0, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 24, 55, 140, 24, 24, 24, 24, 24, 24, 24, 24, 24, 55, 0, 0, 55, 55,
55, 55, 55, 0, 104, 0, 24, 24, 24, 24, 24, 24, 24, 0, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 0, 0, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 24, 24, 4, 4,
4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 4, 24, 4, 24, 4, 24, 4, 4, 4, 4, 17, 17, 55, 55, 55, 55,
55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 0, 0, 0, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 17, 24, 24, 24, 24, 24, 4, 24, 24, 55, 55, 55, 55, 55,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 0, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 0, 4, 4, 4, 4, 4, 4, 4,
4, 24, 4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 24,
24, 24, 24, 17, 24, 24, 24, 24, 24, 24, 17, 24, 24, 17, 17, 24, 24, 55,
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4, 4, 4, 4, 4, 4, 55, 55, 55, 55, 55,
55, 17, 17, 24, 24, 55, 55, 55, 55, 24, 24, 24, 55, 17, 17, 17, 55, 55,
17, 17, 17, 17, 17, 17, 17, 55, 55, 55, 24, 24, 24, 24, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 17, 17, 24, 24, 17, 17, 17, 17,
17, 17, 24, 55, 17, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17, 17, 17, 24,
4, 4, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141,
141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141,
141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 0, 141, 0, 0, 0,
0, 0, 141, 0, 0, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142,
142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142,
142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142,
142, 142, 142, 142, 4, 103, 142, 142, 142, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55,
55, 0, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 0, 55, 55, 55, 55, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55,
55, 55, 0, 55, 0, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 24, 24, 24, 4, 4,
4, 4, 4, 4, 4, 4, 4, 143, 144, 145, 146, 147, 148, 149, 150, 151, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0,
0, 0, 0, 0, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177,
178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191,
192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205,
206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219,
220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
234, 235, 236, 237, 0, 0, 238, 239, 240, 241, 242, 243, 0, 0, 4, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
1, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 0, 0, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 4,
244, 244, 244, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24,
24, 24, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24, 17, 4, 4, 0, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 0, 24, 24, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 24, 24, 17, 24, 24, 24, 24, 24, 24, 24, 17, 17, 17,
17, 17, 17, 17, 17, 24, 17, 17, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 4, 4, 4, 104, 4, 4, 4, 4, 55, 24, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 0, 0, 0, 0, 0, 0, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 0, 0,
0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 24, 24, 24, 20, 24, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 245, 245, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 24, 55, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 0, 24, 24, 24, 17, 17, 17, 17, 24, 24,
17, 17, 17, 0, 0, 0, 0, 17, 17, 24, 17, 17, 17, 17, 17, 17, 24, 24, 24,
0, 0, 0, 0, 4, 0, 0, 0, 4, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 143, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
24, 24, 17, 17, 24, 0, 0, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 17, 24, 17, 24, 24, 24, 24, 24, 24, 24, 0,
24, 17, 24, 17, 17, 24, 24, 24, 24, 24, 24, 24, 24, 17, 17, 17, 17, 17,
17, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 0, 0, 24, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0,
0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 104, 4, 4, 4, 4, 4, 4, 0, 0, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 5, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 0, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
24, 24, 24, 24, 17, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 17,
24, 24, 24, 24, 24, 17, 24, 17, 17, 17, 17, 17, 24, 17, 17, 55, 55, 55,
55, 55, 55, 55, 55, 0, 4, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 24, 24, 24, 24, 24, 24, 24, 24,
24, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 24, 24, 17, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 17, 24, 24, 24, 24, 17, 17, 24, 24, 17, 24,
24, 24, 55, 55, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 24, 17, 24, 24, 17, 17, 17, 24, 17, 24, 24, 24, 17, 17, 0, 0,
0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 17, 17, 17, 17, 17, 17, 17, 17, 24, 24, 24,
24, 24, 24, 24, 24, 17, 17, 24, 24, 0, 0, 0, 4, 4, 4, 4, 4, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 0, 0, 0, 55, 55, 55, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104, 104, 104,
104, 104, 104, 4, 4, 246, 247, 248, 249, 250, 251, 252, 253, 254, 29, 30,
0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
255, 255, 255, 255, 0, 0, 255, 255, 255, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0,
0, 0, 0, 0, 0, 24, 24, 24, 4, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 17, 24, 24, 24, 24, 24, 24, 24, 55, 55, 55, 55, 24, 55, 55, 55,
55, 55, 55, 24, 55, 55, 17, 24, 24, 55, 0, 0, 0, 0, 0, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 103, 256, 19, 19, 19, 257, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 258, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 259, 260,
261, 262, 263, 264, 19, 19, 265, 19, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 266, 266,
266, 266, 266, 266, 266, 266, 267, 267, 267, 267, 267, 267, 267, 267,
266, 266, 266, 266, 266, 266, 0, 0, 267, 267, 267, 267, 267, 267, 0, 0,
266, 266, 266, 266, 266, 266, 266, 266, 267, 267, 267, 267, 267, 267,
267, 267, 266, 266, 266, 266, 266, 266, 266, 266, 267, 267, 267, 267,
267, 267, 267, 267, 266, 266, 266, 266, 266, 266, 0, 0, 267, 267, 267,
267, 267, 267, 0, 0, 268, 266, 269, 266, 270, 266, 271, 266, 0, 267, 0,
267, 0, 267, 0, 267, 266, 266, 266, 266, 266, 266, 266, 266, 267, 267,
267, 267, 267, 267, 267, 267, 272, 272, 273, 273, 273, 273, 274, 274,
275, 275, 276, 276, 277, 277, 0, 0, 278, 279, 280, 281, 282, 283, 284,
285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298,
299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312,
313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 266,
266, 326, 327, 328, 0, 329, 330, 267, 267, 331, 331, 332, 5, 333, 5, 5,
5, 334, 335, 336, 0, 337, 338, 339, 339, 339, 339, 340, 5, 5, 5, 266,
266, 341, 342, 0, 0, 343, 344, 267, 267, 345, 345, 0, 5, 5, 5, 266, 266,
346, 347, 348, 128, 349, 350, 267, 267, 351, 351, 132, 5, 5, 5, 0, 0,
352, 353, 354, 0, 355, 356, 357, 357, 358, 358, 359, 5, 5, 0, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 20, 360, 360, 20, 20, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 4, 4, 5, 2, 2, 20, 20, 20, 20, 20, 1, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 17, 17, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 17, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 1, 20,
20, 20, 20, 20, 0, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 361, 103, 0,
0, 362, 363, 364, 365, 366, 367, 4, 4, 4, 4, 4, 103, 361, 25, 21, 22,
362, 363, 364, 365, 366, 367, 4, 4, 4, 4, 4, 0, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 5, 5, 5, 5, 24, 5, 5, 5, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4, 4, 122, 4, 4, 4, 4, 122, 4, 4, 19, 122, 122, 122, 19, 19, 122, 122,
122, 19, 4, 122, 4, 4, 368, 122, 122, 122, 122, 122, 4, 4, 4, 4, 4, 4,
122, 4, 369, 4, 122, 4, 370, 371, 122, 122, 368, 19, 122, 122, 372, 122,
19, 55, 55, 55, 55, 19, 4, 4, 19, 19, 122, 122, 4, 4, 4, 4, 4, 122, 19,
19, 19, 19, 4, 4, 4, 4, 373, 4, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 374, 374, 374, 374, 374, 374, 374, 374, 374, 374,
374, 374, 374, 374, 374, 374, 375, 375, 375, 375, 375, 375, 375, 375,
375, 375, 375, 375, 375, 375, 375, 375, 244, 244, 244, 29, 30, 244, 244,
244, 244, 26, 4, 4, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 21, 22, 362, 363, 364, 365, 366, 367,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 25, 21, 22, 362, 363, 364,
365, 366, 367, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 25, 21, 22,
362, 363, 364, 365, 366, 367, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 376, 376, 376, 376, 376, 376, 376, 376, 376, 376, 376, 376, 376,
376, 376, 376, 376, 376, 376, 376, 376, 376, 376, 376, 376, 376, 377,
377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377,
377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 377, 361, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 25, 21, 22, 362, 363, 364, 365, 366, 367, 26,
361, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 25,
21, 22, 362, 363, 364, 365, 366, 367, 26, 25, 21, 22, 362, 363, 364, 365,
366, 367, 26, 25, 21, 22, 362, 363, 364, 365, 366, 367, 26, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 137, 137, 137, 137, 137,
137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137,
137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137,
137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137,
137, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138,
138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138,
138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138,
138, 138, 138, 138, 138, 138, 138, 29, 30, 378, 379, 380, 381, 382, 29,
30, 29, 30, 29, 30, 383, 384, 385, 386, 19, 29, 30, 19, 29, 30, 19, 19,
19, 19, 19, 103, 103, 387, 387, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
19, 4, 4, 4, 4, 4, 4, 29, 30, 29, 30, 24, 24, 24, 29, 30, 0, 0, 0, 0, 0,
4, 4, 4, 4, 26, 4, 4, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388,
388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388,
388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 388, 0,
388, 0, 0, 0, 0, 0, 388, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 104, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0,
0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55,
55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55,
55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55,
55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 389, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 1, 4, 4, 4, 4, 104, 55, 244, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 244, 244, 244,
244, 244, 244, 244, 244, 244, 24, 24, 24, 24, 17, 17, 4, 104, 104, 104,
104, 104, 4, 4, 244, 244, 244, 104, 55, 4, 4, 4, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 24, 24, 5, 5, 104, 104, 55, 4,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
17, 104, 104, 104, 55, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 0, 4, 4, 26, 26, 26, 26, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 26, 26, 26, 26, 26, 26,
26, 26, 4, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 55, 55,
55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 390, 55, 55, 390, 55, 55, 55, 390,
55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 390, 55, 390,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 390, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
390, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
390, 55, 390, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390,
55, 390, 390, 390, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 390, 390, 390, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 390, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
390, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 390, 390,
55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55,
55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 390,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 104, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104, 104, 104,
104, 104, 104, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104,
4, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 55,
24, 5, 5, 5, 4, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 4, 104, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 29, 30, 29, 30, 29, 30, 103, 103, 24, 24, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 24, 24, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 104,
104, 104, 104, 104, 104, 104, 104, 104, 5, 5, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 19, 19, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 103,
19, 19, 19, 19, 19, 19, 19, 19, 29, 30, 29, 30, 391, 29, 30, 29, 30, 29,
30, 29, 30, 29, 30, 104, 5, 5, 29, 30, 392, 19, 55, 29, 30, 29, 30, 393,
19, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29,
30, 29, 30, 394, 395, 396, 397, 394, 19, 398, 399, 400, 401, 29, 30, 29,
30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 402, 403, 404, 29,
30, 29, 30, 405, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30, 29, 30,
29, 30, 406, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
103, 103, 103, 103, 29, 30, 55, 103, 103, 19, 55, 55, 55, 55, 55, 55, 55,
24, 55, 55, 55, 24, 55, 55, 55, 55, 24, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 24,
24, 17, 4, 4, 4, 4, 24, 0, 0, 0, 26, 26, 26, 26, 26, 26, 4, 4, 4, 4, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 17, 17, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 55, 55, 55, 55, 55, 55,
4, 4, 4, 55, 4, 55, 55, 24, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 24, 24, 4, 4, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 17, 17, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
0, 0, 0, 24, 24, 24, 17, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24,
17, 17, 24, 24, 24, 24, 17, 17, 24, 24, 17, 17, 17, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 0, 104, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0,
4, 4, 55, 55, 55, 55, 55, 24, 104, 55, 55, 55, 55, 55, 55, 55, 55, 55, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 24, 24, 24, 24, 24, 24, 17, 17, 24, 24, 17, 17, 24, 24, 0, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 24, 55, 55, 55, 55, 55, 55, 55, 55, 24, 17, 0,
0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 4, 4, 4, 4, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104, 55, 55, 55, 55, 55,
55, 4, 4, 4, 55, 17, 24, 17, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 24, 55, 24, 24, 24, 55, 55, 24, 24, 55, 55, 55, 55, 55, 24,
24, 55, 24, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 104, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 17, 24, 24, 17, 17, 4, 4, 55, 104, 104, 17, 24, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55, 55, 0, 0, 55,
55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55,
55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 407, 19, 19, 19, 19, 19, 19, 19, 5, 103,
103, 103, 103, 19, 19, 19, 19, 19, 19, 19, 19, 19, 103, 5, 5, 0, 0, 0, 0,
408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421,
422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435,
436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449,
450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463,
464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477,
478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 24, 17, 17, 24, 17,
17, 4, 17, 24, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0,
0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 390,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55,
390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 488, 489, 490, 491, 492, 493, 494, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 495, 496, 497, 498, 499, 0, 0, 0, 0, 0, 55, 24, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 0, 55, 55, 55, 55, 55, 0, 55, 0, 55, 55, 0, 55, 55, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 500,
500, 500, 500, 500, 500, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 4, 4, 4, 4, 4, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 500, 500,
4, 4, 4, 4, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 4, 4, 4, 5, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 4, 4, 4, 17, 17, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 17, 17, 17,
4, 4, 5, 0, 4, 5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0,
4, 4, 4, 4, 0, 0, 0, 0, 500, 55, 500, 55, 500, 0, 500, 55, 500, 55, 500,
55, 500, 55, 500, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 0, 20, 0, 4, 4, 4, 4, 4, 4, 5, 4, 4, 4, 4, 4, 4, 5, 4,
6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 5, 4, 4, 4, 4, 4, 4, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 4, 4, 4, 5, 17, 5, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 17, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 501, 501, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 0, 0, 0, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55,
55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 55, 0, 0, 0,
4, 4, 4, 5, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 20, 20, 20, 4, 4, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 4, 4,
4, 0, 0, 0, 0, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 4, 4, 4, 4,
4, 4, 4, 4, 4, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 26,
26, 26, 26, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 26, 26, 4,
4, 4, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 4, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 24, 0, 0, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 244, 55, 55, 55, 55, 55, 55, 55, 55, 244, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24,
24, 24, 24, 24, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 0, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 4, 244, 244, 244,
244, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 502, 502,
502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502,
502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502,
502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 503, 503, 503, 503,
503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503,
503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503,
503, 503, 503, 503, 503, 503, 503, 503, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 502, 502, 502, 502,
502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502,
502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502, 502,
502, 502, 502, 502, 0, 0, 0, 0, 503, 503, 503, 503, 503, 503, 503, 503,
503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503,
503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 503, 0,
0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
4, 504, 504, 504, 504, 504, 504, 504, 504, 504, 504, 504, 0, 504, 504,
504, 504, 504, 504, 504, 504, 504, 504, 504, 504, 504, 504, 504, 0, 504,
504, 504, 504, 504, 504, 504, 0, 504, 504, 0, 505, 505, 505, 505, 505,
505, 505, 505, 505, 505, 505, 0, 505, 505, 505, 505, 505, 505, 505, 505,
505, 505, 505, 505, 505, 505, 505, 0, 505, 505, 505, 505, 505, 505, 505,
0, 505, 505, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0,
0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55,
55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 103, 104, 104, 103, 103, 103, 0, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 0, 103, 103, 103, 103, 103, 103, 103,
103, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
55, 55, 55, 55, 55, 55, 0, 0, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0,
55, 55, 0, 0, 0, 55, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 4, 26, 26, 26, 26, 26,
26, 26, 26, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 26, 26, 26, 26, 26, 26, 26, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 26,
26, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 0, 0, 0, 0, 0, 26, 26, 26, 26,
26, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 26, 26, 26, 26, 26, 26, 0, 0, 0, 4, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 0, 0, 0, 0, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 26, 26, 55, 55, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 0, 0, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 55, 24, 24, 24, 0, 24, 24, 0, 0, 0, 0, 0,
24, 24, 24, 24, 55, 55, 55, 55, 0, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 0, 0, 24, 24, 24, 0, 0, 0, 0, 24, 25, 21, 22, 362,
26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 26, 26, 4,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 26, 26, 26, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 4, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 24, 24, 0, 0, 0, 0, 26, 26, 26, 26, 26, 4, 4, 4, 4, 4, 4, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0,
26, 26, 26, 26, 26, 26, 26, 26, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 26, 26, 26, 26, 26,
26, 26, 26, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110,
110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110,
110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110,
110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117,
117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117,
117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117,
117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 0, 0, 0,
0, 0, 0, 0, 26, 26, 26, 26, 26, 26, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24, 24, 24, 0, 0, 0, 0, 0, 0, 0,
0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 55, 55, 55, 55, 104, 55, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 0, 0, 0, 24,
24, 24, 24, 24, 4, 104, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 21, 22, 362, 363, 364,
365, 366, 367, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 24, 24, 4,
0, 0, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55,
104, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 24, 24, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 55, 0, 0, 0, 0, 0, 0,
0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 26, 26,
26, 26, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 24, 24, 24, 24, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0,
0, 0, 0, 0, 0, 17, 24, 17, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 25, 21, 22, 362, 363, 364,
365, 366, 367, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 24, 55, 55, 24, 24, 55, 0, 0, 0, 0, 0, 0, 0, 0,
0, 24, 24, 24, 17, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 17, 24,
24, 24, 24, 17, 17, 24, 24, 4, 4, 20, 4, 4, 4, 4, 24, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 20, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 24, 24, 24, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24,
24, 24, 24, 17, 24, 24, 24, 24, 24, 24, 24, 24, 0, 6, 7, 8, 9, 10, 11,
12, 13, 14, 15, 4, 4, 4, 4, 55, 17, 17, 55, 0, 0, 0, 0, 0, 0, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 4, 4,
55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 24, 17, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 17, 17, 17, 24, 24, 24, 24, 24, 24, 24, 24, 24, 17,
17, 55, 55, 55, 55, 4, 4, 4, 4, 24, 24, 24, 24, 4, 17, 24, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 55, 4, 55, 4, 4, 4, 0, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 17, 24, 24, 24,
17, 17, 24, 17, 24, 24, 4, 4, 4, 4, 4, 4, 24, 55, 55, 24, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 0, 55, 55,
55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 0, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 24, 17, 17, 17, 24, 24, 24, 24, 24, 24,
24, 24, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0,
0, 24, 24, 17, 17, 0, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 0, 0,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 0, 55, 55, 55,
55, 55, 0, 24, 24, 55, 17, 17, 24, 17, 17, 17, 17, 0, 0, 17, 17, 0, 0,
17, 17, 17, 0, 0, 55, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 17, 17, 0, 0, 24, 24, 24, 24, 24, 24, 24, 0, 0, 0, 24, 24, 24,
24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 0, 55, 0, 0, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 17, 17, 17, 24, 24, 24, 24,
24, 24, 0, 17, 0, 0, 17, 0, 17, 17, 17, 17, 0, 17, 17, 24, 17, 24, 55,
24, 55, 4, 4, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 24, 24, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17,
17, 24, 24, 24, 24, 24, 24, 24, 24, 17, 17, 24, 24, 24, 17, 24, 55, 55,
55, 55, 4, 4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4, 4, 0, 4,
24, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 17, 17, 17, 24, 24, 24, 24, 24, 24, 17, 24, 17, 17, 17, 17, 24,
24, 17, 24, 24, 55, 55, 4, 55, 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 17, 24, 24, 24, 24, 0, 0, 17, 17,
17, 17, 24, 24, 17, 24, 24, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 55, 55, 55, 55, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 17, 24, 24,
24, 24, 24, 24, 24, 24, 17, 17, 24, 17, 24, 24, 4, 4, 4, 55, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0,
0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 17, 24, 17,
17, 24, 24, 24, 24, 24, 24, 17, 24, 55, 4, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 0, 24, 17, 24, 17, 17, 24, 24, 24, 24, 17, 24, 24, 24,
24, 24, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 26, 26, 4, 4, 4,
4, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 17, 17, 17, 24, 24, 24, 24, 24, 24, 24, 24, 24, 17, 24,
24, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 26, 26, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 0, 0, 55, 55,
55, 55, 55, 55, 55, 55, 0, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 17,
17, 17, 17, 0, 17, 17, 0, 0, 24, 24, 17, 24, 55, 17, 55, 17, 24, 4, 4, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55,
55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 17, 24, 24, 24, 24, 0, 0, 24, 24,
17, 17, 17, 17, 24, 55, 4, 55, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 17, 55, 24, 24, 24,
24, 4, 4, 4, 4, 4, 4, 4, 4, 24, 0, 0, 0, 0, 0, 0, 0, 0, 55, 24, 24, 24,
24, 24, 24, 17, 17, 24, 24, 24, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 17, 24, 24, 4, 4, 4,
55, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0,
0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 24, 17, 24, 24, 24, 17, 24, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
17, 24, 24, 24, 24, 24, 24, 24, 0, 24, 24, 24, 24, 24, 24, 17, 24, 55, 4,
4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 0, 0, 0, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
0, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 0, 17, 24, 24, 24, 24, 24, 24, 24, 17, 24, 24, 17,
24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24, 24,
24, 24, 24, 0, 0, 0, 24, 0, 24, 24, 0, 24, 24, 24, 24, 24, 24, 24, 55,
24, 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0,
0, 0, 0, 55, 55, 55, 55, 55, 55, 0, 55, 55, 0, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 17, 17, 17, 0, 24, 24, 0, 17, 17,
24, 17, 24, 55, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 104, 55, 55, 0, 0, 0, 0, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 24, 24, 17, 17, 4, 4, 0, 0, 0, 0, 0, 0, 0, 24, 24,
55, 17, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 17, 17, 24, 24,
24, 24, 24, 0, 0, 0, 17, 17, 24, 17, 24, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 390,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
390, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 390, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244,
244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 0, 4, 4, 4, 4, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 4, 4, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 24, 55,
55, 55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 17, 17,
17, 24, 24, 24, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0,
0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 4, 4, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 6,
7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 0, 0, 24, 24, 24, 24, 24, 4, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24,
24, 24, 24, 24, 24, 4, 4, 4, 4, 4, 4, 4, 4, 4, 104, 104, 104, 104, 4, 4,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 26,
26, 26, 26, 26, 26, 26, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 104, 104, 104, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 104, 104, 4, 4, 4, 6, 7, 8,
9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
16, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 4, 4, 4, 4, 0, 0, 0, 0, 0, 506, 506, 506, 506, 506, 506, 506,
506, 506, 506, 506, 506, 506, 506, 506, 506, 506, 506, 506, 506, 506,
506, 506, 506, 506, 0, 0, 507, 507, 507, 507, 507, 507, 507, 507, 507,
507, 507, 507, 507, 507, 507, 507, 507, 507, 507, 507, 507, 507, 507,
507, 507, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 0, 0, 0, 0, 24, 55, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17, 0, 0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 104,
104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104, 104, 4, 104, 24, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 17, 17, 104, 104, 244, 244, 244, 0, 0, 0, 0, 0, 0, 0,
0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 104,
104, 104, 104, 0, 104, 104, 104, 104, 104, 104, 104, 0, 104, 104, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 0, 0,
55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 0, 0, 0, 0,
0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0,
0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0,
0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 0, 0, 4, 24, 24, 4, 20, 20, 20, 20, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4, 4, 4,
0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
0, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 17, 17, 24, 24, 24, 4, 4, 4,
17, 17, 17, 17, 17, 17, 20, 20, 20, 20, 20, 20, 20, 20, 24, 24, 24, 24,
24, 24, 24, 24, 4, 4, 24, 24, 24, 24, 24, 24, 24, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 24, 24,
24, 24, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 24, 24, 24, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 0, 0, 0, 0, 0, 0, 0, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 19, 19, 19, 19, 19, 19, 19, 0, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 122, 0, 122, 122, 0, 0, 122, 0, 0, 122, 122, 0, 0, 122, 122, 122,
122, 0, 122, 122, 122, 122, 122, 122, 122, 122, 19, 19, 19, 19, 0, 19, 0,
19, 19, 19, 19, 19, 19, 19, 0, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 122, 122, 0, 122, 122, 122, 122, 0, 0, 122, 122, 122,
122, 122, 122, 122, 122, 0, 122, 122, 122, 122, 122, 122, 122, 0, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 122, 122, 0, 122, 122, 122, 122, 0, 122, 122,
122, 122, 122, 0, 122, 0, 0, 0, 122, 122, 122, 122, 122, 122, 122, 0, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 0, 0, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 4,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 4, 19, 19, 19, 19, 19, 19, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 4, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
4, 19, 19, 19, 19, 19, 19, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 4, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 4, 19, 19, 19, 19, 19, 19, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 4, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 4, 19, 19, 19, 19, 19, 19, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122,
122, 122, 122, 122, 4, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 4, 19, 19, 19, 19,
19, 19, 122, 19, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 4, 4, 4,
4, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 4, 4, 4, 4,
4, 4, 4, 4, 24, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 24, 4, 4, 4, 4,
4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 24,
0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 55, 19, 19, 19,
19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 0, 0,
0, 0, 0, 0, 19, 19, 19, 19, 19, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 24, 24, 24, 24, 24, 24, 24, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 0, 0, 24, 24, 24, 24, 24, 24, 24, 0, 24, 24,
0, 24, 24, 24, 24, 24, 0, 0, 0, 0, 0, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103,
103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 24, 24, 24, 24, 24, 24, 24, 104,
104, 104, 104, 104, 104, 104, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
0, 0, 0, 0, 55, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24, 24, 24, 6, 7,
8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 104, 24, 24, 24, 24, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 24, 24, 55, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55,
24, 55, 55, 24, 55, 55, 55, 55, 55, 55, 55, 24, 24, 55, 55, 55, 55, 55,
24, 0, 0, 0, 0, 0, 0, 0, 0, 55, 104, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55,
55, 55, 55, 0, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 0, 0, 26, 26, 26, 26, 26, 26, 26, 26, 26, 24, 24, 24, 24, 24, 24, 24,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 508, 508, 508, 508,
508, 508, 508, 508, 508, 508, 508, 508, 508, 508, 508, 508, 508, 508,
508, 508, 508, 508, 508, 508, 508, 508, 508, 508, 508, 508, 508, 508,
508, 508, 509, 509, 509, 509, 509, 509, 509, 509, 509, 509, 509, 509,
509, 509, 509, 509, 509, 509, 509, 509, 509, 509, 509, 509, 509, 509,
509, 509, 509, 509, 509, 509, 509, 509, 24, 24, 24, 24, 24, 24, 24, 104,
0, 0, 0, 0, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 4, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 4, 26, 26, 26, 4, 26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 26, 26, 4, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
26, 26, 26, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55,
55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 0, 55, 0,
0, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 0,
55, 0, 55, 0, 0, 0, 0, 0, 0, 55, 0, 0, 0, 0, 55, 0, 55, 0, 55, 0, 55, 55,
55, 0, 55, 55, 0, 55, 0, 0, 55, 0, 55, 0, 55, 0, 55, 0, 55, 0, 55, 55, 0,
55, 0, 0, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55, 55, 0, 55, 55, 55,
55, 0, 55, 55, 55, 55, 0, 55, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0,
0, 0, 0, 0, 55, 55, 55, 0, 55, 55, 55, 55, 55, 0, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 361, 361, 25, 21, 22, 362, 363, 364, 365, 366, 367, 26, 26, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 510, 510, 510, 510, 510, 510, 510, 510, 510,
510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510,
510, 510, 510, 4, 4, 4, 4, 4, 4, 510, 510, 510, 510, 510, 510, 510, 510,
510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510,
510, 510, 510, 510, 4, 4, 4, 4, 4, 4, 510, 510, 510, 510, 510, 510, 510,
510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510, 510,
510, 510, 510, 510, 510, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 4, 4,
4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 4, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0,
0, 0, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 4, 0, 0, 0, 0, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 0, 0, 0, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 4, 0, 0, 0, 0, 0, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55,
55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 390, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55,
55, 55, 55, 0, 0, 0, 0, 0, 0, 0, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 20, 20, 20, 20,
20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
20, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
24, 24, 24, 24, 24, 24, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0,
};
/* Returns the numeric value as double for Unicode characters
* having this property, -1.0 otherwise.
*/
double _PyUnicode_ToNumeric(Py_UCS4 ch)
{
switch (ch) {
case 0x0F33:
return (double) -1.0/2.0;
case 0x0030:
case 0x0660:
case 0x06F0:
case 0x07C0:
case 0x0966:
case 0x09E6:
case 0x0A66:
case 0x0AE6:
case 0x0B66:
case 0x0BE6:
case 0x0C66:
case 0x0C78:
case 0x0CE6:
case 0x0D66:
case 0x0DE6:
case 0x0E50:
case 0x0ED0:
case 0x0F20:
case 0x1040:
case 0x1090:
case 0x17E0:
case 0x17F0:
case 0x1810:
case 0x1946:
case 0x19D0:
case 0x1A80:
case 0x1A90:
case 0x1B50:
case 0x1BB0:
case 0x1C40:
case 0x1C50:
case 0x2070:
case 0x2080:
case 0x2189:
case 0x24EA:
case 0x24FF:
case 0x3007:
case 0x6D1E:
case 0x96F6:
case 0xA620:
case 0xA6EF:
case 0xA8D0:
case 0xA900:
case 0xA9D0:
case 0xA9F0:
case 0xAA50:
case 0xABF0:
case 0xF9B2:
case 0xFF10:
case 0x1018A:
case 0x104A0:
case 0x10D30:
case 0x10D40:
case 0x11066:
case 0x110F0:
case 0x11136:
case 0x111D0:
case 0x112F0:
case 0x11450:
case 0x114D0:
case 0x11650:
case 0x116C0:
case 0x116D0:
case 0x116DA:
case 0x11730:
case 0x118E0:
case 0x11950:
case 0x11BF0:
case 0x11C50:
case 0x11D50:
case 0x11DA0:
case 0x11DE0:
case 0x11F50:
case 0x16130:
case 0x16A60:
case 0x16AC0:
case 0x16B50:
case 0x16D70:
case 0x16E80:
case 0x1CCF0:
case 0x1D2C0:
case 0x1D2E0:
case 0x1D7CE:
case 0x1D7D8:
case 0x1D7E2:
case 0x1D7EC:
case 0x1D7F6:
case 0x1E140:
case 0x1E2F0:
case 0x1E4F0:
case 0x1E5F1:
case 0x1E950:
case 0x1F100:
case 0x1F101:
case 0x1F10B:
case 0x1F10C:
case 0x1FBF0:
return (double) 0.0;
case 0x0031:
case 0x00B9:
case 0x0661:
case 0x06F1:
case 0x07C1:
case 0x0967:
case 0x09E7:
case 0x0A67:
case 0x0AE7:
case 0x0B67:
case 0x0BE7:
case 0x0C67:
case 0x0C79:
case 0x0C7C:
case 0x0CE7:
case 0x0D67:
case 0x0DE7:
case 0x0E51:
case 0x0ED1:
case 0x0F21:
case 0x1041:
case 0x1091:
case 0x1369:
case 0x17E1:
case 0x17F1:
case 0x1811:
case 0x1947:
case 0x19D1:
case 0x19DA:
case 0x1A81:
case 0x1A91:
case 0x1B51:
case 0x1BB1:
case 0x1C41:
case 0x1C51:
case 0x2081:
case 0x215F:
case 0x2160:
case 0x2170:
case 0x2460:
case 0x2474:
case 0x2488:
case 0x24F5:
case 0x2776:
case 0x2780:
case 0x278A:
case 0x3021:
case 0x3192:
case 0x3220:
case 0x3280:
case 0x4E00:
case 0x58F1:
case 0x58F9:
case 0x5E7A:
case 0x5F0C:
case 0xA621:
case 0xA6E6:
case 0xA8D1:
case 0xA901:
case 0xA9D1:
case 0xA9F1:
case 0xAA51:
case 0xABF1:
case 0xFF11:
case 0x10107:
case 0x10142:
case 0x10158:
case 0x10159:
case 0x1015A:
case 0x102E1:
case 0x10320:
case 0x103D1:
case 0x104A1:
case 0x10858:
case 0x10879:
case 0x108A7:
case 0x108FB:
case 0x10916:
case 0x109C0:
case 0x10A40:
case 0x10A7D:
case 0x10A9D:
case 0x10AEB:
case 0x10B58:
case 0x10B78:
case 0x10BA9:
case 0x10CFA:
case 0x10D31:
case 0x10D41:
case 0x10E60:
case 0x10F1D:
case 0x10F51:
case 0x10FC5:
case 0x11052:
case 0x11067:
case 0x110F1:
case 0x11137:
case 0x111D1:
case 0x111E1:
case 0x112F1:
case 0x11451:
case 0x114D1:
case 0x11651:
case 0x116C1:
case 0x116D1:
case 0x116DB:
case 0x11731:
case 0x118E1:
case 0x11951:
case 0x11BF1:
case 0x11C51:
case 0x11C5A:
case 0x11D51:
case 0x11DA1:
case 0x11DE1:
case 0x11F51:
case 0x12038:
case 0x12039:
case 0x12079:
case 0x1230B:
case 0x12415:
case 0x1241E:
case 0x1242C:
case 0x12434:
case 0x1244F:
case 0x12458:
case 0x16131:
case 0x16A61:
case 0x16AC1:
case 0x16B51:
case 0x16D71:
case 0x16E81:
case 0x16E94:
case 0x16FF4:
case 0x1CCF1:
case 0x1D2C1:
case 0x1D2E1:
case 0x1D360:
case 0x1D372:
case 0x1D377:
case 0x1D7CF:
case 0x1D7D9:
case 0x1D7E3:
case 0x1D7ED:
case 0x1D7F7:
case 0x1E141:
case 0x1E2F1:
case 0x1E4F1:
case 0x1E5F2:
case 0x1E8C7:
case 0x1E951:
case 0x1EC71:
case 0x1ECA3:
case 0x1ECB1:
case 0x1ED01:
case 0x1F102:
case 0x1FBF1:
case 0x2092A:
return (double) 1.0;
case 0x0D5C:
case 0x2152:
case 0x11FCB:
return (double) 1.0/10.0;
case 0x109F6:
return (double) 1.0/12.0;
case 0x09F4:
case 0x0B75:
case 0x0D76:
case 0xA833:
case 0x11FC9:
case 0x11FCA:
return (double) 1.0/16.0;
case 0x0D58:
case 0x11FC1:
return (double) 1.0/160.0;
case 0x00BD:
case 0x0B73:
case 0x0D74:
case 0x0F2A:
case 0x2CFD:
case 0xA831:
case 0x10141:
case 0x10175:
case 0x10176:
case 0x109BD:
case 0x10A48:
case 0x10E7B:
case 0x10F26:
case 0x11FD1:
case 0x11FD2:
case 0x12226:
case 0x12464:
case 0x1ECAE:
case 0x1ED3C:
return (double) 1.0/2.0;
case 0x0D5B:
case 0x11FC8:
return (double) 1.0/20.0;
case 0x2153:
case 0x10E7D:
case 0x1245A:
case 0x1245D:
case 0x12465:
return (double) 1.0/3.0;
case 0x11FC5:
return (double) 1.0/32.0;
case 0x11FC0:
case 0x11FD4:
return (double) 1.0/320.0;
case 0x00BC:
case 0x09F7:
case 0x0B72:
case 0x0D73:
case 0xA830:
case 0x10140:
case 0x1018B:
case 0x10E7C:
case 0x11FD0:
case 0x12460:
case 0x12462:
case 0x12463:
case 0x1ECAD:
return (double) 1.0/4.0;
case 0x0D59:
case 0x11FC4:
return (double) 1.0/40.0;
case 0x0D5E:
case 0x2155:
case 0x11FCF:
return (double) 1.0/5.0;
case 0x2159:
case 0x12461:
case 0x1ED3D:
return (double) 1.0/6.0;
case 0x11FC3:
return (double) 1.0/64.0;
case 0x2150:
return (double) 1.0/7.0;
case 0x09F5:
case 0x0B76:
case 0x0D77:
case 0x215B:
case 0xA834:
case 0x11FCC:
case 0x1245F:
return (double) 1.0/8.0;
case 0x11FC2:
return (double) 1.0/80.0;
case 0x2151:
return (double) 1.0/9.0;
case 0x0BF0:
case 0x0D70:
case 0x1372:
case 0x2169:
case 0x2179:
case 0x2469:
case 0x247D:
case 0x2491:
case 0x24FE:
case 0x277F:
case 0x2789:
case 0x2793:
case 0x3038:
case 0x3229:
case 0x3248:
case 0x3289:
case 0x4EC0:
case 0x5341:
case 0x62FE:
case 0xF973:
case 0xF9FD:
case 0x10110:
case 0x10149:
case 0x10150:
case 0x10157:
case 0x10160:
case 0x10161:
case 0x10162:
case 0x10163:
case 0x10164:
case 0x102EA:
case 0x10322:
case 0x103D3:
case 0x1085B:
case 0x1087E:
case 0x108AD:
case 0x108FD:
case 0x10917:
case 0x109C9:
case 0x10A44:
case 0x10A9E:
case 0x10AED:
case 0x10B5C:
case 0x10B7C:
case 0x10BAD:
case 0x10CFC:
case 0x10E69:
case 0x10F22:
case 0x10F52:
case 0x10FC9:
case 0x1105B:
case 0x111EA:
case 0x1173A:
case 0x118EA:
case 0x11C63:
case 0x16B5B:
case 0x16E8A:
case 0x1D2CA:
case 0x1D2EA:
case 0x1D369:
case 0x1EC7A:
case 0x1ED0A:
case 0x1ED37:
return (double) 10.0;
case 0x109FF:
return (double) 10.0/12.0;
case 0x0BF1:
case 0x0D71:
case 0x137B:
case 0x216D:
case 0x217D:
case 0x4F70:
case 0x767E:
case 0x964C:
case 0x10119:
case 0x1014B:
case 0x10152:
case 0x1016A:
case 0x102F3:
case 0x103D5:
case 0x1085D:
case 0x108AF:
case 0x108FF:
case 0x10919:
case 0x109D2:
case 0x10A46:
case 0x10AEF:
case 0x10B5E:
case 0x10B7E:
case 0x10BAF:
case 0x10CFE:
case 0x10E72:
case 0x10F25:
case 0x10F54:
case 0x10FCB:
case 0x11064:
case 0x111F3:
case 0x11C6C:
case 0x16B5C:
case 0x1EC83:
case 0x1ED13:
return (double) 100.0;
case 0x0BF2:
case 0x0D72:
case 0x216F:
case 0x217F:
case 0x2180:
case 0x4EDF:
case 0x5343:
case 0x9621:
case 0x10122:
case 0x1014D:
case 0x10154:
case 0x10171:
case 0x1085E:
case 0x109DB:
case 0x10A47:
case 0x10B5F:
case 0x10B7F:
case 0x10CFF:
case 0x11065:
case 0x111F4:
case 0x1EC8C:
case 0x1ED1C:
return (double) 1000.0;
case 0x137C:
case 0x2182:
case 0x4E07:
case 0x842C:
case 0x1012B:
case 0x10155:
case 0x1085F:
case 0x109E4:
case 0x16B5D:
case 0x1EC95:
case 0x1ECB3:
case 0x1ED25:
case 0x1ED3B:
return (double) 10000.0;
case 0x2188:
case 0x109ED:
case 0x1EC9E:
case 0x1ECA0:
case 0x1ECB4:
return (double) 100000.0;
case 0x16B5E:
return (double) 1000000.0;
case 0x1ECA1:
return (double) 10000000.0;
case 0x4EBF:
case 0x5104:
case 0x16B5F:
return (double) 100000000.0;
case 0x79ED:
return (double) 1000000000.0;
case 0x16B60:
return (double) 10000000000.0;
case 0x5146:
case 0x16B61:
return (double) 1000000000000.0;
case 0x4EAC:
return (double) 1e+16;
case 0x216A:
case 0x217A:
case 0x246A:
case 0x247E:
case 0x2492:
case 0x24EB:
case 0x16E8B:
case 0x1D2CB:
case 0x1D2EB:
return (double) 11.0;
case 0x109BC:
return (double) 11.0/12.0;
case 0x0F2F:
return (double) 11.0/2.0;
case 0x216B:
case 0x217B:
case 0x246B:
case 0x247F:
case 0x2493:
case 0x24EC:
case 0x16E8C:
case 0x1D2CC:
case 0x1D2EC:
return (double) 12.0;
case 0x246C:
case 0x2480:
case 0x2494:
case 0x24ED:
case 0x16E8D:
case 0x1D2CD:
case 0x1D2ED:
return (double) 13.0;
case 0x0F30:
return (double) 13.0/2.0;
case 0x246D:
case 0x2481:
case 0x2495:
case 0x24EE:
case 0x16E8E:
case 0x1D2CE:
case 0x1D2EE:
return (double) 14.0;
case 0x246E:
case 0x2482:
case 0x2496:
case 0x24EF:
case 0x16E8F:
case 0x1D2CF:
case 0x1D2EF:
return (double) 15.0;
case 0x0F31:
return (double) 15.0/2.0;
case 0x09F9:
case 0x246F:
case 0x2483:
case 0x2497:
case 0x24F0:
case 0x16E90:
case 0x1D2D0:
case 0x1D2F0:
return (double) 16.0;
case 0x16EE:
case 0x2470:
case 0x2484:
case 0x2498:
case 0x24F1:
case 0x16E91:
case 0x1D2D1:
case 0x1D2F1:
return (double) 17.0;
case 0x0F32:
return (double) 17.0/2.0;
case 0x16EF:
case 0x2471:
case 0x2485:
case 0x2499:
case 0x24F2:
case 0x16E92:
case 0x1D2D2:
case 0x1D2F2:
return (double) 18.0;
case 0x16F0:
case 0x2472:
case 0x2486:
case 0x249A:
case 0x24F3:
case 0x16E93:
case 0x1D2D3:
case 0x1D2F3:
return (double) 19.0;
case 0x0032:
case 0x00B2:
case 0x0662:
case 0x06F2:
case 0x07C2:
case 0x0968:
case 0x09E8:
case 0x0A68:
case 0x0AE8:
case 0x0B68:
case 0x0BE8:
case 0x0C68:
case 0x0C7A:
case 0x0C7D:
case 0x0CE8:
case 0x0D68:
case 0x0DE8:
case 0x0E52:
case 0x0ED2:
case 0x0F22:
case 0x1042:
case 0x1092:
case 0x136A:
case 0x17E2:
case 0x17F2:
case 0x1812:
case 0x1948:
case 0x19D2:
case 0x1A82:
case 0x1A92:
case 0x1B52:
case 0x1BB2:
case 0x1C42:
case 0x1C52:
case 0x2082:
case 0x2161:
case 0x2171:
case 0x2461:
case 0x2475:
case 0x2489:
case 0x24F6:
case 0x2777:
case 0x2781:
case 0x278B:
case 0x3022:
case 0x3193:
case 0x3221:
case 0x3281:
case 0x3483:
case 0x4E24:
case 0x4E8C:
case 0x4FE9:
case 0x5006:
case 0x5169:
case 0x5F0D:
case 0x5F10:
case 0x8CAE:
case 0x8CB3:
case 0x8D30:
case 0xA622:
case 0xA6E7:
case 0xA8D2:
case 0xA902:
case 0xA9D2:
case 0xA9F2:
case 0xAA52:
case 0xABF2:
case 0xF978:
case 0xFF12:
case 0x10108:
case 0x1015B:
case 0x1015C:
case 0x1015D:
case 0x1015E:
case 0x102E2:
case 0x103D2:
case 0x104A2:
case 0x10859:
case 0x1087A:
case 0x108A8:
case 0x1091A:
case 0x109C1:
case 0x10A41:
case 0x10B59:
case 0x10B79:
case 0x10BAA:
case 0x10D32:
case 0x10D42:
case 0x10E61:
case 0x10F1E:
case 0x10FC6:
case 0x11053:
case 0x11068:
case 0x110F2:
case 0x11138:
case 0x111D2:
case 0x111E2:
case 0x112F2:
case 0x11452:
case 0x114D2:
case 0x11652:
case 0x116C2:
case 0x116D2:
case 0x116DC:
case 0x11732:
case 0x118E2:
case 0x11952:
case 0x11BF2:
case 0x11C52:
case 0x11C5B:
case 0x11D52:
case 0x11DA2:
case 0x11DE2:
case 0x11F52:
case 0x1222B:
case 0x12399:
case 0x12400:
case 0x12416:
case 0x1241F:
case 0x12423:
case 0x1242D:
case 0x12435:
case 0x1244A:
case 0x12450:
case 0x12456:
case 0x12459:
case 0x16132:
case 0x16A62:
case 0x16AC2:
case 0x16B52:
case 0x16D72:
case 0x16E82:
case 0x16E95:
case 0x16FF6:
case 0x1CCF2:
case 0x1D2C2:
case 0x1D2E2:
case 0x1D361:
case 0x1D373:
case 0x1D7D0:
case 0x1D7DA:
case 0x1D7E4:
case 0x1D7EE:
case 0x1D7F8:
case 0x1E142:
case 0x1E2F2:
case 0x1E4F2:
case 0x1E5F3:
case 0x1E8C8:
case 0x1E952:
case 0x1EC72:
case 0x1ECA4:
case 0x1ECB2:
case 0x1ED02:
case 0x1ED2F:
case 0x1F103:
case 0x1FBF2:
case 0x22390:
return (double) 2.0;
case 0x109F7:
return (double) 2.0/12.0;
case 0x2154:
case 0x10177:
case 0x10E7E:
case 0x1245B:
case 0x1245E:
case 0x12466:
return (double) 2.0/3.0;
case 0x2156:
return (double) 2.0/5.0;
case 0x1373:
case 0x2473:
case 0x2487:
case 0x249B:
case 0x24F4:
case 0x3039:
case 0x3249:
case 0x5344:
case 0x5EFF:
case 0x10111:
case 0x102EB:
case 0x103D4:
case 0x1085C:
case 0x1087F:
case 0x108AE:
case 0x108FE:
case 0x10918:
case 0x109CA:
case 0x10A45:
case 0x10A9F:
case 0x10AEE:
case 0x10B5D:
case 0x10B7D:
case 0x10BAE:
case 0x10E6A:
case 0x10F23:
case 0x10F53:
case 0x10FCA:
case 0x1105C:
case 0x111EB:
case 0x1173B:
case 0x118EB:
case 0x11C64:
case 0x1D36A:
case 0x1EC7B:
case 0x1ED0B:
return (double) 20.0;
case 0x7695:
case 0x1011A:
case 0x102F4:
case 0x109D3:
case 0x10E73:
case 0x1EC84:
case 0x1ED14:
return (double) 200.0;
case 0x10123:
case 0x109DC:
case 0x1EC8D:
case 0x1ED1D:
case 0x1ED3A:
return (double) 2000.0;
case 0x1012C:
case 0x109E5:
case 0x1EC96:
case 0x1ED26:
return (double) 20000.0;
case 0x109EE:
case 0x1EC9F:
return (double) 200000.0;
case 0x1ECA2:
return (double) 20000000.0;
case 0x3251:
return (double) 21.0;
case 0x12432:
return (double) 216000.0;
case 0x3252:
return (double) 22.0;
case 0x3253:
return (double) 23.0;
case 0x3254:
return (double) 24.0;
case 0x3255:
return (double) 25.0;
case 0x3256:
return (double) 26.0;
case 0x3257:
return (double) 27.0;
case 0x3258:
return (double) 28.0;
case 0x3259:
return (double) 29.0;
case 0x0033:
case 0x00B3:
case 0x0663:
case 0x06F3:
case 0x07C3:
case 0x0969:
case 0x09E9:
case 0x0A69:
case 0x0AE9:
case 0x0B69:
case 0x0BE9:
case 0x0C69:
case 0x0C7B:
case 0x0C7E:
case 0x0CE9:
case 0x0D69:
case 0x0DE9:
case 0x0E53:
case 0x0ED3:
case 0x0F23:
case 0x1043:
case 0x1093:
case 0x136B:
case 0x17E3:
case 0x17F3:
case 0x1813:
case 0x1949:
case 0x19D3:
case 0x1A83:
case 0x1A93:
case 0x1B53:
case 0x1BB3:
case 0x1C43:
case 0x1C53:
case 0x2083:
case 0x2162:
case 0x2172:
case 0x2462:
case 0x2476:
case 0x248A:
case 0x24F7:
case 0x2778:
case 0x2782:
case 0x278C:
case 0x3023:
case 0x3194:
case 0x3222:
case 0x3282:
case 0x4E09:
case 0x4EE8:
case 0x53C1:
case 0x53C2:
case 0x53C3:
case 0x53C4:
case 0x5F0E:
case 0xA623:
case 0xA6E8:
case 0xA8D3:
case 0xA903:
case 0xA9D3:
case 0xA9F3:
case 0xAA53:
case 0xABF3:
case 0xF96B:
case 0xFF13:
case 0x10109:
case 0x102E3:
case 0x104A3:
case 0x1085A:
case 0x1087B:
case 0x108A9:
case 0x1091B:
case 0x109C2:
case 0x10A42:
case 0x10B5A:
case 0x10B7A:
case 0x10BAB:
case 0x10D33:
case 0x10D43:
case 0x10E62:
case 0x10F1F:
case 0x10FC7:
case 0x11054:
case 0x11069:
case 0x110F3:
case 0x11139:
case 0x111D3:
case 0x111E3:
case 0x112F3:
case 0x11453:
case 0x114D3:
case 0x11653:
case 0x116C3:
case 0x116D3:
case 0x116DD:
case 0x11733:
case 0x118E3:
case 0x11953:
case 0x11BF3:
case 0x11C53:
case 0x11C5C:
case 0x11D53:
case 0x11DA3:
case 0x11DE3:
case 0x11F53:
case 0x1230D:
case 0x12401:
case 0x12408:
case 0x12417:
case 0x12420:
case 0x12424:
case 0x12425:
case 0x1242E:
case 0x1242F:
case 0x12436:
case 0x12437:
case 0x1243A:
case 0x1243B:
case 0x1244B:
case 0x12451:
case 0x12457:
case 0x16133:
case 0x16A63:
case 0x16AC3:
case 0x16B53:
case 0x16D73:
case 0x16E83:
case 0x16E96:
case 0x1CCF3:
case 0x1D2C3:
case 0x1D2E3:
case 0x1D362:
case 0x1D374:
case 0x1D7D1:
case 0x1D7DB:
case 0x1D7E5:
case 0x1D7EF:
case 0x1D7F9:
case 0x1E143:
case 0x1E2F3:
case 0x1E4F3:
case 0x1E5F4:
case 0x1E8C9:
case 0x1E953:
case 0x1EC73:
case 0x1ECA5:
case 0x1ED03:
case 0x1ED30:
case 0x1F104:
case 0x1FBF3:
case 0x20AFD:
case 0x20B19:
case 0x22998:
case 0x23B1B:
return (double) 3.0;
case 0x109F8:
return (double) 3.0/12.0;
case 0x09F6:
case 0x0B77:
case 0x0D78:
case 0xA835:
case 0x11FCE:
return (double) 3.0/16.0;
case 0x0F2B:
case 0x16FF5:
return (double) 3.0/2.0;
case 0x0D5D:
case 0x11FCD:
return (double) 3.0/20.0;
case 0x00BE:
case 0x09F8:
case 0x0B74:
case 0x0D75:
case 0xA832:
case 0x10178:
case 0x11FD3:
case 0x1ECAF:
return (double) 3.0/4.0;
case 0x2157:
return (double) 3.0/5.0;
case 0x11FC7:
return (double) 3.0/64.0;
case 0x215C:
return (double) 3.0/8.0;
case 0x0D5A:
case 0x11FC6:
return (double) 3.0/80.0;
case 0x1374:
case 0x303A:
case 0x324A:
case 0x325A:
case 0x5345:
case 0x10112:
case 0x10165:
case 0x102EC:
case 0x109CB:
case 0x10E6B:
case 0x10F24:
case 0x1105D:
case 0x111EC:
case 0x118EC:
case 0x11C65:
case 0x1D36B:
case 0x1EC7C:
case 0x1ED0C:
case 0x20983:
return (double) 30.0;
case 0x1011B:
case 0x1016B:
case 0x102F5:
case 0x109D4:
case 0x10E74:
case 0x1EC85:
case 0x1ED15:
return (double) 300.0;
case 0x10124:
case 0x109DD:
case 0x1EC8E:
case 0x1ED1E:
return (double) 3000.0;
case 0x1012D:
case 0x109E6:
case 0x1EC97:
case 0x1ED27:
return (double) 30000.0;
case 0x109EF:
return (double) 300000.0;
case 0x325B:
return (double) 31.0;
case 0x325C:
return (double) 32.0;
case 0x325D:
return (double) 33.0;
case 0x325E:
return (double) 34.0;
case 0x325F:
return (double) 35.0;
case 0x32B1:
return (double) 36.0;
case 0x32B2:
return (double) 37.0;
case 0x32B3:
return (double) 38.0;
case 0x32B4:
return (double) 39.0;
case 0x0034:
case 0x0664:
case 0x06F4:
case 0x07C4:
case 0x096A:
case 0x09EA:
case 0x0A6A:
case 0x0AEA:
case 0x0B6A:
case 0x0BEA:
case 0x0C6A:
case 0x0CEA:
case 0x0D6A:
case 0x0DEA:
case 0x0E54:
case 0x0ED4:
case 0x0F24:
case 0x1044:
case 0x1094:
case 0x136C:
case 0x17E4:
case 0x17F4:
case 0x1814:
case 0x194A:
case 0x19D4:
case 0x1A84:
case 0x1A94:
case 0x1B54:
case 0x1BB4:
case 0x1C44:
case 0x1C54:
case 0x2074:
case 0x2084:
case 0x2163:
case 0x2173:
case 0x2463:
case 0x2477:
case 0x248B:
case 0x24F8:
case 0x2779:
case 0x2783:
case 0x278D:
case 0x3024:
case 0x3195:
case 0x3223:
case 0x3283:
case 0x4E96:
case 0x56DB:
case 0x8086:
case 0xA624:
case 0xA6E9:
case 0xA8D4:
case 0xA904:
case 0xA9D4:
case 0xA9F4:
case 0xAA54:
case 0xABF4:
case 0xFF14:
case 0x1010A:
case 0x102E4:
case 0x104A4:
case 0x1087C:
case 0x108AA:
case 0x108AB:
case 0x109C3:
case 0x10A43:
case 0x10B5B:
case 0x10B7B:
case 0x10BAC:
case 0x10D34:
case 0x10D44:
case 0x10E63:
case 0x10F20:
case 0x10FC8:
case 0x11055:
case 0x1106A:
case 0x110F4:
case 0x1113A:
case 0x111D4:
case 0x111E4:
case 0x112F4:
case 0x11454:
case 0x114D4:
case 0x11654:
case 0x116C4:
case 0x116D4:
case 0x116DE:
case 0x11734:
case 0x118E4:
case 0x11954:
case 0x11BF4:
case 0x11C54:
case 0x11C5D:
case 0x11D54:
case 0x11DA4:
case 0x11DE4:
case 0x11F54:
case 0x12402:
case 0x12409:
case 0x1240F:
case 0x12418:
case 0x12421:
case 0x12426:
case 0x12430:
case 0x12438:
case 0x1243C:
case 0x1243D:
case 0x1243E:
case 0x1243F:
case 0x1244C:
case 0x12452:
case 0x12453:
case 0x12469:
case 0x16134:
case 0x16A64:
case 0x16AC4:
case 0x16B54:
case 0x16D74:
case 0x16E84:
case 0x1CCF4:
case 0x1D2C4:
case 0x1D2E4:
case 0x1D363:
case 0x1D375:
case 0x1D7D2:
case 0x1D7DC:
case 0x1D7E6:
case 0x1D7F0:
case 0x1D7FA:
case 0x1E144:
case 0x1E2F4:
case 0x1E4F4:
case 0x1E5F5:
case 0x1E8CA:
case 0x1E954:
case 0x1EC74:
case 0x1ECA6:
case 0x1ED04:
case 0x1ED31:
case 0x1F105:
case 0x1FBF4:
case 0x20064:
case 0x200E2:
case 0x2626D:
return (double) 4.0;
case 0x109F9:
return (double) 4.0/12.0;
case 0x2158:
return (double) 4.0/5.0;
case 0x1375:
case 0x324B:
case 0x32B5:
case 0x534C:
case 0x10113:
case 0x102ED:
case 0x109CC:
case 0x10E6C:
case 0x1105E:
case 0x111ED:
case 0x118ED:
case 0x11C66:
case 0x12467:
case 0x1D36C:
case 0x1EC7D:
case 0x1ED0D:
case 0x2098C:
case 0x2099C:
return (double) 40.0;
case 0x1011C:
case 0x102F6:
case 0x109D5:
case 0x10E75:
case 0x1EC86:
case 0x1ED16:
case 0x1ED38:
return (double) 400.0;
case 0x10125:
case 0x109DE:
case 0x1EC8F:
case 0x1ED1F:
return (double) 4000.0;
case 0x1012E:
case 0x109E7:
case 0x1EC98:
case 0x1ED28:
return (double) 40000.0;
case 0x109F0:
return (double) 400000.0;
case 0x32B6:
return (double) 41.0;
case 0x32B7:
return (double) 42.0;
case 0x32B8:
return (double) 43.0;
case 0x12433:
return (double) 432000.0;
case 0x32B9:
return (double) 44.0;
case 0x32BA:
return (double) 45.0;
case 0x32BB:
return (double) 46.0;
case 0x32BC:
return (double) 47.0;
case 0x32BD:
return (double) 48.0;
case 0x32BE:
return (double) 49.0;
case 0x0035:
case 0x0665:
case 0x06F5:
case 0x07C5:
case 0x096B:
case 0x09EB:
case 0x0A6B:
case 0x0AEB:
case 0x0B6B:
case 0x0BEB:
case 0x0C6B:
case 0x0CEB:
case 0x0D6B:
case 0x0DEB:
case 0x0E55:
case 0x0ED5:
case 0x0F25:
case 0x1045:
case 0x1095:
case 0x136D:
case 0x17E5:
case 0x17F5:
case 0x1815:
case 0x194B:
case 0x19D5:
case 0x1A85:
case 0x1A95:
case 0x1B55:
case 0x1BB5:
case 0x1C45:
case 0x1C55:
case 0x2075:
case 0x2085:
case 0x2164:
case 0x2174:
case 0x2464:
case 0x2478:
case 0x248C:
case 0x24F9:
case 0x277A:
case 0x2784:
case 0x278E:
case 0x3025:
case 0x3224:
case 0x3284:
case 0x3405:
case 0x382A:
case 0x4E94:
case 0x4F0D:
case 0xA625:
case 0xA6EA:
case 0xA8D5:
case 0xA905:
case 0xA9D5:
case 0xA9F5:
case 0xAA55:
case 0xABF5:
case 0xFF15:
case 0x1010B:
case 0x10143:
case 0x10148:
case 0x1014F:
case 0x1015F:
case 0x10173:
case 0x102E5:
case 0x10321:
case 0x104A5:
case 0x1087D:
case 0x108AC:
case 0x108FC:
case 0x109C4:
case 0x10AEC:
case 0x10CFB:
case 0x10D35:
case 0x10D45:
case 0x10E64:
case 0x10F21:
case 0x11056:
case 0x1106B:
case 0x110F5:
case 0x1113B:
case 0x111D5:
case 0x111E5:
case 0x112F5:
case 0x11455:
case 0x114D5:
case 0x11655:
case 0x116C5:
case 0x116D5:
case 0x116DF:
case 0x11735:
case 0x118E5:
case 0x11955:
case 0x11BF5:
case 0x11C55:
case 0x11C5E:
case 0x11D55:
case 0x11DA5:
case 0x11DE5:
case 0x11F55:
case 0x12403:
case 0x1240A:
case 0x12410:
case 0x12419:
case 0x12422:
case 0x12427:
case 0x12431:
case 0x12439:
case 0x1244D:
case 0x12454:
case 0x12455:
case 0x1246A:
case 0x16135:
case 0x16A65:
case 0x16AC5:
case 0x16B55:
case 0x16D75:
case 0x16E85:
case 0x1CCF5:
case 0x1D2C5:
case 0x1D2E5:
case 0x1D364:
case 0x1D376:
case 0x1D378:
case 0x1D7D3:
case 0x1D7DD:
case 0x1D7E7:
case 0x1D7F1:
case 0x1D7FB:
case 0x1E145:
case 0x1E2F5:
case 0x1E4F5:
case 0x1E5F6:
case 0x1E8CB:
case 0x1E955:
case 0x1EC75:
case 0x1ECA7:
case 0x1ED05:
case 0x1ED32:
case 0x1F106:
case 0x1FBF5:
case 0x20121:
return (double) 5.0;
case 0x109FA:
return (double) 5.0/12.0;
case 0x0F2C:
return (double) 5.0/2.0;
case 0x215A:
case 0x1245C:
return (double) 5.0/6.0;
case 0x215D:
return (double) 5.0/8.0;
case 0x1376:
case 0x216C:
case 0x217C:
case 0x2186:
case 0x324C:
case 0x32BF:
case 0x10114:
case 0x10144:
case 0x1014A:
case 0x10151:
case 0x10166:
case 0x10167:
case 0x10168:
case 0x10169:
case 0x10174:
case 0x102EE:
case 0x10323:
case 0x109CD:
case 0x10A7E:
case 0x10CFD:
case 0x10E6D:
case 0x1105F:
case 0x111EE:
case 0x118EE:
case 0x11C67:
case 0x12468:
case 0x1D36D:
case 0x1EC7E:
case 0x1ED0E:
return (double) 50.0;
case 0x216E:
case 0x217E:
case 0x1011D:
case 0x10145:
case 0x1014C:
case 0x10153:
case 0x1016C:
case 0x1016D:
case 0x1016E:
case 0x1016F:
case 0x10170:
case 0x102F7:
case 0x109D6:
case 0x10E76:
case 0x1EC87:
case 0x1ED17:
return (double) 500.0;
case 0x2181:
case 0x10126:
case 0x10146:
case 0x1014E:
case 0x10172:
case 0x109DF:
case 0x1EC90:
case 0x1ED20:
return (double) 5000.0;
case 0x2187:
case 0x1012F:
case 0x10147:
case 0x10156:
case 0x109E8:
case 0x1EC99:
case 0x1ED29:
return (double) 50000.0;
case 0x109F1:
return (double) 500000.0;
case 0x0036:
case 0x0666:
case 0x06F6:
case 0x07C6:
case 0x096C:
case 0x09EC:
case 0x0A6C:
case 0x0AEC:
case 0x0B6C:
case 0x0BEC:
case 0x0C6C:
case 0x0CEC:
case 0x0D6C:
case 0x0DEC:
case 0x0E56:
case 0x0ED6:
case 0x0F26:
case 0x1046:
case 0x1096:
case 0x136E:
case 0x17E6:
case 0x17F6:
case 0x1816:
case 0x194C:
case 0x19D6:
case 0x1A86:
case 0x1A96:
case 0x1B56:
case 0x1BB6:
case 0x1C46:
case 0x1C56:
case 0x2076:
case 0x2086:
case 0x2165:
case 0x2175:
case 0x2185:
case 0x2465:
case 0x2479:
case 0x248D:
case 0x24FA:
case 0x277B:
case 0x2785:
case 0x278F:
case 0x3026:
case 0x3225:
case 0x3285:
case 0x516D:
case 0x9646:
case 0x9678:
case 0xA626:
case 0xA6EB:
case 0xA8D6:
case 0xA906:
case 0xA9D6:
case 0xA9F6:
case 0xAA56:
case 0xABF6:
case 0xF9D1:
case 0xF9D3:
case 0xFF16:
case 0x1010C:
case 0x102E6:
case 0x104A6:
case 0x109C5:
case 0x10D36:
case 0x10D46:
case 0x10E65:
case 0x11057:
case 0x1106C:
case 0x110F6:
case 0x1113C:
case 0x111D6:
case 0x111E6:
case 0x112F6:
case 0x11456:
case 0x114D6:
case 0x11656:
case 0x116C6:
case 0x116D6:
case 0x116E0:
case 0x11736:
case 0x118E6:
case 0x11956:
case 0x11BF6:
case 0x11C56:
case 0x11C5F:
case 0x11D56:
case 0x11DA6:
case 0x11DE6:
case 0x11F56:
case 0x12404:
case 0x1240B:
case 0x12411:
case 0x1241A:
case 0x12428:
case 0x12440:
case 0x1244E:
case 0x1246B:
case 0x16136:
case 0x16A66:
case 0x16AC6:
case 0x16B56:
case 0x16D76:
case 0x16E86:
case 0x1CCF6:
case 0x1D2C6:
case 0x1D2E6:
case 0x1D365:
case 0x1D7D4:
case 0x1D7DE:
case 0x1D7E8:
case 0x1D7F2:
case 0x1D7FC:
case 0x1E146:
case 0x1E2F6:
case 0x1E4F6:
case 0x1E5F7:
case 0x1E8CC:
case 0x1E956:
case 0x1EC76:
case 0x1ECA8:
case 0x1ED06:
case 0x1ED33:
case 0x1F107:
case 0x1FBF6:
case 0x20AEA:
return (double) 6.0;
case 0x109FB:
return (double) 6.0/12.0;
case 0x1377:
case 0x324D:
case 0x10115:
case 0x102EF:
case 0x109CE:
case 0x10E6E:
case 0x11060:
case 0x111EF:
case 0x118EF:
case 0x11C68:
case 0x1D36E:
case 0x1EC7F:
case 0x1ED0F:
return (double) 60.0;
case 0x1011E:
case 0x102F8:
case 0x109D7:
case 0x10E77:
case 0x1EC88:
case 0x1ED18:
case 0x1ED39:
return (double) 600.0;
case 0x10127:
case 0x109E0:
case 0x1EC91:
case 0x1ED21:
return (double) 6000.0;
case 0x10130:
case 0x109E9:
case 0x1EC9A:
case 0x1ED2A:
return (double) 60000.0;
case 0x109F2:
return (double) 600000.0;
case 0x0037:
case 0x0667:
case 0x06F7:
case 0x07C7:
case 0x096D:
case 0x09ED:
case 0x0A6D:
case 0x0AED:
case 0x0B6D:
case 0x0BED:
case 0x0C6D:
case 0x0CED:
case 0x0D6D:
case 0x0DED:
case 0x0E57:
case 0x0ED7:
case 0x0F27:
case 0x1047:
case 0x1097:
case 0x136F:
case 0x17E7:
case 0x17F7:
case 0x1817:
case 0x194D:
case 0x19D7:
case 0x1A87:
case 0x1A97:
case 0x1B57:
case 0x1BB7:
case 0x1C47:
case 0x1C57:
case 0x2077:
case 0x2087:
case 0x2166:
case 0x2176:
case 0x2466:
case 0x247A:
case 0x248E:
case 0x24FB:
case 0x277C:
case 0x2786:
case 0x2790:
case 0x3027:
case 0x3226:
case 0x3286:
case 0x3B4D:
case 0x4E03:
case 0x62D0:
case 0x67D2:
case 0x6F06:
case 0xA627:
case 0xA6EC:
case 0xA8D7:
case 0xA907:
case 0xA9D7:
case 0xA9F7:
case 0xAA57:
case 0xABF7:
case 0xFF17:
case 0x1010D:
case 0x102E7:
case 0x104A7:
case 0x109C6:
case 0x10D37:
case 0x10D47:
case 0x10E66:
case 0x11058:
case 0x1106D:
case 0x110F7:
case 0x1113D:
case 0x111D7:
case 0x111E7:
case 0x112F7:
case 0x11457:
case 0x114D7:
case 0x11657:
case 0x116C7:
case 0x116D7:
case 0x116E1:
case 0x11737:
case 0x118E7:
case 0x11957:
case 0x11BF7:
case 0x11C57:
case 0x11C60:
case 0x11D57:
case 0x11DA7:
case 0x11DE7:
case 0x11F57:
case 0x12405:
case 0x1240C:
case 0x12412:
case 0x1241B:
case 0x12429:
case 0x12441:
case 0x12442:
case 0x12443:
case 0x1246C:
case 0x16137:
case 0x16A67:
case 0x16AC7:
case 0x16B57:
case 0x16D77:
case 0x16E87:
case 0x1CCF7:
case 0x1D2C7:
case 0x1D2E7:
case 0x1D366:
case 0x1D7D5:
case 0x1D7DF:
case 0x1D7E9:
case 0x1D7F3:
case 0x1D7FD:
case 0x1E147:
case 0x1E2F7:
case 0x1E4F7:
case 0x1E5F8:
case 0x1E8CD:
case 0x1E957:
case 0x1EC77:
case 0x1ECA9:
case 0x1ED07:
case 0x1ED34:
case 0x1F108:
case 0x1FBF7:
case 0x20001:
return (double) 7.0;
case 0x109FC:
return (double) 7.0/12.0;
case 0x0F2D:
return (double) 7.0/2.0;
case 0x215E:
return (double) 7.0/8.0;
case 0x1378:
case 0x324E:
case 0x10116:
case 0x102F0:
case 0x109CF:
case 0x10E6F:
case 0x11061:
case 0x111F0:
case 0x118F0:
case 0x11C69:
case 0x1D36F:
case 0x1EC80:
case 0x1ED10:
return (double) 70.0;
case 0x1011F:
case 0x102F9:
case 0x109D8:
case 0x10E78:
case 0x1EC89:
case 0x1ED19:
return (double) 700.0;
case 0x10128:
case 0x109E1:
case 0x1EC92:
case 0x1ED22:
return (double) 7000.0;
case 0x10131:
case 0x109EA:
case 0x1EC9B:
case 0x1ED2B:
return (double) 70000.0;
case 0x109F3:
return (double) 700000.0;
case 0x0038:
case 0x0668:
case 0x06F8:
case 0x07C8:
case 0x096E:
case 0x09EE:
case 0x0A6E:
case 0x0AEE:
case 0x0B6E:
case 0x0BEE:
case 0x0C6E:
case 0x0CEE:
case 0x0D6E:
case 0x0DEE:
case 0x0E58:
case 0x0ED8:
case 0x0F28:
case 0x1048:
case 0x1098:
case 0x1370:
case 0x17E8:
case 0x17F8:
case 0x1818:
case 0x194E:
case 0x19D8:
case 0x1A88:
case 0x1A98:
case 0x1B58:
case 0x1BB8:
case 0x1C48:
case 0x1C58:
case 0x2078:
case 0x2088:
case 0x2167:
case 0x2177:
case 0x2467:
case 0x247B:
case 0x248F:
case 0x24FC:
case 0x277D:
case 0x2787:
case 0x2791:
case 0x3028:
case 0x3227:
case 0x3287:
case 0x516B:
case 0x634C:
case 0xA628:
case 0xA6ED:
case 0xA8D8:
case 0xA908:
case 0xA9D8:
case 0xA9F8:
case 0xAA58:
case 0xABF8:
case 0xFF18:
case 0x1010E:
case 0x102E8:
case 0x104A8:
case 0x109C7:
case 0x10D38:
case 0x10D48:
case 0x10E67:
case 0x11059:
case 0x1106E:
case 0x110F8:
case 0x1113E:
case 0x111D8:
case 0x111E8:
case 0x112F8:
case 0x11458:
case 0x114D8:
case 0x11658:
case 0x116C8:
case 0x116D8:
case 0x116E2:
case 0x11738:
case 0x118E8:
case 0x11958:
case 0x11BF8:
case 0x11C58:
case 0x11C61:
case 0x11D58:
case 0x11DA8:
case 0x11DE8:
case 0x11F58:
case 0x12406:
case 0x1240D:
case 0x12413:
case 0x1241C:
case 0x1242A:
case 0x12444:
case 0x12445:
case 0x1246D:
case 0x16138:
case 0x16A68:
case 0x16AC8:
case 0x16B58:
case 0x16D78:
case 0x16E88:
case 0x1CCF8:
case 0x1D2C8:
case 0x1D2E8:
case 0x1D367:
case 0x1D7D6:
case 0x1D7E0:
case 0x1D7EA:
case 0x1D7F4:
case 0x1D7FE:
case 0x1E148:
case 0x1E2F8:
case 0x1E4F8:
case 0x1E5F9:
case 0x1E8CE:
case 0x1E958:
case 0x1EC78:
case 0x1ECAA:
case 0x1ED08:
case 0x1ED35:
case 0x1F109:
case 0x1FBF8:
return (double) 8.0;
case 0x109FD:
return (double) 8.0/12.0;
case 0x1379:
case 0x324F:
case 0x10117:
case 0x102F1:
case 0x10E70:
case 0x11062:
case 0x111F1:
case 0x118F1:
case 0x11C6A:
case 0x1D370:
case 0x1EC81:
case 0x1ED11:
return (double) 80.0;
case 0x10120:
case 0x102FA:
case 0x109D9:
case 0x10E79:
case 0x1EC8A:
case 0x1ED1A:
return (double) 800.0;
case 0x10129:
case 0x109E2:
case 0x1EC93:
case 0x1ED23:
return (double) 8000.0;
case 0x10132:
case 0x109EB:
case 0x1EC9C:
case 0x1ED2C:
return (double) 80000.0;
case 0x109F4:
return (double) 800000.0;
case 0x0039:
case 0x0669:
case 0x06F9:
case 0x07C9:
case 0x096F:
case 0x09EF:
case 0x0A6F:
case 0x0AEF:
case 0x0B6F:
case 0x0BEF:
case 0x0C6F:
case 0x0CEF:
case 0x0D6F:
case 0x0DEF:
case 0x0E59:
case 0x0ED9:
case 0x0F29:
case 0x1049:
case 0x1099:
case 0x1371:
case 0x17E9:
case 0x17F9:
case 0x1819:
case 0x194F:
case 0x19D9:
case 0x1A89:
case 0x1A99:
case 0x1B59:
case 0x1BB9:
case 0x1C49:
case 0x1C59:
case 0x2079:
case 0x2089:
case 0x2168:
case 0x2178:
case 0x2468:
case 0x247C:
case 0x2490:
case 0x24FD:
case 0x277E:
case 0x2788:
case 0x2792:
case 0x3029:
case 0x3228:
case 0x3288:
case 0x4E5D:
case 0x5EFE:
case 0x7396:
case 0x920E:
case 0x94A9:
case 0xA629:
case 0xA6EE:
case 0xA8D9:
case 0xA909:
case 0xA9D9:
case 0xA9F9:
case 0xAA59:
case 0xABF9:
case 0xFF19:
case 0x1010F:
case 0x102E9:
case 0x104A9:
case 0x109C8:
case 0x10D39:
case 0x10D49:
case 0x10E68:
case 0x1105A:
case 0x1106F:
case 0x110F9:
case 0x1113F:
case 0x111D9:
case 0x111E9:
case 0x112F9:
case 0x11459:
case 0x114D9:
case 0x11659:
case 0x116C9:
case 0x116D9:
case 0x116E3:
case 0x11739:
case 0x118E9:
case 0x11959:
case 0x11BF9:
case 0x11C59:
case 0x11C62:
case 0x11D59:
case 0x11DA9:
case 0x11DE9:
case 0x11F59:
case 0x12407:
case 0x1240E:
case 0x12414:
case 0x1241D:
case 0x1242B:
case 0x12446:
case 0x12447:
case 0x12448:
case 0x12449:
case 0x1246E:
case 0x16139:
case 0x16A69:
case 0x16AC9:
case 0x16B59:
case 0x16D79:
case 0x16E89:
case 0x1CCF9:
case 0x1D2C9:
case 0x1D2E9:
case 0x1D368:
case 0x1D7D7:
case 0x1D7E1:
case 0x1D7EB:
case 0x1D7F5:
case 0x1D7FF:
case 0x1E149:
case 0x1E2F9:
case 0x1E4F9:
case 0x1E5FA:
case 0x1E8CF:
case 0x1E959:
case 0x1EC79:
case 0x1ECAB:
case 0x1ED09:
case 0x1ED36:
case 0x1F10A:
case 0x1FBF9:
case 0x2F890:
return (double) 9.0;
case 0x109FE:
return (double) 9.0/12.0;
case 0x0F2E:
return (double) 9.0/2.0;
case 0x137A:
case 0x10118:
case 0x102F2:
case 0x10341:
case 0x10E71:
case 0x11063:
case 0x111F2:
case 0x118F2:
case 0x11C6B:
case 0x1D371:
case 0x1EC82:
case 0x1ED12:
return (double) 90.0;
case 0x10121:
case 0x102FB:
case 0x1034A:
case 0x109DA:
case 0x10E7A:
case 0x1EC8B:
case 0x1ED1B:
return (double) 900.0;
case 0x1012A:
case 0x109E3:
case 0x1EC94:
case 0x1ED24:
return (double) 9000.0;
case 0x10133:
case 0x109EC:
case 0x1EC9D:
case 0x1ED2D:
return (double) 90000.0;
case 0x109F5:
return (double) 900000.0;
}
return -1.0;
}
/* Returns 1 for Unicode characters having the bidirectional
* type 'WS', 'B' or 'S' or the category 'Zs', 0 otherwise.
*/
int _PyUnicode_IsWhitespace(const Py_UCS4 ch)
{
switch (ch) {
case 0x0009:
case 0x000A:
case 0x000B:
case 0x000C:
case 0x000D:
case 0x001C:
case 0x001D:
case 0x001E:
case 0x001F:
case 0x0020:
case 0x0085:
case 0x00A0:
case 0x1680:
case 0x2000:
case 0x2001:
case 0x2002:
case 0x2003:
case 0x2004:
case 0x2005:
case 0x2006:
case 0x2007:
case 0x2008:
case 0x2009:
case 0x200A:
case 0x2028:
case 0x2029:
case 0x202F:
case 0x205F:
case 0x3000:
return 1;
}
return 0;
}
/* Returns 1 for Unicode characters having the line break
 * property 'BK', 'CR', 'LF' or 'NL' or having bidirectional
 * type 'B', 0 otherwise.
 */
int _PyUnicode_IsLinebreak(const Py_UCS4 ch)
{
    /* LF..CR and FS..RS form contiguous runs; NEL and the two
       Unicode separator characters are isolated code points. */
    return ((ch >= 0x000A && ch <= 0x000D)
            || (ch >= 0x001C && ch <= 0x001E)
            || ch == 0x0085             /* NEXT LINE */
            || ch == 0x2028             /* LINE SEPARATOR */
            || ch == 0x2029) ? 1 : 0;   /* PARAGRAPH SEPARATOR */
}
} | c | github | https://github.com/python/cpython | Objects/unicodetype_db.h |
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from yowsup.layers.protocol_notifications.protocolentities import NotificationProtocolEntity
class GroupsNotificationProtocolEntity(NotificationProtocolEntity):
    '''
    Base entity for "w:gp2" group notifications, e.g.:

    <notification notify="WhatsApp" id="{{id}}" t="1420402514" participant="{{participant_jiid}}" from="{{group_jid}}" type="w:gp2">
    </notification>
    '''
    def __init__(self, _id, _from, timestamp, notify, participant, offline):
        super(GroupsNotificationProtocolEntity, self).__init__(
            "w:gp2", _id, _from, timestamp, notify, offline)
        self.setParticipant(participant)
        # NOTE(review): setGroupId writes the group jid into self._id,
        # overwriting the stanza id stored by the parent constructor --
        # confirm this aliasing is intentional before changing it.
        self.setGroupId(_from)
    def setParticipant(self, participant):
        # Full jid of the group member this notification refers to.
        self._participant = participant
    def getParticipant(self, full=True):
        # With full=False only the user part (before '@') is returned.
        if full:
            return self._participant
        return self._participant.split('@')[0]
    def getGroupId(self):
        return self._id
    def setGroupId(self, groupId):
        self._id = groupId
    def __str__(self):
        base = super(GroupsNotificationProtocolEntity, self).__str__()
        return base + "Participant: %s\n" % self.getParticipant()
    def toProtocolTreeNode(self):
        node = super(GroupsNotificationProtocolEntity, self).toProtocolTreeNode()
        node.setAttribute("participant", self.getParticipant())
        return node
    @staticmethod
    def fromProtocolTreeNode(node):
        entity = super(GroupsNotificationProtocolEntity,
                       GroupsNotificationProtocolEntity).fromProtocolTreeNode(node)
        entity.__class__ = GroupsNotificationProtocolEntity
        entity.setParticipant(node.getAttributeValue("participant"))
        entity.setGroupId(node.getAttributeValue("from"))
        return entity
"""
Generate samples of synthetic data sets.
"""
# Authors: B. Thirion, G. Varoquaux, A. Gramfort, V. Michel, O. Grisel,
# G. Louppe, J. Nothman
# License: BSD 3 clause
import numbers
import array
import numpy as np
from scipy import linalg
import scipy.sparse as sp
from ..preprocessing import MultiLabelBinarizer
from ..utils import check_array, check_random_state
from ..utils import shuffle as util_shuffle
from ..utils.fixes import astype
from ..utils.random import sample_without_replacement
from ..externals import six
map = six.moves.map
zip = six.moves.zip
def _generate_hypercube(samples, dimensions, rng):
    """Returns distinct binary samples of length dimensions
    """
    # Recurse in 30-bit chunks so 2 ** dimensions stays representable for
    # sample_without_replacement.
    if dimensions > 30:
        return np.hstack([_generate_hypercube(samples, dimensions - 30, rng),
                          _generate_hypercube(samples, 30, rng)])
    drawn = sample_without_replacement(2 ** dimensions, samples,
                                       random_state=rng)
    # View each big-endian uint32 as 4 bytes, unpack to bits and keep only
    # the `dimensions` least significant ones.
    drawn = astype(drawn, dtype='>u4', copy=False)
    bits = np.unpackbits(drawn.view('>u1')).reshape((-1, 32))
    return bits[:, -dimensions:]
def make_classification(n_samples=100, n_features=20, n_informative=2,
                        n_redundant=2, n_repeated=0, n_classes=2,
                        n_clusters_per_class=2, weights=None, flip_y=0.01,
                        class_sep=1.0, hypercube=True, shift=0.0, scale=1.0,
                        shuffle=True, random_state=None):
    """Generate a random n-class classification problem.
    This initially creates clusters of points normally distributed (std=1)
    about vertices of a `2 * class_sep`-sided hypercube, and assigns an equal
    number of clusters to each class. It introduces interdependence between
    these features and adds various types of further noise to the data.
    Prior to shuffling, `X` stacks a number of these primary "informative"
    features, "redundant" linear combinations of these, "repeated" duplicates
    of sampled features, and arbitrary noise for and remaining features.
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of samples.
    n_features : int, optional (default=20)
        The total number of features. These comprise `n_informative`
        informative features, `n_redundant` redundant features, `n_repeated`
        duplicated features and `n_features-n_informative-n_redundant-
        n_repeated` useless features drawn at random.
    n_informative : int, optional (default=2)
        The number of informative features. Each class is composed of a number
        of gaussian clusters each located around the vertices of a hypercube
        in a subspace of dimension `n_informative`. For each cluster,
        informative features are drawn independently from N(0, 1) and then
        randomly linearly combined within each cluster in order to add
        covariance. The clusters are then placed on the vertices of the
        hypercube.
    n_redundant : int, optional (default=2)
        The number of redundant features. These features are generated as
        random linear combinations of the informative features.
    n_repeated : int, optional (default=0)
        The number of duplicated features, drawn randomly from the informative
        and the redundant features.
    n_classes : int, optional (default=2)
        The number of classes (or labels) of the classification problem.
    n_clusters_per_class : int, optional (default=2)
        The number of clusters per class.
    weights : list of floats or None (default=None)
        The proportions of samples assigned to each class. If None, then
        classes are balanced. Note that if `len(weights) == n_classes - 1`,
        then the last class weight is automatically inferred.
        More than `n_samples` samples may be returned if the sum of `weights`
        exceeds 1.
    flip_y : float, optional (default=0.01)
        The fraction of samples whose class are randomly exchanged.
    class_sep : float, optional (default=1.0)
        The factor multiplying the hypercube dimension.
    hypercube : boolean, optional (default=True)
        If True, the clusters are put on the vertices of a hypercube. If
        False, the clusters are put on the vertices of a random polytope.
    shift : float, array of shape [n_features] or None, optional (default=0.0)
        Shift features by the specified value. If None, then features
        are shifted by a random value drawn in [-class_sep, class_sep].
    scale : float, array of shape [n_features] or None, optional (default=1.0)
        Multiply features by the specified value. If None, then features
        are scaled by a random value drawn in [1, 100]. Note that scaling
        happens after shifting.
    shuffle : boolean, optional (default=True)
        Shuffle the samples and the features.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    Returns
    -------
    X : array of shape [n_samples, n_features]
        The generated samples.
    y : array of shape [n_samples]
        The integer labels for class membership of each sample.
    Notes
    -----
    The algorithm is adapted from Guyon [1] and was designed to generate
    the "Madelon" dataset.
    References
    ----------
    .. [1] I. Guyon, "Design of experiments for the NIPS 2003 variable
           selection benchmark", 2003.
    See also
    --------
    make_blobs: simplified variant
    make_multilabel_classification: unrelated generator for multilabel tasks
    """
    generator = check_random_state(random_state)
    # Count features, clusters and samples
    if n_informative + n_redundant + n_repeated > n_features:
        raise ValueError("Number of informative, redundant and repeated "
                         "features must sum to less than the number of total"
                         " features")
    if 2 ** n_informative < n_classes * n_clusters_per_class:
        raise ValueError("n_classes * n_clusters_per_class must"
                         " be smaller or equal 2 ** n_informative")
    if weights and len(weights) not in [n_classes, n_classes - 1]:
        raise ValueError("Weights specified but incompatible with number "
                         "of classes.")
    n_useless = n_features - n_informative - n_redundant - n_repeated
    n_clusters = n_classes * n_clusters_per_class
    if weights and len(weights) == (n_classes - 1):
        # Build a new list rather than appending in place so the caller's
        # `weights` argument is never mutated.
        weights = weights + [1.0 - sum(weights)]
    if weights is None:
        weights = [1.0 / n_classes] * n_classes
        weights[-1] = 1.0 - sum(weights[:-1])
    # Distribute samples among clusters by weight
    n_samples_per_cluster = []
    for k in range(n_clusters):
        n_samples_per_cluster.append(int(n_samples * weights[k % n_classes]
                                     / n_clusters_per_class))
    # Hand out any remainder one sample at a time, round-robin
    for i in range(n_samples - sum(n_samples_per_cluster)):
        n_samples_per_cluster[i % n_clusters] += 1
    # Initialize X and y
    X = np.zeros((n_samples, n_features))
    # np.int was a deprecated alias removed in NumPy 1.24; the builtin
    # `int` gives the same platform-default integer dtype.
    y = np.zeros(n_samples, dtype=int)
    # Build the polytope whose vertices become cluster centroids
    centroids = _generate_hypercube(n_clusters, n_informative,
                                    generator).astype(float)
    centroids *= 2 * class_sep
    centroids -= class_sep
    if not hypercube:
        centroids *= generator.rand(n_clusters, 1)
        centroids *= generator.rand(1, n_informative)
    # Initially draw informative features from the standard normal
    X[:, :n_informative] = generator.randn(n_samples, n_informative)
    # Create each cluster; a variant of make_blobs
    stop = 0
    for k, centroid in enumerate(centroids):
        start, stop = stop, stop + n_samples_per_cluster[k]
        y[start:stop] = k % n_classes  # assign labels
        X_k = X[start:stop, :n_informative]  # slice a view of the cluster
        A = 2 * generator.rand(n_informative, n_informative) - 1
        X_k[...] = np.dot(X_k, A)  # introduce random covariance
        X_k += centroid  # shift the cluster to a vertex
    # Create redundant features
    if n_redundant > 0:
        B = 2 * generator.rand(n_informative, n_redundant) - 1
        X[:, n_informative:n_informative + n_redundant] = \
            np.dot(X[:, :n_informative], B)
    # Repeat some features
    if n_repeated > 0:
        n = n_informative + n_redundant
        indices = ((n - 1) * generator.rand(n_repeated) + 0.5).astype(np.intp)
        X[:, n:n + n_repeated] = X[:, indices]
    # Fill useless features
    if n_useless > 0:
        X[:, -n_useless:] = generator.randn(n_samples, n_useless)
    # Randomly replace labels
    if flip_y >= 0.0:
        flip_mask = generator.rand(n_samples) < flip_y
        y[flip_mask] = generator.randint(n_classes, size=flip_mask.sum())
    # Randomly shift and scale
    if shift is None:
        shift = (2 * generator.rand(n_features) - 1) * class_sep
    X += shift
    if scale is None:
        scale = 1 + 100 * generator.rand(n_features)
    X *= scale
    if shuffle:
        # Randomly permute samples
        X, y = util_shuffle(X, y, random_state=generator)
        # Randomly permute features
        indices = np.arange(n_features)
        generator.shuffle(indices)
        X[:, :] = X[:, indices]
    return X, y
def make_multilabel_classification(n_samples=100, n_features=20, n_classes=5,
                                   n_labels=2, length=50, allow_unlabeled=True,
                                   sparse=False, return_indicator='dense',
                                   return_distributions=False,
                                   random_state=None):
    """Generate a random multilabel classification problem.
    For each sample, the generative process is:
        - pick the number of labels: n ~ Poisson(n_labels)
        - n times, choose a class c: c ~ Multinomial(theta)
        - pick the document length: k ~ Poisson(length)
        - k times, choose a word: w ~ Multinomial(theta_c)
    In the above process, rejection sampling is used to make sure that
    n is never zero or more than `n_classes`, and that the document length
    is never zero. Likewise, we reject classes which have already been chosen.
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of samples.
    n_features : int, optional (default=20)
        The total number of features.
    n_classes : int, optional (default=5)
        The number of classes of the classification problem.
    n_labels : int, optional (default=2)
        The average number of labels per instance. More precisely, the number
        of labels per sample is drawn from a Poisson distribution with
        ``n_labels`` as its expected value, but samples are bounded (using
        rejection sampling) by ``n_classes``, and must be nonzero if
        ``allow_unlabeled`` is False.
    length : int, optional (default=50)
        The sum of the features (number of words if documents) is drawn from
        a Poisson distribution with this expected value.
    allow_unlabeled : bool, optional (default=True)
        If ``True``, some instances might not belong to any class.
    sparse : bool, optional (default=False)
        If ``True``, return a sparse feature matrix
        .. versionadded:: 0.17
           parameter to allow *sparse* output.
    return_indicator : 'dense' (default) | 'sparse' | False
        If ``dense`` return ``Y`` in the dense binary indicator format. If
        ``'sparse'`` return ``Y`` in the sparse binary indicator format.
        ``False`` returns a list of lists of labels.
    return_distributions : bool, optional (default=False)
        If ``True``, return the prior class probability and conditional
        probabilities of features given classes, from which the data was
        drawn.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    Returns
    -------
    X : array of shape [n_samples, n_features]
        The generated samples.
    Y : array or sparse CSR matrix of shape [n_samples, n_classes]
        The label sets.
    p_c : array, shape [n_classes]
        The probability of each class being drawn. Only returned if
        ``return_distributions=True``.
    p_w_c : array, shape [n_features, n_classes]
        The probability of each feature being drawn given each class.
        Only returned if ``return_distributions=True``.
    """
    generator = check_random_state(random_state)
    # Random class prior p(c), normalized to sum to 1.
    p_c = generator.rand(n_classes)
    p_c /= p_c.sum()
    cumulative_p_c = np.cumsum(p_c)
    # Per-class word distributions p(w|c), each column normalized.
    p_w_c = generator.rand(n_features, n_classes)
    p_w_c /= np.sum(p_w_c, axis=0)
    def sample_example():
        # Draw one (words, labels) pair following the generative process
        # documented above. Closes over generator/p_w_c/cumulative_p_c.
        _, n_classes = p_w_c.shape
        # pick a nonzero number of labels per document by rejection sampling
        # (y_size starts out-of-range so the loop body always runs once)
        y_size = n_classes + 1
        while (not allow_unlabeled and y_size == 0) or y_size > n_classes:
            y_size = generator.poisson(n_labels)
        # pick n classes
        y = set()
        while len(y) != y_size:
            # pick a class with probability P(c)
            # (duplicates are rejected implicitly: y is a set, so only the
            # still-missing count is redrawn each iteration)
            c = np.searchsorted(cumulative_p_c,
                                generator.rand(y_size - len(y)))
            y.update(c)
        y = list(y)
        # pick a non-zero document length by rejection sampling
        n_words = 0
        while n_words == 0:
            n_words = generator.poisson(length)
        # generate a document of length n_words
        if len(y) == 0:
            # if sample does not belong to any class, generate noise word
            words = generator.randint(n_features, size=n_words)
            return words, y
        # sample words with replacement from selected classes
        # (mixture of the selected classes' word distributions)
        cumulative_p_w_sample = p_w_c.take(y, axis=1).sum(axis=1).cumsum()
        cumulative_p_w_sample /= cumulative_p_w_sample[-1]
        words = np.searchsorted(cumulative_p_w_sample, generator.rand(n_words))
        return words, y
    # Accumulate CSR-format components incrementally; array.array keeps the
    # index buffers compact while growing.
    X_indices = array.array('i')
    X_indptr = array.array('i', [0])
    Y = []
    for i in range(n_samples):
        words, y = sample_example()
        X_indices.extend(words)
        X_indptr.append(len(X_indices))
        Y.append(y)
    X_data = np.ones(len(X_indices), dtype=np.float64)
    X = sp.csr_matrix((X_data, X_indices, X_indptr),
                      shape=(n_samples, n_features))
    # Repeated words in a document become counts > 1.
    X.sum_duplicates()
    if not sparse:
        X = X.toarray()
    # return_indicator can be True due to backward compatibility
    if return_indicator in (True, 'sparse', 'dense'):
        lb = MultiLabelBinarizer(sparse_output=(return_indicator == 'sparse'))
        Y = lb.fit([range(n_classes)]).transform(Y)
    elif return_indicator is not False:
        raise ValueError("return_indicator must be either 'sparse', 'dense' "
                         'or False.')
    if return_distributions:
        return X, Y, p_c, p_w_c
    return X, Y
def make_hastie_10_2(n_samples=12000, random_state=None):
    """Generates data for binary classification used in
    Hastie et al. 2009, Example 10.2.
    The ten features are standard independent Gaussian and
    the target ``y`` is defined by::
      y[i] = 1 if np.sum(X[i] ** 2) > 9.34 else -1
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=12000)
        The number of samples.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    Returns
    -------
    X : array of shape [n_samples, 10]
        The input samples.
    y : array of shape [n_samples]
        The output values.
    References
    ----------
    .. [1] T. Hastie, R. Tibshirani and J. Friedman, "Elements of Statistical
           Learning Ed. 2", Springer, 2009.
    See also
    --------
    make_gaussian_quantiles: a generalization of this dataset approach
    """
    rng = check_random_state(random_state)
    X = rng.normal(size=(n_samples, 10))
    # Label is +1 inside the squared-radius-9.34 shell, -1 outside.
    y = np.where((X ** 2.0).sum(axis=1) > 9.34, 1.0, -1.0)
    return X, y
def make_regression(n_samples=100, n_features=100, n_informative=10,
                    n_targets=1, bias=0.0, effective_rank=None,
                    tail_strength=0.5, noise=0.0, shuffle=True, coef=False,
                    random_state=None):
    """Generate a random regression problem.
    The input set can either be well conditioned (by default) or have a low
    rank-fat tail singular profile. See :func:`make_low_rank_matrix` for
    more details.
    The output is generated by applying a (potentially biased) random linear
    regression model with `n_informative` nonzero regressors to the previously
    generated input and some gaussian centered noise with some adjustable
    scale.
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of samples.
    n_features : int, optional (default=100)
        The number of features.
    n_informative : int, optional (default=10)
        The number of informative features, i.e., the number of features used
        to build the linear model used to generate the output.
    n_targets : int, optional (default=1)
        The number of regression targets, i.e., the dimension of the y output
        vector associated with a sample. By default, the output is a scalar.
    bias : float, optional (default=0.0)
        The bias term in the underlying linear model.
    effective_rank : int or None, optional (default=None)
        If not None, the approximate number of singular vectors required to
        explain most of the input data by linear combinations; if None, the
        input set is well conditioned, centered and gaussian with unit
        variance.
    tail_strength : float between 0.0 and 1.0, optional (default=0.5)
        The relative importance of the fat noisy tail of the singular values
        profile if `effective_rank` is not None.
    noise : float, optional (default=0.0)
        The standard deviation of the gaussian noise applied to the output.
    shuffle : boolean, optional (default=True)
        Shuffle the samples and the features.
    coef : boolean, optional (default=False)
        If True, the coefficients of the underlying linear model are returned.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    Returns
    -------
    X : array of shape [n_samples, n_features]
        The input samples.
    y : array of shape [n_samples] or [n_samples, n_targets]
        The output values.
    coef : array of shape [n_features] or [n_features, n_targets], optional
        The coefficient of the underlying linear model. It is returned only if
        coef is True.
    """
    n_informative = min(n_features, n_informative)
    rng = check_random_state(random_state)

    # Inputs: either i.i.d. standard normal, or a low-rank fat-tailed matrix.
    if effective_rank is None:
        X = rng.randn(n_samples, n_features)
    else:
        X = make_low_rank_matrix(n_samples=n_samples,
                                 n_features=n_features,
                                 effective_rank=effective_rank,
                                 tail_strength=tail_strength,
                                 random_state=rng)

    # Sparse ground-truth coefficients: only the first n_informative rows are
    # nonzero, so the remaining features are uncorrelated with y (and should
    # be dropped by sparsifying regularizers such as L1 or elastic net).
    ground_truth = np.zeros((n_features, n_targets))
    ground_truth[:n_informative, :] = 100 * rng.rand(n_informative, n_targets)

    y = np.dot(X, ground_truth) + bias

    # Optional additive gaussian noise on the targets.
    if noise > 0.0:
        y += rng.normal(scale=noise, size=y.shape)

    # Optionally permute samples, then features (keeping coef in sync).
    if shuffle:
        X, y = util_shuffle(X, y, random_state=rng)
        indices = np.arange(n_features)
        rng.shuffle(indices)
        X[:, :] = X[:, indices]
        ground_truth = ground_truth[indices]

    y = np.squeeze(y)
    if coef:
        return X, y, np.squeeze(ground_truth)
    return X, y
def make_circles(n_samples=100, shuffle=True, noise=None, random_state=None,
                 factor=.8):
    """Make a large circle containing a smaller circle in 2d.
    A simple toy dataset to visualize clustering and classification
    algorithms.
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The total number of points generated.
    shuffle: bool, optional (default=True)
        Whether to shuffle the samples.
    noise : double or None (default=None)
        Standard deviation of Gaussian noise added to the data.
    factor : double < 1 (default=.8)
        Scale factor between inner and outer circle.
    Returns
    -------
    X : array of shape [n_samples, 2]
        The generated samples.
    y : array of shape [n_samples]
        The integer labels (0 or 1) for class membership of each sample.
    """
    if not 0 <= factor <= 1:
        raise ValueError("'factor' has to be between 0 and 1.")
    rng = check_random_state(random_state)
    n_half = n_samples // 2
    # Drop the 2*pi endpoint so the first and last points do not coincide.
    angles = np.linspace(0, 2 * np.pi, n_half + 1)[:-1]
    cos_a = np.cos(angles)
    sin_a = np.sin(angles)
    # Outer circle first (label 0), then the inner one scaled by factor
    # (label 1).
    X = np.vstack((np.append(cos_a, cos_a * factor),
                   np.append(sin_a, sin_a * factor))).T
    y = np.hstack([np.zeros(n_half, dtype=np.intp),
                   np.ones(n_half, dtype=np.intp)])
    if shuffle:
        X, y = util_shuffle(X, y, random_state=rng)
    if noise is not None:
        X += rng.normal(scale=noise, size=X.shape)
    return X, y
def make_moons(n_samples=100, shuffle=True, noise=None, random_state=None):
    """Make two interleaving half circles
    A simple toy dataset to visualize clustering and classification
    algorithms.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The total number of points generated.
    shuffle : bool, optional (default=True)
        Whether to shuffle the samples.
    noise : double or None (default=None)
        Standard deviation of Gaussian noise added to the data.
    Read more in the :ref:`User Guide <sample_generators>`.
    Returns
    -------
    X : array of shape [n_samples, 2]
        The generated samples.
    y : array of shape [n_samples]
        The integer labels (0 or 1) for class membership of each sample.
    """
    n_samples_out = n_samples // 2
    n_samples_in = n_samples - n_samples_out
    generator = check_random_state(random_state)
    # Upper half circle (label 0) and a lower, shifted half circle (label 1).
    outer_circ_x = np.cos(np.linspace(0, np.pi, n_samples_out))
    outer_circ_y = np.sin(np.linspace(0, np.pi, n_samples_out))
    inner_circ_x = 1 - np.cos(np.linspace(0, np.pi, n_samples_in))
    inner_circ_y = 1 - np.sin(np.linspace(0, np.pi, n_samples_in)) - .5
    X = np.vstack((np.append(outer_circ_x, inner_circ_x),
                   np.append(outer_circ_y, inner_circ_y))).T
    # BUGFIX: the zeros must cover the n_samples_out outer points that come
    # first in X, and the ones the n_samples_in inner points. The counts were
    # previously swapped, mislabeling one inner point whenever n_samples is
    # odd (n_samples_in == n_samples_out + 1).
    y = np.hstack([np.zeros(n_samples_out, dtype=np.intp),
                   np.ones(n_samples_in, dtype=np.intp)])
    if shuffle:
        X, y = util_shuffle(X, y, random_state=generator)
    if noise is not None:
        X += generator.normal(scale=noise, size=X.shape)
    return X, y
def make_blobs(n_samples=100, n_features=2, centers=3, cluster_std=1.0,
               center_box=(-10.0, 10.0), shuffle=True, random_state=None):
    """Generate isotropic Gaussian blobs for clustering.
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The total number of points equally divided among clusters.
    n_features : int, optional (default=2)
        The number of features for each sample.
    centers : int or array of shape [n_centers, n_features], optional
        (default=3)
        The number of centers to generate, or the fixed center locations.
    cluster_std: float or sequence of floats, optional (default=1.0)
        The standard deviation of the clusters.
    center_box: pair of floats (min, max), optional (default=(-10.0, 10.0))
        The bounding box for each cluster center when centers are
        generated at random.
    shuffle : boolean, optional (default=True)
        Shuffle the samples.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    Returns
    -------
    X : array of shape [n_samples, n_features]
        The generated samples.
    y : array of shape [n_samples]
        The integer labels for cluster membership of each sample.
    Examples
    --------
    >>> from sklearn.datasets.samples_generator import make_blobs
    >>> X, y = make_blobs(n_samples=10, centers=3, n_features=2,
    ...                   random_state=0)
    >>> print(X.shape)
    (10, 2)
    >>> y
    array([0, 0, 1, 0, 2, 2, 2, 1, 1, 0])
    See also
    --------
    make_classification: a more intricate variant
    """
    rng = check_random_state(random_state)

    # Resolve `centers`: an int means "draw that many uniformly in the box",
    # otherwise it is taken as explicit center coordinates.
    if isinstance(centers, numbers.Integral):
        centers = rng.uniform(center_box[0], center_box[1],
                              size=(centers, n_features))
    else:
        centers = check_array(centers)
        n_features = centers.shape[1]

    n_centers = centers.shape[0]
    # A scalar std applies to every cluster.
    if isinstance(cluster_std, numbers.Real):
        cluster_std = np.ones(n_centers) * cluster_std

    # Split n_samples as evenly as possible; the first (n_samples mod
    # n_centers) clusters absorb the remainder.
    base, extra = divmod(n_samples, n_centers)
    n_samples_per_center = [base + (1 if i < extra else 0)
                            for i in range(n_centers)]

    X = []
    y = []
    for i, (n, std) in enumerate(zip(n_samples_per_center, cluster_std)):
        X.append(centers[i] + rng.normal(scale=std, size=(n, n_features)))
        y += [i] * n
    X = np.concatenate(X)
    y = np.array(y)

    if shuffle:
        perm = np.arange(n_samples)
        rng.shuffle(perm)
        X = X[perm]
        y = y[perm]
    return X, y
def make_friedman1(n_samples=100, n_features=10, noise=0.0, random_state=None):
    """Generate the "Friedman \#1" regression problem
    This dataset is described in Friedman [1] and Breiman [2].
    Inputs `X` are independent features uniformly distributed on the interval
    [0, 1]. The output `y` is created according to the formula::
      y(X) = 10 * sin(pi * X[:, 0] * X[:, 1]) + 20 * (X[:, 2] - 0.5) ** 2 \
+ 10 * X[:, 3] + 5 * X[:, 4] + noise * N(0, 1).
    Out of the `n_features` features, only 5 are actually used to compute
    `y`. The remaining features are independent of `y`.
    The number of features has to be >= 5.
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of samples.
    n_features : int, optional (default=10)
        The number of features. Should be at least 5.
    noise : float, optional (default=0.0)
        The standard deviation of the gaussian noise applied to the output.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    Returns
    -------
    X : array of shape [n_samples, n_features]
        The input samples.
    y : array of shape [n_samples]
        The output values.
    References
    ----------
    .. [1] J. Friedman, "Multivariate adaptive regression splines", The Annals
           of Statistics 19 (1), pages 1-67, 1991.
    .. [2] L. Breiman, "Bagging predictors", Machine Learning 24,
           pages 123-140, 1996.
    """
    if n_features < 5:
        raise ValueError("n_features must be at least five.")
    rng = check_random_state(random_state)
    X = rng.rand(n_samples, n_features)
    # Only the first five columns enter the target; the rest are pure noise
    # features.
    signal = (10 * np.sin(np.pi * X[:, 0] * X[:, 1])
              + 20 * (X[:, 2] - 0.5) ** 2
              + 10 * X[:, 3]
              + 5 * X[:, 4])
    y = signal + noise * rng.randn(n_samples)
    return X, y
def make_friedman2(n_samples=100, noise=0.0, random_state=None):
    """Generate the "Friedman \#2" regression problem
    This dataset is described in Friedman [1] and Breiman [2].
    Inputs `X` are 4 independent features uniformly distributed on the
    intervals::
      0 <= X[:, 0] <= 100,
      40 * pi <= X[:, 1] <= 560 * pi,
      0 <= X[:, 2] <= 1,
      1 <= X[:, 3] <= 11.
    The output `y` is created according to the formula::
      y(X) = (X[:, 0] ** 2 + (X[:, 1] * X[:, 2] \
- 1 / (X[:, 1] * X[:, 3])) ** 2) ** 0.5 + noise * N(0, 1).
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of samples.
    noise : float, optional (default=0.0)
        The standard deviation of the gaussian noise applied to the output.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    Returns
    -------
    X : array of shape [n_samples, 4]
        The input samples.
    y : array of shape [n_samples]
        The output values.
    References
    ----------
    .. [1] J. Friedman, "Multivariate adaptive regression splines", The Annals
           of Statistics 19 (1), pages 1-67, 1991.
    .. [2] L. Breiman, "Bagging predictors", Machine Learning 24,
           pages 123-140, 1996.
    """
    rng = check_random_state(random_state)
    X = rng.rand(n_samples, 4)
    # Map the uniform [0, 1) draws onto the documented intervals
    # (columns 2 stays on [0, 1)).
    X[:, 0] *= 100
    X[:, 1] = X[:, 1] * (520 * np.pi) + 40 * np.pi
    X[:, 3] = X[:, 3] * 10 + 1
    impedance_sq = (X[:, 0] ** 2
                    + (X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) ** 2)
    y = impedance_sq ** 0.5 + noise * rng.randn(n_samples)
    return X, y
def make_friedman3(n_samples=100, noise=0.0, random_state=None):
    """Generate the "Friedman \#3" regression problem
    This dataset is described in Friedman [1] and Breiman [2].
    Inputs `X` are 4 independent features uniformly distributed on the
    intervals::
      0 <= X[:, 0] <= 100,
      40 * pi <= X[:, 1] <= 560 * pi,
      0 <= X[:, 2] <= 1,
      1 <= X[:, 3] <= 11.
    The output `y` is created according to the formula::
      y(X) = arctan((X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) \
/ X[:, 0]) + noise * N(0, 1).
    Read more in the :ref:`User Guide <sample_generators>`.
    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of samples.
    noise : float, optional (default=0.0)
        The standard deviation of the gaussian noise applied to the output.
    random_state : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator;
        If RandomState instance, random_state is the random number generator;
        If None, the random number generator is the RandomState instance used
        by `np.random`.
    Returns
    -------
    X : array of shape [n_samples, 4]
        The input samples.
    y : array of shape [n_samples]
        The output values.
    References
    ----------
    .. [1] J. Friedman, "Multivariate adaptive regression splines", The Annals
           of Statistics 19 (1), pages 1-67, 1991.
    .. [2] L. Breiman, "Bagging predictors", Machine Learning 24,
           pages 123-140, 1996.
    """
    rng = check_random_state(random_state)
    X = rng.rand(n_samples, 4)
    # Map the uniform [0, 1) draws onto the documented intervals
    # (column 2 stays on [0, 1)).
    X[:, 0] *= 100
    X[:, 1] = X[:, 1] * (520 * np.pi) + 40 * np.pi
    X[:, 3] = X[:, 3] * 10 + 1
    ratio = (X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) / X[:, 0]
    y = np.arctan(ratio) + noise * rng.randn(n_samples)
    return X, y
def make_low_rank_matrix(n_samples=100, n_features=100, effective_rank=10,
                         tail_strength=0.5, random_state=None):
    """Generate a mostly low rank matrix with bell-shaped singular values.

    Most of the variance can be explained by a bell-shaped curve of width
    ``effective_rank``: the low rank part of the singular values profile is::

        (1 - tail_strength) * exp(-1.0 * (i / effective_rank) ** 2)

    The remaining singular values' tail is fat, decreasing as::

        tail_strength * exp(-0.1 * i / effective_rank).

    The low rank part of the profile can be considered the structured signal
    part of the data while the tail is the noisy part that cannot be
    summarized by a low number of linear components (singular vectors).
    This kind of singular profile is often seen in practice, for instance:

    - gray level pictures of faces
    - TF-IDF vectors of text documents crawled from the web

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of samples.
    n_features : int, optional (default=100)
        The number of features.
    effective_rank : int, optional (default=10)
        The approximate number of singular vectors required to explain most
        of the data by linear combinations.
    tail_strength : float between 0.0 and 1.0, optional (default=0.5)
        The relative importance of the fat noisy tail of the singular values
        profile.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).

    Returns
    -------
    X : array of shape [n_samples, n_features]
        The matrix.
    """
    rng = check_random_state(random_state)
    rank = min(n_samples, n_features)
    # Economic QR of Gaussian matrices yields random orthonormal columns
    # for the left and right singular bases.
    u, _ = linalg.qr(rng.randn(n_samples, rank), mode='economic')
    v, _ = linalg.qr(rng.randn(n_features, rank), mode='economic')
    indices = np.arange(rank, dtype=np.float64)
    # Assemble the singular value profile: bell-shaped signal + fat tail.
    signal = ((1 - tail_strength) *
              np.exp(-1.0 * (indices / effective_rank) ** 2))
    fat_tail = tail_strength * np.exp(-0.1 * indices / effective_rank)
    s = np.identity(rank) * (signal + fat_tail)
    return np.dot(np.dot(u, s), v.T)
def make_sparse_coded_signal(n_samples, n_components, n_features,
                             n_nonzero_coefs, random_state=None):
    """Generate a signal as a sparse combination of dictionary elements.

    Returns a matrix Y = DX, such that D is (n_features, n_components),
    X is (n_components, n_samples) and each column of X has exactly
    n_nonzero_coefs non-zero elements.

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    n_samples : int
        Number of samples to generate.
    n_components : int
        Number of components in the dictionary.
    n_features : int
        Number of features of the dataset to generate.
    n_nonzero_coefs : int
        Number of active (non-zero) coefficients in each sample.
    random_state : int or RandomState instance, optional (default=None)
        Seed used by the pseudo random number generator.

    Returns
    -------
    data : array of shape [n_features, n_samples]
        The encoded signal (Y).
    dictionary : array of shape [n_features, n_components]
        The dictionary with normalized components (D).
    code : array of shape [n_components, n_samples]
        The sparse code such that each column of this matrix has exactly
        n_nonzero_coefs non-zero items (X).
    """
    generator = check_random_state(random_state)
    # Generate the dictionary and normalize each atom to unit l2 norm.
    D = generator.randn(n_features, n_components)
    D /= np.sqrt(np.sum((D ** 2), axis=0))
    # Generate the code: each column gets exactly n_nonzero_coefs non-zero
    # entries at uniformly random positions.
    X = np.zeros((n_components, n_samples))
    for i in range(n_samples):
        idx = np.arange(n_components)
        generator.shuffle(idx)
        idx = idx[:n_nonzero_coefs]
        X[idx, i] = generator.randn(n_nonzero_coefs)
    # Encode the signal.
    Y = np.dot(D, X)
    # Return concrete arrays, not a lazy ``map`` object: under Python 3
    # ``map`` is a one-shot iterator, which breaks callers that index the
    # result or iterate it more than once.
    return tuple(map(np.squeeze, (Y, D, X)))
def make_sparse_uncorrelated(n_samples=100, n_features=10, random_state=None):
    """Generate a random regression problem with sparse uncorrelated design.

    This dataset is described in Celeux et al [1]. as::

        X ~ N(0, 1)
        y(X) = X[:, 0] + 2 * X[:, 1] - 2 * X[:, 2] - 1.5 * X[:, 3]

    Only the first 4 features are informative; the remaining features are
    useless.

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of samples.
    n_features : int, optional (default=10)
        The number of features.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).

    Returns
    -------
    X : array of shape [n_samples, n_features]
        The input samples.
    y : array of shape [n_samples]
        The output values.

    References
    ----------
    .. [1] G. Celeux, M. El Anbari, J.-M. Marin, C. P. Robert,
           "Regularization in regression: comparing Bayesian and frequentist
           methods in a poorly informative situation", 2009.
    """
    rng = check_random_state(random_state)
    X = rng.normal(loc=0, scale=1, size=(n_samples, n_features))
    # Only the first four columns contribute to the mean of the response.
    location = X[:, 0] + 2 * X[:, 1] - 2 * X[:, 2] - 1.5 * X[:, 3]
    y = rng.normal(loc=location, scale=np.ones(n_samples))
    return X, y
def make_spd_matrix(n_dim, random_state=None):
    """Generate a random symmetric, positive-definite matrix.

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    n_dim : int
        The matrix dimension.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).

    Returns
    -------
    X : array of shape [n_dim, n_dim]
        The random symmetric, positive-definite matrix.

    See also
    --------
    make_sparse_spd_matrix
    """
    rng = check_random_state(random_state)
    seed_matrix = rng.rand(n_dim, n_dim)
    # SVD of A^T A gives an orthogonal basis; recomposing with singular
    # values drawn in (1, 2) keeps the result strictly positive definite.
    U, _, V = linalg.svd(np.dot(seed_matrix.T, seed_matrix))
    X = np.dot(np.dot(U, 1.0 + np.diag(rng.rand(n_dim))), V)
    return X
def make_sparse_spd_matrix(dim=1, alpha=0.95, norm_diag=False,
                           smallest_coef=.1, largest_coef=.9,
                           random_state=None):
    """Generate a sparse symmetric definite positive matrix.

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    dim : integer, optional (default=1)
        The size of the random matrix to generate.
    alpha : float between 0 and 1, optional (default=0.95)
        The probability that a coefficient is non zero (see notes).
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).
    largest_coef : float between 0 and 1, optional (default=0.9)
        The value of the largest coefficient.
    smallest_coef : float between 0 and 1, optional (default=0.1)
        The value of the smallest coefficient.
    norm_diag : boolean, optional (default=False)
        Whether to normalize the output matrix to make the leading diagonal
        elements all 1.

    Returns
    -------
    prec : sparse matrix of shape (dim, dim)
        The generated matrix.

    Notes
    -----
    The sparsity is actually imposed on the cholesky factor of the matrix,
    so alpha does not translate directly into the filling fraction of the
    matrix itself.

    See also
    --------
    make_spd_matrix
    """
    rng = check_random_state(random_state)
    chol = -np.eye(dim)
    values = rng.rand(dim, dim)
    # Keep only entries above the alpha threshold and rescale them into
    # [smallest_coef, largest_coef]; everything below alpha becomes zero.
    keep = values > alpha
    values[values < alpha] = 0
    values[keep] = (smallest_coef
                    + (largest_coef - smallest_coef) * rng.rand(keep.sum()))
    values = np.tril(values, k=-1)
    # Permute rows and columns identically so the final SPD matrix carries
    # no asymmetry artifacts from the triangular construction.
    permutation = rng.permutation(dim)
    values = values[permutation].T[permutation]
    chol += values
    prec = np.dot(chol.T, chol)
    if norm_diag:
        # Form the diagonal vector into a row matrix and rescale both sides.
        d = np.diag(prec).reshape(1, prec.shape[0])
        d = 1. / np.sqrt(d)
        prec *= d
        prec *= d.T
    return prec
def make_swiss_roll(n_samples=100, noise=0.0, random_state=None):
    """Generate a swiss roll dataset.

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of sample points on the S curve.
    noise : float, optional (default=0.0)
        The standard deviation of the gaussian noise.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).

    Returns
    -------
    X : array of shape [n_samples, 3]
        The points.
    t : array of shape [n_samples]
        The univariate position of the sample according to the main
        dimension of the points in the manifold.

    Notes
    -----
    The algorithm is from Marsland [1].

    References
    ----------
    .. [1] S. Marsland, "Machine Learning: An Algorithmic Perspective",
           Chapter 10, 2009.
           http://www-ist.massey.ac.nz/smarsland/Code/10/lle.py
    """
    rng = check_random_state(random_state)
    # Angular parameter of the roll, uniform over [1.5*pi, 4.5*pi).
    t = 1.5 * np.pi * (1 + 2 * rng.rand(1, n_samples))
    coords = np.concatenate((t * np.cos(t),
                             21 * rng.rand(1, n_samples),
                             t * np.sin(t)))
    coords += noise * rng.randn(3, n_samples)
    return coords.T, np.squeeze(t)
def make_s_curve(n_samples=100, noise=0.0, random_state=None):
    """Generate an S curve dataset.

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    n_samples : int, optional (default=100)
        The number of sample points on the S curve.
    noise : float, optional (default=0.0)
        The standard deviation of the gaussian noise.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).

    Returns
    -------
    X : array of shape [n_samples, 3]
        The points.
    t : array of shape [n_samples]
        The univariate position of the sample according to the main
        dimension of the points in the manifold.
    """
    rng = check_random_state(random_state)
    # Curve parameter, uniform over [-1.5*pi, 1.5*pi).
    t = 3 * np.pi * (rng.rand(1, n_samples) - 0.5)
    points = np.concatenate((np.sin(t),
                             2.0 * rng.rand(1, n_samples),
                             np.sign(t) * (np.cos(t) - 1)))
    points += noise * rng.randn(3, n_samples)
    return points.T, np.squeeze(t)
def make_gaussian_quantiles(mean=None, cov=1., n_samples=100,
                            n_features=2, n_classes=3,
                            shuffle=True, random_state=None):
    r"""Generate isotropic Gaussian and label samples by quantile.

    This classification dataset is constructed by taking a multi-dimensional
    standard normal distribution and defining classes separated by nested
    concentric multi-dimensional spheres such that roughly equal numbers of
    samples are in each class (quantiles of the :math:`\chi^2` distribution).

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    mean : array of shape [n_features], optional (default=None)
        The mean of the multi-dimensional normal distribution.
        If None then use the origin (0, 0, ...).
    cov : float, optional (default=1.)
        The covariance matrix will be this value times the unit matrix. This
        dataset only produces symmetric normal distributions.
    n_samples : int, optional (default=100)
        The total number of points equally divided among classes.
    n_features : int, optional (default=2)
        The number of features for each sample.
    n_classes : int, optional (default=3)
        The number of classes.
    shuffle : boolean, optional (default=True)
        Shuffle the samples.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).

    Returns
    -------
    X : array of shape [n_samples, n_features]
        The generated samples.
    y : array of shape [n_samples]
        The integer labels for quantile membership of each sample.

    Notes
    -----
    The dataset is from Zhu et al [1].

    References
    ----------
    .. [1] J. Zhu, H. Zou, S. Rosset, T. Hastie, "Multi-class AdaBoost", 2009.
    """
    if n_samples < n_classes:
        raise ValueError("n_samples must be at least n_classes")
    rng = check_random_state(random_state)
    mean = np.zeros(n_features) if mean is None else np.array(mean)
    # Draw from the isotropic multivariate normal distribution.
    X = rng.multivariate_normal(mean, cov * np.identity(n_features),
                                (n_samples,))
    # Order samples by squared distance from the mean, then label them in
    # contiguous groups: each group is a quantile shell.
    order = np.argsort(np.sum((X - mean[np.newaxis, :]) ** 2, axis=1))
    X = X[order, :]
    step = n_samples // n_classes
    y = np.hstack([np.repeat(np.arange(n_classes), step),
                   np.repeat(n_classes - 1, n_samples - step * n_classes)])
    if shuffle:
        X, y = util_shuffle(X, y, random_state=rng)
    return X, y
def _shuffle(data, random_state=None):
    """Shuffle the rows and columns of ``data`` independently.

    Returns the shuffled array together with the row and column
    permutation indices that were applied.
    """
    rng = check_random_state(random_state)
    n_rows, n_cols = data.shape
    row_idx = rng.permutation(n_rows)
    col_idx = rng.permutation(n_cols)
    return data[row_idx][:, col_idx], row_idx, col_idx
def make_biclusters(shape, n_clusters, noise=0.0, minval=10,
                    maxval=100, shuffle=True, random_state=None):
    """Generate an array with constant block diagonal structure for
    biclustering.

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    shape : iterable (n_rows, n_cols)
        The shape of the result.
    n_clusters : integer
        The number of biclusters.
    noise : float, optional (default=0.0)
        The standard deviation of the gaussian noise.
    minval : int, optional (default=10)
        Minimum value of a bicluster.
    maxval : int, optional (default=100)
        Maximum value of a bicluster.
    shuffle : boolean, optional (default=True)
        Shuffle the samples.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).

    Returns
    -------
    X : array of shape `shape`
        The generated array.
    rows : array of shape (n_clusters, X.shape[0],)
        The indicators for cluster membership of each row.
    cols : array of shape (n_clusters, X.shape[1],)
        The indicators for cluster membership of each column.

    References
    ----------
    .. [1] Dhillon, I. S. (2001, August). Co-clustering documents and
        words using bipartite spectral graph partitioning. In Proceedings
        of the seventh ACM SIGKDD international conference on Knowledge
        discovery and data mining (pp. 269-274). ACM.

    See also
    --------
    make_checkerboard
    """
    generator = check_random_state(random_state)
    n_rows, n_cols = shape
    consts = generator.uniform(minval, maxval, n_clusters)
    # Row and column clusters of approximately equal sizes.
    row_sizes = generator.multinomial(n_rows,
                                      np.repeat(1.0 / n_clusters,
                                                n_clusters))
    col_sizes = generator.multinomial(n_cols,
                                      np.repeat(1.0 / n_clusters,
                                                n_clusters))
    row_labels = np.hstack([np.repeat(val, rep) for val, rep in
                            zip(range(n_clusters), row_sizes)])
    col_labels = np.hstack([np.repeat(val, rep) for val, rep in
                            zip(range(n_clusters), col_sizes)])
    result = np.zeros(shape, dtype=np.float64)
    for i in range(n_clusters):
        selector = np.outer(row_labels == i, col_labels == i)
        result[selector] += consts[i]
    if noise > 0:
        result += generator.normal(scale=noise, size=result.shape)
    if shuffle:
        result, row_idx, col_idx = _shuffle(result, random_state)
        row_labels = row_labels[row_idx]
        col_labels = col_labels[col_idx]
    # np.vstack requires a sequence of arrays; generator input is
    # deprecated and later removed in NumPy, so build explicit lists.
    rows = np.vstack([row_labels == c for c in range(n_clusters)])
    cols = np.vstack([col_labels == c for c in range(n_clusters)])
    return result, rows, cols
def make_checkerboard(shape, n_clusters, noise=0.0, minval=10,
                      maxval=100, shuffle=True, random_state=None):
    """Generate an array with block checkerboard structure for
    biclustering.

    Read more in the :ref:`User Guide <sample_generators>`.

    Parameters
    ----------
    shape : iterable (n_rows, n_cols)
        The shape of the result.
    n_clusters : integer or iterable (n_row_clusters, n_column_clusters)
        The number of row and column clusters.
    noise : float, optional (default=0.0)
        The standard deviation of the gaussian noise.
    minval : int, optional (default=10)
        Minimum value of a bicluster.
    maxval : int, optional (default=100)
        Maximum value of a bicluster.
    shuffle : boolean, optional (default=True)
        Shuffle the samples.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed, RandomState, or None (use the global `np.random` state).

    Returns
    -------
    X : array of shape `shape`
        The generated array.
    rows : array of shape (n_clusters, X.shape[0],)
        The indicators for cluster membership of each row.
    cols : array of shape (n_clusters, X.shape[1],)
        The indicators for cluster membership of each column.

    References
    ----------
    .. [1] Kluger, Y., Basri, R., Chang, J. T., & Gerstein, M. (2003).
        Spectral biclustering of microarray data: coclustering genes
        and conditions. Genome research, 13(4), 703-716.

    See also
    --------
    make_biclusters
    """
    generator = check_random_state(random_state)
    if hasattr(n_clusters, "__len__"):
        n_row_clusters, n_col_clusters = n_clusters
    else:
        n_row_clusters = n_col_clusters = n_clusters
    # Row and column clusters of approximately equal sizes.
    n_rows, n_cols = shape
    row_sizes = generator.multinomial(n_rows,
                                      np.repeat(1.0 / n_row_clusters,
                                                n_row_clusters))
    col_sizes = generator.multinomial(n_cols,
                                      np.repeat(1.0 / n_col_clusters,
                                                n_col_clusters))
    row_labels = np.hstack([np.repeat(val, rep) for val, rep in
                            zip(range(n_row_clusters), row_sizes)])
    col_labels = np.hstack([np.repeat(val, rep) for val, rep in
                            zip(range(n_col_clusters), col_sizes)])
    result = np.zeros(shape, dtype=np.float64)
    for i in range(n_row_clusters):
        for j in range(n_col_clusters):
            selector = np.outer(row_labels == i, col_labels == j)
            result[selector] += generator.uniform(minval, maxval)
    if noise > 0:
        result += generator.normal(scale=noise, size=result.shape)
    if shuffle:
        result, row_idx, col_idx = _shuffle(result, random_state)
        row_labels = row_labels[row_idx]
        col_labels = col_labels[col_idx]
    # np.vstack requires a sequence of arrays; generator input is
    # deprecated and later removed in NumPy, so build explicit lists.
    rows = np.vstack([row_labels == label
                      for label in range(n_row_clusters)
                      for _ in range(n_col_clusters)])
    cols = np.vstack([col_labels == label
                      for _ in range(n_row_clusters)
                      for label in range(n_col_clusters)])
    return result, rows, cols
CHANGELOG
=========
8.1
---
* Add `$defaultOptions` to `JsonStreamReader` and `JsonStreamWriter`
8.0
---
* Remove `$streamToNativeValueTransformers` from `PropertyMetadata`
7.4
---
* The component is not marked as `@experimental` anymore
* Remove `nikic/php-parser` dependency
* Add `_current_object` to the context passed to value transformers during write operations
* Add `include_null_properties` option to encode the properties with `null` value
* Add synthetic properties support
* Deprecate `PropertyMetadata::$streamToNativeValueTransformers`, use `PropertyMetadata::$valueTransformers` instead
* Deprecate `PropertyMetadata::getNativeToStreamValueTransformer()` and `PropertyMetadata::getStreamToNativeValueTransformers()`, use `PropertyMetadata::getValueTransformers()` instead
* Deprecate `PropertyMetadata::withNativeToStreamValueTransformers()` and `PropertyMetadata::withStreamToNativeValueTransformers()`, use `PropertyMetadata::withValueTransformers()` instead
* Deprecate `PropertyMetadata::withAdditionalNativeToStreamValueTransformer()` and `PropertyMetadata::withAdditionalStreamToNativeValueTransformer`, use `PropertyMetadata::withAdditionalValueTransformer()` instead
7.3
---
* Introduce the component as experimental | unknown | github | https://github.com/symfony/symfony | src/Symfony/Component/JsonStreamer/CHANGELOG.md |
# (c) 2014, Matt Martz <matt@sivel.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
    """Reusable documentation fragment describing the standard file
    attribute options (path, state, mode, ownership, SELinux context)
    shared by Ansible's file-handling modules."""

    # Standard files documentation fragment
    # NOTE(review): `path` is `required: true` yet also declares
    # `default: []` — a required option normally has no default; verify
    # against the module argument spec before relying on it.
    DOCUMENTATION = """
options:
  path:
    description:
      - 'path to the file being managed.  Aliases: I(dest), I(name)'
    required: true
    default: []
    aliases: ['dest', 'name'] 
  state:
    description:
      - If C(directory), all immediate subdirectories will be created if they
        do not exist, since 1.7 they will be created with the supplied permissions.
        If C(file), the file will NOT be created if it does not exist, see the M(copy)
        or M(template) module if you want that behavior.  If C(link), the symbolic
        link will be created or changed. Use C(hard) for hardlinks. If C(absent),
        directories will be recursively deleted, and files or symlinks will be unlinked.
        If C(touch) (new in 1.4), an empty file will be created if the c(path) does not
        exist, while an existing file or directory will receive updated file access and
        modification times (similar to the way `touch` works from the command line).
    required: false
    default: file
    choices: [ file, link, directory, hard, touch, absent ]
  mode:
    required: false
    default: null
    choices: []
    description:
      - mode the file or directory should be, such as 0644 as would be fed to I(chmod)
  owner:
    required: false
    default: null
    choices: []
    description:
      - name of the user that should own the file/directory, as would be fed to I(chown)
  group:
    required: false
    default: null
    choices: []
    description:
      - name of the group that should own the file/directory, as would be fed to I(chown)
  src:
    required: false
    default: null
    choices: []
    description:
      - path of the file to link to (applies only to C(state=link)). Will accept absolute,
        relative and nonexisting paths. Relative paths are not expanded.
  seuser:
    required: false
    default: null
    choices: []
    description:
      - user part of SELinux file context. Will default to system policy, if
        applicable. If set to C(_default), it will use the C(user) portion of the
        policy if available
  serole:
    required: false
    default: null
    choices: []
    description:
      - role part of SELinux file context, C(_default) feature works as for I(seuser).
  setype:
    required: false
    default: null
    choices: []
    description:
      - type part of SELinux file context, C(_default) feature works as for I(seuser).
  selevel:
    required: false
    default: "s0"
    choices: []
    description:
      - level part of the SELinux file context. This is the MLS/MCS attribute,
        sometimes known as the C(range). C(_default) feature works as for
        I(seuser).
  recurse:
    required: false
    default: "no"
    choices: [ "yes", "no" ]
    version_added: "1.1"
    description:
      - recursively set the specified file attributes (applies only to state=directory)
  force:
    required: false
    default: "no"
    choices: [ "yes", "no" ]
    description:
      - 'force the creation of the symlinks in two cases: the source file does 
         not exist (but will appear later); the destination exists and is a file (so, we need to unlink the
         "path" file and create symlink to the "src" file in place of it).'
"""
# Copyright (C) 2008 One Laptop Per Child
# Copyright (C) 2009 Tomeu Vizoso
# Copyright (C) 2008-2013 Sugar Labs
# Copyright (C) 2013 Daniel Francis
# Copyright (C) 2013 Walter Bender
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import logging
from gettext import gettext as _
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gtk
from gi.repository import Gdk
from sugar3 import profile
from sugar3 import util
from sugar3.graphics import style
from sugar3.graphics.icon import Icon, CellRendererIcon
from sugar3.graphics.xocolor import XoColor
from sugar3.graphics.alert import Alert
from sugar3.graphics.palettemenu import PaletteMenuItem
from sugar3.graphics.scrollingdetector import ScrollingDetector
from sugar3.graphics.palettewindow import TreeViewInvoker
from sugar3.datastore import datastore
from jarabe.model import bundleregistry
from jarabe.model import desktop
from jarabe.view.palettes import ActivityPalette
from jarabe.journal import misc
from jarabe.util.normalize import normalize_string
class ActivitiesTreeView(Gtk.TreeView):
    """Tree view listing installed activities for the home list view.

    Each row shows per-view favorite toggles, the activity icon, title,
    version and install date.  Clicking the icon (or, with Gtk+ >= 3.8,
    activating the row) launches the activity.

    Fix vs. previous revision: the internal handler attribute
    ``button_reslease_handler`` was misspelled; it is now
    ``button_release_handler`` (internal only, both uses updated).
    """

    __gtype_name__ = 'SugarActivitiesTreeView'

    __gsignals__ = {
        'erase-activated': (GObject.SignalFlags.RUN_FIRST, None,
                            ([str])),
    }

    def __init__(self):
        Gtk.TreeView.__init__(self)

        self.set_can_focus(False)

        # Current (normalized) filter string; see set_filter().
        self._query = ''

        self.set_headers_visible(False)
        self.add_events(Gdk.EventMask.BUTTON_PRESS_MASK |
                        Gdk.EventMask.TOUCH_MASK |
                        Gdk.EventMask.BUTTON_RELEASE_MASK)
        selection = self.get_selection()
        selection.set_mode(Gtk.SelectionMode.NONE)

        self._model = ListModel()
        self._model.set_visible_func(self.__model_visible_cb)
        self.set_model(self._model)

        # One favorite-star column per configured desktop view.
        # NOTE(review): self.fav_column / self.cell_favorite end up
        # referencing only the last view's column/renderer after the loop.
        self._favorite_columns = []
        for i in range(desktop.get_number_of_views()):
            self.fav_column = Gtk.TreeViewColumn()
            self.cell_favorite = CellRendererFavorite(i)
            self.cell_favorite.connect('clicked', self.__favorite_clicked_cb)
            self.fav_column.pack_start(self.cell_favorite, True)
            self.fav_column.set_cell_data_func(self.cell_favorite,
                                               self.__favorite_set_data_cb)
            self.append_column(self.fav_column)
            self._favorite_columns.append(self.fav_column)

        self.cell_icon = CellRendererActivityIcon()
        column = Gtk.TreeViewColumn()
        column.pack_start(self.cell_icon, True)
        column.add_attribute(self.cell_icon, 'file-name',
                             self._model.column_icon)
        self.append_column(column)
        self._icon_column = column

        # Activity title column (expands to fill available width).
        cell_text = Gtk.CellRendererText()
        cell_text.props.ellipsize = style.ELLIPSIZE_MODE_DEFAULT
        cell_text.props.ellipsize_set = True
        column = Gtk.TreeViewColumn()
        column.props.sizing = Gtk.TreeViewColumnSizing.GROW_ONLY
        column.props.expand = True
        column.set_sort_column_id(self._model.column_title)
        column.pack_start(cell_text, True)
        column.add_attribute(cell_text, 'markup', self._model.column_title)
        self.append_column(column)

        # Bundle version column (right-aligned).
        cell_text = Gtk.CellRendererText()
        cell_text.props.xalign = 1
        self.version_column = Gtk.TreeViewColumn()
        self.version_column.set_alignment(1)
        self.version_column.props.sizing = Gtk.TreeViewColumnSizing.GROW_ONLY
        self.version_column.props.resizable = True
        self.version_column.props.reorderable = True
        self.version_column.props.expand = True
        self.version_column.set_sort_column_id(self._model.column_version)
        self.version_column.pack_start(cell_text, True)
        self.version_column.add_attribute(cell_text, 'text',
                                          self._model.column_version_text)
        self.append_column(self.version_column)

        # Install date column (right-aligned).
        cell_text = Gtk.CellRendererText()
        cell_text.props.xalign = 1
        self.date_column = Gtk.TreeViewColumn()
        self.date_column.set_alignment(1)
        self.date_column.props.sizing = Gtk.TreeViewColumnSizing.GROW_ONLY
        self.date_column.props.resizable = True
        self.date_column.props.reorderable = True
        self.date_column.props.expand = True
        self.date_column.set_sort_column_id(self._model.column_date)
        self.date_column.pack_start(cell_text, True)
        self.date_column.add_attribute(cell_text, 'text',
                                       self._model.column_date_text)
        self.append_column(self.date_column)

        self.set_search_column(self._model.column_title)
        self.set_enable_search(False)

        self._activity_selected = None
        self._invoker = TreeViewInvoker()
        self._invoker.attach_treeview(self)

        self.button_press_handler = None
        self.button_release_handler = None
        self.icon_clicked_handler = None
        self.row_activated_handler = None
        if hasattr(self.props, 'activate_on_single_click'):
            # Gtk+ 3.8 and later
            self.props.activate_on_single_click = True
            self.row_activated_handler = self.connect('row-activated',
                                                      self.__row_activated_cb)
        else:
            # Older Gtk+: emulate single-click activation by pairing
            # press/release events on the same row.
            self.icon_clicked_handler = self.cell_icon.connect(
                'clicked', self.__icon_clicked_cb)
            self.button_press_handler = self.connect(
                'button-press-event', self.__button_press_cb)
            self.button_release_handler = self.connect(
                'button-release-event', self.__button_release_cb)
            self._row_activated_armed_path = None

    def __favorite_set_data_cb(self, column, cell, model, tree_iter, data):
        """Color the favorite star with the profile color when set."""
        favorite = \
            model[tree_iter][self._model.column_favorites[cell.favorite_view]]
        if favorite:
            cell.props.xo_color = profile.get_color()
        else:
            cell.props.xo_color = None

    def __favorite_clicked_cb(self, cell, path):
        """Toggle the clicked bundle's favorite flag for this view."""
        row = self.get_model()[path]
        registry = bundleregistry.get_registry()
        registry.set_bundle_favorite(
            row[self._model.column_bundle_id],
            row[self._model.column_version],
            not row[self._model.column_favorites[cell.favorite_view]],
            cell.favorite_view)

    def __icon_clicked_cb(self, cell, path):
        """
        A click on activity icon cell is to start an activity.
        """
        logging.debug('__icon_clicked_cb')
        self._start_activity(path)

    def __row_activated_cb(self, treeview, path, col):
        """
        A click on cells other than the favorite toggle is to start an
        activity.  Gtk+ 3.8 and later.
        """
        logging.debug('__row_activated_cb')
        if col is not treeview.get_column(0):
            self._start_activity(path)

    def __button_to_path(self, event, event_type):
        """Map a button event to a launchable row path, or None.

        Returns None for events outside the bin window, non-primary
        buttons, the wrong event type, or clicks on the icon/favorite
        columns (those have their own handlers).
        """
        if event.window != self.get_bin_window() or \
           event.button != 1 or \
           event.type != event_type:
            return None

        pos = self.get_path_at_pos(int(event.x), int(event.y))
        if pos is None:
            return None

        path, column, x_, y_ = pos
        if column == self._icon_column:
            return None
        if column in self._favorite_columns:
            return None
        return path

    def __button_press_cb(self, widget, event):
        """Arm row activation on primary button press (pre-Gtk+ 3.8)."""
        logging.debug('__button_press_cb')
        path = self.__button_to_path(event, Gdk.EventType.BUTTON_PRESS)
        if path is None:
            return
        self._row_activated_armed_path = path

    def __button_release_cb(self, widget, event):
        """Launch the armed row if the release matches the press path."""
        logging.debug('__button_release_cb')
        path = self.__button_to_path(event, Gdk.EventType.BUTTON_RELEASE)
        if path is None:
            return
        if self._row_activated_armed_path != path:
            return
        self._start_activity(path)
        self._row_activated_armed_path = None

    def _start_activity(self, path):
        """Launch the activity bundle stored in the row at *path*."""
        model = self.get_model()
        row = model[path]
        registry = bundleregistry.get_registry()
        bundle = registry.get_bundle(row[self._model.column_bundle_id])
        misc.launch(bundle)

    def set_filter(self, query):
        """Set a new query and refilter the model, return the number
        of matching activities.
        """
        if isinstance(query, bytes):
            query = query.decode()
        self._query = normalize_string(query)
        self.get_model().refilter()
        matches = self.get_model().iter_n_children(None)
        return matches

    def __model_visible_cb(self, model, tree_iter, data):
        """Row visibility: substring match of the normalized title."""
        title = model[tree_iter][self._model.column_title]
        if isinstance(title, bytes):
            title = title.decode('utf-8')
        title = normalize_string(title)
        return title is not None and title.find(self._query) > -1

    def create_palette(self, path, column):
        """Build the activity palette for icon-column hovers/clicks."""
        if column == self._icon_column:
            row = self.get_model()[path]
            bundle_id = row[self.get_model().column_bundle_id]
            registry = bundleregistry.get_registry()
            palette = ActivityListPalette(registry.get_bundle(bundle_id))
            palette.connect('erase-activated', self.__erase_activated_cb,
                            bundle_id)
            return palette

    def __erase_activated_cb(self, palette, event, bundle_id):
        """Re-emit the palette's erase request at the tree-view level."""
        self.emit('erase-activated', bundle_id)

    def get_activities_selected(self):
        """Return [{'name': ..., 'bundle_id': ...}] for all visible rows."""
        activities = []
        for row in self.get_model():
            activities.append(
                {'name': row[self.get_model().column_activity_name],
                 'bundle_id': row[self.get_model().column_bundle_id]})
        return activities

    def run_activity(self, bundle_id, resume_mode):
        """Start *bundle_id*; in resume mode, resume its latest journal
        entry when one exists, otherwise launch fresh."""
        if not resume_mode:
            registry = bundleregistry.get_registry()
            bundle = registry.get_bundle(bundle_id)
            misc.launch(bundle)
            return

        self._activity_selected = bundle_id
        query = {'activity': bundle_id}
        properties = ['uid', 'title', 'icon-color', 'activity', 'activity_id',
                      'mime_type', 'mountpoint']
        datastore.find(query, sorting=['+timestamp'],
                       limit=1, properties=properties,
                       reply_handler=self.__get_last_activity_reply_handler_cb,
                       error_handler=self.__get_last_activity_error_handler_cb)

    def __get_last_activity_reply_handler_cb(self, entries, total_count):
        """Resume the newest entry, or launch fresh if none exists."""
        registry = bundleregistry.get_registry()
        if entries:
            misc.resume(entries[0], entries[0]['activity'])
        else:
            bundle = registry.get_bundle(self._activity_selected)
            misc.launch(bundle)

    def __get_last_activity_error_handler_cb(self, entries, total_count):
        # Best-effort: a datastore lookup failure simply aborts the resume.
        pass

    def connect_to_scroller(self, scrolled):
        """Suspend palette invokers while the view is being scrolled."""
        scrolled.connect('scroll-start', self._scroll_start_cb)
        scrolled.connect('scroll-end', self._scroll_end_cb)
        self.cell_icon.connect_to_scroller(scrolled)
        # NOTE(review): only the favorite renderer of the last desktop
        # view is connected here (see the loop in __init__) — confirm
        # whether all views' renderers should be connected.
        self.cell_favorite.connect_to_scroller(scrolled)

    def _scroll_start_cb(self, event):
        self._invoker.detach()

    def _scroll_end_cb(self, event):
        self._invoker.attach_treeview(self)
class ListModel(Gtk.TreeModelSort):
__gtype_name__ = 'SugarListModel'
def __init__(self):
self.column_bundle_id = 0
self.column_favorites = []
for i in range(desktop.get_number_of_views()):
self.column_favorites.append(self.column_bundle_id + i + 1)
self.column_icon = self.column_favorites[-1] + 1
self.column_title = self.column_icon + 1
self.column_version = self.column_title + 1
self.column_version_text = self.column_version + 1
self.column_date = self.column_version_text + 1
self.column_date_text = self.column_date + 1
self.column_activity_name = self.column_date_text + 1
column_types = [str, str, str, str, str, int, str, str]
for i in range(desktop.get_number_of_views()):
column_types.insert(1, bool)
self._model = Gtk.ListStore()
self._model.set_column_types(column_types)
self._model_filter = self._model.filter_new()
Gtk.TreeModelSort.__init__(self, model=self._model_filter)
self.set_sort_column_id(self.column_title, Gtk.SortType.ASCENDING)
GLib.idle_add(self.__connect_to_bundle_registry_cb)
def __connect_to_bundle_registry_cb(self):
registry = bundleregistry.get_registry()
for info in registry:
self._add_activity(info)
registry.connect('bundle-added', self.__activity_added_cb)
registry.connect('bundle-changed', self.__activity_changed_cb)
registry.connect('bundle-removed', self.__activity_removed_cb)
def __activity_added_cb(self, activity_registry, activity_info):
self._add_activity(activity_info)
def __activity_changed_cb(self, activity_registry, activity_info):
bundle_id = activity_info.get_bundle_id()
version = activity_info.get_activity_version()
favorites = []
for i in range(desktop.get_number_of_views()):
favorites.append(
activity_registry.is_bundle_favorite(bundle_id, version, i))
for row in self._model:
if row[self.column_bundle_id] == bundle_id and \
row[self.column_version] == version:
for i in range(desktop.get_number_of_views()):
row[self.column_favorites[i]] = favorites[i]
return
def __activity_removed_cb(self, activity_registry, activity_info):
bundle_id = activity_info.get_bundle_id()
version = activity_info.get_activity_version()
for row in self._model:
if row[self.column_bundle_id] == bundle_id and \
row[self.column_version] == version:
self._model.remove(row.iter)
return
def _add_activity(self, activity_info):
if activity_info.get_bundle_id() == 'org.laptop.JournalActivity':
return
if not activity_info.get_show_launcher():
return
timestamp = activity_info.get_installation_time()
version = activity_info.get_activity_version()
registry = bundleregistry.get_registry()
favorites = []
for i in range(desktop.get_number_of_views()):
favorites.append(
registry.is_bundle_favorite(activity_info.get_bundle_id(),
version,
i))
tag_list = activity_info.get_tags()
if tag_list is None or not tag_list:
title = '<b>%s</b>' % activity_info.get_name()
else:
tags = ', '.join(tag_list)
title = '<b>%s</b>\n' \
'<span style="italic" weight="light">%s</span>' % \
(activity_info.get_name(), tags)
model_list = [activity_info.get_bundle_id()]
for i in range(desktop.get_number_of_views()):
model_list.append(favorites[i])
model_list.append(activity_info.get_icon())
model_list.append(title)
model_list.append(version)
model_list.append(_('Version %s') % version)
model_list.append(int(timestamp))
model_list.append(util.timestamp_to_elapsed_string(timestamp))
model_list.append(activity_info.get_name())
self._model.append(model_list)
def set_visible_func(self, func):
self._model_filter.set_visible_func(func)
def refilter(self):
self._model_filter.refilter()
class CellRendererFavorite(CellRendererIcon):
__gtype_name__ = 'SugarCellRendererFavorite'
def __init__(self, favorite_view):
CellRendererIcon.__init__(self)
self.favorite_view = favorite_view
self.props.width = style.GRID_CELL_SIZE
self.props.height = style.GRID_CELL_SIZE
self.props.size = style.SMALL_ICON_SIZE
self.props.icon_name = desktop.get_favorite_icons()[favorite_view]
self.props.mode = Gtk.CellRendererMode.ACTIVATABLE
class CellRendererActivityIcon(CellRendererIcon):
__gtype_name__ = 'SugarCellRendererActivityIcon'
__gsignals__ = {
'erase-activated': (GObject.SignalFlags.RUN_FIRST, None,
([str])),
}
def __init__(self):
CellRendererIcon.__init__(self)
self.props.width = style.GRID_CELL_SIZE
self.props.height = style.GRID_CELL_SIZE
self.props.size = style.STANDARD_ICON_SIZE
self.props.stroke_color = style.COLOR_BUTTON_GREY.get_svg()
self.props.fill_color = style.COLOR_TRANSPARENT.get_svg()
self.props.mode = Gtk.CellRendererMode.ACTIVATABLE
prelit_color = profile.get_color()
self.props.prelit_stroke_color = prelit_color.get_stroke_color()
self.props.prelit_fill_color = prelit_color.get_fill_color()
class ClearMessageBox(Gtk.EventBox):
def __init__(self, message, button_callback):
Gtk.EventBox.__init__(self)
self.modify_bg(Gtk.StateType.NORMAL,
style.COLOR_WHITE.get_gdk_color())
alignment = Gtk.Alignment.new(0.5, 0.5, 0.1, 0.1)
self.add(alignment)
alignment.show()
box = Gtk.VBox()
alignment.add(box)
box.show()
icon = Icon(pixel_size=style.LARGE_ICON_SIZE,
icon_name='system-search',
stroke_color=style.COLOR_BUTTON_GREY.get_svg(),
fill_color=style.COLOR_TRANSPARENT.get_svg())
box.pack_start(icon, expand=True, fill=False, padding=0)
icon.show()
label = Gtk.Label()
color = style.COLOR_BUTTON_GREY.get_html()
label.set_markup('<span weight="bold" color="%s">%s</span>' % (
color, GLib.markup_escape_text(message)))
box.pack_start(label, expand=True, fill=False, padding=0)
label.show()
button_box = Gtk.HButtonBox()
button_box.set_layout(Gtk.ButtonBoxStyle.CENTER)
box.pack_start(button_box, False, True, 0)
button_box.show()
button = Gtk.Button(label=_('Clear search'))
button.connect('clicked', button_callback)
button.props.image = Icon(icon_name='dialog-cancel',
pixel_size=style.SMALL_ICON_SIZE)
button_box.pack_start(button, expand=True, fill=False, padding=0)
button.show()
class ActivitiesList(Gtk.VBox):
__gtype_name__ = 'SugarActivitiesList'
__gsignals__ = {
'clear-clicked': (GObject.SignalFlags.RUN_FIRST, None, ([])),
}
def __init__(self):
logging.debug('STARTUP: Loading the activities list')
Gtk.VBox.__init__(self)
self._scrolled_window = Gtk.ScrolledWindow()
self._scrolled_window.set_can_focus(False)
self._scrolled_window.set_policy(Gtk.PolicyType.NEVER,
Gtk.PolicyType.AUTOMATIC)
self._scrolled_window.set_shadow_type(Gtk.ShadowType.NONE)
self._scrolled_window.connect('key-press-event',
self.__key_press_event_cb)
self.pack_start(self._scrolled_window, True, True, 0)
self._scrolled_window.show()
self._tree_view = ActivitiesTreeView()
self._tree_view.connect('erase-activated', self.__erase_activated_cb)
self._scrolled_window.add(self._tree_view)
self._tree_view.show()
scrolling_detector = ScrollingDetector(self._scrolled_window)
self._tree_view.connect_to_scroller(scrolling_detector)
self._alert = None
self._clear_message_box = None
desktop_model = desktop.get_model()
desktop_model.connect('desktop-view-icons-changed',
self.__desktop_view_icons_changed_cb)
def grab_focus(self):
# overwrite grab focus in order to grab focus from the parent
self._tree_view.grab_focus()
def set_filter(self, query):
matches = self._tree_view.set_filter(query)
if matches == 0:
self._show_clear_message()
else:
self._hide_clear_message()
def __desktop_view_icons_changed_cb(self, model):
self._tree_view.destroy()
self._tree_view = ActivitiesTreeView()
self._tree_view.connect('erase-activated', self.__erase_activated_cb)
self._scrolled_window.add(self._tree_view)
self._tree_view.show()
def __key_press_event_cb(self, scrolled_window, event):
keyname = Gdk.keyval_name(event.keyval)
vadjustment = scrolled_window.props.vadjustment
if keyname == 'Up':
if vadjustment.props.value > vadjustment.props.lower:
vadjustment.props.value -= vadjustment.props.step_increment
elif keyname == 'Down':
max_value = vadjustment.props.upper - vadjustment.props.page_size
if vadjustment.props.value < max_value:
vadjustment.props.value = min(
vadjustment.props.value + vadjustment.props.step_increment,
max_value)
else:
return False
return True
def _show_clear_message(self):
if self._clear_message_box in self.get_children():
return
if self._scrolled_window in self.get_children():
self.remove(self._scrolled_window)
self._clear_message_box = ClearMessageBox(
message=_('No matching activities'),
button_callback=self.__clear_button_clicked_cb)
self.pack_end(self._clear_message_box, True, True, 0)
self._clear_message_box.show()
def __clear_button_clicked_cb(self, button):
self.emit('clear-clicked')
def _hide_clear_message(self):
if self._scrolled_window in self.get_children():
return
if self._clear_message_box in self.get_children():
self.remove(self._clear_message_box)
self._clear_message_box = None
self.pack_end(self._scrolled_window, True, True, 0)
self._scrolled_window.show()
def add_alert(self, alert):
if self._alert is not None:
self.remove_alert()
self._alert = alert
self.pack_start(alert, False, True, 0)
self.reorder_child(alert, 0)
def remove_alert(self):
self.remove(self._alert)
self._alert = None
def __erase_activated_cb(self, tree_view, bundle_id):
registry = bundleregistry.get_registry()
activity_info = registry.get_bundle(bundle_id)
alert = Alert()
alert.props.title = _('Confirm erase')
alert.props.msg = \
_('Confirm erase: Do you want to permanently erase %s?') \
% activity_info.get_name()
cancel_icon = Icon(icon_name='dialog-cancel')
alert.add_button(Gtk.ResponseType.CANCEL, _('Keep'), cancel_icon)
erase_icon = Icon(icon_name='dialog-ok')
alert.add_button(Gtk.ResponseType.OK, _('Erase'), erase_icon)
alert.connect('response', self.__erase_confirmation_dialog_response_cb,
bundle_id)
self.add_alert(alert)
def __erase_confirmation_dialog_response_cb(self, alert, response_id,
bundle_id):
self.remove_alert()
if response_id == Gtk.ResponseType.OK:
registry = bundleregistry.get_registry()
bundle = registry.get_bundle(bundle_id)
registry.uninstall(bundle, delete_profile=True)
def get_activities_selected(self):
return self._tree_view.get_activities_selected()
def run_activity(self, bundle_id, resume_mode):
self._tree_view.run_activity(bundle_id, resume_mode)
class ActivityListPalette(ActivityPalette):
__gtype_name__ = 'SugarActivityListPalette'
__gsignals__ = {
'erase-activated': (GObject.SignalFlags.RUN_FIRST, None,
([str])),
}
def __init__(self, activity_info):
ActivityPalette.__init__(self, activity_info)
self._bundle_id = activity_info.get_bundle_id()
self._version = activity_info.get_activity_version()
registry = bundleregistry.get_registry()
self._favorites = []
self._favorite_items = []
self._favorite_icons = []
for i in range(desktop.get_number_of_views()):
self._favorites.append(
registry.is_bundle_favorite(self._bundle_id, self._version, i))
self._favorite_items.append(PaletteMenuItem())
self._favorite_icons.append(
Icon(icon_name=desktop.get_favorite_icons()[i],
pixel_size=style.SMALL_ICON_SIZE))
self._favorite_items[i].set_image(self._favorite_icons[i])
self._favorite_icons[i].show()
self._favorite_items[i].connect(
'activate', self.__change_favorite_activate_cb, i)
self.menu_box.append_item(self._favorite_items[i])
self._favorite_items[i].show()
if activity_info.is_user_activity():
self._add_erase_option(registry, activity_info)
registry = bundleregistry.get_registry()
self._activity_changed_sid = []
for i in range(desktop.get_number_of_views()):
self._activity_changed_sid.append(
registry.connect('bundle-changed',
self.__activity_changed_cb, i))
self._update_favorite_item(i)
self.menu_box.connect('destroy', self.__destroy_cb)
def _add_erase_option(self, registry, activity_info):
menu_item = PaletteMenuItem(_('Erase'), 'list-remove')
menu_item.connect('activate', self.__erase_activate_cb)
self.menu_box.append_item(menu_item)
menu_item.show()
if not os.access(activity_info.get_path(), os.W_OK) or \
registry.is_activity_protected(self._bundle_id):
menu_item.props.sensitive = False
def __destroy_cb(self, palette):
registry = bundleregistry.get_registry()
for i in range(desktop.get_number_of_views()):
registry.disconnect(self._activity_changed_sid[i])
def _update_favorite_item(self, favorite_view):
if self._favorites[favorite_view]:
self._favorite_items[favorite_view].set_label(_('Remove favorite'))
xo_color = XoColor('%s,%s' % (style.COLOR_WHITE.get_svg(),
style.COLOR_TRANSPARENT.get_svg()))
else:
self._favorite_items[favorite_view].set_label(_('Make favorite'))
xo_color = profile.get_color()
self._favorite_icons[favorite_view].props.xo_color = xo_color
def __change_favorite_activate_cb(self, menu_item, favorite_view):
registry = bundleregistry.get_registry()
registry.set_bundle_favorite(self._bundle_id,
self._version,
not self._favorites[favorite_view],
favorite_view)
def __activity_changed_cb(self, activity_registry, activity_info,
favorite_view):
if activity_info.get_bundle_id() == self._bundle_id and \
activity_info.get_activity_version() == self._version:
registry = bundleregistry.get_registry()
self._favorites[favorite_view] = registry.is_bundle_favorite(
self._bundle_id, self._version, favorite_view)
self._update_favorite_item(favorite_view)
def __erase_activate_cb(self, menu_item):
self.emit('erase-activated', self._bundle_id) | unknown | codeparrot/codeparrot-clean | ||
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package views
import (
"encoding/json"
"fmt"
"time"
"github.com/hashicorp/terraform/internal/command/arguments"
)
// The StateLocker view is used to display locking/unlocking status messages
// if the state lock process takes longer than expected.
type StateLocker interface {
Locking()
Unlocking()
}
// NewStateLocker returns an initialized StateLocker implementation for the given ViewType.
func NewStateLocker(vt arguments.ViewType, view *View) StateLocker {
switch vt {
case arguments.ViewHuman:
return &StateLockerHuman{view: view}
case arguments.ViewJSON:
return &StateLockerJSON{view: view}
default:
panic(fmt.Sprintf("unknown view type %v", vt))
}
}
// StateLockerHuman is an implementation of StateLocker which prints status to
// a terminal.
type StateLockerHuman struct {
view *View
}
var _ StateLocker = (*StateLockerHuman)(nil)
var _ StateLocker = (*StateLockerJSON)(nil)
func (v *StateLockerHuman) Locking() {
v.view.streams.Println("Acquiring state lock. This may take a few moments...")
}
func (v *StateLockerHuman) Unlocking() {
v.view.streams.Println("Releasing state lock. This may take a few moments...")
}
// StateLockerJSON is an implementation of StateLocker which prints the state lock status
// to a terminal in machine-readable JSON form.
type StateLockerJSON struct {
view *View
}
func (v *StateLockerJSON) Locking() {
current_timestamp := time.Now().Format(time.RFC3339)
json_data := map[string]string{
"@level": "info",
"@message": "Acquiring state lock. This may take a few moments...",
"@module": "terraform.ui",
"@timestamp": current_timestamp,
"type": "state_lock_acquire"}
lock_info_message, _ := json.Marshal(json_data)
v.view.streams.Println(string(lock_info_message))
}
func (v *StateLockerJSON) Unlocking() {
current_timestamp := time.Now().Format(time.RFC3339)
json_data := map[string]string{
"@level": "info",
"@message": "Releasing state lock. This may take a few moments...",
"@module": "terraform.ui",
"@timestamp": current_timestamp,
"type": "state_lock_release"}
lock_info_message, _ := json.Marshal(json_data)
v.view.streams.Println(string(lock_info_message))
} | go | github | https://github.com/hashicorp/terraform | internal/command/views/state_locker.go |
# frozen_string_literal: true
unless defined?(::JSON::JSON_LOADED) and ::JSON::JSON_LOADED
require 'json'
end
class Struct
# See #as_json.
def self.json_create(object)
new(*object['v'])
end
# Methods <tt>Struct#as_json</tt> and +Struct.json_create+ may be used
# to serialize and deserialize a \Struct object;
# see Marshal[rdoc-ref:Marshal].
#
# \Method <tt>Struct#as_json</tt> serializes +self+,
# returning a 2-element hash representing +self+:
#
# require 'json/add/struct'
# Customer = Struct.new('Customer', :name, :address, :zip)
# x = Struct::Customer.new.as_json
# # => {"json_class"=>"Struct::Customer", "v"=>[nil, nil, nil]}
#
# \Method +JSON.create+ deserializes such a hash, returning a \Struct object:
#
# Struct::Customer.json_create(x)
# # => #<struct Struct::Customer name=nil, address=nil, zip=nil>
#
def as_json(*)
klass = self.class.name
klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!"
{
JSON.create_id => klass,
'v' => values,
}
end
# Returns a JSON string representing +self+:
#
# require 'json/add/struct'
# Customer = Struct.new('Customer', :name, :address, :zip)
# puts Struct::Customer.new.to_json
#
# Output:
#
# {"json_class":"Struct","t":{'name':'Rowdy',"age":null}}
#
def to_json(*args)
as_json.to_json(*args)
end
end | ruby | github | https://github.com/ruby/ruby | ext/json/lib/json/add/struct.rb |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.gs.contract;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractUnbufferTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
public class ITestGoogleContractUnbuffer extends AbstractContractUnbufferTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new GoogleContract(conf);
}
} | java | github | https://github.com/apache/hadoop | hadoop-cloud-storage-project/hadoop-gcp/src/test/java/org/apache/hadoop/fs/gs/contract/ITestGoogleContractUnbuffer.java |
#!/usr/bin/python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Compute global objects.
Global objects are defined by interfaces with [Global] or [PrimaryGlobal] on
their definition: http://heycam.github.io/webidl/#Global
Design document: http://www.chromium.org/developers/design-documents/idl-build
"""
# pylint: disable=relative-import
import optparse
import os
import sys
from utilities import get_file_contents
from utilities import get_interface_extended_attributes_from_idl
from utilities import idl_filename_to_interface_name
from utilities import read_file_to_list
from utilities import read_pickle_files
from utilities import write_pickle_file
GLOBAL_EXTENDED_ATTRIBUTES = frozenset([
'Global',
'PrimaryGlobal',
])
def parse_options():
usage = 'Usage: %prog [options] [GlobalObjects.pickle]'
parser = optparse.OptionParser(usage=usage)
parser.add_option('--idl-files-list', help='file listing IDL files')
parser.add_option('--global-objects-component-files', action='append',
help='optionally preceeded input pickle filename.')
options, args = parser.parse_args()
if options.idl_files_list is None:
parser.error('Must specify a file listing IDL files using --idl-files-list.')
if options.global_objects_component_files is None:
options.global_objects_component_files = []
if len(args) != 1:
parser.error('Must specify an output pickle filename as an argument')
return options, args
def dict_union(dicts):
return dict((k, v) for d in dicts for k, v in d.iteritems())
def idl_file_to_global_names(idl_filename):
"""Returns global names, if any, for an IDL file.
If the [Global] or [PrimaryGlobal] extended attribute is declared with an
identifier list argument, then those identifiers are the interface's global
names; otherwise, the interface has a single global name, which is the
interface's identifier (http://heycam.github.io/webidl/#Global).
"""
interface_name = idl_filename_to_interface_name(idl_filename)
full_path = os.path.realpath(idl_filename)
idl_file_contents = get_file_contents(full_path)
extended_attributes = get_interface_extended_attributes_from_idl(idl_file_contents)
global_keys = GLOBAL_EXTENDED_ATTRIBUTES.intersection(
extended_attributes.iterkeys())
if not global_keys:
return
if len(global_keys) > 1:
raise ValueError('The [Global] and [PrimaryGlobal] extended attributes '
'MUST NOT be declared on the same interface.')
global_key = next(iter(global_keys))
global_value = extended_attributes[global_key]
if global_value:
return global_value.strip('()').split(',')
return [interface_name]
def idl_files_to_interface_name_global_names(idl_files):
"""Yields pairs (interface_name, global_names) found in IDL files."""
for idl_filename in idl_files:
interface_name = idl_filename_to_interface_name(idl_filename)
global_names = idl_file_to_global_names(idl_filename)
if global_names:
yield interface_name, global_names
################################################################################
def main():
options, args = parse_options()
output_global_objects_filename = args.pop()
interface_name_global_names = dict_union(
existing_interface_name_global_names
for existing_interface_name_global_names
in read_pickle_files(options.global_objects_component_files))
# Input IDL files are passed in a file, due to OS command line length
# limits. This is generated at GYP time, which is ok b/c files are static.
idl_files = read_file_to_list(options.idl_files_list)
interface_name_global_names.update(
idl_files_to_interface_name_global_names(idl_files))
write_pickle_file(output_global_objects_filename,
interface_name_global_names)
if __name__ == '__main__':
sys.exit(main()) | unknown | codeparrot/codeparrot-clean | ||
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_analytic_balance(osv.osv_memory):
_name = 'account.analytic.balance'
_description = 'Account Analytic Balance'
_columns = {
'date1': fields.date('Start of period', required=True),
'date2': fields.date('End of period', required=True),
'empty_acc': fields.boolean('Empty Accounts ? ', help='Check if you want to display Accounts with 0 balance too.'),
}
_defaults = {
'date1': lambda *a: time.strftime('%Y-01-01'),
'date2': lambda *a: time.strftime('%Y-%m-%d')
}
def check_report(self, cr, uid, ids, context=None):
if context is None:
context = {}
data = self.read(cr, uid, ids)[0]
datas = {
'ids': context.get('active_ids', []),
'model': 'account.analytic.account',
'form': data
}
datas['form']['active_ids'] = context.get('active_ids', False)
return self.pool['report'].get_action(cr, uid, [], 'account.report_analyticbalance', data=datas, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | unknown | codeparrot/codeparrot-clean | ||
/**
* \file chachapoly.h
*
* \brief This file contains the AEAD-ChaCha20-Poly1305 definitions and
* functions.
*
* ChaCha20-Poly1305 is an algorithm for Authenticated Encryption
* with Associated Data (AEAD) that can be used to encrypt and
* authenticate data. It is based on ChaCha20 and Poly1305 by Daniel
* Bernstein and was standardized in RFC 7539.
*
* \author Daniel King <damaki.gh@gmail.com>
*/
/*
* Copyright The Mbed TLS Contributors
* SPDX-License-Identifier: Apache-2.0 OR GPL-2.0-or-later
*/
#ifndef MBEDTLS_CHACHAPOLY_H
#define MBEDTLS_CHACHAPOLY_H
#include "mbedtls/private_access.h"
#include "mbedtls/build_info.h"
/* for shared error codes */
#include "mbedtls/poly1305.h"
/** The requested operation is not permitted in the current state. */
#define MBEDTLS_ERR_CHACHAPOLY_BAD_STATE -0x0054
/** Authenticated decryption failed: data was not authentic. */
#define MBEDTLS_ERR_CHACHAPOLY_AUTH_FAILED -0x0056
#ifdef __cplusplus
extern "C" {
#endif
typedef enum {
MBEDTLS_CHACHAPOLY_ENCRYPT, /**< The mode value for performing encryption. */
MBEDTLS_CHACHAPOLY_DECRYPT /**< The mode value for performing decryption. */
}
mbedtls_chachapoly_mode_t;
#if !defined(MBEDTLS_CHACHAPOLY_ALT)
#include "mbedtls/chacha20.h"
typedef struct mbedtls_chachapoly_context {
mbedtls_chacha20_context MBEDTLS_PRIVATE(chacha20_ctx); /**< The ChaCha20 context. */
mbedtls_poly1305_context MBEDTLS_PRIVATE(poly1305_ctx); /**< The Poly1305 context. */
uint64_t MBEDTLS_PRIVATE(aad_len); /**< The length (bytes) of the Additional Authenticated Data. */
uint64_t MBEDTLS_PRIVATE(ciphertext_len); /**< The length (bytes) of the ciphertext. */
int MBEDTLS_PRIVATE(state); /**< The current state of the context. */
mbedtls_chachapoly_mode_t MBEDTLS_PRIVATE(mode); /**< Cipher mode (encrypt or decrypt). */
}
mbedtls_chachapoly_context;
#else /* !MBEDTLS_CHACHAPOLY_ALT */
#include "chachapoly_alt.h"
#endif /* !MBEDTLS_CHACHAPOLY_ALT */
/**
* \brief This function initializes the specified ChaCha20-Poly1305 context.
*
* It must be the first API called before using
* the context. It must be followed by a call to
* \c mbedtls_chachapoly_setkey() before any operation can be
* done, and to \c mbedtls_chachapoly_free() once all
* operations with that context have been finished.
*
* In order to encrypt or decrypt full messages at once, for
* each message you should make a single call to
* \c mbedtls_chachapoly_crypt_and_tag() or
* \c mbedtls_chachapoly_auth_decrypt().
*
* In order to encrypt messages piecewise, for each
* message you should make a call to
* \c mbedtls_chachapoly_starts(), then 0 or more calls to
* \c mbedtls_chachapoly_update_aad(), then 0 or more calls to
* \c mbedtls_chachapoly_update(), then one call to
* \c mbedtls_chachapoly_finish().
*
* \warning Decryption with the piecewise API is discouraged! Always
* use \c mbedtls_chachapoly_auth_decrypt() when possible!
*
* If however this is not possible because the data is too
* large to fit in memory, you need to:
*
* - call \c mbedtls_chachapoly_starts() and (if needed)
* \c mbedtls_chachapoly_update_aad() as above,
* - call \c mbedtls_chachapoly_update() multiple times and
* ensure its output (the plaintext) is NOT used in any other
* way than placing it in temporary storage at this point,
* - call \c mbedtls_chachapoly_finish() to compute the
* authentication tag and compared it in constant time to the
* tag received with the ciphertext.
*
* If the tags are not equal, you must immediately discard
* all previous outputs of \c mbedtls_chachapoly_update(),
* otherwise you can now safely use the plaintext.
*
* \param ctx The ChachaPoly context to initialize. Must not be \c NULL.
*/
void mbedtls_chachapoly_init(mbedtls_chachapoly_context *ctx);
/**
* \brief This function releases and clears the specified
* ChaCha20-Poly1305 context.
*
* \param ctx The ChachaPoly context to clear. This may be \c NULL, in which
* case this function is a no-op.
*/
void mbedtls_chachapoly_free(mbedtls_chachapoly_context *ctx);
/**
* \brief This function sets the ChaCha20-Poly1305
* symmetric encryption key.
*
* \param ctx The ChaCha20-Poly1305 context to which the key should be
* bound. This must be initialized.
* \param key The \c 256 Bit (\c 32 Bytes) key.
*
* \return \c 0 on success.
* \return A negative error code on failure.
*/
int mbedtls_chachapoly_setkey(mbedtls_chachapoly_context *ctx,
const unsigned char key[32]);
/**
* \brief This function starts a ChaCha20-Poly1305 encryption or
* decryption operation.
*
* \warning You must never use the same nonce twice with the same key.
* This would void any confidentiality and authenticity
* guarantees for the messages encrypted with the same nonce
* and key.
*
* \note If the context is being used for AAD only (no data to
* encrypt or decrypt) then \p mode can be set to any value.
*
* \warning Decryption with the piecewise API is discouraged, see the
* warning on \c mbedtls_chachapoly_init().
*
* \param ctx The ChaCha20-Poly1305 context. This must be initialized
* and bound to a key.
* \param nonce The nonce/IV to use for the message.
* This must be a readable buffer of length \c 12 Bytes.
* \param mode The operation to perform: #MBEDTLS_CHACHAPOLY_ENCRYPT or
* #MBEDTLS_CHACHAPOLY_DECRYPT (discouraged, see warning).
*
* \return \c 0 on success.
* \return A negative error code on failure.
*/
int mbedtls_chachapoly_starts(mbedtls_chachapoly_context *ctx,
const unsigned char nonce[12],
mbedtls_chachapoly_mode_t mode);
/**
* \brief This function feeds additional data to be authenticated
* into an ongoing ChaCha20-Poly1305 operation.
*
* The Additional Authenticated Data (AAD), also called
* Associated Data (AD) is only authenticated but not
* encrypted nor included in the encrypted output. It is
* usually transmitted separately from the ciphertext or
* computed locally by each party.
*
* \note This function is called before data is encrypted/decrypted.
* I.e. call this function to process the AAD before calling
* \c mbedtls_chachapoly_update().
*
* You may call this function multiple times to process
* an arbitrary amount of AAD. It is permitted to call
* this function 0 times, if no AAD is used.
*
* This function cannot be called any more if data has
* been processed by \c mbedtls_chachapoly_update(),
* or if the context has been finished.
*
* \warning Decryption with the piecewise API is discouraged, see the
* warning on \c mbedtls_chachapoly_init().
*
* \param ctx The ChaCha20-Poly1305 context. This must be initialized
* and bound to a key.
* \param aad_len The length in Bytes of the AAD. The length has no
* restrictions.
* \param aad Buffer containing the AAD.
* This pointer can be \c NULL if `aad_len == 0`.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_POLY1305_BAD_INPUT_DATA
* if \p ctx or \p aad are NULL.
* \return #MBEDTLS_ERR_CHACHAPOLY_BAD_STATE
* if the operations has not been started or has been
* finished, or if the AAD has been finished.
*/
int mbedtls_chachapoly_update_aad(mbedtls_chachapoly_context *ctx,
const unsigned char *aad,
size_t aad_len);
/**
 * \brief           This function feeds data to be encrypted or decrypted
* into an on-going ChaCha20-Poly1305
* operation.
*
* The direction (encryption or decryption) depends on the
* mode that was given when calling
* \c mbedtls_chachapoly_starts().
*
* You may call this function multiple times to process
* an arbitrary amount of data. It is permitted to call
* this function 0 times, if no data is to be encrypted
* or decrypted.
*
* \warning Decryption with the piecewise API is discouraged, see the
* warning on \c mbedtls_chachapoly_init().
*
* \param ctx The ChaCha20-Poly1305 context to use. This must be initialized.
* \param len The length (in bytes) of the data to encrypt or decrypt.
* \param input The buffer containing the data to encrypt or decrypt.
* This pointer can be \c NULL if `len == 0`.
* \param output The buffer to where the encrypted or decrypted data is
* written. This must be able to hold \p len bytes.
* This pointer can be \c NULL if `len == 0`.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_CHACHAPOLY_BAD_STATE
* if the operation has not been started or has been
* finished.
* \return Another negative error code on other kinds of failure.
*/
int mbedtls_chachapoly_update(mbedtls_chachapoly_context *ctx,
size_t len,
const unsigned char *input,
unsigned char *output);
/**
 * \brief           This function finishes the ChaCha20-Poly1305 operation and
* generates the MAC (authentication tag).
*
* \param ctx The ChaCha20-Poly1305 context to use. This must be initialized.
* \param mac The buffer to where the 128-bit (16 bytes) MAC is written.
*
* \warning Decryption with the piecewise API is discouraged, see the
* warning on \c mbedtls_chachapoly_init().
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_CHACHAPOLY_BAD_STATE
* if the operation has not been started or has been
* finished.
* \return Another negative error code on other kinds of failure.
*/
int mbedtls_chachapoly_finish(mbedtls_chachapoly_context *ctx,
unsigned char mac[16]);
/**
* \brief This function performs a complete ChaCha20-Poly1305
* authenticated encryption with the previously-set key.
*
* \note Before using this function, you must set the key with
* \c mbedtls_chachapoly_setkey().
*
* \warning You must never use the same nonce twice with the same key.
* This would void any confidentiality and authenticity
* guarantees for the messages encrypted with the same nonce
* and key.
*
* \param ctx The ChaCha20-Poly1305 context to use (holds the key).
* This must be initialized.
* \param length The length (in bytes) of the data to encrypt or decrypt.
* \param nonce The 96-bit (12 bytes) nonce/IV to use.
* \param aad The buffer containing the additional authenticated
* data (AAD). This pointer can be \c NULL if `aad_len == 0`.
* \param aad_len The length (in bytes) of the AAD data to process.
* \param input The buffer containing the data to encrypt or decrypt.
* This pointer can be \c NULL if `ilen == 0`.
* \param output The buffer to where the encrypted or decrypted data
* is written. This pointer can be \c NULL if `ilen == 0`.
* \param tag The buffer to where the computed 128-bit (16 bytes) MAC
* is written. This must not be \c NULL.
*
* \return \c 0 on success.
* \return A negative error code on failure.
*/
int mbedtls_chachapoly_encrypt_and_tag(mbedtls_chachapoly_context *ctx,
size_t length,
const unsigned char nonce[12],
const unsigned char *aad,
size_t aad_len,
const unsigned char *input,
unsigned char *output,
unsigned char tag[16]);
/**
* \brief This function performs a complete ChaCha20-Poly1305
* authenticated decryption with the previously-set key.
*
* \note Before using this function, you must set the key with
* \c mbedtls_chachapoly_setkey().
*
* \param ctx The ChaCha20-Poly1305 context to use (holds the key).
* \param length The length (in Bytes) of the data to decrypt.
* \param nonce The \c 96 Bit (\c 12 bytes) nonce/IV to use.
* \param aad The buffer containing the additional authenticated data (AAD).
* This pointer can be \c NULL if `aad_len == 0`.
* \param aad_len The length (in bytes) of the AAD data to process.
* \param tag The buffer holding the authentication tag.
* This must be a readable buffer of length \c 16 Bytes.
* \param input The buffer containing the data to decrypt.
* This pointer can be \c NULL if `ilen == 0`.
* \param output The buffer to where the decrypted data is written.
* This pointer can be \c NULL if `ilen == 0`.
*
* \return \c 0 on success.
* \return #MBEDTLS_ERR_CHACHAPOLY_AUTH_FAILED
* if the data was not authentic.
* \return Another negative error code on other kinds of failure.
*/
int mbedtls_chachapoly_auth_decrypt(mbedtls_chachapoly_context *ctx,
size_t length,
const unsigned char nonce[12],
const unsigned char *aad,
size_t aad_len,
const unsigned char tag[16],
const unsigned char *input,
unsigned char *output);
#if defined(MBEDTLS_SELF_TEST)
/**
* \brief The ChaCha20-Poly1305 checkup routine.
*
* \return \c 0 on success.
* \return \c 1 on failure.
*/
int mbedtls_chachapoly_self_test(int verbose);
#endif /* MBEDTLS_SELF_TEST */
#ifdef __cplusplus
}
#endif
#endif /* MBEDTLS_CHACHAPOLY_H */ | c | github | https://github.com/nodejs/node | deps/LIEF/third-party/mbedtls/include/mbedtls/chachapoly.h |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from typing import Optional
from superset.db_engine_specs.base import BaseEngineSpec
from superset.utils import core as utils
class KylinEngineSpec(BaseEngineSpec):  # pylint: disable=abstract-method
    """Dialect for Apache Kylin"""
    engine = "kylin"
    engine_name = "Apache Kylin"
    # Mapping of ISO-8601 duration codes to Kylin SQL expressions that
    # truncate the column ``{col}`` to the corresponding time grain.
    _time_grain_expressions = {
        None: "{col}",
        "PT1S": "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO SECOND) AS TIMESTAMP)",
        "PT1M": "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO MINUTE) AS TIMESTAMP)",
        "PT1H": "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO HOUR) AS TIMESTAMP)",
        "P1D": "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO DAY) AS DATE)",
        "P1W": "CAST(TIMESTAMPADD(WEEK, WEEK(CAST({col} AS DATE)) - 1, \
            FLOOR(CAST({col} AS TIMESTAMP) TO YEAR)) AS DATE)",
        "P1M": "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO MONTH) AS DATE)",
        "P0.25Y": "CAST(TIMESTAMPADD(QUARTER, QUARTER(CAST({col} AS DATE)) - 1, \
            FLOOR(CAST({col} AS TIMESTAMP) TO YEAR)) AS DATE)",
        "P1Y": "CAST(FLOOR(CAST({col} AS TIMESTAMP) TO YEAR) AS DATE)",
    }
    @classmethod
    def convert_dttm(cls, target_type: str, dttm: datetime) -> Optional[str]:
        """Return a Kylin SQL literal casting ``dttm`` to ``target_type``.

        Only DATE and TIMESTAMP targets are handled; any other target
        type returns ``None`` so the caller falls back to its default.
        """
        tt = target_type.upper()
        if tt == utils.TemporalType.DATE:
            return f"CAST('{dttm.date().isoformat()}' AS DATE)"
        if tt == utils.TemporalType.TIMESTAMP:
            # NOTE(review): local name misspelled ("fomatted") upstream;
            # kept byte-identical here, doc-only change.
            datetime_fomatted = dttm.isoformat(sep=" ", timespec="seconds")
            return f"""CAST('{datetime_fomatted}' AS TIMESTAMP)"""
        return None
# -*- coding: utf-8 -*-
import werkzeug.exceptions
import werkzeug.urls
import werkzeug.wrappers
import simplejson
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.web.controllers.main import login_redirect
from openerp.addons.web.http import request
from openerp.addons.website.controllers.main import Website as controllers
from openerp.addons.website.models.website import slug
# NOTE: rebinds the imported ``controllers`` class name to an instance of it.
controllers = controllers()
class WebsiteForum(http.Controller):
    """Website front-end controller for the forum: questions, answers,
    comments, votes, tags and user profiles."""
    # page sizes used by the question-list and user-directory pagers
    _post_per_page = 10
    _user_per_page = 30
def _get_notifications(self):
cr, uid, context = request.cr, request.uid, request.context
Message = request.registry['mail.message']
badge_st_id = request.registry['ir.model.data'].xmlid_to_res_id(cr, uid, 'gamification.mt_badge_granted')
if badge_st_id:
msg_ids = Message.search(cr, uid, [('subtype_id', '=', badge_st_id), ('to_read', '=', True)], context=context)
msg = Message.browse(cr, uid, msg_ids, context=context)
else:
msg = list()
return msg
def _prepare_forum_values(self, forum=None, **kwargs):
user = request.registry['res.users'].browse(request.cr, request.uid, request.uid, context=request.context)
values = {
'user': user,
'is_public_user': user.id == request.website.user_id.id,
'notifications': self._get_notifications(),
'header': kwargs.get('header', dict()),
'searches': kwargs.get('searches', dict()),
'validation_email_sent': request.session.get('validation_email_sent', False),
'validation_email_done': request.session.get('validation_email_done', False),
}
if forum:
values['forum'] = forum
elif kwargs.get('forum_id'):
values['forum'] = request.registry['forum.forum'].browse(request.cr, request.uid, kwargs.pop('forum_id'), context=request.context)
values.update(kwargs)
return values
# User and validation
# --------------------------------------------------
@http.route('/forum/send_validation_email', type='json', auth='user', website=True)
def send_validation_email(self, forum_id=None, **kwargs):
request.registry['res.users'].send_forum_validation_email(request.cr, request.uid, request.uid, forum_id=forum_id, context=request.context)
request.session['validation_email_sent'] = True
return True
@http.route('/forum/validate_email', type='http', auth='public', website=True)
def validate_email(self, token, id, email, forum_id=None, **kwargs):
if forum_id:
try:
forum_id = int(forum_id)
except ValueError:
forum_id = None
done = request.registry['res.users'].process_forum_validation_token(request.cr, request.uid, token, int(id), email, forum_id=forum_id, context=request.context)
if done:
request.session['validation_email_done'] = True
if forum_id:
return request.redirect("/forum/%s" % int(forum_id))
return request.redirect('/forum')
@http.route('/forum/validate_email/close', type='json', auth='public', website=True)
def validate_email_done(self):
request.session['validation_email_done'] = False
return True
# Forum
# --------------------------------------------------
@http.route(['/forum'], type='http', auth="public", website=True)
def forum(self, **kwargs):
cr, uid, context = request.cr, request.uid, request.context
Forum = request.registry['forum.forum']
obj_ids = Forum.search(cr, uid, [], context=context)
forums = Forum.browse(cr, uid, obj_ids, context=context)
return request.website.render("website_forum.forum_all", {'forums': forums})
@http.route('/forum/new', type='http', auth="user", methods=['POST'], website=True)
def forum_create(self, forum_name="New Forum", **kwargs):
forum_id = request.registry['forum.forum'].create(request.cr, request.uid, {
'name': forum_name,
}, context=request.context)
return request.redirect("/forum/%s" % forum_id)
@http.route('/forum/notification_read', type='json', auth="user", methods=['POST'], website=True)
def notification_read(self, **kwargs):
request.registry['mail.message'].set_message_read(request.cr, request.uid, [int(kwargs.get('notification_id'))], read=True, context=request.context)
return True
    @http.route(['/forum/<model("forum.forum"):forum>',
                 '/forum/<model("forum.forum"):forum>/page/<int:page>',
                 '''/forum/<model("forum.forum"):forum>/tag/<model("forum.tag", "[('forum_id','=',forum[0])]"):tag>/questions''',
                 '''/forum/<model("forum.forum"):forum>/tag/<model("forum.tag", "[('forum_id','=',forum[0])]"):tag>/questions/page/<int:page>''',
                 ], type='http', auth="public", website=True)
    def questions(self, forum, tag=None, page=1, filters='all', sorting='date', search='', **post):
        """Render the paginated question list of ``forum``.

        Optional narrowing: ``tag`` (route), ``search`` (name/content ilike),
        ``filters`` ('all' | 'unanswered' | 'followed') and ``sorting``
        ('date' | 'answered' | 'vote' | fallback 'creation'). Unknown
        filter/sorting values are normalized to their defaults in place.
        """
        cr, uid, context = request.cr, request.uid, request.context
        Post = request.registry['forum.post']
        user = request.registry['res.users'].browse(cr, uid, uid, context=context)
        # only active, top-level posts (questions) of this forum
        domain = [('forum_id', '=', forum.id), ('parent_id', '=', False), ('state', '=', 'active')]
        if search:
            domain += ['|', ('name', 'ilike', search), ('content', 'ilike', search)]
        if tag:
            domain += [('tag_ids', 'in', tag.id)]
        if filters == 'unanswered':
            domain += [('child_ids', '=', False)]
        elif filters == 'followed':
            domain += [('message_follower_ids', '=', user.partner_id.id)]
        else:
            # normalize any unknown value so the template highlights 'all'
            filters = 'all'
        if sorting == 'answered':
            order = 'child_count desc'
        elif sorting == 'vote':
            order = 'vote_count desc'
        elif sorting == 'date':
            order = 'write_date desc'
        else:
            sorting = 'creation'
            order = 'create_date desc'
        question_count = Post.search(cr, uid, domain, count=True, context=context)
        # pager base URL depends on whether we are inside a tag view
        if tag:
            url = "/forum/%s/tag/%s/questions" % (slug(forum), slug(tag))
        else:
            url = "/forum/%s" % slug(forum)
        url_args = {}
        if search:
            url_args['search'] = search
        if filters:
            url_args['filters'] = filters
        if sorting:
            url_args['sorting'] = sorting
        pager = request.website.pager(url=url, total=question_count, page=page,
                                      step=self._post_per_page, scope=self._post_per_page,
                                      url_args=url_args)
        obj_ids = Post.search(cr, uid, domain, limit=self._post_per_page, offset=pager['offset'], order=order, context=context)
        question_ids = Post.browse(cr, uid, obj_ids, context=context)
        values = self._prepare_forum_values(forum=forum, searches=post)
        values.update({
            'main_object': tag or forum,
            'question_ids': question_ids,
            'question_count': question_count,
            'pager': pager,
            'tag': tag,
            'filters': filters,
            'sorting': sorting,
            'search': search,
        })
        return request.website.render("website_forum.forum_index", values)
@http.route(['/forum/<model("forum.forum"):forum>/faq'], type='http', auth="public", website=True)
def forum_faq(self, forum, **post):
values = self._prepare_forum_values(forum=forum, searches=dict(), header={'is_guidelines': True}, **post)
return request.website.render("website_forum.faq", values)
@http.route('/forum/get_tags', type='http', auth="public", methods=['GET'], website=True)
def tag_read(self, q='', l=25, t='texttext', **post):
data = request.registry['forum.tag'].search_read(
request.cr,
request.uid,
domain=[('name', '=ilike', (q or '') + "%")],
fields=['id', 'name'],
limit=int(l),
context=request.context
)
if t == 'texttext':
# old tag with texttext - Retro for V8 - #TODO Remove in master
data = [tag['name'] for tag in data]
return simplejson.dumps(data)
@http.route(['/forum/<model("forum.forum"):forum>/tag'], type='http', auth="public", website=True)
def tags(self, forum, page=1, **post):
cr, uid, context = request.cr, request.uid, request.context
Tag = request.registry['forum.tag']
obj_ids = Tag.search(cr, uid, [('forum_id', '=', forum.id), ('posts_count', '>', 0)], limit=None, order='posts_count DESC', context=context)
tags = Tag.browse(cr, uid, obj_ids, context=context)
values = self._prepare_forum_values(forum=forum, searches={'tags': True}, **post)
values.update({
'tags': tags,
'main_object': forum,
})
return request.website.render("website_forum.tag", values)
# Questions
# --------------------------------------------------
@http.route(['/forum/<model("forum.forum"):forum>/ask'], type='http', auth="public", website=True)
def question_ask(self, forum, **post):
if not request.session.uid:
return login_redirect()
values = self._prepare_forum_values(forum=forum, searches={}, header={'ask_hide': True})
return request.website.render("website_forum.ask_question", values)
@http.route('/forum/<model("forum.forum"):forum>/question/new', type='http', auth="user", methods=['POST'], website=True)
def question_create(self, forum, **post):
cr, uid, context = request.cr, request.uid, request.context
Tag = request.registry['forum.tag']
Forum = request.registry['forum.forum']
question_tag_ids = []
tag_version = post.get('tag_type', 'texttext')
if tag_version == "texttext": # TODO Remove in master
if post.get('question_tags').strip('[]'):
tags = post.get('question_tags').strip('[]').replace('"', '').split(",")
for tag in tags:
tag_ids = Tag.search(cr, uid, [('name', '=', tag)], context=context)
if tag_ids:
question_tag_ids.append((4, tag_ids[0]))
else:
question_tag_ids.append((0, 0, {'name': tag, 'forum_id': forum.id}))
question_tag_ids = {forum.id: question_tag_ids}
elif tag_version == "select2":
question_tag_ids = Forum._tag_to_write_vals(cr, uid, [forum.id], post.get('question_tags', ''), context)
new_question_id = request.registry['forum.post'].create(
request.cr, request.uid, {
'forum_id': forum.id,
'name': post.get('question_name'),
'content': post.get('content'),
'tag_ids': question_tag_ids[forum.id],
}, context=context)
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), new_question_id))
@http.route(['''/forum/<model("forum.forum"):forum>/question/<model("forum.post", "[('forum_id','=',forum[0]),('parent_id','=',False)]"):question>'''], type='http', auth="public", website=True)
def question(self, forum, question, **post):
cr, uid, context = request.cr, request.uid, request.context
# Hide posts from abusers (negative karma), except for moderators
if not question.can_view:
raise werkzeug.exceptions.NotFound()
# increment view counter
request.registry['forum.post'].set_viewed(cr, SUPERUSER_ID, [question.id], context=context)
if question.parent_id:
redirect_url = "/forum/%s/question/%s" % (slug(forum), slug(question.parent_id))
return werkzeug.utils.redirect(redirect_url, 301)
filters = 'question'
values = self._prepare_forum_values(forum=forum, searches=post)
values.update({
'main_object': question,
'question': question,
'header': {'question_data': True},
'filters': filters,
'reversed': reversed,
})
return request.website.render("website_forum.post_description_full", values)
@http.route('/forum/<model("forum.forum"):forum>/question/<model("forum.post"):question>/toggle_favourite', type='json', auth="user", methods=['POST'], website=True)
def question_toggle_favorite(self, forum, question, **post):
if not request.session.uid:
return {'error': 'anonymous_user'}
# TDE: add check for not public
favourite = False if question.user_favourite else True
if favourite:
favourite_ids = [(4, request.uid)]
else:
favourite_ids = [(3, request.uid)]
request.registry['forum.post'].write(request.cr, request.uid, [question.id], {'favourite_ids': favourite_ids}, context=request.context)
return favourite
@http.route('/forum/<model("forum.forum"):forum>/question/<model("forum.post"):question>/ask_for_close', type='http', auth="user", methods=['POST'], website=True)
def question_ask_for_close(self, forum, question, **post):
cr, uid, context = request.cr, request.uid, request.context
Reason = request.registry['forum.post.reason']
reason_ids = Reason.search(cr, uid, [], context=context)
reasons = Reason.browse(cr, uid, reason_ids, context)
values = self._prepare_forum_values(**post)
values.update({
'question': question,
'question': question,
'forum': forum,
'reasons': reasons,
})
return request.website.render("website_forum.close_question", values)
@http.route('/forum/<model("forum.forum"):forum>/question/<model("forum.post"):question>/edit_answer', type='http', auth="user", website=True)
def question_edit_answer(self, forum, question, **kwargs):
for record in question.child_ids:
if record.create_uid.id == request.uid:
answer = record
break
return werkzeug.utils.redirect("/forum/%s/post/%s/edit" % (slug(forum), slug(answer)))
@http.route('/forum/<model("forum.forum"):forum>/question/<model("forum.post"):question>/close', type='http', auth="user", methods=['POST'], website=True)
def question_close(self, forum, question, **post):
request.registry['forum.post'].close(request.cr, request.uid, [question.id], reason_id=int(post.get('reason_id', False)), context=request.context)
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
@http.route('/forum/<model("forum.forum"):forum>/question/<model("forum.post"):question>/reopen', type='http', auth="user", methods=['POST'], website=True)
def question_reopen(self, forum, question, **kwarg):
request.registry['forum.post'].reopen(request.cr, request.uid, [question.id], context=request.context)
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
@http.route('/forum/<model("forum.forum"):forum>/question/<model("forum.post"):question>/delete', type='http', auth="user", methods=['POST'], website=True)
def question_delete(self, forum, question, **kwarg):
request.registry['forum.post'].write(request.cr, request.uid, [question.id], {'active': False}, context=request.context)
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
@http.route('/forum/<model("forum.forum"):forum>/question/<model("forum.post"):question>/undelete', type='http', auth="user", methods=['POST'], website=True)
def question_undelete(self, forum, question, **kwarg):
request.registry['forum.post'].write(request.cr, request.uid, [question.id], {'active': True}, context=request.context)
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
# Post
# --------------------------------------------------
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/new', type='http', auth="public", methods=['POST'], website=True)
def post_new(self, forum, post, **kwargs):
if not request.session.uid:
return login_redirect()
cr, uid, context = request.cr, request.uid, request.context
user = request.registry['res.users'].browse(cr, SUPERUSER_ID, uid, context=context)
if not user.email or not tools.single_email_re.match(user.email):
return werkzeug.utils.redirect("/forum/%s/user/%s/edit?email_required=1" % (slug(forum), uid))
request.registry['forum.post'].create(
request.cr, request.uid, {
'forum_id': forum.id,
'parent_id': post.id,
'content': kwargs.get('content'),
}, context=request.context)
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(post)))
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/comment', type='http', auth="public", methods=['POST'], website=True)
def post_comment(self, forum, post, **kwargs):
if not request.session.uid:
return login_redirect()
question = post.parent_id if post.parent_id else post
cr, uid, context = request.cr, request.uid, request.context
if kwargs.get('comment') and post.forum_id.id == forum.id:
# TDE FIXME: check that post_id is the question or one of its answers
request.registry['forum.post'].message_post(
cr, uid, post.id,
body=kwargs.get('comment'),
type='comment',
subtype='mt_comment',
context=dict(context, mail_create_nosubscribe=True))
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/toggle_correct', type='json', auth="public", website=True)
def post_toggle_correct(self, forum, post, **kwargs):
cr, uid, context = request.cr, request.uid, request.context
if post.parent_id is False:
return request.redirect('/')
if not request.session.uid:
return {'error': 'anonymous_user'}
# set all answers to False, only one can be accepted
request.registry['forum.post'].write(cr, uid, [c.id for c in post.parent_id.child_ids if not c.id == post.id], {'is_correct': False}, context=context)
request.registry['forum.post'].write(cr, uid, [post.id], {'is_correct': not post.is_correct}, context=context)
return post.is_correct
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/delete', type='http', auth="user", methods=['POST'], website=True)
def post_delete(self, forum, post, **kwargs):
question = post.parent_id
request.registry['forum.post'].unlink(request.cr, request.uid, [post.id], context=request.context)
if question:
werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
return werkzeug.utils.redirect("/forum/%s" % slug(forum))
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/edit', type='http', auth="user", website=True)
def post_edit(self, forum, post, **kwargs):
tag_version = kwargs.get('tag_type', 'texttext')
if tag_version == "texttext": # old version - retro v8 - #TODO Remove in master
tags = ""
for tag_name in post.tag_ids:
tags += tag_name.name + ","
elif tag_version == "select2": # new version
tags = [dict(id=tag.id, name=tag.name) for tag in post.tag_ids]
tags = simplejson.dumps(tags)
values = self._prepare_forum_values(forum=forum)
values.update({
'tags': tags,
'post': post,
'is_answer': bool(post.parent_id),
'searches': kwargs
})
return request.website.render("website_forum.edit_post", values)
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/edition', type='http', auth="user", website=True)
def post_edit_retro(self, forum, post, **kwargs):
# This function is only there for retrocompatibility between old template using texttext and template using select2
# It should be removed into master #TODO JKE: remove in master all condition with tag_type
kwargs.update(tag_type="select2")
return self.post_edit(forum, post, **kwargs)
    @http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/save', type='http', auth="user", methods=['POST'], website=True)
    def post_save(self, forum, post, **kwargs):
        """Persist the edit form of a question or answer, then redirect
        back to the (parent) question.

        Tag handling depends on ``tag_type``: 'texttext' (legacy widget,
        comma-separated names, missing tags created on the fly) or
        'select2'. Other values leave the tags untouched.
        """
        cr, uid, context = request.cr, request.uid, request.context
        question_tags = []
        Tag = request.registry['forum.tag']
        Forum = request.registry['forum.forum']
        tag_version = kwargs.get('tag_type', 'texttext')
        vals = {
            'name': kwargs.get('question_name'),
            'content': kwargs.get('content'),
        }
        if tag_version == "texttext":  # old version - retro v8 - #TODO Remove in master
            if kwargs.get('question_tag') and kwargs.get('question_tag').strip('[]'):
                tags = kwargs.get('question_tag').strip('[]').replace('"', '').split(",")
                for tag in tags:
                    tag_ids = Tag.search(cr, uid, [('name', '=', tag)], context=context)
                    if tag_ids:
                        question_tags += tag_ids
                    else:
                        # unknown tag name: create it within this forum
                        new_tag = Tag.create(cr, uid, {'name': tag, 'forum_id': forum.id}, context=context)
                        question_tags.append(new_tag)
                # (6, 0, ids) replaces the whole tag set
                vals['tag_ids'] = [(6, 0, question_tags)]
        elif tag_version == "select2":  # new version
            vals['tag_ids'] = Forum._tag_to_write_vals(cr, uid, [forum.id], kwargs.get('question_tag', ''), context)[forum.id]
        request.registry['forum.post'].write(cr, uid, [post.id], vals, context=context)
        question = post.parent_id if post.parent_id else post
        return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/upvote', type='json', auth="public", website=True)
def post_upvote(self, forum, post, **kwargs):
if not request.session.uid:
return {'error': 'anonymous_user'}
if request.uid == post.create_uid.id:
return {'error': 'own_post'}
upvote = True if not post.user_vote > 0 else False
return request.registry['forum.post'].vote(request.cr, request.uid, [post.id], upvote=upvote, context=request.context)
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/downvote', type='json', auth="public", website=True)
def post_downvote(self, forum, post, **kwargs):
if not request.session.uid:
return {'error': 'anonymous_user'}
if request.uid == post.create_uid.id:
return {'error': 'own_post'}
upvote = True if post.user_vote < 0 else False
return request.registry['forum.post'].vote(request.cr, request.uid, [post.id], upvote=upvote, context=request.context)
# User
# --------------------------------------------------
@http.route(['/forum/<model("forum.forum"):forum>/users',
'/forum/<model("forum.forum"):forum>/users/page/<int:page>'],
type='http', auth="public", website=True)
def users(self, forum, page=1, **searches):
cr, uid, context = request.cr, request.uid, request.context
User = request.registry['res.users']
step = 30
tag_count = User.search(cr, SUPERUSER_ID, [('karma', '>', 1), ('website_published', '=', True)], count=True, context=context)
pager = request.website.pager(url="/forum/%s/users" % slug(forum), total=tag_count, page=page, step=step, scope=30)
obj_ids = User.search(cr, SUPERUSER_ID, [('karma', '>', 1), ('website_published', '=', True)], limit=step, offset=pager['offset'], order='karma DESC', context=context)
# put the users in block of 3 to display them as a table
users = [[] for i in range(len(obj_ids)/3+1)]
for index, user in enumerate(User.browse(cr, SUPERUSER_ID, obj_ids, context=context)):
users[index/3].append(user)
searches['users'] = 'True'
values = self._prepare_forum_values(forum=forum, searches=searches)
values .update({
'users': users,
'main_object': forum,
'notifications': self._get_notifications(),
'pager': pager,
})
return request.website.render("website_forum.users", values)
@http.route(['/forum/<model("forum.forum"):forum>/partner/<int:partner_id>'], type='http', auth="public", website=True)
def open_partner(self, forum, partner_id=0, **post):
cr, uid, context = request.cr, request.uid, request.context
if partner_id:
partner = request.registry['res.partner'].browse(cr, SUPERUSER_ID, partner_id, context=context)
if partner.exists() and partner.user_ids:
return werkzeug.utils.redirect("/forum/%s/user/%d" % (slug(forum), partner.user_ids[0].id))
return werkzeug.utils.redirect("/forum/%s" % slug(forum))
@http.route(['/forum/user/<int:user_id>/avatar'], type='http', auth="public", website=True)
def user_avatar(self, user_id=0, **post):
cr, uid, context = request.cr, request.uid, request.context
response = werkzeug.wrappers.Response()
User = request.registry['res.users']
Website = request.registry['website']
user = User.browse(cr, SUPERUSER_ID, user_id, context=context)
if not user.exists() or (user_id != request.session.uid and user.karma < 1):
return Website._image_placeholder(response)
return Website._image(cr, SUPERUSER_ID, 'res.users', user.id, 'image', response)
    @http.route(['/forum/<model("forum.forum"):forum>/user/<int:user_id>'], type='http', auth="public", website=True)
    def open_user(self, forum, user_id=0, **post):
        """Render the full profile page of a forum user: his questions,
        answers, followed/favourite questions, vote statistics and recent
        activity. Low-karma or unpublished profiles are hidden from most
        visitors (see the moderation comments below).
        """
        cr, uid, context = request.cr, request.uid, request.context
        User = request.registry['res.users']
        Post = request.registry['forum.post']
        Vote = request.registry['forum.post.vote']
        Activity = request.registry['mail.message']
        Followers = request.registry['mail.followers']
        Data = request.registry["ir.model.data"]
        user = User.browse(cr, SUPERUSER_ID, user_id, context=context)
        current_user = User.browse(cr, SUPERUSER_ID, uid, context=context)
        # Users with high karma can see users with karma <= 0 for
        # moderation purposes, IFF they have posted something (see below)
        if (not user.exists() or
                (user.karma < 1 and current_user.karma < forum.karma_unlink_all)):
            return werkzeug.utils.redirect("/forum/%s" % slug(forum))
        values = self._prepare_forum_values(forum=forum, **post)
        # questions and answers by user
        user_question_ids = Post.search(cr, uid, [
            ('parent_id', '=', False),
            ('forum_id', '=', forum.id), ('create_uid', '=', user.id),
        ], order='create_date desc', context=context)
        count_user_questions = len(user_question_ids)
        # unpublished profiles are only visible to their owner and to moderators
        if (user_id != request.session.uid and not
                (user.website_published or
                 (count_user_questions and current_user.karma > forum.karma_unlink_all))):
            return request.website.render("website_forum.private_profile", values)
        # limit length of visible posts by default for performance reasons, except for the high
        # karma users (not many of them, and they need it to properly moderate the forum)
        post_display_limit = None
        if current_user.karma < forum.karma_unlink_all:
            post_display_limit = 20
        user_questions = Post.browse(cr, uid, user_question_ids[:post_display_limit], context=context)
        user_answer_ids = Post.search(cr, uid, [
            ('parent_id', '!=', False),
            ('forum_id', '=', forum.id), ('create_uid', '=', user.id),
        ], order='create_date desc', context=context)
        count_user_answers = len(user_answer_ids)
        user_answers = Post.browse(cr, uid, user_answer_ids[:post_display_limit], context=context)
        # showing questions which user following
        obj_ids = Followers.search(cr, SUPERUSER_ID, [('res_model', '=', 'forum.post'), ('partner_id', '=', user.partner_id.id)], context=context)
        post_ids = [follower.res_id for follower in Followers.browse(cr, SUPERUSER_ID, obj_ids, context=context)]
        que_ids = Post.search(cr, uid, [('id', 'in', post_ids), ('forum_id', '=', forum.id), ('parent_id', '=', False)], context=context)
        followed = Post.browse(cr, uid, que_ids, context=context)
        #showing Favourite questions of user.
        fav_que_ids = Post.search(cr, uid, [('favourite_ids', '=', user.id), ('forum_id', '=', forum.id), ('parent_id', '=', False)], context=context)
        favourite = Post.browse(cr, uid, fav_que_ids, context=context)
        #votes which given on users questions and answers.
        data = Vote.read_group(cr, uid, [('forum_id', '=', forum.id), ('recipient_id', '=', user.id)], ["vote"], groupby=["vote"], context=context)
        up_votes, down_votes = 0, 0
        for rec in data:
            # read_group returns the vote value as a string ('1' / '-1')
            if rec['vote'] == '1':
                up_votes = rec['vote_count']
            elif rec['vote'] == '-1':
                down_votes = rec['vote_count']
        #Votes which given by users on others questions and answers.
        post_votes = Vote.search(cr, uid, [('user_id', '=', user.id)], context=context)
        vote_ids = Vote.browse(cr, uid, post_votes, context=context)
        #activity by user.
        model, comment = Data.get_object_reference(cr, uid, 'mail', 'mt_comment')
        activity_ids = Activity.search(cr, uid, [('res_id', 'in', user_question_ids+user_answer_ids), ('model', '=', 'forum.post'), ('subtype_id', '!=', comment)], order='date DESC', limit=100, context=context)
        activities = Activity.browse(cr, uid, activity_ids, context=context)
        posts = {}
        for act in activities:
            posts[act.res_id] = True
        posts_ids = Post.browse(cr, uid, posts.keys(), context=context)
        # map post id -> (question record, answer record or False)
        posts = dict(map(lambda x: (x.id, (x.parent_id or x, x.parent_id and x or False)), posts_ids))
        # TDE CLEANME MASTER: couldn't it be rewritten using a 'menu' key instead of one key for each menu ?
        if user.id == uid:
            post['my_profile'] = True
        else:
            post['users'] = True
        values.update({
            'uid': uid,
            'user': user,
            'main_object': user,
            'searches': post,
            'questions': user_questions,
            'count_questions': count_user_questions,
            'answers': user_answers,
            'count_answers': count_user_answers,
            'followed': followed,
            'favourite': favourite,
            'up_votes': up_votes,
            'down_votes': down_votes,
            'activities': activities,
            'posts': posts,
            'vote_post': vote_ids,
        })
        return request.website.render("website_forum.user_detail_full", values)
# Render the "edit profile" form for a forum user.
# Countries are read as SUPERUSER_ID so the selection list is complete
# regardless of the visitor's own access rights.
@http.route('/forum/<model("forum.forum"):forum>/user/<model("res.users"):user>/edit', type='http', auth="user", website=True)
def edit_profile(self, forum, user, **kwargs):
country = request.registry['res.country']
country_ids = country.search(request.cr, SUPERUSER_ID, [], context=request.context)
countries = country.browse(request.cr, SUPERUSER_ID, country_ids, context=request.context)
values = self._prepare_forum_values(forum=forum, searches=kwargs)
values.update({
# 'email_required' makes the template prompt for an email address.
'email_required': kwargs.get('email_required'),
'countries': countries,
'notifications': self._get_notifications(),
})
return request.website.render("website_forum.edit_profile", values)
# Persist the profile form posted from edit_profile and redirect back to
# the user's forum profile page.
# NOTE(review): the write is executed as request.uid, so access rights on
# res.users are enforced by the ORM — presumably only self-edits succeed
# for normal users; confirm against the res.users access rules.
@http.route('/forum/<model("forum.forum"):forum>/user/<model("res.users"):user>/save', type='http', auth="user", methods=['POST'], website=True)
def save_edited_profile(self, forum, user, **kwargs):
values = {
'name': kwargs.get('name'),
'website': kwargs.get('website'),
'email': kwargs.get('email'),
'city': kwargs.get('city'),
# 'country' arrives as a string id from the form; False clears it.
'country_id': int(kwargs.get('country')) if kwargs.get('country') else False,
'website_description': kwargs.get('description'),
}
if request.uid == user.id: # the controller allows to edit only its own privacy settings; use partner management for other cases
# checkbox posts the literal string 'True' when ticked
values['website_published'] = kwargs.get('website_published') == 'True'
request.registry['res.users'].write(request.cr, request.uid, [user.id], values, context=request.context)
return werkzeug.utils.redirect("/forum/%s/user/%d" % (slug(forum), user.id))
# Badges
# --------------------------------------------------
# List all forum-related gamification badges, most-awarded first.
@http.route('/forum/<model("forum.forum"):forum>/badge', type='http', auth="public", website=True)
def badges(self, forum, **searches):
cr, uid, context = request.cr, request.uid, request.context
Badge = request.registry['gamification.badge']
# Search as SUPERUSER_ID so public visitors see the full badge list;
# browse as uid so record values respect the visitor's rights.
badge_ids = Badge.search(cr, SUPERUSER_ID, [('challenge_ids.category', '=', 'forum')], context=context)
badges = Badge.browse(cr, uid, badge_ids, context=context)
# Order by number of distinct users that earned the badge, descending.
badges = sorted(badges, key=lambda b: b.stat_count_distinct, reverse=True)
values = self._prepare_forum_values(forum=forum, searches={'badges': True})
values.update({
'badges': badges,
})
return request.website.render("website_forum.badge", values)
# Show the users that have been granted a given badge.
@http.route(['''/forum/<model("forum.forum"):forum>/badge/<model("gamification.badge"):badge>'''], type='http', auth="public", website=True)
def badge_users(self, forum, badge, **kwargs):
# badge.owner_ids are gamification.badge.user records; collect owners.
user_ids = [badge_user.user_id.id for badge_user in badge.owner_ids]
# Browse as SUPERUSER_ID so the page works for anonymous visitors.
users = request.registry['res.users'].browse(request.cr, SUPERUSER_ID, user_ids, context=request.context)
values = self._prepare_forum_values(forum=forum, searches={'badges': True})
values.update({
'badge': badge,
'users': users,
})
return request.website.render("website_forum.badge_user", values)
# Messaging
# --------------------------------------------------
# Promote a mail.message comment to a full forum answer, then redirect to
# the question it belongs to (or the forum index if conversion failed).
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/comment/<model("mail.message"):comment>/convert_to_answer', type='http', auth="user", methods=['POST'], website=True)
def convert_comment_to_answer(self, forum, post, comment, **kwarg):
new_post_id = request.registry['forum.post'].convert_comment_to_answer(request.cr, request.uid, comment.id, context=request.context)
if not new_post_id:
return werkzeug.utils.redirect("/forum/%s" % slug(forum))
# re-browse the freshly created post to find its parent question
post = request.registry['forum.post'].browse(request.cr, request.uid, new_post_id, context=request.context)
question = post.parent_id if post.parent_id else post
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
# Demote an answer post back to a comment on its parent question.
# The parent is captured before conversion because the answer record is
# consumed by convert_answer_to_comment.
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/convert_to_comment', type='http', auth="user", methods=['POST'], website=True)
def convert_answer_to_comment(self, forum, post, **kwarg):
question = post.parent_id
new_msg_id = request.registry['forum.post'].convert_answer_to_comment(request.cr, request.uid, post.id, context=request.context)
if not new_msg_id:
return werkzeug.utils.redirect("/forum/%s" % slug(forum))
return werkzeug.utils.redirect("/forum/%s/question/%s" % (slug(forum), slug(question)))
# JSON endpoint: delete a comment attached to a forum post.
# Returns an error dict for anonymous sessions; otherwise delegates rights
# checking and deletion to forum.post.unlink_comment.
@http.route('/forum/<model("forum.forum"):forum>/post/<model("forum.post"):post>/comment/<model("mail.message"):comment>/delete', type='json', auth="user", website=True)
def delete_comment(self, forum, post, comment, **kwarg):
if not request.session.uid:
return {'error': 'anonymous_user'}
return request.registry['forum.post'].unlink_comment(request.cr, request.uid, post.id, comment.id, context=request.context) | unknown | codeparrot/codeparrot-clean | |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.stack_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.compiler.xla import xla
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.platform import test
class StackOpTest(xla_test.XLATestCase):
"""XLA-compilation tests for the v2 stack ops (stack_v2, stack_push_v2,
stack_pop_v2, stack_close_v2)."""
def testStackPushPop(self):
# Push one 1x2 value and pop it back; the popped tensor must equal it.
with self.session(), self.test_scope():
v = array_ops.placeholder(dtypes.float32)
def fn():
h = gen_data_flow_ops.stack_v2(5, dtypes.float32, stack_name="foo")
c = gen_data_flow_ops.stack_push_v2(h, v)
with ops.control_dependencies([c]):
c1 = gen_data_flow_ops.stack_pop_v2(h, dtypes.float32)
return c1
self.assertAllClose([[4.0, 5.0]],
xla.compile(fn)[0].eval({v: [[4.0, 5.0]]}))
def testStackPushPopSwap(self):
# Same round-trip with swap_memory=True and a larger (2000-element) input.
with self.session(), self.test_scope():
a = np.arange(2000)
x = array_ops.placeholder(dtypes.float32)
def fn():
h = gen_data_flow_ops.stack_v2(5, dtypes.float32, stack_name="foo")
c = gen_data_flow_ops.stack_push_v2(h, x, swap_memory=True)
with ops.control_dependencies([c]):
return gen_data_flow_ops.stack_pop_v2(h, dtypes.float32)
self.assertAllClose(a, xla.compile(fn)[0].eval({x: a}))
def testMultiStack(self):
# Two stacks with different names used in one computation; results from
# both pops are combined (4.0 + 5.0 == 9.0).
with self.session(), self.test_scope():
v = array_ops.placeholder(dtypes.float32)
def fn():
h1 = gen_data_flow_ops.stack_v2(5, dtypes.float32, stack_name="foo")
c1 = gen_data_flow_ops.stack_push_v2(h1, v)
with ops.control_dependencies([c1]):
c1 = gen_data_flow_ops.stack_pop_v2(h1, dtypes.float32)
h2 = gen_data_flow_ops.stack_v2(5, dtypes.float32, stack_name="bar")
c2 = gen_data_flow_ops.stack_push_v2(h2, 5.0)
with ops.control_dependencies([c2]):
c2 = gen_data_flow_ops.stack_pop_v2(h2, dtypes.float32)
return c1 + c2
self.assertAllClose(9.0, xla.compile(fn)[0].eval({v: 4.0}))
def testSameNameStacks(self):
"""Different stacks with the same name do not interfere."""
with self.session() as sess, self.test_scope():
v1 = array_ops.placeholder(dtypes.float32)
v2 = array_ops.placeholder(dtypes.float32)
def fn():
# Both handles use stack_name="foo" but are distinct resources.
h1 = gen_data_flow_ops.stack_v2(5, dtypes.float32, stack_name="foo")
h2 = gen_data_flow_ops.stack_v2(5, dtypes.float32, stack_name="foo")
c1 = gen_data_flow_ops.stack_push_v2(h1, v1)
with ops.control_dependencies([c1]):
c2 = gen_data_flow_ops.stack_push_v2(h2, v2)
with ops.control_dependencies([c2]):
pop1 = gen_data_flow_ops.stack_pop_v2(h1, dtypes.float32)
pop2 = gen_data_flow_ops.stack_pop_v2(h2, dtypes.float32)
return [pop1, pop2]
[pop1_compiled, pop2_compiled] = xla.compile(fn)
out1, out2 = sess.run([pop1_compiled, pop2_compiled], {v1: 4.0, v2: 5.0})
self.assertAllClose(out1, 4.0)
self.assertAllClose(out2, 5.0)
def testCloseStack(self):
# Closing a fresh stack must compile and run without error.
with self.session() as sess, self.test_scope():
def fn():
h = gen_data_flow_ops.stack_v2(5, dtypes.float32, stack_name="foo")
gen_data_flow_ops.stack_close_v2(h)
sess.run(xla.compile(fn))
def testPushCloseStack(self):
# Closing a stack that still holds a pushed value must also succeed.
with self.session() as sess, self.test_scope():
v = array_ops.placeholder(dtypes.float32)
def fn():
h = gen_data_flow_ops.stack_v2(5, dtypes.float32, stack_name="foo")
c = gen_data_flow_ops.stack_push_v2(h, v)
with ops.control_dependencies([c]):
gen_data_flow_ops.stack_close_v2(h)
sess.run(xla.compile(fn), {v: [[4.0, 5.0]]})
# Standard TensorFlow test entry point.
if __name__ == "__main__":
test.main() | unknown | codeparrot/codeparrot-clean | |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.http;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import javax.servlet.http.HttpServletResponse;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
/**
* Small test to cover default disabled prof endpoint.
*/
public class TestDisabledProfileServlet extends HttpServerFunctionalTest {
// Shared HTTP server and its base URL, created once for the whole class.
private static HttpServer2 server;
private static URL baseUrl;
@BeforeAll
public static void setup() throws Exception {
server = createTestServer();
server.start();
baseUrl = getServerURL(server);
}
@AfterAll
public static void cleanup() throws Exception {
server.stop();
}
// A GET on /prof must fail with HTTP 500 (profiler disabled) while still
// carrying the CORS headers the servlet always sets.
@Test
public void testQuery() throws Exception {
try {
readOutput(new URL(baseUrl, "/prof"));
throw new IllegalStateException("Should not reach here");
} catch (IOException e) {
// readOutput surfaces the 500 as an IOException mentioning the URL.
assertTrue(e.getMessage()
.contains(HttpServletResponse.SC_INTERNAL_SERVER_ERROR + " for URL: " + baseUrl));
}
// CORS headers
HttpURLConnection conn =
(HttpURLConnection) new URL(baseUrl, "/prof").openConnection();
assertEquals("GET", conn.getHeaderField(ProfileServlet.ACCESS_CONTROL_ALLOW_METHODS));
assertNotNull(conn.getHeaderField(ProfileServlet.ACCESS_CONTROL_ALLOW_ORIGIN));
conn.disconnect();
}
// Only GET is routed to the (disabled) profiler: other verbs get 405,
// GET gets 500 because profiling is off by default.
@Test
public void testRequestMethods() throws IOException {
HttpURLConnection connection = getConnection("PUT");
assertEquals(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
connection.getResponseCode(), "Unexpected response code");
connection.disconnect();
connection = getConnection("POST");
assertEquals(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
connection.getResponseCode(), "Unexpected response code");
connection.disconnect();
connection = getConnection("DELETE");
assertEquals(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
connection.getResponseCode(), "Unexpected response code");
connection.disconnect();
connection = getConnection("GET");
assertEquals(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
connection.getResponseCode(), "Unexpected response code");
connection.disconnect();
}
// Open a connection to /prof with the given HTTP method (not yet sent).
private HttpURLConnection getConnection(final String method) throws IOException {
URL url = new URL(baseUrl, "/prof");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod(method);
return conn;
}
} | java | github | https://github.com/apache/hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestDisabledProfileServlet.java
'''
Created on Apr 14, 2015
@author: shurrey
'''
import logging
import random
import secrets
import sys
from datetime import datetime
from uuid import uuid1

import suds
from suds.client import Client
from suds.wsse import *
from suds.xsd.doctor import Import, ImportDoctor
def getEntitlements():
    """Return every Blackboard Learn Web Service entitlement this sample
    tool requests when registering itself (see the registerTool call).

    The entries are grouped per endpoint purely for readability; the
    concatenation at the end preserves the original ordering.
    """
    # Announcement.WS
    announcement_ws = [
        "Announcement.WS:createCourseAnnouncements",
        "Announcement.WS:createOrgAnnouncements",
        "Announcement.WS:createSystemAnnouncements",
        "Announcement.WS:deleteCourseAnnouncements",
        "Announcement.WS:deleteOrgAnnouncements",
        "Announcement.WS:deleteSystemAnnouncements",
        "Announcement.WS:getCourseAnnouncements",
        "Announcement.WS:getOrgAnnouncements",
        "Announcement.WS:getSystemAnnouncements",
        "Announcement.WS:updateCourseAnnouncements",
        "Announcement.WS:updateOrgAnnouncements",
        "Announcement.WS:updateSystemAnnouncements",
    ]
    # Calendar.WS
    calendar_ws = [
        "Calendar.WS:canUpdateCourseCalendarItem",
        "Calendar.WS:canUpdateInstitutionCalendarItem",
        "Calendar.WS:canUpdatePersonalCalendarItem",
        "Calendar.WS:createCourseCalendarItem",
        "Calendar.WS:createInstitutionCalendarItem",
        "Calendar.WS:createPersonalCalendarItem",
        "Calendar.WS:deleteCourseCalendarItem",
        "Calendar.WS:deleteInstitutionCalendarItem",
        "Calendar.WS:deletePersonalCalendarItem",
        "Calendar.WS:getCalendarItem",
        "Calendar.WS:saveCourseCalendarItem",
        "Calendar.WS:saveInstitutionCalendarItem",
        "Calendar.WS:savePersonalCalendarItem",
        "Calendar.WS:updateCourseCalendarItem",
        "Calendar.WS:updateInstitutionCalendarItem",
        "Calendar.WS:updatePersonalCalendarItem",
    ]
    # Content.WS
    content_ws = [
        "Content.WS:addContentFile",
        "Content.WS:deleteContentFiles",
        "Content.WS:deleteContents",
        "Content.WS:deleteCourseTOCs",
        "Content.WS:deleteLinks",
        "Content.WS:getContentFiles",
        "Content.WS:getFilteredContent",
        "Content.WS:getFilteredCourseStatus",
        "Content.WS:getLinksByReferredToType",
        "Content.WS:getLinksByReferrerType",
        "Content.WS:getReviewStatusByCourseId",
        "Content.WS:getTOCsByCourseId",
        "Content.WS:loadContent",
        "Content.WS:removeContent",
        "Content.WS:saveContent",
        "Content.WS:saveContentsReviewed",
        "Content.WS:saveCourseTOC",
        "Content.WS:saveLink",
    ]
    # Context.WS
    context_ws = [
        "Context.WS:emulateUser",
        "Context.WS:getMemberships",
        "Context.WS:getMyMemberships",
    ]
    # Course.WS
    course_ws = [
        "Course.WS:changeCourseBatchUid",
        "Course.WS:changeCourseCategoryBatchUid",
        "Course.WS:changeCourseDataSourceId",
        "Course.WS:changeOrgBatchUid",
        "Course.WS:changeOrgCategoryBatchUid",
        "Course.WS:changeOrgDataSourceId",
        "Course.WS:createCourse",
        "Course.WS:createOrg",
        "Course.WS:deleteCartridge",
        "Course.WS:deleteCourse",
        "Course.WS:deleteCourseCategory",
        "Course.WS:deleteCourseCategoryMembership",
        "Course.WS:deleteGroup",
        "Course.WS:deleteOrg",
        "Course.WS:deleteOrgCategory",
        "Course.WS:deleteOrgCategoryMembership",
        "Course.WS:deleteStaffInfo",
        "Course.WS:getAvailableGroupTools",
        "Course.WS:getCartridge",
        "Course.WS:getCategories",
        "Course.WS:getCategoryMembership",
        "Course.WS:getClassifications",
        "Course.WS:getCourse",
        "Course.WS:getGroup",
        "Course.WS:getOrg",
        "Course.WS:getStaffInfo",
        "Course.WS:saveCartridge",
        "Course.WS:saveCourse",
        "Course.WS:saveCourseCategory",
        "Course.WS:saveCourseCategoryMembership",
        "Course.WS:saveGroup",
        "Course.WS:saveOrgCategory",
        "Course.WS:saveOrgCategoryMembership",
        "Course.WS:saveStaffInfo",
        "Course.WS:updateCourse",
        "Course.WS:updateOrg",
        "Course.WS:loadCoursesInTerm",
        "Course.WS:addCourseToTerm",
        "Course.WS:removeCourseFromTerm",
        "Course.WS:loadTerm",
        "Course.WS:loadTermByCourseId",
        "Course.WS:saveTerm",
        "Course.WS:removeTerm",
        "Course.WS:loadTerms",
        "Course.WS:loadTermsByName",
    ]
    # CourseMembership.WS
    course_membership_ws = [
        "CourseMembership.WS:deleteCourseMembership",
        "CourseMembership.WS:deleteGroupMembership",
        "CourseMembership.WS:getCourseMembership",
        "CourseMembership.WS:getCourseRoles",
        "CourseMembership.WS:getGroupMembership",
        "CourseMembership.WS:saveCourseMembership",
        "CourseMembership.WS:saveGroupMembership",
    ]
    # Gradebook.WS
    gradebook_ws = [
        "Gradebook.WS:deleteAttempts",
        "Gradebook.WS:deleteColumns",
        "Gradebook.WS:deleteGradebookTypes",
        "Gradebook.WS:deleteGrades",
        "Gradebook.WS:deleteGradingSchemas",
        "Gradebook.WS:getAttempts",
        "Gradebook.WS:getGradebookColumns",
        "Gradebook.WS:getGradebookTypes",
        "Gradebook.WS:getGrades",
        "Gradebook.WS:getGradingSchemas",
        "Gradebook.WS:saveAttempts",
        "Gradebook.WS:saveColumns",
        "Gradebook.WS:saveGradebookTypes",
        "Gradebook.WS:saveGrades",
        "Gradebook.WS:saveGradingSchemas",
        "Gradebook.WS:updateColumnAttribute",
    ]
    # User.WS
    user_ws = [
        "User.WS:changeUserBatchUid",
        "User.WS:changeUserDataSourceId",
        "User.WS:deleteAddressBookEntry",
        "User.WS:deleteObserverAssociation",
        "User.WS:deleteUser",
        "User.WS:deleteUserByInstitutionRole",
        "User.WS:getAddressBookEntry",
        "User.WS:getInstitutionRoles",
        "User.WS:getObservee",
        "User.WS:getSystemRoles",
        "User.WS:getUser",
        "User.WS:getUserInstitutionRoles",
        "User.WS:saveAddressBookEntry",
        "User.WS:saveObserverAssociation",
        "User.WS:saveUser",
    ]
    # Util.WS
    util_ws = [
        "Util.WS:checkEntitlement",
        "Util.WS:deleteSetting",
        "Util.WS:getDataSources",
        "Util.WS:loadSetting",
        "Util.WS:saveSetting",
    ]
    return (announcement_ws + calendar_ws + content_ws + context_ws +
            course_ws + course_membership_ws + gradebook_ws + user_ws +
            util_ws)
def generate_nonce(length=8):
    """Generate a random numeric nonce for the WS-Security header.

    Args:
        length (int): number of decimal digits to generate (default 8).

    Returns:
        str: a string of ``length`` random decimal digits.

    The nonce is part of the WSSE UsernameToken and should not be
    predictable, so it is drawn from the ``secrets`` CSPRNG rather than
    the ``random`` module.
    """
    return ''.join(str(secrets.randbelow(10)) for _ in range(length))
def createHeaders(action, username, password, endpoint):
"""Create the soap headers section of the XML to send to Blackboard Learn Web Service Endpoints.

Returns a list of suds Elements: WS-Addressing Action, MessageID,
ReplyTo and To headers plus the WS-Security header.

NOTE(review): this reads the module-level global ``url_header``, which is
only defined inside the ``__main__`` block below — calling this function
from an importing module would raise NameError. Consider passing the base
URL in as a parameter.
"""
# Namespaces
# NOTE(review): xsd_ns and wsu_ns are defined but unused here.
xsd_ns = ('xsd', 'http://www.w3.org/2001/XMLSchema')
wsu_ns = ('wsu',"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd")
wsa_ns = ('wsa', 'http://schemas.xmlsoap.org/ws/2004/03/addressing')
# Set the action. This is a string passed to this function and corresponds to the method being called
# For example, if calling Context.WS.initialize(), this should be set to 'initialize'
wsa_action = Element('Action', ns=wsa_ns).setText(action)
# Each method requires a unique identifier. We are using Python's built-in uuid generation tool.
wsa_uuid = Element('MessageID', ns=wsa_ns).setText('uuid:' + str(uuid1()))
# Setting the replyTo address == to the SOAP role anonymous
wsa_address = Element('Address', ns=wsa_ns).setText('http://schemas.xmlsoap.org/ws/2004/03/addressing/role/anonymous')
wsa_replyTo = Element('ReplyTo', ns=wsa_ns).insert(wsa_address)
# Setting the To element to the endpoint being called
wsa_to = Element('To', ns=wsa_ns).setText(url_header + endpoint)
# Generate the WS_Security headers necessary to authenticate to Learn's Web Services
# To create a session, ContextWS.initialize() must first be called with username session and password no session.
# This will return a session Id, which then becomes the password for subsequent calls.
security = createWSSecurityHeader(username, password)
# Return the soapheaders that can be added to the soap call
return([wsa_action, wsa_uuid, wsa_replyTo, wsa_to, security])
def createWSSecurityHeader(username, password):
    """Generate the WS-Security (WSSE) header for Blackboard Web Service calls.

    SUDS ships a WSSE helper, but it is not flexible enough to produce the
    header layout Blackboard Learn expects, so the elements are built by
    hand here.

    Args:
        username: always 'session' for Learn's session-based authentication.
        password: 'nosession' on the initial Context.WS.initialize() call,
            then the session id returned by that call on subsequent calls.

    Returns:
        A suds ``Element`` for the <wsse:Security> SOAP header.
    """
    # Namespaces from the WS-Security 1.0 specification.
    wsse = ('wsse', 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd')
    wsu = ('wsu', 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd')
    # <wsse:Security> root; mustUnderstand makes the server reject the call
    # rather than silently ignore the header.
    security = Element('Security', ns=wsse)
    security.set('SOAP-ENV:mustUnderstand', '1')
    # <wsse:UsernameToken> with the wsu namespace declared (needed for the
    # Created element) and a unique wsu:Id.
    usernametoken = Element('UsernameToken', ns=wsse)
    usernametoken.set('xmlns:wsu', 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd')
    usernametoken.set('wsu:Id', 'SecurityToken-' + str(uuid1()))
    # Username ('session') and password, declared as PasswordText.
    uname = Element('Username', ns=wsse).setText(username)
    passwd = Element('Password', ns=wsse).setText(password)
    passwd.set('Type', 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText')
    # Nonce to make each message unique.
    nonce = Element('Nonce', ns=wsse).setText(str(generate_nonce(24)))
    # BUGFIX: wsu:Created must be an xsd:dateTime in UTC, e.g.
    # '2015-04-14T12:34:56Z'.  The previous str(datetime.utcnow()) produced
    # '2015-04-14 12:34:56.789012' (space separator, no timezone designator),
    # which is not a valid WS-Security timestamp.
    created = Element('Created', ns=wsu).setText(datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'))
    # suds inserts children at the top, and Learn requires a specific element
    # order, so children are inserted in reverse.
    usernametoken.insert(created)
    usernametoken.insert(nonce)
    usernametoken.insert(passwd)
    usernametoken.insert(uname)
    security.insert(usernametoken)
    # <wsu:Timestamp> with its own unique wsu:Id; inserted after the token so
    # it serializes before it.
    timestamp = Element('Timestamp', ns=wsu)
    timestamp.set('wsu:Id', 'Timestamp-' + str(uuid1()))
    security.insert(timestamp)
    # Return the assembled security header.
    return security
# Script entry point: authenticates to a local Blackboard Learn instance,
# optionally registers this tool, then exercises Context.WS and User.WS.
if __name__ == '__main__':
"""
This is the main class for the Blackboard Soap Web Services Python sample code.
If I were to turn this into a production-level tool, much of this would be abstracted into more manageable chunks.
"""
# If True, extra information will be printed to the console
DEBUG = True;
# If True, register tool and exit
REGISTER = False;
# Set up logging. logging level is set to DEBUG on the suds tools in order to show you what's happening along the way.
# It will give you SOAP messages and responses, which will help you develop your own tool.
# (Left commented out by default; uncomment while debugging SOAP traffic.)
#logging.basicConfig(level=logging.INFO)
#logging.getLogger('suds.client').setLevel(logging.DEBUG)
#logging.getLogger('suds.transport').setLevel(logging.DEBUG)
#logging.getLogger('suds.xsd.schema').setLevel(logging.DEBUG)
#logging.getLogger('suds.wsdl').setLevel(logging.DEBUG)
# Necessary system-setting for handling large complex WSDLs
sys.setrecursionlimit(10000)
# Set up the base URL for Web Service endpoints.
# NOTE(review): url_header is a module-level global that createHeaders()
# depends on — it only exists when this file runs as a script.
protocol = 'https'
server = 'localhost:9887'
service_path = 'webapps/ws/services'
url_header = protocol + "://" + server + "/" + service_path + "/"
vendor_id = "bbdn"
program_id = "Test"
registration_password = ""
tool_description = "Test registration for Liverpool"
shared_password = "shared-password"
# This is the pattern for the SUDS library to dynamically create your Web Service code.
# There are caching capabilities so that you can avoid this overhead everytime your script runs.
# I have included the code for each endpoint, although only the ones I need are uncommented.
url = url_header + 'Context.WS?wsdl'
contextWS = Client(url, autoblend=True)
if DEBUG == True:
print(contextWS)
# Initialize headers and then call createHeaders to generate the soap headers with WSSE bits.
headers = []
headers = createHeaders('initialize', "session", "nosession", 'Context.WS')
# Add Headers and WS-Security to client. Set port to default value, otherwise, you must add to service call
contextWS.set_options(soapheaders=headers, port='Context.WSSOAP12port_https')
# Initialize Context
sessionId = contextWS.service.initialize()
if DEBUG == True:
print(sessionId)
if REGISTER == True:
# Initialize headers and then call createHeaders to generate the soap headers with WSSE bits.
headers = []
headers = createHeaders('registerTool', 'session', sessionId, 'Context.WS')
# Add Headers and WS-Security to client. Set port to default value, otherwise, you must add to service call
contextWS.set_options(soapheaders=headers, port='Context.WSSOAP12port_https')
# Register Tool.
# NOTE(review): the factory-created value below is immediately overwritten
# by the registerTool() result — the first assignment is a dead store.
registered = contextWS.factory.create("ns4:RegisterToolResultVO")
registered = contextWS.service.registerTool(vendor_id, program_id, registration_password, tool_description, shared_password, getEntitlements(), [])
if DEBUG == True:
print(registered)
else:
# Initialize headers and then call createHeaders to generate the soap headers with WSSE bits.
headers = []
headers = createHeaders('loginTool', 'session', sessionId, 'Context.WS')
# Add Headers and WS-Security to client. Set port to default value, otherwise, you must add to service call
contextWS.set_options(soapheaders=headers, port='Context.WSSOAP12port_https')
# Login as tool.
logged_in = contextWS.service.loginTool(shared_password, vendor_id, program_id, "", 3600)
if DEBUG == True:
print(logged_in)
url = url_header + 'User.WS?wsdl'
userWS = Client(url, autoblend=True)
if DEBUG == True:
print(userWS)
# Initialize headers and then call createHeaders to generate the soap headers with WSSE bits.
headers = []
headers = createHeaders('initializeUserWS', "session", sessionId, 'User.WS')
# Add Headers and WS-Security to client. Set port to default value, otherwise, you must add to service call
userWS.set_options(soapheaders=headers, port='User.WSSOAP12port_https')
# Initialize User
user_init = userWS.service.initializeUserWS(False)
if DEBUG == True:
print(user_init)
# Initialize headers and then call createHeaders to generate the soap headers with WSSE bits.
headers = []
headers = createHeaders('logout', "session", sessionId, 'Context.WS')
# Add Headers and WS-Security to client. Set port to default value, otherwise, you must add to service call
contextWS.set_options(soapheaders=headers, port='Context.WSSOAP12port_https')
# Log out and invalidate the Learn session.
logged_out = contextWS.service.logout()
if DEBUG == True:
print(logged_out) | unknown | codeparrot/codeparrot-clean | |
# Copyright 2017, Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# EDITING INSTRUCTIONS
# This file was generated from the file
# https://github.com/google/googleapis/blob/master/google/monitoring/v3/metric_service.proto,
# and updates to that file get reflected here through a refresh process.
# For the short term, the refresh process will only be runnable by Google engineers.
#
# The only allowed edits are to method and file documentation. A 3-way
# merge preserves those additions if the generated source changes.
"""Accesses the google.monitoring.v3 MetricService API."""
import collections
import json
import os
import pkg_resources
import platform
from google.gax import api_callable
from google.gax import config
from google.gax import path_template
import google.gax
from google.api import metric_pb2 as api_metric_pb2
from google.cloud.gapic.monitoring.v3 import enums
from google.cloud.proto.monitoring.v3 import common_pb2
from google.cloud.proto.monitoring.v3 import metric_pb2 as v3_metric_pb2
from google.cloud.proto.monitoring.v3 import metric_service_pb2
_PageDesc = google.gax.PageDescriptor
class MetricServiceClient(object):
"""
Manages metric descriptors, monitored resource descriptors, and
time series data.
"""
SERVICE_ADDRESS = 'monitoring.googleapis.com'
"""The default address of the service."""
DEFAULT_SERVICE_PORT = 443
"""The default port of the service."""
_PAGE_DESCRIPTORS = {
'list_monitored_resource_descriptors':
_PageDesc('page_token', 'next_page_token', 'resource_descriptors'),
'list_metric_descriptors': _PageDesc('page_token', 'next_page_token',
'metric_descriptors'),
'list_time_series': _PageDesc('page_token', 'next_page_token',
'time_series')
}
# The scopes needed to make gRPC calls to all of the methods defined in
# this service
_ALL_SCOPES = (
'https://www.googleapis.com/auth/cloud-platform',
'https://www.googleapis.com/auth/monitoring',
'https://www.googleapis.com/auth/monitoring.read',
'https://www.googleapis.com/auth/monitoring.write', )
_PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}')
_METRIC_DESCRIPTOR_PATH_TEMPLATE = path_template.PathTemplate(
'projects/{project}/metricDescriptors/{metric_descriptor=**}')
_MONITORED_RESOURCE_DESCRIPTOR_PATH_TEMPLATE = path_template.PathTemplate(
'projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}'
)
@classmethod
def project_path(cls, project):
"""Returns a fully-qualified project resource name string.

Renders ``projects/{project}`` from the class-level path template.
"""
return cls._PROJECT_PATH_TEMPLATE.render({'project': project, })
@classmethod
def metric_descriptor_path(cls, project, metric_descriptor):
"""Returns a fully-qualified metric_descriptor resource name string.

Renders ``projects/{project}/metricDescriptors/{metric_descriptor=**}``.
"""
return cls._METRIC_DESCRIPTOR_PATH_TEMPLATE.render({
'project': project,
'metric_descriptor': metric_descriptor,
})
@classmethod
def monitored_resource_descriptor_path(cls, project,
monitored_resource_descriptor):
"""Returns a fully-qualified monitored_resource_descriptor resource name string.

Renders
``projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}``.
"""
return cls._MONITORED_RESOURCE_DESCRIPTOR_PATH_TEMPLATE.render({
'project': project,
'monitored_resource_descriptor': monitored_resource_descriptor,
})
@classmethod
def match_project_from_project_name(cls, project_name):
"""Parses the project from a project resource.

Args:
project_name (string): A fully-qualified path representing a project
resource.

Returns:
A string representing the project.
"""
# NOTE(review): assumes PathTemplate.match raises on names that do not
# match the template — confirm against google.gax.path_template.
return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project')
@classmethod
def match_project_from_metric_descriptor_name(cls, metric_descriptor_name):
"""Parses the project from a metric_descriptor resource.

Args:
metric_descriptor_name (string): A fully-qualified path representing a metric_descriptor
resource.

Returns:
A string representing the project.
"""
# Extracts the {project} segment of the metric_descriptor template.
return cls._METRIC_DESCRIPTOR_PATH_TEMPLATE.match(
metric_descriptor_name).get('project')
@classmethod
def match_metric_descriptor_from_metric_descriptor_name(
cls, metric_descriptor_name):
"""Parses the metric_descriptor from a metric_descriptor resource.
Args:
metric_descriptor_name (string): A fully-qualified path representing a metric_descriptor
resource.
Returns:
A string representing the metric_descriptor.
"""
return cls._METRIC_DESCRIPTOR_PATH_TEMPLATE.match(
metric_descriptor_name).get('metric_descriptor')
@classmethod
def match_project_from_monitored_resource_descriptor_name(
cls, monitored_resource_descriptor_name):
"""Parses the project from a monitored_resource_descriptor resource.
Args:
monitored_resource_descriptor_name (string): A fully-qualified path representing a monitored_resource_descriptor
resource.
Returns:
A string representing the project.
"""
return cls._MONITORED_RESOURCE_DESCRIPTOR_PATH_TEMPLATE.match(
monitored_resource_descriptor_name).get('project')
@classmethod
def match_monitored_resource_descriptor_from_monitored_resource_descriptor_name(
cls, monitored_resource_descriptor_name):
"""Parses the monitored_resource_descriptor from a monitored_resource_descriptor resource.
Args:
monitored_resource_descriptor_name (string): A fully-qualified path representing a monitored_resource_descriptor
resource.
Returns:
A string representing the monitored_resource_descriptor.
"""
return cls._MONITORED_RESOURCE_DESCRIPTOR_PATH_TEMPLATE.match(
monitored_resource_descriptor_name).get(
'monitored_resource_descriptor')
    def __init__(self,
                 service_path=SERVICE_ADDRESS,
                 port=DEFAULT_SERVICE_PORT,
                 channel=None,
                 credentials=None,
                 ssl_credentials=None,
                 scopes=None,
                 client_config=None,
                 app_name=None,
                 app_version='',
                 lib_name=None,
                 lib_version='',
                 metrics_headers=()):
        """Constructor.
        Args:
            service_path (string): The domain name of the API remote host.
            port (int): The port on which to connect to the remote host.
            channel (:class:`grpc.Channel`): A ``Channel`` instance through
                which to make calls.
            credentials (object): The authorization credentials to attach to
                requests. These credentials identify this application to the
                service.
            ssl_credentials (:class:`grpc.ChannelCredentials`): A
                ``ChannelCredentials`` instance for use with an SSL-enabled
                channel.
            scopes (list[string]): A list of OAuth2 scopes to attach to requests.
            client_config (dict):
                A dictionary for call options for each method. See
                :func:`google.gax.construct_settings` for the structure of
                this data. Falls back to the default config if not specified
                or the specified config is missing data points.
            app_name (string): The name of the application calling
                the service. Recommended for analytics purposes.
                NOTE(review): not referenced anywhere in this constructor's
                body; apparently retained only for signature compatibility.
            app_version (string): The version of the application calling
                the service. Recommended for analytics purposes.
                NOTE(review): also unused in the body, like ``app_name``.
            lib_name (string): The API library software used for calling
                the service. (Unless you are writing an API client itself,
                leave this as default.)
            lib_version (string): The API library software version used
                for calling the service. (Unless you are writing an API client
                itself, leave this as default.)
            metrics_headers (dict): A dictionary of values for tracking
                client library metrics. Ultimately serializes to a string
                (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be
                considered private.
        Returns:
            A MetricServiceClient object.
        """
        # Unless the calling application specifically requested
        # OAuth scopes, request everything.
        if scopes is None:
            scopes = self._ALL_SCOPES
        # Initialize an empty client config, if none is set.
        if client_config is None:
            client_config = {}
        # Initialize metrics_headers as an ordered dictionary
        # (cuts down on cardinality of the resulting string slightly).
        metrics_headers = collections.OrderedDict(metrics_headers)
        metrics_headers['gl-python'] = platform.python_version()
        # The library may or may not be set, depending on what is
        # calling this client. Newer client libraries set the library name
        # and version.
        if lib_name:
            metrics_headers[lib_name] = lib_version
        # Finally, track the GAPIC package version.
        metrics_headers['gapic'] = pkg_resources.get_distribution(
            'gapic-google-cloud-monitoring-v3', ).version
        # Load the configuration defaults.
        # The JSON file ships alongside this module and holds the per-method
        # retry/timeout defaults that ``client_config`` may override.
        default_client_config = json.loads(
            pkg_resources.resource_string(
                __name__, 'metric_service_client_config.json').decode())
        defaults = api_callable.construct_settings(
            'google.monitoring.v3.MetricService',
            default_client_config,
            client_config,
            config.STATUS_CODE_NAMES,
            metrics_headers=metrics_headers,
            page_descriptors=self._PAGE_DESCRIPTORS, )
        # Create the raw gRPC stub over which every RPC below is made.
        self.metric_service_stub = config.create_stub(
            metric_service_pb2.MetricServiceStub,
            channel=channel,
            service_path=service_path,
            service_port=port,
            credentials=credentials,
            scopes=scopes,
            ssl_credentials=ssl_credentials)
        # Wrap each stub method in a GAX callable that applies the settings
        # (retries, timeouts, page streaming) resolved above. The public
        # service methods delegate to these private callables.
        self._list_monitored_resource_descriptors = api_callable.create_api_call(
            self.metric_service_stub.ListMonitoredResourceDescriptors,
            settings=defaults['list_monitored_resource_descriptors'])
        self._get_monitored_resource_descriptor = api_callable.create_api_call(
            self.metric_service_stub.GetMonitoredResourceDescriptor,
            settings=defaults['get_monitored_resource_descriptor'])
        self._list_metric_descriptors = api_callable.create_api_call(
            self.metric_service_stub.ListMetricDescriptors,
            settings=defaults['list_metric_descriptors'])
        self._get_metric_descriptor = api_callable.create_api_call(
            self.metric_service_stub.GetMetricDescriptor,
            settings=defaults['get_metric_descriptor'])
        self._create_metric_descriptor = api_callable.create_api_call(
            self.metric_service_stub.CreateMetricDescriptor,
            settings=defaults['create_metric_descriptor'])
        self._delete_metric_descriptor = api_callable.create_api_call(
            self.metric_service_stub.DeleteMetricDescriptor,
            settings=defaults['delete_metric_descriptor'])
        self._list_time_series = api_callable.create_api_call(
            self.metric_service_stub.ListTimeSeries,
            settings=defaults['list_time_series'])
        self._create_time_series = api_callable.create_api_call(
            self.metric_service_stub.CreateTimeSeries,
            settings=defaults['create_time_series'])
# Service calls
def list_monitored_resource_descriptors(self,
name,
filter_='',
page_size=0,
options=None):
"""
Lists monitored resource descriptors that match a filter. This method does not require a Stackdriver account.
Example:
>>> from google.cloud.gapic.monitoring.v3 import metric_service_client
>>> from google.gax import CallOptions, INITIAL_PAGE
>>> api = metric_service_client.MetricServiceClient()
>>> name = api.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in api.list_monitored_resource_descriptors(name):
>>> # process element
>>> pass
>>>
>>> # Or iterate over results one page at a time
>>> for page in api.list_monitored_resource_descriptors(name, options=CallOptions(page_token=INITIAL_PAGE)):
>>> for element in page:
>>> # process element
>>> pass
Args:
name (string): The project on which to execute the request. The format is
``\"projects/{project_id_or_number}\"``.
filter_ (string): An optional `filter <https://cloud.google.com/monitoring/api/v3/filters>`_ describing
the descriptors to be returned. The filter can reference
the descriptor's type and labels. For example, the
following filter returns only Google Compute Engine descriptors
that have an ``id`` label:
::
resource.type = starts_with(\"gce_\") AND resource.label:id
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.gax.PageIterator` instance. By default, this
is an iterable of :class:`google.api.monitored_resource_pb2.MonitoredResourceDescriptor` instances.
This object can also be configured to iterate over the pages
of the response through the `CallOptions` parameter.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = metric_service_pb2.ListMonitoredResourceDescriptorsRequest(
name=name, filter=filter_, page_size=page_size)
return self._list_monitored_resource_descriptors(request, options)
def get_monitored_resource_descriptor(self, name, options=None):
"""
Gets a single monitored resource descriptor. This method does not require a Stackdriver account.
Example:
>>> from google.cloud.gapic.monitoring.v3 import metric_service_client
>>> api = metric_service_client.MetricServiceClient()
>>> name = api.monitored_resource_descriptor_path('[PROJECT]', '[MONITORED_RESOURCE_DESCRIPTOR]')
>>> response = api.get_monitored_resource_descriptor(name)
Args:
name (string): The monitored resource descriptor to get. The format is
``\"projects/{project_id_or_number}/monitoredResourceDescriptors/{resource_type}\"``.
The ``{resource_type}`` is a predefined type, such as
``cloudsql_database``.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.api.monitored_resource_pb2.MonitoredResourceDescriptor` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = metric_service_pb2.GetMonitoredResourceDescriptorRequest(
name=name)
return self._get_monitored_resource_descriptor(request, options)
def list_metric_descriptors(self,
name,
filter_='',
page_size=0,
options=None):
"""
Lists metric descriptors that match a filter. This method does not require a Stackdriver account.
Example:
>>> from google.cloud.gapic.monitoring.v3 import metric_service_client
>>> from google.gax import CallOptions, INITIAL_PAGE
>>> api = metric_service_client.MetricServiceClient()
>>> name = api.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in api.list_metric_descriptors(name):
>>> # process element
>>> pass
>>>
>>> # Or iterate over results one page at a time
>>> for page in api.list_metric_descriptors(name, options=CallOptions(page_token=INITIAL_PAGE)):
>>> for element in page:
>>> # process element
>>> pass
Args:
name (string): The project on which to execute the request. The format is
``\"projects/{project_id_or_number}\"``.
filter_ (string): If this field is empty, all custom and
system-defined metric descriptors are returned.
Otherwise, the `filter <https://cloud.google.com/monitoring/api/v3/filters>`_
specifies which metric descriptors are to be
returned. For example, the following filter matches all
`custom metrics <https://cloud.google.com/monitoring/custom-metrics>`_:
::
metric.type = starts_with(\"custom.googleapis.com/\")
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.gax.PageIterator` instance. By default, this
is an iterable of :class:`google.api.metric_pb2.MetricDescriptor` instances.
This object can also be configured to iterate over the pages
of the response through the `CallOptions` parameter.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = metric_service_pb2.ListMetricDescriptorsRequest(
name=name, filter=filter_, page_size=page_size)
return self._list_metric_descriptors(request, options)
def get_metric_descriptor(self, name, options=None):
"""
Gets a single metric descriptor. This method does not require a Stackdriver account.
Example:
>>> from google.cloud.gapic.monitoring.v3 import metric_service_client
>>> api = metric_service_client.MetricServiceClient()
>>> name = api.metric_descriptor_path('[PROJECT]', '[METRIC_DESCRIPTOR]')
>>> response = api.get_metric_descriptor(name)
Args:
name (string): The metric descriptor on which to execute the request. The format is
``\"projects/{project_id_or_number}/metricDescriptors/{metric_id}\"``.
An example value of ``{metric_id}`` is
``\"compute.googleapis.com/instance/disk/read_bytes_count\"``.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.api.metric_pb2.MetricDescriptor` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = metric_service_pb2.GetMetricDescriptorRequest(name=name)
return self._get_metric_descriptor(request, options)
def create_metric_descriptor(self, name, metric_descriptor, options=None):
"""
Creates a new metric descriptor.
User-created metric descriptors define
`custom metrics <https://cloud.google.com/monitoring/custom-metrics>`_.
Example:
>>> from google.cloud.gapic.monitoring.v3 import metric_service_client
>>> from google.api import metric_pb2 as api_metric_pb2
>>> api = metric_service_client.MetricServiceClient()
>>> name = api.project_path('[PROJECT]')
>>> metric_descriptor = api_metric_pb2.MetricDescriptor()
>>> response = api.create_metric_descriptor(name, metric_descriptor)
Args:
name (string): The project on which to execute the request. The format is
``\"projects/{project_id_or_number}\"``.
metric_descriptor (:class:`google.api.metric_pb2.MetricDescriptor`): The new `custom metric <https://cloud.google.com/monitoring/custom-metrics>`_
descriptor.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.api.metric_pb2.MetricDescriptor` instance.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = metric_service_pb2.CreateMetricDescriptorRequest(
name=name, metric_descriptor=metric_descriptor)
return self._create_metric_descriptor(request, options)
def delete_metric_descriptor(self, name, options=None):
"""
Deletes a metric descriptor. Only user-created
`custom metrics <https://cloud.google.com/monitoring/custom-metrics>`_ can be deleted.
Example:
>>> from google.cloud.gapic.monitoring.v3 import metric_service_client
>>> api = metric_service_client.MetricServiceClient()
>>> name = api.metric_descriptor_path('[PROJECT]', '[METRIC_DESCRIPTOR]')
>>> api.delete_metric_descriptor(name)
Args:
name (string): The metric descriptor on which to execute the request. The format is
``\"projects/{project_id_or_number}/metricDescriptors/{metric_id}\"``.
An example of ``{metric_id}`` is:
``\"custom.googleapis.com/my_test_metric\"``.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = metric_service_pb2.DeleteMetricDescriptorRequest(name=name)
self._delete_metric_descriptor(request, options)
def list_time_series(self,
name,
filter_,
interval,
view,
aggregation=None,
order_by='',
page_size=0,
options=None):
"""
Lists time series that match a filter. This method does not require a Stackdriver account.
Example:
>>> from google.cloud.gapic.monitoring.v3 import metric_service_client
>>> from google.cloud.gapic.monitoring.v3 import enums
>>> from google.cloud.proto.monitoring.v3 import common_pb2
>>> from google.gax import CallOptions, INITIAL_PAGE
>>> api = metric_service_client.MetricServiceClient()
>>> name = api.project_path('[PROJECT]')
>>> filter_ = ''
>>> interval = common_pb2.TimeInterval()
>>> view = enums.ListTimeSeriesRequest.TimeSeriesView.FULL
>>>
>>> # Iterate over all results
>>> for element in api.list_time_series(name, filter_, interval, view):
>>> # process element
>>> pass
>>>
>>> # Or iterate over results one page at a time
>>> for page in api.list_time_series(name, filter_, interval, view, options=CallOptions(page_token=INITIAL_PAGE)):
>>> for element in page:
>>> # process element
>>> pass
Args:
name (string): The project on which to execute the request. The format is
\"projects/{project_id_or_number}\".
filter_ (string): A `monitoring filter <https://cloud.google.com/monitoring/api/v3/filters>`_ that specifies which time
series should be returned. The filter must specify a single metric type,
and can additionally specify metric labels and other information. For
example:
::
metric.type = \"compute.googleapis.com/instance/cpu/usage_time\" AND
metric.label.instance_name = \"my-instance-name\"
interval (:class:`google.cloud.proto.monitoring.v3.common_pb2.TimeInterval`): The time interval for which results should be returned. Only time series
that contain data points in the specified interval are included
in the response.
aggregation (:class:`google.cloud.proto.monitoring.v3.common_pb2.Aggregation`): By default, the raw time series data is returned.
Use this field to combine multiple time series for different
views of the data.
order_by (string): Specifies the order in which the points of the time series should
be returned. By default, results are not ordered. Currently,
this field must be left blank.
view (enum :class:`google.cloud.gapic.monitoring.v3.enums.ListTimeSeriesRequest.TimeSeriesView`): Specifies which information is returned about the time series.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Returns:
A :class:`google.gax.PageIterator` instance. By default, this
is an iterable of :class:`google.cloud.proto.monitoring.v3.metric_pb2.TimeSeries` instances.
This object can also be configured to iterate over the pages
of the response through the `CallOptions` parameter.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
if aggregation is None:
aggregation = common_pb2.Aggregation()
# Create the request object.
request = metric_service_pb2.ListTimeSeriesRequest(
name=name,
filter=filter_,
interval=interval,
view=view,
aggregation=aggregation,
order_by=order_by,
page_size=page_size)
return self._list_time_series(request, options)
def create_time_series(self, name, time_series, options=None):
"""
Creates or adds data to one or more time series.
The response is empty if all time series in the request were written.
If any time series could not be written, a corresponding failure message is
included in the error response.
Example:
>>> from google.cloud.gapic.monitoring.v3 import metric_service_client
>>> from google.cloud.proto.monitoring.v3 import metric_pb2 as v3_metric_pb2
>>> api = metric_service_client.MetricServiceClient()
>>> name = api.project_path('[PROJECT]')
>>> time_series = []
>>> api.create_time_series(name, time_series)
Args:
name (string): The project on which to execute the request. The format is
``\"projects/{project_id_or_number}\"``.
time_series (list[:class:`google.cloud.proto.monitoring.v3.metric_pb2.TimeSeries`]): The new data to be added to a list of time series.
Adds at most one data point to each of several time series. The new data
point must be more recent than any other point in its time series. Each
``TimeSeries`` value must fully specify a unique time series by supplying
all label values for the metric and the monitored resource.
options (:class:`google.gax.CallOptions`): Overrides the default
settings for this call, e.g, timeout, retries etc.
Raises:
:exc:`google.gax.errors.GaxError` if the RPC is aborted.
:exc:`ValueError` if the parameters are invalid.
"""
# Create the request object.
request = metric_service_pb2.CreateTimeSeriesRequest(
name=name, time_series=time_series)
self._create_time_series(request, options) | unknown | codeparrot/codeparrot-clean | ||
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: BUSL-1.1
package stacksplugin1
import (
"context"
"errors"
"net/rpc"
"github.com/hashicorp/go-plugin"
"github.com/hashicorp/terraform-svchost/disco"
"github.com/hashicorp/terraform/internal/pluginshared"
"github.com/hashicorp/terraform/internal/rpcapi"
"github.com/hashicorp/terraform/internal/stacksplugin/stacksproto1"
"google.golang.org/grpc"
"google.golang.org/grpc/metadata"
)
// GRPCStacksPlugin is the go-plugin implementation, but only the client
// implementation exists in this package.
type GRPCStacksPlugin struct {
	plugin.GRPCPlugin
	// Metadata is attached as outgoing gRPC metadata on the context of
	// every call made through GRPCClient.
	Metadata metadata.MD
	// Impl is the plugin's client implementation.
	// NOTE(review): Impl is not referenced anywhere in this file.
	Impl pluginshared.CustomPluginClient
	// Services is the host service discovery object forwarded to the
	// GRPCStacksClient created by GRPCClient.
	Services *disco.Disco
	// ShutdownCh is forwarded to the GRPCStacksClient; presumably used
	// to signal shutdown — confirm against rpcapi.GRPCStacksClient.
	ShutdownCh <-chan struct{}
}
// Server always returns an error: this type implements only the gRPC
// side of go-plugin (GRPCPlugin), not the legacy net/rpc Plugin interface.
func (p *GRPCStacksPlugin) Server(*plugin.MuxBroker) (interface{}, error) {
	err := errors.New("stacksplugin only implements gRPC clients")
	return nil, err
}
// Client always returns an error: this type implements only the gRPC
// side of go-plugin (GRPCPlugin), not the legacy net/rpc Plugin interface.
func (p *GRPCStacksPlugin) Client(*plugin.MuxBroker, *rpc.Client) (interface{}, error) {
	err := errors.New("stacksplugin only implements gRPC clients")
	return nil, err
}
// GRPCClient returns a new gRPC client for interacting with the stacks
// plugin server over the given connection.
func (p *GRPCStacksPlugin) GRPCClient(ctx context.Context, broker *plugin.GRPCBroker, c *grpc.ClientConn) (interface{}, error) {
	// Attach the plugin metadata to every outgoing RPC made on this context.
	outgoingCtx := metadata.NewOutgoingContext(ctx, p.Metadata)
	client := &rpcapi.GRPCStacksClient{
		Client:     stacksproto1.NewCommandServiceClient(c),
		Broker:     broker,
		Services:   p.Services,
		Context:    outgoingCtx,
		ShutdownCh: p.ShutdownCh,
	}
	return client, nil
}
// GRPCServer always returns an error; we're only implementing the client
// interface, not the server. Unlike Server/Client above, this returns the
// errors.ErrUnsupported sentinel, which callers can detect with errors.Is.
func (p *GRPCStacksPlugin) GRPCServer(broker *plugin.GRPCBroker, s *grpc.Server) error {
	return errors.ErrUnsupported
}
import logging
from .enabled import EnabledExtensionManager
# Module-level logger; used below to report missing extensions during
# name-based dispatch.
LOG = logging.getLogger(__name__)
class DispatchExtensionManager(EnabledExtensionManager):
    """Loads all plugins and dispatches to a filtered subset on execution.

    This is useful for long-running processes that need to pass
    different inputs to different extensions.

    :param namespace: The namespace for the entry points.
    :type namespace: str
    :param check_func: Function to determine which extensions to load.
    :type check_func: callable
    :param invoke_on_load: Boolean controlling whether to invoke the
        object returned by the entry point after the driver is loaded.
    :type invoke_on_load: bool
    :param invoke_args: Positional arguments to pass when invoking
        the object returned by the entry point. Only used if invoke_on_load
        is True.
    :type invoke_args: tuple
    :param invoke_kwds: Named arguments to pass when invoking
        the object returned by the entry point. Only used if invoke_on_load
        is True.
    :type invoke_kwds: dict
    :param propagate_map_exceptions: Boolean controlling whether exceptions
        are propagated up through the map call or whether they are logged
        and then ignored.
    :type propagate_map_exceptions: bool
    """

    def map(self, filter_func, func, *args, **kwds):
        """Invoke func() for every extension accepted by filter_func().

        Both callables receive an
        :class:`~stevedore.extension.Extension` instance as their first
        argument, followed by ``*args`` and ``**kwds``::

            def filter_func(ext, *args, **kwds):
                ...  # return True to invoke func for this extension

            def func(ext, *args, **kwds):
                ...

        Exceptions raised from within func() are propagated up and
        processing stopped if self.propagate_map_exceptions is True,
        otherwise they are logged and ignored.

        :param filter_func: Callable to test each extension.
        :param func: Callable to invoke for each extension.
        :param args: Variable arguments to pass to func()
        :param kwds: Keyword arguments to pass to func()
        :returns: List of values returned from func()
        """
        if not self.extensions:
            # FIXME: Use a more specific exception class here.
            raise RuntimeError('No %s extensions found' % self.namespace)
        response = []
        for ext in self.extensions:
            if filter_func(ext, *args, **kwds):
                self._invoke_one_plugin(response.append, func, ext,
                                        args, kwds)
        return response

    def map_method(self, filter_func, method_name, *args, **kwds):
        """Invoke the named object method on every matching extension.

        This is equivalent to using :meth:`map` with func set to
        `lambda x: x.obj.method_name()`
        while being more convenient.

        Exceptions raised from within the called method are propagated up
        and processing stopped if self.propagate_map_exceptions is True,
        otherwise they are logged and ignored.

        .. versionadded:: 0.12

        :param filter_func: Callable to test each extension.
        :param method_name: The extension method name to call
            for each extension.
        :param args: Variable arguments to pass to method
        :param kwds: Keyword arguments to pass to method
        :returns: List of values returned from methods
        """
        return self.map(filter_func, self._call_extension_method,
                        method_name, *args, **kwds)
class NameDispatchExtensionManager(DispatchExtensionManager):
    """Loads all plugins and filters on execution.

    This is useful for long-running processes that need to pass
    different inputs to different extensions and can predict the name
    of the extensions before calling them.

    The check_func argument should return a boolean, with ``True``
    indicating that the extension should be loaded and made available
    and ``False`` indicating that the extension should be ignored.

    :param namespace: The namespace for the entry points.
    :type namespace: str
    :param check_func: Function to determine which extensions to load.
    :type check_func: callable
    :param invoke_on_load: Boolean controlling whether to invoke the
        object returned by the entry point after the driver is loaded.
    :type invoke_on_load: bool
    :param invoke_args: Positional arguments to pass when invoking
        the object returned by the entry point. Only used if invoke_on_load
        is True.
    :type invoke_args: tuple
    :param invoke_kwds: Named arguments to pass when invoking
        the object returned by the entry point. Only used if invoke_on_load
        is True. Defaults to an empty dict.
    :type invoke_kwds: dict
    :param propagate_map_exceptions: Boolean controlling whether exceptions
        are propagated up through the map call or whether they are logged
        and then ignored.
    :type propagate_map_exceptions: bool
    :param on_load_failure_callback: Callback function that will be called
        when a entrypoint can not be loaded. The arguments that will be
        provided when this is called (when an entrypoint fails to load) are
        (manager, entrypoint, exception)
    :type on_load_failure_callback: function
    :param verify_requirements: Use setuptools to enforce the
        dependencies of the plugin(s) being loaded. Defaults to False.
    :type verify_requirements: bool
    """

    def __init__(self, namespace, check_func, invoke_on_load=False,
                 invoke_args=(), invoke_kwds=None,
                 propagate_map_exceptions=False,
                 on_load_failure_callback=None,
                 verify_requirements=False):
        # BUG FIX: invoke_kwds previously defaulted to a mutable ``{}``
        # literal, which is evaluated once at definition time and shared
        # between every instance (so a mutation through one instance would
        # leak into the others). A None sentinel is backward-compatible:
        # callers that omitted the argument still get an empty dict.
        if invoke_kwds is None:
            invoke_kwds = {}
        super(NameDispatchExtensionManager, self).__init__(
            namespace=namespace,
            check_func=check_func,
            invoke_on_load=invoke_on_load,
            invoke_args=invoke_args,
            invoke_kwds=invoke_kwds,
            propagate_map_exceptions=propagate_map_exceptions,
            on_load_failure_callback=on_load_failure_callback,
            verify_requirements=verify_requirements,
        )

    def _init_plugins(self, extensions):
        # Build a name -> Extension index so map() can dispatch by name.
        super(NameDispatchExtensionManager, self)._init_plugins(extensions)
        self.by_name = dict((e.name, e) for e in self.extensions)

    def map(self, names, func, *args, **kwds):
        """Invoke func() for each extension whose name is in *names*.

        The signature for func() should be::

            def func(ext, *args, **kwds):
                pass

        The first argument to func(), 'ext', is the
        :class:`~stevedore.extension.Extension` instance.

        Exceptions raised from within func() are propagated up and
        processing stopped if self.propagate_map_exceptions is True,
        otherwise they are logged and ignored.

        :param names: List or set of name(s) of extension(s) to invoke.
        :param func: Callable to invoke for each extension.
        :param args: Variable arguments to pass to func()
        :param kwds: Keyword arguments to pass to func()
        :returns: List of values returned from func()
        """
        response = []
        for name in names:
            try:
                e = self.by_name[name]
            except KeyError:
                # Unknown names are skipped (and logged), not an error.
                LOG.debug('Missing extension %r being ignored', name)
            else:
                self._invoke_one_plugin(response.append, func, e, args, kwds)
        return response

    def map_method(self, names, method_name, *args, **kwds):
        """Invoke the named object method on each extension in *names*.

        This is equivalent of using :meth:`map` with func set to
        `lambda x: x.obj.method_name()`
        while being more convenient.

        Exceptions raised from within the called method are propagated up
        and processing stopped if self.propagate_map_exceptions is True,
        otherwise they are logged and ignored.

        .. versionadded:: 0.12

        :param names: List or set of name(s) of extension(s) to invoke.
        :param method_name: The extension method name
            to call for each extension.
        :param args: Variable arguments to pass to method
        :param kwds: Keyword arguments to pass to method
        :returns: List of values returned from methods
        """
        return self.map(names, self._call_extension_method,
                        method_name, *args, **kwds)
/* NOLINT(build/header_guard) */
/* Copyright 2018 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* template parameters: FN, JUMP, NUMBUCKETS, MASK, CHUNKLEN */
/* NUMBUCKETS / (MASK + 1) = probability of storing and using hash code. */
/* JUMP = skip bytes for speedup */
/* Rolling hash for long distance long string matches. Stores one position
per bucket, bucket key is computed over a long region. */
/* Expands to the macro-generated type/function family name for this hasher
   instantiation. */
#define HashRolling HASHER()
/* Multiplier of the multiplicative rolling hash. */
static const uint32_t FN(kRollingHashMul32) = 69069;
/* Sentinel marking an empty bucket in the position table. */
static const uint32_t FN(kInvalidPos) = 0xffffffff;
/* This hasher uses a longer forward length, but returning a higher value here
   will hurt compression by the main hasher when combined with a composite
   hasher. The hasher tests for forward itself instead. */
static BROTLI_INLINE size_t FN(HashTypeLength)(void) { return 4; }
/* Number of input bytes that must be valid past a stored position. */
static BROTLI_INLINE size_t FN(StoreLookahead)(void) { return 4; }
/* Maps one input byte to a non-zero hash code. A 256-entry lookup table
   could be used instead, but simply adding 1 works about as well. */
static uint32_t FN(HashByte)(uint8_t byte) {
  return 1u + (uint32_t)byte;
}
/* One step of building the initial rolling hash: fold in byte "add"
   (uint32 arithmetic wraps modulo 2**32 by design). */
static uint32_t FN(HashRollingFunctionInitial)(uint32_t state, uint8_t add,
                                               uint32_t factor) {
  uint32_t next_state = state * factor + FN(HashByte)(add);
  return next_state;
}
/* One step of the rolling hash update: fold in byte "add" and cancel the
   contribution of the byte "rem" that leaves the window. All arithmetic
   deliberately wraps modulo 2**32. */
static uint32_t FN(HashRollingFunction)(uint32_t state, uint8_t add,
                                        uint8_t rem, uint32_t factor,
                                        uint32_t factor_remove) {
  uint32_t incoming = FN(HashByte)(add);
  uint32_t outgoing = factor_remove * FN(HashByte)(rem);
  return state * factor + incoming - outgoing;
}
typedef struct HashRolling {
  uint32_t state;          /* Current rolling hash value. */
  uint32_t* table;         /* NUMBUCKETS-entry position table; empty buckets
                              hold FN(kInvalidPos). Backed by common->extra. */
  size_t next_ix;          /* Next input position to process (see
                              StitchToPreviousBlock). */
  uint32_t chunk_len;      /* NOTE(review): not assigned in the code visible
                              here — confirm where it is initialized. */
  uint32_t factor;         /* Per-byte hash multiplier. */
  uint32_t factor_remove;  /* factor**(CHUNKLEN / JUMP) mod 2**32; cancels
                              the byte leaving the window. */
} HashRolling;
/* Resets the hasher state and empties every bucket of the position table. */
static void FN(Initialize)(
    HasherCommon* common, HashRolling* BROTLI_RESTRICT self,
    const BrotliEncoderParams* params) {
  size_t step;
  size_t bucket;
  self->state = 0;
  self->next_ix = 0;
  self->factor = FN(kRollingHashMul32);
  /* Compute the factor of the oldest byte to remove: factor**steps modulo
     0xffffffff (the multiplications rely on 32-bit overflow) */
  self->factor_remove = 1;
  for (step = 0; step < CHUNKLEN; step += JUMP) {
    self->factor_remove *= self->factor;
  }
  self->table = (uint32_t*)common->extra[0];
  for (bucket = 0; bucket < NUMBUCKETS; bucket++) {
    self->table[bucket] = FN(kInvalidPos);
  }
  BROTLI_UNUSED(params);
}
/* Primes the rolling state over the first CHUNKLEN bytes of the input,
   sampling every JUMP-th byte. Leaves the hasher unusable (state untouched)
   when the input is shorter than one chunk. */
static void FN(Prepare)(HashRolling* BROTLI_RESTRICT self, BROTLI_BOOL one_shot,
    size_t input_size, const uint8_t* BROTLI_RESTRICT data) {
  size_t i;
  /* Too small size, cannot use this hasher. */
  if (input_size < CHUNKLEN) return;
  self->state = 0;
  for (i = 0; i < CHUNKLEN; i += JUMP) {
    self->state = FN(HashRollingFunctionInitial)(
        self->state, data[i], self->factor);
  }
  BROTLI_UNUSED(one_shot);
}
/* Reports the size of the single external allocation backing `table`:
   one 32-bit position slot per bucket, independent of the input size. */
static BROTLI_INLINE void FN(HashMemAllocInBytes)(
    const BrotliEncoderParams* params, BROTLI_BOOL one_shot,
    size_t input_size, size_t* alloc_size) {
  alloc_size[0] = NUMBUCKETS * sizeof(uint32_t);
  BROTLI_UNUSED(params);
  BROTLI_UNUSED(one_shot);
  BROTLI_UNUSED(input_size);
}
/* No-op: this hasher records positions lazily inside FindLongestMatch
   (which writes self->table there), not via the Store API. */
static BROTLI_INLINE void FN(Store)(HashRolling* BROTLI_RESTRICT self,
    const uint8_t* BROTLI_RESTRICT data, const size_t mask, const size_t ix) {
  BROTLI_UNUSED(self);
  BROTLI_UNUSED(data);
  BROTLI_UNUSED(mask);
  BROTLI_UNUSED(ix);
}
/* No-op for the same reason as Store: positions are folded in by
   FindLongestMatch's catch-up loop. */
static BROTLI_INLINE void FN(StoreRange)(HashRolling* BROTLI_RESTRICT self,
    const uint8_t* BROTLI_RESTRICT data, const size_t mask,
    const size_t ix_start, const size_t ix_end) {
  BROTLI_UNUSED(self);
  BROTLI_UNUSED(data);
  BROTLI_UNUSED(mask);
  BROTLI_UNUSED(ix_start);
  BROTLI_UNUSED(ix_end);
}
/* Re-seeds the hasher when a new input block is appended to the ring buffer. */
static BROTLI_INLINE void FN(StitchToPreviousBlock)(
    HashRolling* BROTLI_RESTRICT self,
    size_t num_bytes, size_t position, const uint8_t* ringbuffer,
    size_t ring_buffer_mask) {
  /* In this case we must re-initialize the hasher from scratch from the
     current position. */
  size_t position_masked;
  size_t available = num_bytes;
  /* Round `position` up to the next JUMP-aligned offset, shrinking the
     available byte count accordingly (to zero if it would underflow). */
  if ((position & (JUMP - 1)) != 0) {
    size_t diff = JUMP - (position & (JUMP - 1));
    available = (diff > available) ? 0 : (available - diff);
    position += diff;
  }
  position_masked = position & ring_buffer_mask;
  /* wrapping around ringbuffer not handled. */
  if (available > ring_buffer_mask - position_masked) {
    available = ring_buffer_mask - position_masked;
  }
  /* Prepare silently does nothing if fewer than CHUNKLEN bytes remain. */
  FN(Prepare)(self, BROTLI_FALSE, available,
      ringbuffer + (position & ring_buffer_mask));
  self->next_ix = position;
  BROTLI_UNUSED(num_bytes);
}
/* No-op: FindLongestMatch below ignores the distance cache entirely. */
static BROTLI_INLINE void FN(PrepareDistanceCache)(
    HashRolling* BROTLI_RESTRICT self,
    int* BROTLI_RESTRICT distance_cache) {
  BROTLI_UNUSED(self);
  BROTLI_UNUSED(distance_cache);
}
/* Rolls the hash forward over every JUMP-aligned position from next_ix up to
   cur_ix, storing each visited position in its bucket; at cur_ix itself, the
   previous occupant of the bucket is probed as a match candidate. Improves
   `out` only if the candidate scores better than what is already there. */
static BROTLI_INLINE void FN(FindLongestMatch)(
    HashRolling* BROTLI_RESTRICT self,
    const BrotliEncoderDictionary* dictionary,
    const uint8_t* BROTLI_RESTRICT data, const size_t ring_buffer_mask,
    const int* BROTLI_RESTRICT distance_cache, const size_t cur_ix,
    const size_t max_length, const size_t max_backward,
    const size_t dictionary_distance, const size_t max_distance,
    HasherSearchResult* BROTLI_RESTRICT out) {
  const size_t cur_ix_masked = cur_ix & ring_buffer_mask;
  size_t pos;
  /* Only positions aligned to JUMP participate in this hasher. */
  if ((cur_ix & (JUMP - 1)) != 0) return;
  /* Not enough lookahead */
  if (max_length < CHUNKLEN) return;
  for (pos = self->next_ix; pos <= cur_ix; pos += JUMP) {
    uint32_t code = self->state & MASK;
    uint8_t rem = data[pos & ring_buffer_mask];
    uint8_t add = data[(pos + CHUNKLEN) & ring_buffer_mask];
    size_t found_ix = FN(kInvalidPos);
    self->state = FN(HashRollingFunction)(
        self->state, add, rem, self->factor, self->factor_remove);
    /* Only a NUMBUCKETS/(MASK+1) fraction of codes qualifies, subsampling
       which positions get stored (see note at the top of this section). */
    if (code < NUMBUCKETS) {
      found_ix = self->table[code];
      self->table[code] = (uint32_t)pos;
      /* Only probe for a match at the actual query position. */
      if (pos == cur_ix && found_ix != FN(kInvalidPos)) {
        /* The cast to 32-bit makes backward distances up to 4GB work even
           if cur_ix is above 4GB, despite using 32-bit values in the table. */
        size_t backward = (uint32_t)(cur_ix - found_ix);
        if (backward <= max_backward) {
          const size_t found_ix_masked = found_ix & ring_buffer_mask;
          const size_t len = FindMatchLengthWithLimit(&data[found_ix_masked],
                                                      &data[cur_ix_masked],
                                                      max_length);
          if (len >= 4 && len > out->len) {
            score_t score = BackwardReferenceScore(len, backward);
            if (score > out->score) {
              out->len = len;
              out->distance = backward;
              out->score = score;
              out->len_code_delta = 0;
            }
          }
        }
      }
    }
  }
  self->next_ix = cur_ix + JUMP;
  /* NOTE: this hasher does not search in the dictionary. It is used as
     backup-hasher, the main hasher already searches in it. */
  BROTLI_UNUSED(dictionary);
  BROTLI_UNUSED(distance_cache);
  BROTLI_UNUSED(dictionary_distance);
  BROTLI_UNUSED(max_distance);
}
#undef HashRolling | c | github | https://github.com/nodejs/node | deps/brotli/c/enc/hash_rolling_inc.h |
import {
CompletionKind,
createCompletionDetails,
createCompletionDetailsForSymbol,
getCompletionEntriesFromSymbols,
getConstraintOfTypeArgumentProperty,
getDefaultCommitCharacters,
getPropertiesForObjectExpression,
Log,
SortText,
} from "./_namespaces/ts.Completions.js";
import {
addToSeen,
altDirectorySeparator,
arrayFrom,
BinaryExpression,
CallLikeExpression,
CancellationToken,
CaseClause,
changeExtension,
CharacterCodes,
combinePaths,
comparePaths,
comparePatternKeys,
compareStringsCaseSensitive,
compareValues,
Comparison,
CompilerOptions,
CompletionEntry,
CompletionEntryDetails,
CompletionInfo,
concatenate,
contains,
containsPath,
ContextFlags,
createModuleSpecifierResolutionHost,
createSortedArray,
createTextSpan,
createTextSpanFromStringLiteralLikeContent,
Debug,
deduplicate,
directorySeparator,
ElementAccessExpression,
emptyArray,
endsWith,
ensureTrailingDirectorySeparator,
equateStringsCaseSensitive,
escapeString,
Extension,
fileExtensionIsOneOf,
filter,
find,
findAncestor,
findPackageJson,
findPackageJsons,
firstDefined,
firstOrUndefined,
flatMap,
flatten,
forEachAncestorDirectoryStoppingAtGlobalCache,
getBaseFileName,
getConditions,
getContextualTypeFromParent,
getDeclarationEmitExtensionForPath,
getDirectoryPath,
getEffectiveTypeRoots,
getEmitModuleResolutionKind,
getLeadingCommentRanges,
getOwnKeys,
getPackageJsonTypesVersionsPaths,
getPathComponents,
getPathsBasePath,
getPossibleOriginalInputExtensionForExtension,
getPossibleOriginalInputPathWithoutChangingExt,
getReplacementSpanForContextToken,
getResolvePackageJsonExports,
getResolvePackageJsonImports,
getSupportedExtensions,
getSupportedExtensionsWithJsonIfResolveJsonModule,
getTextOfJsxAttributeName,
getTextOfNode,
getTokenAtPosition,
hasIndexSignature,
hasProperty,
hasTrailingDirectorySeparator,
hostGetCanonicalFileName,
hostUsesCaseSensitiveFileNames,
ImportOrExportSpecifier,
IndexedAccessTypeNode,
InternalSymbolName,
isApplicableVersionedTypesKey,
isArray,
isCallExpression,
isCallLikeExpression,
isIdentifier,
isIdentifierText,
isImportCall,
isInReferenceComment,
isInString,
isJsxAttribute,
isJsxOpeningLikeElement,
isLiteralTypeNode,
isObjectLiteralExpression,
isPatternMatch,
isPrivateIdentifierClassElementDeclaration,
isRootedDiskPath,
isString,
isStringLiteral,
isStringLiteralLike,
isUrl,
JsxAttribute,
LanguageServiceHost,
length,
LiteralExpression,
LiteralTypeNode,
mapDefined,
MapLike,
moduleExportNameTextEscaped,
moduleResolutionUsesNodeModules,
ModuleSpecifierEnding,
ModuleSpecifierResolutionHost,
moduleSpecifiers,
newCaseClauseTracker,
Node,
normalizePath,
normalizeSlashes,
ObjectLiteralExpression,
Path,
Program,
PropertyAssignment,
rangeContainsPosition,
readJson,
removeFileExtension,
removePrefix,
removeTrailingDirectorySeparator,
ResolutionMode,
resolvePath,
ScriptElementKind,
ScriptElementKindModifier,
ScriptTarget,
signatureHasRestParameter,
SignatureHelp,
singleElementArray,
skipConstraint,
skipParentheses,
SourceFile,
startsWith,
StringLiteralLike,
StringLiteralType,
stripQuotes,
supportedTSImplementationExtensions,
Symbol,
SyntaxKind,
textPart,
TextSpan,
tryAndIgnoreErrors,
tryDirectoryExists,
tryFileExists,
tryGetDirectories,
tryGetExtensionFromPath,
tryParsePattern,
tryReadDirectory,
tryRemoveDirectoryPrefix,
tryRemovePrefix,
Type,
TypeChecker,
TypeFlags,
UnionTypeNode,
unmangleScopedPackageName,
UserPreferences,
walkUpParenthesizedExpressions,
walkUpParenthesizedTypes,
} from "./_namespaces/ts.js";
/** A set of path-completion entries, deduplicated by name. */
interface NameAndKindSet {
    add(value: NameAndKind): void;
    has(name: string): boolean;
    values(): IterableIterator<NameAndKind>;
}
// When two entries share a name, the higher-precedence kind wins
// (see createNameAndKindSet): module name beats script beats directory.
const kindPrecedence = {
    [ScriptElementKind.directory]: 0,
    [ScriptElementKind.scriptElement]: 1,
    [ScriptElementKind.externalModuleName]: 2,
};
/** Creates a NameAndKindSet backed by a Map keyed on entry name. */
function createNameAndKindSet(): NameAndKindSet {
    const byName = new Map<string, NameAndKind>();
    return {
        // On a name collision, keep whichever entry has the higher kind precedence.
        add: value => {
            const prior = byName.get(value.name);
            if (prior === undefined || kindPrecedence[prior.kind] < kindPrecedence[value.kind]) {
                byName.set(value.name, value);
            }
        },
        has: name => byName.has(name),
        values: () => byName.values(),
    };
}
/** @internal */
export function getStringLiteralCompletions(
    sourceFile: SourceFile,
    position: number,
    contextToken: Node | undefined,
    options: CompilerOptions,
    host: LanguageServiceHost,
    program: Program,
    log: Log,
    preferences: UserPreferences,
    includeSymbol: boolean,
): CompletionInfo | undefined {
    // Inside a triple-slash reference comment, only path completions apply.
    if (isInReferenceComment(sourceFile, position)) {
        const pathEntries = getTripleSlashReferenceCompletion(sourceFile, position, program, host, createModuleSpecifierResolutionHost(program, host));
        return pathEntries === undefined ? undefined : convertPathCompletions(pathEntries);
    }
    if (!isInString(sourceFile, position, contextToken)) return undefined;
    if (!contextToken || !isStringLiteralLike(contextToken)) return undefined;
    const completion = getStringLiteralCompletionEntries(sourceFile, contextToken, position, program, host, preferences);
    return convertStringLiteralCompletions(completion, contextToken, sourceFile, host, program, log, options, preferences, position, includeSymbol);
}
/**
 * Converts an internal StringLiteralCompletion result into the public
 * CompletionInfo shape, dispatching on the completion kind.
 */
function convertStringLiteralCompletions(
    completion: StringLiteralCompletion | undefined,
    contextToken: StringLiteralLike,
    sourceFile: SourceFile,
    host: LanguageServiceHost,
    program: Program,
    log: Log,
    options: CompilerOptions,
    preferences: UserPreferences,
    position: number,
    includeSymbol: boolean,
): CompletionInfo | undefined {
    if (completion === undefined) {
        return undefined;
    }
    // Span covering the literal's content, used so editors can replace the whole string.
    const optionalReplacementSpan = createTextSpanFromStringLiteralLikeContent(contextToken, position);
    switch (completion.kind) {
        case StringLiteralCompletionKind.Paths:
            return convertPathCompletions(completion.paths);
        case StringLiteralCompletionKind.Properties: {
            const entries = createSortedArray<CompletionEntry>();
            // Most optional arguments are irrelevant when completing inside a string.
            getCompletionEntriesFromSymbols(
                completion.symbols,
                entries,
                contextToken,
                contextToken,
                sourceFile,
                position,
                sourceFile,
                host,
                program,
                ScriptTarget.ESNext,
                log,
                CompletionKind.String,
                preferences,
                options,
                /*formatContext*/ undefined,
                /*isTypeOnlyLocation*/ undefined,
                /*propertyAccessToConvert*/ undefined,
                /*jsxIdentifierExpected*/ undefined,
                /*isJsxInitializer*/ undefined,
                /*importStatementCompletion*/ undefined,
                /*recommendedCompletion*/ undefined,
                /*symbolToOriginInfoMap*/ undefined,
                /*symbolToSortTextMap*/ undefined,
                /*isJsxIdentifierExpected*/ undefined,
                /*isRightOfOpenTag*/ undefined,
                includeSymbol,
            ); // Target will not be used, so arbitrary
            return {
                isGlobalCompletion: false,
                isMemberCompletion: true,
                // An index signature means arbitrary new property names are valid.
                isNewIdentifierLocation: completion.hasIndexSignature,
                optionalReplacementSpan,
                entries,
                defaultCommitCharacters: getDefaultCommitCharacters(completion.hasIndexSignature),
            };
        }
        case StringLiteralCompletionKind.Types: {
            // Escape each suggested value for the quote style actually in use.
            const quoteChar = contextToken.kind === SyntaxKind.NoSubstitutionTemplateLiteral
                ? CharacterCodes.backtick
                : startsWith(getTextOfNode(contextToken), "'")
                ? CharacterCodes.singleQuote
                : CharacterCodes.doubleQuote;
            const entries = completion.types.map(type => ({
                name: escapeString(type.value, quoteChar),
                kindModifiers: ScriptElementKindModifier.none,
                kind: ScriptElementKind.string,
                sortText: SortText.LocationPriority,
                replacementSpan: getReplacementSpanForContextToken(contextToken, position),
                commitCharacters: [],
            }));
            return {
                isGlobalCompletion: false,
                isMemberCompletion: false,
                isNewIdentifierLocation: completion.isNewIdentifier,
                optionalReplacementSpan,
                entries,
                defaultCommitCharacters: getDefaultCommitCharacters(completion.isNewIdentifier),
            };
        }
        default:
            return Debug.assertNever(completion);
    }
}
/** @internal */
export function getStringLiteralCompletionDetails(
    name: string,
    sourceFile: SourceFile,
    position: number,
    contextToken: Node | undefined,
    program: Program,
    host: LanguageServiceHost,
    cancellationToken: CancellationToken,
    preferences: UserPreferences,
): CompletionEntryDetails | undefined {
    // Details only make sense when the cursor is inside a string-literal-like token.
    if (contextToken === undefined || !isStringLiteralLike(contextToken)) return undefined;
    const completion = getStringLiteralCompletionEntries(sourceFile, contextToken, position, program, host, preferences);
    if (completion === undefined) return undefined;
    return stringLiteralCompletionDetails(name, contextToken, completion, sourceFile, program.getTypeChecker(), cancellationToken);
}
/** Produces entry details for the completion named `name`, per result kind. */
function stringLiteralCompletionDetails(name: string, location: Node, completion: StringLiteralCompletion, sourceFile: SourceFile, checker: TypeChecker, cancellationToken: CancellationToken): CompletionEntryDetails | undefined {
    switch (completion.kind) {
        case StringLiteralCompletionKind.Paths: {
            const path = find(completion.paths, p => p.name === name);
            if (!path) return undefined;
            return createCompletionDetails(name, kindModifiersFromExtension(path.extension), path.kind, [textPart(name)]);
        }
        case StringLiteralCompletionKind.Properties: {
            const symbol = find(completion.symbols, s => s.name === name);
            if (!symbol) return undefined;
            return createCompletionDetailsForSymbol(symbol, symbol.name, checker, sourceFile, location, cancellationToken);
        }
        case StringLiteralCompletionKind.Types: {
            const literal = find(completion.types, t => t.value === name);
            if (!literal) return undefined;
            return createCompletionDetails(name, ScriptElementKindModifier.none, ScriptElementKind.string, [textPart(name)]);
        }
        default:
            return Debug.assertNever(completion);
    }
}
/** Wraps path completions in the public CompletionInfo shape. */
function convertPathCompletions(pathCompletions: readonly PathCompletion[]): CompletionInfo {
    const entries = pathCompletions.map(({ name, kind, span, extension }): CompletionEntry => ({
        name,
        kind,
        kindModifiers: kindModifiersFromExtension(extension),
        sortText: SortText.LocationPriority,
        replacementSpan: span,
    }));
    return {
        // We don't want the editor to offer any other completions, such as snippets, inside a comment.
        isGlobalCompletion: false,
        isMemberCompletion: false,
        // The user may type in a path that doesn't yet exist, creating a "new identifier"
        // with respect to the collection of identifiers the server is aware of.
        isNewIdentifierLocation: true,
        entries,
        defaultCommitCharacters: getDefaultCommitCharacters(/*isNewIdentifierLocation*/ true),
    };
}
/** Maps a file extension to the corresponding kind-modifier string. */
function kindModifiersFromExtension(extension: Extension | undefined): ScriptElementKindModifier {
    switch (extension) {
        // TypeScript sources.
        case Extension.Ts:
            return ScriptElementKindModifier.tsModifier;
        case Extension.Tsx:
            return ScriptElementKindModifier.tsxModifier;
        case Extension.Mts:
            return ScriptElementKindModifier.mtsModifier;
        case Extension.Cts:
            return ScriptElementKindModifier.ctsModifier;
        // Declaration files.
        case Extension.Dts:
            return ScriptElementKindModifier.dtsModifier;
        case Extension.Dmts:
            return ScriptElementKindModifier.dmtsModifier;
        case Extension.Dcts:
            return ScriptElementKindModifier.dctsModifier;
        // JavaScript sources.
        case Extension.Js:
            return ScriptElementKindModifier.jsModifier;
        case Extension.Jsx:
            return ScriptElementKindModifier.jsxModifier;
        case Extension.Mjs:
            return ScriptElementKindModifier.mjsModifier;
        case Extension.Cjs:
            return ScriptElementKindModifier.cjsModifier;
        case Extension.Json:
            return ScriptElementKindModifier.jsonModifier;
        case Extension.TsBuildInfo:
            return Debug.fail(`Extension ${Extension.TsBuildInfo} is unsupported.`);
        case undefined:
            return ScriptElementKindModifier.none;
        default:
            return Debug.assertNever(extension);
    }
}
/** Discriminant for the three shapes a string-literal completion result can take. */
const enum StringLiteralCompletionKind {
    Paths,
    Properties,
    Types,
}
/** Completions sourced from the properties of some type. */
interface StringLiteralCompletionsFromProperties {
    readonly kind: StringLiteralCompletionKind.Properties;
    readonly symbols: readonly Symbol[];
    // True when arbitrary names are also valid (the type has an index signature).
    readonly hasIndexSignature: boolean;
}
/** Completions sourced from string-literal types. */
interface StringLiteralCompletionsFromTypes {
    readonly kind: StringLiteralCompletionKind.Types;
    readonly types: readonly StringLiteralType[];
    readonly isNewIdentifier: boolean;
}
type StringLiteralCompletion = { readonly kind: StringLiteralCompletionKind.Paths; readonly paths: readonly PathCompletion[]; } | StringLiteralCompletionsFromProperties | StringLiteralCompletionsFromTypes;
/**
 * Computes completions for the string literal `node` by inspecting its
 * syntactic context (parent, and sometimes grandparent) and choosing the
 * appropriate source: module paths, type properties, or string-literal types.
 */
function getStringLiteralCompletionEntries(sourceFile: SourceFile, node: StringLiteralLike, position: number, program: Program, host: LanguageServiceHost, preferences: UserPreferences): StringLiteralCompletion | undefined {
    const typeChecker = program.getTypeChecker();
    const parent = walkUpParentheses(node.parent);
    switch (parent.kind) {
        case SyntaxKind.LiteralType: {
            const grandParent = walkUpParentheses(parent.parent);
            if (grandParent.kind === SyntaxKind.ImportType) {
                // `import("...")` — complete module specifiers.
                return { kind: StringLiteralCompletionKind.Paths, paths: getStringLiteralCompletionsFromModuleNames(sourceFile, node, program, host, preferences) };
            }
            return fromUnionableLiteralType(grandParent);
        }
        case SyntaxKind.PropertyAssignment:
            if (isObjectLiteralExpression(parent.parent) && (parent as PropertyAssignment).name === node) {
                // Get quoted name of properties of the object literal expression
                // i.e. interface ConfigFiles {
                //          'jspm:dev': string
                //      }
                //      let files: ConfigFiles = {
                //          '/*completion position*/'
                //      }
                //
                //      function foo(c: ConfigFiles) {}
                //      foo({
                //          '/*completion position*/'
                //      });
                return stringLiteralCompletionsForObjectLiteral(typeChecker, parent.parent);
            }
            if (findAncestor(parent.parent, isCallLikeExpression)) {
                // Inside a call, merge contextual string literal types with and
                // without node inferences, deduplicating via `uniques`.
                const uniques = new Set<string>();
                const stringLiteralTypes = concatenate(
                    getStringLiteralTypes(typeChecker.getContextualType(node, ContextFlags.None), uniques),
                    getStringLiteralTypes(typeChecker.getContextualType(node, ContextFlags.IgnoreNodeInferences), uniques),
                );
                return toStringLiteralCompletionsFromTypes(stringLiteralTypes);
            }
            return fromContextualType(ContextFlags.None);
        case SyntaxKind.ElementAccessExpression: {
            const { expression, argumentExpression } = parent as ElementAccessExpression;
            if (node === skipParentheses(argumentExpression)) {
                // Get all names of properties on the expression
                // i.e. interface A {
                //      'prop1': string
                // }
                // let a: A;
                // a['/*completion position*/']
                return stringLiteralCompletionsFromProperties(typeChecker.getTypeAtLocation(expression));
            }
            return undefined;
        }
        case SyntaxKind.CallExpression:
        case SyntaxKind.NewExpression:
        case SyntaxKind.JsxAttribute:
            if (!isRequireCallArgument(node) && !isImportCall(parent)) {
                const argumentInfo = SignatureHelp.getArgumentInfoForCompletions(parent.kind === SyntaxKind.JsxAttribute ? parent.parent : node, position, sourceFile, typeChecker);
                // Get string literal completions from specialized signatures of the target
                // i.e. declare function f(a: 'A');
                // f("/*completion position*/")
                return argumentInfo && getStringLiteralCompletionsFromSignature(argumentInfo.invocation, node, argumentInfo, typeChecker) || fromContextualType(ContextFlags.None);
            }
            // falls through (is `require("")` or `require(""` or `import("")`)
        case SyntaxKind.ImportDeclaration:
        case SyntaxKind.ExportDeclaration:
        case SyntaxKind.ExternalModuleReference:
        case SyntaxKind.JSDocImportTag:
            // Get all known external module names or complete a path to a module
            // i.e. import * as ns from "/*completion position*/";
            //      var y = import("/*completion position*/");
            //      import x = require("/*completion position*/");
            //      var y = require("/*completion position*/");
            //      export * from "/*completion position*/";
            return { kind: StringLiteralCompletionKind.Paths, paths: getStringLiteralCompletionsFromModuleNames(sourceFile, node, program, host, preferences) };
        case SyntaxKind.CaseClause:
            // In a `case "...":`, filter out literal values already handled by sibling clauses.
            const tracker = newCaseClauseTracker(typeChecker, (parent as CaseClause).parent.clauses);
            const contextualTypes = fromContextualType();
            if (!contextualTypes) {
                return;
            }
            const literals = contextualTypes.types.filter(literal => !tracker.hasValue(literal.value));
            return { kind: StringLiteralCompletionKind.Types, types: literals, isNewIdentifier: false };
        case SyntaxKind.ImportSpecifier:
        case SyntaxKind.ExportSpecifier:
            // Complete string aliases in `import { "|" } from` and `export { "|" } from`
            const specifier = parent as ImportOrExportSpecifier;
            if (specifier.propertyName && node !== specifier.propertyName) {
                return; // Don't complete in `export { "..." as "|" } from`
            }
            const namedImportsOrExports = specifier.parent;
            const { moduleSpecifier } = namedImportsOrExports.kind === SyntaxKind.NamedImports ? namedImportsOrExports.parent.parent : namedImportsOrExports.parent;
            if (!moduleSpecifier) return;
            const moduleSpecifierSymbol = typeChecker.getSymbolAtLocation(moduleSpecifier); // TODO: GH#18217
            if (!moduleSpecifierSymbol) return;
            const exports = typeChecker.getExportsAndPropertiesOfModule(moduleSpecifierSymbol);
            // Exclude the default export and anything already imported/exported in this clause.
            const existing = new Set(namedImportsOrExports.elements.map(n => moduleExportNameTextEscaped(n.propertyName || n.name)));
            const uniques = exports.filter(e => e.escapedName !== InternalSymbolName.Default && !existing.has(e.escapedName));
            return { kind: StringLiteralCompletionKind.Properties, symbols: uniques, hasIndexSignature: false };
        case SyntaxKind.BinaryExpression:
            if ((parent as BinaryExpression).operatorToken.kind === SyntaxKind.InKeyword) {
                // `"..." in x` — complete property names of x's type, excluding private class members.
                const type = typeChecker.getTypeAtLocation((parent as BinaryExpression).right);
                const properties = type.isUnion() ? typeChecker.getAllPossiblePropertiesOfTypes(type.types) : type.getApparentProperties();
                return {
                    kind: StringLiteralCompletionKind.Properties,
                    symbols: properties.filter(prop => !prop.valueDeclaration || !isPrivateIdentifierClassElementDeclaration(prop.valueDeclaration)),
                    hasIndexSignature: false,
                };
            }
            return fromContextualType(ContextFlags.None);
        default:
            return fromContextualType() || fromContextualType(ContextFlags.None);
    }
    // Handles a literal type nested (possibly through unions) under a type-argument
    // position, an indexed access, or a property signature.
    function fromUnionableLiteralType(grandParent: Node): StringLiteralCompletionsFromTypes | StringLiteralCompletionsFromProperties | undefined {
        switch (grandParent.kind) {
            case SyntaxKind.CallExpression:
            case SyntaxKind.ExpressionWithTypeArguments:
            case SyntaxKind.JsxOpeningElement:
            case SyntaxKind.JsxSelfClosingElement:
            case SyntaxKind.NewExpression:
            case SyntaxKind.TaggedTemplateExpression:
            case SyntaxKind.TypeReference: {
                const typeArgument = findAncestor(parent, n => n.parent === grandParent) as LiteralTypeNode;
                if (typeArgument) {
                    return { kind: StringLiteralCompletionKind.Types, types: getStringLiteralTypes(typeChecker.getTypeArgumentConstraint(typeArgument)), isNewIdentifier: false };
                }
                return undefined;
            }
            case SyntaxKind.IndexedAccessType:
                // Get all apparent property names
                // i.e. interface Foo {
                //      foo: string;
                //      bar: string;
                // }
                // let x: Foo["/*completion position*/"]
                const { indexType, objectType } = grandParent as IndexedAccessTypeNode;
                if (!rangeContainsPosition(indexType, position)) {
                    return undefined;
                }
                return stringLiteralCompletionsFromProperties(typeChecker.getTypeFromTypeNode(objectType));
            case SyntaxKind.PropertySignature:
                return { kind: StringLiteralCompletionKind.Types, types: getStringLiteralTypes(getConstraintOfTypeArgumentProperty(grandParent, typeChecker)), isNewIdentifier: false };
            case SyntaxKind.UnionType: {
                // Recurse past the union, then drop members already present in it.
                const result = fromUnionableLiteralType(walkUpParentheses(grandParent.parent));
                if (!result) {
                    return undefined;
                }
                const alreadyUsedTypes = getAlreadyUsedTypesInStringLiteralUnion(grandParent as UnionTypeNode, parent as LiteralTypeNode);
                if (result.kind === StringLiteralCompletionKind.Properties) {
                    return { kind: StringLiteralCompletionKind.Properties, symbols: result.symbols.filter(sym => !contains(alreadyUsedTypes, sym.name)), hasIndexSignature: result.hasIndexSignature };
                }
                return { kind: StringLiteralCompletionKind.Types, types: result.types.filter(t => !contains(alreadyUsedTypes, t.value)), isNewIdentifier: false };
            }
            default:
                return undefined;
        }
    }
    function fromContextualType(contextFlags: ContextFlags = ContextFlags.IgnoreNodeInferences): StringLiteralCompletionsFromTypes | undefined {
        // Get completion for string literal from string literal type
        // i.e. var x: "hi" | "hello" = "/*completion position*/"
        return toStringLiteralCompletionsFromTypes(getStringLiteralTypes(getContextualTypeFromParent(node, typeChecker, contextFlags)));
    }
}
/** Wraps the literal types into a Types-kind result, or undefined when empty. */
function toStringLiteralCompletionsFromTypes(types: readonly StringLiteralType[]): StringLiteralCompletionsFromTypes | undefined {
    if (types.length === 0) return undefined;
    return { kind: StringLiteralCompletionKind.Types, types, isNewIdentifier: false };
}
/** Skips enclosing parenthesized types/expressions to the first meaningful ancestor. */
function walkUpParentheses(node: Node) {
    if (node.kind === SyntaxKind.ParenthesizedType) {
        return walkUpParenthesizedTypes(node);
    }
    if (node.kind === SyntaxKind.ParenthesizedExpression) {
        return walkUpParenthesizedExpressions(node);
    }
    return node;
}
/** Collects string-literal texts already present in the union, excluding `current`. */
function getAlreadyUsedTypesInStringLiteralUnion(union: UnionTypeNode, current: LiteralTypeNode): readonly string[] {
    return mapDefined(union.types, member => {
        if (member === current) return undefined;
        if (!isLiteralTypeNode(member) || !isStringLiteral(member.literal)) return undefined;
        return member.literal.text;
    });
}
/**
 * Collects string-literal types accepted at the current argument position
 * across all candidate signatures of the call (or JSX attribute).
 */
function getStringLiteralCompletionsFromSignature(call: CallLikeExpression, arg: StringLiteralLike, argumentInfo: SignatureHelp.ArgumentInfoForCompletions, checker: TypeChecker): StringLiteralCompletionsFromTypes | undefined {
    let isNewIdentifier = false;
    const uniques = new Set<string>();
    // For JSX, the "argument" being edited is the enclosing attribute, not the literal itself.
    const editingArgument = isJsxOpeningLikeElement(call) ? Debug.checkDefined(findAncestor(arg.parent, isJsxAttribute)) : arg;
    const candidates = checker.getCandidateSignaturesForStringLiteralCompletions(call, editingArgument);
    const types = flatMap(candidates, candidate => {
        // Skip signatures that cannot accept this many arguments.
        if (!signatureHasRestParameter(candidate) && argumentInfo.argumentCount > candidate.parameters.length) return;
        let type = candidate.getTypeParameterAtPosition(argumentInfo.argumentIndex);
        if (isJsxOpeningLikeElement(call)) {
            const propType = checker.getTypeOfPropertyOfType(type, getTextOfJsxAttributeName((editingArgument as JsxAttribute).name));
            if (propType) {
                type = propType;
            }
        }
        // If any candidate accepts plain `string`, arbitrary new strings are valid too.
        isNewIdentifier = isNewIdentifier || !!(type.flags & TypeFlags.String);
        return getStringLiteralTypes(type, uniques);
    });
    return length(types) ? { kind: StringLiteralCompletionKind.Types, types, isNewIdentifier } : undefined;
}
/** Builds a Properties-kind result from a type's apparent properties, skipping private class members. */
function stringLiteralCompletionsFromProperties(type: Type | undefined): StringLiteralCompletionsFromProperties | undefined {
    if (!type) return undefined;
    const symbols = filter(type.getApparentProperties(), prop => !(prop.valueDeclaration && isPrivateIdentifierClassElementDeclaration(prop.valueDeclaration)));
    return {
        kind: StringLiteralCompletionKind.Properties,
        symbols,
        hasIndexSignature: hasIndexSignature(type),
    };
}
/** Completions for a quoted property name inside an object literal with a contextual type. */
function stringLiteralCompletionsForObjectLiteral(checker: TypeChecker, objectLiteralExpression: ObjectLiteralExpression): StringLiteralCompletionsFromProperties | undefined {
    const contextualType = checker.getContextualType(objectLiteralExpression);
    if (!contextualType) return undefined;
    // Also consult the contextual type computed while ignoring inferences made from this node.
    const completionsType = checker.getContextualType(objectLiteralExpression, ContextFlags.IgnoreNodeInferences);
    return {
        kind: StringLiteralCompletionKind.Properties,
        symbols: getPropertiesForObjectExpression(contextualType, completionsType, objectLiteralExpression, checker),
        hasIndexSignature: hasIndexSignature(contextualType),
    };
}
/** Recursively gathers non-enum string-literal types, deduplicating via `uniques`. */
function getStringLiteralTypes(type: Type | undefined, uniques = new Set<string>()): readonly StringLiteralType[] {
    if (!type) return emptyArray;
    type = skipConstraint(type);
    if (type.isUnion()) {
        return flatMap(type.types, member => getStringLiteralTypes(member, uniques));
    }
    if (type.isStringLiteral() && !(type.flags & TypeFlags.EnumLiteral) && addToSeen(uniques, type.value)) {
        return [type];
    }
    return emptyArray;
}
/** A single path-completion candidate. */
interface NameAndKind {
    readonly name: string;
    // What the entry represents: a file, a directory, or an external module name.
    readonly kind: ScriptElementKind.scriptElement | ScriptElementKind.directory | ScriptElementKind.externalModuleName;
    readonly extension: Extension | undefined;
}
interface PathCompletion extends NameAndKind {
    // Span in the literal to replace with the entry, if any.
    readonly span: TextSpan | undefined;
}
/** Constructs a NameAndKind record. */
function nameAndKind(name: string, kind: NameAndKind["kind"], extension: Extension | undefined): NameAndKind {
    return { name, kind, extension };
}
/** Constructs a directory-kind entry (directories have no extension). */
function directoryResult(name: string): NameAndKind {
    return nameAndKind(name, ScriptElementKind.directory, /*extension*/ undefined);
}
/** Attaches a replacement span to each entry: the whole literal when the
 * entry contains a path separator, otherwise just the last fragment. */
function addReplacementSpans(text: string, textStart: number, names: readonly NameAndKind[]): readonly PathCompletion[] {
    const fragmentSpan = getDirectoryFragmentTextSpan(text, textStart);
    const wholeSpan = text.length === 0 ? undefined : createTextSpan(textStart, text.length);
    return names.map(({ name, kind, extension }): PathCompletion => {
        const hasSeparator = name.includes(directorySeparator) || name.includes(altDirectorySeparator);
        return { name, kind, extension, span: hasSeparator ? wholeSpan : fragmentSpan };
    });
}
/** Module-specifier completions with replacement spans for the literal. */
function getStringLiteralCompletionsFromModuleNames(sourceFile: SourceFile, node: LiteralExpression, program: Program, host: LanguageServiceHost, preferences: UserPreferences): readonly PathCompletion[] {
    // +1 skips the opening quote so spans cover only the literal's text.
    const textStart = node.getStart(sourceFile) + 1;
    const names = getStringLiteralCompletionsFromModuleNamesWorker(sourceFile, node, program, host, preferences);
    return addReplacementSpans(node.text, textStart, names);
}
/**
 * Decides whether the literal should be completed as a relative path or a
 * non-relative module name, and delegates accordingly.
 */
function getStringLiteralCompletionsFromModuleNamesWorker(sourceFile: SourceFile, node: LiteralExpression, program: Program, host: LanguageServiceHost, preferences: UserPreferences): readonly NameAndKind[] {
    const literalValue = normalizeSlashes(node.text);
    const mode = isStringLiteralLike(node) ? program.getModeForUsageLocation(sourceFile, node) : undefined;
    const scriptPath = sourceFile.path;
    const scriptDirectory = getDirectoryPath(scriptPath);
    const compilerOptions = program.getCompilerOptions();
    const typeChecker = program.getTypeChecker();
    const moduleSpecifierResolutionHost = createModuleSpecifierResolutionHost(program, host);
    const extensionOptions = getExtensionOptions(compilerOptions, ReferenceKind.ModuleSpecifier, sourceFile, typeChecker, preferences, mode);
    // Rooted/URL specifiers count as relative only when no baseUrl/paths mapping could apply.
    return isPathRelativeToScript(literalValue) || !compilerOptions.baseUrl && !compilerOptions.paths && (isRootedDiskPath(literalValue) || isUrl(literalValue))
        ? getCompletionEntriesForRelativeModules(literalValue, scriptDirectory, program, host, moduleSpecifierResolutionHost, scriptPath, extensionOptions)
        : getCompletionEntriesForNonRelativeModules(literalValue, scriptDirectory, mode, program, host, moduleSpecifierResolutionHost, extensionOptions);
}
/** Options controlling which file extensions are offered and how specifiers end. */
interface ExtensionOptions {
    readonly extensionsToSearch: readonly string[];
    readonly referenceKind: ReferenceKind;
    readonly importingSourceFile: SourceFile;
    // User preference for how module specifiers should end (e.g. with/without extension).
    readonly endingPreference?: UserPreferences["importModuleSpecifierEnding"];
    readonly resolutionMode?: ResolutionMode;
}
/** Builds ExtensionOptions from compiler options, reference kind, and preferences. */
function getExtensionOptions(compilerOptions: CompilerOptions, referenceKind: ReferenceKind, importingSourceFile: SourceFile, typeChecker?: TypeChecker, preferences?: UserPreferences, resolutionMode?: ResolutionMode): ExtensionOptions {
    const extensionsToSearch = flatten(getSupportedExtensionsForModuleResolution(compilerOptions, typeChecker));
    return {
        extensionsToSearch,
        referenceKind,
        importingSourceFile,
        endingPreference: preferences && preferences.importModuleSpecifierEnding,
        resolutionMode,
    };
}
/** Relative-path completions; honors the "rootDirs" option when present. */
function getCompletionEntriesForRelativeModules(literalValue: string, scriptDirectory: string, program: Program, host: LanguageServiceHost, moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost, scriptPath: Path, extensionOptions: ExtensionOptions) {
    const compilerOptions = program.getCompilerOptions();
    if (!compilerOptions.rootDirs) {
        return arrayFrom(getCompletionEntriesForDirectoryFragment(literalValue, scriptDirectory, extensionOptions, program, host, moduleSpecifierResolutionHost, /*moduleSpecifierIsRelative*/ true, scriptPath).values());
    }
    // With rootDirs, virtual sibling directories must be searched as well.
    return getCompletionEntriesForDirectoryFragmentWithRootDirs(
        compilerOptions.rootDirs,
        literalValue,
        scriptDirectory,
        extensionOptions,
        program,
        host,
        moduleSpecifierResolutionHost,
        scriptPath,
    );
}
/** Supported extension groups for module resolution, including extensions from
 * ambient module declarations of the form `declare module "*.css"`. */
function getSupportedExtensionsForModuleResolution(compilerOptions: CompilerOptions, typeChecker?: TypeChecker): readonly string[][] {
    const ambientModulesExtensions: string[] = !typeChecker ? [] : mapDefined(typeChecker.getAmbientModules(), module => {
        const name = module.name.slice(1, -1); // strip the surrounding quotes
        return name.startsWith("*.") && !name.includes("/") ? name.slice(1) : undefined;
    });
    const extensions = [...getSupportedExtensions(compilerOptions), ambientModulesExtensions];
    if (moduleResolutionUsesNodeModules(getEmitModuleResolutionKind(compilerOptions))) {
        return getSupportedExtensionsWithJsonIfResolveJsonModule(compilerOptions, extensions);
    }
    return extensions;
}
/**
 * Takes a script path and returns paths for all potential folders that could be merged with its
 * containing folder via the "rootDirs" compiler option.
 *
 * @param rootDirs The "rootDirs" entries, possibly relative.
 * @param basePath Directory against which relative rootDirs entries are resolved.
 * @param scriptDirectory Absolute, normalized directory containing the script.
 * @param ignoreCase Whether path containment checks are case-insensitive.
 */
function getBaseDirectoriesFromRootDirs(rootDirs: string[], basePath: string, scriptDirectory: string, ignoreCase: boolean): readonly string[] {
    // Make all paths absolute/normalized if they are not already
    rootDirs = rootDirs.map(rootDirectory => ensureTrailingDirectorySeparator(normalizePath(isRootedDiskPath(rootDirectory) ? rootDirectory : combinePaths(basePath, rootDirectory))));
    // Determine the path to the directory containing the script relative to the root directory it is contained within.
    // `slice` replaces the deprecated `substr` (identical semantics with a single argument).
    const relativeDirectory = firstDefined(rootDirs, rootDirectory => containsPath(rootDirectory, scriptDirectory, basePath, ignoreCase) ? scriptDirectory.slice(rootDirectory.length) : undefined)!; // TODO: GH#18217
    // Now find a path for each potential directory that is to be merged with the one containing the script
    return deduplicate<string>(
        [...rootDirs.map(rootDirectory => combinePaths(rootDirectory, relativeDirectory)), scriptDirectory].map(baseDir => removeTrailingDirectorySeparator(baseDir)),
        equateStringsCaseSensitive,
        compareStringsCaseSensitive,
    );
}
/** Gathers relative-path completions across every base directory implied by "rootDirs", de-duplicated. */
function getCompletionEntriesForDirectoryFragmentWithRootDirs(rootDirs: string[], fragment: string, scriptDirectory: string, extensionOptions: ExtensionOptions, program: Program, host: LanguageServiceHost, moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost, exclude: string): readonly NameAndKind[] {
    const compilerOptions = program.getCompilerOptions();
    const basePath = compilerOptions.project || host.getCurrentDirectory();
    const ignoreCase = !(host.useCaseSensitiveFileNames && host.useCaseSensitiveFileNames());
    const baseDirectories = getBaseDirectoriesFromRootDirs(rootDirs, basePath, scriptDirectory, ignoreCase);
    const entriesFromAllBases = flatMap(baseDirectories, baseDirectory => arrayFrom(getCompletionEntriesForDirectoryFragment(fragment, baseDirectory, extensionOptions, program, host, moduleSpecifierResolutionHost, /*moduleSpecifierIsRelative*/ true, exclude).values()));
    // Two entries are duplicates only when name, kind, and extension all agree.
    const sameEntry = (itemA: NameAndKind, itemB: NameAndKind) => itemA.name === itemB.name && itemA.kind === itemB.kind && itemA.extension === itemB.extension;
    return deduplicate<NameAndKind>(entriesFromAllBases, sameEntry);
}
/** Distinguishes what kind of path reference is being completed. */
const enum ReferenceKind {
    /** A literal file reference (e.g. `/// <reference path="...">`) — the file's extension is preserved as-is. */
    Filename,
    /** A module specifier in an import — the extension ending follows module-specifier preferences. */
    ModuleSpecifier,
}
/**
 * Given a path ending at a directory, gets the completions for the path, and filters for those entries containing the basename.
 *
 * File and subdirectory entries under the directory named by `fragment` (resolved against
 * `scriptDirectory`) are accumulated into `result`, which is also returned. For non-relative
 * specifiers, a package.json `typesVersions` mapping may take over entirely and suppress plain
 * directory enumeration.
 */
function getCompletionEntriesForDirectoryFragment(
    fragment: string,
    scriptDirectory: string,
    extensionOptions: ExtensionOptions,
    program: Program,
    host: LanguageServiceHost,
    moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost,
    moduleSpecifierIsRelative: boolean,
    exclude?: string,
    result = createNameAndKindSet(),
): NameAndKindSet {
    if (fragment === undefined) {
        fragment = "";
    }
    fragment = normalizeSlashes(fragment);
    /**
     * Remove the basename from the path. Note that we don't use the basename to filter completions;
     * the client is responsible for refining completions.
     */
    if (!hasTrailingDirectorySeparator(fragment)) {
        fragment = getDirectoryPath(fragment);
    }
    if (fragment === "") {
        // An empty fragment means "complete within the script's own directory".
        fragment = "." + directorySeparator;
    }
    fragment = ensureTrailingDirectorySeparator(fragment);
    const absolutePath = resolvePath(scriptDirectory, fragment);
    const baseDirectory = hasTrailingDirectorySeparator(absolutePath) ? absolutePath : getDirectoryPath(absolutePath);
    if (!moduleSpecifierIsRelative) {
        // check for a version redirect
        const packageJsonPath = findPackageJson(baseDirectory, host);
        if (packageJsonPath) {
            const packageJson = readJson(packageJsonPath, host as { readFile: (filename: string) => string | undefined; });
            const typesVersions = (packageJson as any).typesVersions;
            if (typeof typesVersions === "object") {
                const versionPaths = getPackageJsonTypesVersionsPaths(typesVersions)?.paths;
                if (versionPaths) {
                    const packageDirectory = getDirectoryPath(packageJsonPath);
                    const pathInPackage = absolutePath.slice(ensureTrailingDirectorySeparator(packageDirectory).length);
                    if (addCompletionEntriesFromPaths(result, pathInPackage, packageDirectory, extensionOptions, program, host, moduleSpecifierResolutionHost, versionPaths)) {
                        // A true result means one of the `versionPaths` was matched, which will block relative resolution
                        // to files and folders from here. All reachable paths given the pattern match are already added.
                        return result;
                    }
                }
            }
        }
    }
    const ignoreCase = !(host.useCaseSensitiveFileNames && host.useCaseSensitiveFileNames());
    if (!tryDirectoryExists(host, baseDirectory)) return result;
    // Enumerate the available files if possible
    const files = tryReadDirectory(host, baseDirectory, extensionOptions.extensionsToSearch, /*exclude*/ undefined, /*include*/ ["./*"]);
    if (files) {
        for (let filePath of files) {
            filePath = normalizePath(filePath);
            // Skip the importing file itself (`exclude`) — a module shouldn't offer to import itself.
            if (exclude && comparePaths(filePath, exclude, scriptDirectory, ignoreCase) === Comparison.EqualTo) {
                continue;
            }
            const { name, extension } = getFilenameWithExtensionOption(getBaseFileName(filePath), program, extensionOptions, /*isExportsOrImportsWildcard*/ false);
            result.add(nameAndKind(name, ScriptElementKind.scriptElement, extension));
        }
    }
    // If possible, get folder completion as well
    const directories = tryGetDirectories(host, baseDirectory);
    if (directories) {
        for (const directory of directories) {
            const directoryName = getBaseFileName(normalizePath(directory));
            if (directoryName !== "@types") {
                // "@types" folders are deliberately omitted from plain directory completions.
                result.add(directoryResult(directoryName));
            }
        }
    }
    return result;
}
/**
 * Computes the display name and extension for a file-name completion entry. For plain filename
 * references the extension is kept as-is; for module specifiers the preferred module-specifier
 * ending (minimal / index / .js extension / .ts extension) determines whether the extension is
 * kept, swapped for the emitted JS extension, or dropped entirely.
 */
function getFilenameWithExtensionOption(name: string, program: Program, extensionOptions: ExtensionOptions, isExportsOrImportsWildcard: boolean): { name: string; extension: Extension | undefined; } {
    // Declaration-file names like "foo.d.css.ts" map back to the real file name they describe.
    const nonJsResult = moduleSpecifiers.tryGetRealFileNameForNonJsDeclarationFileName(name);
    if (nonJsResult) {
        return { name: nonJsResult, extension: tryGetExtensionFromPath(nonJsResult) };
    }
    if (extensionOptions.referenceKind === ReferenceKind.Filename) {
        return { name, extension: tryGetExtensionFromPath(name) };
    }
    let allowedEndings = moduleSpecifiers.getModuleSpecifierPreferences(
        { importModuleSpecifierEnding: extensionOptions.endingPreference },
        program,
        program.getCompilerOptions(),
        extensionOptions.importingSourceFile,
    ).getAllowedEndingsInPreferredOrder(extensionOptions.resolutionMode);
    if (isExportsOrImportsWildcard) {
        // If we're completing `import {} from "foo/|"` and subpaths are available via `"exports": { "./*": "./src/*" }`,
        // the completion must be a (potentially extension-swapped) file name. Dropping extensions and index files is not allowed.
        allowedEndings = allowedEndings.filter(e => e !== ModuleSpecifierEnding.Minimal && e !== ModuleSpecifierEnding.Index);
    }
    if (allowedEndings[0] === ModuleSpecifierEnding.TsExtension) {
        if (fileExtensionIsOneOf(name, supportedTSImplementationExtensions)) {
            return { name, extension: tryGetExtensionFromPath(name) };
        }
        // Not a TS implementation file — fall back to the output (JS) extension if one exists.
        const outputExtension = moduleSpecifiers.tryGetJSExtensionForFile(name, program.getCompilerOptions());
        return outputExtension
            ? { name: changeExtension(name, outputExtension), extension: outputExtension }
            : { name, extension: tryGetExtensionFromPath(name) };
    }
    if (
        !isExportsOrImportsWildcard &&
        (allowedEndings[0] === ModuleSpecifierEnding.Minimal || allowedEndings[0] === ModuleSpecifierEnding.Index) &&
        fileExtensionIsOneOf(name, [Extension.Js, Extension.Jsx, Extension.Ts, Extension.Tsx, Extension.Dts])
    ) {
        // Preferred ending drops the extension from the specifier (extension is still reported for display).
        return { name: removeFileExtension(name), extension: tryGetExtensionFromPath(name) };
    }
    const outputExtension = moduleSpecifiers.tryGetJSExtensionForFile(name, program.getCompilerOptions());
    return outputExtension
        ? { name: changeExtension(name, outputExtension), extension: outputExtension }
        : { name, extension: tryGetExtensionFromPath(name) };
}
/** @returns whether `fragment` was a match for any `paths` (which should indicate whether any other path completions should be offered) */
function addCompletionEntriesFromPaths(
    result: NameAndKindSet,
    fragment: string,
    baseDirectory: string,
    extensionOptions: ExtensionOptions,
    program: Program,
    host: LanguageServiceHost,
    moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost,
    paths: MapLike<string[]>,
) {
    // Order keys so the one with the longer literal prefix (more specific mapping) wins.
    const comparePaths = (a: string, b: string): Comparison => {
        const prefixLength = (key: string) => {
            const pattern = tryParsePattern(key);
            return typeof pattern === "object" ? pattern.prefix.length : key.length;
        };
        return compareValues(prefixLength(b), prefixLength(a));
    };
    return addCompletionEntriesFromPathsOrExportsOrImports(result, /*isExports*/ false, /*isImports*/ false, fragment, baseDirectory, extensionOptions, program, host, moduleSpecifierResolutionHost, getOwnKeys(paths), key => paths[key], comparePaths);
}
/**
 * @returns whether `fragment` was a match for any `paths` (which should indicate whether any other path completions should be offered)
 *
 * Shared worker for "paths", package.json "exports", and package.json "imports" mappings. Results
 * from lower-priority pattern matches are discarded when a higher-priority key also matches, so
 * only the winning mapping's files are offered (plus entries from keys that never matched).
 */
function addCompletionEntriesFromPathsOrExportsOrImports(
    result: NameAndKindSet,
    isExports: boolean,
    isImports: boolean,
    fragment: string,
    baseDirectory: string,
    extensionOptions: ExtensionOptions,
    program: Program,
    host: LanguageServiceHost,
    moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost,
    keys: readonly string[],
    getPatternsForKey: (key: string) => string[] | undefined,
    comparePaths: (a: string, b: string) => Comparison,
) {
    let pathResults: { results: NameAndKind[]; matchedPattern: boolean; }[] = [];
    let matchedPath: string | undefined;
    for (const key of keys) {
        if (key === ".") continue;
        const keyWithoutLeadingDotSlash = key
            .replace(/^\.\//, "") // remove leading "./"
            + ((isExports || isImports) && endsWith(key, "/") ? "*" : ""); // normalize trailing `/` to `/*`
        const patterns = getPatternsForKey(key);
        if (patterns) {
            const pathPattern = tryParsePattern(keyWithoutLeadingDotSlash);
            if (!pathPattern) continue;
            const isMatch = typeof pathPattern === "object" && isPatternMatch(pathPattern, fragment);
            const isLongestMatch = isMatch && (matchedPath === undefined || comparePaths(keyWithoutLeadingDotSlash, matchedPath) === Comparison.LessThan);
            if (isLongestMatch) {
                // If this is a higher priority match than anything we've seen so far, previous results from matches are invalid, e.g.
                // for `import {} from "some-package/|"` with a typesVersions:
                // {
                //   "bar/*": ["bar/*"], // <-- 1. We add 'bar', but 'bar/*' doesn't match yet.
                //   "*": ["dist/*"], // <-- 2. We match here and add files from dist. 'bar' is still ok because it didn't come from a match.
                //   "foo/*": ["foo/*"] // <-- 3. We matched '*' earlier and added results from dist, but if 'foo/*' also matched,
                // } results in dist would not be visible. 'bar' still stands because it didn't come from a match.
                // This is especially important if `dist/foo` is a folder, because if we fail to clear results
                // added by the '*' match, after typing `"some-package/foo/|"` we would get file results from both
                // ./dist/foo and ./foo, when only the latter will actually be resolvable.
                // See pathCompletionsTypesVersionsWildcard6.ts.
                matchedPath = keyWithoutLeadingDotSlash;
                pathResults = pathResults.filter(r => !r.matchedPattern);
            }
            // Exact (star-free) keys and keys no worse than the current best still contribute entries.
            if (typeof pathPattern === "string" || matchedPath === undefined || comparePaths(keyWithoutLeadingDotSlash, matchedPath) !== Comparison.GreaterThan) {
                pathResults.push({
                    matchedPattern: isMatch,
                    results: getCompletionsForPathMapping(keyWithoutLeadingDotSlash, patterns, fragment, baseDirectory, extensionOptions, isExports, isImports, program, host, moduleSpecifierResolutionHost)
                        .map(({ name, kind, extension }) => nameAndKind(name, kind, extension)),
                });
            }
        }
    }
    pathResults.forEach(pathResult => pathResult.results.forEach(r => result.add(r)));
    return matchedPath !== undefined;
}
/**
 * Check all of the declared modules and those in node modules. Possible sources of modules:
 *      Modules that are found by the type checker
 *      Modules found relative to "baseUrl" compiler options (including patterns from "paths" compiler option)
 *      Modules from node_modules (i.e. those listed in package.json)
 *          This includes all files that are found in node_modules/moduleName/ with acceptable file extensions
 */
function getCompletionEntriesForNonRelativeModules(
    fragment: string,
    scriptPath: string,
    mode: ResolutionMode,
    program: Program,
    host: LanguageServiceHost,
    moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost,
    extensionOptions: ExtensionOptions,
): readonly NameAndKind[] {
    const typeChecker = program.getTypeChecker();
    const compilerOptions = program.getCompilerOptions();
    const { baseUrl, paths } = compilerOptions;
    const result = createNameAndKindSet();
    const moduleResolution = getEmitModuleResolutionKind(compilerOptions);
    if (baseUrl) {
        // baseUrl makes the directory itself a source of non-relative completions.
        const absolute = normalizePath(combinePaths(host.getCurrentDirectory(), baseUrl));
        getCompletionEntriesForDirectoryFragment(fragment, absolute, extensionOptions, program, host, moduleSpecifierResolutionHost, /*moduleSpecifierIsRelative*/ false, /*exclude*/ undefined, result);
    }
    if (paths) {
        const absolute = getPathsBasePath(compilerOptions, host)!;
        addCompletionEntriesFromPaths(result, fragment, absolute, extensionOptions, program, host, moduleSpecifierResolutionHost, paths);
    }
    const fragmentDirectory = getFragmentDirectory(fragment);
    for (const ambientName of getAmbientModuleCompletions(fragment, fragmentDirectory, typeChecker)) {
        result.add(nameAndKind(ambientName, ScriptElementKind.externalModuleName, /*extension*/ undefined));
    }
    getCompletionEntriesFromTypings(program, host, moduleSpecifierResolutionHost, scriptPath, fragmentDirectory, extensionOptions, result);
    if (moduleResolutionUsesNodeModules(moduleResolution)) {
        // If looking for a global package name, don't just include everything in `node_modules` because that includes dependencies' own dependencies.
        // (But do if we didn't find anything, e.g. 'package.json' missing.)
        let foundGlobal = false;
        if (fragmentDirectory === undefined) {
            for (const moduleName of enumerateNodeModulesVisibleToScript(host, scriptPath)) {
                const moduleResult = nameAndKind(moduleName, ScriptElementKind.externalModuleName, /*extension*/ undefined);
                if (!result.has(moduleResult.name)) {
                    foundGlobal = true;
                    result.add(moduleResult);
                }
            }
        }
        if (!foundGlobal) {
            const resolvePackageJsonExports = getResolvePackageJsonExports(compilerOptions);
            const resolvePackageJsonImports = getResolvePackageJsonImports(compilerOptions);
            let seenPackageScope = false;
            // Looks up "#..." completions from package.json "imports"; runs only until the first
            // package.json on the ancestor walk is found (the nearest package scope).
            const importsLookup = (directory: string) => {
                if (resolvePackageJsonImports && !seenPackageScope) {
                    const packageFile = combinePaths(directory, "package.json");
                    if (seenPackageScope = tryFileExists(host, packageFile)) {
                        const packageJson = readJson(packageFile, host);
                        exportsOrImportsLookup((packageJson as MapLike<unknown>).imports, fragment, directory, /*isExports*/ false, /*isImports*/ true);
                    }
                }
            };
            let ancestorLookup: (directory: string) => void | undefined = ancestor => {
                const nodeModules = combinePaths(ancestor, "node_modules");
                if (tryDirectoryExists(host, nodeModules)) {
                    getCompletionEntriesForDirectoryFragment(fragment, nodeModules, extensionOptions, program, host, moduleSpecifierResolutionHost, /*moduleSpecifierIsRelative*/ false, /*exclude*/ undefined, result);
                }
                importsLookup(ancestor);
            };
            if (fragmentDirectory && resolvePackageJsonExports) {
                // When a package name is already typed, prefer its package.json "exports" subpaths
                // over raw directory listing; fall back to the plain lookup when they don't apply.
                const nodeModulesDirectoryOrImportsLookup = ancestorLookup;
                ancestorLookup = ancestor => {
                    const components = getPathComponents(fragment);
                    components.shift(); // shift off empty root
                    let packagePath = components.shift();
                    if (!packagePath) {
                        return nodeModulesDirectoryOrImportsLookup(ancestor);
                    }
                    if (startsWith(packagePath, "@")) {
                        // Scoped package: the package name spans two path components ("@scope/name").
                        const subName = components.shift();
                        if (!subName) {
                            return nodeModulesDirectoryOrImportsLookup(ancestor);
                        }
                        packagePath = combinePaths(packagePath, subName);
                    }
                    if (resolvePackageJsonImports && startsWith(packagePath, "#")) {
                        return importsLookup(ancestor);
                    }
                    const packageDirectory = combinePaths(ancestor, "node_modules", packagePath);
                    const packageFile = combinePaths(packageDirectory, "package.json");
                    if (tryFileExists(host, packageFile)) {
                        const packageJson = readJson(packageFile, host);
                        const fragmentSubpath = components.join("/") + (components.length && hasTrailingDirectorySeparator(fragment) ? "/" : "");
                        if (exportsOrImportsLookup((packageJson as MapLike<unknown>).exports, fragmentSubpath, packageDirectory, /*isExports*/ true, /*isImports*/ false)) {
                            return;
                        }
                    }
                    return nodeModulesDirectoryOrImportsLookup(ancestor);
                };
            }
            forEachAncestorDirectoryStoppingAtGlobalCache(host, scriptPath, ancestorLookup);
        }
    }
    return arrayFrom(result.values());
    /** Returns true if the search should stop */
    function exportsOrImportsLookup(lookupTable: unknown, fragment: string, baseDirectory: string, isExports: boolean, isImports: boolean): boolean {
        if (typeof lookupTable !== "object" || lookupTable === null) { // eslint-disable-line no-restricted-syntax
            return lookupTable !== undefined; // null lookupTable or entrypoint only
        }
        const keys = getOwnKeys(lookupTable as MapLike<unknown>);
        const conditions = getConditions(compilerOptions, mode);
        addCompletionEntriesFromPathsOrExportsOrImports(
            result,
            isExports,
            isImports,
            fragment,
            baseDirectory,
            extensionOptions,
            program,
            host,
            moduleSpecifierResolutionHost,
            keys,
            key => {
                const pattern = getPatternFromFirstMatchingCondition((lookupTable as MapLike<unknown>)[key], conditions);
                if (pattern === undefined) {
                    return undefined;
                }
                return singleElementArray(endsWith(key, "/") && endsWith(pattern, "/") ? pattern + "*" : pattern);
            },
            comparePatternKeys,
        );
        return true;
    }
}
/**
 * Walks an exports/imports target and returns the first string pattern reachable through an
 * applicable condition ("default", a configured condition, or a versioned "types@..." key).
 */
function getPatternFromFirstMatchingCondition(target: unknown, conditions: readonly string[]): string | undefined {
    if (typeof target === "string") {
        return target;
    }
    if (!target || typeof target !== "object" || isArray(target)) {
        return undefined;
    }
    for (const condition in target) {
        const applies = condition === "default" || conditions.includes(condition) || isApplicableVersionedTypesKey(conditions, condition);
        if (applies) {
            // Recurse: the value may itself be a nested condition object.
            return getPatternFromFirstMatchingCondition((target as MapLike<unknown>)[condition], conditions);
        }
    }
    return undefined;
}
/** Returns the directory portion of a fragment, or undefined when the fragment contains no slash. */
function getFragmentDirectory(fragment: string): string | undefined {
    if (!containsSlash(fragment)) return undefined;
    return hasTrailingDirectorySeparator(fragment) ? fragment : getDirectoryPath(fragment);
}
/**
 * Produces completion entries for a single mapping entry (`path` key -> `patterns` targets) from
 * "paths" / "exports" / "imports". Star-free keys complete as themselves; starred keys expand
 * their target patterns against the file system.
 */
function getCompletionsForPathMapping(
    path: string,
    patterns: readonly string[],
    fragment: string,
    packageDirectory: string,
    extensionOptions: ExtensionOptions,
    isExports: boolean,
    isImports: boolean,
    program: Program,
    host: LanguageServiceHost,
    moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost,
): readonly NameAndKind[] {
    const parsedPath = tryParsePattern(path);
    if (!parsedPath) {
        return emptyArray;
    }
    // no stars in the pattern
    if (typeof parsedPath === "string") {
        // For a path mapping "foo": ["/x/y/z.ts"], add "foo" itself as a completion.
        return justPathMappingName(path, ScriptElementKind.scriptElement);
    }
    const remainingFragment = tryRemovePrefix(fragment, parsedPath.prefix);
    if (remainingFragment === undefined) {
        // Fragment hasn't reached the star yet: offer the prefix as a directory, or expand each
        // pattern from its root when the star is embedded mid-component.
        const starIsFullPathComponent = endsWith(path, "/*");
        return starIsFullPathComponent ? justPathMappingName(parsedPath.prefix, ScriptElementKind.directory) : flatMap(patterns, pattern => getModulesForPathsPattern("", packageDirectory, pattern, extensionOptions, isExports, isImports, program, host, moduleSpecifierResolutionHost)?.map(({ name, ...rest }) => ({ name: parsedPath.prefix + name + parsedPath.suffix, ...rest })));
    }
    return flatMap(patterns, pattern => getModulesForPathsPattern(remainingFragment, packageDirectory, pattern, extensionOptions, isExports, isImports, program, host, moduleSpecifierResolutionHost));
    // Offers the mapping key itself (sans trailing separator) when it extends the typed fragment.
    function justPathMappingName(name: string, kind: ScriptElementKind.directory | ScriptElementKind.scriptElement): readonly NameAndKind[] {
        return startsWith(name, fragment) ? [{ name: removeTrailingDirectorySeparator(name), kind, extension: undefined }] : emptyArray;
    }
}
/**
 * Expands one wildcard target pattern (e.g. "./dist/*" or "./lib/*.js") against the file system,
 * returning the file/directory completion entries its matches produce, or undefined when the host
 * cannot read directories. For "imports" mappings, also searches the probable pre-build input
 * directories corresponding to outDir/declarationDir.
 */
function getModulesForPathsPattern(
    fragment: string,
    packageDirectory: string,
    pattern: string,
    extensionOptions: ExtensionOptions,
    isExports: boolean,
    isImports: boolean,
    program: Program,
    host: LanguageServiceHost,
    moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost,
): readonly NameAndKind[] | undefined {
    if (!host.readDirectory) {
        return undefined;
    }
    const parsed = tryParsePattern(pattern);
    if (parsed === undefined || isString(parsed)) {
        // Star-free target patterns produce no expandable matches.
        return undefined;
    }
    // The prefix has two effective parts: the directory path and the base component after the filepath that is not a
    // full directory component. For example: directory/path/of/prefix/base*
    const normalizedPrefix = resolvePath(parsed.prefix);
    const normalizedPrefixDirectory = hasTrailingDirectorySeparator(parsed.prefix) ? normalizedPrefix : getDirectoryPath(normalizedPrefix);
    const normalizedPrefixBase = hasTrailingDirectorySeparator(parsed.prefix) ? "" : getBaseFileName(normalizedPrefix);
    const fragmentHasPath = containsSlash(fragment);
    const fragmentDirectory = fragmentHasPath ? hasTrailingDirectorySeparator(fragment) ? fragment : getDirectoryPath(fragment) : undefined;
    const getCommonSourceDirectory = () => moduleSpecifierResolutionHost.getCommonSourceDirectory();
    const ignoreCase = !hostUsesCaseSensitiveFileNames(moduleSpecifierResolutionHost);
    const outDir = program.getCompilerOptions().outDir;
    const declarationDir = program.getCompilerOptions().declarationDir;
    // Try and expand the prefix to include any path from the fragment so that we can limit the readDirectory call
    const expandedPrefixDirectory = fragmentHasPath ? combinePaths(normalizedPrefixDirectory, normalizedPrefixBase + fragmentDirectory) : normalizedPrefixDirectory;
    // Need to normalize after combining: If we combinePaths("a", "../b"), we want "b" and not "a/../b".
    const baseDirectory = normalizePath(combinePaths(packageDirectory, expandedPrefixDirectory));
    const possibleInputBaseDirectoryForOutDir = isImports && outDir && getPossibleOriginalInputPathWithoutChangingExt(baseDirectory, ignoreCase, outDir, getCommonSourceDirectory);
    const possibleInputBaseDirectoryForDeclarationDir = isImports && declarationDir && getPossibleOriginalInputPathWithoutChangingExt(baseDirectory, ignoreCase, declarationDir, getCommonSourceDirectory);
    const normalizedSuffix = normalizePath(parsed.suffix);
    const declarationExtension = normalizedSuffix && getDeclarationEmitExtensionForPath("_" + normalizedSuffix);
    const inputExtension = normalizedSuffix ? getPossibleOriginalInputExtensionForExtension("_" + normalizedSuffix) : undefined;
    const matchingSuffixes = [
        declarationExtension && changeExtension(normalizedSuffix, declarationExtension),
        ...(inputExtension ? inputExtension.map(ext => changeExtension(normalizedSuffix, ext)) : []),
        normalizedSuffix,
    ].filter(isString);
    // If we have a suffix, then we read the directory all the way down to avoid returning completions for
    // directories that don't contain files that would match the suffix. A previous comment here was concerned
    // about the case where `normalizedSuffix` includes a `?` character, which should be interpreted literally,
    // but will match any single character as part of the `include` pattern in `tryReadDirectory`. This is not
    // a problem, because (in the extremely unusual circumstance where the suffix has a `?` in it) a `?`
    // interpreted as "any character" can only return *too many* results as compared to the literal
    // interpretation, so we can filter those superfluous results out via `trimPrefixAndSuffix` as we've always
    // done.
    const includeGlobs = normalizedSuffix
        ? matchingSuffixes.map(suffix => "**/*" + suffix)
        : ["./*"];
    const isExportsOrImportsWildcard = (isExports || isImports) && endsWith(pattern, "/*");
    let matches = getMatchesWithPrefix(baseDirectory);
    if (possibleInputBaseDirectoryForOutDir) {
        matches = concatenate(matches, getMatchesWithPrefix(possibleInputBaseDirectoryForOutDir));
    }
    if (possibleInputBaseDirectoryForDeclarationDir) {
        matches = concatenate(matches, getMatchesWithPrefix(possibleInputBaseDirectoryForDeclarationDir));
    }
    // If we had a suffix, we already recursively searched for all possible files that could match
    // it and returned the directories leading to those files. Otherwise, assume any directory could
    // have something valid to import.
    if (!normalizedSuffix) {
        matches = concatenate(matches, getDirectoryMatches(baseDirectory));
        if (possibleInputBaseDirectoryForOutDir) {
            matches = concatenate(matches, getDirectoryMatches(possibleInputBaseDirectoryForOutDir));
        }
        if (possibleInputBaseDirectoryForDeclarationDir) {
            matches = concatenate(matches, getDirectoryMatches(possibleInputBaseDirectoryForDeclarationDir));
        }
    }
    return matches;
    // Reads files under `directory` and converts each match to a file or directory entry.
    function getMatchesWithPrefix(directory: string) {
        const completePrefix = fragmentHasPath ? directory : ensureTrailingDirectorySeparator(directory) + normalizedPrefixBase;
        return mapDefined(tryReadDirectory(host, directory, extensionOptions.extensionsToSearch, /*exclude*/ undefined, includeGlobs), match => {
            const trimmedWithPattern = trimPrefixAndSuffix(match, completePrefix);
            if (trimmedWithPattern) {
                if (containsSlash(trimmedWithPattern)) {
                    // Deeper match: only the next path component becomes a directory entry.
                    return directoryResult(getPathComponents(removeLeadingDirectorySeparator(trimmedWithPattern))[1]);
                }
                const { name, extension } = getFilenameWithExtensionOption(trimmedWithPattern, program, extensionOptions, isExportsOrImportsWildcard);
                return nameAndKind(name, ScriptElementKind.scriptElement, extension);
            }
        });
    }
    // Immediate subdirectories, excluding node_modules.
    function getDirectoryMatches(directoryName: string) {
        return mapDefined(tryGetDirectories(host, directoryName), dir => dir === "node_modules" ? undefined : directoryResult(dir));
    }
    // Strips the pattern prefix and the first applicable suffix; undefined when no suffix applies.
    function trimPrefixAndSuffix(path: string, prefix: string): string | undefined {
        return firstDefined(matchingSuffixes, suffix => {
            const inner = withoutStartAndEnd(normalizePath(path), prefix, suffix);
            return inner === undefined ? undefined : removeLeadingDirectorySeparator(inner);
        });
    }
}
/** Strips `start` and `end` from `s`, or returns undefined when either is not present. */
function withoutStartAndEnd(s: string, start: string, end: string): string | undefined {
    if (!s.startsWith(start) || !s.endsWith(end)) return undefined;
    return s.slice(start.length, s.length - end.length);
}
/** Removes a single leading directory separator from `path`, if present. */
function removeLeadingDirectorySeparator(path: string): string {
    if (path.charAt(0) === directorySeparator) {
        return path.slice(1);
    }
    return path;
}
/** Collects ambient module names matching the fragment, trimmed to the text after the fragment's directory. */
function getAmbientModuleCompletions(fragment: string, fragmentDirectory: string | undefined, checker: TypeChecker): readonly string[] {
    // Get modules that the type checker picked up
    const ambientNames = checker.getAmbientModules().map(sym => stripQuotes(sym.name));
    const nonRelativeModuleNames = ambientNames.filter(moduleName => moduleName.startsWith(fragment) && !moduleName.includes("*"));
    // Nested modules of the form "module-name/sub" need to be adjusted to only return the string
    // after the last '/' that appears in the fragment because that's where the replacement span
    // starts
    if (fragmentDirectory === undefined) {
        return nonRelativeModuleNames;
    }
    const moduleNameWithSeparator = ensureTrailingDirectorySeparator(fragmentDirectory);
    return nonRelativeModuleNames.map(moduleName => removePrefix(moduleName, moduleNameWithSeparator));
}
/**
 * Completions for the string inside an incomplete `/// <reference path="..."` or
 * `/// <reference types="..."` directive; undefined when the position is not inside such a
 * directive's string literal.
 */
function getTripleSlashReferenceCompletion(sourceFile: SourceFile, position: number, program: Program, host: LanguageServiceHost, moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost): readonly PathCompletion[] | undefined {
    const compilerOptions = program.getCompilerOptions();
    const token = getTokenAtPosition(sourceFile, position);
    // The directive lives in a leading comment; find the comment range containing the caret.
    const commentRanges = getLeadingCommentRanges(sourceFile.text, token.pos);
    const range = commentRanges && find(commentRanges, commentRange => position >= commentRange.pos && position <= commentRange.end);
    if (!range) {
        return undefined;
    }
    const text = sourceFile.text.slice(range.pos, position);
    const match = tripleSlashDirectiveFragmentRegex.exec(text);
    if (!match) {
        return undefined;
    }
    // prefix = everything up to the opening quote; kind = "path" | "types"; toComplete = typed fragment.
    const [, prefix, kind, toComplete] = match;
    const scriptPath = getDirectoryPath(sourceFile.path);
    const names = kind === "path" ? getCompletionEntriesForDirectoryFragment(toComplete, scriptPath, getExtensionOptions(compilerOptions, ReferenceKind.Filename, sourceFile), program, host, moduleSpecifierResolutionHost, /*moduleSpecifierIsRelative*/ true, sourceFile.path)
        : kind === "types" ? getCompletionEntriesFromTypings(program, host, moduleSpecifierResolutionHost, scriptPath, getFragmentDirectory(toComplete), getExtensionOptions(compilerOptions, ReferenceKind.ModuleSpecifier, sourceFile))
        : Debug.fail();
    return addReplacementSpans(toComplete, range.pos + prefix.length, arrayFrom(names.values()));
}
/**
 * Adds completion entries for type packages found under the effective type roots and under each
 * visible `node_modules/@types` directory, honoring the "types" compiler option filter. Entries
 * accumulate into `result`, which is also returned.
 */
function getCompletionEntriesFromTypings(program: Program, host: LanguageServiceHost, moduleSpecifierResolutionHost: ModuleSpecifierResolutionHost, scriptPath: string, fragmentDirectory: string | undefined, extensionOptions: ExtensionOptions, result = createNameAndKindSet()): NameAndKindSet {
    const options = program.getCompilerOptions();
    // Check for typings specified in compiler options
    const seen = new Map<string, true>();
    const typeRoots = tryAndIgnoreErrors(() => getEffectiveTypeRoots(options, host)) || emptyArray;
    for (const root of typeRoots) {
        getCompletionEntriesFromDirectories(root);
    }
    // Also get all @types typings installed in visible node_modules directories
    for (const packageJson of findPackageJsons(scriptPath, host)) {
        const typesDir = combinePaths(getDirectoryPath(packageJson), "node_modules/@types");
        getCompletionEntriesFromDirectories(typesDir);
    }
    return result;
    function getCompletionEntriesFromDirectories(directory: string): void {
        if (!tryDirectoryExists(host, directory)) return;
        for (const typeDirectoryName of tryGetDirectories(host, directory)) {
            // "@types/foo__bar" folders represent scoped packages "@foo/bar".
            const packageName = unmangleScopedPackageName(typeDirectoryName);
            if (options.types && !contains(options.types, packageName)) continue;
            if (fragmentDirectory === undefined) {
                // Top-level: offer the package name itself (deduplicated across type roots).
                if (!seen.has(packageName)) {
                    result.add(nameAndKind(packageName, ScriptElementKind.externalModuleName, /*extension*/ undefined));
                    seen.set(packageName, true);
                }
            }
            else {
                // A subpath inside the package is being typed; complete within the package folder.
                const baseDirectory = combinePaths(directory, typeDirectoryName);
                const remainingFragment = tryRemoveDirectoryPrefix(fragmentDirectory, packageName, hostGetCanonicalFileName(host));
                if (remainingFragment !== undefined) {
                    getCompletionEntriesForDirectoryFragment(remainingFragment, baseDirectory, extensionOptions, program, host, moduleSpecifierResolutionHost, /*moduleSpecifierIsRelative*/ false, /*exclude*/ undefined, result);
                }
            }
        }
    }
}
/** Lists non-@types dependency names declared in every package.json visible from `scriptPath`. */
function enumerateNodeModulesVisibleToScript(host: LanguageServiceHost, scriptPath: string): readonly string[] {
    if (!host.readFile || !host.fileExists) return emptyArray;
    const moduleNames: string[] = [];
    for (const packageJson of findPackageJsons(scriptPath, host)) {
        const contents = readJson(packageJson, host as { readFile: (filename: string) => string | undefined; }); // Cast to assert that readFile is defined
        // Provide completions for all non @types dependencies
        for (const key of nodeModulesDependencyKeys) {
            const dependencies: object | undefined = (contents as any)[key];
            if (!dependencies) continue;
            for (const dep in dependencies) {
                if (hasProperty(dependencies, dep) && !dep.startsWith("@types/")) {
                    moduleNames.push(dep);
                }
            }
        }
    }
    return moduleNames;
}
// Replace everything after the last directory separator that appears
/**
 * Computes the replacement span covering the final path component of `text`, positioned at
 * `textStart` in the file. Returns undefined when the component is empty or is a plain
 * identifier (no replacement span needed).
 */
function getDirectoryFragmentTextSpan(text: string, textStart: number): TextSpan | undefined {
    const index = Math.max(text.lastIndexOf(directorySeparator), text.lastIndexOf(altDirectorySeparator));
    const offset = index !== -1 ? index + 1 : 0;
    // If the range is an identifier, span is unnecessary.
    const length = text.length - offset;
    // `slice` replaces the deprecated `substr`; the extracted text is everything from `offset` on.
    return length === 0 || isIdentifierText(text.slice(offset), ScriptTarget.ESNext) ? undefined : createTextSpan(textStart + offset, length);
}
// Returns true if the path is explicitly relative to the script (i.e. relative to . or ..)
function isPathRelativeToScript(path: string) {
    if (path.length < 2 || path[0] !== ".") {
        return false;
    }
    // "../x" puts the separator at index 2; "./x" (or ".x") at index 1.
    const separatorIndex = path.length >= 3 && path[1] === "." ? 2 : 1;
    const separator = path[separatorIndex];
    return separator === "/" || separator === "\\";
}
/**
 * Matches a triple slash reference directive with an incomplete string literal for its path. Used
 * to determine if the caret is currently within the string literal and capture the literal fragment
 * for completions.
 * For example, this matches
 *
 * /// <reference path="fragment
 *
 * but not
 *
 * /// <reference path="fragment"
 */
const tripleSlashDirectiveFragmentRegex = /^(\/\/\/\s*<reference\s+(path|types)\s*=\s*(?:'|"))([^\x03"]*)$/;
// package.json sections whose keys name installable packages; scanned when offering module-name completions.
const nodeModulesDependencyKeys: readonly string[] = ["dependencies", "devDependencies", "peerDependencies", "optionalDependencies"];
/** Whether the fragment already contains a directory separator. */
function containsSlash(fragment: string) {
    return fragment.indexOf(directorySeparator) !== -1;
}
/**
* Matches
* require(""
* require("")
*/
function isRequireCallArgument(node: Node) {
return isCallExpression(node.parent) && firstOrUndefined(node.parent.arguments) === node
&& isIdentifier(node.parent.expression) && node.parent.expression.escapedText === "require";
} | typescript | github | https://github.com/microsoft/TypeScript | src/services/stringCompletions.ts |
# Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Hansson
from m5.params import *
from MemObject import MemObject
# An address mapper changes the packet addresses in going from the
# slave port side of the mapper to the master port side. When the
# slave port is queried for the address ranges, it also performs the
# necessary range updates. Note that snoop requests that travel from
# the master port (i.e. the memory side) to the slave port are
# currently not modified.
class AddrMapper(MemObject):
    """Abstract SimObject that rewrites packet addresses as they travel
    from its slave port to its master port."""
    # C++ class name and header implementing this SimObject.
    type = 'AddrMapper'
    cxx_header = 'mem/addr_mapper.hh'
    # Abstract: only concrete subclasses (e.g. RangeAddrMapper) are instantiable.
    abstract = True
    # one port in each direction
    master = MasterPort("Master port")
    slave = SlavePort("Slave port")
# Range address mapper that maps a set of original ranges to a set of
# remapped ranges, where a specific range is of the same size
# (original and remapped), only with an offset.
class RangeAddrMapper(AddrMapper):
    """Address mapper that translates each of a set of original address
    ranges to a same-sized remapped range, i.e. a per-range offset."""
    type = 'RangeAddrMapper'
    cxx_header = 'mem/addr_mapper.hh'
    # These two vectors should be the exact same length and each range
    # should be the exact same size. Each range in original_ranges is
    # mapped to the corresponding element in the remapped_ranges. Note
    # that the same range can occur multiple times in the remapped
    # ranges for address aliasing.
    # Fix: parameter description typo "should me remapped" -> "should be remapped".
    original_ranges = VectorParam.AddrRange(
        "Ranges of memory that should be remapped")
    remapped_ranges = VectorParam.AddrRange(
        "Ranges of memory that are being mapped to")
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.views import redirect_to_login
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.utils import six
from django.utils.encoding import force_text
class AccessMixin(object):
    """
    Base mixin providing the shared, overridable knobs used by the
    concrete access-control mixins.
    """
    login_url = None
    permission_denied_message = ''
    raise_exception = False
    redirect_field_name = REDIRECT_FIELD_NAME

    def get_login_url(self):
        """
        Return the URL users are sent to for login; override this method
        to override the login_url attribute.
        """
        login_url = self.login_url or settings.LOGIN_URL
        if login_url:
            return force_text(login_url)
        raise ImproperlyConfigured(
            '{0} is missing the login_url attribute. Define {0}.login_url, settings.LOGIN_URL, or override '
            '{0}.get_login_url().'.format(self.__class__.__name__)
        )

    def get_permission_denied_message(self):
        """
        Return the message handed to PermissionDenied; override this
        method to override the permission_denied_message attribute.
        """
        return self.permission_denied_message

    def get_redirect_field_name(self):
        """
        Return the querystring field carrying the "next" URL; override
        this method to override the redirect_field_name attribute.
        """
        return self.redirect_field_name

    def handle_no_permission(self):
        # Either raise outright, or bounce the user to the login page
        # while preserving the originally requested path.
        if not self.raise_exception:
            return redirect_to_login(
                self.request.get_full_path(),
                self.get_login_url(),
                self.get_redirect_field_name(),
            )
        raise PermissionDenied(self.get_permission_denied_message())
class LoginRequiredMixin(AccessMixin):
    """
    CBV mixin that rejects requests from anonymous (unauthenticated) users.
    """
    def dispatch(self, request, *args, **kwargs):
        if request.user.is_authenticated():
            return super(LoginRequiredMixin, self).dispatch(request, *args, **kwargs)
        return self.handle_no_permission()
class PermissionRequiredMixin(AccessMixin):
    """
    CBV mixin that denies access unless the user holds every permission
    named in ``permission_required``.
    """
    permission_required = None

    def get_permission_required(self):
        """
        Return the required permissions as an iterable; override this
        method to override the permission_required attribute.
        """
        perms = self.permission_required
        if perms is None:
            raise ImproperlyConfigured(
                '{0} is missing the permission_required attribute. Define {0}.permission_required, or override '
                '{0}.get_permission_required().'.format(self.__class__.__name__)
            )
        # Normalize a lone permission string to a one-element tuple.
        if isinstance(perms, six.string_types):
            return (perms, )
        return perms

    def has_permission(self):
        """
        Hook for customizing how permissions are checked.
        """
        return self.request.user.has_perms(self.get_permission_required())

    def dispatch(self, request, *args, **kwargs):
        if self.has_permission():
            return super(PermissionRequiredMixin, self).dispatch(request, *args, **kwargs)
        return self.handle_no_permission()
class UserPassesTestMixin(AccessMixin):
    """
    CBV mixin gating the view on a user-supplied predicate: implement
    test_func() to return True when the current user may access the view.
    """
    def test_func(self):
        raise NotImplementedError(
            '{0} is missing the implementation of the test_func() method.'.format(self.__class__.__name__)
        )

    def get_test_func(self):
        """
        Return the predicate to evaluate; override this method to use a
        different test_func method.
        """
        return self.test_func

    def dispatch(self, request, *args, **kwargs):
        if self.get_test_func()():
            return super(UserPassesTestMixin, self).dispatch(request, *args, **kwargs)
        return self.handle_no_permission()
"""
Example:
python crop_segmented_objects -i /idpdata/frontal/*_COLOR.bmp -s /idpdata/frontal/segmentation_label.csv -o /idpdata/frontal_objects/
Crops objects from images and saves each object to an image file.
Takes a list of color_image names and a segmentation csv file.
Then each object (defined in the csv file for each image) is written to an output directory as a separate image file.
If a depth image was found beside the color image, a depth version of the object will be written to a separate file too.
Suppose that image
1234_COLOR.bmp
1234_DEPTH.png
has 3 objects, then the output will be:
1234_COLOR_1.bmp
1234_COLOR_2.bmp
1234_COLOR_3.bmp
1234_DEPTH_1.png
1234_DEPTH_2.png
1234_DEPTH_3.png
"""
import argparse
import idputils
import cv2
import os
import numpy as np
if __name__ == '__main__':
    # Command-line interface; see the module docstring for an example invocation.
    parser = argparse.ArgumentParser()
    parser.add_argument('-i',dest='imagepath', nargs='+', help="Path to a single color image, or a wildcard path matching multiple images.")
    parser.add_argument('-s',dest='csvpath')
    parser.add_argument('-o',dest='output_dir')
    parser.add_argument('-scale',dest='scale', type=float, default = 1, help="The scale at which coordinates are saved in the csv. E.g: if -scale 0.4, then coordinates will be scaled up by 1/0.4=2.5")
    args = parser.parse_args()
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)
    # Maps each image-filename prefix to the list of object bounding boxes
    # recorded in the segmentation csv (schema defined by idputils).
    prefix_box = idputils.read_segmentation_csv(args.csvpath)
    for colorimagefilePATH in args.imagepath:
        our_prefix = idputils.get_filename_prefix(colorimagefilePATH)
        #Get depthfile full path
        depthimagefilePATH = idputils.to_depth_filename(colorimagefilePATH)
        # NOTE(review): raises KeyError if an image has no csv entry -- presumably
        # every listed image is segmented; confirm against the csv producer.
        boxes = prefix_box[our_prefix]
        colorimagefilename = os.path.basename(colorimagefilePATH)
        depthimagefilename = os.path.basename(depthimagefilePATH)
        # 1-based object index appended to each output filename.
        count = 1
        print 'Cropping %i from image with id %s' % (len(boxes), str(our_prefix))
        for box in boxes:
            # Undo the scale the csv coordinates were saved at (e.g. 0.4 -> x2.5).
            box = np.array(box)*(1/args.scale)
            idputils.crop_to_file(colorimagefilePATH,
                    os.path.join(args.output_dir, idputils.to_object_filename(colorimagefilename, count)), *box)
            idputils.crop_to_file(depthimagefilePATH,
                    os.path.join(args.output_dir, idputils.to_object_filename(depthimagefilename, count)), *box)
            count += 1
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.8
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(750, 600)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setDocumentMode(False)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName("gridLayout")
self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
self.tabWidget.setEnabled(True)
self.tabWidget.setTabPosition(QtWidgets.QTabWidget.West)
self.tabWidget.setTabShape(QtWidgets.QTabWidget.Rounded)
self.tabWidget.setIconSize(QtCore.QSize(32, 32))
self.tabWidget.setElideMode(QtCore.Qt.ElideNone)
self.tabWidget.setUsesScrollButtons(True)
self.tabWidget.setDocumentMode(False)
self.tabWidget.setTabsClosable(False)
self.tabWidget.setMovable(True)
self.tabWidget.setTabBarAutoHide(False)
self.tabWidget.setObjectName("tabWidget")
self.tab_light = QtWidgets.QWidget()
self.tab_light.setObjectName("tab_light")
self.gridLayout_2 = QtWidgets.QGridLayout(self.tab_light)
self.gridLayout_2.setContentsMargins(0, 0, 0, 0)
self.gridLayout_2.setObjectName("gridLayout_2")
self.groupBox_color_bright = QtWidgets.QGroupBox(self.tab_light)
self.groupBox_color_bright.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_color_bright.sizePolicy().hasHeightForWidth())
self.groupBox_color_bright.setSizePolicy(sizePolicy)
self.groupBox_color_bright.setObjectName("groupBox_color_bright")
self.gridLayout_3 = QtWidgets.QGridLayout(self.groupBox_color_bright)
self.gridLayout_3.setObjectName("gridLayout_3")
self.horizontalSlider_b = QtWidgets.QSlider(self.groupBox_color_bright)
self.horizontalSlider_b.setEnabled(True)
self.horizontalSlider_b.setMaximum(255)
self.horizontalSlider_b.setProperty("value", 0)
self.horizontalSlider_b.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_b.setObjectName("horizontalSlider_b")
self.gridLayout_3.addWidget(self.horizontalSlider_b, 3, 3, 2, 1)
self.horizontalSlider_g = QtWidgets.QSlider(self.groupBox_color_bright)
self.horizontalSlider_g.setEnabled(True)
self.horizontalSlider_g.setMaximum(255)
self.horizontalSlider_g.setProperty("value", 0)
self.horizontalSlider_g.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_g.setObjectName("horizontalSlider_g")
self.gridLayout_3.addWidget(self.horizontalSlider_g, 2, 3, 1, 1)
self.spinBox_b = QtWidgets.QSpinBox(self.groupBox_color_bright)
self.spinBox_b.setEnabled(True)
self.spinBox_b.setWrapping(False)
self.spinBox_b.setFrame(True)
self.spinBox_b.setReadOnly(False)
self.spinBox_b.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows)
self.spinBox_b.setAccelerated(False)
self.spinBox_b.setProperty("showGroupSeparator", False)
self.spinBox_b.setMaximum(255)
self.spinBox_b.setProperty("value", 0)
self.spinBox_b.setObjectName("spinBox_b")
self.gridLayout_3.addWidget(self.spinBox_b, 3, 4, 2, 2)
self.label_2 = QtWidgets.QLabel(self.groupBox_color_bright)
self.label_2.setObjectName("label_2")
self.gridLayout_3.addWidget(self.label_2, 2, 2, 1, 1)
self.spinBox_g = QtWidgets.QSpinBox(self.groupBox_color_bright)
self.spinBox_g.setEnabled(True)
self.spinBox_g.setWrapping(False)
self.spinBox_g.setFrame(True)
self.spinBox_g.setReadOnly(False)
self.spinBox_g.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows)
self.spinBox_g.setAccelerated(False)
self.spinBox_g.setProperty("showGroupSeparator", False)
self.spinBox_g.setMaximum(255)
self.spinBox_g.setProperty("value", 0)
self.spinBox_g.setObjectName("spinBox_g")
self.gridLayout_3.addWidget(self.spinBox_g, 2, 4, 1, 2)
self.label_3 = QtWidgets.QLabel(self.groupBox_color_bright)
self.label_3.setObjectName("label_3")
self.gridLayout_3.addWidget(self.label_3, 3, 2, 2, 1)
self.spinBox_r = QtWidgets.QSpinBox(self.groupBox_color_bright)
self.spinBox_r.setEnabled(True)
self.spinBox_r.setWrapping(False)
self.spinBox_r.setFrame(True)
self.spinBox_r.setReadOnly(False)
self.spinBox_r.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows)
self.spinBox_r.setAccelerated(False)
self.spinBox_r.setProperty("showGroupSeparator", False)
self.spinBox_r.setMaximum(255)
self.spinBox_r.setProperty("value", 0)
self.spinBox_r.setObjectName("spinBox_r")
self.gridLayout_3.addWidget(self.spinBox_r, 0, 4, 2, 2)
self.label = QtWidgets.QLabel(self.groupBox_color_bright)
self.label.setObjectName("label")
self.gridLayout_3.addWidget(self.label, 0, 2, 2, 1)
self.lcdNumber_bright = QtWidgets.QLCDNumber(self.groupBox_color_bright)
self.lcdNumber_bright.setEnabled(True)
self.lcdNumber_bright.setFrameShape(QtWidgets.QFrame.NoFrame)
self.lcdNumber_bright.setSmallDecimalPoint(False)
self.lcdNumber_bright.setDigitCount(3)
self.lcdNumber_bright.setMode(QtWidgets.QLCDNumber.Dec)
self.lcdNumber_bright.setSegmentStyle(QtWidgets.QLCDNumber.Flat)
self.lcdNumber_bright.setProperty("value", 0.0)
self.lcdNumber_bright.setProperty("intValue", 0)
self.lcdNumber_bright.setObjectName("lcdNumber_bright")
self.gridLayout_3.addWidget(self.lcdNumber_bright, 0, 7, 5, 1)
self.pushButton_color = QtWidgets.QPushButton(self.groupBox_color_bright)
self.pushButton_color.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_color.sizePolicy().hasHeightForWidth())
self.pushButton_color.setSizePolicy(sizePolicy)
self.pushButton_color.setMinimumSize(QtCore.QSize(108, 0))
self.pushButton_color.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_color.setFlat(False)
self.pushButton_color.setObjectName("pushButton_color")
self.gridLayout_3.addWidget(self.pushButton_color, 0, 0, 5, 1)
self.horizontalSlider_r = QtWidgets.QSlider(self.groupBox_color_bright)
self.horizontalSlider_r.setEnabled(True)
self.horizontalSlider_r.setMaximum(255)
self.horizontalSlider_r.setProperty("value", 0)
self.horizontalSlider_r.setSliderPosition(0)
self.horizontalSlider_r.setTracking(True)
self.horizontalSlider_r.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_r.setInvertedAppearance(False)
self.horizontalSlider_r.setInvertedControls(False)
self.horizontalSlider_r.setTickPosition(QtWidgets.QSlider.NoTicks)
self.horizontalSlider_r.setTickInterval(0)
self.horizontalSlider_r.setObjectName("horizontalSlider_r")
self.gridLayout_3.addWidget(self.horizontalSlider_r, 0, 3, 2, 1)
self.dial_bright = QtWidgets.QDial(self.groupBox_color_bright)
self.dial_bright.setEnabled(True)
self.dial_bright.setMaximum(255)
self.dial_bright.setProperty("value", 0)
self.dial_bright.setSliderPosition(0)
self.dial_bright.setTracking(False)
self.dial_bright.setOrientation(QtCore.Qt.Horizontal)
self.dial_bright.setInvertedAppearance(False)
self.dial_bright.setInvertedControls(False)
self.dial_bright.setNotchesVisible(False)
self.dial_bright.setObjectName("dial_bright")
self.gridLayout_3.addWidget(self.dial_bright, 0, 6, 5, 1)
self.gridLayout_2.addWidget(self.groupBox_color_bright, 2, 0, 1, 2)
self.groupBox_last_colors = QtWidgets.QGroupBox(self.tab_light)
self.groupBox_last_colors.setEnabled(True)
self.groupBox_last_colors.setObjectName("groupBox_last_colors")
self.gridLayout_5 = QtWidgets.QGridLayout(self.groupBox_last_colors)
self.gridLayout_5.setObjectName("gridLayout_5")
self.pushButton_last18 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last18.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last18.sizePolicy().hasHeightForWidth())
self.pushButton_last18.setSizePolicy(sizePolicy)
self.pushButton_last18.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last18.setFlat(False)
self.pushButton_last18.setObjectName("pushButton_last18")
self.gridLayout_5.addWidget(self.pushButton_last18, 4, 3, 1, 1)
self.pushButton_last11 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last11.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last11.sizePolicy().hasHeightForWidth())
self.pushButton_last11.setSizePolicy(sizePolicy)
self.pushButton_last11.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last11.setFlat(False)
self.pushButton_last11.setObjectName("pushButton_last11")
self.gridLayout_5.addWidget(self.pushButton_last11, 2, 0, 1, 1)
self.pushButton_last15 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last15.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last15.sizePolicy().hasHeightForWidth())
self.pushButton_last15.setSizePolicy(sizePolicy)
self.pushButton_last15.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last15.setFlat(False)
self.pushButton_last15.setObjectName("pushButton_last15")
self.gridLayout_5.addWidget(self.pushButton_last15, 2, 5, 1, 1)
self.pushButton_last04 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last04.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last04.sizePolicy().hasHeightForWidth())
self.pushButton_last04.setSizePolicy(sizePolicy)
self.pushButton_last04.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last04.setFlat(False)
self.pushButton_last04.setObjectName("pushButton_last04")
self.gridLayout_5.addWidget(self.pushButton_last04, 0, 4, 1, 1)
self.pushButton_last17 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last17.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last17.sizePolicy().hasHeightForWidth())
self.pushButton_last17.setSizePolicy(sizePolicy)
self.pushButton_last17.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last17.setFlat(False)
self.pushButton_last17.setObjectName("pushButton_last17")
self.gridLayout_5.addWidget(self.pushButton_last17, 4, 1, 1, 1)
self.pushButton_last05 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last05.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last05.sizePolicy().hasHeightForWidth())
self.pushButton_last05.setSizePolicy(sizePolicy)
self.pushButton_last05.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last05.setFlat(False)
self.pushButton_last05.setObjectName("pushButton_last05")
self.gridLayout_5.addWidget(self.pushButton_last05, 0, 5, 1, 1)
self.pushButton_last12 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last12.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last12.sizePolicy().hasHeightForWidth())
self.pushButton_last12.setSizePolicy(sizePolicy)
self.pushButton_last12.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last12.setFlat(False)
self.pushButton_last12.setObjectName("pushButton_last12")
self.gridLayout_5.addWidget(self.pushButton_last12, 2, 1, 1, 1)
self.pushButton_last08 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last08.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last08.sizePolicy().hasHeightForWidth())
self.pushButton_last08.setSizePolicy(sizePolicy)
self.pushButton_last08.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last08.setFlat(False)
self.pushButton_last08.setObjectName("pushButton_last08")
self.gridLayout_5.addWidget(self.pushButton_last08, 1, 3, 1, 1)
self.pushButton_last16 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last16.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last16.sizePolicy().hasHeightForWidth())
self.pushButton_last16.setSizePolicy(sizePolicy)
self.pushButton_last16.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last16.setFlat(False)
self.pushButton_last16.setObjectName("pushButton_last16")
self.gridLayout_5.addWidget(self.pushButton_last16, 4, 0, 1, 1)
self.pushButton_last07 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last07.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last07.sizePolicy().hasHeightForWidth())
self.pushButton_last07.setSizePolicy(sizePolicy)
self.pushButton_last07.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last07.setFlat(False)
self.pushButton_last07.setObjectName("pushButton_last07")
self.gridLayout_5.addWidget(self.pushButton_last07, 1, 1, 1, 1)
self.pushButton_last14 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last14.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last14.sizePolicy().hasHeightForWidth())
self.pushButton_last14.setSizePolicy(sizePolicy)
self.pushButton_last14.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last14.setFlat(False)
self.pushButton_last14.setObjectName("pushButton_last14")
self.gridLayout_5.addWidget(self.pushButton_last14, 2, 4, 1, 1)
self.pushButton_last20 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last20.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last20.sizePolicy().hasHeightForWidth())
self.pushButton_last20.setSizePolicy(sizePolicy)
self.pushButton_last20.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last20.setFlat(False)
self.pushButton_last20.setObjectName("pushButton_last20")
self.gridLayout_5.addWidget(self.pushButton_last20, 4, 5, 1, 1)
self.pushButton_last09 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last09.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last09.sizePolicy().hasHeightForWidth())
self.pushButton_last09.setSizePolicy(sizePolicy)
self.pushButton_last09.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last09.setFlat(False)
self.pushButton_last09.setObjectName("pushButton_last09")
self.gridLayout_5.addWidget(self.pushButton_last09, 1, 4, 1, 1)
self.pushButton_last13 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last13.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last13.sizePolicy().hasHeightForWidth())
self.pushButton_last13.setSizePolicy(sizePolicy)
self.pushButton_last13.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last13.setFlat(False)
self.pushButton_last13.setObjectName("pushButton_last13")
self.gridLayout_5.addWidget(self.pushButton_last13, 2, 3, 1, 1)
self.pushButton_last10 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last10.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last10.sizePolicy().hasHeightForWidth())
self.pushButton_last10.setSizePolicy(sizePolicy)
self.pushButton_last10.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last10.setFlat(False)
self.pushButton_last10.setObjectName("pushButton_last10")
self.gridLayout_5.addWidget(self.pushButton_last10, 1, 5, 1, 1)
self.pushButton_last02 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last02.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last02.sizePolicy().hasHeightForWidth())
self.pushButton_last02.setSizePolicy(sizePolicy)
self.pushButton_last02.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last02.setFlat(False)
self.pushButton_last02.setObjectName("pushButton_last02")
self.gridLayout_5.addWidget(self.pushButton_last02, 0, 1, 1, 1)
self.pushButton_last19 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last19.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last19.sizePolicy().hasHeightForWidth())
self.pushButton_last19.setSizePolicy(sizePolicy)
self.pushButton_last19.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last19.setFlat(False)
self.pushButton_last19.setObjectName("pushButton_last19")
self.gridLayout_5.addWidget(self.pushButton_last19, 4, 4, 1, 1)
self.pushButton_last03 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last03.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last03.sizePolicy().hasHeightForWidth())
self.pushButton_last03.setSizePolicy(sizePolicy)
self.pushButton_last03.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last03.setFlat(False)
self.pushButton_last03.setObjectName("pushButton_last03")
self.gridLayout_5.addWidget(self.pushButton_last03, 0, 3, 1, 1)
self.pushButton_last01 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last01.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last01.sizePolicy().hasHeightForWidth())
self.pushButton_last01.setSizePolicy(sizePolicy)
self.pushButton_last01.setAutoFillBackground(False)
self.pushButton_last01.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last01.setAutoDefault(False)
self.pushButton_last01.setDefault(False)
self.pushButton_last01.setFlat(False)
self.pushButton_last01.setObjectName("pushButton_last01")
self.gridLayout_5.addWidget(self.pushButton_last01, 0, 0, 1, 1)
self.pushButton_last06 = QtWidgets.QPushButton(self.groupBox_last_colors)
self.pushButton_last06.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_last06.sizePolicy().hasHeightForWidth())
self.pushButton_last06.setSizePolicy(sizePolicy)
self.pushButton_last06.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_last06.setFlat(False)
self.pushButton_last06.setObjectName("pushButton_last06")
self.gridLayout_5.addWidget(self.pushButton_last06, 1, 0, 1, 1)
self.pushButton_last01.raise_()
self.pushButton_last07.raise_()
self.pushButton_last06.raise_()
self.pushButton_last08.raise_()
self.pushButton_last16.raise_()
self.pushButton_last18.raise_()
self.pushButton_last17.raise_()
self.pushButton_last02.raise_()
self.pushButton_last03.raise_()
self.pushButton_last11.raise_()
self.pushButton_last12.raise_()
self.pushButton_last19.raise_()
self.pushButton_last09.raise_()
self.pushButton_last14.raise_()
self.pushButton_last04.raise_()
self.pushButton_last05.raise_()
self.pushButton_last10.raise_()
self.pushButton_last15.raise_()
self.pushButton_last20.raise_()
self.pushButton_last13.raise_()
self.gridLayout_2.addWidget(self.groupBox_last_colors, 0, 0, 1, 2)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("Images/128x128/actions/application-exit-5.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.tab_light, icon, "")
self.tab_ilumination = QtWidgets.QWidget()
self.tab_ilumination.setObjectName("tab_ilumination")
self.gridLayout_6 = QtWidgets.QGridLayout(self.tab_ilumination)
self.gridLayout_6.setContentsMargins(0, 0, 0, 0)
self.gridLayout_6.setObjectName("gridLayout_6")
self.groupBox_effect2 = QtWidgets.QGroupBox(self.tab_ilumination)
self.groupBox_effect2.setEnabled(True)
self.groupBox_effect2.setObjectName("groupBox_effect2")
self.gridLayout_8 = QtWidgets.QGridLayout(self.groupBox_effect2)
self.gridLayout_8.setObjectName("gridLayout_8")
self.plainTextEdit_input2 = QtWidgets.QPlainTextEdit(self.groupBox_effect2)
self.plainTextEdit_input2.setEnabled(True)
font = QtGui.QFont()
font.setFamily("Courier New")
font.setPointSize(12)
self.plainTextEdit_input2.setFont(font)
self.plainTextEdit_input2.setObjectName("plainTextEdit_input2")
self.gridLayout_8.addWidget(self.plainTextEdit_input2, 0, 2, 5, 1)
self.spinBox_time2 = QtWidgets.QSpinBox(self.groupBox_effect2)
self.spinBox_time2.setEnabled(True)
self.spinBox_time2.setMaximum(10000)
self.spinBox_time2.setProperty("value", 500)
self.spinBox_time2.setObjectName("spinBox_time2")
self.gridLayout_8.addWidget(self.spinBox_time2, 3, 0, 1, 1)
self.label_16 = QtWidgets.QLabel(self.groupBox_effect2)
self.label_16.setObjectName("label_16")
self.gridLayout_8.addWidget(self.label_16, 3, 1, 1, 1)
self.comboBox_effect2 = QtWidgets.QComboBox(self.groupBox_effect2)
self.comboBox_effect2.setEnabled(True)
self.comboBox_effect2.setObjectName("comboBox_effect2")
self.gridLayout_8.addWidget(self.comboBox_effect2, 1, 0, 1, 2)
self.label_11 = QtWidgets.QLabel(self.groupBox_effect2)
self.label_11.setObjectName("label_11")
self.gridLayout_8.addWidget(self.label_11, 0, 0, 1, 2)
self.label_10 = QtWidgets.QLabel(self.groupBox_effect2)
self.label_10.setObjectName("label_10")
self.gridLayout_8.addWidget(self.label_10, 2, 0, 1, 2)
self.pushButton_effect2 = QtWidgets.QPushButton(self.groupBox_effect2)
self.pushButton_effect2.setEnabled(True)
self.pushButton_effect2.setCheckable(True)
self.pushButton_effect2.setObjectName("pushButton_effect2")
self.gridLayout_8.addWidget(self.pushButton_effect2, 4, 0, 1, 2)
self.gridLayout_6.addWidget(self.groupBox_effect2, 0, 1, 1, 1)
self.groupBox_effect3 = QtWidgets.QGroupBox(self.tab_ilumination)
self.groupBox_effect3.setEnabled(True)
self.groupBox_effect3.setObjectName("groupBox_effect3")
self.gridLayout_9 = QtWidgets.QGridLayout(self.groupBox_effect3)
self.gridLayout_9.setObjectName("gridLayout_9")
self.spinBox_time3 = QtWidgets.QSpinBox(self.groupBox_effect3)
self.spinBox_time3.setEnabled(True)
self.spinBox_time3.setMaximum(10000)
self.spinBox_time3.setProperty("value", 500)
self.spinBox_time3.setObjectName("spinBox_time3")
self.gridLayout_9.addWidget(self.spinBox_time3, 3, 0, 1, 1)
self.label_17 = QtWidgets.QLabel(self.groupBox_effect3)
self.label_17.setObjectName("label_17")
self.gridLayout_9.addWidget(self.label_17, 3, 1, 1, 1)
self.label_13 = QtWidgets.QLabel(self.groupBox_effect3)
self.label_13.setObjectName("label_13")
self.gridLayout_9.addWidget(self.label_13, 0, 0, 1, 2)
self.comboBox_effect3 = QtWidgets.QComboBox(self.groupBox_effect3)
self.comboBox_effect3.setEnabled(True)
self.comboBox_effect3.setObjectName("comboBox_effect3")
self.gridLayout_9.addWidget(self.comboBox_effect3, 1, 0, 1, 2)
self.label_12 = QtWidgets.QLabel(self.groupBox_effect3)
self.label_12.setObjectName("label_12")
self.gridLayout_9.addWidget(self.label_12, 2, 0, 1, 2)
self.pushButton_effect3 = QtWidgets.QPushButton(self.groupBox_effect3)
self.pushButton_effect3.setEnabled(True)
self.pushButton_effect3.setCheckable(True)
self.pushButton_effect3.setObjectName("pushButton_effect3")
self.gridLayout_9.addWidget(self.pushButton_effect3, 4, 0, 1, 2)
self.plainTextEdit_input3 = QtWidgets.QPlainTextEdit(self.groupBox_effect3)
self.plainTextEdit_input3.setEnabled(True)
font = QtGui.QFont()
font.setFamily("Courier New")
font.setPointSize(12)
self.plainTextEdit_input3.setFont(font)
self.plainTextEdit_input3.setObjectName("plainTextEdit_input3")
self.gridLayout_9.addWidget(self.plainTextEdit_input3, 0, 4, 5, 1)
self.gridLayout_6.addWidget(self.groupBox_effect3, 2, 0, 1, 1)
self.groupBox_effect4 = QtWidgets.QGroupBox(self.tab_ilumination)
self.groupBox_effect4.setEnabled(True)
self.groupBox_effect4.setObjectName("groupBox_effect4")
self.gridLayout_10 = QtWidgets.QGridLayout(self.groupBox_effect4)
self.gridLayout_10.setObjectName("gridLayout_10")
self.plainTextEdit_input4 = QtWidgets.QPlainTextEdit(self.groupBox_effect4)
self.plainTextEdit_input4.setEnabled(True)
font = QtGui.QFont()
font.setFamily("Courier New")
font.setPointSize(12)
self.plainTextEdit_input4.setFont(font)
self.plainTextEdit_input4.setObjectName("plainTextEdit_input4")
self.gridLayout_10.addWidget(self.plainTextEdit_input4, 0, 2, 5, 1)
self.spinBox_time4 = QtWidgets.QSpinBox(self.groupBox_effect4)
self.spinBox_time4.setEnabled(True)
self.spinBox_time4.setMaximum(10000)
self.spinBox_time4.setProperty("value", 500)
self.spinBox_time4.setObjectName("spinBox_time4")
self.gridLayout_10.addWidget(self.spinBox_time4, 3, 0, 1, 1)
self.label_18 = QtWidgets.QLabel(self.groupBox_effect4)
self.label_18.setObjectName("label_18")
self.gridLayout_10.addWidget(self.label_18, 3, 1, 1, 1)
self.label_14 = QtWidgets.QLabel(self.groupBox_effect4)
self.label_14.setObjectName("label_14")
self.gridLayout_10.addWidget(self.label_14, 2, 0, 1, 2)
self.comboBox_effect4 = QtWidgets.QComboBox(self.groupBox_effect4)
self.comboBox_effect4.setEnabled(True)
self.comboBox_effect4.setObjectName("comboBox_effect4")
self.gridLayout_10.addWidget(self.comboBox_effect4, 1, 0, 1, 2)
self.label_15 = QtWidgets.QLabel(self.groupBox_effect4)
self.label_15.setObjectName("label_15")
self.gridLayout_10.addWidget(self.label_15, 0, 0, 1, 2)
self.pushButton_effect4 = QtWidgets.QPushButton(self.groupBox_effect4)
self.pushButton_effect4.setEnabled(True)
self.pushButton_effect4.setCheckable(True)
self.pushButton_effect4.setObjectName("pushButton_effect4")
self.gridLayout_10.addWidget(self.pushButton_effect4, 4, 0, 1, 2)
self.gridLayout_6.addWidget(self.groupBox_effect4, 2, 1, 1, 1)
self.groupBox_effect1 = QtWidgets.QGroupBox(self.tab_ilumination)
self.groupBox_effect1.setEnabled(True)
self.groupBox_effect1.setObjectName("groupBox_effect1")
self.gridLayout_7 = QtWidgets.QGridLayout(self.groupBox_effect1)
self.gridLayout_7.setObjectName("gridLayout_7")
self.spinBox_time1 = QtWidgets.QSpinBox(self.groupBox_effect1)
self.spinBox_time1.setEnabled(True)
self.spinBox_time1.setMaximum(10000)
self.spinBox_time1.setProperty("value", 500)
self.spinBox_time1.setObjectName("spinBox_time1")
self.gridLayout_7.addWidget(self.spinBox_time1, 3, 2, 1, 1)
self.pushButton_effect1 = QtWidgets.QPushButton(self.groupBox_effect1)
self.pushButton_effect1.setEnabled(True)
self.pushButton_effect1.setCheckable(True)
self.pushButton_effect1.setObjectName("pushButton_effect1")
self.gridLayout_7.addWidget(self.pushButton_effect1, 4, 2, 1, 2)
self.label_7 = QtWidgets.QLabel(self.groupBox_effect1)
self.label_7.setObjectName("label_7")
self.gridLayout_7.addWidget(self.label_7, 3, 3, 1, 1)
self.label_8 = QtWidgets.QLabel(self.groupBox_effect1)
self.label_8.setObjectName("label_8")
self.gridLayout_7.addWidget(self.label_8, 2, 2, 1, 2)
self.comboBox_effect1 = QtWidgets.QComboBox(self.groupBox_effect1)
self.comboBox_effect1.setEnabled(True)
self.comboBox_effect1.setObjectName("comboBox_effect1")
self.gridLayout_7.addWidget(self.comboBox_effect1, 1, 2, 1, 2)
self.label_9 = QtWidgets.QLabel(self.groupBox_effect1)
self.label_9.setObjectName("label_9")
self.gridLayout_7.addWidget(self.label_9, 0, 2, 1, 2)
self.plainTextEdit_input1 = QtWidgets.QPlainTextEdit(self.groupBox_effect1)
self.plainTextEdit_input1.setEnabled(True)
font = QtGui.QFont()
font.setFamily("Courier New")
font.setPointSize(12)
self.plainTextEdit_input1.setFont(font)
self.plainTextEdit_input1.setObjectName("plainTextEdit_input1")
self.gridLayout_7.addWidget(self.plainTextEdit_input1, 0, 4, 5, 1)
self.gridLayout_6.addWidget(self.groupBox_effect1, 0, 0, 1, 1)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap("Images/128x128/actions/application-exit-4.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.tabWidget.addTab(self.tab_ilumination, icon1, "")
self.tab_sound = QtWidgets.QWidget()
self.tab_sound.setObjectName("tab_sound")
self.gridLayout_15 = QtWidgets.QGridLayout(self.tab_sound)
self.gridLayout_15.setContentsMargins(0, 0, 0, 0)
self.gridLayout_15.setObjectName("gridLayout_15")
self.label_23 = QtWidgets.QLabel(self.tab_sound)
self.label_23.setMaximumSize(QtCore.QSize(75, 16777215))
self.label_23.setAlignment(QtCore.Qt.AlignCenter)
self.label_23.setObjectName("label_23")
self.gridLayout_15.addWidget(self.label_23, 0, 0, 1, 1)
self.comboBox_effect_music = QtWidgets.QComboBox(self.tab_sound)
self.comboBox_effect_music.setEnabled(True)
self.comboBox_effect_music.setMaximumSize(QtCore.QSize(100, 16777215))
self.comboBox_effect_music.setObjectName("comboBox_effect_music")
self.gridLayout_15.addWidget(self.comboBox_effect_music, 0, 6, 1, 1)
self.pushButton_sound_onoff = QtWidgets.QPushButton(self.tab_sound)
self.pushButton_sound_onoff.setCheckable(True)
self.pushButton_sound_onoff.setObjectName("pushButton_sound_onoff")
self.gridLayout_15.addWidget(self.pushButton_sound_onoff, 3, 0, 1, 7)
self.label_41 = QtWidgets.QLabel(self.tab_sound)
self.label_41.setMaximumSize(QtCore.QSize(75, 16777215))
self.label_41.setAlignment(QtCore.Qt.AlignCenter)
self.label_41.setObjectName("label_41")
self.gridLayout_15.addWidget(self.label_41, 0, 5, 1, 1)
self.comboBox_input = QtWidgets.QComboBox(self.tab_sound)
self.comboBox_input.setEnabled(True)
self.comboBox_input.setObjectName("comboBox_input")
self.gridLayout_15.addWidget(self.comboBox_input, 0, 1, 1, 4)
self.groupBox_bit_detect = QtWidgets.QGroupBox(self.tab_sound)
self.groupBox_bit_detect.setObjectName("groupBox_bit_detect")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.groupBox_bit_detect)
self.horizontalLayout.setObjectName("horizontalLayout")
self.plainTextEdit_bitdetector = QtWidgets.QPlainTextEdit(self.groupBox_bit_detect)
self.plainTextEdit_bitdetector.setEnabled(False)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.plainTextEdit_bitdetector.sizePolicy().hasHeightForWidth())
self.plainTextEdit_bitdetector.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setFamily("Courier New")
font.setPointSize(12)
self.plainTextEdit_bitdetector.setFont(font)
self.plainTextEdit_bitdetector.setDocumentTitle("")
self.plainTextEdit_bitdetector.setBackgroundVisible(False)
self.plainTextEdit_bitdetector.setObjectName("plainTextEdit_bitdetector")
self.horizontalLayout.addWidget(self.plainTextEdit_bitdetector)
self.gridLayout_15.addWidget(self.groupBox_bit_detect, 1, 5, 1, 2)
self.groupBox_freq = QtWidgets.QGroupBox(self.tab_sound)
self.groupBox_freq.setEnabled(True)
self.groupBox_freq.setLayoutDirection(QtCore.Qt.LeftToRight)
self.groupBox_freq.setObjectName("groupBox_freq")
self.gridLayout_16 = QtWidgets.QGridLayout(self.groupBox_freq)
self.gridLayout_16.setObjectName("gridLayout_16")
self.spinBox_noise = QtWidgets.QSpinBox(self.groupBox_freq)
self.spinBox_noise.setMaximum(10000)
self.spinBox_noise.setSingleStep(100)
self.spinBox_noise.setProperty("value", 1000)
self.spinBox_noise.setObjectName("spinBox_noise")
self.gridLayout_16.addWidget(self.spinBox_noise, 5, 6, 1, 1)
self.horizontalSlider_noise = QtWidgets.QSlider(self.groupBox_freq)
self.horizontalSlider_noise.setMaximum(10000)
self.horizontalSlider_noise.setSingleStep(100)
self.horizontalSlider_noise.setPageStep(1000)
self.horizontalSlider_noise.setProperty("value", 1000)
self.horizontalSlider_noise.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_noise.setObjectName("horizontalSlider_noise")
self.gridLayout_16.addWidget(self.horizontalSlider_noise, 5, 1, 1, 5)
self.doubleSpinBox_mult_high = QtWidgets.QDoubleSpinBox(self.groupBox_freq)
self.doubleSpinBox_mult_high.setMaximum(1.0)
self.doubleSpinBox_mult_high.setSingleStep(0.1)
self.doubleSpinBox_mult_high.setProperty("value", 1.0)
self.doubleSpinBox_mult_high.setObjectName("doubleSpinBox_mult_high")
self.gridLayout_16.addWidget(self.doubleSpinBox_mult_high, 2, 5, 1, 2)
self.doubleSpinBox_mult_low = QtWidgets.QDoubleSpinBox(self.groupBox_freq)
self.doubleSpinBox_mult_low.setMaximum(1.0)
self.doubleSpinBox_mult_low.setSingleStep(0.1)
self.doubleSpinBox_mult_low.setProperty("value", 1.0)
self.doubleSpinBox_mult_low.setObjectName("doubleSpinBox_mult_low")
self.gridLayout_16.addWidget(self.doubleSpinBox_mult_low, 2, 1, 1, 2)
self.doubleSpinBox_mult_mid = QtWidgets.QDoubleSpinBox(self.groupBox_freq)
self.doubleSpinBox_mult_mid.setMaximum(1.0)
self.doubleSpinBox_mult_mid.setSingleStep(0.1)
self.doubleSpinBox_mult_mid.setProperty("value", 1.0)
self.doubleSpinBox_mult_mid.setObjectName("doubleSpinBox_mult_mid")
self.gridLayout_16.addWidget(self.doubleSpinBox_mult_mid, 2, 3, 1, 2)
self.pushButton_color_high = QtWidgets.QPushButton(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_color_high.sizePolicy().hasHeightForWidth())
self.pushButton_color_high.setSizePolicy(sizePolicy)
self.pushButton_color_high.setMinimumSize(QtCore.QSize(0, 100))
self.pushButton_color_high.setMaximumSize(QtCore.QSize(16777215, 200))
self.pushButton_color_high.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_color_high.setObjectName("pushButton_color_high")
self.gridLayout_16.addWidget(self.pushButton_color_high, 1, 5, 1, 2)
self.pushButton_color_low = QtWidgets.QPushButton(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_color_low.sizePolicy().hasHeightForWidth())
self.pushButton_color_low.setSizePolicy(sizePolicy)
self.pushButton_color_low.setMinimumSize(QtCore.QSize(0, 100))
self.pushButton_color_low.setMaximumSize(QtCore.QSize(16777215, 200))
self.pushButton_color_low.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_color_low.setObjectName("pushButton_color_low")
self.gridLayout_16.addWidget(self.pushButton_color_low, 1, 1, 1, 2)
self.pushButton_color_mid = QtWidgets.QPushButton(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_color_mid.sizePolicy().hasHeightForWidth())
self.pushButton_color_mid.setSizePolicy(sizePolicy)
self.pushButton_color_mid.setMinimumSize(QtCore.QSize(0, 100))
self.pushButton_color_mid.setMaximumSize(QtCore.QSize(16777215, 200))
self.pushButton_color_mid.setStyleSheet("border: 0px;\n"
"background-color: rgb(0, 0, 0);")
self.pushButton_color_mid.setObjectName("pushButton_color_mid")
self.gridLayout_16.addWidget(self.pushButton_color_mid, 1, 3, 1, 2)
self.label_22 = QtWidgets.QLabel(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_22.sizePolicy().hasHeightForWidth())
self.label_22.setSizePolicy(sizePolicy)
self.label_22.setAlignment(QtCore.Qt.AlignCenter)
self.label_22.setObjectName("label_22")
self.gridLayout_16.addWidget(self.label_22, 0, 1, 1, 1)
self.label_24 = QtWidgets.QLabel(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_24.sizePolicy().hasHeightForWidth())
self.label_24.setSizePolicy(sizePolicy)
self.label_24.setAlignment(QtCore.Qt.AlignCenter)
self.label_24.setObjectName("label_24")
self.gridLayout_16.addWidget(self.label_24, 0, 3, 1, 1)
self.verticalSlider_lower_low = QtWidgets.QSlider(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.verticalSlider_lower_low.sizePolicy().hasHeightForWidth())
self.verticalSlider_lower_low.setSizePolicy(sizePolicy)
self.verticalSlider_lower_low.setMinimumSize(QtCore.QSize(0, 0))
self.verticalSlider_lower_low.setLayoutDirection(QtCore.Qt.LeftToRight)
self.verticalSlider_lower_low.setMinimum(20)
self.verticalSlider_lower_low.setMaximum(200)
self.verticalSlider_lower_low.setOrientation(QtCore.Qt.Vertical)
self.verticalSlider_lower_low.setObjectName("verticalSlider_lower_low")
self.gridLayout_16.addWidget(self.verticalSlider_lower_low, 3, 1, 1, 1)
self.spinBox_lower_high = QtWidgets.QSpinBox(self.groupBox_freq)
self.spinBox_lower_high.setSuffix("")
self.spinBox_lower_high.setMinimum(1000)
self.spinBox_lower_high.setMaximum(20000)
self.spinBox_lower_high.setProperty("value", 1000)
self.spinBox_lower_high.setObjectName("spinBox_lower_high")
self.gridLayout_16.addWidget(self.spinBox_lower_high, 4, 5, 1, 1)
self.spinBox_higher_high = QtWidgets.QSpinBox(self.groupBox_freq)
self.spinBox_higher_high.setSuffix("")
self.spinBox_higher_high.setMinimum(1000)
self.spinBox_higher_high.setMaximum(20000)
self.spinBox_higher_high.setProperty("value", 20000)
self.spinBox_higher_high.setObjectName("spinBox_higher_high")
self.gridLayout_16.addWidget(self.spinBox_higher_high, 4, 6, 1, 1)
self.spinBox_higher_low = QtWidgets.QSpinBox(self.groupBox_freq)
self.spinBox_higher_low.setSuffix("")
self.spinBox_higher_low.setMinimum(20)
self.spinBox_higher_low.setMaximum(200)
self.spinBox_higher_low.setProperty("value", 100)
self.spinBox_higher_low.setObjectName("spinBox_higher_low")
self.gridLayout_16.addWidget(self.spinBox_higher_low, 4, 2, 1, 1)
self.spinBox_higher_mid = QtWidgets.QSpinBox(self.groupBox_freq)
self.spinBox_higher_mid.setMinimum(100)
self.spinBox_higher_mid.setMaximum(2000)
self.spinBox_higher_mid.setProperty("value", 1000)
self.spinBox_higher_mid.setObjectName("spinBox_higher_mid")
self.gridLayout_16.addWidget(self.spinBox_higher_mid, 4, 4, 1, 1)
self.spinBox_lower_mid = QtWidgets.QSpinBox(self.groupBox_freq)
self.spinBox_lower_mid.setMinimum(100)
self.spinBox_lower_mid.setMaximum(2000)
self.spinBox_lower_mid.setObjectName("spinBox_lower_mid")
self.gridLayout_16.addWidget(self.spinBox_lower_mid, 4, 3, 1, 1)
self.verticalSlider_higher_high = QtWidgets.QSlider(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.verticalSlider_higher_high.sizePolicy().hasHeightForWidth())
self.verticalSlider_higher_high.setSizePolicy(sizePolicy)
self.verticalSlider_higher_high.setMinimumSize(QtCore.QSize(0, 0))
self.verticalSlider_higher_high.setLayoutDirection(QtCore.Qt.LeftToRight)
self.verticalSlider_higher_high.setMinimum(1000)
self.verticalSlider_higher_high.setMaximum(20000)
self.verticalSlider_higher_high.setProperty("value", 20000)
self.verticalSlider_higher_high.setOrientation(QtCore.Qt.Vertical)
self.verticalSlider_higher_high.setObjectName("verticalSlider_higher_high")
self.gridLayout_16.addWidget(self.verticalSlider_higher_high, 3, 6, 1, 1)
self.spinBox_lower_low = QtWidgets.QSpinBox(self.groupBox_freq)
self.spinBox_lower_low.setSuffix("")
self.spinBox_lower_low.setPrefix("")
self.spinBox_lower_low.setMinimum(20)
self.spinBox_lower_low.setMaximum(200)
self.spinBox_lower_low.setObjectName("spinBox_lower_low")
self.gridLayout_16.addWidget(self.spinBox_lower_low, 4, 1, 1, 1)
self.verticalSlider_lower_high = QtWidgets.QSlider(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.verticalSlider_lower_high.sizePolicy().hasHeightForWidth())
self.verticalSlider_lower_high.setSizePolicy(sizePolicy)
self.verticalSlider_lower_high.setMinimumSize(QtCore.QSize(0, 0))
self.verticalSlider_lower_high.setLayoutDirection(QtCore.Qt.LeftToRight)
self.verticalSlider_lower_high.setMinimum(1000)
self.verticalSlider_lower_high.setMaximum(20000)
self.verticalSlider_lower_high.setProperty("value", 1000)
self.verticalSlider_lower_high.setOrientation(QtCore.Qt.Vertical)
self.verticalSlider_lower_high.setObjectName("verticalSlider_lower_high")
self.gridLayout_16.addWidget(self.verticalSlider_lower_high, 3, 5, 1, 1)
self.verticalSlider_lower_mid = QtWidgets.QSlider(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.verticalSlider_lower_mid.sizePolicy().hasHeightForWidth())
self.verticalSlider_lower_mid.setSizePolicy(sizePolicy)
self.verticalSlider_lower_mid.setMinimumSize(QtCore.QSize(0, 0))
self.verticalSlider_lower_mid.setLayoutDirection(QtCore.Qt.LeftToRight)
self.verticalSlider_lower_mid.setMinimum(100)
self.verticalSlider_lower_mid.setMaximum(2000)
self.verticalSlider_lower_mid.setProperty("value", 100)
self.verticalSlider_lower_mid.setOrientation(QtCore.Qt.Vertical)
self.verticalSlider_lower_mid.setObjectName("verticalSlider_lower_mid")
self.gridLayout_16.addWidget(self.verticalSlider_lower_mid, 3, 3, 1, 1)
self.verticalSlider_higher_mid = QtWidgets.QSlider(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.verticalSlider_higher_mid.sizePolicy().hasHeightForWidth())
self.verticalSlider_higher_mid.setSizePolicy(sizePolicy)
self.verticalSlider_higher_mid.setMinimumSize(QtCore.QSize(0, 0))
self.verticalSlider_higher_mid.setLayoutDirection(QtCore.Qt.LeftToRight)
self.verticalSlider_higher_mid.setMinimum(100)
self.verticalSlider_higher_mid.setMaximum(2000)
self.verticalSlider_higher_mid.setProperty("value", 1000)
self.verticalSlider_higher_mid.setOrientation(QtCore.Qt.Vertical)
self.verticalSlider_higher_mid.setObjectName("verticalSlider_higher_mid")
self.gridLayout_16.addWidget(self.verticalSlider_higher_mid, 3, 4, 1, 1)
self.verticalSlider_higher_low = QtWidgets.QSlider(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.verticalSlider_higher_low.sizePolicy().hasHeightForWidth())
self.verticalSlider_higher_low.setSizePolicy(sizePolicy)
self.verticalSlider_higher_low.setMinimumSize(QtCore.QSize(0, 0))
self.verticalSlider_higher_low.setLayoutDirection(QtCore.Qt.LeftToRight)
self.verticalSlider_higher_low.setMinimum(20)
self.verticalSlider_higher_low.setMaximum(200)
self.verticalSlider_higher_low.setProperty("value", 100)
self.verticalSlider_higher_low.setOrientation(QtCore.Qt.Vertical)
self.verticalSlider_higher_low.setObjectName("verticalSlider_higher_low")
self.gridLayout_16.addWidget(self.verticalSlider_higher_low, 3, 2, 1, 1)
self.label_25 = QtWidgets.QLabel(self.groupBox_freq)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_25.sizePolicy().hasHeightForWidth())
self.label_25.setSizePolicy(sizePolicy)
self.label_25.setAlignment(QtCore.Qt.AlignCenter)
self.label_25.setObjectName("label_25")
self.gridLayout_16.addWidget(self.label_25, 0, 5, 1, 1)
self.label_26 = QtWidgets.QLabel(self.groupBox_freq)
self.label_26.setObjectName("label_26")
self.gridLayout_16.addWidget(self.label_26, 1, 0, 1, 1)
self.label_42 = QtWidgets.QLabel(self.groupBox_freq)
self.label_42.setObjectName("label_42")
self.gridLayout_16.addWidget(self.label_42, 2, 0, 1, 1)
self.label_45 = QtWidgets.QLabel(self.groupBox_freq)
self.label_45.setObjectName("label_45")
self.gridLayout_16.addWidget(self.label_45, 5, 0, 1, 1)
self.label_43 = QtWidgets.QLabel(self.groupBox_freq)
self.label_43.setObjectName("label_43")
self.gridLayout_16.addWidget(self.label_43, 3, 0, 2, 1)
self.gridLayout_15.addWidget(self.groupBox_freq, 1, 0, 2, 5)
self.tabWidget.addTab(self.tab_sound, "")
self.tab_ext_backlight = QtWidgets.QWidget()
self.tab_ext_backlight.setObjectName("tab_ext_backlight")
self.gridLayout_4 = QtWidgets.QGridLayout(self.tab_ext_backlight)
self.gridLayout_4.setContentsMargins(0, 0, 0, 0)
self.gridLayout_4.setObjectName("gridLayout_4")
self.groupBox_setup_ext = QtWidgets.QGroupBox(self.tab_ext_backlight)
self.groupBox_setup_ext.setEnabled(True)
self.groupBox_setup_ext.setObjectName("groupBox_setup_ext")
self.gridLayout_19 = QtWidgets.QGridLayout(self.groupBox_setup_ext)
self.gridLayout_19.setObjectName("gridLayout_19")
self.pushButton_zones = QtWidgets.QPushButton(self.groupBox_setup_ext)
self.pushButton_zones.setCheckable(True)
self.pushButton_zones.setObjectName("pushButton_zones")
self.gridLayout_19.addWidget(self.pushButton_zones, 0, 1, 1, 2)
self.label_36 = QtWidgets.QLabel(self.groupBox_setup_ext)
self.label_36.setObjectName("label_36")
self.gridLayout_19.addWidget(self.label_36, 6, 1, 1, 1)
self.spinBox_update = QtWidgets.QSpinBox(self.groupBox_setup_ext)
self.spinBox_update.setMinimum(100)
self.spinBox_update.setMaximum(5000)
self.spinBox_update.setSingleStep(50)
self.spinBox_update.setProperty("value", 250)
self.spinBox_update.setObjectName("spinBox_update")
self.gridLayout_19.addWidget(self.spinBox_update, 6, 2, 1, 1)
self.spinBox_count_zones = QtWidgets.QSpinBox(self.groupBox_setup_ext)
self.spinBox_count_zones.setMinimum(1)
self.spinBox_count_zones.setMaximum(6)
self.spinBox_count_zones.setProperty("value", 6)
self.spinBox_count_zones.setObjectName("spinBox_count_zones")
self.gridLayout_19.addWidget(self.spinBox_count_zones, 1, 2, 1, 1)
self.label_4 = QtWidgets.QLabel(self.groupBox_setup_ext)
self.label_4.setObjectName("label_4")
self.gridLayout_19.addWidget(self.label_4, 1, 1, 1, 1)
self.pushButton_ext_on_off = QtWidgets.QPushButton(self.groupBox_setup_ext)
self.pushButton_ext_on_off.setCheckable(True)
self.pushButton_ext_on_off.setObjectName("pushButton_ext_on_off")
self.gridLayout_19.addWidget(self.pushButton_ext_on_off, 8, 1, 1, 2)
self.gridLayout_4.addWidget(self.groupBox_setup_ext, 0, 0, 1, 1)
self.tabWidget.addTab(self.tab_ext_backlight, "")
self.tab_setup = QtWidgets.QWidget()
self.tab_setup.setObjectName("tab_setup")
self.gridLayout_11 = QtWidgets.QGridLayout(self.tab_setup)
self.gridLayout_11.setContentsMargins(0, 0, 0, 0)
self.gridLayout_11.setObjectName("gridLayout_11")
self.groupBox_wb = QtWidgets.QGroupBox(self.tab_setup)
self.groupBox_wb.setEnabled(True)
self.groupBox_wb.setObjectName("groupBox_wb")
self.gridLayout_12 = QtWidgets.QGridLayout(self.groupBox_wb)
self.gridLayout_12.setObjectName("gridLayout_12")
self.horizontalSlider_wb_r = QtWidgets.QSlider(self.groupBox_wb)
self.horizontalSlider_wb_r.setMaximum(511)
self.horizontalSlider_wb_r.setProperty("value", 511)
self.horizontalSlider_wb_r.setSliderPosition(511)
self.horizontalSlider_wb_r.setTracking(True)
self.horizontalSlider_wb_r.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_wb_r.setInvertedAppearance(False)
self.horizontalSlider_wb_r.setInvertedControls(False)
self.horizontalSlider_wb_r.setTickPosition(QtWidgets.QSlider.NoTicks)
self.horizontalSlider_wb_r.setTickInterval(0)
self.horizontalSlider_wb_r.setObjectName("horizontalSlider_wb_r")
self.gridLayout_12.addWidget(self.horizontalSlider_wb_r, 0, 1, 1, 1)
self.label_20 = QtWidgets.QLabel(self.groupBox_wb)
self.label_20.setObjectName("label_20")
self.gridLayout_12.addWidget(self.label_20, 0, 0, 1, 1)
self.spinBox_wb_r = QtWidgets.QSpinBox(self.groupBox_wb)
self.spinBox_wb_r.setWrapping(False)
self.spinBox_wb_r.setFrame(True)
self.spinBox_wb_r.setReadOnly(False)
self.spinBox_wb_r.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows)
self.spinBox_wb_r.setAccelerated(False)
self.spinBox_wb_r.setProperty("showGroupSeparator", False)
self.spinBox_wb_r.setMaximum(511)
self.spinBox_wb_r.setProperty("value", 511)
self.spinBox_wb_r.setObjectName("spinBox_wb_r")
self.gridLayout_12.addWidget(self.spinBox_wb_r, 0, 2, 1, 1)
self.label_19 = QtWidgets.QLabel(self.groupBox_wb)
self.label_19.setObjectName("label_19")
self.gridLayout_12.addWidget(self.label_19, 1, 0, 1, 1)
self.horizontalSlider_wb_g = QtWidgets.QSlider(self.groupBox_wb)
self.horizontalSlider_wb_g.setMaximum(511)
self.horizontalSlider_wb_g.setProperty("value", 511)
self.horizontalSlider_wb_g.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_wb_g.setObjectName("horizontalSlider_wb_g")
self.gridLayout_12.addWidget(self.horizontalSlider_wb_g, 1, 1, 1, 1)
self.label_21 = QtWidgets.QLabel(self.groupBox_wb)
self.label_21.setObjectName("label_21")
self.gridLayout_12.addWidget(self.label_21, 2, 0, 1, 1)
self.horizontalSlider_wb_b = QtWidgets.QSlider(self.groupBox_wb)
self.horizontalSlider_wb_b.setMaximum(511)
self.horizontalSlider_wb_b.setProperty("value", 511)
self.horizontalSlider_wb_b.setOrientation(QtCore.Qt.Horizontal)
self.horizontalSlider_wb_b.setObjectName("horizontalSlider_wb_b")
self.gridLayout_12.addWidget(self.horizontalSlider_wb_b, 2, 1, 1, 1)
self.spinBox_wb_b = QtWidgets.QSpinBox(self.groupBox_wb)
self.spinBox_wb_b.setWrapping(False)
self.spinBox_wb_b.setFrame(True)
self.spinBox_wb_b.setReadOnly(False)
self.spinBox_wb_b.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows)
self.spinBox_wb_b.setAccelerated(False)
self.spinBox_wb_b.setProperty("showGroupSeparator", False)
self.spinBox_wb_b.setMaximum(511)
self.spinBox_wb_b.setProperty("value", 511)
self.spinBox_wb_b.setObjectName("spinBox_wb_b")
self.gridLayout_12.addWidget(self.spinBox_wb_b, 2, 2, 1, 1)
self.spinBox_wb_g = QtWidgets.QSpinBox(self.groupBox_wb)
self.spinBox_wb_g.setWrapping(False)
self.spinBox_wb_g.setFrame(True)
self.spinBox_wb_g.setReadOnly(False)
self.spinBox_wb_g.setButtonSymbols(QtWidgets.QAbstractSpinBox.UpDownArrows)
self.spinBox_wb_g.setAccelerated(False)
self.spinBox_wb_g.setProperty("showGroupSeparator", False)
self.spinBox_wb_g.setMaximum(511)
self.spinBox_wb_g.setProperty("value", 511)
self.spinBox_wb_g.setObjectName("spinBox_wb_g")
self.gridLayout_12.addWidget(self.spinBox_wb_g, 1, 2, 1, 1)
self.gridLayout_11.addWidget(self.groupBox_wb, 1, 0, 1, 1)
self.groupBox_gamma = QtWidgets.QGroupBox(self.tab_setup)
self.groupBox_gamma.setEnabled(True)
self.groupBox_gamma.setObjectName("groupBox_gamma")
self.gridLayout_13 = QtWidgets.QGridLayout(self.groupBox_gamma)
self.gridLayout_13.setObjectName("gridLayout_13")
self.graphicsView_gamma = PlotWidget(self.groupBox_gamma)
self.graphicsView_gamma.setObjectName("graphicsView_gamma")
self.gridLayout_13.addWidget(self.graphicsView_gamma, 0, 0, 1, 1)
self.doubleSpinBox_gamma = QtWidgets.QDoubleSpinBox(self.groupBox_gamma)
self.doubleSpinBox_gamma.setMinimum(1.0)
self.doubleSpinBox_gamma.setMaximum(10.0)
self.doubleSpinBox_gamma.setSingleStep(0.1)
self.doubleSpinBox_gamma.setProperty("value", 2.8)
self.doubleSpinBox_gamma.setObjectName("doubleSpinBox_gamma")
self.gridLayout_13.addWidget(self.doubleSpinBox_gamma, 1, 0, 1, 1)
self.gridLayout_11.addWidget(self.groupBox_gamma, 1, 2, 1, 1)
self.groupBox_device = QtWidgets.QGroupBox(self.tab_setup)
self.groupBox_device.setObjectName("groupBox_device")
self.gridLayout_14 = QtWidgets.QGridLayout(self.groupBox_device)
self.gridLayout_14.setObjectName("gridLayout_14")
self.comboBox_device = QtWidgets.QComboBox(self.groupBox_device)
self.comboBox_device.setEnabled(True)
self.comboBox_device.setObjectName("comboBox_device")
self.gridLayout_14.addWidget(self.comboBox_device, 0, 0, 1, 1)
self.label_device = QtWidgets.QLabel(self.groupBox_device)
self.label_device.setText("")
self.label_device.setObjectName("label_device")
self.gridLayout_14.addWidget(self.label_device, 0, 2, 1, 1)
self.pushButton_update = QtWidgets.QPushButton(self.groupBox_device)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_update.sizePolicy().hasHeightForWidth())
self.pushButton_update.setSizePolicy(sizePolicy)
self.pushButton_update.setText("")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap("icons/update.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_update.setIcon(icon2)
self.pushButton_update.setObjectName("pushButton_update")
self.gridLayout_14.addWidget(self.pushButton_update, 0, 1, 1, 1)
self.gridLayout_11.addWidget(self.groupBox_device, 0, 0, 1, 3)
self.tabWidget.addTab(self.tab_setup, "")
self.gridLayout.addWidget(self.tabWidget, 1, 0, 1, 1)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.label_23.setBuddy(self.comboBox_input)
self.label_41.setBuddy(self.comboBox_effect_music)
self.label_36.setBuddy(self.spinBox_update)
self.label_4.setBuddy(self.spinBox_count_zones)
self.retranslateUi(MainWindow)
self.tabWidget.setCurrentIndex(0)
self.horizontalSlider_r.valueChanged['int'].connect(self.spinBox_r.setValue)
self.horizontalSlider_g.valueChanged['int'].connect(self.spinBox_g.setValue)
self.horizontalSlider_b.valueChanged['int'].connect(self.spinBox_b.setValue)
self.horizontalSlider_wb_r.valueChanged['int'].connect(self.spinBox_wb_r.setValue)
self.horizontalSlider_wb_g.valueChanged['int'].connect(self.spinBox_wb_g.setValue)
self.spinBox_wb_b.valueChanged['int'].connect(self.horizontalSlider_wb_b.setValue)
self.horizontalSlider_wb_b.valueChanged['int'].connect(self.spinBox_wb_b.setValue)
self.spinBox_wb_g.valueChanged['int'].connect(self.horizontalSlider_wb_g.setValue)
self.spinBox_wb_r.valueChanged['int'].connect(self.horizontalSlider_wb_r.setValue)
self.spinBox_r.valueChanged['int'].connect(self.horizontalSlider_r.setValue)
self.spinBox_g.valueChanged['int'].connect(self.horizontalSlider_g.setValue)
self.spinBox_b.valueChanged['int'].connect(self.horizontalSlider_b.setValue)
self.dial_bright.sliderMoved['int'].connect(self.lcdNumber_bright.display)
self.dial_bright.valueChanged['int'].connect(self.lcdNumber_bright.display)
self.verticalSlider_lower_low.valueChanged['int'].connect(self.spinBox_lower_low.setValue)
self.verticalSlider_higher_low.valueChanged['int'].connect(self.spinBox_higher_low.setValue)
self.verticalSlider_lower_high.valueChanged['int'].connect(self.spinBox_lower_high.setValue)
self.verticalSlider_higher_high.valueChanged['int'].connect(self.spinBox_higher_high.setValue)
self.spinBox_lower_low.valueChanged['int'].connect(self.verticalSlider_lower_low.setValue)
self.spinBox_higher_low.valueChanged['int'].connect(self.verticalSlider_higher_low.setValue)
self.spinBox_lower_high.valueChanged['int'].connect(self.verticalSlider_lower_high.setValue)
self.spinBox_higher_high.valueChanged['int'].connect(self.verticalSlider_higher_high.setValue)
self.horizontalSlider_noise.valueChanged['int'].connect(self.spinBox_noise.setValue)
self.spinBox_noise.valueChanged['int'].connect(self.horizontalSlider_noise.setValue)
self.verticalSlider_lower_mid.valueChanged['int'].connect(self.spinBox_lower_mid.setValue)
self.spinBox_lower_mid.valueChanged['int'].connect(self.verticalSlider_lower_mid.setValue)
self.verticalSlider_higher_mid.valueChanged['int'].connect(self.spinBox_higher_mid.setValue)
self.spinBox_higher_mid.valueChanged['int'].connect(self.verticalSlider_higher_mid.setValue)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
MainWindow.setTabOrder(self.pushButton_color, self.horizontalSlider_r)
MainWindow.setTabOrder(self.horizontalSlider_r, self.spinBox_r)
MainWindow.setTabOrder(self.spinBox_r, self.horizontalSlider_g)
MainWindow.setTabOrder(self.horizontalSlider_g, self.spinBox_g)
MainWindow.setTabOrder(self.spinBox_g, self.horizontalSlider_b)
MainWindow.setTabOrder(self.horizontalSlider_b, self.spinBox_b)
MainWindow.setTabOrder(self.spinBox_b, self.dial_bright)
MainWindow.setTabOrder(self.dial_bright, self.pushButton_last01)
MainWindow.setTabOrder(self.pushButton_last01, self.pushButton_last02)
MainWindow.setTabOrder(self.pushButton_last02, self.pushButton_last03)
MainWindow.setTabOrder(self.pushButton_last03, self.pushButton_last04)
MainWindow.setTabOrder(self.pushButton_last04, self.pushButton_last05)
MainWindow.setTabOrder(self.pushButton_last05, self.pushButton_last06)
MainWindow.setTabOrder(self.pushButton_last06, self.pushButton_last07)
MainWindow.setTabOrder(self.pushButton_last07, self.pushButton_last08)
MainWindow.setTabOrder(self.pushButton_last08, self.pushButton_last09)
MainWindow.setTabOrder(self.pushButton_last09, self.pushButton_last10)
MainWindow.setTabOrder(self.pushButton_last10, self.pushButton_last11)
MainWindow.setTabOrder(self.pushButton_last11, self.pushButton_last12)
MainWindow.setTabOrder(self.pushButton_last12, self.pushButton_last13)
MainWindow.setTabOrder(self.pushButton_last13, self.pushButton_last14)
MainWindow.setTabOrder(self.pushButton_last14, self.pushButton_last15)
MainWindow.setTabOrder(self.pushButton_last15, self.pushButton_last16)
MainWindow.setTabOrder(self.pushButton_last16, self.pushButton_last17)
MainWindow.setTabOrder(self.pushButton_last17, self.pushButton_last18)
MainWindow.setTabOrder(self.pushButton_last18, self.pushButton_last19)
MainWindow.setTabOrder(self.pushButton_last19, self.pushButton_last20)
MainWindow.setTabOrder(self.pushButton_last20, self.comboBox_effect1)
MainWindow.setTabOrder(self.comboBox_effect1, self.spinBox_time1)
MainWindow.setTabOrder(self.spinBox_time1, self.pushButton_effect1)
MainWindow.setTabOrder(self.pushButton_effect1, self.plainTextEdit_input1)
MainWindow.setTabOrder(self.plainTextEdit_input1, self.comboBox_effect2)
MainWindow.setTabOrder(self.comboBox_effect2, self.spinBox_time2)
MainWindow.setTabOrder(self.spinBox_time2, self.pushButton_effect2)
MainWindow.setTabOrder(self.pushButton_effect2, self.plainTextEdit_input2)
MainWindow.setTabOrder(self.plainTextEdit_input2, self.comboBox_effect3)
MainWindow.setTabOrder(self.comboBox_effect3, self.spinBox_time3)
MainWindow.setTabOrder(self.spinBox_time3, self.pushButton_effect3)
MainWindow.setTabOrder(self.pushButton_effect3, self.plainTextEdit_input3)
MainWindow.setTabOrder(self.plainTextEdit_input3, self.comboBox_effect4)
MainWindow.setTabOrder(self.comboBox_effect4, self.spinBox_time4)
MainWindow.setTabOrder(self.spinBox_time4, self.pushButton_effect4)
MainWindow.setTabOrder(self.pushButton_effect4, self.plainTextEdit_input4)
MainWindow.setTabOrder(self.plainTextEdit_input4, self.comboBox_device)
MainWindow.setTabOrder(self.comboBox_device, self.pushButton_update)
MainWindow.setTabOrder(self.pushButton_update, self.horizontalSlider_wb_r)
MainWindow.setTabOrder(self.horizontalSlider_wb_r, self.spinBox_wb_r)
MainWindow.setTabOrder(self.spinBox_wb_r, self.horizontalSlider_wb_g)
MainWindow.setTabOrder(self.horizontalSlider_wb_g, self.spinBox_wb_g)
MainWindow.setTabOrder(self.spinBox_wb_g, self.horizontalSlider_wb_b)
MainWindow.setTabOrder(self.horizontalSlider_wb_b, self.spinBox_wb_b)
MainWindow.setTabOrder(self.spinBox_wb_b, self.doubleSpinBox_gamma)
MainWindow.setTabOrder(self.doubleSpinBox_gamma, self.graphicsView_gamma)
def retranslateUi(self, MainWindow):
    """Apply all user-visible (translatable) strings to the UI widgets.

    Auto-generated by pyuic; invoked from setupUi and intended to be
    re-invoked when the application language changes, so every string
    goes through QCoreApplication.translate.

    Args:
        MainWindow: the QMainWindow whose window title is set here.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "P3DA"))
    # --- "Light" tab: color and brightness controls ---
    self.groupBox_color_bright.setTitle(_translate("MainWindow", "Color and brightness"))
    self.label_2.setText(_translate("MainWindow", "Green"))
    self.label_3.setText(_translate("MainWindow", "Blue"))
    self.label.setText(_translate("MainWindow", "Red"))
    self.pushButton_color.setText(_translate("MainWindow", "#000000"))
    # Recently used color swatches; button text doubles as the hex value.
    self.groupBox_last_colors.setTitle(_translate("MainWindow", "Last colors"))
    self.pushButton_last18.setText(_translate("MainWindow", "#999999"))
    self.pushButton_last11.setText(_translate("MainWindow", "#7f7f00"))
    self.pushButton_last15.setText(_translate("MainWindow", "#000000"))
    self.pushButton_last04.setText(_translate("MainWindow", "#ffff00"))
    self.pushButton_last17.setText(_translate("MainWindow", "#666666"))
    self.pushButton_last05.setText(_translate("MainWindow", "#ff00ff"))
    self.pushButton_last12.setText(_translate("MainWindow", "#7f007f"))
    self.pushButton_last08.setText(_translate("MainWindow", "#7f0000"))
    self.pushButton_last16.setText(_translate("MainWindow", "#333333"))
    self.pushButton_last07.setText(_translate("MainWindow", "#ffffff"))
    self.pushButton_last14.setText(_translate("MainWindow", "#7f7f7f"))
    self.pushButton_last20.setText(_translate("MainWindow", "#0369cf"))
    self.pushButton_last09.setText(_translate("MainWindow", "#007f00"))
    self.pushButton_last13.setText(_translate("MainWindow", "#007f7f"))
    self.pushButton_last10.setText(_translate("MainWindow", "#00007f"))
    self.pushButton_last02.setText(_translate("MainWindow", "#00ff00"))
    self.pushButton_last19.setText(_translate("MainWindow", "#cccccc"))
    self.pushButton_last03.setText(_translate("MainWindow", "#0000ff"))
    self.pushButton_last01.setText(_translate("MainWindow", "#ff0000"))
    self.pushButton_last06.setText(_translate("MainWindow", "#00ffff"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_light), _translate("MainWindow", "Light"))
    # --- "Ilumination" tab: four effect groups, each with a color list ---
    self.groupBox_effect2.setTitle(_translate("MainWindow", "Effect2"))
    self.plainTextEdit_input2.setPlainText(_translate("MainWindow", "#ffffff\n"
"#ffff00\n"
"#ff00ff\n"
"#ff0000\n"
"#00ffff\n"
"#00ff00\n"
"#000000"))
    self.label_16.setText(_translate("MainWindow", "ms"))
    self.label_11.setText(_translate("MainWindow", "Effect"))
    self.label_10.setText(_translate("MainWindow", "Time"))
    self.pushButton_effect2.setText(_translate("MainWindow", "On/off"))
    self.groupBox_effect3.setTitle(_translate("MainWindow", "Effect3"))
    self.label_17.setText(_translate("MainWindow", "ms"))
    self.label_13.setText(_translate("MainWindow", "Effect"))
    self.label_12.setText(_translate("MainWindow", "Time"))
    self.pushButton_effect3.setText(_translate("MainWindow", "On/off"))
    self.plainTextEdit_input3.setPlainText(_translate("MainWindow", "#ffffff\n"
"#ffff00\n"
"#ff00ff\n"
"#ff0000\n"
"#00ffff\n"
"#00ff00\n"
"#000000"))
    self.groupBox_effect4.setTitle(_translate("MainWindow", "Effect4"))
    self.plainTextEdit_input4.setPlainText(_translate("MainWindow", "#ffffff\n"
"#ffff00\n"
"#ff00ff\n"
"#ff0000\n"
"#00ffff\n"
"#00ff00\n"
"#000000"))
    self.label_18.setText(_translate("MainWindow", "ms"))
    self.label_14.setText(_translate("MainWindow", "Time"))
    self.label_15.setText(_translate("MainWindow", "Effect"))
    self.pushButton_effect4.setText(_translate("MainWindow", "On/off"))
    self.groupBox_effect1.setTitle(_translate("MainWindow", "Effect 1"))
    self.pushButton_effect1.setText(_translate("MainWindow", "On/off"))
    self.label_7.setText(_translate("MainWindow", "ms"))
    self.label_8.setText(_translate("MainWindow", "Time"))
    self.label_9.setText(_translate("MainWindow", "Effect"))
    self.plainTextEdit_input1.setPlainText(_translate("MainWindow", "#ffffff\n"
"#ffff00\n"
"#ff00ff\n"
"#ff0000\n"
"#00ffff\n"
"#00ff00\n"
"#000000"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_ilumination), _translate("MainWindow", "Ilumination"))
    # --- "Sound" tab: input selector, beat detector and frequency bands ---
    self.label_23.setText(_translate("MainWindow", "I&nput"))
    self.pushButton_sound_onoff.setText(_translate("MainWindow", "On/off"))
    self.label_41.setText(_translate("MainWindow", "E&ffect"))
    self.groupBox_bit_detect.setTitle(_translate("MainWindow", "Bit detector"))
    self.plainTextEdit_bitdetector.setPlainText(_translate("MainWindow", "#ff0000\n"
"#00ff00\n"
"#0000ff"))
    self.groupBox_freq.setTitle(_translate("MainWindow", "Frequency"))
    self.pushButton_color_high.setText(_translate("MainWindow", "#0000ff"))
    self.pushButton_color_low.setText(_translate("MainWindow", "#ff0000"))
    self.pushButton_color_mid.setText(_translate("MainWindow", "#00ff00"))
    self.label_22.setText(_translate("MainWindow", "Low"))
    self.label_24.setText(_translate("MainWindow", "Middle"))
    self.label_25.setText(_translate("MainWindow", "High"))
    self.label_26.setText(_translate("MainWindow", "Color"))
    self.label_42.setText(_translate("MainWindow", "Multiplier"))
    self.label_45.setText(_translate("MainWindow", "Noise"))
    self.label_43.setText(_translate("MainWindow", "Limits"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_sound), _translate("MainWindow", "Sound"))
    # --- "Ext. backlight" tab ---
    self.groupBox_setup_ext.setTitle(_translate("MainWindow", "Setup"))
    self.pushButton_zones.setText(_translate("MainWindow", "Set zones"))
    self.label_36.setText(_translate("MainWindow", "&Update interval"))
    self.spinBox_update.setSuffix(_translate("MainWindow", " ms"))
    self.label_4.setText(_translate("MainWindow", "Count &of zones"))
    self.pushButton_ext_on_off.setText(_translate("MainWindow", "On/off"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_ext_backlight), _translate("MainWindow", "Ext. backlight"))
    # --- "Setup" tab: white balance, gamma and device selection ---
    self.groupBox_wb.setTitle(_translate("MainWindow", "White balance"))
    self.label_20.setText(_translate("MainWindow", "Red"))
    self.label_19.setText(_translate("MainWindow", "Green"))
    self.label_21.setText(_translate("MainWindow", "Blue"))
    self.groupBox_gamma.setTitle(_translate("MainWindow", "Gamma"))
    self.groupBox_device.setTitle(_translate("MainWindow", "Device selector"))
    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_setup), _translate("MainWindow", "Setup"))
from pyqtgraph import PlotWidget
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for the compression manager."""
import unittest
from dfvfs.compression import decompressor
from dfvfs.compression import manager
from dfvfs.compression import zlib_decompressor
from dfvfs.lib import definitions
from tests import test_lib as shared_test_lib
class TestDecompressor(decompressor.Decompressor):
  """Minimal decompressor implementation used only by the registration tests."""

  COMPRESSION_METHOD = 'test'

  def Decompress(self, compressed_data):
    """Decompresses the compressed data.

    Args:
      compressed_data (bytes): compressed data.

    Returns:
      tuple(bytes, bytes): uncompressed data and remaining compressed data.
    """
    # A no-op stand-in: produces no output and consumes all input.
    empty_result = b''
    return empty_result, empty_result
class CompressionManagerTest(shared_test_lib.BaseTestCase):
  """Compression manager tests."""

  def testDecompressorRegistration(self):
    """Tests the RegisterDecompressor and DeregisterDecompressor functions."""
    # pylint: disable=protected-access
    # Record the baseline so the test is independent of which decompressors
    # are registered by default.
    number_of_decompressors = len(manager.CompressionManager._decompressors)

    manager.CompressionManager.RegisterDecompressor(TestDecompressor)
    self.assertEqual(
        len(manager.CompressionManager._decompressors),
        number_of_decompressors + 1)

    # Registering the same decompressor a second time raises KeyError.
    with self.assertRaises(KeyError):
      manager.CompressionManager.RegisterDecompressor(TestDecompressor)

    manager.CompressionManager.DeregisterDecompressor(TestDecompressor)
    self.assertEqual(
        len(manager.CompressionManager._decompressors), number_of_decompressors)

    # Deregistering a decompressor that is not registered raises KeyError.
    with self.assertRaises(KeyError):
      manager.CompressionManager.DeregisterDecompressor(TestDecompressor)

  def testGetDecompressor(self):
    """Tests the GetDecompressor function."""
    decompressor_object = manager.CompressionManager.GetDecompressor(
        definitions.COMPRESSION_METHOD_ZLIB)
    self.assertIsInstance(
        decompressor_object, zlib_decompressor.ZlibDecompressor)

    # An unknown compression method yields None rather than raising.
    decompressor_object = manager.CompressionManager.GetDecompressor('bogus')
    self.assertIsNone(decompressor_object)
# Allow running this test module directly; strip of non-code residue that was
# fused onto the unittest.main() line and made it a syntax error.
if __name__ == '__main__':
  unittest.main()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.