repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
xingwu1/azure-sdk-for-node
lib/services/serviceFabric/lib/models/inlinedValueSecretResourceProperties.js
2501
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ 'use strict'; const models = require('./index'); /** * Describes the properties of a secret resource whose value is provided * explicitly as plaintext. The secret resource may have multiple values, each * being uniquely versioned. The secret value of each version is stored * encrypted, and delivered as plaintext into the context of applications * referencing it. * * @extends models['SecretResourceProperties'] */ class InlinedValueSecretResourceProperties extends models['SecretResourceProperties'] { /** * Create a InlinedValueSecretResourceProperties. */ constructor() { super(); } /** * Defines the metadata of InlinedValueSecretResourceProperties * * @returns {object} metadata of InlinedValueSecretResourceProperties * */ mapper() { return { required: false, serializedName: 'inlinedValue', type: { name: 'Composite', polymorphicDiscriminator: { serializedName: 'kind', clientName: 'kind' }, uberParent: 'SecretResourcePropertiesBase', className: 'InlinedValueSecretResourceProperties', modelProperties: { kind: { required: true, serializedName: 'kind', isPolymorphicDiscriminator: true, type: { name: 'String' } }, description: { required: false, serializedName: 'description', type: { name: 'String' } }, status: { required: false, readOnly: true, serializedName: 'status', type: { name: 'String' } }, statusDetails: { required: false, readOnly: true, serializedName: 'statusDetails', type: { name: 'String' } }, contentType: { required: false, serializedName: 'contentType', type: { name: 'String' } } } } }; } } module.exports = InlinedValueSecretResourceProperties;
apache-2.0
ConstantB/ontop-spatial
quest-test/src/test/java/it/unibz/krdb/obda/identifiers/DB2IdentifierTest.java
5133
package it.unibz.krdb.obda.identifiers; /* * #%L * ontop-test * %% * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import it.unibz.krdb.obda.io.ModelIOManager; import it.unibz.krdb.obda.model.OBDADataFactory; import it.unibz.krdb.obda.model.OBDAModel; import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl; import it.unibz.krdb.obda.owlrefplatform.core.QuestConstants; import it.unibz.krdb.obda.owlrefplatform.core.QuestPreferences; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWL; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLConnection; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLFactory; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLResultSet; import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLStatement; import java.io.File; import junit.framework.TestCase; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.reasoner.SimpleConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /*** * Tests that oracle identifiers for tables and columns are treated * correctly. 
Especially, that the unquoted identifers are treated as uppercase, and * that the case of quoted identifiers is not changed */ public class DB2IdentifierTest extends TestCase { private OBDADataFactory fac; private QuestOWLConnection conn; Logger log = LoggerFactory.getLogger(this.getClass()); private OBDAModel obdaModel; private OWLOntology ontology; final String owlfile = "resources/identifiers/identifiers.owl"; final String obdafile = "resources/identifiers/identifiers-db2.obda"; private QuestOWL reasoner; @Override public void setUp() throws Exception { // Loading the OWL file OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); ontology = manager.loadOntologyFromOntologyDocument((new File(owlfile))); // Loading the OBDA data fac = OBDADataFactoryImpl.getInstance(); obdaModel = fac.getOBDAModel(); ModelIOManager ioManager = new ModelIOManager(obdaModel); ioManager.load(obdafile); QuestPreferences p = new QuestPreferences(); p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL); p.setCurrentValueOf(QuestPreferences.OBTAIN_FULL_METADATA, QuestConstants.FALSE); // Creating a new instance of the reasoner QuestOWLFactory factory = new QuestOWLFactory(); factory.setOBDAController(obdaModel); factory.setPreferenceHolder(p); reasoner = (QuestOWL) factory.createReasoner(ontology, new SimpleConfiguration()); // Now we are ready for querying conn = reasoner.getConnection(); } public void tearDown() throws Exception{ conn.close(); reasoner.dispose(); } private String runTests(String query) throws Exception { QuestOWLStatement st = conn.createStatement(); String retval; try { QuestOWLResultSet rs = st.executeTuple(query); assertTrue(rs.nextRow()); OWLIndividual ind1 = rs.getOWLIndividual("x") ; retval = ind1.toString(); } catch (Exception e) { throw e; } finally { try { } catch (Exception e) { st.close(); assertTrue(false); } conn.close(); reasoner.dispose(); } return retval; } /** * Test use of quoted table and column identifiers * @throws 
Exception */ public void testLowercaseQuoted() throws Exception { String query = "PREFIX : <http://www.semanticweb.org/ontologies/2013/7/untitled-ontology-150#> SELECT ?x WHERE {?x a :Country} ORDER BY ?x"; String val = runTests(query); assertEquals("<http://www.semanticweb.org/ontologies/2013/7/untitled-ontology-150#Country-991>", val); } /** * Test use of lowercase, unquoted table, schema and column identifiers * @throws Exception */ public void testLowercaseUnquotedSchema() throws Exception { String query = "PREFIX : <http://www.semanticweb.org/ontologies/2013/7/untitled-ontology-150#> SELECT ?x WHERE {?x a :Country2} ORDER BY ?x"; String val = runTests(query); assertEquals("<http://www.semanticweb.org/ontologies/2013/7/untitled-ontology-150#Country2-991>", val); } public void testAliasUnquotedColumn() throws Exception { String query = "PREFIX : <http://www.semanticweb.org/ontologies/2013/7/untitled-ontology-150#> SELECT ?x WHERE {?x a :Country3} ORDER BY ?x"; String val = runTests(query); assertEquals("<http://www.semanticweb.org/ontologies/2013/7/untitled-ontology-150#Country3-991>", val); } }
apache-2.0
kidaa/incubator-ignite
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/IgniteCacheClientNodePartitionsExchangeTest.java
20342
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cache.affinity.*; import org.apache.ignite.cache.affinity.fair.*; import org.apache.ignite.cluster.*; import org.apache.ignite.configuration.*; import org.apache.ignite.events.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.managers.communication.*; import org.apache.ignite.internal.managers.discovery.*; import org.apache.ignite.internal.processors.affinity.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.cache.distributed.dht.*; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.*; import org.apache.ignite.internal.util.lang.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.lang.*; import org.apache.ignite.plugin.extensions.communication.*; import org.apache.ignite.resources.*; import org.apache.ignite.spi.communication.tcp.*; import org.apache.ignite.spi.discovery.tcp.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*; import 
org.apache.ignite.testframework.*; import org.apache.ignite.testframework.junits.common.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; /** * */ public class IgniteCacheClientNodePartitionsExchangeTest extends GridCommonAbstractTest { /** */ protected static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true); /** */ private boolean client; /** */ private boolean fairAffinity; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(gridName); ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setIpFinder(ipFinder).setForceServerMode(true); cfg.setClientMode(client); CacheConfiguration ccfg = new CacheConfiguration(); if (fairAffinity) ccfg.setAffinity(new FairAffinityFunction()); cfg.setCacheConfiguration(ccfg); cfg.setCommunicationSpi(new TestCommunicationSpi()); return cfg; } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { super.afterTest(); stopAllGrids(); } /** * @throws Exception If failed. 
*/ public void testServerNodeLeave() throws Exception { Ignite ignite0 = startGrid(0); client = true; final Ignite ignite1 = startGrid(1); waitForTopologyUpdate(2, 2); final Ignite ignite2 = startGrid(2); waitForTopologyUpdate(3, 3); ignite0.close(); waitForTopologyUpdate(2, 4); GridTestUtils.assertThrows(log, new Callable<Void>() { @Override public Void call() throws Exception { ignite1.cache(null).get(1); return null; } }, CacheServerNotFoundException.class, null); GridTestUtils.assertThrows(log, new Callable<Void>() { @Override public Void call() throws Exception { ignite2.cache(null).get(1); return null; } }, CacheServerNotFoundException.class, null); ignite1.close(); waitForTopologyUpdate(1, 5); GridTestUtils.assertThrows(log, new Callable<Void>() { @Override public Void call() throws Exception { ignite2.cache(null).get(1); return null; } }, CacheServerNotFoundException.class, null); } /** * @throws Exception If failed. */ public void testSkipPreload() throws Exception { Ignite ignite0 = startGrid(0); final CountDownLatch evtLatch0 = new CountDownLatch(1); ignite0.events().localListen(new IgnitePredicate<Event>() { @Override public boolean apply(Event evt) { log.info("Rebalance event: " + evt); evtLatch0.countDown(); return true; } }, EventType.EVT_CACHE_REBALANCE_STARTED, EventType.EVT_CACHE_REBALANCE_STOPPED); client = true; Ignite ignite1 = startGrid(1); assertTrue(evtLatch0.await(1000, TimeUnit.MILLISECONDS)); ignite1.close(); assertTrue(evtLatch0.await(1000, TimeUnit.MILLISECONDS)); ignite1 = startGrid(1); final CountDownLatch evtLatch1 = new CountDownLatch(1); ignite1.events().localListen(new IgnitePredicate<Event>() { @Override public boolean apply(Event evt) { log.info("Rebalance event: " + evt); evtLatch1.countDown(); return true; } }, EventType.EVT_CACHE_REBALANCE_STARTED, EventType.EVT_CACHE_REBALANCE_STOPPED); assertTrue(evtLatch0.await(1000, TimeUnit.MILLISECONDS)); client = false; startGrid(2); assertTrue(evtLatch0.await(1000, 
TimeUnit.MILLISECONDS)); assertFalse(evtLatch1.await(1000, TimeUnit.MILLISECONDS)); } /** * @throws Exception If failed. */ public void testPartitionsExchange() throws Exception { partitionsExchange(); } /** * @throws Exception If failed. */ public void testPartitionsExchangeFairAffinity() throws Exception { fairAffinity = true; partitionsExchange(); } /** * @throws Exception If failed. */ private void partitionsExchange() throws Exception { Ignite ignite0 = startGrid(0); TestCommunicationSpi spi0 = (TestCommunicationSpi)ignite0.configuration().getCommunicationSpi(); Ignite ignite1 = startGrid(1); waitForTopologyUpdate(2, 2); TestCommunicationSpi spi1 = (TestCommunicationSpi)ignite1.configuration().getCommunicationSpi(); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(1, spi0.partitionsFullMessages()); assertEquals(1, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); spi0.reset(); spi1.reset(); client = true; log.info("Start client node1."); Ignite ignite2 = startGrid(2); waitForTopologyUpdate(3, 3); TestCommunicationSpi spi2 = (TestCommunicationSpi)ignite2.configuration().getCommunicationSpi(); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(1, spi0.partitionsFullMessages()); assertEquals(0, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); assertEquals(1, spi2.partitionsSingleMessages()); assertEquals(0, spi2.partitionsFullMessages()); spi0.reset(); spi1.reset(); spi2.reset(); log.info("Start client node2."); Ignite ignite3 = startGrid(3); waitForTopologyUpdate(4, 4); TestCommunicationSpi spi3 = (TestCommunicationSpi)ignite3.configuration().getCommunicationSpi(); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(1, spi0.partitionsFullMessages()); assertEquals(0, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); assertEquals(0, spi2.partitionsSingleMessages()); assertEquals(0, spi2.partitionsFullMessages()); assertEquals(1, 
spi3.partitionsSingleMessages()); assertEquals(0, spi3.partitionsFullMessages()); spi0.reset(); spi1.reset(); spi2.reset(); spi3.reset(); log.info("Start one more server node."); client = false; Ignite ignite4 = startGrid(4); waitForTopologyUpdate(5, 5); TestCommunicationSpi spi4 = (TestCommunicationSpi)ignite4.configuration().getCommunicationSpi(); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(4, spi0.partitionsFullMessages()); assertEquals(1, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); assertEquals(1, spi2.partitionsSingleMessages()); assertEquals(0, spi2.partitionsFullMessages()); assertEquals(1, spi3.partitionsSingleMessages()); assertEquals(0, spi3.partitionsFullMessages()); assertEquals(1, spi4.partitionsSingleMessages()); assertEquals(0, spi4.partitionsFullMessages()); spi0.reset(); spi1.reset(); spi2.reset(); spi3.reset(); log.info("Stop server node."); ignite4.close(); waitForTopologyUpdate(4, 6); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(3, spi0.partitionsFullMessages()); assertEquals(1, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); assertEquals(1, spi2.partitionsSingleMessages()); assertEquals(0, spi2.partitionsFullMessages()); assertEquals(1, spi3.partitionsSingleMessages()); assertEquals(0, spi3.partitionsFullMessages()); spi0.reset(); spi1.reset(); spi2.reset(); log.info("Stop client node2."); ignite3.close(); waitForTopologyUpdate(3, 7); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(0, spi0.partitionsFullMessages()); assertEquals(0, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); assertEquals(0, spi2.partitionsSingleMessages()); assertEquals(0, spi2.partitionsFullMessages()); spi0.reset(); spi1.reset(); log.info("Stop client node1."); ignite2.close(); waitForTopologyUpdate(2, 8); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(0, spi0.partitionsFullMessages()); 
assertEquals(0, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); log.info("Stop server node."); ignite1.close(); waitForTopologyUpdate(1, 9); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(0, spi0.partitionsFullMessages()); } /** * @param expNodes Expected number of nodes. * @param topVer Expected topology version. * @throws Exception If failed. */ private void waitForTopologyUpdate(int expNodes, int topVer) throws Exception { final AffinityTopologyVersion ver = new AffinityTopologyVersion(topVer, 0); waitForTopologyUpdate(expNodes, ver); } /** * @param expNodes Expected number of nodes. * @param topVer Expected topology version. * @throws Exception If failed. */ private void waitForTopologyUpdate(int expNodes, final AffinityTopologyVersion topVer) throws Exception { List<Ignite> nodes = G.allGrids(); assertEquals(expNodes, nodes.size()); for (Ignite ignite : nodes) { final IgniteKernal kernal = (IgniteKernal)ignite; GridTestUtils.waitForCondition(new GridAbsPredicate() { @Override public boolean apply() { return topVer.equals(kernal.context().cache().context().exchange().readyAffinityVersion()); } }, 10_000); assertEquals("Unexpected affinity version for " + ignite.name(), topVer, kernal.context().cache().context().exchange().readyAffinityVersion()); } Iterator<Ignite> it = nodes.iterator(); Ignite ignite0 = it.next(); Affinity<Integer> aff0 = ignite0.affinity(null); while (it.hasNext()) { Ignite ignite = it.next(); Affinity<Integer> aff = ignite.affinity(null); assertEquals(aff0.partitions(), aff.partitions()); for (int part = 0; part < aff.partitions(); part++) assertEquals(aff0.mapPartitionToPrimaryAndBackups(part), aff.mapPartitionToPrimaryAndBackups(part)); } for (Ignite ignite : nodes) { final IgniteKernal kernal = (IgniteKernal)ignite; for (IgniteInternalCache cache : kernal.context().cache().caches()) { GridDhtPartitionTopology top = cache.context().topology(); assertEquals("Unexpected topology version 
[node=" + ignite.name() + ", cache=" + cache.name() + ']', topVer, top.topologyVersion()); } } awaitPartitionMapExchange(); } /** * @throws Exception If failed. */ public void testClientOnlyCacheStart() throws Exception { clientOnlyCacheStart(false, false); } /** * @throws Exception If failed. */ public void testNearOnlyCacheStart() throws Exception { clientOnlyCacheStart(true, false); } /** * @throws Exception If failed. */ public void testClientOnlyCacheStartFromServerNode() throws Exception { clientOnlyCacheStart(false, true); } /** * @throws Exception If failed. */ public void testNearOnlyCacheStartFromServerNode() throws Exception { clientOnlyCacheStart(true, true); } /** * @param nearCache If {@code true} creates near cache on client. * @param srvNode If {@code true} creates client cache on server node. * @throws Exception If failed. */ private void clientOnlyCacheStart(boolean nearCache, boolean srvNode) throws Exception { Ignite ignite0 = startGrid(0); Ignite ignite1 = startGrid(1); waitForTopologyUpdate(2, 2); final String CACHE_NAME1 = "cache1"; CacheConfiguration ccfg = new CacheConfiguration(); ccfg.setName(CACHE_NAME1); if (srvNode) ccfg.setNodeFilter(new TestFilter(getTestGridName(2))); ignite0.createCache(ccfg); client = !srvNode; Ignite ignite2 = startGrid(2); waitForTopologyUpdate(3, 3); TestCommunicationSpi spi0 = (TestCommunicationSpi)ignite0.configuration().getCommunicationSpi(); TestCommunicationSpi spi1 = (TestCommunicationSpi)ignite1.configuration().getCommunicationSpi(); TestCommunicationSpi spi2 = (TestCommunicationSpi)ignite2.configuration().getCommunicationSpi(); spi0.reset(); spi1.reset(); spi2.reset(); assertNull(((IgniteKernal)ignite2).context().cache().context().cache().internalCache(CACHE_NAME1)); if (nearCache) ignite2.getOrCreateNearCache(CACHE_NAME1, new NearCacheConfiguration<>()); else ignite2.cache(CACHE_NAME1); waitForTopologyUpdate(3, new AffinityTopologyVersion(3, 1)); GridCacheAdapter cache = 
((IgniteKernal)ignite2).context().cache().context().cache().internalCache(CACHE_NAME1); assertNotNull(cache); assertEquals(nearCache, cache.context().isNear()); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(1, spi0.partitionsFullMessages()); assertEquals(0, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); assertEquals(1, spi2.partitionsSingleMessages()); assertEquals(0, spi2.partitionsFullMessages()); ClusterNode clientNode = ((IgniteKernal)ignite2).localNode(); for (Ignite ignite : Ignition.allGrids()) { GridDiscoveryManager disco = ((IgniteKernal)ignite).context().discovery(); assertTrue(disco.cacheNode(clientNode, CACHE_NAME1)); assertFalse(disco.cacheAffinityNode(clientNode, CACHE_NAME1)); assertEquals(nearCache, disco.cacheNearNode(clientNode, CACHE_NAME1)); } spi0.reset(); spi1.reset(); spi2.reset(); AffinityTopologyVersion topVer; if (!srvNode) { log.info("Close client cache: " + CACHE_NAME1); ignite2.cache(CACHE_NAME1).close(); assertNull(((IgniteKernal)ignite2).context().cache().context().cache().internalCache(CACHE_NAME1)); waitForTopologyUpdate(3, new AffinityTopologyVersion(3, 2)); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(0, spi0.partitionsFullMessages()); assertEquals(0, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); assertEquals(0, spi2.partitionsSingleMessages()); assertEquals(0, spi2.partitionsFullMessages()); topVer = new AffinityTopologyVersion(3, 3); } else topVer = new AffinityTopologyVersion(3, 2); final String CACHE_NAME2 = "cache2"; ccfg = new CacheConfiguration(); ccfg.setName(CACHE_NAME2); ignite2.createCache(ccfg); waitForTopologyUpdate(3, topVer); assertEquals(0, spi0.partitionsSingleMessages()); assertEquals(2, spi0.partitionsFullMessages()); assertEquals(1, spi1.partitionsSingleMessages()); assertEquals(0, spi1.partitionsFullMessages()); assertEquals(1, spi2.partitionsSingleMessages()); assertEquals(0, 
spi2.partitionsFullMessages()); } /** * */ private static class TestFilter implements IgnitePredicate<ClusterNode> { /** */ private String exclNodeName; /** * @param exclNodeName Node name to exclude. */ public TestFilter(String exclNodeName) { this.exclNodeName = exclNodeName; } /** {@inheritDoc} */ @Override public boolean apply(ClusterNode clusterNode) { return !exclNodeName.equals(clusterNode.attribute(IgniteNodeAttributes.ATTR_GRID_NAME)); } } /** * Test communication SPI. */ private static class TestCommunicationSpi extends TcpCommunicationSpi { /** */ private AtomicInteger partSingleMsgs = new AtomicInteger(); /** */ private AtomicInteger partFullMsgs = new AtomicInteger(); /** */ @LoggerResource private IgniteLogger log; /** {@inheritDoc} */ @Override public void sendMessage(ClusterNode node, Message msg) { super.sendMessage(node, msg); Object msg0 = ((GridIoMessage)msg).message(); if (msg0 instanceof GridDhtPartitionsSingleMessage) { if (((GridDhtPartitionsSingleMessage)msg0).exchangeId() != null) { log.info("Partitions message: " + msg0.getClass().getSimpleName()); partSingleMsgs.incrementAndGet(); } } else if (msg0 instanceof GridDhtPartitionsFullMessage) { if (((GridDhtPartitionsFullMessage)msg0).exchangeId() != null) { log.info("Partitions message: " + msg0.getClass().getSimpleName()); partFullMsgs.incrementAndGet(); } } } /** * */ void reset() { partSingleMsgs.set(0); partFullMsgs.set(0); } /** * @return Sent partitions single messages. */ int partitionsSingleMessages() { return partSingleMsgs.get(); } /** * @return Sent partitions full messages. */ int partitionsFullMessages() { return partFullMsgs.get(); } } }
apache-2.0
ppazos/cabolabs-web-old
content/en/about_us/ing_pazos.php
10021
<style type="text/css"> ul, ol { padding-left: 30px; } /* ------------------>>> CUSTOM PARA ABOUT US */ .page-content p { margin: 10px 0; } .images { text-align: center; margin: 15px 0 25px 0; } img.profile { width: 300px; margin-bottom: 5px; } .images img { padding: 5px; cursor: pointer; } .profile_item li span { font-weight: bold; display: block } .center { text-align: center; } /* Estilo lightbox para captura de pantallas */ #gallery ul { list-style: none; margin: 10px; text-align: center; } #gallery ul li { display: inline; } #gallery ul img { border: 5px solid #f3f3f3; margin: 3px; } </style> <script src="<?php echo $_base_dir; ?>/js/jquery.lightbox-0.5.min.js"></script> <link href="<?php echo $_base_dir; ?>/css/jquery.lightbox-0.5.css" rel="stylesheet" type="text/css"> <script type="text/javascript"> $(document).ready(function() { $('#gallery a').lightBox(); }); </script> <h1>Ing. Pablo Pazos Guti&eacute;rrez</h1> <img src="<?php echo $_base_dir; ?>/images/pablopazos.jpg" class="profile" /> <br/> <a href="https://plus.google.com/s/Pablo%20Pazos" target="_blank"><img src="<?php echo $_base_dir; ?>/images/google+.png" /></a> <a href="https://twitter.com/ppazos" target="_blank"><img src="<?php echo $_base_dir; ?>/images/twitter.png" /></a> <a href="http://www.linkedin.com/in/pablopazosgutierrez" target="_blank"><img src="<?php echo $_base_dir; ?>/images/linkedin.png" /></a> <a href="http://informatica-medica.blogspot.com/" target="_blank"><img src="<?php echo $_base_dir; ?>/images/blogspot.png" /></a> <div class="page_content"> <h2>Curriculum Vitae / Resume</h2> <div class="center"> (<a href="http://docs.google.com/viewer?url=http%3A%2F%2Fwww.cabolabs.com%2Fcontent%2Fen%2Fabout_us%2Fcv_ppazos_20130517_en.pdf">Download</a>) <br/> <iframe src="http://docs.google.com/viewer?url=http%3A%2F%2Fwww.cabolabs.com%2Fcontent%2Fen%2Fabout_us%2Fcv_ppazos_20130517_en.pdf&embedded=true" width="720" height="480" style="border: none;"></iframe> </div> <h2>Certificates</h2> 
<p>Click on the pictures for details</p> <div id="gallery"> <ul> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2014_infolac.png" title="Expositor en el Congreso InfoLac 2014"> <img src="<?php echo $_base_dir; ?>/images/certificates/2014_infolac_sm.png" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2012_hiba.jpg" title="Disertante en las VII Jornadas Universitarias de Sistemas de Informacion en Salud - HIBA 2012"> <img src="<?php echo $_base_dir; ?>/images/certificates/2012_hiba_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2012_cais.jpg" title="Expositor en Congreso Argentino de Informatica en Salud - CAIS 2012"> <img src="<?php echo $_base_dir; ?>/images/certificates/2012_cais_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2011_sueiidiss.jpg" title="Asistencia al 5to Congreso Iberoamericano de Informatica Medica Normalizada - SUEIIDISS"> <img src="<?php echo $_base_dir; ?>/images/certificates/2011_sueiidiss_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2011_ingdemuestra.jpg" title="Expositor en feria Ingenieria DeMuestra - FING 2011"> <img src="<?php echo $_base_dir; ?>/images/certificates/2011_ingdemuestra_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2010_sueiidiss.jpg" title="Expositor en Evento de Desarrollos de Informatica Sanitaria - SUEIIDISS 2010"> <img src="<?php echo $_base_dir; ?>/images/certificates/2010_sueiidiss_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2010_samericano.jpg" title="Asistencia al workshop del Programa de Implementacion de Sistema de Case-Mix - Sanatorio Americano 2010"> <img src="<?php echo $_base_dir; ?>/images/certificates/2010_samericano_sm.jpg" height="120" alt="" /> </a> </li> <li> <a 
href="<?php echo $_base_dir; ?>/images/certificates/2010_cais.jpg" title="Expositor en Congreso Argentino de Informatica en Salud - CAIS 2010"> <img src="<?php echo $_base_dir; ?>/images/certificates/2010_cais_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2010_agesic_ide.jpg" title="Expositor en Congreso Uruguayo de Infraestructura de Datos Espaciales - AGESIC 2010"> <img src="<?php echo $_base_dir; ?>/images/certificates/2010_agesic_ide_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2009_sueiidiss.jpg" title="Expositor en 4to Congreso Iberoamericano de Informatica Medica Normalizada - SUEIIDISS 2009"> <img src="<?php echo $_base_dir; ?>/images/certificates/2009_sueiidiss_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2009_jaiio_sis.jpg" title="Asistente al Simposio Argentino de Informatica en Salud - SIS 2009"> <img src="<?php echo $_base_dir; ?>/images/certificates/2009_jaiio_sis_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2009_femi_encuentro.jpg" title="Asistente al 6to Encuentro Nacional FEMI - FEMI 2009"> <img src="<?php echo $_base_dir; ?>/images/certificates/2009_femi_encuentro_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2009_curso_hiba.jpg" title="Aprobacion del Curso de Introduccion a la Informatica Biomedica - HIBA 2009"> <img src="<?php echo $_base_dir; ?>/images/certificates/2009_curso_hiba_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2009_curso_cda.jpg" title="Asistente al Taller de Interoperabilidad con HL7 CDA R2 - SUEIIDISS 2009"> <img src="<?php echo $_base_dir; ?>/images/certificates/2009_curso_cda_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; 
?>/images/certificates/2008_sueiidiss.jpg" title="Expositor en 3er Congreso Iberoamericano de Informatica Medica Normalizada - SUEIIDISS 2009"> <img src="<?php echo $_base_dir; ?>/images/certificates/2008_sueiidiss_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2008_fsd.jpg" title="Asistente a la Jornada de Divulgacion con Unidades Coordinadoras Locales - FEMI 2008"> <img src="<?php echo $_base_dir; ?>/images/certificates/2008_fsd_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2005_djoven.jpg" title="Participante en el Programa Desafio Joven - ACDE y SHELL Uruguay 2005"> <img src="<?php echo $_base_dir; ?>/images/certificates/2005_djoven_sm.jpg" height="120" alt="" /> </a> </li> <li> <a href="<?php echo $_base_dir; ?>/images/certificates/2001_tecnico.jpg" title="Aprobacion del Curso de Tecnico en Mantenimiento de PC - Taller de Informatica 2001"> <img src="<?php echo $_base_dir; ?>/images/certificates/2001_tecnico_sm.jpg" height="120" alt="" /> </a> </li> </ul> </div> <h2>Publications</h2> <ul class="profile_item"> <li> <span>Est&aacute;ndares e interoperabilidad en salud electr&oacute;nica: Requisitos para una gesti&oacute;n sanitaria efectiva y eficiente</span> CEPAL 2011 <a href="http://www.cepal.org/cgi-bin/getProd.asp?xml=/publicaciones/xml/4/45524/P45524.xml&xsl=/dds/tpl/p9f.xsl&base=/dds/tpl/top-bottom.xsl" target="_blank">ver online</a> </li> <li> <span>Traumagen: una herramienta innovadora</span> Revista Ser M&eacute;dico nº6 (p37) <a href="http://www.smu.org.uy/publicaciones/sermedico/2012/sm6/sermedico.pdf" target="_blank">ver online</a> </li> <li> <span>Los 10 mandamientos de la historia cl&iacute;nica electr&oacute;nica</span> Revista Inform&aacute;tica M&eacute;dica 2011 (p.42-45) <a href="http://www.slideshare.net/SSMN/revista-informatica-medica-n3" target="_blank">ver online</a> </li> <li> <span>&iquest;Adi&oacute;s a la letra de 
m&eacute;dico?</span> Diario El Observador 2011-11-16 (secci&oacute;n Salud) <a href="http://elobservador.com.uy/noticia/213257/adios-a-la-letra-de-medico" target="_blank">ver online</a> </li> <li> <span>C&oacute;mo en contrar formaci&oacute;n de postgrado en Inform&aacute;tica M&eacute;dica y no morir en el intento</span> Revista Inform&aacute;tica M&eacute;dica 2011 (p.50-51) <a href="http://www.informaticamedica.cl/2011/06/descargue-gratis-el-ultimo-numero-de.html" target="_blank">ver online</a> </li> <li> <span>Est&aacute;ndares que salvan vidas</span> Revista Milveinticuatro 2009 (p.24-27) <a href="http://www.1024.com.uy/ediciones/leonardo_da_vinci.html" target="_blank">ver online</a> </li> </ul> <h2>Scientific papers and presentations</h2> <ul> <li><a href="http://www.slideshare.net/pablitox/" target="_blank">Read online at SlideShare</a></li> </ul> <h2>Experience</h2> <ul> <li><a href="http://www.linkedin.com/in/pablopazosgutierrez" target="_blank">Read about Pablo's experience at LinkedIn</a></li> </ul> <h2>Direct contact</h2> <ul> <li><a href=mailto:pablo.pazos@cabolabs.com">pablo.pazos@cabolabs.com</a></li> </ul> </div>
apache-2.0
gianm/tranquility
core/src/main/scala/com/metamx/tranquility/finagle/BeamService.scala
1238
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.metamx.tranquility.finagle

import com.metamx.tranquility.beam.Beam
import com.twitter.finagle.Service
import com.twitter.util.Future
import com.twitter.util.Time

/**
  * Adapts a [[Beam]] to the Finagle [[Service]] interface: a request is a
  * batch of messages to hand to the beam, and the response is the number of
  * messages the beam reports having sent.
  *
  * @param beam the underlying beam that request batches are forwarded to
  */
class BeamService[A](beam: Beam[A]) extends Service[Seq[A], Int]
{
  /** Forward the batch to the beam; reply with the size of the sent batch. */
  def apply(request: Seq[A]): Future[Int] = {
    val sent = beam.sendBatch(request)
    sent.map(batch => batch.size)
  }

  /** Closing this service closes the underlying beam. */
  override def close(deadline: Time) = beam.close()
}
apache-2.0
davidkarlsen/camel
components/camel-cdi/src/test/java/org/apache/camel/cdi/test/RawEventEndpointCdi12Test.java
3765
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.cdi.test;

import java.util.concurrent.TimeUnit;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Event;
import javax.enterprise.event.Observes;
import javax.inject.Inject;

import org.apache.camel.ProducerTemplate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.cdi.CdiCamelExtension;
import org.apache.camel.cdi.CdiEventEndpoint;
import org.apache.camel.cdi.Uri;
import org.apache.camel.component.mock.MockEndpoint;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import static org.apache.camel.component.mock.MockEndpoint.assertIsSatisfied;

/**
 * Exercises a raw (untyped) {@link CdiEventEndpoint} in both directions:
 * a fired CDI event must reach the consuming route, and a message sent to
 * the producing route must be observable as a CDI event.
 */
@RunWith(Arquillian.class)
public class RawEventEndpointCdi12Test {

    @Inject
    private MockEndpoint consumed;

    @Inject
    private MockEndpoint produced;

    @Deployment
    public static Archive<?> deployment() {
        return ShrinkWrap.create(JavaArchive.class)
            // Camel CDI
            .addPackage(CdiCamelExtension.class.getPackage())
            // Test classes
            .addClasses(RawEventRoute.class, RawEventObserver.class)
            // Bean archive deployment descriptor
            .addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml");
    }

    @Before
    public void resetMock() {
        consumed.reset();
    }

    @Test
    public void sendEventToConsumer(Event<Object> event) throws InterruptedException {
        consumed.expectedMessageCount(1);
        consumed.expectedBodiesReceived("test");

        // Firing a String CDI event should deliver it to the raw endpoint route.
        event.select(String.class).fire("test");

        assertIsSatisfied(2L, TimeUnit.SECONDS, consumed);
    }

    @Test
    public void sendMessageToProducer(@Uri("direct:produce") ProducerTemplate producer) throws InterruptedException {
        long random = Math.round(Math.random() * Long.MAX_VALUE);

        produced.expectedMessageCount(1);
        produced.expectedBodiesReceived(random);
        consumed.expectedMessageCount(1);
        consumed.expectedBodiesReceived(random);

        // The long body travels route -> raw endpoint -> CDI observer -> mock:produced,
        // and also back through the raw endpoint into mock:consumed.
        producer.sendBody(random);

        assertIsSatisfied(2L, TimeUnit.SECONDS, consumed, produced);
    }
}

/** Consumes from and produces to the raw CDI event endpoint. */
class RawEventRoute extends RouteBuilder {

    @Inject
    private CdiEventEndpoint rawEventEndpoint;

    @Override
    public void configure() {
        from(rawEventEndpoint).to("mock:consumed");
        from("direct:produce").to(rawEventEndpoint);
    }
}

/** Forwards every observed long event to the mock:produced endpoint. */
@ApplicationScoped
class RawEventObserver {

    void collectEvents(@Observes long event, @Uri("mock:produced") ProducerTemplate producer) {
        producer.sendBody(event);
    }
}
apache-2.0
FIRST-Tech-Challenge/appinventor-sources
appinventor/components/src/com/google/appinventor/components/runtime/FtcVuforiaLocalizer.java
46402
// -*- mode: java; c-basic-offset: 2; -*- // Copyright 2011-2016 MIT, All rights reserved // Released under the Apache License, Version 2.0 // http://www.apache.org/licenses/LICENSE-2.0 package com.google.appinventor.components.runtime; import com.google.appinventor.components.annotations.DesignerComponent; import com.google.appinventor.components.annotations.DesignerProperty; import com.google.appinventor.components.annotations.PropertyCategory; import com.google.appinventor.components.annotations.SimpleFunction; import com.google.appinventor.components.annotations.SimpleObject; import com.google.appinventor.components.annotations.SimpleProperty; import com.google.appinventor.components.annotations.UsesLibraries; import com.google.appinventor.components.common.ComponentCategory; import com.google.appinventor.components.common.PropertyTypeConstants; import com.google.appinventor.components.common.YaVersion; import com.google.appinventor.components.runtime.util.ErrorMessages; import com.google.appinventor.components.runtime.ftc.R; import org.firstinspires.ftc.robotcore.external.ClassFactory; import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix; import org.firstinspires.ftc.robotcore.external.matrices.VectorF; import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder; import org.firstinspires.ftc.robotcore.external.navigation.AxesReference; import org.firstinspires.ftc.robotcore.external.navigation.Orientation; import org.firstinspires.ftc.robotcore.external.navigation.RelicRecoveryVuMark; import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer; import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection; import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.Parameters; import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.Parameters.CameraMonitorFeedback; import 
org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable; import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener; import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables; import java.util.ArrayList; import java.util.List; /** * A component for interacting with subsystems that can help support localization through visual * means, for an FTC robot. * * @author lizlooney@google.com (Liz Looney) */ @DesignerComponent(version = YaVersion.FTC_VUFORIA_LOCALIZER_COMPONENT_VERSION, description = "A component for interacting with subsystems that can help support localization " + "through visual means, for an FTC robot. You need to obtain your own license key to use " + "Vuforia. " + "Vuforia will not load without a valid license being provided. Vuforia 'Development' " + "license keys, which is what is needed here, can be obtained free of charge from the " + "Vuforia developer web site at https://developer.vuforia.com/license-manager", category = ComponentCategory.FIRSTTECHCHALLENGE, nonVisible = true, iconName = "images/ftc.png") @SimpleObject @UsesLibraries(libraries = "FtcRobotCore.jar,FtcVuforia.jar") public final class FtcVuforiaLocalizer extends AndroidNonvisibleComponent implements Component, OnDestroyListener, Deleteable { private volatile String vuforiaLicenseKey; private volatile VuforiaLocalizer.Parameters parameters; private volatile VuforiaLocalizer vuforiaLocalizer; private volatile OpenGLMatrix phoneLocationOnRobot; private final List<VuforiaTrackables> trackablesList = new ArrayList<VuforiaTrackables>(); private final List<VuforiaTrackable> trackableList = new ArrayList<VuforiaTrackable>(); /** * Creates a new FtcVuforiaLocalizer component. */ public FtcVuforiaLocalizer(ComponentContainer container) { super(container.$form()); form.registerForOnDestroy(this); } /** * VuforiaLicenseKey property getter. * Not visible in blocks. 
*/ @SimpleProperty(description = "The license key with which to use Vuforia. " + "Vuforia will not load without a valid license being provided. Vuforia 'Development' " + "license keys, which is what is needed here, can be obtained free of charge from the " + "Vuforia developer web site at https://developer.vuforia.com/license-manager", category = PropertyCategory.BEHAVIOR, userVisible = false) public String VuforiaLicenseKey() { return vuforiaLicenseKey; } /** * VuforiaLicenseKey property setter. * Can only be set in designer; not visible in blocks. */ @DesignerProperty(editorType = PropertyTypeConstants.PROPERTY_TYPE_STRING, defaultValue = "") @SimpleProperty(userVisible = false) public void VuforiaLicenseKey(String vuforiaLicenseKey) { this.vuforiaLicenseKey = vuforiaLicenseKey; } /** * CameraDirection_FRONT property getter. */ @SimpleProperty(description = "The constant for CameraDirection_FRONT.", category = PropertyCategory.BEHAVIOR) public String CameraDirection_FRONT() { return CameraDirection.FRONT.toString(); } /** * CameraDirection_BACK property getter. */ @SimpleProperty(description = "The constant for CameraDirection_BACK.", category = PropertyCategory.BEHAVIOR) public String CameraDirection_BACK() { return CameraDirection.BACK.toString(); } private CameraDirection parseCameraDirection(String cameraDirection, String functionName) { for (CameraDirection cameraDirectionValue : CameraDirection.values()) { if (cameraDirectionValue.toString().equalsIgnoreCase(cameraDirection)) { return cameraDirectionValue; } } form.dispatchErrorOccurredEvent(this, functionName, ErrorMessages.ERROR_FTC_INVALID_CAMERA_DIRECTION, cameraDirection); return null; } /** * CameraMonitorFeedback_NONE property getter. */ @SimpleProperty(description = "The constant for CameraMonitorFeedback_NONE.", category = PropertyCategory.BEHAVIOR) public String CameraMonitorFeedback_NONE() { return CameraMonitorFeedback.NONE.toString(); } /** * CameraMonitorFeedback_AXES property getter. 
*/ @SimpleProperty(description = "The constant for CameraMonitorFeedback_AXES.", category = PropertyCategory.BEHAVIOR) public String CameraMonitorFeedback_AXES() { return CameraMonitorFeedback.AXES.toString(); } /** * CameraMonitorFeedback_TEAPOT property getter. */ @SimpleProperty(description = "The constant for CameraMonitorFeedback_TEAPOT.", category = PropertyCategory.BEHAVIOR) public String CameraMonitorFeedback_TEAPOT() { return CameraMonitorFeedback.TEAPOT.toString(); } /** * CameraMonitorFeedback_BUILDINGS property getter. */ @SimpleProperty(description = "The constant for CameraMonitorFeedback_BUILDINGS.", category = PropertyCategory.BEHAVIOR) public String CameraMonitorFeedback_BUILDINGS() { return CameraMonitorFeedback.BUILDINGS.toString(); } private CameraMonitorFeedback parseCameraMonitorFeedback( String cameraMonitorFeedback, String functionName) { for (CameraMonitorFeedback cameraMonitorFeedbackValue : CameraMonitorFeedback.values()) { if (cameraMonitorFeedbackValue.toString().equalsIgnoreCase(cameraMonitorFeedback)) { return cameraMonitorFeedbackValue; } } form.dispatchErrorOccurredEvent(this, functionName, ErrorMessages.ERROR_FTC_INVALID_CAMERA_MONITOR_FEEDBACK, cameraMonitorFeedback); return null; } @SimpleFunction(description = "Create the Vuforia localizer.") public void CreateVuforiaLocalizer( String cameraDirection, boolean useExtendedTracking, String cameraMonitorFeedback, boolean fillCameraMonitorViewParent) { try { clear(); parameters = new VuforiaLocalizer.Parameters(R.id.cameraMonitorViewId); parameters.vuforiaLicenseKey = vuforiaLicenseKey; CameraDirection cameraDirectionValue = parseCameraDirection( cameraDirection, "CreateVuforiaLocalizer"); if (cameraDirectionValue != null) { parameters.cameraDirection = cameraDirectionValue; } parameters.useExtendedTracking = useExtendedTracking; CameraMonitorFeedback cameraMonitorFeedbackValue = parseCameraMonitorFeedback(cameraMonitorFeedback, "CreateVuforiaLocalizer"); if 
(cameraMonitorFeedbackValue != null) { parameters.cameraMonitorFeedback = cameraMonitorFeedbackValue; } parameters.fillCameraMonitorViewParent = fillCameraMonitorViewParent; vuforiaLocalizer = ClassFactory.createVuforiaLocalizer(parameters); } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "CreateVuforiaLocalizer", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } @SimpleFunction(description = "Loads a Vuforia dataset from the indicated application asset, " + "which must be of type .XML. The corresponding .DAT asset must be a sibling. Note that " + "this operation can be extremely lengthy, possibly taking a few seconds to execute. " + "Loading datasets from an asset you stored in your application APK is the recommended " + "approach to packaging datasets so they always travel along with your code. " + "In App Inventor, assets are called media.") public void LoadTrackablesFromAsset(String assetName) { try { if (vuforiaLocalizer != null && parameters != null) { afterLoad(vuforiaLocalizer.loadTrackablesFromAsset(assetName)); } else { form.dispatchErrorOccurredEvent(this, "LoadTrackablesFromAsset", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "LoadTrackablesFromAsset", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } @SimpleFunction(description = "Loads a Vuforia dataset from the indicated file, which must be " + "a .XML file and contain the full file path. The corresponding .DAT file must be a sibling " + "file in the same directory. Note that this operation can be extremely lengthy, possibly " + "taking a few seconds to execute. 
Loading datasets from an asset you stored in your " + "application APK is the recommended approach to packaging datasets so they always travel " + "along with your code.") public void LoadTrackablesFromFile(String absoluteFileName) { try { if (vuforiaLocalizer != null) { afterLoad(vuforiaLocalizer.loadTrackablesFromFile(absoluteFileName)); } else { form.dispatchErrorOccurredEvent(this, "LoadTrackablesFromFile", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "LoadTrackablesFromFile", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } private void afterLoad(VuforiaTrackables vuforiaTrackables) { if (vuforiaTrackables != null) { trackablesList.add(vuforiaTrackables); for (VuforiaTrackable trackable : vuforiaTrackables) { trackableList.add(trackable); if (phoneLocationOnRobot != null) { ((VuforiaTrackableDefaultListener) trackable.getListener()) .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection); } } } } // VuforiaTrackables functions @SimpleFunction(description = "Activate trackables that were loaded with " + "LoadTrackablesFromAsset or LoadTrackablesFromFile.") public void ActivateTrackables() { try { if (vuforiaLocalizer != null) { for (VuforiaTrackables trackables : trackablesList) { trackables.activate(); } } else { form.dispatchErrorOccurredEvent(this, "ActivateTrackables", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "ActivateTrackables", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } @SimpleFunction(description = "Deactivate trackables that were loaded with " + "LoadTrackablesFromAsset or LoadTrackablesFromFile.") public void DeactivateTrackables() { try { if (vuforiaLocalizer != null) { for (VuforiaTrackables trackables : trackablesList) { trackables.deactivate(); } } else { form.dispatchErrorOccurredEvent(this, 
"DeactivateTrackables", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "DeactivateTrackables", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } /** * TrackableCount property getter. */ @SimpleProperty(description = "The number of trackables that were loaded with " + "LoadTrackablesFromAsset or LoadTrackablesFromFile.", category = PropertyCategory.BEHAVIOR) public int TrackableCount() { return trackableList.size(); } // Trackable functions @SimpleFunction(description = "Set the name of a trackable that was loaded with " + "LoadTrackablesFromAsset or LoadTrackablesFromFile. The trackableNumber is 0, 1, 2, etc.") public void SetTrackableName(int trackableNumber, String name) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { trackableList.get(trackableNumber).setName(name); } else { form.dispatchErrorOccurredEvent(this, "SetTrackableName", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "SetTrackableName", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "SetTrackableName", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } @SimpleFunction(description = "Get the name of a trackable that was loaded with " + "LoadTrackablesFromAsset or LoadTrackablesFromFile. 
The trackableNumber is 0, 1, 2, etc.") public String GetTrackableName(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return trackableList.get(trackableNumber).getName(); } else { form.dispatchErrorOccurredEvent(this, "GetTrackableName", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetTrackableName", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetTrackableName", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return ""; } @SimpleFunction(description = "Set the location (an OpenGLMatrix) of a trackable in the field. The trackableNumber is 0, 1, 2, etc.") public void SetTrackableLocation(int trackableNumber, Object matrix) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { if (matrix instanceof OpenGLMatrix) { trackableList.get(trackableNumber).setLocation((OpenGLMatrix) matrix); } else { form.dispatchErrorOccurredEvent(this, "SetTrackableLocation", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); } } else { form.dispatchErrorOccurredEvent(this, "SetTrackableLocation", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "SetTrackableLocation", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "SetTrackableLocation", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } @SimpleFunction(description = "Get the location (an OpenGLMatrix) of a trackable in the field. 
The trackableNumber is 0, 1, 2, etc.") public Object GetTrackableLocation(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return trackableList.get(trackableNumber).getLocation(); } else { form.dispatchErrorOccurredEvent(this, "GetTrackableLocation", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetTrackableLocation", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetTrackableLocation", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return null; } // VuforiaTrackableDefaultListener functions @SimpleFunction(description = "Specifies the location (an OpenGLMatrix) of the phone on the " + "robot. This is needed in order to compute the robot location.") public void SetPhoneLocationOnRobot(Object matrix) { try { if (matrix instanceof OpenGLMatrix) { phoneLocationOnRobot = (OpenGLMatrix) matrix; if (vuforiaLocalizer != null && parameters != null) { for (VuforiaTrackable trackable : trackableList) { ((VuforiaTrackableDefaultListener) trackable.getListener()) .setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection); } } else { form.dispatchErrorOccurredEvent(this, "SetPhoneLocationOnRobot", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } else { form.dispatchErrorOccurredEvent(this, "SetPhoneLocationOnRobot", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "SetPhoneLocationOnRobot", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } @SimpleFunction(description = "Return true if the trackable (specified by number) is visible. 
The trackableNumber is 0, 1, 2, etc.") public boolean IsTrackableVisible(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return ((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()) .isVisible(); } else { form.dispatchErrorOccurredEvent(this, "IsTrackableVisible", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "IsTrackableVisible", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "IsTrackableVisible", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return false; } @SimpleFunction(description = "Returns the RelicRecoveryVuMark of the trackable (specified by number). The trackableNumber is 0, 1, 2, etc.") public String GetRelicRecoveryVuMark(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { RelicRecoveryVuMark relicRecoveryVuMark = RelicRecoveryVuMark.from((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()); if (relicRecoveryVuMark != null) { return relicRecoveryVuMark.toString(); } } else { form.dispatchErrorOccurredEvent(this, "GetRelicRecoveryVuMark", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetRelicRecoveryVuMark", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetRelicRecoveryVuMark", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return ""; } @SimpleFunction(description = "Return the transform (an OpenGLMatrix) that represents the " + "location of the robot on the field computed from the specified tracker, or null if the " + 
"location cannot be computed. The trackableNumber is 0, 1, 2, etc.") public Object GetRobotLocation(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return ((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()) .getRobotLocation(); } else { form.dispatchErrorOccurredEvent(this, "GetRobotLocation", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetRobotLocation", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetRobotLocation", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return null; } @SimpleFunction(description = "Return the transform (an OpenGLMatrix) that represents the " + "location of the robot on the field computed from the specified tracker, but only if a new " + "location has been detected since the last call to GetUpdatedRobotLocation. 
The trackableNumber is 0, 1, 2, etc.") public Object GetUpdatedRobotLocation(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return ((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()) .getUpdatedRobotLocation(); } else { form.dispatchErrorOccurredEvent(this, "GetUpdatedRobotLocation", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetUpdatedRobotLocation", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetUpdatedRobotLocation", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return null; } @SimpleFunction(description = "Return the location (an OpenGLMatrix) of the trackable in the " + "phone's coordinate system, if it is currently visible, or null if it is not visible. The trackableNumber is 0, 1, 2, etc.") public Object GetPose(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return ((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()) .getPose(); } else { form.dispatchErrorOccurredEvent(this, "GetPose", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetPose", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetPose", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return null; } // listener.getRawPose @SimpleFunction(description = "Return the raw location (an OpenGLMatrix) of the trackable in " + "the phone's coordinate system, as reported by Vuforia. 
The trackableNumber is 0, 1, 2, etc.") public Object GetRawPose(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return ((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()) .getRawPose(); } else { form.dispatchErrorOccurredEvent(this, "GetRawPose", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetRawPose", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetRawPose", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return null; } @SimpleFunction(description = "Return the raw location (an OpenGLMatrix) of the trackable in " + "the phone's coordinate system, as reported by Vuforia, but only if a new location is " + "available since the last call to GetRawUpdatedPose. The trackableNumber is 0, 1, 2, etc.") public Object GetRawUpdatedPose(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return ((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()) .getRawUpdatedPose(); } else { form.dispatchErrorOccurredEvent(this, "GetRawUpdatedPose", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetRawUpdatedPose", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetRawUpdatedPose", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return null; } @SimpleFunction(description = "Return the last known location (an OpenGLMatrix) of the " + "trackable in the phone's coordinate system, even if the trackable is no longer visible. 
The trackableNumber is 0, 1, 2, etc.") public Object GetLastTrackedRawPose(int trackableNumber) { try { if (vuforiaLocalizer != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { return ((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()) .getLastTrackedRawPose(); } else { form.dispatchErrorOccurredEvent(this, "GetLastTrackedRawPose", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "GetLastTrackedRawPose", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "GetLastTrackedRawPose", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } return null; } /* @SimpleFunction(description = "Sets the matrix to correct for the different coordinate systems " + "used in Vuforia and our phone coordinate system here. Here, with the phone in flat front " + "of you in portrait mode (as it is when running the robot controller app), Z is pointing " + "upwards, up out of the screen, X points to your right, and Y points away from you. 
The trackableNumber is 0, 1, 2, etc.") public void SetPoseCorrectionMatrix(int trackableNumber, Object matrix) { try { if (vuforiaLocalizer != null && parameters != null) { if (trackableNumber >= 0 && trackableNumber < trackableList.size()) { if (matrix instanceof OpenGLMatrix) { ((VuforiaTrackableDefaultListener) trackableList.get(trackableNumber).getListener()) .setPoseCorrectionMatrix(parameters.cameraDirection, (OpenGLMatrix) matrix); } else { form.dispatchErrorOccurredEvent(this, "SetPoseCorrectionMatrix", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); } } else { form.dispatchErrorOccurredEvent(this, "SetPoseCorrectionMatrix", ErrorMessages.ERROR_FTC_INVALID_TRACKABLE_NUMBER, trackableNumber, 0, trackableList.size() - 1); } } else { form.dispatchErrorOccurredEvent(this, "SetPoseCorrectionMatrix", ErrorMessages.ERROR_FTC_VUFORIA_LOCALIZER_NOT_CREATED); } } catch (Throwable e) { e.printStackTrace(); form.dispatchErrorOccurredEvent(this, "SetPoseCorrectionMatrix", ErrorMessages.ERROR_FTC_UNEXPECTED_ERROR, e.toString()); } } */ // OpenGLMatrix functions @SimpleFunction(description = "Returns true if the given OpenGLMatrix is null.") public boolean OpenGLMatrixIsNull(Object matrix) { if (matrix == null) { return true; } if (!(matrix instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixIsNull", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); } return false; } @SimpleFunction(description = "Returns a new OpenGLMatrix, initialized as the identity matrix.") public Object OpenGLMatrixIdentity() { return OpenGLMatrix.identityMatrix(); } @SimpleFunction(description = "Returns a new OpenGLMatrix for rotation.") public Object OpenGLMatrixRotation(float angle, float dx, float dy, float dz) { return OpenGLMatrix.rotation(AngleUnit.DEGREES, angle, dx, dy, dz); } @SimpleFunction(description = "Returns a new OpenGLMatrix for translation.") public Object OpenGLMatrixTranslation(float dx, float dy, float dz) { return 
OpenGLMatrix.translation(dx, dy, dz); } @SimpleFunction(description = "Returns a new OpenGLMatrix, created by scaling an existing " + "matrix.") public Object OpenGLMatrixScaled(Object matrix, float scaleX, float scaleY, float scaleZ) { if (!(matrix instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixScaled", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); return null; } return ((OpenGLMatrix) matrix).scaled(scaleX, scaleY, scaleZ); } @SimpleFunction(description = "Returns a new OpenGLMatrix, created by translating an existing " + "matrix.") public Object OpenGLMatrixTranslated(Object matrix, float dx, float dy, float dz) { if (!(matrix instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixTranslated", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); return null; } return ((OpenGLMatrix) matrix).translated(dx, dy, dz); } @SimpleFunction(description = "Returns a new OpenGLMatrix, created by rotating an existing " + "matrix.") public Object OpenGLMatrixRotated(Object matrix, float angle, float dx, float dy, float dz) { if (!(matrix instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixRotated", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); return null; } return ((OpenGLMatrix) matrix).rotated(AngleUnit.DEGREES, angle, dx, dy, dz); } @SimpleFunction(description = "Returns a new OpenGLMatrix, created by inverting an existing " + "matrix.") public Object OpenGLMatrixInverted(Object matrix) { if (!(matrix instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixInverted", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); return null; } return ((OpenGLMatrix) matrix).inverted(); } @SimpleFunction(description = "Returns a new OpenGLMatrix, created by transposing an existing " + "matrix.") public Object OpenGLMatrixTransposed(Object matrix) { if (!(matrix instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, 
"OpenGLMatrixTransposed", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); return null; } return ((OpenGLMatrix) matrix).transposed(); } @SimpleFunction(description = "Returns a new OpenGLMatrix, created by multiplying two existing " + "matrices.") public Object OpenGLMatrixMultiplied(Object matrix1, Object matrix2) { if (!(matrix1 instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixMultiplied", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix1"); return null; } if (!(matrix2 instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixMultiplied", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix2"); return null; } return ((OpenGLMatrix) matrix1).multiplied((OpenGLMatrix) matrix2); } @SimpleFunction(description = "Returns the OpenGLMatrix associated with a particular set of " + "three rotational angles.") public Object OrientationGetRotationMatrix( String axesReference, String axesOrder, float angle1, float angle2, float angle3) { AxesReference axesReferenceValue = parseAxesReference(axesReference, "OrientationGetRotationMatrix"); if (axesReferenceValue == null) { return null; } AxesOrder axesOrderValue = parseAxesOrder(axesOrder, "OrientationGetRotationMatrix"); if (axesOrderValue == null) { return null; } return Orientation.getRotationMatrix( axesReferenceValue, axesOrderValue, AngleUnit.DEGREES, angle1, angle2, angle3); } @SimpleFunction(description = "Formats an OpenGLMatrix as text.") public String OpenGLMatrixFormat(Object matrix) { if (!(matrix instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixTransposed", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); return ""; } return ((OpenGLMatrix) matrix).formatAsTransform(); } @SimpleFunction(description = "Returns the translation (a VectorF) of the given OpenGLMatrix.") public Object OpenGLMatrixGetTranslationVector(Object matrix) { if (!(matrix instanceof OpenGLMatrix)) { 
form.dispatchErrorOccurredEvent(this, "OpenGLMatrixGetTranslation", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); } return ((OpenGLMatrix) matrix).getTranslation(); } @SimpleFunction(description = "Returns an Orientation object for the given OpenGLMatrix.") public Object OpenGLMatrixGetOrientation(Object matrix, String axesReference, String axesOrder) { if (!(matrix instanceof OpenGLMatrix)) { form.dispatchErrorOccurredEvent(this, "OpenGLMatrixGetOrientation", ErrorMessages.ERROR_FTC_INVALID_OPEN_GL_MATRIX, "matrix"); } AxesReference axesReferenceValue = parseAxesReference(axesReference, "OpenGLMatrixGetOrientation"); if (axesReferenceValue == null) { return null; } AxesOrder axesOrderValue = parseAxesOrder(axesOrder, "OpenGLMatrixGetOrientation"); if (axesOrderValue == null) { return null; } return Orientation.getOrientation((OpenGLMatrix) matrix, axesReferenceValue, axesOrderValue, AngleUnit.DEGREES); } // Functions to extract Orientation fields @SimpleFunction(description = "Returns the first angle of the given Orientation object.") public float OrientationFirstAngle(Object orientation) { if (!(orientation instanceof Orientation)) { form.dispatchErrorOccurredEvent(this, "OrientationFirstAngle", ErrorMessages.ERROR_FTC_INVALID_ORIENTATION, "orientation"); return 0; } return ((Orientation) orientation).firstAngle; } @SimpleFunction(description = "Returns the Second angle of the given Orientation object.") public float OrientationSecondAngle(Object orientation) { if (!(orientation instanceof Orientation)) { form.dispatchErrorOccurredEvent(this, "OrientationSecondAngle", ErrorMessages.ERROR_FTC_INVALID_ORIENTATION, "orientation"); return 0; } return ((Orientation) orientation).secondAngle; } @SimpleFunction(description = "Returns the third angle of the given Orientation object.") public float OrientationThirdAngle(Object orientation) { if (!(orientation instanceof Orientation)) { form.dispatchErrorOccurredEvent(this, "OrientationThirdAngle", 
ErrorMessages.ERROR_FTC_INVALID_ORIENTATION, "orientation"); return 0; } return ((Orientation) orientation).thirdAngle; } @SimpleFunction(description = "Converts an Orientation object to an equivalent one with the " + "indicated point of view. Returns an Orientation object.") public Object OrientationToAxesReference(Object orientation, String axesReference) { if (!(orientation instanceof Orientation)) { form.dispatchErrorOccurredEvent(this, "OrientationToAxesReference", ErrorMessages.ERROR_FTC_INVALID_ORIENTATION, "orientation"); return null; } AxesReference axesReferenceValue = parseAxesReference(axesReference, "OrientationToAxesReference"); if (axesReferenceValue == null) { return null; } return ((Orientation) orientation).toAxesReference(axesReferenceValue); } @SimpleFunction(description = "Converts an Orientation object to an equivalent one with the " + "indicated ordering of axes. Returns an Orientation object.") public Object OrientationToAxesOrder(Object orientation, String axesOrder) { if (!(orientation instanceof Orientation)) { form.dispatchErrorOccurredEvent(this, "OrientationToAxesOrder", ErrorMessages.ERROR_FTC_INVALID_ORIENTATION, "orientation"); return null; } AxesOrder axesOrderValue = parseAxesOrder(axesOrder, "OrientationToAxesOrder"); if (axesOrderValue == null) { return null; } return ((Orientation) orientation).toAxesOrder(axesOrderValue); } // Functions to extract VectorF fields @SimpleFunction(description = "Returns the length of the given VectorF object.") public int VectorFGetLength(Object vector) { if (!(vector instanceof VectorF)) { form.dispatchErrorOccurredEvent(this, "VectorFGetLength", ErrorMessages.ERROR_FTC_INVALID_VECTOR_F, "vector"); return 0; } return ((VectorF) vector).length(); } @SimpleFunction(description = "Returns a particular element of the given VectorF object. 
The index is 0, 1, 2, etc.") public float VectorFGetValue(Object vector, int index) { if (!(vector instanceof VectorF)) { form.dispatchErrorOccurredEvent(this, "VectorFGetValue", ErrorMessages.ERROR_FTC_INVALID_VECTOR_F, "vector"); return 0; } if (index < 0 || index >= ((VectorF) vector).length()) { form.dispatchErrorOccurredEvent(this, "VectorFGetValue", ErrorMessages.ERROR_FTC_INVALID_NUMBER, "index"); return 0; } return ((VectorF) vector).get(index); } // AxesOrder enum values /** * AxesOrder_XYX property getter. */ @SimpleProperty(description = "The constant for AxesOrder_XYX.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_XYX() { return AxesOrder.XYX.toString(); } /** * AxesOrder_XZX property getter. */ @SimpleProperty(description = "The constant for AxesOrder_XZX.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_XZX() { return AxesOrder.XZX.toString(); } /** * AxesOrder_XYZ property getter. */ @SimpleProperty(description = "The constant for AxesOrder_XYZ.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_XYZ() { return AxesOrder.XYZ.toString(); } /** * AxesOrder_XZY property getter. */ @SimpleProperty(description = "The constant for AxesOrder_XZY.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_XZY() { return AxesOrder.XZY.toString(); } /** * AxesOrder_YXY property getter. */ @SimpleProperty(description = "The constant for AxesOrder_YXY.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_YXY() { return AxesOrder.YXY.toString(); } /** * AxesOrder_YXZ property getter. */ @SimpleProperty(description = "The constant for AxesOrder_YXZ.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_YXZ() { return AxesOrder.YXZ.toString(); } /** * AxesOrder_YZX property getter. */ @SimpleProperty(description = "The constant for AxesOrder_YZX.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_YZX() { return AxesOrder.YZX.toString(); } /** * AxesOrder_YZY property getter. 
*/ @SimpleProperty(description = "The constant for AxesOrder_YZY.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_YZY() { return AxesOrder.YZY.toString(); } /** * AxesOrder_ZYZ property getter. */ @SimpleProperty(description = "The constant for AxesOrder_ZYZ.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_ZYZ() { return AxesOrder.ZYZ.toString(); } /** * AxesOrder_ZXZ property getter. */ @SimpleProperty(description = "The constant for AxesOrder_ZXZ.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_ZXZ() { return AxesOrder.ZXZ.toString(); } /** * AxesOrder_ZYX property getter. */ @SimpleProperty(description = "The constant for AxesOrder_ZYX.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_ZYX() { return AxesOrder.ZYX.toString(); } /** * AxesOrder_ZXY property getter. */ @SimpleProperty(description = "The constant for AxesOrder_ZXY.", category = PropertyCategory.BEHAVIOR) public String AxesOrder_ZXY() { return AxesOrder.ZXY.toString(); } private AxesOrder parseAxesOrder(String axesOrder, String functionName) { for (AxesOrder axesOrderValue : AxesOrder.values()) { if (axesOrderValue.toString().equalsIgnoreCase(axesOrder)) { return axesOrderValue; } } form.dispatchErrorOccurredEvent(this, functionName, ErrorMessages.ERROR_FTC_INVALID_AXES_ORDER, axesOrder); return null; } // AxesReference enum values /** * AxesReference_EXTRINSIC property getter. */ @SimpleProperty(description = "The constant for AxesReference_EXTRINSIC. " + "Indicates that the axes remain fixed in the world around the object.", category = PropertyCategory.BEHAVIOR) public String AxesReference_EXTRINSIC() { return AxesReference.EXTRINSIC.toString(); } /** * AxesReference_INTRINSIC property getter. */ @SimpleProperty(description = "The constant for AxesReference_INTRINSIC. 
" + "Indicates that the axes move with the object that is rotating.", category = PropertyCategory.BEHAVIOR) public String AxesReference_INTRINSIC() { return AxesReference.INTRINSIC.toString(); } private AxesReference parseAxesReference(String axesReference, String functionName) { for (AxesReference axesReferenceValue : AxesReference.values()) { if (axesReferenceValue.toString().equalsIgnoreCase(axesReference)) { return axesReferenceValue; } } form.dispatchErrorOccurredEvent(this, functionName, ErrorMessages.ERROR_FTC_INVALID_AXES_REFERENCE, axesReference); return null; } // RelicRecoveryVuMark enum values /** * RelicRecoveryVuMark_UNKNOWN property getter. */ @SimpleProperty(description = "The constant for RelicRecoveryVuMark_UNKNOWN. " + "Indicates that the axes remain fixed in the world around the object.", category = PropertyCategory.BEHAVIOR) public String RelicRecoveryVuMark_UNKNOWN() { return RelicRecoveryVuMark.UNKNOWN.toString(); } /** * RelicRecoveryVuMark_LEFT property getter. */ @SimpleProperty(description = "The constant for RelicRecoveryVuMark_LEFT. " + "Indicates that the axes move with the object that is rotating.", category = PropertyCategory.BEHAVIOR) public String RelicRecoveryVuMark_LEFT() { return RelicRecoveryVuMark.LEFT.toString(); } /** * RelicRecoveryVuMark_CENTER property getter. */ @SimpleProperty(description = "The constant for RelicRecoveryVuMark_CENTER. " + "Indicates that the axes move with the object that is rotating.", category = PropertyCategory.BEHAVIOR) public String RelicRecoveryVuMark_CENTER() { return RelicRecoveryVuMark.CENTER.toString(); } /** * RelicRecoveryVuMark_RIGHT property getter. */ @SimpleProperty(description = "The constant for RelicRecoveryVuMark_RIGHT. 
" + "Indicates that the axes move with the object that is rotating.", category = PropertyCategory.BEHAVIOR) public String RelicRecoveryVuMark_RIGHT() { return RelicRecoveryVuMark.RIGHT.toString(); } private RelicRecoveryVuMark parseRelicRecoveryVuMark(String relicRecoverVuMark, String functionName) { for (RelicRecoveryVuMark relicRecoverVuMarkValue : RelicRecoveryVuMark.values()) { if (relicRecoverVuMarkValue.toString().equalsIgnoreCase(relicRecoverVuMark)) { return relicRecoverVuMarkValue; } } form.dispatchErrorOccurredEvent(this, functionName, ErrorMessages.ERROR_FTC_INVALID_RELIC_RECOVERY_VU_MARK, relicRecoverVuMark); return null; } // OnDestroyListener implementation @Override public void onDestroy() { clear(); } // Deleteable implementation @Override public void onDelete() { clear(); } private void clear() { parameters = null; vuforiaLocalizer = null; trackablesList.clear(); trackableList.clear(); } }
apache-2.0
adichad/lucene
contrib/gdata-server/src/core/src/java/org/apache/lucene/gdata/storage/db4o/DB4oController.java
13227
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.lucene.gdata.storage.db4o;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;
import java.security.NoSuchAlgorithmException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.gdata.data.GDataAccount;
import org.apache.lucene.gdata.data.ServerBaseFeed;
import org.apache.lucene.gdata.server.registry.Component;
import org.apache.lucene.gdata.server.registry.ComponentType;
import org.apache.lucene.gdata.server.registry.Scope;
import org.apache.lucene.gdata.server.registry.ScopeVisitor;
import org.apache.lucene.gdata.server.registry.configuration.Requiered;
import org.apache.lucene.gdata.storage.IDGenerator;
import org.apache.lucene.gdata.storage.Storage;
import org.apache.lucene.gdata.storage.StorageController;
import org.apache.lucene.gdata.storage.StorageException;
import org.apache.lucene.gdata.storage.db4o.DB4oStorage.DB4oEntry;
import org.apache.lucene.gdata.utils.Pool;
import org.apache.lucene.gdata.utils.PoolObjectFactory;
import org.apache.lucene.gdata.utils.SimpleObjectPool;

import com.db4o.Db4o;
import com.db4o.ObjectContainer;
import com.db4o.ObjectServer;
import com.google.gdata.data.BaseEntry;
import com.google.gdata.data.BaseFeed;

/**
 * The DB4o StorageController can be used as a persistence component for the
 * gdata-server. To use DB4o a third party jar needs to added to the lib
 * directory of the project. If the jar is not available in the lib directory
 * all db4o dependent class won't be included in the build.
 * <p>
 * If the jar is present in the lib directory this class can be configured as a
 * {@link org.apache.lucene.gdata.server.registry.ComponentType#STORAGECONTROLLER}
 * via the <i>gdata-config.xml</i> file. For detailed config documentation see
 * the wiki page.
 * </p>
 * <p>
 * The DB4oController can run as a client or as a server to serve other running
 * db4o clients in the network. To achieve the best performance out of the db4o
 * caching layer connections to the server will be reused in a connection pool.
 * A connection will not be shared within more than one thread. The controller
 * releases one connection per request and returns the connection when the
 * request has been destroyed.
 * </p>
 * @see <a href="http://www.db4o.com">db4o website</a>
 * @see org.apache.lucene.gdata.utils.Pool
 *
 *
 * @author Simon Willnauer
 *
 */
@Component(componentType = ComponentType.STORAGECONTROLLER)
@Scope(scope = Scope.ScopeType.REQUEST)
public class DB4oController implements StorageController, ScopeVisitor {
    private static final Log LOG = LogFactory.getLog(DB4oController.class);

    // One Storage per request thread; set in visiteInitialize / getStorage,
    // released back to the pool in visiteDestroy.
    private final ThreadLocal<Storage> threadLocalStorage = new ThreadLocal<Storage>();

    private Pool<ObjectContainer> containerPool;

    private ObjectServer server;

    private final IDGenerator idGenerator;

    private boolean weakReferences;

    private boolean runAsServer;

    private int port;

    private String filePath;

    private String user;

    private String password;

    private String host;

    private int containerPoolSize;

    /**
     * @throws NoSuchAlgorithmException
     *
     */
    public DB4oController() throws NoSuchAlgorithmException {
        this.idGenerator = new IDGenerator(15);
    }

    // NOTE(review): opens a fresh client rather than taking one from the
    // pool; callers own the returned container and must close it.
    ObjectContainer releaseContainer() {
        return this.server.openClient();
    }

    /**
     * @see org.apache.lucene.gdata.storage.StorageController#destroy()
     */
    public void destroy() {
        this.containerPool.destroy();
        this.idGenerator.stopIDGenerator();
        this.server.close();
    }

    /**
     * Returns the Storage bound to the current thread, creating and caching
     * one from the pool on first access within a request.
     *
     * @see org.apache.lucene.gdata.storage.StorageController#getStorage()
     */
    public Storage getStorage() throws StorageException {
        Storage retVal = this.threadLocalStorage.get();
        if (retVal != null)
            return retVal;
        retVal = new DB4oStorage(this.containerPool.aquire(), this);
        this.threadLocalStorage.set(retVal);
        return retVal;
    }

    /**
     * Configures db4o indexing/activation/cascade rules, opens the server (or
     * a client-side proxy when not running as server), builds the container
     * pool and ensures the admin account exists.
     *
     * @see org.apache.lucene.gdata.server.registry.ServerComponent#initialize()
     */
    public void initialize() {
        if (LOG.isInfoEnabled())
            LOG.info("Initialize " + this.toString());
        Db4o.configure().objectClass(DB4oEntry.class).objectField("updated")
                .indexed(true);
        Db4o.configure().objectClass(BaseEntry.class).objectField("id")
                .indexed(true);
        Db4o.configure().objectClass(BaseFeed.class).objectField("id").indexed(
                true);
        Db4o.configure().objectClass(GDataAccount.class).objectField("name")
                .indexed(true);
        Db4o.configure().objectClass(ServerBaseFeed.class).cascadeOnDelete(
                false);
        Db4o.configure().objectClass(ServerBaseFeed.class)
                .maximumActivationDepth(0);
        Db4o.configure().objectClass(BaseFeed.class).minimumActivationDepth(1);
        Db4o.configure().objectClass(BaseEntry.class)
                .minimumActivationDepth(1);
        Db4o.configure().objectClass(BaseFeed.class).cascadeOnDelete(true);
        Db4o.configure().objectClass(DB4oEntry.class).cascadeOnDelete(true);
        Db4o.configure().objectClass(GDataAccount.class).cascadeOnDelete(true);
        Db4o.configure().weakReferences(this.weakReferences);
        Db4o.configure().optimizeNativeQueries(false);
        if (this.runAsServer) {
            this.server = Db4o.openServer(this.filePath, this.port);
            if (this.server == null)
                // Fixed typo in message: was "confiugred".
                throw new RuntimeException(
                        "Can't create server at configured destination -- " + this.filePath);
            this.server.grantAccess(this.user, this.password);
        } else {
            // Client mode: wrap a remote-connection handler in an
            // ObjectServer proxy so the rest of the class is mode-agnostic.
            InvocationHandler handler = new ObjectServerDecorator(this.user,
                    this.password, this.host, this.port);
            this.server = (ObjectServer) Proxy.newProxyInstance(this.getClass()
                    .getClassLoader(), new Class[] { ObjectServer.class },
                    handler);
        }
        PoolObjectFactory<ObjectContainer> factory = new ObjectContinerFactory(
                this.server);
        this.containerPool = new SimpleObjectPool<ObjectContainer>(
                this.containerPoolSize, factory);
        try {
            createAdminAccount();
        } catch (StorageException e) {
            LOG.error("Can not create admin account -- ", e);
        }
    }

    // Creates the admin account unless it already exists. The broad catch is
    // deliberate: getAccount throwing is treated as "account not present".
    private void createAdminAccount() throws StorageException {
        GDataAccount adminAccount = GDataAccount.createAdminAccount();
        visiteInitialize();
        Storage sto = this.getStorage();
        try {
            sto.getAccount(adminAccount.getName());
        } catch (Exception e) {
            this.getStorage().storeAccount(adminAccount);
        } finally {
            visiteDestroy();
        }
    }

    /**
     * @see org.apache.lucene.gdata.storage.StorageController#releaseId()
     */
    public String releaseId() {
        try {
            return this.idGenerator.getUID();
        } catch (InterruptedException e) {
            throw new StorageException("ID producer has been interrupted", e);
        }
    }

    /**
     * Binds a pooled Storage to the current thread at request start.
     *
     * @see org.apache.lucene.gdata.server.registry.ScopeVisitor#visiteInitialize()
     */
    public void visiteInitialize() {
        if (LOG.isInfoEnabled())
            LOG.info("Opened Storage -- request initialized");
        Storage storage = this.threadLocalStorage.get();
        if (storage != null) {
            LOG.warn("Storage already opened");
            return;
        }
        storage = new DB4oStorage(this.containerPool.aquire(), this);
        this.threadLocalStorage.set(storage);
    }

    /**
     * Returns the thread's container to the pool at request end.
     *
     * @see org.apache.lucene.gdata.server.registry.ScopeVisitor#visiteDestroy()
     */
    public void visiteDestroy() {
        Storage storage = this.threadLocalStorage.get();
        if (storage == null) {
            LOG.warn("no Storage opened -- threadlocal returned null");
            return;
        }
        this.containerPool.release(((DB4oStorage) storage).getContainer());
        this.threadLocalStorage.remove();
        if (LOG.isInfoEnabled())
            LOG.info("Closed Storage -- request destroyed");
    }

    // Pool factory producing one db4o client connection per pooled slot.
    private static class ObjectContinerFactory implements
            PoolObjectFactory<ObjectContainer> {
        private final ObjectServer server;

        ObjectContinerFactory(final ObjectServer server) {
            this.server = server;
        }

        /**
         * @see org.apache.lucene.gdata.utils.PoolObjectFactory#getInstance()
         */
        public ObjectContainer getInstance() {
            return this.server.openClient();
        }

        /**
         * @param type -
         *            object container to destroy (close)
         * @see org.apache.lucene.gdata.utils.PoolObjectFactory#destroyInstance(Object)
         */
        public void destroyInstance(ObjectContainer type) {
            type.close();
        }
    }

    /**
     * @return Returns the filePath.
     */
    public String getFilePath() {
        return this.filePath;
    }

    /**
     * @param filePath
     *            The filePath to set.
     */
    public void setFilePath(String filePath) {
        this.filePath = filePath;
    }

    /**
     * @return Returns the host.
     */
    public String getHost() {
        return this.host;
    }

    /**
     * @param host
     *            The host to set.
     */
    @Requiered
    public void setHost(String host) {
        this.host = host;
    }

    /**
     * @return Returns the password.
     */
    public String getPassword() {
        return this.password;
    }

    /**
     * @param password
     *            The password to set.
     */
    @Requiered
    public void setPassword(String password) {
        this.password = password;
    }

    /**
     * @return Returns the port.
     */
    public int getPort() {
        return this.port;
    }

    /**
     * @param port
     *            The port to set.
     */
    @Requiered
    public void setPort(int port) {
        this.port = port;
    }

    /**
     * @return Returns the runAsServer.
     */
    public boolean isRunAsServer() {
        return this.runAsServer;
    }

    /**
     * @param runAsServer
     *            The runAsServer to set.
     */
    @Requiered
    public void setRunAsServer(boolean runAsServer) {
        this.runAsServer = runAsServer;
    }

    /**
     * @return Returns the user.
     */
    public String getUser() {
        return this.user;
    }

    /**
     * @param user
     *            The user to set.
     */
    @Requiered
    public void setUser(String user) {
        this.user = user;
    }

    /**
     * @return Returns the weakReferences.
     */
    public boolean isUseWeakReferences() {
        return this.weakReferences;
    }

    /**
     * @param weakReferences
     *            The weakReferences to set.
     */
    @Requiered
    public void setUseWeakReferences(boolean weakReferences) {
        this.weakReferences = weakReferences;
    }

    /**
     * @return Returns the containerPoolSize.
     */
    public int getContainerPoolSize() {
        return this.containerPoolSize;
    }

    /**
     * @param containerPoolSize
     *            The containerPoolSize to set. Values below 1 are clamped to 1.
     */
    @Requiered
    public void setContainerPoolSize(int containerPoolSize) {
        this.containerPoolSize = containerPoolSize < 1 ? 1 : containerPoolSize;
    }

    /**
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder(this.getClass().getName())
                .append(" ");
        builder.append("host: ").append(this.host).append(" ");
        builder.append("port: ").append(this.port).append(" ");
        builder.append("pool size: ").append(this.containerPoolSize)
                .append(" ");
        builder.append("runs as server: ").append(this.runAsServer).append(" ");
        builder.append("use weak references: ").append(this.weakReferences)
                .append(" ");
        builder.append("user: ").append(this.user).append(" ");
        builder.append("password length: ").append(
                this.password == null ? "no password" : this.password.length())
                .append(" ");
        return builder.toString();
    }
}
apache-2.0
mikkopiu/kontena
server/app/services/rpc/container_exec_handler.rb
466
module Rpc
  # Relays the output/exit/error events of a container exec session to
  # subscribers listening on the session's MongoPubsub channel.
  class ContainerExecHandler
    include Logging

    def initialize(grid)
      @grid = grid
    end

    # Forward a chunk of stream data (e.g. stdout/stderr) for the session.
    def output(uuid, stream, chunk)
      broadcast(uuid, {stream: stream, chunk: chunk})
    end

    # Forward the session's exit code.
    def exit(uuid, exit_code)
      broadcast(uuid, {exit: exit_code})
    end

    # Forward an error raised during the session.
    def error(uuid, error)
      broadcast(uuid, {error: error})
    end

    private

    # All three events publish to the same per-session channel.
    def broadcast(uuid, payload)
      MongoPubsub.publish("container_exec:#{uuid}", payload)
    end
  end
end
apache-2.0
elitetestnik/Back-track
templates/yoo_vox/yoo_vox/html/com_content/article/default.php
5557
<?php
/**
* @package   yoo_vox Template
* @file      default.php
* @version   5.5.14 May 2011
* @author    YOOtheme http://www.yootheme.com
* @copyright Copyright (C) 2007 - 2011 YOOtheme GmbH
* @license   YOOtheme Proprietary Use License (http://www.yootheme.com/license)
*/

// no direct access
defined('_JEXEC') or die('Restricted access');

// True when the current user may edit all articles or at least their own.
$canEdit = ($this->user->authorize('com_content', 'edit', 'content', 'all') || $this->user->authorize('com_content', 'edit', 'content', 'own'));

?>
<div class="joomla <?php echo $this->params->get('pageclass_sfx')?>">

	<div class="article">

		<?php /* Optional page title, only when it differs from the article title. */ ?>
		<?php if ($this->params->get('show_page_title', 1) && $this->params->get('page_title') != $this->article->title) : ?>
		<h1 class="pagetitle">
			<?php echo $this->escape($this->params->get('page_title')); ?>
		</h1>
		<?php endif; ?>

		<?php if ($canEdit || $this->params->get('show_title') || $this->params->get('show_pdf_icon') || $this->params->get('show_print_icon') || $this->params->get('show_email_icon')) : ?>
		<div class="headline">

			<?php if ($this->params->get('show_title')) : ?>
			<h1 class="title">
				<?php if ($this->params->get('link_titles') && $this->article->readmore_link != '') : ?>
				<a href="<?php echo $this->article->readmore_link; ?>"><?php echo $this->escape($this->article->title); ?></a>
				<?php else : ?>
				<?php echo $this->escape($this->article->title); ?>
				<?php endif; ?>
			</h1>
			<?php endif; ?>

			<?php /* Edit/email/print/pdf icons are suppressed in print view. */ ?>
			<?php if (!$this->print) : ?>

				<?php if ($canEdit) : ?>
				<div class="icon edit">
					<?php echo JHTML::_('icon.edit', $this->article, $this->params, $this->access); ?>
				</div>
				<?php endif; ?>

				<?php if ($this->params->get('show_email_icon') || $this->params->get('show_print_icon') || $this->params->get('show_pdf_icon')) : ?>
				<div class="icons">
					<?php if ($this->params->get('show_email_icon')) : ?>
					<div class="icon email">
						<?php echo JHTML::_('icon.email', $this->article, $this->params, $this->access); ?>
					</div>
					<?php endif; ?>
					<?php if ($this->params->get('show_print_icon')) : ?>
					<div class="icon print">
						<?php echo JHTML::_('icon.print_popup', $this->article, $this->params, $this->access); ?>
					</div>
					<?php endif; ?>
					<?php if ($this->params->get('show_pdf_icon')) : ?>
					<div class="icon pdf">
						<?php echo JHTML::_('icon.pdf', $this->article, $this->params, $this->access); ?>
					</div>
					<?php endif; ?>
				</div>
				<?php endif; ?>

			<?php else : ?>
			<div class="icon printscreen">
				<?php echo JHTML::_('icon.print_screen', $this->article, $this->params, $this->access); ?>
			</div>
			<?php endif; ?>

		</div>
		<?php endif; ?>

		<?php if (!$this->params->get('show_intro')) : echo $this->article->event->afterDisplayTitle; endif; ?>

		<?php echo $this->article->event->beforeDisplayContent; ?>

		<?php /* Author / date / section / category meta line. */ ?>
		<?php if ($this->params->get('show_create_date') || ($this->params->get('show_author') && $this->article->author != "") || ($this->params->get('show_section') && $this->article->sectionid) || ($this->params->get('show_category') && $this->article->catid)) : ?>
		<p class="articleinfo">
			<?php if ($this->params->get('show_author') && ($this->article->author != "")) : ?>
			<span class="author">
				<?php JText::printf( 'Written by', ($this->article->created_by_alias ? $this->article->created_by_alias : $this->article->author) ); ?>
			</span>
			<?php endif; ?>
			<?php if ($this->params->get('show_author') && ($this->article->author != "") && $this->params->get('show_create_date')) echo '|'; ?>
			<?php if ($this->params->get('show_create_date')) : ?>
			<span class="created">
				<?php echo JHTML::_('date', $this->article->created, JText::_('DATE_FORMAT_LC3')) ?>
			</span>
			<?php endif; ?>
			<?php if ($this->params->get('show_create_date') || ($this->params->get('show_author') && $this->article->author != "")) echo '<br />' ?>
			<?php if (($this->params->get('show_section') && $this->article->sectionid) || ($this->params->get('show_category') && $this->article->catid)) : ?>
				<?php echo JText::_('Posted in '); ?>
				<?php if ($this->params->get('show_section') && $this->article->sectionid && isset($this->article->section)) : ?>
				<span>
					<?php if ($this->params->get('link_section')) : ?>
					<?php echo '<a href="'.JRoute::_(ContentHelperRoute::getSectionRoute($this->article->sectionid)).'">'; ?>
					<?php endif; ?>
					<?php echo $this->article->section; ?>
					<?php if ($this->params->get('link_section')) : ?>
					<?php echo '</a>'; ?>
					<?php endif; ?>
					<?php if ($this->params->get('show_category')) : ?>
					<?php echo ' - '; ?>
					<?php endif; ?>
				</span>
				<?php endif; ?>
				<?php if ($this->params->get('show_category') && $this->article->catid) : ?>
				<span>
					<?php if ($this->params->get('link_category')) : ?>
					<?php echo '<a href="'.JRoute::_(ContentHelperRoute::getCategoryRoute($this->article->catslug, $this->article->sectionid)).'">'; ?>
					<?php endif; ?>
					<?php echo $this->article->category; ?>
					<?php if ($this->params->get('link_category')) : ?>
					<?php echo '</a>'; ?>
					<?php endif; ?>
				</span>
				<?php endif; ?>
			<?php endif; ?>
		</p>
		<?php endif; ?>

		<?php /* Table of contents for multi-page articles, if present. */ ?>
		<?php if (isset ($this->article->toc)) : ?>
		<?php echo $this->article->toc; ?>
		<?php endif; ?>

		<?php echo $this->article->text; ?>

		<?php echo $this->article->event->afterDisplayContent; ?>

	</div>
</div>
apache-2.0
bassages/homecontrol
src/main/java/nl/homeserver/klimaat/KlimaatDto.java
312
package nl.homeserver.klimaat;

import java.math.BigDecimal;
import java.time.LocalDateTime;

import lombok.Getter;
import lombok.Setter;

/**
 * Data transfer object carrying a single climate measurement
 * (field names are Dutch: datumtijd = date/time, temperatuur = temperature,
 * luchtvochtigheid = humidity). Getters/setters are generated by Lombok.
 */
@Getter
@Setter
class KlimaatDto {

    // Identifier of the measurement.
    private long id;
    // Moment the measurement was taken.
    private LocalDateTime datumtijd;
    // Measured temperature (units not visible here — presumably degrees Celsius; confirm).
    private BigDecimal temperatuur;
    // Measured humidity (presumably relative humidity in percent; confirm).
    private BigDecimal luchtvochtigheid;
}
apache-2.0
mbogoevici/spring-cloud-stream
spring-cloud-stream-reactive/src/test/java/org/springframework/cloud/stream/reactive/StreamListenerReactiveReturnWithFailureTests.java
6972
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.stream.reactive;

import java.util.Arrays;
import java.util.Collection;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import reactor.core.publisher.Flux;
import rx.Observable;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.cloud.stream.annotation.EnableBinding;
import org.springframework.cloud.stream.annotation.Input;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.cloud.stream.annotation.StreamListener;
import org.springframework.cloud.stream.messaging.Processor;
import org.springframework.cloud.stream.test.binder.MessageCollector;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.messaging.Message;
import org.springframework.messaging.handler.annotation.SendTo;
import org.springframework.messaging.support.MessageBuilder;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Parameterized test verifying that a reactive {@code @StreamListener}
 * returning a Flux (Reactor) or Observable (RxJava 1) keeps processing
 * subsequent messages after an element in the stream throws: each config
 * class below uppercases the payload and throws on the payload "fail".
 * The eight config classes cover the combinations of Reactor vs RxJava 1,
 * {@code @Input}/{@code @Output} annotations vs
 * {@code @StreamListener(INPUT)}/{@code @SendTo(OUTPUT)}.
 *
 * @author Marius Bogoevici
 * @author Ilayaperumal Gopinathan
 */
@RunWith(Parameterized.class)
public class StreamListenerReactiveReturnWithFailureTests {

	// The @EnableBinding configuration class under test for this run.
	private Class<?> configClass;

	public StreamListenerReactiveReturnWithFailureTests(Class<?> configClass) {
		this.configClass = configClass;
	}

	// One run per handler-declaration style (4 Reactor + 4 RxJava 1 variants).
	@Parameterized.Parameters
	public static Collection InputConfigs() {
		return Arrays.asList(new Class[] {ReactorTestReturnWithFailure1.class,
				ReactorTestReturnWithFailure2.class, ReactorTestReturnWithFailure3.class,
				ReactorTestReturnWithFailure4.class, RxJava1TestReturnWithFailure1.class,
				RxJava1TestReturnWithFailure2.class, RxJava1TestReturnWithFailure3.class,
				RxJava1TestReturnWithFailure4.class});
	}

	// Sends a unique plain-text payload and asserts the uppercased payload
	// arrives on the output channel within 1 second.
	private static void sendMessageAndValidate(ConfigurableApplicationContext context)
			throws InterruptedException {
		@SuppressWarnings("unchecked")
		Processor processor = context.getBean(Processor.class);
		String sentPayload = "hello " + UUID.randomUUID().toString();
		processor.input().send(MessageBuilder.withPayload(sentPayload)
				.setHeader("contentType", "text/plain").build());
		MessageCollector messageCollector = context.getBean(MessageCollector.class);
		Message<?> result = messageCollector.forChannel(processor.output())
				.poll(1000, TimeUnit.MILLISECONDS);
		assertThat(result).isNotNull();
		assertThat(result.getPayload()).isEqualTo(sentPayload.toUpperCase());
	}

	// Sends the "fail" payload, which makes every handler below throw.
	private static void sendFailingMessage(ConfigurableApplicationContext context)
			throws InterruptedException {
		@SuppressWarnings("unchecked")
		Processor processor = context.getBean(Processor.class);
		processor.input().send(MessageBuilder.withPayload("fail")
				.setHeader("contentType", "text/plain").build());
	}

	// Alternates good and failing messages to prove failures do not stop the
	// reactive pipeline from handling later messages.
	@Test
	public void testReturnWithFailure() throws Exception {
		ConfigurableApplicationContext context =
				SpringApplication.run(this.configClass, "--server.port=0");
		sendMessageAndValidate(context);
		sendFailingMessage(context);
		sendMessageAndValidate(context);
		sendFailingMessage(context);
		sendMessageAndValidate(context);
		context.close();
	}

	// Reactor, @Input/@Output on parameter and return.
	@EnableBinding(Processor.class)
	@EnableAutoConfiguration
	public static class ReactorTestReturnWithFailure1 {

		@StreamListener
		public @Output(Processor.OUTPUT) Flux<String> receive(
				@Input(Processor.INPUT) Flux<String> input) {
			return input.map(m -> {
				if (!m.equals("fail")) {
					return m.toUpperCase();
				}
				else {
					throw new RuntimeException();
				}
			});
		}
	}

	// Reactor, input bound via @StreamListener value, @Output on return.
	@EnableBinding(Processor.class)
	@EnableAutoConfiguration
	public static class ReactorTestReturnWithFailure2 {

		@StreamListener(Processor.INPUT)
		public @Output(Processor.OUTPUT) Flux<String> receive(Flux<String> input) {
			return input.map(m -> {
				if (!m.equals("fail")) {
					return m.toUpperCase();
				}
				else {
					throw new RuntimeException();
				}
			});
		}
	}

	// Reactor, input bound via @StreamListener value, @SendTo on return.
	@EnableBinding(Processor.class)
	@EnableAutoConfiguration
	public static class ReactorTestReturnWithFailure3 {

		@StreamListener(Processor.INPUT)
		public @SendTo(Processor.OUTPUT) Flux<String> receive(Flux<String> input) {
			return input.map(m -> {
				if (!m.equals("fail")) {
					return m.toUpperCase();
				}
				else {
					throw new RuntimeException();
				}
			});
		}
	}

	// Reactor, @Input on parameter, @SendTo on return.
	@EnableBinding(Processor.class)
	@EnableAutoConfiguration
	public static class ReactorTestReturnWithFailure4 {

		@StreamListener
		public @SendTo(Processor.OUTPUT) Flux<String> receive(
				@Input(Processor.INPUT) Flux<String> input) {
			return input.map(m -> {
				if (!m.equals("fail")) {
					return m.toUpperCase();
				}
				else {
					throw new RuntimeException();
				}
			});
		}
	}

	// RxJava 1, @Input/@Output on parameter and return.
	@EnableBinding(Processor.class)
	@EnableAutoConfiguration
	public static class RxJava1TestReturnWithFailure1 {

		@StreamListener
		public @Output(Processor.OUTPUT) Observable<String> receive(
				@Input(Processor.INPUT) Observable<String> input) {
			return input.map(m -> {
				if (!m.equals("fail")) {
					return m.toUpperCase();
				}
				else {
					throw new RuntimeException();
				}
			});
		}
	}

	// RxJava 1, @Input on parameter, @SendTo on return.
	@EnableBinding(Processor.class)
	@EnableAutoConfiguration
	public static class RxJava1TestReturnWithFailure2 {

		@StreamListener
		public @SendTo(Processor.OUTPUT) Observable<String> receive(
				@Input(Processor.INPUT) Observable<String> input) {
			return input.map(m -> {
				if (!m.equals("fail")) {
					return m.toUpperCase();
				}
				else {
					throw new RuntimeException();
				}
			});
		}
	}

	// RxJava 1, input bound via @StreamListener value, @SendTo on return.
	@EnableBinding(Processor.class)
	@EnableAutoConfiguration
	public static class RxJava1TestReturnWithFailure3 {

		@StreamListener(Processor.INPUT)
		public @SendTo(Processor.OUTPUT) Observable<String> receive(
				Observable<String> input) {
			return input.map(m -> {
				if (!m.equals("fail")) {
					return m.toUpperCase();
				}
				else {
					throw new RuntimeException();
				}
			});
		}
	}

	// RxJava 1, input bound via @StreamListener value, @Output on return.
	@EnableBinding(Processor.class)
	@EnableAutoConfiguration
	public static class RxJava1TestReturnWithFailure4 {

		@StreamListener(Processor.INPUT)
		public @Output(Processor.OUTPUT) Observable<String> receive(
				Observable<String> input) {
			return input.map(m -> {
				if (!m.equals("fail")) {
					return m.toUpperCase();
				}
				else {
					throw new RuntimeException();
				}
			});
		}
	}
}
apache-2.0
hongyangAndroid/Highlight
highlight/src/main/java/zhy/com/highlight/shape/BaseLightShape.java
1793
package zhy.com.highlight.shape; import android.graphics.Bitmap; import android.graphics.RectF; import zhy.com.highlight.HighLight; import zhy.com.highlight.view.HightLightView; /** * <pre> * 高亮形状的超类 * Created by isanwenyu on 2016/10/26. * Copyright (c) 2016 isanwenyu@163.com. All rights reserved. * </pre> */ public abstract class BaseLightShape implements HighLight.LightShape{ protected float dx;//水平方向偏移 protected float dy;//垂直方向偏移 protected float blurRadius=15;//模糊半径 默认15 public BaseLightShape() { } /** * @param dx 水平方向偏移 * @param dy 垂直方向偏移 */ public BaseLightShape(float dx,float dy) { this.dx = dx; this.dy = dy; } /** * @param dx 水平方向偏移 * @param dy 垂直方向偏移 * @param blurRadius 模糊半径 默认15px 0不模糊 */ public BaseLightShape(float dx, float dy, float blurRadius) { this.dx = dx; this.dy = dy; this.blurRadius = blurRadius; } @Override public void shape(Bitmap bitmap, HighLight.ViewPosInfo viewPosInfo) { resetRectF4Shape(viewPosInfo.rectF,dx,dy); drawShape(bitmap,viewPosInfo); } /** * reset RectF for Shape by dx and dy. * @param viewPosInfoRectF * @param dx * @param dy */ protected abstract void resetRectF4Shape(RectF viewPosInfoRectF, float dx, float dy); /** * draw shape into bitmap * @param bitmap * @param viewPosInfo * @see zhy.com.highlight.view.HightLightView#addViewForEveryTip(HighLight.ViewPosInfo) * @see HightLightView#buildMask() */ protected abstract void drawShape(Bitmap bitmap, HighLight.ViewPosInfo viewPosInfo); }
apache-2.0
zabil/gocd
plugin-infra/go-plugin-access/test/com/thoughtworks/go/plugin/access/packagematerial/JsonMessageHandler1_0Test.java
26551
/*************************GO-LICENSE-START********************************* * Copyright 2014 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.go.plugin.access.packagematerial; import com.thoughtworks.go.plugin.api.config.Property; import com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration; import com.thoughtworks.go.plugin.api.material.packagerepository.PackageMaterialProperty; import com.thoughtworks.go.plugin.api.material.packagerepository.PackageRevision; import com.thoughtworks.go.plugin.api.material.packagerepository.RepositoryConfiguration; import com.thoughtworks.go.plugin.api.response.Result; import com.thoughtworks.go.plugin.api.response.validation.ValidationError; import com.thoughtworks.go.plugin.api.response.validation.ValidationResult; import org.junit.Before; import org.junit.Test; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.*; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsNull.nullValue; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; public class JsonMessageHandler1_0Test { public static final String DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"; private JsonMessageHandler1_0 messageHandler; private RepositoryConfiguration repositoryConfiguration; private 
com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration packageConfiguration; @Before public void setUp() throws Exception { messageHandler = new JsonMessageHandler1_0(); repositoryConfiguration = new RepositoryConfiguration(); repositoryConfiguration.add(new PackageMaterialProperty("key-one", "value-one")); repositoryConfiguration.add(new PackageMaterialProperty("key-two", "value-two")); packageConfiguration = new PackageConfiguration(); packageConfiguration.add(new PackageMaterialProperty("key-three", "value-three")); packageConfiguration.add(new PackageMaterialProperty("key-four", "value-four")); } @Test public void shouldBuildRepositoryConfigurationFromResponseBody() throws Exception { String responseBody = "{" + "\"key-one\":{}," + "\"key-two\":{\"default-value\":\"two\",\"part-of-identity\":true,\"secure\":true,\"required\":true,\"display-name\":\"display-two\",\"display-order\":\"1\"}," + "\"key-three\":{\"default-value\":\"three\",\"part-of-identity\":false,\"secure\":false,\"required\":false,\"display-name\":\"display-three\",\"display-order\":\"2\"}" + "}"; RepositoryConfiguration repositoryConfiguration = messageHandler.responseMessageForRepositoryConfiguration(responseBody); assertPropertyConfiguration((PackageMaterialProperty) repositoryConfiguration.get("key-one"), "key-one", null, true, true, false, "", 0); assertPropertyConfiguration((PackageMaterialProperty) repositoryConfiguration.get("key-two"), "key-two", "two", true, true, true, "display-two", 1); assertPropertyConfiguration((PackageMaterialProperty) repositoryConfiguration.get("key-three"), "key-three", "three", false, false, false, "display-three", 2); } @Test public void shouldBuildPackageConfigurationFromResponseBody() throws Exception { String responseBody = "{" + "\"key-one\":{}," + "\"key-two\":{\"default-value\":\"two\",\"part-of-identity\":true,\"secure\":true,\"required\":true,\"display-name\":\"display-two\",\"display-order\":\"1\"}," + 
"\"key-three\":{\"default-value\":\"three\",\"part-of-identity\":false,\"secure\":false,\"required\":false,\"display-name\":\"display-three\",\"display-order\":\"2\"}" + "}"; com.thoughtworks.go.plugin.api.material.packagerepository.PackageConfiguration packageConfiguration = messageHandler.responseMessageForPackageConfiguration(responseBody); assertPropertyConfiguration((PackageMaterialProperty) packageConfiguration.get("key-one"), "key-one", null, true, true, false, "", 0); assertPropertyConfiguration((PackageMaterialProperty) packageConfiguration.get("key-two"), "key-two", "two", true, true, true, "display-two", 1); assertPropertyConfiguration((PackageMaterialProperty) packageConfiguration.get("key-three"), "key-three", "three", false, false, false, "display-three", 2); } @Test public void shouldBuildRequestBodyForCheckRepositoryConfigurationValidRequest() throws Exception { String requestMessage = messageHandler.requestMessageForIsRepositoryConfigurationValid(repositoryConfiguration); assertThat(requestMessage, is("{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}}}")); } @Test public void shouldBuildValidationResultFromCheckRepositoryConfigurationValidResponse() throws Exception { String responseBody = "[{\"key\":\"key-one\",\"message\":\"incorrect value\"},{\"message\":\"general error\"}]"; ValidationResult validationResult = messageHandler.responseMessageForIsRepositoryConfigurationValid(responseBody); assertValidationError(validationResult.getErrors().get(0), "key-one", "incorrect value"); assertValidationError(validationResult.getErrors().get(1), "", "general error"); } @Test public void shouldBuildSuccessValidationResultFromCheckRepositoryConfigurationValidResponse() throws Exception { assertThat(messageHandler.responseMessageForIsRepositoryConfigurationValid("").isSuccessful(), is(true)); assertThat(messageHandler.responseMessageForIsRepositoryConfigurationValid(null).isSuccessful(), is(true)); } 
@Test public void shouldBuildRequestBodyForCheckPackageConfigurationValidRequest() throws Exception { String requestMessage = messageHandler.requestMessageForIsPackageConfigurationValid(packageConfiguration, repositoryConfiguration); assertThat(requestMessage, is("{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}},\"package-configuration\":{\"key-three\":{\"value\":\"value-three\"},\"key-four\":{\"value\":\"value-four\"}}}")); } @Test public void shouldBuildValidationResultForCheckRepositoryConfigurationValidResponse() throws Exception { String responseBody = "[{\"key\":\"key-one\",\"message\":\"incorrect value\"},{\"message\":\"general error\"}]"; ValidationResult validationResult = messageHandler.responseMessageForIsPackageConfigurationValid(responseBody); assertValidationError(validationResult.getErrors().get(0), "key-one", "incorrect value"); assertValidationError(validationResult.getErrors().get(1), "", "general error"); } @Test public void shouldBuildRequestBodyForCheckRepositoryConnectionRequest() throws Exception { String requestMessage = messageHandler.requestMessageForCheckConnectionToRepository(repositoryConfiguration); assertThat(requestMessage, is("{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}}}")); } @Test public void shouldBuildSuccessResultFromCheckRepositoryConnectionResponse() throws Exception { String responseBody = "{\"status\":\"success\",messages=[\"message-one\",\"message-two\"]}"; Result result = messageHandler.responseMessageForCheckConnectionToRepository(responseBody); assertSuccessResult(result, Arrays.asList("message-one", "message-two")); } @Test public void shouldBuildFailureResultFromCheckRepositoryConnectionResponse() throws Exception { String responseBody = "{\"status\":\"failure\",messages=[\"message-one\",\"message-two\"]}"; Result result = messageHandler.responseMessageForCheckConnectionToRepository(responseBody); 
assertFailureResult(result, Arrays.asList("message-one", "message-two")); } @Test public void shouldHandleNullMessagesForCheckRepositoryConnectionResponse() throws Exception { assertSuccessResult(messageHandler.responseMessageForCheckConnectionToRepository("{\"status\":\"success\"}"), new ArrayList<String>()); assertFailureResult(messageHandler.responseMessageForCheckConnectionToRepository("{\"status\":\"failure\"}"), new ArrayList<String>()); } @Test public void shouldBuildRequestBodyForCheckPackageConnectionRequest() throws Exception { String requestMessage = messageHandler.requestMessageForCheckConnectionToPackage(packageConfiguration, repositoryConfiguration); assertThat(requestMessage, is("{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}},\"package-configuration\":{\"key-three\":{\"value\":\"value-three\"},\"key-four\":{\"value\":\"value-four\"}}}")); } @Test public void shouldBuildSuccessResultFromCheckPackageConnectionResponse() throws Exception { String responseBody = "{\"status\":\"success\",messages=[\"message-one\",\"message-two\"]}"; Result result = messageHandler.responseMessageForCheckConnectionToPackage(responseBody); assertSuccessResult(result, Arrays.asList("message-one", "message-two")); } @Test public void shouldBuildFailureResultFromCheckPackageConnectionResponse() throws Exception { String responseBody = "{\"status\":\"failure\",messages=[\"message-one\",\"message-two\"]}"; Result result = messageHandler.responseMessageForCheckConnectionToPackage(responseBody); assertFailureResult(result, Arrays.asList("message-one", "message-two")); } @Test public void shouldHandleNullMessagesForCheckPackageConnectionResponse() throws Exception { assertSuccessResult(messageHandler.responseMessageForCheckConnectionToPackage("{\"status\":\"success\"}"), new ArrayList<String>()); assertFailureResult(messageHandler.responseMessageForCheckConnectionToPackage("{\"status\":\"failure\"}"), new 
ArrayList<String>()); } @Test public void shouldBuildRequestBodyForLatestRevisionRequest() throws Exception { String requestBody = messageHandler.requestMessageForLatestRevision(packageConfiguration, repositoryConfiguration); assertThat(requestBody, is("{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}},\"package-configuration\":{\"key-three\":{\"value\":\"value-three\"},\"key-four\":{\"value\":\"value-four\"}}}")); } @Test public void shouldBuildPackageRevisionFromLatestRevisionResponse() throws Exception { String responseBody = "{\"revision\":\"abc.rpm\",\"timestamp\":\"2011-07-14T19:43:37.100Z\",\"user\":\"some-user\",\"revisionComment\":\"comment\"," + "\"trackbackUrl\":\"http:\\\\localhost:9999\",\"data\":{\"dataKeyOne\":\"data-value-one\",\"dataKeyTwo\":\"data-value-two\"}}"; PackageRevision packageRevision = messageHandler.responseMessageForLatestRevision(responseBody); assertPackageRevision(packageRevision, "abc.rpm", "some-user", "2011-07-14T19:43:37.100Z", "comment", "http:\\localhost:9999"); } @Test public void shouldThrowExceptionWhenAttemptingToGetLatestRevisionFromEmptyResponse(){ assertThat(getErrorMessageFromLatestRevision(""), is("Empty response body")); assertThat(getErrorMessageFromLatestRevision("{}"), is("Empty response body")); assertThat(getErrorMessageFromLatestRevision(null), is("Empty response body")); } @Test public void shouldBuildRequestBodyForLatestRevisionSinceRequest() throws Exception { Date timestamp = new SimpleDateFormat(DATE_FORMAT).parse("2011-07-13T19:43:37.100Z"); Map data = new LinkedHashMap(); data.put("dataKeyOne", "data-value-one"); data.put("dataKeyTwo", "data-value-two"); PackageRevision previouslyKnownRevision = new PackageRevision("abc.rpm", timestamp, "someuser", "comment", null, data); String requestBody = messageHandler.requestMessageForLatestRevisionSince(packageConfiguration, repositoryConfiguration, previouslyKnownRevision); String expectedValue = 
"{\"repository-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}}," + "\"package-configuration\":{\"key-three\":{\"value\":\"value-three\"},\"key-four\":{\"value\":\"value-four\"}}," + "\"previous-revision\":{\"revision\":\"abc.rpm\",\"timestamp\":\"2011-07-13T19:43:37.100Z\",\"data\":{\"dataKeyOne\":\"data-value-one\",\"dataKeyTwo\":\"data-value-two\"}}}"; assertThat(requestBody, is(expectedValue)); } @Test public void shouldBuildPackageRevisionFromLatestRevisionSinceResponse() throws Exception { String responseBody = "{\"revision\":\"abc.rpm\",\"timestamp\":\"2011-07-14T19:43:37.100Z\",\"user\":\"some-user\",\"revisionComment\":\"comment\"," + "\"trackbackUrl\":\"http:\\\\localhost:9999\",\"data\":{\"dataKeyOne\":\"data-value-one\",\"dataKeyTwo\":\"data-value-two\"}}"; PackageRevision packageRevision = messageHandler.responseMessageForLatestRevisionSince(responseBody); assertPackageRevision(packageRevision, "abc.rpm", "some-user", "2011-07-14T19:43:37.100Z", "comment", "http:\\localhost:9999"); } @Test public void shouldBuildNullPackageRevisionFromLatestRevisionSinceWhenEmptyResponse() throws Exception { assertThat(messageHandler.responseMessageForLatestRevisionSince(""), nullValue()); assertThat(messageHandler.responseMessageForLatestRevisionSince(null), nullValue()); assertThat(messageHandler.responseMessageForLatestRevisionSince("{}"), nullValue()); } @Test public void shouldValidateIncorrectJsonResponseForRepositoryConfiguration() { assertThat(errorMessageForRepositoryConfiguration(""), is("Unable to de-serialize json response. Empty response body")); assertThat(errorMessageForRepositoryConfiguration(null), is("Unable to de-serialize json response. Empty response body")); assertThat(errorMessageForRepositoryConfiguration("[{\"key-one\":\"value\"},{\"key-two\":\"value\"}]"), is("Unable to de-serialize json response. 
Repository configuration should be returned as a map")); assertThat(errorMessageForRepositoryConfiguration("{\"\":{}}"), is("Unable to de-serialize json response. Repository configuration key cannot be empty")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":[{}]}"), is("Unable to de-serialize json response. Repository configuration properties for key 'key' should be represented as a Map")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"part-of-identity\":\"true\"}}"), is("Unable to de-serialize json response. 'part-of-identity' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"part-of-identity\":100}}"), is("Unable to de-serialize json response. 'part-of-identity' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"part-of-identity\":\"\"}}"), is("Unable to de-serialize json response. 'part-of-identity' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"secure\":\"true\"}}"), is("Unable to de-serialize json response. 'secure' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"secure\":100}}"), is("Unable to de-serialize json response. 'secure' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"secure\":\"\"}}"), is("Unable to de-serialize json response. 'secure' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"required\":\"true\"}}"), is("Unable to de-serialize json response. 'required' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"required\":100}}"), is("Unable to de-serialize json response. 
'required' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"required\":\"\"}}"), is("Unable to de-serialize json response. 'required' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-name\":true}}"), is("Unable to de-serialize json response. 'display-name' property for key 'key' should be of type string")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-name\":100}}"), is("Unable to de-serialize json response. 'display-name' property for key 'key' should be of type string")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-order\":true}}"), is("Unable to de-serialize json response. 'display-order' property for key 'key' should be of type integer")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-order\":10.0}}"), is("Unable to de-serialize json response. 'display-order' property for key 'key' should be of type integer")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-order\":\"\"}}"), is("Unable to de-serialize json response. 'display-order' property for key 'key' should be of type integer")); } @Test public void shouldValidateIncorrectJsonResponseForPackageConfiguration() { assertThat(errorMessageForPackageConfiguration(""), is("Unable to de-serialize json response. Empty response body")); assertThat(errorMessageForPackageConfiguration(null), is("Unable to de-serialize json response. Empty response body")); assertThat(errorMessageForPackageConfiguration("[{\"key-one\":\"value\"},{\"key-two\":\"value\"}]"), is("Unable to de-serialize json response. Package configuration should be returned as a map")); assertThat(errorMessageForPackageConfiguration("{\"\":{}}"), is("Unable to de-serialize json response. 
Package configuration key cannot be empty")); assertThat(errorMessageForPackageConfiguration("{\"key\":[{}]}"), is("Unable to de-serialize json response. Package configuration properties for key 'key' should be represented as a Map")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"part-of-identity\":\"true\"}}"), is("Unable to de-serialize json response. 'part-of-identity' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"part-of-identity\":100}}"), is("Unable to de-serialize json response. 'part-of-identity' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"part-of-identity\":\"\"}}"), is("Unable to de-serialize json response. 'part-of-identity' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"secure\":\"true\"}}"), is("Unable to de-serialize json response. 'secure' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"secure\":100}}"), is("Unable to de-serialize json response. 'secure' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"secure\":\"\"}}"), is("Unable to de-serialize json response. 'secure' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"required\":\"true\"}}"), is("Unable to de-serialize json response. 'required' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"required\":100}}"), is("Unable to de-serialize json response. 'required' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"required\":\"\"}}"), is("Unable to de-serialize json response. 
'required' property for key 'key' should be of type boolean")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-name\":true}}"), is("Unable to de-serialize json response. 'display-name' property for key 'key' should be of type string")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-name\":100}}"), is("Unable to de-serialize json response. 'display-name' property for key 'key' should be of type string")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-order\":true}}"), is("Unable to de-serialize json response. 'display-order' property for key 'key' should be of type integer")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-order\":10.0}}"), is("Unable to de-serialize json response. 'display-order' property for key 'key' should be of type integer")); assertThat(errorMessageForRepositoryConfiguration("{\"key\":{\"display-order\":\"\"}}"), is("Unable to de-serialize json response. 'display-order' property for key 'key' should be of type integer")); } @Test public void shouldValidateIncorrectJsonForPackageRevision() { assertThat(errorMessageForPackageRevision("[{\"revision\":\"abc.rpm\"}]"), is("Unable to de-serialize json response. Package revision should be returned as a map")); assertThat(errorMessageForPackageRevision("{\"revision\":{}}"), is("Unable to de-serialize json response. Package revision should be of type string")); assertThat(errorMessageForPackageRevision("{\"revisionComment\":{}}"), is("Unable to de-serialize json response. Package revision comment should be of type string")); assertThat(errorMessageForPackageRevision("{\"user\":{}}"), is("Unable to de-serialize json response. Package revision user should be of type string")); assertThat(errorMessageForPackageRevision("{\"timestamp\":{}}"), is("Unable to de-serialize json response. 
Package revision timestamp should be of type string with format yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")); assertThat(errorMessageForPackageRevision("{\"timestamp\":\"12-01-2014\"}"), is("Unable to de-serialize json response. Package revision timestamp should be of type string with format yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")); } private void assertPackageRevision(PackageRevision packageRevision, String revision, String user, String timestamp, String comment, String trackbackUrl) throws ParseException { assertThat(packageRevision.getRevision(), is(revision)); assertThat(packageRevision.getUser(), is(user)); assertThat(packageRevision.getTimestamp(), is(new SimpleDateFormat(DATE_FORMAT).parse(timestamp))); assertThat(packageRevision.getRevisionComment(), is(comment)); assertThat(packageRevision.getTrackbackUrl(), is(trackbackUrl)); assertThat(packageRevision.getData().size(), is(2)); assertThat(packageRevision.getDataFor("dataKeyOne"), is("data-value-one")); assertThat(packageRevision.getDataFor("dataKeyTwo"), is("data-value-two")); } private void assertSuccessResult(Result result, List<String> messages) { assertThat(result.isSuccessful(), is(true)); assertThat(result.getMessages(), is(messages)); } private void assertFailureResult(Result result, List<String> messages) { assertThat(result.isSuccessful(), is(false)); assertThat(result.getMessages(), is(messages)); } private void assertValidationError(ValidationError validationError, String expectedKey, String expectedMessage) { assertThat(validationError.getKey(), is(expectedKey)); assertThat(validationError.getMessage(), is(expectedMessage)); } private void assertPropertyConfiguration(PackageMaterialProperty property, String key, String value, boolean partOfIdentity, boolean required, boolean secure, String displayName, int displayOrder) { assertThat(property.getKey(), is(key)); assertThat(property.getValue(), is(value)); assertThat(property.getOption(Property.PART_OF_IDENTITY), is(partOfIdentity)); 
assertThat(property.getOption(Property.REQUIRED), is(required)); assertThat(property.getOption(Property.SECURE), is(secure)); assertThat(property.getOption(Property.DISPLAY_NAME), is(displayName)); assertThat(property.getOption(Property.DISPLAY_ORDER), is(displayOrder)); } private String errorMessageForRepositoryConfiguration(String message) { try { messageHandler.responseMessageForRepositoryConfiguration(message); fail("should have thrown exception"); } catch (Exception e) { return e.getMessage(); } return null; } private String errorMessageForPackageConfiguration(String message) { try { messageHandler.responseMessageForPackageConfiguration(message); fail("should have thrown exception"); } catch (Exception e) { return e.getMessage(); } return null; } private String errorMessageForPackageRevision(String message) { try { messageHandler.toPackageRevision(message); fail("should have thrown exception"); } catch (Exception e) { return e.getMessage(); } return null; } private String getErrorMessageFromLatestRevision(String responseBody) { try{ messageHandler.responseMessageForLatestRevision(responseBody); fail("Should throw exception"); } catch( RuntimeException e){ return e.getMessage(); } return null; } }
apache-2.0
asimshankar/tensorflow
tensorflow/core/common_runtime/eager/kernel_and_device_test.cc
4875
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/core/common_runtime/eager/kernel_and_device.h" #include <memory> #include <vector> #include "absl/memory/memory.h" #include "tensorflow/cc/client/client_session.h" #include "tensorflow/cc/framework/ops.h" #include "tensorflow/cc/framework/scope.h" #include "tensorflow/cc/ops/standard_ops.h" #include "tensorflow/core/common_runtime/device_factory.h" #include "tensorflow/core/common_runtime/device_mgr.h" #include "tensorflow/core/common_runtime/eager/attr_builder.h" #include "tensorflow/core/common_runtime/function.h" #include "tensorflow/core/platform/env.h" #include "tensorflow/core/platform/test.h" #include "tensorflow/core/platform/test_benchmark.h" #include "tensorflow/core/public/version.h" namespace tensorflow { namespace { class TestEnv { public: TestEnv() : flib_def_(OpRegistry::Global(), {}) { std::vector<std::unique_ptr<Device>> devices; devices.push_back( DeviceFactory::NewDevice("CPU", {}, "/job:a/replica:0/task:0")); device_mgr_ = absl::make_unique<DeviceMgr>(std::move(devices)); flib_runtime_ = NewFunctionLibraryRuntime( device_mgr_.get(), Env::Default(), device_mgr_->ListDevices()[0], TF_GRAPH_DEF_VERSION, &flib_def_, nullptr, {}, nullptr); } FunctionLibraryRuntime* function_library_runtime() const { return flib_runtime_.get(); } private: FunctionLibraryDefinition flib_def_; 
std::unique_ptr<DeviceMgr> device_mgr_; std::unique_ptr<FunctionLibraryRuntime> flib_runtime_; }; void BM_CreateGraph(int iters) { for (int i = 0; i < iters; ++i) { Scope root = Scope::NewRootScope(); auto C = ops::Const(root, {{1.0, 2.0}, {3.0, 4.0}}); auto M = ops::MatMul(root, C, C); TF_CHECK_OK(root.status()); } } BENCHMARK(BM_CreateGraph); void BM_RunGraph(int iters) { tensorflow::testing::StopTiming(); Scope root = Scope::NewRootScope(); auto C = ops::Const(root, {{1.0, 2.0}, {3.0, 4.0}}); auto M = ops::MatMul(root, C, C); SessionOptions opts; opts.config.set_inter_op_parallelism_threads(1); opts.config.set_intra_op_parallelism_threads(1); ClientSession sess(root, opts); std::vector<Tensor> outputs; tensorflow::testing::StartTiming(); for (int i = 0; i < iters; ++i) { outputs.clear(); TF_CHECK_OK(sess.Run({M}, &outputs)); } } BENCHMARK(BM_RunGraph); void BM_CreateAndDestroySession(int iters) { tensorflow::testing::StopTiming(); Scope root = Scope::NewRootScope(); auto C = ops::Const(root, {{1.0, 2.0}, {3.0, 4.0}}); auto M = ops::MatMul(root, C, C); tensorflow::testing::StartTiming(); for (int i = 0; i < iters; ++i) { ClientSession sess(root); } } BENCHMARK(BM_CreateAndDestroySession); void BM_KernelAndDeviceInit(int iters) { tensorflow::testing::StopTiming(); NodeDef ndef(AttrBuilder("MatMul") .Set("T", DT_FLOAT) .Set("transpose_a", false) .Set("transpose_b", false) .NumInputs(2) .BuildNodeDef()); TestEnv env; KernelAndDevice k(nullptr, false); tensorflow::testing::StartTiming(); for (int i = 0; i < iters; ++i) { TF_CHECK_OK(KernelAndDevice::Init(ndef, env.function_library_runtime(), nullptr, &k)); } } BENCHMARK(BM_KernelAndDeviceInit); void BM_KernelAndDeviceRun(int iters) { tensorflow::testing::StopTiming(); Tensor t(Input({{1.0f, 2.0f}, {3.0f, 4.0f}}).tensor()); std::vector<Tensor> inputs; inputs.push_back(t); inputs.push_back(t); std::vector<Tensor> outputs; NodeDef ndef(AttrBuilder("MatMul") .Set("T", DT_FLOAT) .Set("transpose_a", false) 
.Set("transpose_b", false) .NumInputs(inputs.size()) .BuildNodeDef()); TestEnv env; KernelAndDevice kernel(nullptr, false); TF_CHECK_OK(KernelAndDevice::Init(ndef, env.function_library_runtime(), nullptr, &kernel)); tensorflow::testing::StartTiming(); for (int i = 0; i < iters; ++i) { TF_CHECK_OK(kernel.Run(&inputs, &outputs, nullptr, nullptr, nullptr)); } } BENCHMARK(BM_KernelAndDeviceRun); } // namespace } // namespace tensorflow
apache-2.0
gohugoio/hugo
parser/pageparser/item_test.go
1371
// Copyright 2019 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package pageparser import ( "testing" qt "github.com/frankban/quicktest" ) func TestItemValTyped(t *testing.T) { c := qt.New(t) c.Assert(Item{Val: []byte("3.14")}.ValTyped(), qt.Equals, float64(3.14)) c.Assert(Item{Val: []byte(".14")}.ValTyped(), qt.Equals, float64(.14)) c.Assert(Item{Val: []byte("314")}.ValTyped(), qt.Equals, 314) c.Assert(Item{Val: []byte("314x")}.ValTyped(), qt.Equals, "314x") c.Assert(Item{Val: []byte("314 ")}.ValTyped(), qt.Equals, "314 ") c.Assert(Item{Val: []byte("314"), isString: true}.ValTyped(), qt.Equals, "314") c.Assert(Item{Val: []byte("true")}.ValTyped(), qt.Equals, true) c.Assert(Item{Val: []byte("false")}.ValTyped(), qt.Equals, false) c.Assert(Item{Val: []byte("trues")}.ValTyped(), qt.Equals, "trues") }
apache-2.0
ory-am/go-iam
internal/testhelpers/server.go
287
package testhelpers import ( "net/http" "net/http/httptest" "testing" ) func FlexibleServer(t *testing.T, h *http.HandlerFunc) string { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { (*h)(w, r) })) t.Cleanup(ts.Close) return ts.URL }
apache-2.0
Soluis/phabricator
src/applications/conpherence/controller/ConpherenceListController.php
5199
<?php final class ConpherenceListController extends ConpherenceController { const SELECTED_MODE = 'selected'; const UNSELECTED_MODE = 'unselected'; /** * Two main modes of operation... * * 1 - /conpherence/ - UNSELECTED_MODE * 2 - /conpherence/<id>/ - SELECTED_MODE * * UNSELECTED_MODE is not an Ajax request while the other two are Ajax * requests. */ private function determineMode() { $request = $this->getRequest(); $mode = self::UNSELECTED_MODE; if ($request->isAjax()) { $mode = self::SELECTED_MODE; } return $mode; } public function shouldAllowPublic() { return true; } public function handleRequest(AphrontRequest $request) { $user = $request->getUser(); $title = pht('Conpherence'); $conpherence = null; $limit = (ConpherenceThreadListView::SEE_MORE_LIMIT * 2) + 1; $all_participation = array(); $mode = $this->determineMode(); switch ($mode) { case self::SELECTED_MODE: $conpherence_id = $request->getURIData('id'); $conpherence = id(new ConpherenceThreadQuery()) ->setViewer($user) ->withIDs(array($conpherence_id)) ->executeOne(); if (!$conpherence) { return new Aphront404Response(); } if ($conpherence->getTitle()) { $title = $conpherence->getTitle(); } $cursor = $conpherence->getParticipantIfExists($user->getPHID()); $data = $this->loadDefaultParticipation($limit); $all_participation = $data['all_participation']; if (!$cursor) { $menu_participation = id(new ConpherenceParticipant()) ->makeEphemeral() ->setConpherencePHID($conpherence->getPHID()) ->setParticipantPHID($user->getPHID()); } else { $menu_participation = $cursor; } // check to see if the loaded conpherence is going to show up // within the SEE_MORE_LIMIT amount of conpherences. // If its not there, then we just pre-pend it as the "first" // conpherence so folks have a navigation item in the menu. 
$count = 0; $found = false; foreach ($all_participation as $phid => $curr_participation) { if ($conpherence->getPHID() == $phid) { $found = true; break; } $count++; if ($count > ConpherenceThreadListView::SEE_MORE_LIMIT) { break; } } if (!$found) { $all_participation = array($conpherence->getPHID() => $menu_participation) + $all_participation; } break; case self::UNSELECTED_MODE: default: $data = $this->loadDefaultParticipation($limit); $all_participation = $data['all_participation']; break; } $threads = $this->loadConpherenceThreadData( $all_participation); $thread_view = id(new ConpherenceThreadListView()) ->setUser($user) ->setBaseURI($this->getApplicationURI()) ->setThreads($threads); switch ($mode) { case self::SELECTED_MODE: $response = id(new AphrontAjaxResponse())->setContent($thread_view); break; case self::UNSELECTED_MODE: default: $layout = id(new ConpherenceLayoutView()) ->setUser($user) ->setBaseURI($this->getApplicationURI()) ->setThreadView($thread_view) ->setRole('list'); if ($conpherence) { $layout->setThread($conpherence); } else { // make a dummy conpherence so we can render something $conpherence = ConpherenceThread::initializeNewRoom($user); $conpherence->attachHandles(array()); $conpherence->attachTransactions(array()); $conpherence->makeEphemeral(); } $policy_objects = id(new PhabricatorPolicyQuery()) ->setViewer($user) ->setObject($conpherence) ->execute(); $layout->setHeader($this->buildHeaderPaneContent( $conpherence, $policy_objects)); $response = $this->newPage() ->setTitle($title) ->appendChild($layout); break; } return $response; } private function loadDefaultParticipation($limit) { $viewer = $this->getRequest()->getUser(); $all_participation = id(new ConpherenceParticipantQuery()) ->withParticipantPHIDs(array($viewer->getPHID())) ->setLimit($limit) ->execute(); return array( 'all_participation' => $all_participation, ); } private function loadConpherenceThreadData($participation) { $user = $this->getRequest()->getUser(); 
$conpherence_phids = array_keys($participation); $conpherences = array(); if ($conpherence_phids) { $conpherences = id(new ConpherenceThreadQuery()) ->setViewer($user) ->withPHIDs($conpherence_phids) ->needCropPics(true) ->needParticipantCache(true) ->execute(); // this will re-sort by participation data $conpherences = array_select_keys($conpherences, $conpherence_phids); } return $conpherences; } }
apache-2.0
sergecodd/FireFox-OS
B2G/gecko/content/xslt/src/xslt/txXSLTNumber.cpp
24479
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "mozilla/FloatingPoint.h" #include "txXSLTNumber.h" #include "nsGkAtoms.h" #include "txCore.h" #include <math.h> #include "txExpr.h" #include "txXSLTPatterns.h" #include "txIXPathContext.h" #include "txXPathTreeWalker.h" nsresult txXSLTNumber::createNumber(Expr* aValueExpr, txPattern* aCountPattern, txPattern* aFromPattern, LevelType aLevel, Expr* aGroupSize, Expr* aGroupSeparator, Expr* aFormat, txIEvalContext* aContext, nsAString& aResult) { aResult.Truncate(); nsresult rv = NS_OK; // Parse format txList counters; nsAutoString head, tail; rv = getCounters(aGroupSize, aGroupSeparator, aFormat, aContext, counters, head, tail); NS_ENSURE_SUCCESS(rv, rv); // Create list of values to format txList values; nsAutoString valueString; rv = getValueList(aValueExpr, aCountPattern, aFromPattern, aLevel, aContext, values, valueString); NS_ENSURE_SUCCESS(rv, rv); if (!valueString.IsEmpty()) { aResult = valueString; return NS_OK; } // Create resulting string aResult = head; bool first = true; txListIterator valueIter(&values); txListIterator counterIter(&counters); valueIter.resetToEnd(); int32_t value; txFormattedCounter* counter = 0; while ((value = NS_PTR_TO_INT32(valueIter.previous()))) { if (counterIter.hasNext()) { counter = (txFormattedCounter*)counterIter.next(); } if (!first) { aResult.Append(counter->mSeparator); } counter->appendNumber(value, aResult); first = false; } aResult.Append(tail); txListIterator iter(&counters); while (iter.hasNext()) { delete (txFormattedCounter*)iter.next(); } return NS_OK; } nsresult txXSLTNumber::getValueList(Expr* aValueExpr, txPattern* aCountPattern, txPattern* aFromPattern, LevelType aLevel, txIEvalContext* aContext, txList& aValues, nsAString& 
aValueString) { aValueString.Truncate(); nsresult rv = NS_OK; // If the value attribute exists then use that if (aValueExpr) { nsRefPtr<txAExprResult> result; rv = aValueExpr->evaluate(aContext, getter_AddRefs(result)); NS_ENSURE_SUCCESS(rv, rv); double value = result->numberValue(); if (MOZ_DOUBLE_IS_INFINITE(value) || MOZ_DOUBLE_IS_NaN(value) || value < 0.5) { txDouble::toString(value, aValueString); return NS_OK; } aValues.add(NS_INT32_TO_PTR((int32_t)floor(value + 0.5))); return NS_OK; } // Otherwise use count/from/level txPattern* countPattern = aCountPattern; bool ownsCountPattern = false; const txXPathNode& currNode = aContext->getContextNode(); // Parse count- and from-attributes if (!aCountPattern) { ownsCountPattern = true; txNodeTest* nodeTest; uint16_t nodeType = txXPathNodeUtils::getNodeType(currNode); switch (nodeType) { case txXPathNodeType::ELEMENT_NODE: { nsCOMPtr<nsIAtom> localName = txXPathNodeUtils::getLocalName(currNode); int32_t namespaceID = txXPathNodeUtils::getNamespaceID(currNode); nodeTest = new txNameTest(0, localName, namespaceID, txXPathNodeType::ELEMENT_NODE); break; } case txXPathNodeType::TEXT_NODE: case txXPathNodeType::CDATA_SECTION_NODE: { nodeTest = new txNodeTypeTest(txNodeTypeTest::TEXT_TYPE); break; } case txXPathNodeType::PROCESSING_INSTRUCTION_NODE: { txNodeTypeTest* typeTest; typeTest = new txNodeTypeTest(txNodeTypeTest::PI_TYPE); if (typeTest) { nsAutoString nodeName; txXPathNodeUtils::getNodeName(currNode, nodeName); typeTest->setNodeName(nodeName); } nodeTest = typeTest; break; } case txXPathNodeType::COMMENT_NODE: { nodeTest = new txNodeTypeTest(txNodeTypeTest::COMMENT_TYPE); break; } case txXPathNodeType::DOCUMENT_NODE: case txXPathNodeType::ATTRIBUTE_NODE: default: { // this won't match anything as we walk up the tree // but it's what the spec says to do nodeTest = new txNameTest(0, nsGkAtoms::_asterix, 0, nodeType); break; } } NS_ENSURE_TRUE(nodeTest, NS_ERROR_OUT_OF_MEMORY); countPattern = new 
txStepPattern(nodeTest, false); if (!countPattern) { // XXX error reporting delete nodeTest; return NS_ERROR_OUT_OF_MEMORY; } } // Generate list of values depending on the value of the level-attribute // level = "single" if (aLevel == eLevelSingle) { txXPathTreeWalker walker(currNode); do { if (aFromPattern && !walker.isOnNode(currNode) && aFromPattern->matches(walker.getCurrentPosition(), aContext)) { break; } if (countPattern->matches(walker.getCurrentPosition(), aContext)) { aValues.add(NS_INT32_TO_PTR(getSiblingCount(walker, countPattern, aContext))); break; } } while (walker.moveToParent()); // Spec says to only match ancestors that are decendants of the // ancestor that matches the from-pattern, so keep going to make // sure that there is an ancestor that does. if (aFromPattern && aValues.getLength()) { bool hasParent; while ((hasParent = walker.moveToParent())) { if (aFromPattern->matches(walker.getCurrentPosition(), aContext)) { break; } } if (!hasParent) { aValues.clear(); } } } // level = "multiple" else if (aLevel == eLevelMultiple) { // find all ancestor-or-selfs that matches count until... txXPathTreeWalker walker(currNode); bool matchedFrom = false; do { if (aFromPattern && !walker.isOnNode(currNode) && aFromPattern->matches(walker.getCurrentPosition(), aContext)) { //... 
we find one that matches from matchedFrom = true; break; } if (countPattern->matches(walker.getCurrentPosition(), aContext)) { aValues.add(NS_INT32_TO_PTR(getSiblingCount(walker, countPattern, aContext))); } } while (walker.moveToParent()); // Spec says to only match ancestors that are decendants of the // ancestor that matches the from-pattern, so if none did then // we shouldn't search anything if (aFromPattern && !matchedFrom) { aValues.clear(); } } // level = "any" else if (aLevel == eLevelAny) { int32_t value = 0; bool matchedFrom = false; txXPathTreeWalker walker(currNode); do { if (aFromPattern && !walker.isOnNode(currNode) && aFromPattern->matches(walker.getCurrentPosition(), aContext)) { matchedFrom = true; break; } if (countPattern->matches(walker.getCurrentPosition(), aContext)) { ++value; } } while (getPrevInDocumentOrder(walker)); // Spec says to only count nodes that follows the first node that // matches the from pattern. So so if none did then we shouldn't // count any if (aFromPattern && !matchedFrom) { value = 0; } if (value) { aValues.add(NS_INT32_TO_PTR(value)); } } if (ownsCountPattern) { delete countPattern; } return NS_OK; } nsresult txXSLTNumber::getCounters(Expr* aGroupSize, Expr* aGroupSeparator, Expr* aFormat, txIEvalContext* aContext, txList& aCounters, nsAString& aHead, nsAString& aTail) { aHead.Truncate(); aTail.Truncate(); nsresult rv = NS_OK; nsAutoString groupSeparator; int32_t groupSize = 0; if (aGroupSize && aGroupSeparator) { nsAutoString sizeStr; rv = aGroupSize->evaluateToString(aContext, sizeStr); NS_ENSURE_SUCCESS(rv, rv); double size = txDouble::toDouble(sizeStr); groupSize = (int32_t)size; if ((double)groupSize != size) { groupSize = 0; } rv = aGroupSeparator->evaluateToString(aContext, groupSeparator); NS_ENSURE_SUCCESS(rv, rv); } nsAutoString format; if (aFormat) { rv = aFormat->evaluateToString(aContext, format); NS_ENSURE_SUCCESS(rv, rv); } uint32_t formatLen = format.Length(); uint32_t formatPos = 0; PRUnichar ch = 0; 
// start with header while (formatPos < formatLen && !isAlphaNumeric(ch = format.CharAt(formatPos))) { aHead.Append(ch); ++formatPos; } // If there are no formatting tokens we need to create a default one. if (formatPos == formatLen) { txFormattedCounter* defaultCounter; rv = txFormattedCounter::getCounterFor(NS_LITERAL_STRING("1"), groupSize, groupSeparator, defaultCounter); NS_ENSURE_SUCCESS(rv, rv); defaultCounter->mSeparator.AssignLiteral("."); rv = aCounters.add(defaultCounter); if (NS_FAILED(rv)) { // XXX ErrorReport: out of memory delete defaultCounter; return rv; } return NS_OK; } while (formatPos < formatLen) { nsAutoString sepToken; // parse separator token if (!aCounters.getLength()) { // Set the first counters separator to default value so that if // there is only one formatting token and we're formatting a // value-list longer then one we use the default separator. This // won't be used when formatting the first value anyway. sepToken.AssignLiteral("."); } else { while (formatPos < formatLen && !isAlphaNumeric(ch = format.CharAt(formatPos))) { sepToken.Append(ch); ++formatPos; } } // if we're at the end of the string then the previous token was the tail if (formatPos == formatLen) { aTail = sepToken; return NS_OK; } // parse formatting token nsAutoString numToken; while (formatPos < formatLen && isAlphaNumeric(ch = format.CharAt(formatPos))) { numToken.Append(ch); ++formatPos; } txFormattedCounter* counter = 0; rv = txFormattedCounter::getCounterFor(numToken, groupSize, groupSeparator, counter); if (NS_FAILED(rv)) { txListIterator iter(&aCounters); while (iter.hasNext()) { delete (txFormattedCounter*)iter.next(); } aCounters.clear(); return rv; } // Add to list of counters counter->mSeparator = sepToken; rv = aCounters.add(counter); if (NS_FAILED(rv)) { // XXX ErrorReport: out of memory txListIterator iter(&aCounters); while (iter.hasNext()) { delete (txFormattedCounter*)iter.next(); } aCounters.clear(); return rv; } } return NS_OK; } int32_t 
txXSLTNumber::getSiblingCount(txXPathTreeWalker& aWalker, txPattern* aCountPattern, txIMatchContext* aContext) { int32_t value = 1; while (aWalker.moveToPreviousSibling()) { if (aCountPattern->matches(aWalker.getCurrentPosition(), aContext)) { ++value; } } return value; } bool txXSLTNumber::getPrevInDocumentOrder(txXPathTreeWalker& aWalker) { if (aWalker.moveToPreviousSibling()) { while (aWalker.moveToLastChild()) { // do nothing } return true; } return aWalker.moveToParent(); } #define TX_CHAR_RANGE(ch, a, b) if (ch < a) return false; \ if (ch <= b) return true #define TX_MATCH_CHAR(ch, a) if (ch < a) return false; \ if (ch == a) return true bool txXSLTNumber::isAlphaNumeric(PRUnichar ch) { TX_CHAR_RANGE(ch, 0x0030, 0x0039); TX_CHAR_RANGE(ch, 0x0041, 0x005A); TX_CHAR_RANGE(ch, 0x0061, 0x007A); TX_MATCH_CHAR(ch, 0x00AA); TX_CHAR_RANGE(ch, 0x00B2, 0x00B3); TX_MATCH_CHAR(ch, 0x00B5); TX_CHAR_RANGE(ch, 0x00B9, 0x00BA); TX_CHAR_RANGE(ch, 0x00BC, 0x00BE); TX_CHAR_RANGE(ch, 0x00C0, 0x00D6); TX_CHAR_RANGE(ch, 0x00D8, 0x00F6); TX_CHAR_RANGE(ch, 0x00F8, 0x021F); TX_CHAR_RANGE(ch, 0x0222, 0x0233); TX_CHAR_RANGE(ch, 0x0250, 0x02AD); TX_CHAR_RANGE(ch, 0x02B0, 0x02B8); TX_CHAR_RANGE(ch, 0x02BB, 0x02C1); TX_CHAR_RANGE(ch, 0x02D0, 0x02D1); TX_CHAR_RANGE(ch, 0x02E0, 0x02E4); TX_MATCH_CHAR(ch, 0x02EE); TX_MATCH_CHAR(ch, 0x037A); TX_MATCH_CHAR(ch, 0x0386); TX_CHAR_RANGE(ch, 0x0388, 0x038A); TX_MATCH_CHAR(ch, 0x038C); TX_CHAR_RANGE(ch, 0x038E, 0x03A1); TX_CHAR_RANGE(ch, 0x03A3, 0x03CE); TX_CHAR_RANGE(ch, 0x03D0, 0x03D7); TX_CHAR_RANGE(ch, 0x03DA, 0x03F3); TX_CHAR_RANGE(ch, 0x0400, 0x0481); TX_CHAR_RANGE(ch, 0x048C, 0x04C4); TX_CHAR_RANGE(ch, 0x04C7, 0x04C8); TX_CHAR_RANGE(ch, 0x04CB, 0x04CC); TX_CHAR_RANGE(ch, 0x04D0, 0x04F5); TX_CHAR_RANGE(ch, 0x04F8, 0x04F9); TX_CHAR_RANGE(ch, 0x0531, 0x0556); TX_MATCH_CHAR(ch, 0x0559); TX_CHAR_RANGE(ch, 0x0561, 0x0587); TX_CHAR_RANGE(ch, 0x05D0, 0x05EA); TX_CHAR_RANGE(ch, 0x05F0, 0x05F2); TX_CHAR_RANGE(ch, 0x0621, 0x063A); TX_CHAR_RANGE(ch, 
0x0640, 0x064A); TX_CHAR_RANGE(ch, 0x0660, 0x0669); TX_CHAR_RANGE(ch, 0x0671, 0x06D3); TX_MATCH_CHAR(ch, 0x06D5); TX_CHAR_RANGE(ch, 0x06E5, 0x06E6); TX_CHAR_RANGE(ch, 0x06F0, 0x06FC); TX_MATCH_CHAR(ch, 0x0710); TX_CHAR_RANGE(ch, 0x0712, 0x072C); TX_CHAR_RANGE(ch, 0x0780, 0x07A5); TX_CHAR_RANGE(ch, 0x0905, 0x0939); TX_MATCH_CHAR(ch, 0x093D); TX_MATCH_CHAR(ch, 0x0950); TX_CHAR_RANGE(ch, 0x0958, 0x0961); TX_CHAR_RANGE(ch, 0x0966, 0x096F); TX_CHAR_RANGE(ch, 0x0985, 0x098C); TX_CHAR_RANGE(ch, 0x098F, 0x0990); TX_CHAR_RANGE(ch, 0x0993, 0x09A8); TX_CHAR_RANGE(ch, 0x09AA, 0x09B0); TX_MATCH_CHAR(ch, 0x09B2); TX_CHAR_RANGE(ch, 0x09B6, 0x09B9); TX_CHAR_RANGE(ch, 0x09DC, 0x09DD); TX_CHAR_RANGE(ch, 0x09DF, 0x09E1); TX_CHAR_RANGE(ch, 0x09E6, 0x09F1); TX_CHAR_RANGE(ch, 0x09F4, 0x09F9); TX_CHAR_RANGE(ch, 0x0A05, 0x0A0A); TX_CHAR_RANGE(ch, 0x0A0F, 0x0A10); TX_CHAR_RANGE(ch, 0x0A13, 0x0A28); TX_CHAR_RANGE(ch, 0x0A2A, 0x0A30); TX_CHAR_RANGE(ch, 0x0A32, 0x0A33); TX_CHAR_RANGE(ch, 0x0A35, 0x0A36); TX_CHAR_RANGE(ch, 0x0A38, 0x0A39); TX_CHAR_RANGE(ch, 0x0A59, 0x0A5C); TX_MATCH_CHAR(ch, 0x0A5E); TX_CHAR_RANGE(ch, 0x0A66, 0x0A6F); TX_CHAR_RANGE(ch, 0x0A72, 0x0A74); TX_CHAR_RANGE(ch, 0x0A85, 0x0A8B); TX_MATCH_CHAR(ch, 0x0A8D); TX_CHAR_RANGE(ch, 0x0A8F, 0x0A91); TX_CHAR_RANGE(ch, 0x0A93, 0x0AA8); TX_CHAR_RANGE(ch, 0x0AAA, 0x0AB0); TX_CHAR_RANGE(ch, 0x0AB2, 0x0AB3); TX_CHAR_RANGE(ch, 0x0AB5, 0x0AB9); TX_MATCH_CHAR(ch, 0x0ABD); TX_MATCH_CHAR(ch, 0x0AD0); TX_MATCH_CHAR(ch, 0x0AE0); TX_CHAR_RANGE(ch, 0x0AE6, 0x0AEF); TX_CHAR_RANGE(ch, 0x0B05, 0x0B0C); TX_CHAR_RANGE(ch, 0x0B0F, 0x0B10); TX_CHAR_RANGE(ch, 0x0B13, 0x0B28); TX_CHAR_RANGE(ch, 0x0B2A, 0x0B30); TX_CHAR_RANGE(ch, 0x0B32, 0x0B33); TX_CHAR_RANGE(ch, 0x0B36, 0x0B39); TX_MATCH_CHAR(ch, 0x0B3D); TX_CHAR_RANGE(ch, 0x0B5C, 0x0B5D); TX_CHAR_RANGE(ch, 0x0B5F, 0x0B61); TX_CHAR_RANGE(ch, 0x0B66, 0x0B6F); TX_CHAR_RANGE(ch, 0x0B85, 0x0B8A); TX_CHAR_RANGE(ch, 0x0B8E, 0x0B90); TX_CHAR_RANGE(ch, 0x0B92, 0x0B95); TX_CHAR_RANGE(ch, 0x0B99, 0x0B9A); 
TX_MATCH_CHAR(ch, 0x0B9C); TX_CHAR_RANGE(ch, 0x0B9E, 0x0B9F); TX_CHAR_RANGE(ch, 0x0BA3, 0x0BA4); TX_CHAR_RANGE(ch, 0x0BA8, 0x0BAA); TX_CHAR_RANGE(ch, 0x0BAE, 0x0BB5); TX_CHAR_RANGE(ch, 0x0BB7, 0x0BB9); TX_CHAR_RANGE(ch, 0x0BE7, 0x0BF2); TX_CHAR_RANGE(ch, 0x0C05, 0x0C0C); TX_CHAR_RANGE(ch, 0x0C0E, 0x0C10); TX_CHAR_RANGE(ch, 0x0C12, 0x0C28); TX_CHAR_RANGE(ch, 0x0C2A, 0x0C33); TX_CHAR_RANGE(ch, 0x0C35, 0x0C39); TX_CHAR_RANGE(ch, 0x0C60, 0x0C61); TX_CHAR_RANGE(ch, 0x0C66, 0x0C6F); TX_CHAR_RANGE(ch, 0x0C85, 0x0C8C); TX_CHAR_RANGE(ch, 0x0C8E, 0x0C90); TX_CHAR_RANGE(ch, 0x0C92, 0x0CA8); TX_CHAR_RANGE(ch, 0x0CAA, 0x0CB3); TX_CHAR_RANGE(ch, 0x0CB5, 0x0CB9); TX_MATCH_CHAR(ch, 0x0CDE); TX_CHAR_RANGE(ch, 0x0CE0, 0x0CE1); TX_CHAR_RANGE(ch, 0x0CE6, 0x0CEF); TX_CHAR_RANGE(ch, 0x0D05, 0x0D0C); TX_CHAR_RANGE(ch, 0x0D0E, 0x0D10); TX_CHAR_RANGE(ch, 0x0D12, 0x0D28); TX_CHAR_RANGE(ch, 0x0D2A, 0x0D39); TX_CHAR_RANGE(ch, 0x0D60, 0x0D61); TX_CHAR_RANGE(ch, 0x0D66, 0x0D6F); TX_CHAR_RANGE(ch, 0x0D85, 0x0D96); TX_CHAR_RANGE(ch, 0x0D9A, 0x0DB1); TX_CHAR_RANGE(ch, 0x0DB3, 0x0DBB); TX_MATCH_CHAR(ch, 0x0DBD); TX_CHAR_RANGE(ch, 0x0DC0, 0x0DC6); TX_CHAR_RANGE(ch, 0x0E01, 0x0E30); TX_CHAR_RANGE(ch, 0x0E32, 0x0E33); TX_CHAR_RANGE(ch, 0x0E40, 0x0E46); TX_CHAR_RANGE(ch, 0x0E50, 0x0E59); TX_CHAR_RANGE(ch, 0x0E81, 0x0E82); TX_MATCH_CHAR(ch, 0x0E84); TX_CHAR_RANGE(ch, 0x0E87, 0x0E88); TX_MATCH_CHAR(ch, 0x0E8A); TX_MATCH_CHAR(ch, 0x0E8D); TX_CHAR_RANGE(ch, 0x0E94, 0x0E97); TX_CHAR_RANGE(ch, 0x0E99, 0x0E9F); TX_CHAR_RANGE(ch, 0x0EA1, 0x0EA3); TX_MATCH_CHAR(ch, 0x0EA5); TX_MATCH_CHAR(ch, 0x0EA7); TX_CHAR_RANGE(ch, 0x0EAA, 0x0EAB); TX_CHAR_RANGE(ch, 0x0EAD, 0x0EB0); TX_CHAR_RANGE(ch, 0x0EB2, 0x0EB3); TX_MATCH_CHAR(ch, 0x0EBD); TX_CHAR_RANGE(ch, 0x0EC0, 0x0EC4); TX_MATCH_CHAR(ch, 0x0EC6); TX_CHAR_RANGE(ch, 0x0ED0, 0x0ED9); TX_CHAR_RANGE(ch, 0x0EDC, 0x0EDD); TX_MATCH_CHAR(ch, 0x0F00); TX_CHAR_RANGE(ch, 0x0F20, 0x0F33); TX_CHAR_RANGE(ch, 0x0F40, 0x0F47); TX_CHAR_RANGE(ch, 0x0F49, 0x0F6A); TX_CHAR_RANGE(ch, 
0x0F88, 0x0F8B); TX_CHAR_RANGE(ch, 0x1000, 0x1021); TX_CHAR_RANGE(ch, 0x1023, 0x1027); TX_CHAR_RANGE(ch, 0x1029, 0x102A); TX_CHAR_RANGE(ch, 0x1040, 0x1049); TX_CHAR_RANGE(ch, 0x1050, 0x1055); TX_CHAR_RANGE(ch, 0x10A0, 0x10C5); TX_CHAR_RANGE(ch, 0x10D0, 0x10F6); TX_CHAR_RANGE(ch, 0x1100, 0x1159); TX_CHAR_RANGE(ch, 0x115F, 0x11A2); TX_CHAR_RANGE(ch, 0x11A8, 0x11F9); TX_CHAR_RANGE(ch, 0x1200, 0x1206); TX_CHAR_RANGE(ch, 0x1208, 0x1246); TX_MATCH_CHAR(ch, 0x1248); TX_CHAR_RANGE(ch, 0x124A, 0x124D); TX_CHAR_RANGE(ch, 0x1250, 0x1256); TX_MATCH_CHAR(ch, 0x1258); TX_CHAR_RANGE(ch, 0x125A, 0x125D); TX_CHAR_RANGE(ch, 0x1260, 0x1286); TX_MATCH_CHAR(ch, 0x1288); TX_CHAR_RANGE(ch, 0x128A, 0x128D); TX_CHAR_RANGE(ch, 0x1290, 0x12AE); TX_MATCH_CHAR(ch, 0x12B0); TX_CHAR_RANGE(ch, 0x12B2, 0x12B5); TX_CHAR_RANGE(ch, 0x12B8, 0x12BE); TX_MATCH_CHAR(ch, 0x12C0); TX_CHAR_RANGE(ch, 0x12C2, 0x12C5); TX_CHAR_RANGE(ch, 0x12C8, 0x12CE); TX_CHAR_RANGE(ch, 0x12D0, 0x12D6); TX_CHAR_RANGE(ch, 0x12D8, 0x12EE); TX_CHAR_RANGE(ch, 0x12F0, 0x130E); TX_MATCH_CHAR(ch, 0x1310); TX_CHAR_RANGE(ch, 0x1312, 0x1315); TX_CHAR_RANGE(ch, 0x1318, 0x131E); TX_CHAR_RANGE(ch, 0x1320, 0x1346); TX_CHAR_RANGE(ch, 0x1348, 0x135A); TX_CHAR_RANGE(ch, 0x1369, 0x137C); TX_CHAR_RANGE(ch, 0x13A0, 0x13F4); TX_CHAR_RANGE(ch, 0x1401, 0x166C); TX_CHAR_RANGE(ch, 0x166F, 0x1676); TX_CHAR_RANGE(ch, 0x1681, 0x169A); TX_CHAR_RANGE(ch, 0x16A0, 0x16EA); TX_CHAR_RANGE(ch, 0x16EE, 0x16F0); TX_CHAR_RANGE(ch, 0x1780, 0x17B3); TX_CHAR_RANGE(ch, 0x17E0, 0x17E9); TX_CHAR_RANGE(ch, 0x1810, 0x1819); TX_CHAR_RANGE(ch, 0x1820, 0x1877); TX_CHAR_RANGE(ch, 0x1880, 0x18A8); TX_CHAR_RANGE(ch, 0x1E00, 0x1E9B); TX_CHAR_RANGE(ch, 0x1EA0, 0x1EF9); TX_CHAR_RANGE(ch, 0x1F00, 0x1F15); TX_CHAR_RANGE(ch, 0x1F18, 0x1F1D); TX_CHAR_RANGE(ch, 0x1F20, 0x1F45); TX_CHAR_RANGE(ch, 0x1F48, 0x1F4D); TX_CHAR_RANGE(ch, 0x1F50, 0x1F57); TX_MATCH_CHAR(ch, 0x1F59); TX_MATCH_CHAR(ch, 0x1F5B); TX_MATCH_CHAR(ch, 0x1F5D); TX_CHAR_RANGE(ch, 0x1F5F, 0x1F7D); TX_CHAR_RANGE(ch, 
0x1F80, 0x1FB4); TX_CHAR_RANGE(ch, 0x1FB6, 0x1FBC); TX_MATCH_CHAR(ch, 0x1FBE); TX_CHAR_RANGE(ch, 0x1FC2, 0x1FC4); TX_CHAR_RANGE(ch, 0x1FC6, 0x1FCC); TX_CHAR_RANGE(ch, 0x1FD0, 0x1FD3); TX_CHAR_RANGE(ch, 0x1FD6, 0x1FDB); TX_CHAR_RANGE(ch, 0x1FE0, 0x1FEC); TX_CHAR_RANGE(ch, 0x1FF2, 0x1FF4); TX_CHAR_RANGE(ch, 0x1FF6, 0x1FFC); TX_MATCH_CHAR(ch, 0x2070); TX_CHAR_RANGE(ch, 0x2074, 0x2079); TX_CHAR_RANGE(ch, 0x207F, 0x2089); TX_MATCH_CHAR(ch, 0x2102); TX_MATCH_CHAR(ch, 0x2107); TX_CHAR_RANGE(ch, 0x210A, 0x2113); TX_MATCH_CHAR(ch, 0x2115); TX_CHAR_RANGE(ch, 0x2119, 0x211D); TX_MATCH_CHAR(ch, 0x2124); TX_MATCH_CHAR(ch, 0x2126); TX_MATCH_CHAR(ch, 0x2128); TX_CHAR_RANGE(ch, 0x212A, 0x212D); TX_CHAR_RANGE(ch, 0x212F, 0x2131); TX_CHAR_RANGE(ch, 0x2133, 0x2139); TX_CHAR_RANGE(ch, 0x2153, 0x2183); TX_CHAR_RANGE(ch, 0x2460, 0x249B); TX_MATCH_CHAR(ch, 0x24EA); TX_CHAR_RANGE(ch, 0x2776, 0x2793); TX_CHAR_RANGE(ch, 0x3005, 0x3007); TX_CHAR_RANGE(ch, 0x3021, 0x3029); TX_CHAR_RANGE(ch, 0x3031, 0x3035); TX_CHAR_RANGE(ch, 0x3038, 0x303A); TX_CHAR_RANGE(ch, 0x3041, 0x3094); TX_CHAR_RANGE(ch, 0x309D, 0x309E); TX_CHAR_RANGE(ch, 0x30A1, 0x30FA); TX_CHAR_RANGE(ch, 0x30FC, 0x30FE); TX_CHAR_RANGE(ch, 0x3105, 0x312C); TX_CHAR_RANGE(ch, 0x3131, 0x318E); TX_CHAR_RANGE(ch, 0x3192, 0x3195); TX_CHAR_RANGE(ch, 0x31A0, 0x31B7); TX_CHAR_RANGE(ch, 0x3220, 0x3229); TX_CHAR_RANGE(ch, 0x3280, 0x3289); TX_MATCH_CHAR(ch, 0x3400); TX_MATCH_CHAR(ch, 0x4DB5); TX_MATCH_CHAR(ch, 0x4E00); TX_MATCH_CHAR(ch, 0x9FA5); TX_CHAR_RANGE(ch, 0xA000, 0xA48C); TX_MATCH_CHAR(ch, 0xAC00); TX_MATCH_CHAR(ch, 0xD7A3); TX_CHAR_RANGE(ch, 0xF900, 0xFA2D); TX_CHAR_RANGE(ch, 0xFB00, 0xFB06); TX_CHAR_RANGE(ch, 0xFB13, 0xFB17); TX_MATCH_CHAR(ch, 0xFB1D); TX_CHAR_RANGE(ch, 0xFB1F, 0xFB28); TX_CHAR_RANGE(ch, 0xFB2A, 0xFB36); TX_CHAR_RANGE(ch, 0xFB38, 0xFB3C); TX_MATCH_CHAR(ch, 0xFB3E); TX_CHAR_RANGE(ch, 0xFB40, 0xFB41); TX_CHAR_RANGE(ch, 0xFB43, 0xFB44); TX_CHAR_RANGE(ch, 0xFB46, 0xFBB1); TX_CHAR_RANGE(ch, 0xFBD3, 0xFD3D); TX_CHAR_RANGE(ch, 
0xFD50, 0xFD8F); TX_CHAR_RANGE(ch, 0xFD92, 0xFDC7); TX_CHAR_RANGE(ch, 0xFDF0, 0xFDFB); TX_CHAR_RANGE(ch, 0xFE70, 0xFE72); TX_MATCH_CHAR(ch, 0xFE74); TX_CHAR_RANGE(ch, 0xFE76, 0xFEFC); TX_CHAR_RANGE(ch, 0xFF10, 0xFF19); TX_CHAR_RANGE(ch, 0xFF21, 0xFF3A); TX_CHAR_RANGE(ch, 0xFF41, 0xFF5A); TX_CHAR_RANGE(ch, 0xFF66, 0xFFBE); TX_CHAR_RANGE(ch, 0xFFC2, 0xFFC7); TX_CHAR_RANGE(ch, 0xFFCA, 0xFFCF); TX_CHAR_RANGE(ch, 0xFFD2, 0xFFD7); return false; }
apache-2.0
Nipuni/product-dss
modules/integration/tests-integration/tests/src/test/java/org/wso2/dss/integration/test/fileservice/ExcelDataServiceTestCase.java
7871
/* *Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * *WSO2 Inc. licenses this file to you under the Apache License, *Version 2.0 (the "License"); you may not use this file except *in compliance with the License. *You may obtain a copy of the License at * *http://www.apache.org/licenses/LICENSE-2.0 * *Unless required by applicable law or agreed to in writing, *software distributed under the License is distributed on an *"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *KIND, either express or implied. See the License for the *specific language governing permissions and limitations *under the License. */ package org.wso2.dss.integration.test.fileservice; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.OMNamespace; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.AxisFault; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import org.wso2.carbon.automation.test.utils.axis2client.AxisServiceClient; import org.wso2.carbon.automation.test.utils.common.FileManager; import org.wso2.carbon.automation.test.utils.concurrency.test.ConcurrencyTest; import org.wso2.carbon.automation.test.utils.concurrency.test.exception.ConcurrencyTestFailedError; import org.wso2.dss.integration.common.clients.ResourceAdminServiceClient; import org.wso2.carbon.registry.resource.stub.ResourceAdminServiceExceptionException; import org.wso2.dss.integration.common.utils.DSSTestCaseUtils; import org.wso2.dss.integration.test.DSSIntegrationTest; import javax.activation.DataHandler; import javax.xml.xpath.XPathExpressionException; import java.io.File; import java.net.MalformedURLException; import java.net.URL; import java.rmi.RemoteException; import static 
org.testng.Assert.assertTrue; public class ExcelDataServiceTestCase extends DSSIntegrationTest { private static final Log log = LogFactory.getLog(ExcelDataServiceTestCase.class); private final String serviceName = "ExcelDataService"; @BeforeClass(alwaysRun = true) public void serviceDeployment() throws Exception { super.init(); addResource(); deployService(serviceName, AXIOMUtil.stringToOM(FileManager.readFile(getResourceLocation() + File.separator + "dbs" + File.separator + "excel" + File.separator + "ExcelDataService.dbs"))); } @Test(groups = "wso2.dss", description = "Check whether fault service deployed or not") public void testServiceDeployment() throws RemoteException, XPathExpressionException { DSSTestCaseUtils dssTestCaseUtils = new DSSTestCaseUtils(); assertTrue(dssTestCaseUtils.isServiceDeployed(dssContext.getContextUrls().getBackEndUrl(), sessionCookie, serviceName)); log.info(serviceName + " is deployed"); } @AfterClass(alwaysRun = true) public void destroy() throws Exception { deleteService(serviceName); deleteResource(); cleanup(); } @Test(groups = {"wso2.dss"}, invocationCount = 5, dependsOnMethods = "testServiceDeployment") public void selectOperation() throws AxisFault, XPathExpressionException { OMFactory fac = OMAbstractFactory.getOMFactory(); OMNamespace omNs = fac.createOMNamespace("http://ws.wso2.org/dataservice", "ns1"); OMElement payload = fac.createOMElement("getProducts", omNs); OMElement result = new AxisServiceClient().sendReceive(payload, getServiceUrlHttp(serviceName), "getProducts"); log.info("Response :" + result); Assert.assertTrue((result.toString().indexOf("Products") == 1), "Expected Result Not found"); Assert.assertTrue(result.toString().contains("<Product>"), "Expected Result Not found"); Assert.assertTrue(result.toString().contains("<ID>"), "Expected Result Not found"); Assert.assertTrue(result.toString().contains("<Name>"), "Expected Result Not found"); log.info("Service invocation success"); } @Test(groups = {"wso2.dss"}, 
invocationCount = 5, dependsOnMethods = "testServiceDeployment") public void xsltTransformation() throws AxisFault, XPathExpressionException { OMFactory fac = OMAbstractFactory.getOMFactory(); OMNamespace omNs = fac.createOMNamespace("http://ws.wso2.org/dataservice", "ns1"); OMElement payload = fac.createOMElement("getProductClassifications", omNs); OMElement result = new AxisServiceClient().sendReceive(payload, getServiceUrlHttp(serviceName), "getProductClassifications"); if (log.isDebugEnabled()) { log.debug("Response :" + result); } Assert.assertTrue((result.toString().indexOf("Products") == 1), "Expected Result Not found"); Assert.assertTrue(result.toString().contains("<Product>"), "Expected Result Not found"); Assert.assertTrue(result.toString().contains("<Product-Name>"), "Expected Result Not found"); Assert.assertTrue(result.toString().contains("<Product-Classification>"), "Expected Result Not found"); log.info("XSLT Transformation Success"); } @Test(groups = {"wso2.dss"}, dependsOnMethods = {"xsltTransformation"}, timeOut = 1000 * 60 * 1) public void concurrencyTest() throws ConcurrencyTestFailedError, InterruptedException, XPathExpressionException { OMFactory fac = OMAbstractFactory.getOMFactory(); OMNamespace omNs = fac.createOMNamespace("http://ws.wso2.org/dataservice", "ns1"); OMElement payload = fac.createOMElement("getProducts", omNs); ConcurrencyTest concurrencyTest = new ConcurrencyTest(5, 5); concurrencyTest.run(getServiceUrlHttp(serviceName), payload, "getProducts"); } private void addResource() throws RemoteException, MalformedURLException, ResourceAdminServiceExceptionException, XPathExpressionException { ResourceAdminServiceClient resourceAdmin = new ResourceAdminServiceClient(dssContext.getContextUrls().getBackEndUrl() , sessionCookie); resourceAdmin.deleteResource("/_system/config/automation/resources/"); resourceAdmin.addResource("/_system/config/automation/resources/Products.xls", "application/vnd.ms-excel", "", new DataHandler(new 
URL("file:///" + getResourceLocation() + File.separator + "resources" + File.separator + "Products.xls"))); resourceAdmin.addResource("/_system/config/automation/resources/transform.xslt", "application/xml", "", new DataHandler(new URL("file:///" + getResourceLocation() + File.separator + "resources" + File.separator + "transform.xslt"))); } private void deleteResource() throws RemoteException, MalformedURLException, ResourceAdminServiceExceptionException, XPathExpressionException { ResourceAdminServiceClient resourceAdmin = new ResourceAdminServiceClient(dssContext.getContextUrls().getBackEndUrl() , sessionCookie); resourceAdmin.deleteResource("/_system/config/automation/resources/"); } }
apache-2.0
polimi-giocoso/captain
app/src/main/java/it/playfellas/hicaptain/FinalActivity.java
2129
/* * Copyright 2015 Lorenzo Affetti, Giacomo Bresciani, Stefano Cappa * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package it.playfellas.hicaptain; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.view.View; import android.widget.Button; import android.widget.ImageButton; import butterknife.Bind; import butterknife.ButterKnife; import butterknife.OnClick; import it.playfellas.hicaptain.sounds.Baraldi; /** * Created by Stefano Cappa on 21/09/15. */ public class FinalActivity extends AppCompatActivity { @Bind(R.id.captainImageView) ImageButton captainImageView; @Bind(R.id.skipSoundButton) Button skipSoundButton; private boolean greetingsEnabled; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); ButterKnife.bind(this); } @Override protected void onResume() { super.onResume(); this.greetingsEnabled = true; } @Override protected void onDestroy() { super.onDestroy(); ButterKnife.unbind(this); } @OnClick(R.id.captainImageView) public void onClick(View v) { if (greetingsEnabled) { Baraldi.greet(this, new Runnable() { @Override public void run() { greetingsEnabled = true; } }); } greetingsEnabled = false; } @OnClick(R.id.skipSoundButton) public void skipSound(View v) { Baraldi.shutUp(true); } }
apache-2.0
KevinLoiseau/manageiq
gems/pending/disk/modules/QcowDisk.rb
18511
require 'disk/modules/MiqLargeFile'
require 'memory_buffer'
require 'binary_struct'
require 'zlib'

# Read-only access to QCOW/QCOW2/QCOW3 disk images, including backing-file
# (copy-on-write) chains and zlib-compressed clusters. Mixed into a disk
# object that supplies dInfo, size, @endByteAddr, etc. (MiqDisk — TODO confirm).
module QcowDisk
  # Just enough of the header to read the magic and the format version.
  QCOW_HEADER_PARTIAL = BinaryStruct.new([
    'A4', 'magicNumber',
    'N',  'version',
  ])
  SIZEOF_QCOW_HEADER_PARTIAL = QCOW_HEADER_PARTIAL.size

  # Version 1 header layout. 64-bit big-endian fields are split into
  # hi/lo 32-bit words and recombined by #uint64.
  QCOW_HEADER_V1 = BinaryStruct.new([
    'A4', 'magicNumber',
    'N',  'version',
    'N',  'backing_filename_offset_hi',
    'N',  'backing_filename_offset_lo',
    'N',  'backing_filename_size',
    'N',  'mtime',
    'N',  'size_hi',
    'N',  'size_lo',
    'C',  'cluster_bits',
    'C',  'l2_bits',
    'N',  'crypt_method',
    'N',  'l1_table_offset_hi',
    'N',  'l1_table_offset_lo',
  ])
  SIZEOF_QCOW_HEADER_V1 = QCOW_HEADER_V1.size

  # Version 2 (qcow2) header layout.
  QCOW_HEADER_V2 = BinaryStruct.new([
    'A4', 'magicNumber',
    'N',  'version',
    'N',  'backing_filename_offset_hi',
    'N',  'backing_filename_offset_lo',
    'N',  'backing_filename_size',
    'N',  'cluster_bits',
    'N',  'size_hi',
    'N',  'size_lo',
    'N',  'crypt_method',
    'N',  'l1_size',
    'N',  'l1_table_offset_hi',
    'N',  'l1_table_offset_lo',
    'N',  'refcount_table_offset_hi',
    'N',  'refcount_table_offset_lo',
    'N',  'refcount_table_clusters',
    'N',  'number_of_snapshots',
    'N',  'snapshots_offset_hi',
    'N',  'snapshots_offset_lo',
  ])
  SIZEOF_QCOW_HEADER_V2 = QCOW_HEADER_V2.size

  # Version 3 header = the v2 header plus the feature/refcount extension fields.
  QCOW_HEADER_V3 = BinaryStruct.new(QCOW_HEADER_V2.definition + [
    'Q', 'incompatible_features',
    'Q', 'compatible_features',
    'Q', 'autoclear_features',
    'N', 'refcount_order',
    'N', 'header_length'
  ])
  SIZEOF_QCOW_HEADER_V3 = QCOW_HEADER_V3.size

  # indicate that the refcount of the referenced cluster is exactly one.
  QCOW_OFLAG_COPIED = (1 << 63)
  # indicate that the cluster is compressed (they never have the copied flag)
  QCOW_OFLAG_COMPRESSED = (1 << 62)

  # Masks that strip the flag bits above from an L1/L2 table entry.
  LO63_MASK = ~QCOW_OFLAG_COPIED
  LO62_MASK = ~(QCOW_OFLAG_COPIED | QCOW_OFLAG_COMPRESSED)

  L1E_OFFSET_MASK = 0x00fffffffffffe00
  L2E_OFFSET_MASK = 0x00fffffffffffe00
  L2E_COMPRESSED_OFFSET_SIZE_MASK = 0x3fffffffffffffff
  L2E_PREALLOCATED_MASK = 0x1

  SECTOR_SIZE = 512
  # Negative window bits => raw deflate stream (no zlib header), per Zlib::Inflate.
  ZLIB_WINDOW_BITS = -12

  # qcow2 v3 feature bitmaps (see the qcow2 spec).
  INCOMPATIBLE_FEATURES_MASK = {
    :dirty   => 0x1,
    :corrupt => 0x2
  }
  KNOWN_INCOMPATIBLE_FEATURES_MASK = 0x3
  COMPATIBLE_FEATURES_MASK = {
    :lazy_refcounts => 0x1
  }
  AUTOCLEAR_FEATURES_MASK = {
  }

  HEADER_EXTENSION_TYPE_SIZE = 4
  HEADER_EXTENSION_LENGTH_SIZE = 4
  HEADER_EXTENSION_TYPES = {
    :end_of_header_extension_area => 0x00000000,
    :backing_file_format_name     => 0xE2792ACA,
    :feature_table_name           => 0x6803f857
  }

  # MiqDisk plug-in entry point: validate the mount mode (only "r"/"rw" are
  # accepted; "rw" opens the file "r+"), capture file name/offset, and chain
  # the disk-type string through any downstream (container) disk.
  def d_init
    self.diskType = "QCOW"
    self.blockSize = SECTOR_SIZE
    if dInfo.mountMode.nil? || dInfo.mountMode == "r"
      dInfo.mountMode = "r"
      @fileMode = "r"
    elsif dInfo.mountMode == "rw"
      @fileMode = "r+"
    else
      raise "Unrecognized mountMode: #{dInfo.mountMode}"
    end
    @filename = dInfo.fileName
    @dOffset = dInfo.offset
    @downstreamDisk = dInfo.downstreamDisk
    self.diskType = "#{diskType}-#{@downstreamDisk.diskType}" if @downstreamDisk
    # Ensure all the disks in the chain are opened before we return (required to address RHEV SSA UID issues).
    backing_file_handle
  end

  def getBase
    self
  end

  # Read +len+ bytes at byte position +pos+ (shifted by @dOffset when this
  # disk is embedded in a container). Returns nil past @endByteAddr and
  # clamps reads that cross it.
  def d_read(pos, len, _offset = 0)
    pos += @dOffset if @dOffset
    return nil if pos >= @endByteAddr
    len = @endByteAddr - pos if (pos + len) > @endByteAddr
    # Translate the byte range into whole sectors, then trim the slice.
    sector_num, sector_offset = pos.divmod(SECTOR_SIZE)
    sector_count = ((pos + len - 1) / SECTOR_SIZE) - sector_num + 1
    read_buf = read_sectors(sector_num, sector_count)
    buf = read_buf[sector_offset, len]
    buf
  end

  # Writing QCOW images is not supported.
  def d_write(_pos, _buf, _len, _offset = 0)
    raise "QcowDisk#d_write not implemented"
  end

  # Close the backing-file and image handles, skipping any that were never
  # opened. NOTE(review): `h = nil` only clears the block-local variable;
  # @file_handle / @backing_file_handle keep their (closed) values.
  def d_close
    [@backing_file_handle, @file_handle].each do |h|
      next if h.nil?
      h.close
      h = nil
    end
  end

  # Disk size in sectors.
def d_size
  uint64(header, 'size') / @blockSize
end

# Header format version (1, 2, or 3).
def version
  @version ||= header['version']
end

# Resolve the backing file's path (relative to this image's directory), or ""
# when the image has no backing file. When the appliance volume manager knows
# a logical volume by the flattened name and the path is not a regular
# file/symlink, the LV name is used instead (direct-LUN volume groups).
def backing_file_name
  @backing_file_name ||= begin
    if backing_filename_offset > 0
      file_handle.seek(backing_filename_offset, IO::SEEK_SET)
      backing_fname = file_handle.read(backing_filename_size)
      bfn = File.expand_path File.join(File.dirname(@filename), backing_fname)
      #
      # Check if the backing file is a logical volume from a direct lun volume group.
      #
      bfn_test = File.expand_path File.join(File.dirname(@filename), File.basename(bfn))
      use_lv = false
      if (avm = @dInfo.applianceVolumeManager)
        use_lv = avm.lvHash.key?(bfn_test)
      end
      if (!File.symlink?(bfn) && !File.file?(bfn)) && use_lv
        bfn_test
      else
        bfn
      end
    else
      ""
    end
  end
end

# Sectors per cluster (cluster_size / 512).
def cluster_sectors
  @cluster_sectors ||= 1 << (cluster_bits - 9)
end

# Virtual disk size in sectors. `size` is supplied by the including class —
# TODO confirm it is the virtual (guest-visible) size.
def total_sectors
  @total_sectors ||= size / SECTOR_SIZE
end

private

def offset2sector(offset)
  offset / SECTOR_SIZE
end

def sector2offset(sector)
  sector * SECTOR_SIZE
end

# NOTE(review): masks with (total_sectors - 1), not (cluster_sectors - 1) as
# the name suggests; appears unused in this file — verify before relying on it.
def index_in_cluster(offset)
  offset2sector(offset) & (total_sectors - 1)
end

def cluster_bits
  @cluster_bits ||= header['cluster_bits']
end

def cluster_size
  @cluster_size ||= 1 << cluster_bits
end

# Bits addressing one L2 table. NOTE(review): the v1 branch reads
# header['l2bits'] but the v1 struct names the field 'l2_bits'; dead in
# practice because #header raises for version 1.
def l2_bits
  @l2_bits ||= begin
    case version
    when 1
      header['l2bits']
    when 2, 3
      cluster_bits - 3
    else
      raise "Unknown QCOW Version: #{version}"
    end
  end
end

# Entries per L2 table.
def l2_size
  @l2_size ||= 1 << l2_bits
end

def l1_bits
  @l1_bits ||= 64 - l2_bits - cluster_bits
end

def l1_table_offset
  @l1_table_offset ||= uint64(header, 'l1_table_offset')
end

# Fewest L1 entries that can map the whole virtual disk (ceil division).
def l1_size_minimum
  @l1_size_minimum ||= begin
    shift = cluster_bits + l2_bits
    (size + (1 << shift) - 1) >> shift # (size / (cluster_size * l2_size)).round_up(cluster_size)
  end
end

# L1 entry count: computed for v1, taken from the header for v2/v3.
def l1_size
  @l1_size ||= begin
    case version
    when 1
      shift = cluster_bits + l2_bits
      (size + (1 << shift) - 1) >> shift
    when 2, 3
      header['l1_size']
    else
      raise "Unknown QCOW Version: #{version}"
    end
  end
end

# The full L1 table, read once and memoized; sanity-checks the header's
# l1_size against the minimum required to map the disk.
def l1_table
  @l1_table ||= begin
    raise "l1_size (#{l1_size}) < l1_size_minimum (#{l1_size_minimum})" if l1_size < l1_size_minimum
    file_handle.seek(l1_table_offset, IO::SEEK_SET)
    read_entries(l1_size)
  end
end

# The L2 table starting at +l2_table_offset+, memoized per offset.
def l2_table(l2_table_offset)
  @l2_table ||= {}
  @l2_table[l2_table_offset] ||= begin
    file_handle.seek(l2_table_offset, IO::SEEK_SET)
    read_entries(l2_size)
  end
  @l2_table[l2_table_offset]
end

# Read +n+ big-endian 64-bit table entries from the current file position.
def read_entries(n)
  entries = []
  file_handle.read(n * SIZEOF_UINT64).unpack("N*").each_slice(2) { |hi, lo| entries << uint64_from_hi_lo(hi, lo) }
  entries
end

def refcount_order
  # https://github.com/qemu/qemu/blob/v2.2.0/docs/specs/qcow2.txt#L108
  version < 3 ? 4 : @header['refcount_order']
end

def header_length
  # https://github.com/qemu/qemu/blob/v2.2.0/docs/specs/qcow2.txt#L115
  version < 3 ? SIZEOF_QCOW_HEADER_V2 : @header['header_length']
end

def refcount_table_clusters
  return nil if version == 1
  @refcount_table_clusters ||= header['refcount_table_clusters']
end

def refcount_table_offset
  return nil if version == 1
  @refcount_table_offset ||= uint64(header, 'refcount_table_offset')
end

# NOTE(review): the memo ivar is misspelled (@snaphshots_count); harmless,
# it still memoizes, but consider renaming.
def snapshots_count
  return nil if version == 1
  @snaphshots_count ||= header['number_of_snapshots']
end

def snapshots_offset
  return nil if version == 1
  @snapshots_offset ||= uint64(header, 'snapshots_offset')
end

def crypt_method
  @crypt_method ||= header['crypt_method']
end

# Backing-file name length, capped at 1023 bytes as a safety limit.
def backing_filename_size
  @backing_filename_size ||= [header['backing_filename_size'], 1023].min
end

def backing_filename_offset
  @backing_filename_offset ||= uint64(header, 'backing_filename_offset')
end

# Inflate a raw-deflate buffer (see ZLIB_WINDOW_BITS) into plaintext.
def decompress_buffer(buf)
  raise "decompression buffer cannot be nil" if buf.nil?
zi = Zlib::Inflate.new(ZLIB_WINDOW_BITS) rv = zi.inflate(buf) zi.finish zi.close rv end def decompress_cluster(cluster_offset) cluster_offset &= L2E_COMPRESSED_OFFSET_SIZE_MASK coffset = cluster_offset & cluster_offset_mask nb_sectors = ((cluster_offset >> csize_shift) & csize_mask) + 1 csize = nb_sectors * SECTOR_SIZE file_handle.seek(coffset, IO::SEEK_SET) buf = file_handle.read(csize) decompress_buffer(buf) end # # Data is not on the COW image - read from the base image / backing file # def read_backing_file(sector_num, nb_sectors) n = if ((sector_num + nb_sectors) <= total_sectors) nb_sectors elsif (sector_num >= total_sectors) 0 else total_sectors - sector_num end backing_buffer = '' if n > 0 nbytes = SECTOR_SIZE * n boffset = sector_num * SECTOR_SIZE backing_file_handle.seek(boffset, IO::SEEK_SET) backing_buffer = backing_file_handle.read(nbytes) raise "QCOW Backing File read returned NIL" if backing_buffer.nil? raise "QCOW Backing File read returned #{rbuf.length} bytes - requested #{nbytes} bytes" if backing_buffer.length != nbytes end backing_buffer << MemoryBuffer.create(SECTOR_SIZE * (nb_sectors - n)) backing_buffer end def read_image_file(file_offset, nbytes) raise "QCOW size #{size} is less than computed offset (#{file_offset})" if file_offset > size file_handle.seek(file_offset, IO::SEEK_SET) buffer = file_handle.read(nbytes) raise "QCOW Image File read returned NIL" if buffer.nil? raise "QCOW Image File read returned #{buffer.to_s.length} bytes - requested #{nbytes} bytes" if buffer.length != nbytes buffer end def read_sectors(sector_num, nb_sectors) buf = "" while nb_sectors > 0 index_in_cluster = sector_num & (cluster_sectors - 1) n = cluster_sectors - index_in_cluster n = nb_sectors if n > nb_sectors nbytes = SECTOR_SIZE * n cluster_offset = get_cluster_offset(sector_num * SECTOR_SIZE) if cluster_offset == 0 if backing_file_name.empty? 
rbuf = MemoryBuffer.create(nbytes)  # unallocated + no backing file => zeros
      else
        rbuf = read_backing_file(sector_num, n)
      end
    elsif compressed?(cluster_offset)
      rbuf = decompress_cluster(cluster_offset)
      rbuf = rbuf[index_in_cluster * SECTOR_SIZE, nbytes]
    elsif preallocated?(cluster_offset)
      rbuf = "\0" * nbytes
    else
      # Normal allocated cluster: strip flag bits and read directly.
      cluster_offset &= L2E_OFFSET_MASK
      file_offset = cluster_offset + (index_in_cluster * SECTOR_SIZE)
      rbuf = read_image_file(file_offset, nbytes)
    end
    buf << rbuf
    nb_sectors -= n
    sector_num += n
  end
  buf
end

# Map a guest byte offset to its raw L2 entry (with the COPIED flag stripped,
# COMPRESSED flag retained so callers can test it). Returns 0 when the
# cluster is unallocated or the offset is beyond the L1 table.
def get_cluster_offset(offset)
  cluster_offset = 0
  l1_index = offset >> (l2_bits + cluster_bits)
  if l1_index < l1_size
    l2_offset = l1_table[l1_index] & L1E_OFFSET_MASK
    if l2_offset > 0
      l2_index = (offset >> cluster_bits) & (l2_size - 1)
      cluster_offset = l2_table(l2_offset)[l2_index] & ~copied_mask
    end
  end
  cluster_offset
end

# Handle used to read this image: the downstream (container) disk when one
# exists, otherwise the file itself opened via MiqLargeFile.
def file_handle
  @file_handle ||= begin
    if @downstreamDisk
      $log.debug "QcowDisk.file_handle: downstreamDisk #{@downstreamDisk.dInfo.fileName}"
      @downstreamDisk
    else
      $log.debug "QcowDisk.file_handle: file #{@filename}"
      MiqLargeFile.open(@filename, @fileMode)
    end
  end
end

# Open (and memoize) the backing file as a MiqDisk, preferring an appliance
# volume-manager logical volume when one matches the backing-file name.
# NOTE(review): relies on OpenStruct being required elsewhere — confirm.
def backing_file_handle
  return nil if backing_file_name.empty?
  @backing_file_handle ||= begin
    dInfo = OpenStruct.new
    dInfo.fileName = backing_file_name
    $log.debug "QcowDisk.backing_file_handle: file #{@filename}"
    $log.debug "QcowDisk.backing_file_handle: opening backing file #{backing_file_name}"
    if (avm = @dInfo.applianceVolumeManager)
      if (bfh = avm.lvHash[dInfo.fileName])
        $log.debug "QcowDisk.backing_file_handle: using applianceVolumeManager for #{backing_file_name}"
        bfh.dInfo.applianceVolumeManager = avm
        bfh.dInfo.fileName = backing_file_name
        #
        # Here, we need to probe the disk to determine its data format,
        # QCOW for example. If the disk format is not flat, push a disk
        # supporting the format on top of this disk. Then set bfh to point
        # to the new top disk.
        #
        bfh = bfh.pushFormatSupport
      end
    end
    unless bfh
      bfh = MiqDisk.getDisk(dInfo)
    end
    bfh
  end
end

# Parse and memoize the on-disk header. Reads the partial header first to
# learn the version, then re-reads with the version-specific layout.
# V1 is rejected outright (the decode after the raise is unreachable).
def header
  @header ||= begin
    file_handle.seek(0, IO::SEEK_SET)
    partial_header = QCOW_HEADER_PARTIAL.decode(file_handle.read(SIZEOF_QCOW_HEADER_PARTIAL))
    file_handle.seek(0, IO::SEEK_SET)
    case partial_header['version']
    when 1
      raise "QCOW Version 1 is not supported"
      QCOW_HEADER_V1.decode(file_handle.read(SIZEOF_QCOW_HEADER_V1))
    when 2
      h = QCOW_HEADER_V2.decode(file_handle.read(SIZEOF_QCOW_HEADER_V2))
      # TODO: Handle Encryption
      raise "QCOW Encryption is not supported" if h['crypt_method'] == 1
      h
    when 3
      h = QCOW_HEADER_V3.decode(file_handle.read(SIZEOF_QCOW_HEADER_V3))
      # TODO: warning if dirty or corrupt (?)
      raise "QCOW Encryption is not supported" if h['crypt_method'] == 1
      raise "Unknown QCOW incompatible features" if h['incompatible_features'] & ~KNOWN_INCOMPATIBLE_FEATURES_MASK > 0
      h
    else
      raise "Uknown Version: #{partial_header['version'].inspect}"
    end
  end
end

# Placeholder: qcow2 header extensions are not parsed yet.
def header_extensions
  # ...
end

# A 64-bit big-endian value as two 32-bit words.
UINT64 = BinaryStruct.new([
  'N', 'uint64_hi',
  'N', 'uint64_lo',
])
SIZEOF_UINT64 = UINT64.size

def uint64_from_hi_lo(hi, lo)
  (hi << 32) | lo
end

# Recombine a decoded <name>_hi / <name>_lo pair into one 64-bit value.
def uint64(h, name)
  uint64_from_hi_lo(h["#{name}_hi"], h["#{name}_lo"])
end

# Debug helper: render a table entry with its flag bits spelled out.
def format_entry(e)
  copied = copied?(e) ? 'COPIED' : 'NOT COPIED'
  compressed = compressed?(e) ?
'COMPRESSED' : 'NOT COMPRESSED' "#{e} => #{lo62(e)} (#{copied}, #{compressed})" end def decode_entry(e) uint64(UINT64.decode(e), "uint64") end def get_entry(offset, index) pos = offset + (index * SIZEOF_UINT64) file_handle.seek(pos, IO::SEEK_SET) decode_entry file_handle.read(SIZEOF_UINT64) end def lo62(x) x & LO62_MASK end def lo63(x) x & LO63_MASK end def copied_mask @copied_mask ||= begin case version when 1 0 when 2, 3 1 << 63 else raise "Unknown QCOW Version: #{version}" end end end def compressed_mask @compressed_mask ||= begin case version when 1 1 << 63 when 2, 3 1 << 62 else raise "Unknown QCOW Version: #{version}" end end end def compressed?(cluster_offset) (cluster_offset & compressed_mask) > 0 end def copied?(cluster_offset) (cluster_offset & copied_mask) > 0 end def preallocated?(cluster_offset) cluster_offset & L2E_PREALLOCATED_MASK > 0 end def count_contiguous_clusters(nb_clusters, cluster_size, l2_table, l2_index, start = 0) offset = lo63(l2_table[l2_index]) return 0 if offset.zero? count = start while count < (start + nb_clusters) break if (offset + (count * cluster_size)) != lo63(l2_table[l2_index + count]) count += 1 end (count - start) end def count_contiguous_free_clusters(_nb_clusters, l2_table, l2_index) count = 0 while nb_cluster > 0 break if l2_table[l2_index + count] != 0 count += 1 nb_cluster -= 1 end count end def csize_shift @csize_shift ||= begin case version when 1 63 - cluster_bits when 2, 3 62 - (cluster_bits - 8) else raise "Unknown QCOW Version: #{version}" end end end def csize_mask @csize_mask ||= (1 << (cluster_bits - 8)) - 1 end def cluster_offset_mask @cluster_offset_mask ||= (1 << csize_shift) - 1 end def incompatible_features version < 3 ? 0 : @header['incompatible_features'] end def compatible_features version < 3 ? 0 : @header['compatible_features'] end def autoclear_features version < 3 ? 0 : @header['autoclear_features'] end def dirty? 
(incompatible_features & INCOMPATIBLE_FEATURES_MASK[:dirty]) == INCOMPATIBLE_FEATURES_MASK[:dirty] end def corrupt? (incompatible_features & INCOMPATIBLE_FEATURES_MASK[:corrupt]) == INCOMPATIBLE_FEATURES_MASK[:corrupt] end def lazy_refcounts? compatible_features & COMPATIBLE_FEATURES_MASK[:lazy_refcounts] end def dump out = "\#<#{self.class}:0x#{'%08x' % object_id}>\n" out << "Version : #{version}\n" out << "Image Size : #{size}\n" out << "Backing File Name : #{backing_file_name}\n" out << "Backing File Name Offset : #{backing_filename_offset}\n" out << "Backing File Name Size : #{backing_filename_size}\n" out << "Cluster Bits : #{cluster_bits}\n" out << "Cluster Size : #{cluster_size}\n" out << "Cluster Sectors : #{cluster_sectors}\n" out << "L1 Table Bits : #{l1_bits}\n" out << "L1 Table Size : #{l1_size}\n" out << "L1 Table Size Minimum : #{l1_size_minimum}\n" out << "L1 Table Offset : #{l1_table_offset}\n" out << "L2 Table Bits : #{l2_bits}\n" out << "L2 Table Size : #{l2_size}\n" out << "Crypt Method : #{crypt_method}\n" out << "Snapshot Count : #{snapshots_count}\n" out << "Snapshot Offset : #{snapshots_offset}\n" out << "RefCount Table Offset : #{refcount_table_offset}\n" out << "RefCount Table Clusters : #{refcount_table_clusters}\n" out << "RefCount Order : #{refcount_order}\n" out << "Header length : #{header_length}\n" out << "Incompatible Features : #{incompatible_features}\n" out << "Compatible Features : #{compatible_features}\n" out << "Autoclear Features : #{autoclear_features}\n" out << "Dirty : #{dirty?}\n" out << "Corrupt : #{corrupt?}\n" out end end
apache-2.0
deka108/meas_deka
latex_viewer/config.js
33416
System.config({ baseURL: "/", defaultJSExtensions: true, transpiler: "babel", babelOptions: { "optional": [ "runtime", "optimisation.modules.system" ] }, paths: { "github:*": "jspm_packages/github/*", "npm:*": "jspm_packages/npm/*", "src/": "src/*", "style/": "style/*" }, map: { "angular": "github:angular/bower-angular@1.6.2", "angular-animate": "github:angular/bower-angular-animate@1.6.2", "angular-aria": "github:angular/bower-angular-aria@1.6.2", "angular-cookie": "npm:angular-cookie@4.1.0", "angular-filter": "npm:angular-filter@0.5.15", "angular-jwt": "npm:angular-jwt@0.1.9", "angular-katex": "npm:angular-katex@0.10.0", "angular-loading-bar": "github:chieffancypants/angular-loading-bar@0.9.0", "angular-material": "github:angular/bower-material@1.1.3", "angular-material-data-table": "npm:angular-material-data-table@0.10.10", "angular-messages": "github:angular/bower-angular-messages@1.6.2", "babel": "npm:babel-core@5.8.38", "babel-runtime": "npm:babel-runtime@5.8.38", "core-js": "npm:core-js@1.2.7", "katex": "npm:katex@0.7.1", "ng-token-auth": "github:lynndylanhurley/ng-token-auth@master", "node-mathquill": "npm:node-mathquill@0.10.2", "github:angular/bower-angular-animate@1.6.2": { "angular": "github:angular/bower-angular@1.6.2" }, "github:angular/bower-angular-aria@1.6.2": { "angular": "github:angular/bower-angular@1.6.2" }, "github:angular/bower-angular-messages@1.6.2": { "angular": "github:angular/bower-angular@1.6.2" }, "github:angular/bower-material@1.1.3": { "angular": "github:angular/bower-angular@1.6.2", "angular-animate": "github:angular/bower-angular-animate@1.6.2", "angular-aria": "github:angular/bower-angular-aria@1.6.2", "css": "github:systemjs/plugin-css@0.1.32" }, "github:jspm/nodelibs-assert@0.1.0": { "assert": "npm:assert@1.4.1" }, "github:jspm/nodelibs-buffer@0.1.0": { "buffer": "npm:buffer@3.6.0" }, "github:jspm/nodelibs-constants@0.1.0": { "constants-browserify": "npm:constants-browserify@0.0.1" }, "github:jspm/nodelibs-crypto@0.1.0": { 
"crypto-browserify": "npm:crypto-browserify@3.11.0" }, "github:jspm/nodelibs-events@0.1.1": { "events": "npm:events@1.0.2" }, "github:jspm/nodelibs-http@1.7.1": { "Base64": "npm:Base64@0.2.1", "events": "github:jspm/nodelibs-events@0.1.1", "inherits": "npm:inherits@2.0.1", "stream": "github:jspm/nodelibs-stream@0.1.0", "url": "github:jspm/nodelibs-url@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "github:jspm/nodelibs-https@0.1.0": { "https-browserify": "npm:https-browserify@0.0.0" }, "github:jspm/nodelibs-net@0.1.2": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "http": "github:jspm/nodelibs-http@1.7.1", "net": "github:jspm/nodelibs-net@0.1.2", "process": "github:jspm/nodelibs-process@0.1.2", "stream": "github:jspm/nodelibs-stream@0.1.0", "timers": "github:jspm/nodelibs-timers@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "github:jspm/nodelibs-os@0.1.0": { "os-browserify": "npm:os-browserify@0.1.2" }, "github:jspm/nodelibs-path@0.1.0": { "path-browserify": "npm:path-browserify@0.0.0" }, "github:jspm/nodelibs-process@0.1.2": { "process": "npm:process@0.11.9" }, "github:jspm/nodelibs-querystring@0.1.0": { "querystring": "npm:querystring@0.2.0" }, "github:jspm/nodelibs-stream@0.1.0": { "stream-browserify": "npm:stream-browserify@1.0.0" }, "github:jspm/nodelibs-string_decoder@0.1.0": { "string_decoder": "npm:string_decoder@0.10.31" }, "github:jspm/nodelibs-timers@0.1.0": { "timers-browserify": "npm:timers-browserify@1.4.2" }, "github:jspm/nodelibs-tty@0.1.0": { "tty-browserify": "npm:tty-browserify@0.0.0" }, "github:jspm/nodelibs-url@0.1.0": { "url": "npm:url@0.10.3" }, "github:jspm/nodelibs-util@0.1.0": { "util": "npm:util@0.10.3" }, "github:jspm/nodelibs-vm@0.1.0": { "vm-browserify": "npm:vm-browserify@0.0.4" }, "github:jspm/nodelibs-zlib@0.1.0": { "browserify-zlib": "npm:browserify-zlib@0.1.4" }, "github:lynndylanhurley/ng-token-auth@master": { "config": "npm:config@0.4.37", "crypto": 
"npm:crypto@0.0.3", "express": "npm:express@3.5.3", "gulp-rename": "npm:gulp-rename@1.2.2", "http-proxy": "npm:http-proxy@1.1.6", "js-yaml": "npm:js-yaml@3.0.2", "process": "github:jspm/nodelibs-process@0.1.2", "request": "npm:request@2.36.0", "sitemap": "npm:sitemap@0.7.4" }, "npm:angular-filter@0.5.15": { "angular": "npm:angular@1.6.2" }, "npm:angular-katex@0.10.0": { "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:angular-material-data-table@0.10.10": { "angular": "npm:angular@1.6.2", "angular-material": "npm:angular-material@1.1.3", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:angular-material@1.1.3": { "angular": "github:angular/bower-angular@1.6.2", "angular-animate": "github:angular/bower-angular-animate@1.6.2", "angular-aria": "github:angular/bower-angular-aria@1.6.2", "angular-messages": "github:angular/bower-angular-messages@1.6.2", "css": "github:systemjs/plugin-css@0.1.32" }, "npm:argparse@0.1.16": { "assert": "github:jspm/nodelibs-assert@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "underscore": "npm:underscore@1.7.0", "underscore.string": "npm:underscore.string@2.4.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:asn1.js@4.9.1": { "bn.js": "npm:bn.js@4.11.6", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "inherits": "npm:inherits@2.0.1", "minimalistic-assert": "npm:minimalistic-assert@1.0.0", "vm": "github:jspm/nodelibs-vm@0.1.0" }, "npm:asn1@0.1.11": { "assert": "github:jspm/nodelibs-assert@0.1.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "sys": "github:jspm/nodelibs-util@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:assert-plus@0.1.5": { "assert": "github:jspm/nodelibs-assert@0.1.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "stream": "github:jspm/nodelibs-stream@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:assert@1.4.1": { "assert": 
"github:jspm/nodelibs-assert@0.1.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "util": "npm:util@0.10.3" }, "npm:async@0.9.2": { "process": "github:jspm/nodelibs-process@0.1.2", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:aws-sign2@0.5.0": { "crypto": "github:jspm/nodelibs-crypto@0.1.0", "url": "github:jspm/nodelibs-url@0.1.0" }, "npm:babel-runtime@5.8.38": { "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:basic-auth-connect@1.0.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "http": "github:jspm/nodelibs-http@1.7.1" }, "npm:batch@0.5.0": { "events": "github:jspm/nodelibs-events@0.1.1", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:bn.js@4.11.6": { "buffer": "github:jspm/nodelibs-buffer@0.1.0" }, "npm:boom@0.4.2": { "hoek": "npm:hoek@0.9.1", "http": "github:jspm/nodelibs-http@1.7.1", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:browserify-aes@1.0.6": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "buffer-xor": "npm:buffer-xor@1.0.3", "cipher-base": "npm:cipher-base@1.0.3", "create-hash": "npm:create-hash@1.1.2", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "evp_bytestokey": "npm:evp_bytestokey@1.0.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "inherits": "npm:inherits@2.0.1", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:browserify-cipher@1.0.0": { "browserify-aes": "npm:browserify-aes@1.0.6", "browserify-des": "npm:browserify-des@1.0.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "evp_bytestokey": "npm:evp_bytestokey@1.0.0" }, "npm:browserify-des@1.0.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "cipher-base": "npm:cipher-base@1.0.3", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "des.js": "npm:des.js@1.0.0", "inherits": "npm:inherits@2.0.1" }, "npm:browserify-rsa@4.0.1": { "bn.js": "npm:bn.js@4.11.6", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "constants": 
"github:jspm/nodelibs-constants@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "randombytes": "npm:randombytes@2.0.3" }, "npm:browserify-sign@4.0.0": { "bn.js": "npm:bn.js@4.11.6", "browserify-rsa": "npm:browserify-rsa@4.0.1", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "create-hash": "npm:create-hash@1.1.2", "create-hmac": "npm:create-hmac@1.1.4", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "elliptic": "npm:elliptic@6.4.0", "inherits": "npm:inherits@2.0.1", "parse-asn1": "npm:parse-asn1@5.0.0", "stream": "github:jspm/nodelibs-stream@0.1.0" }, "npm:browserify-zlib@0.1.4": { "assert": "github:jspm/nodelibs-assert@0.1.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "pako": "npm:pako@0.2.9", "process": "github:jspm/nodelibs-process@0.1.2", "readable-stream": "npm:readable-stream@2.2.3", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:buffer-crc32@0.2.1": { "buffer": "github:jspm/nodelibs-buffer@0.1.0" }, "npm:buffer-shims@1.0.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0" }, "npm:buffer-xor@1.0.3": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:buffer@3.6.0": { "base64-js": "npm:base64-js@0.0.8", "child_process": "github:jspm/nodelibs-child_process@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "ieee754": "npm:ieee754@1.1.8", "isarray": "npm:isarray@1.0.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:cipher-base@1.0.3": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "inherits": "npm:inherits@2.0.1", "stream": "github:jspm/nodelibs-stream@0.1.0", "string_decoder": "github:jspm/nodelibs-string_decoder@0.1.0" }, "npm:combined-stream@0.0.7": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "delayed-stream": "npm:delayed-stream@0.0.5", "stream": "github:jspm/nodelibs-stream@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:commander@1.3.2": { "child_process": "github:jspm/nodelibs-child_process@0.1.0", "events": "github:jspm/nodelibs-events@0.1.1", "fs": 
"github:jspm/nodelibs-fs@0.1.2", "keypress": "npm:keypress@0.1.0", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "tty": "github:jspm/nodelibs-tty@0.1.0" }, "npm:compressible@1.0.0": { "assert": "github:jspm/nodelibs-assert@0.1.0", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:compression@1.0.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "bytes": "npm:bytes@0.2.1", "compressible": "npm:compressible@1.0.0", "negotiator": "npm:negotiator@0.3.0", "zlib": "github:jspm/nodelibs-zlib@0.1.0" }, "npm:config@0.4.37": { "fs": "github:jspm/nodelibs-fs@0.1.2", "os": "github:jspm/nodelibs-os@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:connect-timeout@1.0.0": { "debug": "npm:debug@0.8.0" }, "npm:connect@2.14.5": { "basic-auth-connect": "npm:basic-auth-connect@1.0.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "bytes": "npm:bytes@0.3.0", "compression": "npm:compression@1.0.0", "connect-timeout": "npm:connect-timeout@1.0.0", "cookie-parser": "npm:cookie-parser@1.0.1", "cookie-signature": "npm:cookie-signature@1.0.3", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "csurf": "npm:csurf@1.1.0", "debug": "npm:debug@0.8.0", "errorhandler": "npm:errorhandler@1.0.0", "events": "github:jspm/nodelibs-events@0.1.1", "express-session": "npm:express-session@1.0.2", "fresh": "npm:fresh@0.2.2", "fs": "github:jspm/nodelibs-fs@0.1.2", "http": "github:jspm/nodelibs-http@1.7.1", "method-override": "npm:method-override@1.0.0", "morgan": "npm:morgan@1.0.0", "multiparty": "npm:multiparty@2.2.0", "path": "github:jspm/nodelibs-path@0.1.0", "pause": "npm:pause@0.0.1", "process": "github:jspm/nodelibs-process@0.1.2", "qs": "npm:qs@0.6.6", "raw-body": "npm:raw-body@1.1.4", "response-time": "npm:response-time@1.0.0", "serve-index": "npm:serve-index@1.0.1", "serve-static": "npm:serve-static@1.1.0", "setimmediate": "npm:setimmediate@1.0.1", "static-favicon": "npm:static-favicon@1.0.2", "url": 
"github:jspm/nodelibs-url@0.1.0", "vhost": "npm:vhost@1.0.0" }, "npm:constants-browserify@0.0.1": { "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:cookie-parser@1.0.1": { "cookie": "npm:cookie@0.1.0", "cookie-signature": "npm:cookie-signature@1.0.3" }, "npm:cookie-signature@1.0.3": { "crypto": "github:jspm/nodelibs-crypto@0.1.0" }, "npm:core-js@1.2.7": { "fs": "github:jspm/nodelibs-fs@0.1.2", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:core-util-is@1.0.2": { "buffer": "github:jspm/nodelibs-buffer@0.1.0" }, "npm:create-ecdh@4.0.0": { "bn.js": "npm:bn.js@4.11.6", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "elliptic": "npm:elliptic@6.4.0" }, "npm:create-hash@1.1.2": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "cipher-base": "npm:cipher-base@1.0.3", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "inherits": "npm:inherits@2.0.1", "ripemd160": "npm:ripemd160@1.0.1", "sha.js": "npm:sha.js@2.4.8" }, "npm:create-hmac@1.1.4": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "create-hash": "npm:create-hash@1.1.2", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "inherits": "npm:inherits@2.0.1", "stream": "github:jspm/nodelibs-stream@0.1.0" }, "npm:cryptiles@0.2.2": { "boom": "npm:boom@0.4.2", "crypto": "github:jspm/nodelibs-crypto@0.1.0" }, "npm:crypto-browserify@3.11.0": { "browserify-cipher": "npm:browserify-cipher@1.0.0", "browserify-sign": "npm:browserify-sign@4.0.0", "create-ecdh": "npm:create-ecdh@4.0.0", "create-hash": "npm:create-hash@1.1.2", "create-hmac": "npm:create-hmac@1.1.4", "diffie-hellman": "npm:diffie-hellman@5.0.2", "inherits": "npm:inherits@2.0.1", "pbkdf2": "npm:pbkdf2@3.0.9", "public-encrypt": "npm:public-encrypt@4.0.0", "randombytes": "npm:randombytes@2.0.3" }, "npm:csurf@1.1.0": { "crypto": "github:jspm/nodelibs-crypto@0.1.0", "scmp": 
"npm:scmp@0.0.3", "uid2": "npm:uid2@0.0.3" }, "npm:ctype@0.5.3": { "assert": "github:jspm/nodelibs-assert@0.1.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:debug@0.7.4": { "process": "github:jspm/nodelibs-process@0.1.2", "tty": "github:jspm/nodelibs-tty@0.1.0" }, "npm:debug@0.8.0": { "process": "github:jspm/nodelibs-process@0.1.2", "tty": "github:jspm/nodelibs-tty@0.1.0" }, "npm:delayed-stream@0.0.5": { "stream": "github:jspm/nodelibs-stream@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:des.js@1.0.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "inherits": "npm:inherits@2.0.1", "minimalistic-assert": "npm:minimalistic-assert@1.0.0" }, "npm:diffie-hellman@5.0.2": { "bn.js": "npm:bn.js@4.11.6", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "miller-rabin": "npm:miller-rabin@4.0.0", "randombytes": "npm:randombytes@2.0.3", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:elliptic@6.4.0": { "bn.js": "npm:bn.js@4.11.6", "brorand": "npm:brorand@1.1.0", "hash.js": "npm:hash.js@1.0.3", "hmac-drbg": "npm:hmac-drbg@1.0.0", "inherits": "npm:inherits@2.0.1", "minimalistic-assert": "npm:minimalistic-assert@1.0.0", "minimalistic-crypto-utils": "npm:minimalistic-crypto-utils@1.0.1", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:errorhandler@1.0.0": { "fs": "github:jspm/nodelibs-fs@0.1.2", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:esprima@1.0.4": { "fs": "github:jspm/nodelibs-fs@0.1.2", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:evp_bytestokey@1.0.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "create-hash": "npm:create-hash@1.1.2", "crypto": "github:jspm/nodelibs-crypto@0.1.0" }, "npm:express-session@1.0.2": { "buffer-crc32": "npm:buffer-crc32@0.2.1", "cookie": "npm:cookie@0.1.0", "cookie-signature": "npm:cookie-signature@1.0.3", "debug": "npm:debug@0.7.4", "events": 
"github:jspm/nodelibs-events@0.1.1", "process": "github:jspm/nodelibs-process@0.1.2", "uid2": "npm:uid2@0.0.3", "url": "github:jspm/nodelibs-url@0.1.0", "utils-merge": "npm:utils-merge@1.0.0" }, "npm:express@3.5.3": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "buffer-crc32": "npm:buffer-crc32@0.2.1", "commander": "npm:commander@1.3.2", "connect": "npm:connect@2.14.5", "cookie": "npm:cookie@0.1.2", "cookie-signature": "npm:cookie-signature@1.0.3", "debug": "npm:debug@0.8.0", "fresh": "npm:fresh@0.2.2", "fs": "github:jspm/nodelibs-fs@0.1.2", "http": "github:jspm/nodelibs-http@1.7.1", "merge-descriptors": "npm:merge-descriptors@0.0.2", "methods": "npm:methods@0.1.0", "mkdirp": "npm:mkdirp@0.4.0", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "range-parser": "npm:range-parser@1.0.0", "send": "npm:send@0.3.0", "url": "github:jspm/nodelibs-url@0.1.0" }, "npm:forever-agent@0.5.2": { "http": "github:jspm/nodelibs-http@1.7.1", "https": "github:jspm/nodelibs-https@0.1.0", "net": "github:jspm/nodelibs-net@0.1.2", "tls": "github:jspm/nodelibs-tls@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:form-data@0.1.4": { "async": "npm:async@0.9.2", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "combined-stream": "npm:combined-stream@0.0.7", "fs": "github:jspm/nodelibs-fs@0.1.2", "http": "github:jspm/nodelibs-http@1.7.1", "https": "github:jspm/nodelibs-https@0.1.0", "mime": "npm:mime@1.2.11", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "url": "github:jspm/nodelibs-url@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:gulp-rename@1.2.2": { "path": "github:jspm/nodelibs-path@0.1.0", "stream": "github:jspm/nodelibs-stream@0.1.0" }, "npm:hash.js@1.0.3": { "inherits": "npm:inherits@2.0.1" }, "npm:hawk@1.0.0": { "boom": "npm:boom@0.4.2", "cryptiles": "npm:cryptiles@0.2.2", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "hoek": "npm:hoek@0.9.1", "process": 
"github:jspm/nodelibs-process@0.1.2", "sntp": "npm:sntp@0.2.4", "systemjs-json": "github:systemjs/plugin-json@0.1.2", "url": "github:jspm/nodelibs-url@0.1.0" }, "npm:hmac-drbg@1.0.0": { "hash.js": "npm:hash.js@1.0.3", "minimalistic-assert": "npm:minimalistic-assert@1.0.0", "minimalistic-crypto-utils": "npm:minimalistic-crypto-utils@1.0.1", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:hoek@0.9.1": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:http-proxy@1.1.6": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "eventemitter3": "npm:eventemitter3@2.0.2", "fs": "github:jspm/nodelibs-fs@0.1.2", "http": "github:jspm/nodelibs-http@1.7.1", "https": "github:jspm/nodelibs-https@0.1.0", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "url": "github:jspm/nodelibs-url@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:http-signature@0.10.1": { "asn1": "npm:asn1@0.1.11", "assert-plus": "npm:assert-plus@0.1.5", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "ctype": "npm:ctype@0.5.3", "http": "github:jspm/nodelibs-http@1.7.1", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:https-browserify@0.0.0": { "http": "github:jspm/nodelibs-http@1.7.1" }, "npm:inherits@2.0.1": { "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:js-yaml@3.0.2": { "argparse": "npm:argparse@0.1.16", "esprima": "npm:esprima@1.0.4", "fs": "github:jspm/nodelibs-fs@0.1.2", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "systemjs-json": "github:systemjs/plugin-json@0.1.2", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:katex@0.7.1": { "match-at": "npm:match-at@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:keypress@0.1.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "process": 
"github:jspm/nodelibs-process@0.1.2", "tty": "github:jspm/nodelibs-tty@0.1.0" }, "npm:method-override@1.0.0": { "methods": "npm:methods@0.1.0" }, "npm:miller-rabin@4.0.0": { "bn.js": "npm:bn.js@4.11.6", "brorand": "npm:brorand@1.1.0" }, "npm:mime@1.2.11": { "assert": "github:jspm/nodelibs-assert@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:mkdirp@0.4.0": { "fs": "github:jspm/nodelibs-fs@0.1.2", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:morgan@1.0.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "bytes": "npm:bytes@0.2.1", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:multiparty@2.2.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "http": "github:jspm/nodelibs-http@1.7.1", "os": "github:jspm/nodelibs-os@0.1.0", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "readable-stream": "npm:readable-stream@1.1.14", "stream-counter": "npm:stream-counter@0.2.0", "string_decoder": "github:jspm/nodelibs-string_decoder@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:negotiator@0.3.0": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "http": "github:jspm/nodelibs-http@1.7.1" }, "npm:negotiator@0.4.2": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "http": "github:jspm/nodelibs-http@1.7.1" }, "npm:node-mathquill@0.10.2": { "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:node-uuid@1.4.7": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0" }, "npm:oauth-sign@0.3.0": { "assert": "github:jspm/nodelibs-assert@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "querystring": "github:jspm/nodelibs-querystring@0.1.0" }, "npm:os-browserify@0.1.2": { "os": 
"github:jspm/nodelibs-os@0.1.0" }, "npm:pako@0.2.9": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:parse-asn1@5.0.0": { "asn1.js": "npm:asn1.js@4.9.1", "browserify-aes": "npm:browserify-aes@1.0.6", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "create-hash": "npm:create-hash@1.1.2", "evp_bytestokey": "npm:evp_bytestokey@1.0.0", "pbkdf2": "npm:pbkdf2@3.0.9", "systemjs-json": "github:systemjs/plugin-json@0.1.2" }, "npm:parseurl@1.0.1": { "url": "github:jspm/nodelibs-url@0.1.0" }, "npm:path-browserify@0.0.0": { "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:pbkdf2@3.0.9": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "create-hmac": "npm:create-hmac@1.1.4", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:process-nextick-args@1.0.7": { "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:process@0.11.9": { "assert": "github:jspm/nodelibs-assert@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "vm": "github:jspm/nodelibs-vm@0.1.0" }, "npm:public-encrypt@4.0.0": { "bn.js": "npm:bn.js@4.11.6", "browserify-rsa": "npm:browserify-rsa@4.0.1", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "create-hash": "npm:create-hash@1.1.2", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "parse-asn1": "npm:parse-asn1@5.0.0", "randombytes": "npm:randombytes@2.0.3" }, "npm:punycode@1.3.2": { "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:punycode@1.4.1": { "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:randombytes@2.0.3": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:raw-body@1.1.4": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "bytes": "npm:bytes@0.3.0", "process": "github:jspm/nodelibs-process@0.1.2", "string_decoder": "github:jspm/nodelibs-string_decoder@0.1.0" }, "npm:readable-stream@1.1.14": { "buffer": 
"github:jspm/nodelibs-buffer@0.1.0", "core-util-is": "npm:core-util-is@1.0.2", "events": "github:jspm/nodelibs-events@0.1.1", "inherits": "npm:inherits@2.0.1", "isarray": "npm:isarray@0.0.1", "process": "github:jspm/nodelibs-process@0.1.2", "stream-browserify": "npm:stream-browserify@1.0.0", "string_decoder": "npm:string_decoder@0.10.31" }, "npm:readable-stream@2.2.3": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "buffer-shims": "npm:buffer-shims@1.0.0", "core-util-is": "npm:core-util-is@1.0.2", "events": "github:jspm/nodelibs-events@0.1.1", "inherits": "npm:inherits@2.0.1", "isarray": "npm:isarray@1.0.0", "process": "github:jspm/nodelibs-process@0.1.2", "process-nextick-args": "npm:process-nextick-args@1.0.7", "string_decoder": "npm:string_decoder@0.10.31", "util-deprecate": "npm:util-deprecate@1.0.2" }, "npm:request@2.36.0": { "aws-sign2": "npm:aws-sign2@0.5.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0", "forever-agent": "npm:forever-agent@0.5.2", "form-data": "npm:form-data@0.1.4", "hawk": "npm:hawk@1.0.0", "http": "github:jspm/nodelibs-http@1.7.1", "http-signature": "npm:http-signature@0.10.1", "json-stringify-safe": "npm:json-stringify-safe@5.0.1", "mime": "npm:mime@1.2.11", "net": "github:jspm/nodelibs-net@0.1.2", "node-uuid": "npm:node-uuid@1.4.7", "oauth-sign": "npm:oauth-sign@0.3.0", "process": "github:jspm/nodelibs-process@0.1.2", "qs": "npm:qs@0.6.6", "querystring": "github:jspm/nodelibs-querystring@0.1.0", "stream": "github:jspm/nodelibs-stream@0.1.0", "tough-cookie": "npm:tough-cookie@2.3.2", "tunnel-agent": "npm:tunnel-agent@0.4.3", "url": "github:jspm/nodelibs-url@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:ripemd160@1.0.1": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:send@0.3.0": { "buffer-crc32": "npm:buffer-crc32@0.2.1", "debug": "npm:debug@0.8.0", "fresh": "npm:fresh@0.2.2", "fs": "github:jspm/nodelibs-fs@0.1.2", 
"http": "github:jspm/nodelibs-http@1.7.1", "mime": "npm:mime@1.2.11", "path": "github:jspm/nodelibs-path@0.1.0", "range-parser": "npm:range-parser@1.0.0", "stream": "github:jspm/nodelibs-stream@0.1.0" }, "npm:serve-index@1.0.1": { "batch": "npm:batch@0.5.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "http": "github:jspm/nodelibs-http@1.7.1", "negotiator": "npm:negotiator@0.4.2", "path": "github:jspm/nodelibs-path@0.1.0", "url": "github:jspm/nodelibs-url@0.1.0" }, "npm:serve-static@1.1.0": { "parseurl": "npm:parseurl@1.0.1", "path": "github:jspm/nodelibs-path@0.1.0", "send": "npm:send@0.3.0", "url": "github:jspm/nodelibs-url@0.1.0" }, "npm:setimmediate@1.0.1": { "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:sha.js@2.4.8": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2", "inherits": "npm:inherits@2.0.1", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:sitemap@0.7.4": { "fs": "github:jspm/nodelibs-fs@0.1.2", "path": "github:jspm/nodelibs-path@0.1.0", "process": "github:jspm/nodelibs-process@0.1.2", "url": "github:jspm/nodelibs-url@0.1.0" }, "npm:sntp@0.2.4": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "dgram": "github:jspm/nodelibs-dgram@0.1.0", "dns": "github:jspm/nodelibs-dns@0.1.0", "hoek": "npm:hoek@0.9.1", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:static-favicon@1.0.2": { "crypto": "github:jspm/nodelibs-crypto@0.1.0", "fs": "github:jspm/nodelibs-fs@0.1.2" }, "npm:stream-browserify@1.0.0": { "events": "github:jspm/nodelibs-events@0.1.1", "inherits": "npm:inherits@2.0.1", "readable-stream": "npm:readable-stream@1.1.14" }, "npm:stream-counter@0.2.0": { "readable-stream": "npm:readable-stream@1.1.14", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:string_decoder@0.10.31": { "buffer": "github:jspm/nodelibs-buffer@0.1.0" }, "npm:timers-browserify@1.4.2": { "process": "npm:process@0.11.9" }, "npm:tough-cookie@2.3.2": { "net": "github:jspm/nodelibs-net@0.1.2", "punycode": "npm:punycode@1.4.1", 
"systemjs-json": "github:systemjs/plugin-json@0.1.2", "url": "github:jspm/nodelibs-url@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:tunnel-agent@0.4.3": { "assert": "github:jspm/nodelibs-assert@0.1.0", "buffer": "github:jspm/nodelibs-buffer@0.1.0", "events": "github:jspm/nodelibs-events@0.1.1", "http": "github:jspm/nodelibs-http@1.7.1", "https": "github:jspm/nodelibs-https@0.1.0", "net": "github:jspm/nodelibs-net@0.1.2", "process": "github:jspm/nodelibs-process@0.1.2", "tls": "github:jspm/nodelibs-tls@0.1.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:uid2@0.0.3": { "buffer": "github:jspm/nodelibs-buffer@0.1.0", "crypto": "github:jspm/nodelibs-crypto@0.1.0" }, "npm:url@0.10.3": { "assert": "github:jspm/nodelibs-assert@0.1.0", "punycode": "npm:punycode@1.3.2", "querystring": "npm:querystring@0.2.0", "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:util-deprecate@1.0.2": { "util": "github:jspm/nodelibs-util@0.1.0" }, "npm:util@0.10.3": { "inherits": "npm:inherits@2.0.1", "process": "github:jspm/nodelibs-process@0.1.2" }, "npm:vm-browserify@0.0.4": { "indexof": "npm:indexof@0.0.1" } } });
apache-2.0
patricklaw/pants
src/rust/engine/fs/brfs/src/tests.rs
6945
use tempfile; use testutil; use crate::mount; use hashing; use store::Store; use testutil::{ data::{TestData, TestDirectory}, file, }; #[tokio::test] async fn missing_digest() { let (store_dir, mount_dir) = make_dirs(); let runtime = task_executor::Executor::new(); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); assert!(!&mount_dir .path() .join("digest") .join(digest_to_filepath(&TestData::roland().digest())) .exists()); } #[tokio::test] async fn read_file_by_digest() { let (store_dir, mount_dir) = make_dirs(); let runtime = task_executor::Executor::new(); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let test_bytes = TestData::roland(); store .store_file_bytes(test_bytes.bytes(), false) .await .expect("Storing bytes"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); let file_path = mount_dir .path() .join("digest") .join(digest_to_filepath(&test_bytes.digest())); assert_eq!(test_bytes.bytes(), file::contents(&file_path)); assert!(file::is_executable(&file_path)); } #[tokio::test] async fn list_directory() { let (store_dir, mount_dir) = make_dirs(); let runtime = task_executor::Executor::new(); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let test_bytes = TestData::roland(); let test_directory = TestDirectory::containing_roland(); store .store_file_bytes(test_bytes.bytes(), false) .await .expect("Storing bytes"); store .record_directory(&test_directory.directory(), false) .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); let virtual_dir = mount_dir .path() .join("directory") .join(digest_to_filepath(&test_directory.digest())); assert_eq!(vec!["roland.ext"], file::list_dir(&virtual_dir)); } #[tokio::test] async fn read_file_from_directory() { let 
(store_dir, mount_dir) = make_dirs(); let runtime = task_executor::Executor::new(); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let test_bytes = TestData::roland(); let test_directory = TestDirectory::containing_roland(); store .store_file_bytes(test_bytes.bytes(), false) .await .expect("Storing bytes"); store .record_directory(&test_directory.directory(), false) .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); let roland = mount_dir .path() .join("directory") .join(digest_to_filepath(&test_directory.digest())) .join("roland.ext"); assert_eq!(test_bytes.bytes(), file::contents(&roland)); assert!(!file::is_executable(&roland)); } #[tokio::test] async fn list_recursive_directory() { let (store_dir, mount_dir) = make_dirs(); let runtime = task_executor::Executor::new(); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let test_bytes = TestData::roland(); let treat_bytes = TestData::catnip(); let test_directory = TestDirectory::containing_roland(); let recursive_directory = TestDirectory::recursive(); store .store_file_bytes(test_bytes.bytes(), false) .await .expect("Storing bytes"); store .store_file_bytes(treat_bytes.bytes(), false) .await .expect("Storing bytes"); store .record_directory(&test_directory.directory(), false) .await .expect("Storing directory"); store .record_directory(&recursive_directory.directory(), false) .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); let virtual_dir = mount_dir .path() .join("directory") .join(digest_to_filepath(&recursive_directory.digest())); assert_eq!(vec!["cats", "treats.ext"], file::list_dir(&virtual_dir)); assert_eq!( vec!["roland.ext"], file::list_dir(&virtual_dir.join("cats")) ); } #[tokio::test] async fn read_file_from_recursive_directory() { let (store_dir, mount_dir) = make_dirs(); let 
runtime = task_executor::Executor::new(); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let test_bytes = TestData::roland(); let treat_bytes = TestData::catnip(); let test_directory = TestDirectory::containing_roland(); let recursive_directory = TestDirectory::recursive(); store .store_file_bytes(test_bytes.bytes(), false) .await .expect("Storing bytes"); store .store_file_bytes(treat_bytes.bytes(), false) .await .expect("Storing bytes"); store .record_directory(&test_directory.directory(), false) .await .expect("Storing directory"); store .record_directory(&recursive_directory.directory(), false) .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); let virtual_dir = mount_dir .path() .join("directory") .join(digest_to_filepath(&recursive_directory.digest())); let treats = virtual_dir.join("treats.ext"); assert_eq!(treat_bytes.bytes(), file::contents(&treats)); assert!(!file::is_executable(&treats)); let roland = virtual_dir.join("cats").join("roland.ext"); assert_eq!(test_bytes.bytes(), file::contents(&roland)); assert!(!file::is_executable(&roland)); } #[tokio::test] async fn files_are_correctly_executable() { let (store_dir, mount_dir) = make_dirs(); let runtime = task_executor::Executor::new(); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let treat_bytes = TestData::catnip(); let directory = TestDirectory::with_mixed_executable_files(); store .store_file_bytes(treat_bytes.bytes(), false) .await .expect("Storing bytes"); store .record_directory(&directory.directory(), false) .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); let virtual_dir = mount_dir .path() .join("directory") .join(digest_to_filepath(&directory.digest())); assert_eq!(vec!["feed.ext", "food.ext"], file::list_dir(&virtual_dir)); 
assert!(file::is_executable(&virtual_dir.join("feed.ext"))); assert!(!file::is_executable(&virtual_dir.join("food.ext"))); } pub fn digest_to_filepath(digest: &hashing::Digest) -> String { format!("{}-{}", digest.hash, digest.size_bytes) } pub fn make_dirs() -> (tempfile::TempDir, tempfile::TempDir) { let store_dir = tempfile::Builder::new().prefix("store").tempdir().unwrap(); let mount_dir = tempfile::Builder::new().prefix("mount").tempdir().unwrap(); (store_dir, mount_dir) }
apache-2.0
chrismoulton/bluemix-python-eve-sample
setup.py
464
try: from setuptools import setup except ImportError: from distutils.core import setup config = { 'description': 'Python Eve Sample Deploy powered by IBM Bluemix', 'author': 'Sanjay Joshi, ...', 'url': 'http://macreduce.mybluemix.net', 'author_email': 'joshisa@us.ibm.com', 'version': '0.1', 'install_requires': ['nose', 'eve', 'redis'], 'packages': ['macreduce'], 'scripts': [], 'name': 'macreduce' } setup(**config)
apache-2.0
xingwu1/azure-sdk-for-node
lib/services/batch/lib/models/deleteCertificateError.js
2365
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ 'use strict'; /** * @summary An error encountered by the Batch service when deleting a * Certificate. * */ class DeleteCertificateError { /** * Create a DeleteCertificateError. * @property {string} [code] An identifier for the Certificate deletion * error. Codes are invariant and are intended to be consumed * programmatically. * @property {string} [message] A message describing the Certificate deletion * error, intended to be suitable for display in a user interface. * @property {array} [values] A list of additional error details related to * the Certificate deletion error. This list includes details such as the * active Pools and Compute Nodes referencing this Certificate. However, if a * large number of resources reference the Certificate, the list contains * only about the first hundred. */ constructor() { } /** * Defines the metadata of DeleteCertificateError * * @returns {object} metadata of DeleteCertificateError * */ mapper() { return { required: false, serializedName: 'DeleteCertificateError', type: { name: 'Composite', className: 'DeleteCertificateError', modelProperties: { code: { required: false, serializedName: 'code', type: { name: 'String' } }, message: { required: false, serializedName: 'message', type: { name: 'String' } }, values: { required: false, serializedName: 'values', type: { name: 'Sequence', element: { required: false, serializedName: 'NameValuePairElementType', type: { name: 'Composite', className: 'NameValuePair' } } } } } } }; } } module.exports = DeleteCertificateError;
apache-2.0
kuFEAR/crest
core/src/test/java/org/codegist/crest/util/PlaceholdersTest.java
2435
/* * Copyright 2011 CodeGist.org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * =================================================================== * * More information at http://www.codegist.org. */ package org.codegist.crest.util; import org.codegist.crest.NonInstanciableClassTest; import org.junit.Test; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.regex.Pattern; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; /** * @author laurent.gilles@codegist.org */ public class PlaceholdersTest extends NonInstanciableClassTest { public PlaceholdersTest() { super(Placeholders.class); } @Test public void compileShouldReturnACompiledVersionOfGivenPlaceholdeMap(){ Map<String,String> placeholders = Collections.singletonMap("te.st", "someval"); Map<Pattern, String> actual = Placeholders.compile(placeholders); assertNotNull(actual); assertEquals(1, actual.size()); Map.Entry<Pattern,String> entry = actual.entrySet().iterator().next(); assertEquals("someval", entry.getValue()); assertEquals("\\{\\Qte.st\\E\\}", entry.getKey().pattern()); } @Test public void mergeShouldMergePlaceholderInStringWithValues(){ Map<String,String> placeholders = new HashMap<String, String>(); placeholders.put("te.st", "someval"); placeholders.put("p1", "someval2"); Map<Pattern, String> compiled = Placeholders.compile(placeholders); String actual = Placeholders.merge(compiled, 
"some{te.st}string{p1}with{p1}placeholders{p2}-{tesst}"); assertEquals("somesomevalstringsomeval2withsomeval2placeholders{p2}-{tesst}", actual); } @Test public void mergeShouldReturnSameIfEmpty(){ assertEquals("", Placeholders.merge(null, "")); } }
apache-2.0
franckbonin/cxx-maven-plugin
src/main/java/org/apache/maven/plugin/cxx/CoverageMojo.java
8130
package org.apache.maven.plugin.cxx; /* * Copyright (C) 2011-2016, Neticoa SAS France - Tous droits réservés. * Author(s) : Franck Bonin, Neticoa SAS France * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.Map; import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteException; import org.apache.commons.exec.Executor; import org.apache.maven.plugin.MojoExecutionException; /* Use FileSet and the FileManager provided in this project*/ import org.apache.maven.model.FileSet; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugin.cxx.utils.ExecutorService; import org.apache.maven.plugin.cxx.utils.FileSetManager; /** * Goal which gcovr execution. * * @author Franck Bonin */ @Mojo( name = "coverage", defaultPhase = LifecyclePhase.TEST ) public class CoverageMojo extends LaunchMojo { /** * The Report OutputFile Location. * * @since 0.0.4 */ @Parameter( property = "coverage.reportsfilePath", defaultValue = "${project.build.directory}/gcovr-reports" ) private File reportsfileDir; /** * The Report OutputFile name identifier. 
* * @since 0.0.4 */ @Parameter( property = "coverage.reportIdentifier", defaultValue = "" ) private String reportIdentifier; private String getReportFileName() { return "gcovr-result-" + reportIdentifier + ".xml"; } /** * Arguments to clean preexisting gcda report under workingDir * * @since 0.0.4 */ @Parameter( property = "coverage.preclean", defaultValue = "true" ) private boolean preclean; @Override protected void preExecute( Executor exec, CommandLine commandLine, Map<String, String> enviro ) throws MojoExecutionException { if ( preclean ) { FileSet afileSet = new FileSet(); afileSet.setDirectory( getWorkingDir().getAbsolutePath() ); getLog().debug( "Search for **/*.gcda from " + afileSet.getDirectory() ); afileSet.setIncludes( Arrays.asList( new String[] { "**/*.gcda" } ) ); // afileSet.setExcludes( Arrays.asList(excludes) ); FileSetManager aFileSetManager = new FileSetManager(); String[] found = aFileSetManager.getIncludedFiles( afileSet ); for ( int i = 0; i < found.length; i++ ) { File target = new File( getWorkingDir() + "/" + found[i] ); getLog().debug( "Found file " + target.getAbsolutePath() ); if ( target.exists() ) { try { if ( target.delete() ) { getLog().debug( "Succesfully delete " + target.getAbsolutePath() ); } else { getLog().warn( "Failed to delete " + target.getAbsolutePath() ); } } catch ( SecurityException e ) { getLog().warn( "SecurityException, unable to delete " + target.getAbsolutePath() ); } } else { getLog().debug( "But file " + target.getAbsolutePath() + " not exist" ); } } } } /** * Arguments for the gcovr program. 
Shall be -x -d ex: -x -d to produce Xml * reports and clean gcda execution reports after reading * */ @Parameter( property = "coverage.args", defaultValue = "-x -d" ) private String gcovrArgs; @Override protected void postExecute( int resultCode ) throws MojoExecutionException { String outputReportName = new String(); if ( reportsfileDir.isAbsolute() ) { outputReportName = reportsfileDir.getAbsolutePath() + "/" + getReportFileName(); } else { outputReportName = basedir.getAbsolutePath() + "/" + reportsfileDir.getPath() + "/" + getReportFileName(); } getLog().info( "Coverage report location " + outputReportName ); OutputStream outStream = System.out; File file = new File( outputReportName ); try { new File( file.getParent() ).mkdirs(); file.createNewFile(); outStream = new FileOutputStream( file ); } catch ( IOException e ) { getLog().error( "Coverage report redirected to stdout since " + outputReportName + " can't be opened" ); } InputStream pyScript = getClass().getResourceAsStream( "/gcovr.py" ); CommandLine commandLine = new CommandLine( "python" ); Executor exec = new DefaultExecutor(); String[] args = parseCommandlineArgs( "-" ); commandLine.addArguments( args, false ); args = parseCommandlineArgs( gcovrArgs ); commandLine.addArguments( args, false ); exec.setWorkingDirectory( getWorkingDir() ); try { getLog().info( "Executing command line: " + commandLine ); int res = ExecutorService.executeCommandLine( exec, commandLine, getEnvs(), outStream/* getOutputStreamOut() */, getOutputStreamErr(), pyScript/* getInputStream() */ ); // this is a hugly workaround against a random bugs from hudson cobertura // plugin. 
// hudson cobertura plugin randomly truncat coverage reports file to a 1024 size // multiple // while it copy reports from slave to master node for ( int j = 0; j < 200; j++ ) { for ( int i = 0; i < 80; i++ ) { outStream.write( ' ' ); } outStream.write( '\n' ); } outStream.flush(); if ( isResultCodeAFailure( res ) ) { throw new MojoExecutionException( "Result of command line execution is: '" + res + "'." ); } } catch ( ExecuteException e ) { throw new MojoExecutionException( "Command execution failed.", e ); } catch ( IOException e ) { throw new MojoExecutionException( "Command execution failed.", e ); } } /** * Set this to "true" to skip running tests, but still compile them. Its use is * NOT RECOMMENDED, but quite convenient on occasion. * * @since 0.0.5 */ @Parameter( property = "skipTests", defaultValue = "false" ) protected boolean skipTests; /** * Set this to "true" to bypass unit tests entirely. Its use is NOT RECOMMENDED, * especially if you enable it using the "maven.test.skip" property, because * maven.test.skip shall disables both running the tests and compiling the * tests. Consider using the <code>skipTests</code> parameter instead. * * @since 0.0.5 */ @Parameter( property = "maven.test.skip", defaultValue = "false" ) protected boolean skip; @Override protected boolean isSkip() { return super.isSkip() || skipTests || skip; } }
apache-2.0
Fabryprog/camel
components/camel-digitalocean/src/main/java/org/apache/camel/component/digitalocean/producer/DigitalOceanDropletsProducer.java
20230
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.digitalocean.producer;

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

import com.myjeeva.digitalocean.common.ResourceType;
import com.myjeeva.digitalocean.pojo.Action;
import com.myjeeva.digitalocean.pojo.Actions;
import com.myjeeva.digitalocean.pojo.Backups;
import com.myjeeva.digitalocean.pojo.Delete;
import com.myjeeva.digitalocean.pojo.Droplet;
import com.myjeeva.digitalocean.pojo.Droplets;
import com.myjeeva.digitalocean.pojo.Image;
import com.myjeeva.digitalocean.pojo.Kernels;
import com.myjeeva.digitalocean.pojo.Key;
import com.myjeeva.digitalocean.pojo.Neighbors;
import com.myjeeva.digitalocean.pojo.Region;
import com.myjeeva.digitalocean.pojo.Resource;
import com.myjeeva.digitalocean.pojo.Response;
import com.myjeeva.digitalocean.pojo.Snapshots;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.component.digitalocean.DigitalOceanConfiguration;
import org.apache.camel.component.digitalocean.DigitalOceanEndpoint;
import org.apache.camel.component.digitalocean.constants.DigitalOceanHeaders;
import org.apache.camel.component.digitalocean.constants.DigitalOceanOperations;
import org.apache.camel.util.ObjectHelper;

/**
 * The DigitalOcean producer for the Droplets API.
 * <p>
 * The operation to execute is resolved per exchange by
 * {@code determineOperation(exchange)} (inherited from
 * {@link DigitalOceanProducer}); most operations additionally require the
 * droplet id in the {@link DigitalOceanHeaders#ID} header.
 */
public class DigitalOceanDropletsProducer extends DigitalOceanProducer {

    // Droplet targeted by the current exchange; set in process() for every
    // operation except create/list/listAllNeighbors.
    // NOTE(review): this field makes the producer stateful across exchanges —
    // presumably each producer instance is used single-threaded; confirm
    // against the component's threading model.
    private Integer dropletId;

    public DigitalOceanDropletsProducer(DigitalOceanEndpoint endpoint, DigitalOceanConfiguration configuration) {
        super(endpoint, configuration);
    }

    /**
     * Dispatches the exchange to the handler matching the requested operation.
     *
     * @param exchange the Camel exchange carrying operation headers
     * @throws IllegalArgumentException if a required header is missing or the
     *         operation is not supported for droplets
     */
    @Override
    public void process(Exchange exchange) throws Exception {
        DigitalOceanOperations op = determineOperation(exchange);

        // Every operation except these three targets a specific droplet and
        // therefore needs the ID header.
        if (op != DigitalOceanOperations.create
                && op != DigitalOceanOperations.list
                && op != DigitalOceanOperations.listAllNeighbors) {
            dropletId = exchange.getIn().getHeader(DigitalOceanHeaders.ID, Integer.class);
            if (ObjectHelper.isEmpty(dropletId)) {
                throw new IllegalArgumentException(DigitalOceanHeaders.ID + " must be specified");
            }
        }

        switch (op) {
            case create:
                createDroplet(exchange);
                break;
            case list:
                getDroplets(exchange);
                break;
            case delete:
                deleteDroplet(exchange);
                break;
            case get:
                getDroplet(exchange);
                break;
            case listActions:
                getDropletActions(exchange);
                break;
            case listSnapshots:
                getDropletSnapshots(exchange);
                break;
            case listKernels:
                getDropletKernels(exchange);
                break;
            case listBackups:
                getDropletBackups(exchange);
                break;
            case listNeighbors:
                getDropletNeighbors(exchange);
                break;
            case listAllNeighbors:
                getAllDropletNeighbors(exchange);
                break;
            case enableBackups:
                enableDropletBackups(exchange);
                break;
            case disableBackups:
                disableDropletBackups(exchange);
                break;
            case reboot:
                rebootDroplet(exchange);
                break;
            case powerCycle:
                powerCycleDroplet(exchange);
                break;
            case shutdown:
                shutdownDroplet(exchange);
                break;
            case powerOn:
                powerOnDroplet(exchange);
                break;
            case powerOff:
                powerOffDroplet(exchange);
                break;
            case restore:
                restoreDroplet(exchange);
                break;
            case resetPassword:
                resetDropletPassword(exchange);
                break;
            case resize:
                resizeDroplet(exchange);
                break;
            case rebuild:
                rebuildDroplet(exchange);
                break;
            case rename:
                renameDroplet(exchange);
                break;
            case changeKernel:
                changeDropletKernel(exchange);
                break;
            case enableIpv6:
                enableDropletIpv6(exchange);
                break;
            case enablePrivateNetworking:
                enableDropletPrivateNetworking(exchange);
                break;
            case takeSnapshot:
                takeDropletSnapshot(exchange);
                break;
            case tag:
                tagDroplet(exchange);
                break;
            case untag:
                untagDroplet(exchange);
                break;
            default:
                throw new IllegalArgumentException("Unsupported operation");
        }
    }

    /** Fetches a single droplet and sets the {@link Droplet} as the out body. */
    private void getDroplet(Exchange exchange) throws Exception {
        Droplet droplet = getEndpoint().getDigitalOceanClient().getDropletInfo(dropletId);
        LOG.trace("Droplet {}", droplet);
        exchange.getOut().setBody(droplet);
    }

    /** Lists available droplets using the configured page/perPage settings. */
    private void getDroplets(Exchange exchange) throws Exception {
        Droplets droplets = getEndpoint().getDigitalOceanClient()
                .getAvailableDroplets(configuration.getPage(), configuration.getPerPage());
        LOG.trace("All Droplets : page {} / {} per page [{}] ",
                configuration.getPage(), configuration.getPerPage(), droplets.getDroplets());
        exchange.getOut().setBody(droplets.getDroplets());
    }

    /** Lists the actions performed on the current droplet. */
    private void getDropletActions(Exchange exchange) throws Exception {
        Actions actions = getEndpoint().getDigitalOceanClient()
                .getAvailableDropletActions(dropletId, configuration.getPage(), configuration.getPerPage());
        LOG.trace("Actions for Droplet {} : page {} / {} per page [{}] ",
                dropletId, configuration.getPage(), configuration.getPerPage(), actions.getActions());
        exchange.getOut().setBody(actions.getActions());
    }

    /** Lists the kernels available for the current droplet. */
    private void getDropletKernels(Exchange exchange) throws Exception {
        Kernels kernels = getEndpoint().getDigitalOceanClient()
                .getDropletKernels(dropletId, configuration.getPage(), configuration.getPerPage());
        LOG.trace("Kernels for Droplet {} : page {} / {} per page [{}] ",
                dropletId, configuration.getPage(), configuration.getPerPage(), kernels.getKernels());
        exchange.getOut().setBody(kernels.getKernels());
    }

    /** Lists the backups of the current droplet. */
    private void getDropletBackups(Exchange exchange) throws Exception {
        Backups backups = getEndpoint().getDigitalOceanClient()
                .getDropletBackups(dropletId, configuration.getPage(), configuration.getPerPage());
        LOG.trace("Backups for Droplet {} : page {} / {} per page [{}] ",
                dropletId, configuration.getPage(), configuration.getPerPage(), backups.getBackups());
        exchange.getOut().setBody(backups.getBackups());
    }

    /** Lists the snapshots of the current droplet. */
    private void getDropletSnapshots(Exchange exchange) throws Exception {
        Snapshots snapshots = getEndpoint().getDigitalOceanClient()
                .getDropletSnapshots(dropletId, configuration.getPage(), configuration.getPerPage());
        LOG.trace("Snapshots for Droplet {} : page {} / {} per page [{}] ",
                dropletId, configuration.getPage(), configuration.getPerPage(), snapshots.getSnapshots());
        exchange.getOut().setBody(snapshots.getSnapshots());
    }

    /** Lists droplets running on the same physical host as the current droplet. */
    private void getDropletNeighbors(Exchange exchange) throws Exception {
        Droplets droplets = getEndpoint().getDigitalOceanClient()
                .getDropletNeighbors(dropletId, configuration.getPage());
        LOG.trace("Neighbors for Droplet {} : page {} [{}] ",
                dropletId, configuration.getPage(), droplets.getDroplets());
        exchange.getOut().setBody(droplets.getDroplets());
    }

    /** Lists all sets of droplets that share physical hosts. */
    private void getAllDropletNeighbors(Exchange exchange) throws Exception {
        Neighbors neighbors = getEndpoint().getDigitalOceanClient()
                .getAllDropletNeighbors(configuration.getPage());
        LOG.trace("All Neighbors : page {} [{}] ", configuration.getPage(), neighbors.getNeighbors());
        exchange.getOut().setBody(neighbors.getNeighbors());
    }

    /** Deletes the current droplet; the out body is the {@link Delete} result. */
    private void deleteDroplet(Exchange exchange) throws Exception {
        Delete delete = getEndpoint().getDigitalOceanClient().deleteDroplet(dropletId);
        LOG.trace("Delete Droplet {}", delete);
        exchange.getOut().setBody(delete);
    }

    /**
     * Creates one droplet (NAME header) or several at once (NAMES header).
     * REGION, DROPLET_SIZE and DROPLET_IMAGE are mandatory; keys, backups,
     * IPv6, private networking, user data, volumes and tags are optional.
     */
    @SuppressWarnings("unchecked")
    private void createDroplet(Exchange exchange) throws Exception {
        Message in = exchange.getIn();
        Droplet droplet = new Droplet();

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.REGION))) {
            droplet.setRegion(new Region(in.getHeader(DigitalOceanHeaders.REGION, String.class)));
        } else {
            throw new IllegalArgumentException(DigitalOceanHeaders.REGION + " must be specified");
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_SIZE))) {
            droplet.setSize(in.getHeader(DigitalOceanHeaders.DROPLET_SIZE, String.class));
        } else {
            throw new IllegalArgumentException(DigitalOceanHeaders.DROPLET_SIZE + " must be specified");
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_IMAGE))) {
            droplet.setImage(new Image(in.getHeader(DigitalOceanHeaders.DROPLET_IMAGE, String.class)));
        } else {
            throw new IllegalArgumentException(DigitalOceanHeaders.DROPLET_IMAGE + " must be specified");
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_KEYS))) {
            List<String> keys = (List<String>) exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_KEYS);
            droplet.setKeys(keys.stream().map(Key::new).collect(Collectors.toList()));
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_ENABLE_BACKUPS))) {
            droplet.setEnableBackup(in.getHeader(DigitalOceanHeaders.DROPLET_ENABLE_BACKUPS, Boolean.class));
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_ENABLE_IPV6))) {
            droplet.setEnableIpv6(in.getHeader(DigitalOceanHeaders.DROPLET_ENABLE_IPV6, Boolean.class));
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_ENABLE_PRIVATE_NETWORKING))) {
            droplet.setEnablePrivateNetworking(
                    in.getHeader(DigitalOceanHeaders.DROPLET_ENABLE_PRIVATE_NETWORKING, Boolean.class));
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_USER_DATA))) {
            droplet.setUserData(in.getHeader(DigitalOceanHeaders.DROPLET_USER_DATA, String.class));
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_VOLUMES))) {
            droplet.setVolumeIds((List<String>) exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_VOLUMES));
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_TAGS))) {
            droplet.setTags((List<String>) exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_TAGS));
        }

        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.NAMES))) {
            // Multiple names -> bulk creation, body is the list of droplets.
            droplet.setNames((List<String>) in.getHeader(DigitalOceanHeaders.NAMES));
            Droplets droplets = getEndpoint().getDigitalOceanClient().createDroplets(droplet);
            LOG.trace("Droplets created {}", droplets);
            exchange.getOut().setBody(droplets.getDroplets());
        } else if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.NAME))) {
            // Single name -> single creation, body is the droplet.
            droplet.setName(in.getHeader(DigitalOceanHeaders.NAME, String.class));
            droplet = getEndpoint().getDigitalOceanClient().createDroplet(droplet);
            LOG.trace("Droplet created {}", droplet);
            exchange.getOut().setBody(droplet);
        } else {
            throw new IllegalArgumentException(
                    DigitalOceanHeaders.NAMES + " or " + DigitalOceanHeaders.NAME + " must be specified");
        }
    }

    /** Restores the droplet from the image given in the IMAGE_ID header. */
    private void restoreDroplet(Exchange exchange) throws Exception {
        if (ObjectHelper.isEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.IMAGE_ID))) {
            throw new IllegalArgumentException(DigitalOceanHeaders.IMAGE_ID + " must be specified");
        }
        Action action = getEndpoint().getDigitalOceanClient()
                .restoreDroplet(dropletId, exchange.getIn().getHeader(DigitalOceanHeaders.IMAGE_ID, Integer.class));
        LOG.trace("DropletAction Restore [{}] ", action);
        exchange.getOut().setBody(action);
    }

    /** Resizes the droplet to the slug given in the DROPLET_SIZE header. */
    private void resizeDroplet(Exchange exchange) throws Exception {
        if (ObjectHelper.isEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_SIZE))) {
            throw new IllegalArgumentException(DigitalOceanHeaders.DROPLET_SIZE + " must be specified");
        }
        Action action = getEndpoint().getDigitalOceanClient()
                .resizeDroplet(dropletId, exchange.getIn().getHeader(DigitalOceanHeaders.DROPLET_SIZE, String.class));
        LOG.trace("DropletAction Resize [{}] ", action);
        exchange.getOut().setBody(action);
    }

    /** Rebuilds the droplet from the image given in the IMAGE_ID header. */
    private void rebuildDroplet(Exchange exchange) throws Exception {
        if (ObjectHelper.isEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.IMAGE_ID))) {
            throw new IllegalArgumentException(DigitalOceanHeaders.IMAGE_ID + " must be specified");
        }
        Action action = getEndpoint().getDigitalOceanClient()
                .rebuildDroplet(dropletId, exchange.getIn().getHeader(DigitalOceanHeaders.IMAGE_ID, Integer.class));
        LOG.trace("Rebuild Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Renames the droplet to the value of the NAME header. */
    private void renameDroplet(Exchange exchange) throws Exception {
        if (ObjectHelper.isEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.NAME))) {
            throw new IllegalArgumentException(DigitalOceanHeaders.NAME + " must be specified");
        }
        Action action = getEndpoint().getDigitalOceanClient()
                .renameDroplet(dropletId, exchange.getIn().getHeader(DigitalOceanHeaders.NAME, String.class));
        LOG.trace("Rename Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Switches the droplet to the kernel given in the KERNEL_ID header. */
    private void changeDropletKernel(Exchange exchange) throws Exception {
        if (ObjectHelper.isEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.KERNEL_ID))) {
            throw new IllegalArgumentException(DigitalOceanHeaders.KERNEL_ID + " must be specified");
        }
        Action action = getEndpoint().getDigitalOceanClient()
                .changeDropletKernel(dropletId, exchange.getIn().getHeader(DigitalOceanHeaders.KERNEL_ID, Integer.class));
        LOG.trace("Change Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Triggers a root-password reset for the droplet. */
    private void resetDropletPassword(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().resetDropletPassword(dropletId);
        LOG.trace("Reset password Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Powers on the droplet. */
    private void powerOnDroplet(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().powerOnDroplet(dropletId);
        LOG.trace("Power on Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Powers off the droplet (hard power off). */
    private void powerOffDroplet(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().powerOffDroplet(dropletId);
        LOG.trace("Power off Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Gracefully shuts down the droplet. */
    private void shutdownDroplet(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().shutdownDroplet(dropletId);
        LOG.trace("Shutdown Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Power-cycles (hard reboot) the droplet. */
    private void powerCycleDroplet(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().powerCycleDroplet(dropletId);
        LOG.trace("Power cycle Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Enables automatic backups for the droplet. */
    private void enableDropletBackups(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().enableDropletBackups(dropletId);
        LOG.trace("Enable backups Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Disables automatic backups for the droplet. */
    private void disableDropletBackups(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().disableDropletBackups(dropletId);
        LOG.trace("Disable backups for Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Enables IPv6 networking on the droplet. */
    private void enableDropletIpv6(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().enableDropletIpv6(dropletId);
        LOG.trace("Enable IP v6 for Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Enables private networking on the droplet. */
    private void enableDropletPrivateNetworking(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().enableDropletPrivateNetworking(dropletId);
        LOG.trace("Enable private networking for Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Reboots the droplet. */
    private void rebootDroplet(Exchange exchange) throws Exception {
        Action action = getEndpoint().getDigitalOceanClient().rebootDroplet(dropletId);
        LOG.trace("Reboot Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Takes a snapshot of the droplet, named by the optional NAME header. */
    private void takeDropletSnapshot(Exchange exchange) throws Exception {
        Action action;
        if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(DigitalOceanHeaders.NAME))) {
            action = getEndpoint().getDigitalOceanClient()
                    .takeDropletSnapshot(dropletId, exchange.getIn().getHeader(DigitalOceanHeaders.NAME, String.class));
        } else {
            action = getEndpoint().getDigitalOceanClient().takeDropletSnapshot(dropletId);
        }
        LOG.trace("Take Snapshot for Droplet {} : [{}] ", dropletId, action);
        exchange.getOut().setBody(action);
    }

    /** Tags the droplet with the tag named in the NAME header. */
    private void tagDroplet(Exchange exchange) throws Exception {
        String tagName = exchange.getIn().getHeader(DigitalOceanHeaders.NAME, String.class);
        if (ObjectHelper.isEmpty(tagName)) {
            throw new IllegalArgumentException(DigitalOceanHeaders.NAME + " must be specified");
        }
        ArrayList<Resource> resources = new ArrayList<>(1);
        resources.add(new Resource(dropletId.toString(), ResourceType.DROPLET));
        // FIX: the tag name is the NAME header. Previously dropletId.toString()
        // was passed as the tag name even though NAME was validated above.
        Response response = getEndpoint().getDigitalOceanClient().tagResources(tagName, resources);
        LOG.trace("Tag Droplet {} : [{}] ", dropletId, response);
        exchange.getOut().setBody(response);
    }

    /** Removes the tag named in the NAME header from the droplet. */
    private void untagDroplet(Exchange exchange) throws Exception {
        String tagName = exchange.getIn().getHeader(DigitalOceanHeaders.NAME, String.class);
        if (ObjectHelper.isEmpty(tagName)) {
            throw new IllegalArgumentException(DigitalOceanHeaders.NAME + " must be specified");
        }
        ArrayList<Resource> resources = new ArrayList<>(1);
        resources.add(new Resource(dropletId.toString(), ResourceType.DROPLET));
        // FIX: same as tagDroplet — use the NAME header as the tag name.
        Response response = getEndpoint().getDigitalOceanClient().untagResources(tagName, resources);
        LOG.trace("Untag Droplet {} : [{}] ", dropletId, response);
        exchange.getOut().setBody(response);
    }
}
apache-2.0
Skarlso/gocd
server/src/main/webapp/WEB-INF/rails/lib/extensions/java_lang_enum.rb
693
#
# Copyright 2022 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Reopen java.lang.Enum (JRuby interop) so that Java enum constants render
# as their plain string name both in JSON and in debugging output.
java.lang.Enum.class_eval do
  # Rails-style JSON hook: serialize the enum constant as its name.
  def as_json(_options = nil)
    to_s
  end

  # Use the enum's name for #inspect as well, keeping logs readable.
  def inspect
    to_s
  end
end
apache-2.0
apache/directory-server
core-integ/src/test/java/org/apache/directory/server/core/operations/search/SearchWithIndexedMVAttributeIT.java
4710
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.server.core.operations.search; import static org.junit.jupiter.api.Assertions.assertEquals; import org.apache.directory.api.ldap.model.cursor.EntryCursor; import org.apache.directory.api.ldap.model.entry.DefaultEntry; import org.apache.directory.api.ldap.model.message.ModifyRequest; import org.apache.directory.api.ldap.model.message.ModifyRequestImpl; import org.apache.directory.api.ldap.model.message.SearchScope; import org.apache.directory.api.ldap.model.name.Dn; import org.apache.directory.ldap.client.api.LdapConnection; import org.apache.directory.server.core.annotations.CreateDS; import org.apache.directory.server.core.api.partition.Partition; import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory; import org.apache.directory.server.core.factory.DirectoryServiceFactory; import org.apache.directory.server.core.integ.AbstractLdapTestUnit; import org.apache.directory.server.core.integ.ApacheDSTestExtension; import org.apache.directory.server.core.integ.IntegrationUtils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; /** * Test for MV attributes with index * * 
@author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> */ @ExtendWith( { ApacheDSTestExtension.class } ) @CreateDS(name = "SearchMVWithIndicesIT") public class SearchWithIndexedMVAttributeIT extends AbstractLdapTestUnit { private static LdapConnection connection; @BeforeEach public void createData() throws Exception { connection = IntegrationUtils.getAdminConnection( getService() ); Partition systemPartition = getService().getSystemPartition(); DirectoryServiceFactory dsFactory = DefaultDirectoryServiceFactory.class.newInstance(); // Add an index for the Member AT dsFactory.getPartitionFactory().addIndex( systemPartition, "member", 100 ); // Restart the service so that the index is created getService().shutdown(); getService().startup(); // ------------------------------------------------------------------- // Add an entry with a groupOfNames OC // ------------------------------------------------------------------- addGroupOfNames( "testGroup0", 0 ); addGroupOfNames( "testGroup1", 1 ); addGroupOfNames( "testGroup2", 2 ); addGroupOfNames( "testGroup4", 4 ); addGroupOfNames( "testGroup5", 5 ); } private void addGroupOfNames( String name, int number ) throws Exception { String dn = "cn=" + name + ",ou=groups,ou=system"; connection.add( new DefaultEntry( dn, "objectClass: top", "objectClass: groupOfnames", "cn", name, "member", "cn=test,ou=users,ou=system" ) ); // now, add thousands of members in some of those entries ModifyRequest modRequest = new ModifyRequestImpl(); modRequest.setName( new Dn( dn ) ); for ( int i = 0; i < number * 320; i++ ) { modRequest.add( "member", "cn=test" + i + ",ou=users,ou=system" ); } connection.modify( modRequest ); } @Test public void testSearch() throws Exception { //long t0 = System.currentTimeMillis(); EntryCursor cursor = connection.search( "ou=system", "(&(member=cn=test74,ou=users,ou=system)(objectClass=groupOfNames))", SearchScope.SUBTREE, "member" ); int nbFound = 0; while ( cursor.next() ) { nbFound++; } 
cursor.close(); //long t1 = System.currentTimeMillis(); //System.out.println( "Search done in " + ( t1 - t0 ) + "msec" ); assertEquals( 4, nbFound ); } }
apache-2.0
apache/velocity-engine
velocity-engine-core/src/main/java/org/apache/velocity/exception/MacroOverflowException.java
2564
package org.apache.velocity.exception;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * Application-level exception raised when the nesting of macro calls
 * goes beyond the configured limit. That limit is read from the
 * configuration property <code>velocimacro.max.depth</code>.
 *
 * @since 1.6
 */
public class MacroOverflowException extends VelocityException
{
    /**
     * Serialization version identifier.
     */
    private static final long serialVersionUID = 7305635093478106342L;

    /**
     * Builds the exception from a message only.
     *
     * @param exceptionMessage The message to register.
     */
    public MacroOverflowException(final String exceptionMessage)
    {
        super(exceptionMessage);
    }

    /**
     * Builds the exception from a message and its underlying cause.
     *
     * @param exceptionMessage The message to register.
     * @param wrapped A throwable object that caused the Exception.
     */
    public MacroOverflowException(final String exceptionMessage, final Throwable wrapped)
    {
        super(exceptionMessage, wrapped);
    }

    /**
     * Builds the exception from a message, a cause and the VTL call stack.
     *
     * @param exceptionMessage The message to register.
     * @param wrapped A throwable object that caused the Exception.
     * @param stacktrace VTL stacktrace
     * @since 2.2
     */
    public MacroOverflowException(final String exceptionMessage, final Throwable wrapped, final String[] stacktrace)
    {
        super(exceptionMessage, wrapped, stacktrace);
    }

    /**
     * Builds the exception from its underlying cause only.
     *
     * @param wrapped A throwable object that caused the Exception.
     */
    public MacroOverflowException(final Throwable wrapped)
    {
        super(wrapped);
    }

    /**
     * Builds the exception from a cause and the VTL call stack.
     *
     * @param wrapped A throwable object that caused the Exception.
     * @param stacktrace VTL stacktrace
     * @since 2.2
     */
    public MacroOverflowException(final Throwable wrapped, final String[] stacktrace)
    {
        super(wrapped, stacktrace);
    }
}
apache-2.0
patelkarn/crash
src/main/java/org/acra/collections/ImmutableMap.java
3186
/*
 * Copyright 2016
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.acra.collections;

import android.support.annotation.NonNull;

import java.io.Serializable;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * Naive (not optimized) implementation of an Immutable Map.
 * <p>
 * The backing map is defensively copied at construction time; every mutating
 * {@link Map} operation throws {@link UnsupportedOperationException}, and the
 * views returned by {@link #entrySet()}, {@link #keySet()} and
 * {@link #values()} are themselves immutable.
 *
 * @author F43nd1r
 * @since 4.9.0
 */
public final class ImmutableMap<K, V> implements Map<K, V>, Serializable {
    // NOTE(review): Serializable without an explicit serialVersionUID relies on
    // the JVM-computed default; adding one now would break compatibility with
    // any previously serialized instances, so it is deliberately left out.

    private final Map<K, V> mMap;

    public ImmutableMap(Map<K, V> map) {
        // Defensive copy so later changes to the source map are not visible.
        this.mMap = new HashMap<K, V>(map);
    }

    /** Always throws: this map is immutable. */
    @Override
    public void clear() {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean containsKey(Object key) {
        return mMap.containsKey(key);
    }

    @Override
    public boolean containsValue(Object value) {
        return mMap.containsValue(value);
    }

    /** @return an immutable view of the entries; each entry rejects setValue. */
    @NonNull
    @Override
    public Set<Entry<K, V>> entrySet() {
        final Set<Entry<K, V>> original = mMap.entrySet();
        final ImmutableSet.Builder<Entry<K, V>> builder = new ImmutableSet.Builder<Entry<K, V>>();
        for (Entry<K, V> entry : original) {
            builder.add(new ImmutableEntryWrapper<K, V>(entry));
        }
        return builder.build();
    }

    @Override
    public V get(Object key) {
        return mMap.get(key);
    }

    @Override
    public boolean isEmpty() {
        return mMap.isEmpty();
    }

    @NonNull
    @Override
    public Set<K> keySet() {
        return new ImmutableSet<K>(mMap.keySet());
    }

    /** Always throws: this map is immutable. */
    @Override
    public V put(K key, V value) {
        throw new UnsupportedOperationException();
    }

    /** Always throws: this map is immutable. */
    @Override
    public void putAll(@NonNull Map<? extends K, ? extends V> map) {
        throw new UnsupportedOperationException();
    }

    /** Always throws: this map is immutable. */
    @Override
    public V remove(Object object) {
        throw new UnsupportedOperationException();
    }

    @Override
    public int size() {
        return mMap.size();
    }

    @NonNull
    @Override
    public Collection<V> values() {
        return new ImmutableList<V>(mMap.values());
    }

    /**
     * Read-only wrapper around a {@link Map.Entry}: {@link #setValue} throws.
     */
    public static class ImmutableEntryWrapper<K, V> implements Map.Entry<K, V> {
        private final Map.Entry<K, V> mEntry;

        ImmutableEntryWrapper(Entry<K, V> mEntry) {
            this.mEntry = mEntry;
        }

        @Override
        public K getKey() {
            return mEntry.getKey();
        }

        @Override
        public V getValue() {
            return mEntry.getValue();
        }

        /**
         * Always throws: entries of an immutable map cannot be modified.
         * FIX: parameter type tightened from the erased {@code Object} to the
         * generic {@code V} and marked {@code @Override} — the raw signature
         * only matched {@code Map.Entry.setValue(V)} via erasure.
         */
        @Override
        public V setValue(V value) {
            throw new UnsupportedOperationException();
        }
    }
}
apache-2.0
BigBoss424/portfolio
v6/node_modules/eslint-plugin-es/lib/rules/no-set.js
1174
/** * @author Toru Nagashima <https://github.com/mysticatea> * See LICENSE file in root directory for full license. */ "use strict" const { READ, ReferenceTracker } = require("eslint-utils") module.exports = { meta: { docs: { description: "disallow the `Set` class.", category: "ES2015", recommended: false, url: "http://mysticatea.github.io/eslint-plugin-es/rules/no-set.html", }, fixable: null, messages: { forbidden: "ES2015 '{{name}}' class is forbidden.", }, schema: [], type: "problem", }, create(context) { return { "Program:exit"() { const tracker = new ReferenceTracker(context.getScope()) for (const { node, path } of tracker.iterateGlobalReferences({ Set: { [READ]: true }, })) { context.report({ node, messageId: "forbidden", data: { name: path.join(".") }, }) } }, } }, }
apache-2.0
aws/aws-sdk-cpp
aws-cpp-sdk-qldb/source/model/UpdateLedgerPermissionsModeRequest.cpp
847
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#include <aws/qldb/model/UpdateLedgerPermissionsModeRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>

#include <utility>

using namespace Aws::QLDB::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;

// All members start out unset; PermissionsMode additionally gets an explicit
// NOT_SET sentinel so serialization can tell "never assigned" apart.
UpdateLedgerPermissionsModeRequest::UpdateLedgerPermissionsModeRequest()
    : m_nameHasBeenSet(false),
      m_permissionsMode(PermissionsMode::NOT_SET),
      m_permissionsModeHasBeenSet(false)
{
}

Aws::String UpdateLedgerPermissionsModeRequest::SerializePayload() const
{
    // Only the permissions mode is written to the JSON body; the ledger name
    // is not serialized here (presumably it travels in the request URI —
    // generated code, confirm against the QLDB API model).
    JsonValue payload;

    if (m_permissionsModeHasBeenSet)
    {
        const Aws::String modeName =
            PermissionsModeMapper::GetNameForPermissionsMode(m_permissionsMode);
        payload.WithString("PermissionsMode", modeName);
    }

    return payload.View().WriteReadable();
}
apache-2.0
phanindra1212/gaevfs
test/src/com/newatlanta/appengine/taskqueue/TestDeferred.java
1263
/*
 * Copyright 2009 New Atlanta Communications, LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.newatlanta.appengine.taskqueue;

import java.util.logging.Logger;

import com.newatlanta.appengine.taskqueue.Deferred.Deferrable;

/**
 * A minimal {@link Deferrable} used for testing the deferred task queue: its
 * payload is just a byte array whose size is logged when the task runs.
 */
public class TestDeferred implements Deferrable {

    private static final long serialVersionUID = 1L;

    private static final Logger log = Logger.getLogger( TestDeferred.class.getName() );

    // Payload carried through serialization; only its length matters.
    private byte[] arg;

    /** Creates a task with an empty payload. */
    public TestDeferred() {
        this( 0 );
    }

    /** Creates a task whose payload is {@code size} zero bytes. */
    public TestDeferred( int size ) {
        arg = new byte[ size ];
    }

    /** Logs the payload size when the deferred task executes. */
    @Override
    public void doTask() {
        log.info( "arg size = " + arg.length );
    }
}
apache-2.0
tumblr/goji
server.go
2556
package main

import (
	"encoding/json"
	"fmt"
	"github.com/byxorna/goji/marathon"
	"io/ioutil"
	"log"
	"net/http"
	"os"
)

// ListenForEvents registers this process as a Marathon event callback (unless
// one is already registered for our callback URL), installs a signal handler
// that removes the callback before exiting, and then serves the /event HTTP
// endpoint on listenAddr. It blocks until the HTTP server stops and returns
// any setup or serve error.
func ListenForEvents(listenAddr string) error {
	hostname, err := os.Hostname()
	if err != nil {
		return err
	}
	cb := fmt.Sprintf("http://%s:%d/event", hostname, config.HttpPort)
	cbRegistered, err := client.HasCallback(cb)
	if err != nil {
		return err
	}
	if !cbRegistered {
		// NOTE(review): RegisterCallback's error is ignored here — confirm
		// whether a registration failure should abort startup.
		client.RegisterCallback(cb)
	} else {
		log.Printf("Event callback already registered; skipping registration\n")
	}

	// cleanup registered callback if we catch a signal
	go func() {
		s := <-sigChan
		log.Printf("Cleaning up registered callback after signal %s\n", s)
		err := client.RemoveCallback(cb)
		if err != nil {
			log.Fatal(err.Error())
		}
		os.Exit(0)
	}()

	http.HandleFunc("/event", handleEvent)
	return http.ListenAndServe(listenAddr, nil)
}

// handleEvent accepts a Marathon event POSTed to /event, acknowledges it
// immediately, and hands the raw body off to determineEventRelevancy on a
// separate goroutine so the HTTP response is never blocked on parsing.
func handleEvent(res http.ResponseWriter, req *http.Request) {
	log.Printf("%s %s %s %s\n", req.Method, req.RemoteAddr, req.RequestURI, req.Proto)
	body, err := ioutil.ReadAll(req.Body)
	res.Header().Set("Content-Type", "text/plain")
	if err != nil {
		// Fix: the status code must be written before the body — the old code
		// called WriteHeader after Fprintf, by which point net/http had already
		// committed an implicit 200, so the 400 was never sent. Also use
		// log.Print rather than Printf so stray '%' verbs in the error text
		// cannot garble the log line.
		log.Print(err.Error())
		res.WriteHeader(http.StatusBadRequest)
		fmt.Fprintf(res, "Error reading event body")
		return
	}
	fmt.Fprintf(res, "Thanks for the event!")
	// lets deal with parsing and identifying the event out of the request handler
	go determineEventRelevancy(body)
}

// determineEventRelevancy decodes the event envelope, logs a human-readable
// summary for the event types we care about, and forwards the event type on
// eventChan; irrelevant event types are logged and dropped.
func determineEventRelevancy(body []byte) {
	e := marathon.Event{}
	err := json.Unmarshal(body, &e)
	if err != nil {
		log.Printf("Unable to decode event body: %s\n", err.Error())
		return
	}
	var processEvent = true
	switch e.EventType {
	case "status_update_event":
		ev := marathon.StatusUpdateEvent{}
		err := json.Unmarshal(body, &ev)
		if err != nil {
			log.Printf("Unable to decode StatusUpdateEvent: %s\n", err.Error())
			// Fix: bail out like the health-status branch below instead of
			// logging a zero-valued event and forwarding it anyway.
			return
		}
		log.Printf("Task %s in %s on %s:%v is now %s\n", ev.TaskId, ev.AppId, ev.Host, ev.Ports, ev.TaskStatus)
	case "health_status_changed_event":
		ev := marathon.HealthStatusChangedEvent{}
		err := json.Unmarshal(body, &ev)
		if err != nil {
			log.Printf("Unable to decode HealthStatusChangedEvent: %s\n", err.Error())
			return
		}
		status := "dead"
		if ev.Alive {
			status = "alive"
		}
		log.Printf("Task %s in %s is now %s\n", ev.TaskId, ev.AppId, status)
	case "failed_health_check_event":
		log.Printf("Task %s in %s failed its health check\n", e.TaskId, e.AppId)
	default:
		processEvent = false
	}
	if processEvent {
		eventChan <- e.EventType
	} else {
		log.Printf("Ignoring event type %s\n", e.EventType)
	}
}
apache-2.0
sxjscience/tvm
python/tvm/topi/bifrost/depthwise_conv2d.py
4548
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name,unused-variable,unused-argument
"""depthwise_conv2d schedule on ARM Mali GPU"""
from __future__ import absolute_import as _abs
import tvm
from tvm import te

from .. import util
from .. import tag


def schedule_depthwise_conv2d_nchw(outs):
    """Schedule for depthwise_conv2d nchw forward.

    Parameters
    ----------
    outs: Array of Tensor
        The computation graph description of depthwise_conv2d
        in the format of an array of tensors.

    Returns
    -------
    s: Schedule
        The computation schedule for depthwise_conv2d nchw.
    """
    outs = [outs] if isinstance(outs, te.tensor.Tensor) else outs
    s = te.create_schedule([x.op for x in outs])

    def _schedule(pad_data, kernel, conv):
        # The unpadded input tensor feeding the pad stage (used below to pick
        # heuristics based on dtype).
        raw_data = s[pad_data].op.input_tensors[0]

        if conv.op not in s.outputs:  # has bias or relu
            output = outs[0]
        else:  # no bias or relu
            output = conv

        def tile_and_bind3d(tensor, z, y, x, z_factor=2, y_factor=None, x_factor=None):
            """ tile and bind 3d """
            # Unspecified factors cascade from the previous axis.
            y_factor = y_factor or z_factor
            x_factor = x_factor or y_factor
            zo, zi = s[tensor].split(z, z_factor)
            yo, yi = s[tensor].split(y, y_factor)
            xo, xi = s[tensor].split(x, x_factor)
            s[tensor].bind(zo, te.thread_axis("blockIdx.z"))
            s[tensor].bind(zi, te.thread_axis("threadIdx.z"))
            s[tensor].bind(yo, te.thread_axis("blockIdx.y"))
            s[tensor].bind(yi, te.thread_axis("threadIdx.y"))
            s[tensor].bind(xo, te.thread_axis("blockIdx.x"))
            s[tensor].bind(xi, te.thread_axis("threadIdx.x"))
            # Returned in (outer, inner) pairs per axis; callers rely on the
            # last element being the innermost x axis.
            return zo, zi, yo, yi, xo, xi

        # set tunable parameters
        # VW is the vectorization width along the W axis and VH the tile height
        # along H; both are grown while they still divide the output shape
        # evenly (caps: VW <= 4, VH <= 2).
        VH = 1
        VW = 1
        num_thread = 4
        while util.get_const_int(conv.shape[3]) % (VW * 2) == 0 and VW * 2 <= 4:
            VW = VW * 2
        while util.get_const_int(conv.shape[2]) % (VH * 2) == 0 and VH * 2 <= 2:
            VH = VH * 2
        # fp16 inputs: double either the vector width (when divisibility still
        # holds) or only the thread count; num_thread doubles in both branches.
        if raw_data.dtype == "float16":
            if util.get_const_int(conv.shape[3]) % (VW * 2) == 0:
                VW *= 2
                num_thread *= 2
            else:
                num_thread *= 2

        # schedule padding
        _, c, y, x = s[pad_data].op.axis
        tile_and_bind3d(pad_data, c, y, x, num_thread, 1, 1)

        # schedule conv: fully unroll the (small) kernel reduction axes.
        di, dj = s[conv].op.reduce_axis
        s[conv].unroll(di)
        s[conv].unroll(dj)

        _, c, y, x = s[output].op.axis
        y, x, yi, xi = s[output].tile(y, x, VH, VW)
        s[output].unroll(yi)
        s[output].vectorize(xi)

        # ji is the innermost x axis of the bound output; it anchors compute_at
        # for the conv stage below.
        _, _, _, _, _, ji = tile_and_bind3d(output, c, y, x, num_thread, 1, 1)

        # When conv is not itself the output (bias/relu fused after it),
        # mirror the tiling on conv and compute it inside the output's loop.
        if conv.op not in s.outputs:
            _, c, y, x = s[conv].op.axis
            y, x, yi, xi = s[conv].tile(y, x, VH, VW)
            s[conv].unroll(yi)
            s[conv].vectorize(xi)
            s[conv].compute_at(s[output], ji)

    def traverse(op):
        """Internal traverse function"""
        # inline all one-to-one-mapping operators except the last stage (output)
        if tag.is_broadcast(op.tag):
            if op not in s.outputs:
                s[op].compute_inline()
            # Recurse into producers that are themselves compute ops.
            for tensor in op.input_tensors:
                if tensor.op.input_tensors:
                    traverse(tensor.op)

        # schedule depthwise_conv2d
        if op.tag == "depthwise_conv2d_nchw":
            pad_data = op.input_tensors[0]
            kernel = op.input_tensors[1]
            # A dilated kernel stage is inlined rather than materialized.
            if isinstance(kernel.op, tvm.te.ComputeOp) and "dilate" in kernel.op.tag:
                s[kernel].compute_inline()
            conv = op.output(0)
            _schedule(pad_data, kernel, conv)

    traverse(outs[0].op)
    return s
apache-2.0
Herve-M/sablecc
src/org/sablecc/sablecc/semantics/LocalNameSpace.java
2879
/* This file is part of SableCC ( http://sablecc.org ).
 *
 * See the NOTICE file distributed with this work for copyright information.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.sablecc.sablecc.semantics;

import java.util.*;
import java.util.Map.Entry;

/**
 * Assigns unambiguous and internal names to a list of local declarations.
 * A name used by exactly one declaration keeps that name; declarations that
 * share a name get numbered internal names ("name.1", "name.2", ...), and
 * anonymous declarations get global numbered names (".1", ".2", ...).
 */
public class LocalNameSpace<T extends LocalDeclaration> {

    private Map<String, T> nameMap = new TreeMap<String, T>();

    private Set<String> nameSet = new TreeSet<String>();

    LocalNameSpace(
            List<T> localDeclarations) {

        // Bucket declarations by name; anonymous ones go to a separate list.
        Map<String, List<T>> byName = new TreeMap<String, List<T>>();
        List<T> unnamed = new LinkedList<T>();

        for (T declaration : localDeclarations) {
            String name = declaration.getName();
            if (name == null) {
                unnamed.add(declaration);
                continue;
            }
            List<T> sameName = byName.get(name);
            if (sameName == null) {
                sameName = new LinkedList<T>();
                byName.put(name, sameName);
            }
            sameName.add(declaration);
        }

        for (Entry<String, List<T>> entry : byName.entrySet()) {
            String name = entry.getKey();
            List<T> sameName = entry.getValue();
            this.nameSet.add(name);

            if (sameName.size() == 1) {
                // Unique name: the declaration is unambiguous and retrievable.
                T only = sameName.get(0);
                only.setUnambiguousAndInternalNames(name, name);
                this.nameMap.put(name, only);
            }
            else {
                // Duplicated name: ambiguous, so only numbered internal names
                // are assigned and nothing is added to the lookup map.
                int suffix = 1;
                for (T declaration : sameName) {
                    declaration.setUnambiguousAndInternalNames(null, name + "."
                            + suffix++);
                }
            }
        }

        // Anonymous declarations share one global counter.
        int suffix = 1;
        for (T declaration : unnamed) {
            declaration.setUnambiguousAndInternalNames(null, "." + suffix++);
        }
    }

    /** Returns the declaration uniquely bound to this name, or null. */
    public T get(
            String name) {

        return this.nameMap.get(name);
    }

    /** Returns whether any declaration (even an ambiguous one) uses this name. */
    public boolean has(
            String name) {

        return this.nameSet.contains(name);
    }
}
apache-2.0
blox/blox
integ-tests/src/cucumberTest/java/cucumber/steps/helpers/InputCreator.java
5606
/*
 * Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may
 * not use this file except in compliance with the License. A copy of the
 * License is located at
 *
 *     http://aws.amazon.com/apache2.0/
 *
 * or in the "LICENSE" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package cucumber.steps.helpers;

import com.amazonaws.blox.dataservicemodel.v1.model.Cluster;
import com.amazonaws.blox.dataservicemodel.v1.model.EnvironmentId;
import com.amazonaws.blox.dataservicemodel.v1.model.EnvironmentType;
import com.amazonaws.blox.dataservicemodel.v1.model.wrappers.CreateEnvironmentRequest;
import com.amazonaws.blox.dataservicemodel.v1.model.wrappers.DeleteEnvironmentRequest;
import com.amazonaws.blox.dataservicemodel.v1.model.wrappers.DescribeEnvironmentRequest;
import com.amazonaws.blox.dataservicemodel.v1.model.wrappers.ListClustersRequest;
import com.amazonaws.blox.dataservicemodel.v1.model.wrappers.ListEnvironmentsRequest;
import com.amazonaws.blox.dataservicemodel.v1.model.wrappers.UpdateEnvironmentRequest;
import java.util.StringJoiner;
import java.util.UUID;
import lombok.Getter;
import lombok.Setter;

/**
 * Factory for the data-service request objects used by the cucumber
 * integration tests. Every cluster/environment name is prefixed with
 * "blox-integ-tests-&lt;random UUID&gt;-" so that concurrent test runs
 * cannot collide on resource names.
 */
public class InputCreator {
  private static final String DEFAULT_ACCOUNT_ID = "123456789012";
  private static final String DEFAULT_CLUSTER_NAME = "Cluster";
  private static final String DEFAULT_ENVIRONMENT_NAME = "Environment";
  private static final String NAMING_PREFIX = "blox-integ-tests";

  // One UUID per InputCreator instance; shared by every name it generates.
  private final String sharedId = UUID.randomUUID().toString();

  @Getter @Setter private String accountId = DEFAULT_ACCOUNT_ID;

  // Fixed "sleep" task definition ARN in the configured account.
  private String getTaskDefinitionArn() {
    return "arn:aws:ecs:us-east-1:" + getAccountId() + ":task-definition/sleep";
  }

  // Fixed test IAM role ARN in the configured account.
  private String getRoleArn() {
    return "arn:aws:iam::" + getAccountId() + ":role/testRole";
  }

  /** Builds "blox-integ-tests-&lt;sharedId&gt;-&lt;name&gt;". */
  public String prefixName(final String name) {
    return new StringJoiner("-").add(NAMING_PREFIX).add(sharedId).add(name).toString();
  }

  /** Create request with the default environment and cluster names. */
  public CreateEnvironmentRequest createEnvironmentRequest() {
    return createEnvironmentRequest(DEFAULT_ENVIRONMENT_NAME, DEFAULT_CLUSTER_NAME);
  }

  /** Create request with a custom environment name on the default cluster. */
  public CreateEnvironmentRequest createEnvironmentRequest(final String environmentName) {
    return createEnvironmentRequest(environmentName, DEFAULT_CLUSTER_NAME);
  }

  /** Describe request for an environment on the default cluster. */
  public DescribeEnvironmentRequest describeEnvironmentRequest(final String environmentName) {
    return describeEnvironmentRequest(environmentName, DEFAULT_CLUSTER_NAME);
  }

  public CreateEnvironmentRequest createEnvironmentRequest(
      final String environmentName, final String cluster) {
    EnvironmentId id = environmentId(environmentName, cluster);
    return createEnvironmentRequest(id);
  }

  // All create requests use the same role, task definition, Daemon type, and
  // ReplaceAfterTerminate deployment method.
  private CreateEnvironmentRequest createEnvironmentRequest(final EnvironmentId id) {
    return CreateEnvironmentRequest.builder()
        .environmentId(id)
        .role(getRoleArn())
        .taskDefinition(getTaskDefinitionArn())
        .environmentType(EnvironmentType.Daemon)
        .deploymentMethod("ReplaceAfterTerminate")
        .build();
  }

  // Builds the fully-qualified id; both names are run-prefixed here.
  private EnvironmentId environmentId(final String environmentName, final String cluster) {
    return EnvironmentId.builder()
        .accountId(getAccountId())
        .cluster(prefixName(cluster))
        .environmentName(prefixName(environmentName))
        .build();
  }

  private DescribeEnvironmentRequest describeEnvironmentRequest(
      final String environmentName, final String cluster) {
    EnvironmentId id = environmentId(environmentName, cluster);
    return describeEnvironmentRequest(id);
  }

  public DescribeEnvironmentRequest describeEnvironmentRequest(final EnvironmentId id) {
    return DescribeEnvironmentRequest.builder().environmentId(id).build();
  }

  /** Update request whose environment id points at a (possibly new) cluster. */
  public UpdateEnvironmentRequest updateEnvironmentRequestWithNewCluster(
      final String environmentName, final String cluster) {
    EnvironmentId id = environmentId(environmentName, cluster);
    return updateEnvironmentRequest(id);
  }

  private UpdateEnvironmentRequest updateEnvironmentRequest(final EnvironmentId id) {
    return UpdateEnvironmentRequest.builder()
        .environmentId(id)
        .taskDefinition(getTaskDefinitionArn())
        .build();
  }

  /** Non-forced delete request for an already-built environment id. */
  public DeleteEnvironmentRequest deleteEnvironmentRequest(final EnvironmentId environmentId) {
    return DeleteEnvironmentRequest.builder()
        .environmentId(environmentId)
        .forceDelete(false)
        .build();
  }

  /** Non-forced delete request built from raw (unprefixed) names. */
  public DeleteEnvironmentRequest deleteEnvironmentRequest(
      final String environmentName, final String cluster) {
    return DeleteEnvironmentRequest.builder()
        .environmentId(environmentId(environmentName, cluster))
        .forceDelete(false)
        .build();
  }

  // Lists only clusters created by this run: the empty-name prefix yields
  // "blox-integ-tests-<sharedId>-".
  public ListClustersRequest listClustersRequest() {
    return ListClustersRequest.builder()
        .accountId(getAccountId())
        .clusterNamePrefix(prefixName(""))
        .build();
  }

  // Note: environmentNamePrefix is passed through unprefixed, unlike the
  // cluster name.
  public ListEnvironmentsRequest listEnvironmentsRequest(
      final String clusterName, final String environmentNamePrefix) {
    return ListEnvironmentsRequest.builder()
        .cluster(
            Cluster.builder()
                .accountId(getAccountId())
                .clusterName(prefixName(clusterName))
                .build())
        .environmentNamePrefix(environmentNamePrefix)
        .build();
  }
}
apache-2.0
Terminator-Aaron/Katana
aspnetwebsrc/System.Web.Http/HttpDeleteAttribute.cs
849
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.

using System.Collections.ObjectModel;
using System.Net.Http;
using System.Web.Http.Controllers;

namespace System.Web.Http
{
    /// <summary>
    /// Specifies that an action supports the DELETE HTTP method.
    /// </summary>
    [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = true)]
    public sealed class HttpDeleteAttribute : Attribute, IActionHttpMethodProvider
    {
        // Shared singleton collection: every instance of this attribute reports
        // the same single supported method.
        private static readonly Collection<HttpMethod> _deleteOnly =
            new Collection<HttpMethod>(new[] { HttpMethod.Delete });

        /// <summary>
        /// Gets the collection of HTTP methods this action supports; always
        /// contains exactly <see cref="HttpMethod.Delete"/>.
        /// </summary>
        public Collection<HttpMethod> HttpMethods
        {
            get
            {
                return _deleteOnly;
            }
        }
    }
}
apache-2.0
samwash/torgo
src/test/java/org/tros/utils/logging/LoggerTest.java
7075
/* * Copyright 2015 Matthew Aguirre * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.tros.utils.logging; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; /** * * @author matta */ public class LoggerTest { public LoggerTest() { } @BeforeClass public static void setUpClass() { } @AfterClass public static void tearDownClass() { } @Before public void setUp() { } @After public void tearDown() { } /** * Test of warn method, of class CommonsLogger. */ @Test public void testWarn_String() { System.out.println("warn"); String message = "warn"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.warn(message); } /** * Test of debug method, of class CommonsLogger. */ @Test public void testDebug_String() { System.out.println("debug"); String message = "debug"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.debug(message); } /** * Test of error method, of class CommonsLogger. */ @Test public void testError_String() { System.out.println("error"); String message = "error"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.error(message); } /** * Test of info method, of class CommonsLogger. 
*/ @Test public void testInfo_String() { System.out.println("info"); String message = "info"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.info(message); } /** * Test of verbose method, of class CommonsLogger. */ @Test public void testVerbose_String() { System.out.println("verbose"); String message = "verbose"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.verbose(message); } /** * Test of warn method, of class CommonsLogger. */ @Test public void testWarn_String_ObjectArr() { System.out.println("warn"); String message = "warn"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.warn("Message: {0}", new Object[]{message}); } /** * Test of debug method, of class CommonsLogger. */ @Test public void testDebug_String_ObjectArr() { System.out.println("debug"); String message = "debug"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.debug("Message: {0}", new Object[]{message}); } /** * Test of error method, of class CommonsLogger. */ @Test public void testError_String_ObjectArr() { System.out.println("error"); String message = "error"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.error("Message: {0}", new Object[]{message}); } /** * Test of info method, of class CommonsLogger. */ @Test public void testInfo_String_ObjectArr() { System.out.println("info"); String message = "info"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.info("Message: {0}", new Object[]{message}); } /** * Test of verbose method, of class CommonsLogger. */ @Test public void testVerbose_String_ObjectArr() { System.out.println("verbose"); String message = "verbose"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.verbose("Message: {0}", new Object[]{message}); } /** * Test of warn method, of class CommonsLogger. 
*/ @Test public void testWarn_String_Throwable() { System.out.println("warn"); String message = "warn"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.warn("Message: ", new Throwable(message)); } /** * Test of debug method, of class CommonsLogger. */ @Test public void testDebug_String_Throwable() { System.out.println("debug"); String message = "debug"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.debug("Message: ", new Throwable(message)); } /** * Test of error method, of class CommonsLogger. */ @Test public void testError_String_Throwable() { System.out.println("error"); String message = "error"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.error("Message: ", new Throwable(message)); } /** * Test of info method, of class CommonsLogger. */ @Test public void testInfo_String_Throwable() { System.out.println("info"); String message = "info"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.info("Message: ", new Throwable(message)); } /** * Test of verbose method, of class CommonsLogger. */ @Test public void testVerbose_String_Throwable() { System.out.println("verbose"); String message = "verbose"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.verbose("Message: ", new Throwable(message)); } /** * Test of fatal method, of class CommonsLogger. */ @Test public void testFatal_String() { System.out.println("fatal"); String message = "fatal"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.fatal(message); } /** * Test of fatal method, of class CommonsLogger. */ @Test public void testFatal_String_ObjectArr() { System.out.println("fatal"); String message = "fatal"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.fatal("Message: {0}", new Object[]{message}); } /** * Test of fatal method, of class CommonsLogger. 
*/ @Test public void testFatal_String_Throwable() { System.out.println("fatal"); String message = "fatal"; Logger instance = Logging.getLogFactory().getLogger(LoggerTest.class); instance.fatal("Message: ", new Throwable(message)); } }
apache-2.0
potto007/druid-avro
processing/src/test/java/io/druid/query/aggregation/MetricManipulatorFnsTest.java
4711
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.query.aggregation;

import io.druid.query.aggregation.hyperloglog.HyperLogLogCollector;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.segment.LongColumnSelector;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.ArrayList;
import java.util.Arrays;

/**
 * Parameterized test for {@link MetricManipulatorFns}: each parameter row
 * supplies an aggregator factory plus the values expected from the identity,
 * finalizing, and deserializing manipulators.
 */
@RunWith(Parameterized.class)
public class MetricManipulatorFnsTest
{
  private static final String NAME = "name";
  private static final String FIELD = "field";

  // Each Object[] row holds, in order:
  //   { aggregatorFactory, agg, identity, finalize, serialForm, deserForm }
  // matching the constructor parameters below.
  @Parameterized.Parameters(name = "{0}")
  public static Iterable<Object[]> constructorFeeder()
  {
    final ArrayList<Object[]> constructorArrays = new ArrayList<>();
    final long longVal = 13789;
    // longMin: all manipulators are pass-through for a primitive long.
    LongMinAggregator longMinAggregator = new LongMinAggregator(
        new LongColumnSelector()
        {
          @Override
          public long get()
          {
            return longVal;
          }
        }
    );
    LongMinAggregatorFactory longMinAggregatorFactory = new LongMinAggregatorFactory(NAME, FIELD);
    constructorArrays.add(
        new Object[]{
            longMinAggregatorFactory,
            longMinAggregator,
            longMinAggregator,
            longMinAggregator,
            longVal,
            longVal
        }
    );
    // hyperUnique: finalizing yields the cardinality estimate and the
    // serialized form is the collector's byte array.
    HyperUniquesAggregatorFactory hyperUniquesAggregatorFactory = new HyperUniquesAggregatorFactory(NAME, FIELD);
    HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
    collector.add((short) 1, (byte) 5);
    constructorArrays.add(
        new Object[]{
            hyperUniquesAggregatorFactory,
            collector,
            collector,
            collector.estimateCardinality(),
            collector.toByteArray(),
            collector
        }
    );
    // longSum: pass-through like longMin.
    LongSumAggregatorFactory longSumAggregatorFactory = new LongSumAggregatorFactory(NAME, FIELD);
    LongSumAggregator longSumAggregator = new LongSumAggregator(
        new LongColumnSelector()
        {
          @Override
          public long get()
          {
            return longVal;
          }
        }
    );
    constructorArrays.add(
        new Object[]{
            longSumAggregatorFactory,
            longSumAggregator,
            longSumAggregator,
            longSumAggregator,
            longVal,
            longVal
        }
    );
    // Sanity-check each row's arity before handing it to the runner.
    for (Object[] argList : constructorArrays) {
      Assert.assertEquals(
          String.format(
              "Arglist %s is too short. Expected 6 found %d",
              Arrays.toString(argList),
              argList.length
          ),
          6,
          argList.length
      );
    }
    return constructorArrays;
  }

  private final AggregatorFactory aggregatorFactory;
  private final Object agg;
  private final Object identity;
  private final Object finalize;
  private final Object serialForm;
  private final Object deserForm;

  public MetricManipulatorFnsTest(
      AggregatorFactory aggregatorFactory,
      Object agg,
      Object identity,
      Object finalize,
      Object serialForm,
      Object deserForm
  )
  {
    this.aggregatorFactory = aggregatorFactory;
    this.agg = agg;
    this.identity = identity;
    this.finalize = finalize;
    this.serialForm = serialForm;
    this.deserForm = deserForm;
  }

  // identity() must leave the aggregated value untouched.
  @Test
  public void testIdentity()
  {
    Assert.assertEquals(identity, agg);
    Assert.assertEquals(identity, MetricManipulatorFns.identity().manipulate(aggregatorFactory, agg));
  }

  // finalizing() must produce the expected finalized value.
  @Test
  public void testFinalize()
  {
    Assert.assertEquals(identity, agg);
    Assert.assertEquals(finalize, MetricManipulatorFns.finalizing().manipulate(aggregatorFactory, agg));
  }

  // deserializing() must reconstruct the value from its serialized form.
  @Test
  public void testDeserialize()
  {
    Assert.assertEquals(identity, agg);
    Assert.assertEquals(deserForm, MetricManipulatorFns.deserializing().manipulate(aggregatorFactory, serialForm));
  }
}
apache-2.0
sdague/home-assistant
homeassistant/components/openweathermap/const.py
5320
"""Consts for the OpenWeatherMap.""" from homeassistant.components.weather import ( ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, ATTR_FORECAST_WIND_BEARING, ATTR_FORECAST_WIND_SPEED, ) from homeassistant.const import ( DEGREE, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_PRESSURE, DEVICE_CLASS_TEMPERATURE, DEVICE_CLASS_TIMESTAMP, LENGTH_MILLIMETERS, PERCENTAGE, PRESSURE_PA, SPEED_METERS_PER_SECOND, TEMP_CELSIUS, ) DOMAIN = "openweathermap" DEFAULT_NAME = "OpenWeatherMap" DEFAULT_LANGUAGE = "en" ATTRIBUTION = "Data provided by OpenWeatherMap" CONF_LANGUAGE = "language" CONFIG_FLOW_VERSION = 2 ENTRY_NAME = "name" ENTRY_WEATHER_COORDINATOR = "weather_coordinator" ATTR_API_PRECIPITATION = "precipitation" ATTR_API_DATETIME = "datetime" ATTR_API_WEATHER = "weather" ATTR_API_TEMPERATURE = "temperature" ATTR_API_WIND_SPEED = "wind_speed" ATTR_API_WIND_BEARING = "wind_bearing" ATTR_API_HUMIDITY = "humidity" ATTR_API_PRESSURE = "pressure" ATTR_API_CONDITION = "condition" ATTR_API_CLOUDS = "clouds" ATTR_API_RAIN = "rain" ATTR_API_SNOW = "snow" ATTR_API_WEATHER_CODE = "weather_code" ATTR_API_FORECAST = "forecast" SENSOR_NAME = "sensor_name" SENSOR_UNIT = "sensor_unit" SENSOR_DEVICE_CLASS = "sensor_device_class" UPDATE_LISTENER = "update_listener" COMPONENTS = ["sensor", "weather"] FORECAST_MODE_HOURLY = "hourly" FORECAST_MODE_DAILY = "daily" FORECAST_MODE_FREE_DAILY = "freedaily" FORECAST_MODE_ONECALL_HOURLY = "onecall_hourly" FORECAST_MODE_ONECALL_DAILY = "onecall_daily" FORECAST_MODES = [ FORECAST_MODE_HOURLY, FORECAST_MODE_DAILY, FORECAST_MODE_ONECALL_HOURLY, FORECAST_MODE_ONECALL_DAILY, ] DEFAULT_FORECAST_MODE = FORECAST_MODE_ONECALL_DAILY MONITORED_CONDITIONS = [ ATTR_API_WEATHER, ATTR_API_TEMPERATURE, ATTR_API_WIND_SPEED, ATTR_API_WIND_BEARING, ATTR_API_HUMIDITY, ATTR_API_PRESSURE, ATTR_API_CLOUDS, ATTR_API_RAIN, ATTR_API_SNOW, ATTR_API_CONDITION, ATTR_API_WEATHER_CODE, ] FORECAST_MONITORED_CONDITIONS = [ 
ATTR_FORECAST_CONDITION, ATTR_FORECAST_PRECIPITATION, ATTR_FORECAST_TEMP, ATTR_FORECAST_TEMP_LOW, ATTR_FORECAST_TIME, ATTR_FORECAST_WIND_BEARING, ATTR_FORECAST_WIND_SPEED, ] LANGUAGES = [ "af", "al", "ar", "az", "bg", "ca", "cz", "da", "de", "el", "en", "es", "eu", "fa", "fi", "fr", "gl", "he", "hi", "hr", "hu", "id", "it", "ja", "kr", "la", "lt", "mk", "nl", "no", "pl", "pt", "pt_br", "ro", "ru", "se", "sk", "sl", "sp", "sr", "sv", "th", "tr", "ua", "uk", "vi", "zh_cn", "zh_tw", "zu", ] CONDITION_CLASSES = { "cloudy": [803, 804], "fog": [701, 741], "hail": [906], "lightning": [210, 211, 212, 221], "lightning-rainy": [200, 201, 202, 230, 231, 232], "partlycloudy": [801, 802], "pouring": [504, 314, 502, 503, 522], "rainy": [300, 301, 302, 310, 311, 312, 313, 500, 501, 520, 521], "snowy": [600, 601, 602, 611, 612, 620, 621, 622], "snowy-rainy": [511, 615, 616], "sunny": [800], "windy": [905, 951, 952, 953, 954, 955, 956, 957], "windy-variant": [958, 959, 960, 961], "exceptional": [711, 721, 731, 751, 761, 762, 771, 900, 901, 962, 903, 904], } WEATHER_SENSOR_TYPES = { ATTR_API_WEATHER: {SENSOR_NAME: "Weather"}, ATTR_API_TEMPERATURE: { SENSOR_NAME: "Temperature", SENSOR_UNIT: TEMP_CELSIUS, SENSOR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, }, ATTR_API_WIND_SPEED: { SENSOR_NAME: "Wind speed", SENSOR_UNIT: SPEED_METERS_PER_SECOND, }, ATTR_API_WIND_BEARING: {SENSOR_NAME: "Wind bearing", SENSOR_UNIT: DEGREE}, ATTR_API_HUMIDITY: { SENSOR_NAME: "Humidity", SENSOR_UNIT: PERCENTAGE, SENSOR_DEVICE_CLASS: DEVICE_CLASS_HUMIDITY, }, ATTR_API_PRESSURE: { SENSOR_NAME: "Pressure", SENSOR_UNIT: PRESSURE_PA, SENSOR_DEVICE_CLASS: DEVICE_CLASS_PRESSURE, }, ATTR_API_CLOUDS: {SENSOR_NAME: "Cloud coverage", SENSOR_UNIT: PERCENTAGE}, ATTR_API_RAIN: {SENSOR_NAME: "Rain", SENSOR_UNIT: LENGTH_MILLIMETERS}, ATTR_API_SNOW: {SENSOR_NAME: "Snow", SENSOR_UNIT: LENGTH_MILLIMETERS}, ATTR_API_CONDITION: {SENSOR_NAME: "Condition"}, ATTR_API_WEATHER_CODE: {SENSOR_NAME: "Weather Code"}, } 
FORECAST_SENSOR_TYPES = { ATTR_FORECAST_CONDITION: {SENSOR_NAME: "Condition"}, ATTR_FORECAST_PRECIPITATION: {SENSOR_NAME: "Precipitation"}, ATTR_FORECAST_TEMP: { SENSOR_NAME: "Temperature", SENSOR_UNIT: TEMP_CELSIUS, SENSOR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, }, ATTR_FORECAST_TEMP_LOW: { SENSOR_NAME: "Temperature Low", SENSOR_UNIT: TEMP_CELSIUS, SENSOR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE, }, ATTR_FORECAST_TIME: { SENSOR_NAME: "Time", SENSOR_DEVICE_CLASS: DEVICE_CLASS_TIMESTAMP, }, ATTR_API_WIND_BEARING: {SENSOR_NAME: "Wind bearing", SENSOR_UNIT: DEGREE}, ATTR_API_WIND_SPEED: { SENSOR_NAME: "Wind speed", SENSOR_UNIT: SPEED_METERS_PER_SECOND, }, }
apache-2.0
foryou2030/incubator-carbondata
processing/src/main/java/org/apache/carbondata/processing/newflow/parser/impl/ArrayParserImpl.java
2307
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.carbondata.processing.newflow.parser.impl;

import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.processing.newflow.complexobjects.ArrayObject;
import org.apache.carbondata.processing.newflow.parser.ComplexParser;
import org.apache.carbondata.processing.newflow.parser.GenericParser;

import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;

/**
 * It parses the string to @{@link ArrayObject} using delimiter.
 * It is thread safe as the state of class don't change while
 * calling @{@link GenericParser#parse(String)} method
 */
public class ArrayParserImpl implements ComplexParser<ArrayObject> {

  // Pre-compiled split pattern built from the (converted) array delimiter.
  private Pattern pattern;

  // Child parsers registered via addChildren(); element i parses split[i].
  private List<GenericParser> children = new ArrayList<>();

  public ArrayParserImpl(String delimiter) {
    pattern = Pattern.compile(CarbonUtil.delimiterConverter(delimiter));
  }

  // Splits the input on the delimiter and delegates piece i to child parser i;
  // returns null for empty input or an empty split result.
  // NOTE(review): the loop is bounded by children.size() but indexes split[i],
  // so an input with fewer delimited elements than registered children throws
  // ArrayIndexOutOfBoundsException, and elements beyond children.size() are
  // silently dropped. Confirm whether callers guarantee exactly one child per
  // element (or intend a single shared child applied to every element).
  @Override
  public ArrayObject parse(String data) {
    if (StringUtils.isNotEmpty(data)) {
      String[] split = pattern.split(data, -1);
      if (ArrayUtils.isNotEmpty(split)) {
        Object[] array = new Object[children.size()];
        for (int i = 0; i < children.size(); i++) {
          array[i] = children.get(i).parse(split[i]);
        }
        return new ArrayObject(array);
      }
    }
    return null;
  }

  @Override
  public void addChildren(GenericParser parser) {
    children.add(parser);
  }
}
apache-2.0
NickAndroid/Scalpel
scalpel/src/main/java/com/nick/scalpel/annotation/binding/FindBool.java
964
/* * Copyright (c) 2016 Nick Guo * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.nick.scalpel.annotation.binding; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import java.lang.annotation.Target; import static java.lang.annotation.ElementType.FIELD; import static java.lang.annotation.RetentionPolicy.RUNTIME; @Target({FIELD}) @Retention(RUNTIME) @Documented public @interface FindBool { int id(); }
apache-2.0
vijaykanthm28/nilavu
app/controllers/cockpits_controller.rb
965
## ## Copyright [2013-2016] [Megam Systems] ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. ## class CockpitsController < ApplicationController include CockpitListResponder respond_to :html, :js skip_before_filter :check_xhr before_action :add_authkeys_for_api, only: [:index] def entrance render 'cockpits/entrance' end def index respond_with_list(Api::Assemblies.new.list(params).baked.flatten) end end
apache-2.0
frreiss/tensorflow-fred
tensorflow/python/kernel_tests/xent_op_test_base.py
12186
# Copyright 2015-2021 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for SoftmaxCrossEntropyWithLogits op.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.eager import backprop from tensorflow.python.framework import config from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import gradient_checker from tensorflow.python.ops import gradients_impl from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn_ops # The following import is required to register the gradient function. 
from tensorflow.python.ops.nn_grad import _SoftmaxCrossEntropyWithLogitsGrad # pylint: disable=unused-import from tensorflow.python.platform import test class XentOpTestBase(test.TestCase): def _opFwdBwd(self, labels, logits, axis=-1): """ Runs the op-under-test both forwards and backwards.""" logits = ops.convert_to_tensor(logits) # needed for the gradient tape with backprop.GradientTape() as tape: tape.watch(logits) loss = nn_ops.softmax_cross_entropy_with_logits( labels=labels, logits=logits, dim=axis) return loss, tape.gradient(loss, logits) def _npXent(self, labels, logits, dim=-1): if dim == -1: dim = len(logits.shape) - 1 one_only_on_dim = list(logits.shape) one_only_on_dim[dim] = 1 e = np.exp(logits - np.reshape(np.amax(logits, axis=dim), one_only_on_dim)) probs = e / np.reshape(np.sum(e, axis=dim), one_only_on_dim) bp = (probs - labels) l = -np.sum(labels * np.log(probs + 1.0e-20), axis=dim) return l, bp # TODO(b/123860949): The values are constant folded for XLA, so placeholders # are needed. 
def _testXent2D(self, np_labels, np_logits, with_placeholders=False, expected_gradient=None): np_loss, np_gradient = self._npXent(labels=np_labels, logits=np_logits) if expected_gradient is not None: np_gradient = expected_gradient with self.cached_session() as sess: if with_placeholders: logits_placeholder = array_ops.placeholder(np_logits.dtype) labels_placeholder = array_ops.placeholder(np_labels.dtype) loss, gradient = self._opFwdBwd(labels_placeholder, logits_placeholder) tf_loss, tf_gradient = sess.run([loss, gradient], feed_dict={ labels_placeholder: np_labels, logits_placeholder: np_logits }) else: loss, gradient = self._opFwdBwd(np_labels, np_logits) tf_loss, tf_gradient = self.evaluate([loss, gradient]) self.assertAllCloseAccordingToType(np_loss, tf_loss, half_rtol=1e-2) self.assertAllCloseAccordingToType(np_gradient, tf_gradient) def _testXentND(self, np_labels, np_logits, dim=-1): np_loss, _ = self._npXent(np_labels, np_logits, dim=dim) loss = nn_ops.softmax_cross_entropy_with_logits( labels=np_labels, logits=np_logits, dim=dim) tf_loss = self.evaluate(loss) self.assertAllCloseAccordingToType(np_loss, tf_loss) def _testSingleClass(self, expected_gradient=[[2.0], [1.0], [0.0], [0.0]]): for dtype in np.float16, np.float32: loss, gradient = self._opFwdBwd( labels=np.array([[-1.], [0.], [1.], [1.]]).astype(dtype), logits=np.array([[1.], [-1.], [0.], [1.]]).astype(dtype)) self.assertAllClose([0.0, 0.0, 0.0, 0.0], loss) self.assertAllClose(expected_gradient, gradient) def testSingleClass(self): """This method is structured to be easily overridden by a child class.""" self._testSingleClass() def testNpXent(self): # We create 2 batches of logits for testing. # batch 0 is the boring uniform distribution: 1, 1, 1, 1, with target 3. # batch 1 has a bit of difference: 1, 2, 3, 4, with soft targets (1, 2). 
logits = [[1., 1., 1., 1.], [1., 2., 3., 4.]] labels = [[0., 0., 0., 1.], [0., .5, .5, 0.]] # For batch 0, we expect the uniform distribution: 0.25, 0.25, 0.25, 0.25 # With a hard target 3, the gradient is [0.25, 0.25, 0.25, -0.75] # The loss for this batch is -log(0.25) = 1.386 # # For batch 1, we have: # exp(0) = 1 # exp(1) = 2.718 # exp(2) = 7.389 # exp(3) = 20.085 # SUM = 31.192 # So we have as probabilities: # exp(0) / SUM = 0.032 # exp(1) / SUM = 0.087 # exp(2) / SUM = 0.237 # exp(3) / SUM = 0.644 # With a soft target (1, 2), the gradient is # [0.032, 0.087 - 0.5 = -0.413, 0.237 - 0.5 = -0.263, 0.644] # The loss for this batch is [0.5 * -log(0.087), 0.5 * -log(0.237)] # = [1.3862, 1.9401] np_loss, np_gradient = self._npXent(np.array(labels), np.array(logits)) self.assertAllClose( np.array([[0.25, 0.25, 0.25, -0.75], [0.0321, -0.4129, -0.2632, 0.6439]]), np_gradient, rtol=1.e-3, atol=1.e-3) self.assertAllClose( np.array([1.3862, 1.9401]), np_loss, rtol=1.e-3, atol=1.e-3) # TODO(b/123860949): The values are constant folded for XLA, so placeholders # are needed. 
@test_util.run_deprecated_v1 def _testLabelsBroadcast(self, uniform_labels_gradient): labels = np.array([[0., 0., 0., 1.]]).astype(np.float16) logits = np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float16) self._testXent2D(labels, logits, with_placeholders=True) labels = np.array([[1.]]).astype(np.float16) logits = np.array([[1.], [2.]]).astype(np.float16) self._testXent2D(labels, logits, with_placeholders=True) labels = np.array([[0.], [2.], [0.25]]).astype(np.float16) logits = np.array([[1., 1., 1., 1.], [1., 2., 3., 4.], [1., 2., 3., 4.]]).astype(np.float16) self._testXent2D( labels, logits, with_placeholders=True, expected_gradient=uniform_labels_gradient) def testLabelsBroadcast(self): """This method is structured to be easily overridden by a child class.""" self._testLabelsBroadcast(uniform_labels_gradient=[[ 0.25, 0.25, 0.25, 0.25 ], [-1.968, -1.913, -1.763, -1.355], [-0.218, -0.163, -0.013, 0.394]]) @test_util.run_deprecated_v1 def testShapeMismatch(self): with self.cached_session(): with self.assertRaises(ValueError): self._opFwdBwd( labels=[[0., 1., 0.], [1., 0., 0.]], logits=[[0., 1.], [2., 3.]]) def testHalf(self): labels = np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float16) logits = np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float16) self._testXent2D(labels, logits) def testFloat(self): labels = np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float32) logits = np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float32) self._testXent2D(labels, logits) def testDouble(self): labels = np.array([[0., 0., 0., 1.], [0., .5, .5, 0.]]).astype(np.float64) logits = np.array([[1., 1., 1., 1.], [1., 2., 3., 4.]]).astype(np.float64) self._testXent2D(labels, logits) @test_util.run_deprecated_v1 def testGradient(self): with self.cached_session() as sess: labels = constant_op.constant( [0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.5], shape=[3, 4], dtype=dtypes.float64, name="labels") logits = 
constant_op.constant( [0.1, 0.2, 0.3, 0.4, 0.1, 0.4, 0.9, 1.6, 0.1, 0.8, 2.7, 6.4], shape=[3, 4], dtype=dtypes.float64, name="logits") x = nn_ops.softmax_cross_entropy_with_logits( labels=labels, logits=logits, name="xent") err = gradient_checker.compute_gradient_error(logits, [3, 4], x, [3]) # Check that no extra computation gets performed. When only the first # derivative is requested, the second derivative must not be computed. # So when there is no second derivative, there is no `BatchMatMul` op # in the graph. op_names = [ op.op_def.name for op in sess.graph.get_operations() if op.op_def ] self.assertNotIn("BatchMatMul", op_names) self.assertNotIn("BatchMatMulV2", op_names) self.assertLess(err, 5e-8) @test_util.run_deprecated_v1 def testGradientLabelWithV2(self): with self.cached_session(): labels = constant_op.constant( [0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.5], shape=[3, 4], dtype=dtypes.float64, name="labels") logits = constant_op.constant( [0.1, 0.2, 0.3, 0.4, 0.1, 0.4, 0.9, 1.6, 0.1, 0.8, 2.7, 6.4], shape=[3, 4], dtype=dtypes.float64, name="logits") x = nn_ops.softmax_cross_entropy_with_logits_v2( labels=labels, logits=logits, name="xent") err = gradient_checker.compute_gradient_error(labels, [3, 4], x, [3]) self.assertLess(err, 5e-8) @test_util.run_deprecated_v1 def testSecondGradient(self): with self.cached_session() as sess: labels = constant_op.constant([ 0.0, 0.0, 1.0 / 3, 0.0, 1.0 / 3, 0.0, 0.0, 0.0, 0.0, 0.5 / 3, 0.0, 0.5 / 3 ], shape=[12], dtype=dtypes.float64, name="labels") logits = constant_op.constant( [0.1, 0.2, 0.3, 0.4, 0.1, 0.4, 0.9, 1.6, 0.1, 0.8, 2.7, 6.4], shape=[12], dtype=dtypes.float64, name="logits") x = nn_ops.softmax_cross_entropy_with_logits( labels=labels, logits=logits, name="xent") loss = math_ops.reduce_sum(x) gradients = gradients_impl.gradients(loss, [logits])[0] err = gradient_checker.compute_gradient_error(logits, [12], gradients, [12]) if not config.deterministic_ops_enabled(): # Check how second 
derivative is calculated. # (it is equivalent to a `BatchMatMul` op being in the graph because of # the implementation in SoftmaxCrossEntropyWithLogitsGrad) op_names = [ op.op_def.name for op in sess.graph.get_operations() if op.op_def ] self.assertIn("BatchMatMulV2", op_names) self.assertLess(err, 5e-8) def test3D(self): labels = np.array([[[0., 0., 0., 1.], [0., 1., 0., 0.]], [[0., 0.5, 0.5, 0.], [0.5, 0.5, 0., 0.]], [[0., 1., 0., 0.], [0., 0., 1., 0.]]]).astype(np.float32) logits = np.array([[[1., 1., 1., 1.], [1., 2., 3., 4.]], [[2., 3., 4., 5.], [6., 7., 8., 9.]], [[5., 4., 3., 2.], [1., 2., 3., 4.]]]).astype(np.float32) self._testXentND(labels, logits, dim=0) self._testXentND(labels, logits, dim=1) self._testXentND(labels, logits, dim=-1) def testZeroDimension(self): labels = np.zeros([0, 2, 4]).astype(np.float32) logits = np.zeros([0, 2, 4]).astype(np.float32) np_loss, _ = self._npXent(labels=labels, logits=logits) loss = nn_ops.softmax_cross_entropy_with_logits( labels=labels, logits=logits) tf_loss = self.evaluate(loss) self.assertAllEqual(np_loss, tf_loss)
apache-2.0
cberner/presto
presto-main/src/main/java/com/facebook/presto/sql/rewrite/ShowQueriesRewrite.java
26784
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.rewrite; import com.facebook.presto.Session; import com.facebook.presto.metadata.FunctionKind; import com.facebook.presto.metadata.Metadata; import com.facebook.presto.metadata.QualifiedObjectName; import com.facebook.presto.metadata.SessionPropertyManager.SessionPropertyValue; import com.facebook.presto.metadata.SqlFunction; import com.facebook.presto.metadata.TableHandle; import com.facebook.presto.metadata.TableLayout; import com.facebook.presto.metadata.TableLayoutResult; import com.facebook.presto.metadata.ViewDefinition; import com.facebook.presto.security.AccessControl; import com.facebook.presto.spi.CatalogSchemaName; import com.facebook.presto.spi.ColumnHandle; import com.facebook.presto.spi.ColumnMetadata; import com.facebook.presto.spi.ConnectorTableMetadata; import com.facebook.presto.spi.Constraint; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.SchemaTableName; import com.facebook.presto.spi.session.PropertyMetadata; import com.facebook.presto.sql.analyzer.QueryExplainer; import com.facebook.presto.sql.analyzer.SemanticException; import com.facebook.presto.sql.parser.ParsingException; import com.facebook.presto.sql.parser.SqlParser; import com.facebook.presto.sql.tree.AllColumns; import com.facebook.presto.sql.tree.ArrayConstructor; import com.facebook.presto.sql.tree.AstVisitor; import com.facebook.presto.sql.tree.BooleanLiteral; import 
com.facebook.presto.sql.tree.Cast; import com.facebook.presto.sql.tree.ColumnDefinition; import com.facebook.presto.sql.tree.CreateTable; import com.facebook.presto.sql.tree.CreateView; import com.facebook.presto.sql.tree.DoubleLiteral; import com.facebook.presto.sql.tree.Explain; import com.facebook.presto.sql.tree.Expression; import com.facebook.presto.sql.tree.GroupBy; import com.facebook.presto.sql.tree.LikePredicate; import com.facebook.presto.sql.tree.LongLiteral; import com.facebook.presto.sql.tree.Node; import com.facebook.presto.sql.tree.OrderBy; import com.facebook.presto.sql.tree.QualifiedName; import com.facebook.presto.sql.tree.Query; import com.facebook.presto.sql.tree.Relation; import com.facebook.presto.sql.tree.SelectItem; import com.facebook.presto.sql.tree.ShowCatalogs; import com.facebook.presto.sql.tree.ShowColumns; import com.facebook.presto.sql.tree.ShowCreate; import com.facebook.presto.sql.tree.ShowFunctions; import com.facebook.presto.sql.tree.ShowPartitions; import com.facebook.presto.sql.tree.ShowSchemas; import com.facebook.presto.sql.tree.ShowSession; import com.facebook.presto.sql.tree.ShowTables; import com.facebook.presto.sql.tree.SimpleGroupBy; import com.facebook.presto.sql.tree.SingleColumn; import com.facebook.presto.sql.tree.SortItem; import com.facebook.presto.sql.tree.Statement; import com.facebook.presto.sql.tree.StringLiteral; import com.facebook.presto.sql.tree.TableElement; import com.facebook.presto.sql.tree.Values; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_COLUMNS; import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_INTERNAL_PARTITIONS; import static 
com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_SCHEMATA; import static com.facebook.presto.connector.informationSchema.InformationSchemaMetadata.TABLE_TABLES; import static com.facebook.presto.metadata.MetadataUtil.createCatalogSchemaName; import static com.facebook.presto.metadata.MetadataUtil.createQualifiedName; import static com.facebook.presto.metadata.MetadataUtil.createQualifiedObjectName; import static com.facebook.presto.spi.StandardErrorCode.INVALID_TABLE_PROPERTY; import static com.facebook.presto.sql.QueryUtil.aliased; import static com.facebook.presto.sql.QueryUtil.aliasedName; import static com.facebook.presto.sql.QueryUtil.aliasedNullToEmpty; import static com.facebook.presto.sql.QueryUtil.ascending; import static com.facebook.presto.sql.QueryUtil.caseWhen; import static com.facebook.presto.sql.QueryUtil.equal; import static com.facebook.presto.sql.QueryUtil.functionCall; import static com.facebook.presto.sql.QueryUtil.identifier; import static com.facebook.presto.sql.QueryUtil.logicalAnd; import static com.facebook.presto.sql.QueryUtil.ordering; import static com.facebook.presto.sql.QueryUtil.row; import static com.facebook.presto.sql.QueryUtil.selectAll; import static com.facebook.presto.sql.QueryUtil.selectList; import static com.facebook.presto.sql.QueryUtil.simpleQuery; import static com.facebook.presto.sql.QueryUtil.singleValueQuery; import static com.facebook.presto.sql.QueryUtil.subquery; import static com.facebook.presto.sql.QueryUtil.table; import static com.facebook.presto.sql.QueryUtil.unaliasedName; import static com.facebook.presto.sql.SqlFormatter.formatSql; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.CATALOG_NOT_SPECIFIED; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_SCHEMA; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.MISSING_TABLE; import static com.facebook.presto.sql.analyzer.SemanticErrorCode.NOT_SUPPORTED; import static 
com.facebook.presto.sql.analyzer.SemanticErrorCode.VIEW_PARSE_ERROR; import static com.facebook.presto.sql.tree.BooleanLiteral.FALSE_LITERAL; import static com.facebook.presto.sql.tree.BooleanLiteral.TRUE_LITERAL; import static com.facebook.presto.sql.tree.ShowCreate.Type.TABLE; import static com.facebook.presto.sql.tree.ShowCreate.Type.VIEW; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.google.common.base.Strings.nullToEmpty; import static com.google.common.collect.Iterables.getOnlyElement; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.stream.Collectors.toList; final class ShowQueriesRewrite implements StatementRewrite.Rewrite { @Override public Statement rewrite( Session session, Metadata metadata, SqlParser parser, Optional<QueryExplainer> queryExplainer, Statement node, List<Expression> parameters, AccessControl accessControl) { return (Statement) new Visitor(metadata, parser, session, parameters).process(node, null); } private static class Visitor extends AstVisitor<Node, Void> { private final Metadata metadata; private final Session session; private final SqlParser sqlParser; List<Expression> parameters; public Visitor(Metadata metadata, SqlParser sqlParser, Session session, List<Expression> parameters) { this.metadata = requireNonNull(metadata, "metadata is null"); this.sqlParser = requireNonNull(sqlParser, "sqlParser is null"); this.session = requireNonNull(session, "session is null"); this.parameters = requireNonNull(parameters, "parameters is null"); } @Override protected Node visitExplain(Explain node, Void context) { Statement statement = (Statement) process(node.getStatement(), null); return new Explain( node.getLocation().get(), node.isAnalyze(), statement, node.getOptions()); } @Override protected Node visitShowTables(ShowTables showTables, Void context) { CatalogSchemaName schema = createCatalogSchemaName(session, showTables, 
showTables.getSchema()); if (!metadata.schemaExists(session, schema)) { throw new SemanticException(MISSING_SCHEMA, showTables, "Schema '%s' does not exist", schema.getSchemaName()); } Expression predicate = equal(identifier("table_schema"), new StringLiteral(schema.getSchemaName())); Optional<String> likePattern = showTables.getLikePattern(); if (likePattern.isPresent()) { Expression likePredicate = new LikePredicate(identifier("table_name"), new StringLiteral(likePattern.get()), null); predicate = logicalAnd(predicate, likePredicate); } return simpleQuery( selectList(aliasedName("table_name", "Table")), from(schema.getCatalogName(), TABLE_TABLES), predicate, ordering(ascending("table_name"))); } @Override protected Node visitShowSchemas(ShowSchemas node, Void context) { if (!node.getCatalog().isPresent() && !session.getCatalog().isPresent()) { throw new SemanticException(CATALOG_NOT_SPECIFIED, node, "Catalog must be specified when session catalog is not set"); } Optional<Expression> predicate = Optional.empty(); Optional<String> likePattern = node.getLikePattern(); if (likePattern.isPresent()) { predicate = Optional.of(new LikePredicate(identifier("schema_name"), new StringLiteral(likePattern.get()), null)); } return simpleQuery( selectList(aliasedName("schema_name", "Schema")), from(node.getCatalog().orElseGet(() -> session.getCatalog().get()), TABLE_SCHEMATA), predicate, Optional.of(ordering(ascending("schema_name")))); } @Override protected Node visitShowCatalogs(ShowCatalogs node, Void context) { List<Expression> rows = metadata.getCatalogNames(session).keySet().stream() .map(name -> row(new StringLiteral(name))) .collect(toList()); Optional<Expression> predicate = Optional.empty(); Optional<String> likePattern = node.getLikePattern(); if (likePattern.isPresent()) { predicate = Optional.of(new LikePredicate(identifier("Catalog"), new StringLiteral(likePattern.get()), null)); } return simpleQuery( selectList(new AllColumns()), aliased(new Values(rows), 
"catalogs", ImmutableList.of("Catalog")), predicate, Optional.of(ordering(ascending("Catalog")))); } @Override protected Node visitShowColumns(ShowColumns showColumns, Void context) { QualifiedObjectName tableName = createQualifiedObjectName(session, showColumns, showColumns.getTable()); if (!metadata.getView(session, tableName).isPresent() && !metadata.getTableHandle(session, tableName).isPresent()) { throw new SemanticException(MISSING_TABLE, showColumns, "Table '%s' does not exist", tableName); } return simpleQuery( selectList( aliasedName("column_name", "Column"), aliasedName("data_type", "Type"), aliasedNullToEmpty("extra_info", "Extra"), aliasedNullToEmpty("comment", "Comment")), from(tableName.getCatalogName(), TABLE_COLUMNS), logicalAnd( equal(identifier("table_schema"), new StringLiteral(tableName.getSchemaName())), equal(identifier("table_name"), new StringLiteral(tableName.getObjectName()))), ordering(ascending("ordinal_position"))); } private static <T> Expression getExpression(PropertyMetadata<T> property, Object value) throws PrestoException { return toExpression(property.encode(property.getJavaType().cast(value))); } private static Expression toExpression(Object value) throws PrestoException { if (value instanceof String) { return new StringLiteral(value.toString()); } if (value instanceof Boolean) { return new BooleanLiteral(value.toString()); } if (value instanceof Long || value instanceof Integer) { return new LongLiteral(value.toString()); } if (value instanceof Double) { return new DoubleLiteral(value.toString()); } if (value instanceof List) { List<?> list = (List<?>) value; return new ArrayConstructor(list.stream() .map(Visitor::toExpression) .collect(toList())); } throw new PrestoException(INVALID_TABLE_PROPERTY, format("Failed to convert object of type %s to expression: %s", value.getClass().getName(), value)); } @Override protected Node visitShowPartitions(ShowPartitions showPartitions, Void context) { QualifiedObjectName table = 
createQualifiedObjectName(session, showPartitions, showPartitions.getTable()); Optional<TableHandle> tableHandle = metadata.getTableHandle(session, table); if (!tableHandle.isPresent()) { throw new SemanticException(MISSING_TABLE, showPartitions, "Table '%s' does not exist", table); } List<TableLayoutResult> layouts = metadata.getLayouts(session, tableHandle.get(), Constraint.alwaysTrue(), Optional.empty()); if (layouts.size() != 1) { throw new SemanticException(NOT_SUPPORTED, showPartitions, "Table does not have exactly one layout: %s", table); } TableLayout layout = getOnlyElement(layouts).getLayout(); if (!layout.getDiscretePredicates().isPresent()) { throw new SemanticException(NOT_SUPPORTED, showPartitions, "Table does not have partition columns: %s", table); } List<ColumnHandle> partitionColumns = layout.getDiscretePredicates().get().getColumns(); /* Generate a dynamic pivot to output one column per partition key. For example, a table with two partition keys (ds, cluster_name) would generate the following query: SELECT partition_number , max(CASE WHEN partition_key = 'ds' THEN partition_value END) ds , max(CASE WHEN partition_key = 'cluster_name' THEN partition_value END) cluster_name FROM ... GROUP BY partition_number The values are also cast to the type of the partition column. The query is then wrapped to allow custom filtering and ordering. 
*/ ImmutableList.Builder<SelectItem> selectList = ImmutableList.builder(); ImmutableList.Builder<SelectItem> wrappedList = ImmutableList.builder(); selectList.add(unaliasedName("partition_number")); for (ColumnHandle columnHandle : partitionColumns) { ColumnMetadata column = metadata.getColumnMetadata(session, tableHandle.get(), columnHandle); Expression key = equal(identifier("partition_key"), new StringLiteral(column.getName())); Expression value = caseWhen(key, identifier("partition_value")); value = new Cast(value, column.getType().getTypeSignature().toString()); Expression function = functionCall("max", value); selectList.add(new SingleColumn(function, column.getName())); wrappedList.add(unaliasedName(column.getName())); } Query query = simpleQuery( selectAll(selectList.build()), from(table.getCatalogName(), TABLE_INTERNAL_PARTITIONS), Optional.of(logicalAnd( equal(identifier("table_schema"), new StringLiteral(table.getSchemaName())), equal(identifier("table_name"), new StringLiteral(table.getObjectName())))), Optional.of(new GroupBy(false, ImmutableList.of(new SimpleGroupBy(ImmutableList.of(identifier("partition_number")))))), Optional.empty(), Optional.empty(), Optional.empty()); return simpleQuery( selectAll(wrappedList.build()), subquery(query), showPartitions.getWhere(), Optional.empty(), Optional.empty(), Optional.of(new OrderBy(ImmutableList.<SortItem>builder() .addAll(showPartitions.getOrderBy()) .add(ascending("partition_number")) .build())), showPartitions.getLimit()); } @Override protected Node visitShowCreate(ShowCreate node, Void context) { QualifiedObjectName objectName = createQualifiedObjectName(session, node, node.getName()); Optional<ViewDefinition> viewDefinition = metadata.getView(session, objectName); if (node.getType() == VIEW) { if (!viewDefinition.isPresent()) { if (metadata.getTableHandle(session, objectName).isPresent()) { throw new SemanticException(NOT_SUPPORTED, node, "Relation '%s' is a table, not a view", objectName); } throw new 
SemanticException(MISSING_TABLE, node, "View '%s' does not exist", objectName); } Query query = parseView(viewDefinition.get().getOriginalSql(), objectName, node); String sql = formatSql(new CreateView(createQualifiedName(objectName), query, false), Optional.of(parameters)).trim(); return singleValueQuery("Create View", sql); } if (node.getType() == TABLE) { if (viewDefinition.isPresent()) { throw new SemanticException(NOT_SUPPORTED, node, "Relation '%s' is a view, not a table", objectName); } Optional<TableHandle> tableHandle = metadata.getTableHandle(session, objectName); if (!tableHandle.isPresent()) { throw new SemanticException(MISSING_TABLE, node, "Table '%s' does not exist", objectName); } ConnectorTableMetadata connectorTableMetadata = metadata.getTableMetadata(session, tableHandle.get()).getMetadata(); List<TableElement> columns = connectorTableMetadata.getColumns().stream() .filter(column -> !column.isHidden()) .map(column -> new ColumnDefinition(column.getName(), column.getType().getDisplayName(), Optional.ofNullable(column.getComment()))) .collect(toImmutableList()); Map<String, Object> properties = connectorTableMetadata.getProperties(); Map<String, PropertyMetadata<?>> allTableProperties = metadata.getTablePropertyManager().getAllProperties().get(tableHandle.get().getConnectorId()); Map<String, Expression> sqlProperties = new HashMap<>(); for (Map.Entry<String, Object> propertyEntry : properties.entrySet()) { String propertyName = propertyEntry.getKey(); Object value = propertyEntry.getValue(); if (value == null) { throw new PrestoException(INVALID_TABLE_PROPERTY, format("Property %s for table %s cannot have a null value", propertyName, objectName)); } PropertyMetadata<?> property = allTableProperties.get(propertyName); if (!property.getJavaType().isInstance(value)) { throw new PrestoException(INVALID_TABLE_PROPERTY, format( "Property %s for table %s should have value of type %s, not %s", propertyName, objectName, property.getJavaType().getName(), 
value.getClass().getName())); } Expression sqlExpression = getExpression(property, value); sqlProperties.put(propertyName, sqlExpression); } CreateTable createTable = new CreateTable(QualifiedName.of(objectName.getCatalogName(), objectName.getSchemaName(), objectName.getObjectName()), columns, false, sqlProperties); return singleValueQuery("Create Table", formatSql(createTable, Optional.of(parameters)).trim()); } throw new UnsupportedOperationException("SHOW CREATE only supported for tables and views"); } @Override protected Node visitShowFunctions(ShowFunctions node, Void context) { ImmutableList.Builder<Expression> rows = ImmutableList.builder(); for (SqlFunction function : metadata.listFunctions()) { rows.add(row( new StringLiteral(function.getSignature().getName()), new StringLiteral(function.getSignature().getReturnType().toString()), new StringLiteral(Joiner.on(", ").join(function.getSignature().getArgumentTypes())), new StringLiteral(getFunctionType(function)), function.isDeterministic() ? 
TRUE_LITERAL : FALSE_LITERAL, new StringLiteral(nullToEmpty(function.getDescription())))); } Map<String, String> columns = ImmutableMap.<String, String>builder() .put("function_name", "Function") .put("return_type", "Return Type") .put("argument_types", "Argument Types") .put("function_type", "Function Type") .put("deterministic", "Deterministic") .put("description", "Description") .build(); return simpleQuery( selectAll(columns.entrySet().stream() .map(entry -> aliasedName(entry.getKey(), entry.getValue())) .collect(toImmutableList())), aliased(new Values(rows.build()), "functions", ImmutableList.copyOf(columns.keySet())), ordering( ascending("function_name"), ascending("return_type"), ascending("argument_types"), ascending("function_type"))); } private static String getFunctionType(SqlFunction function) { FunctionKind kind = function.getSignature().getKind(); switch (kind) { case AGGREGATE: return "aggregate"; case WINDOW: return "window"; case SCALAR: return "scalar"; } throw new IllegalArgumentException("Unsupported function kind: " + kind); } @Override protected Node visitShowSession(ShowSession node, Void context) { ImmutableList.Builder<Expression> rows = ImmutableList.builder(); List<SessionPropertyValue> sessionProperties = metadata.getSessionPropertyManager().getAllSessionProperties(session, metadata.getCatalogNames(session)); for (SessionPropertyValue sessionProperty : sessionProperties) { if (sessionProperty.isHidden()) { continue; } String value = sessionProperty.getValue(); String defaultValue = sessionProperty.getDefaultValue(); rows.add(row( new StringLiteral(sessionProperty.getFullyQualifiedName()), new StringLiteral(nullToEmpty(value)), new StringLiteral(nullToEmpty(defaultValue)), new StringLiteral(sessionProperty.getType()), new StringLiteral(sessionProperty.getDescription()), TRUE_LITERAL)); } // add bogus row so we can support empty sessions StringLiteral empty = new StringLiteral(""); rows.add(row(empty, empty, empty, empty, empty, 
FALSE_LITERAL)); return simpleQuery( selectList( aliasedName("name", "Name"), aliasedName("value", "Value"), aliasedName("default", "Default"), aliasedName("type", "Type"), aliasedName("description", "Description")), aliased( new Values(rows.build()), "session", ImmutableList.of("name", "value", "default", "type", "description", "include")), identifier("include")); } private Query parseView(String view, QualifiedObjectName name, Node node) { try { Statement statement = sqlParser.createStatement(view); return (Query) statement; } catch (ParsingException e) { throw new SemanticException(VIEW_PARSE_ERROR, node, "Failed parsing stored view '%s': %s", name, e.getMessage()); } } private static Relation from(String catalog, SchemaTableName table) { return table(QualifiedName.of(catalog, table.getSchemaName(), table.getTableName())); } @Override protected Node visitNode(Node node, Void context) { return node; } } }
apache-2.0
haeusser/tensorflow
tensorflow/compiler/jit/xla_device.cc
8975
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/compiler/jit/xla_device.h" #include <stdlib.h> #include <unordered_set> #include "tensorflow/compiler/jit/defs.h" #include "tensorflow/compiler/jit/xla_compilation_cache.h" #include "tensorflow/compiler/jit/xla_device_context.h" #include "tensorflow/compiler/jit/xla_device_ops.h" #include "tensorflow/compiler/tf2xla/dump_graph.h" #include "tensorflow/compiler/tf2xla/xla_op_registry.h" #include "tensorflow/compiler/xla/client/client_library.h" #include "tensorflow/core/common_runtime/device.h" #include "tensorflow/core/common_runtime/device_factory.h" #include "tensorflow/core/common_runtime/dma_helper.h" #include "tensorflow/core/common_runtime/function.h" #include "tensorflow/core/framework/allocator.h" #include "tensorflow/core/framework/device_base.h" #include "tensorflow/core/framework/function.h" #include "tensorflow/core/framework/node_def_builder.h" #include "tensorflow/core/framework/op_kernel.h" #include "tensorflow/core/framework/tensor.h" #include "tensorflow/core/framework/types.h" #include "tensorflow/core/graph/graph_constructor.h" #include "tensorflow/core/lib/core/notification.h" #include "tensorflow/core/lib/core/status.h" #include "tensorflow/core/platform/logging.h" #include "tensorflow/core/platform/stream_executor_no_cuda.h" #include 
"tensorflow/core/public/session_options.h" #include "tensorflow/core/public/version.h" #include "tensorflow/core/util/device_name_utils.h" #include "tensorflow/core/util/stream_executor_util.h" namespace tensorflow { /* static */ Status XlaDevice::Create( const string& platform_name, const string& device_name, int device_ordinal, const string& jit_device_name, const SessionOptions& options, const string& name_prefix, std::unique_ptr<XlaDevice>* device) { VLOG(1) << "XlaDevice::Create " << platform_name << " " << device_name << ":" << device_ordinal; // These are no-ops if they have already been done previously for // this device_name/compilation_device_name pair. XlaOpRegistry::RegisterCompilationKernels(); XlaOpRegistry::DeviceRegistration registration; registration.compilation_device_name = jit_device_name; registration.requires_compilation = true; registration.enable_jit_by_default = false; registration.compile_resource_ops = true; XlaOpRegistry::RegisterCompilationDevice(device_name, registration); auto platform = perftools::gputools::MultiPlatformManager::PlatformWithName( platform_name); if (!platform.ok()) { return StreamExecutorUtil::ConvertStatus(platform.status()); } const DeviceAttributes attrs = Device::BuildDeviceAttributes( strings::StrCat(name_prefix, "/device:", device_name, ":", device_ordinal), DeviceType(device_name), Bytes(16ULL << 30), DeviceLocality(), strings::StrCat("device: ", device_name, " device")); static Allocator* allocator = new XlaDeviceAllocator; device->reset(new XlaDevice(options, attrs, device_ordinal, DeviceType(jit_device_name), platform.ValueOrDie(), allocator)); return Status::OK(); } XlaDevice::Metadata::Metadata(int device_ordinal, perftools::gputools::Platform* platform, const DeviceType& device_type) : device_ordinal_(device_ordinal), device_type_(device_type), platform_(platform) {} int XlaDevice::Metadata::device_ordinal() const { return device_ordinal_; } perftools::gputools::Platform* XlaDevice::Metadata::platform() 
const { return platform_; } XlaDevice::Metadata::~Metadata() {} xla::LocalClient* XlaDevice::Metadata::client() const { auto client = xla::ClientLibrary::GetOrCreateLocalClient(platform_); return client.ValueOrDie(); } const DeviceType& XlaDevice::Metadata::jit_device_type() const { return device_type_; } string XlaDevice::Metadata::DebugString() { return "XLA device metadata"; } XlaDevice::XlaDevice(const SessionOptions& options, const DeviceAttributes& attrs, int device_ordinal, const DeviceType& jit_device_name, perftools::gputools::Platform* platform, Allocator* xla_allocator) : LocalDevice(options, attrs, xla_allocator), device_ordinal_(device_ordinal), jit_device_name_(jit_device_name), xla_allocator_(xla_allocator), platform_(platform) { // Store the platform in the resource manager so Ops can retrieve it // e.g., to lazily create a XlaCompilationCache object. TF_CHECK_OK(resource_manager()->Create<Metadata>( resource_manager()->default_container(), "xla_metadata", new Metadata(device_ordinal_, platform_, jit_device_name_))); } XlaDevice::~XlaDevice() {} xla::LocalClient* XlaDevice::client() const { // We lazily create the client because the platform commits to the // details of the host hardware when the client is created, so we // don't want to do it until we get a chance to hook the platform up // to a simulator. // For now GetOrCreateLocalClient always returns success when passed // a non-null platform. If that changes we may have to plumb in some // way to pass Status back. 
return xla::ClientLibrary::GetOrCreateLocalClient(platform_).ValueOrDie(); } Allocator* XlaDevice::GetAllocator(AllocatorAttributes attr) { if (attr.on_host()) { return cpu_allocator(); } else { return xla_allocator_; } } Status XlaDevice::FillContextMap(const Graph* graph, DeviceContextMap* device_context_map) { VLOG(1) << "XlaDevice::FillContextMap"; device_context_map->resize(graph->num_node_ids()); XlaDeviceContext* ctx = new XlaDeviceContext(client()); for (Node* n : graph->nodes()) { VLOG(2) << n->id() << " : " << n->type_string() << " : " << n->name(); ctx->Ref(); (*device_context_map)[n->id()] = ctx; } ctx->Unref(); return Status::OK(); } void XlaDevice::Compute(OpKernel* op_kernel, OpKernelContext* context) { VLOG(1) << "XlaDevice::Compute " << op_kernel->name() << ":" << op_kernel->type_string(); op_kernel->Compute(context); } void XlaDevice::ComputeAsync(AsyncOpKernel* op_kernel, OpKernelContext* context, AsyncOpKernel::DoneCallback done) { VLOG(1) << "XlaDevice::ComputeAsync " << op_kernel->name() << ":" << op_kernel->type_string(); op_kernel->ComputeAsync(context, done); } Status XlaDevice::MakeTensorFromProto(const TensorProto& tensor_proto, const AllocatorAttributes alloc_attrs, Tensor* tensor) { VLOG(1) << "XlaDevice::MakeTensorFromProto"; Tensor parsed(tensor_proto.dtype()); if (!parsed.FromProto(cpu_allocator(), tensor_proto)) { return errors::InvalidArgument("Cannot parse tensor from proto: ", tensor_proto.DebugString()); } Status status; if (alloc_attrs.on_host()) { *tensor = parsed; } else { Tensor copy(GetAllocator(alloc_attrs), parsed.dtype(), parsed.shape()); Notification n; XlaTransferManager manager(client()); manager.CopyCPUTensorToDevice(&parsed, this, &copy, [&n, &status](const Status& s) { status = s; n.Notify(); }); n.WaitForNotification(); *tensor = copy; } VLOG(2) << "Allocated tensor at " << DMAHelper::base(tensor); return status; } XlaDeviceOpRegistrations* RegisterXlaDeviceKernels(const char* device, const char* jit_device) { 
XlaDeviceOpRegistrations* registrations = new XlaDeviceOpRegistrations; auto dummy_factory = [](OpKernelConstruction* context) -> OpKernel* { return new XlaDeviceDummyOp(context); }; for (const KernelDef* jit_def : XlaOpRegistry::DeviceKernels(jit_device)) { KernelDef* def = new KernelDef(*jit_def); def->set_device_type(device); registrations->op_kernel_registrars.emplace_back( new kernel_factory::OpKernelRegistrar(def, "XlaDeviceDummyOp", dummy_factory)); } return registrations; } } // namespace tensorflow
apache-2.0
alvarocjunq/QuemQuerFazerHistoria.old
QuemQuerFazerHistoria/src/br/com/quemquerfazerhistoria/dao/PersonagemDAO.java
1482
package br.com.quemquerfazerhistoria.dao; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import br.com.quemquerfazerhistoria.model.Personagem; import br.com.quemquerfazerhistoria.utils.Log; public class PersonagemDAO { private final Connection connection; public PersonagemDAO(Connection connection) { this.connection = connection; } public List<Personagem> lista() { try { List<Personagem> lstPersonagem = new ArrayList<Personagem>(); String sql = "SELECT PersonagemID," +" Descricao," +" Tratamento," +" CaminhoImagem" +" FROM tblPersonagem"; PreparedStatement stmt = this.connection.prepareStatement (sql); ResultSet rs = stmt.executeQuery(); while (rs.next()) { Personagem personagem = new Personagem(); personagem.setPersonagemid(rs.getInt("PersonagemID")); personagem.setDescricao(rs.getString("Descricao")); personagem.setTratamento(rs.getString("Tratamento")); personagem.setCaminhoimagem(rs.getString("CaminhoImagem")); lstPersonagem.add(personagem); } rs.close(); stmt.close(); return lstPersonagem; } catch (SQLException e) { Log.setErro("-----------------------\nListar Personagens\n-----------------------"); Log.setErro(e.getErrorCode() + " - " + e.getMessage()); return null; } } }
artistic-2.0
bric3/homebrew-cask
Casks/recut.rb
672
cask "recut" do version "1.1.2,364" sha256 "1f6d33a99a74422db9cb8fdee09b6ba8b3690a3661fdf6f284733278765e87ac" url "https://updates.getrecut.com/Recut-#{version.before_comma}.dmg" name "Recut" desc "Remove silence from videos and automatically generate a cut list" homepage "https://getrecut.com/" livecheck do url "https://updates.getrecut.com/appcast.xml" strategy :sparkle end auto_updates true depends_on macos: ">= :high_sierra" app "Recut.app" zap trash: [ "~/Library/Caches/co.tinywins.recut", "~/Library/Preferences/co.tinywins.recut.plist", "~/Library/Saved Application State/co.tinywins.recut.savedState", ] end
bsd-2-clause
yarden-livnat/dnd-radial
app/src/model.js
985
/** * Created by yarden on 3/11/15. */ define(function(require) { var N_GROUPS = 15, N_ROWS = 6, N_COLS = 16, N_PORTS = 40, N_NODES = 4, N_CORES = 24; return { N_GROUPS: N_GROUPS, N_ROWS: N_ROWS, N_COLS: N_COLS, N_PORTS:N_PORTS, N_NODES:N_NODES, N_CORES:N_CORES, port_id: function(g, r, c, p) { return ((g*N_ROWS + r)*N_COLS + c)*N_PORTS + p; }, router_id: function(g, r, c) { if (arguments.length == 1) return (g.g*N_ROWS + g.r)*N_COLS + g.c; return (g*N_ROWS + r)*N_COLS + c; }, link_id: function(sg, sr, sc, dg, dr, dc) { return sg+':'+sr+':'+sc+'-'+dg+':'+dr+':'+dc; }, node_id: function(g,r,c,n) { if (arguments.length == 1) return ((g.g*N_ROWS + g.r)*N_COLS + g.c)*N_NODES+ g.n; return ((g*N_ROWS + r)*N_COLS + c)*N_NODES+ n; }, core_id:function(item) { return (((item.g*N_ROWS + item.r)*N_COLS + item.c)*N_NODES+ item.n)*N_CORES + item.core; } }; });
bsd-2-clause
sebastienros/jint
Jint.Tests.Test262/test/built-ins/Array/prototype/flatMap/proxy-access-count.js
1555
// Copyright (C) 2018 Richard Lawrence. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- esid: sec-array.prototype.flatMap description: > properties are accessed correct number of times by .flatMap info: | Array.prototype.flatMap ( mapperFunction [ , thisArg ] ) ... 6. Perform ? FlattenIntoArray(A, O, sourceLen, 0, 1, mapperFunction, T). FlattenIntoArray (target, source, sourceLen, start, depth [ , mapperFunction, thisArg ]) 3. Repeat, while sourceIndex < sourceLen a. Let P be ! ToString(sourceIndex). b. Let exists be ? HasProperty(source, P). c. If exists is true, then i. Let element be ? Get(source, P). features: [Array.prototype.flatMap] includes: [compareArray.js] ---*/ assert.sameValue(typeof Array.prototype.flatMap, 'function'); const getCalls = [], hasCalls = []; const handler = { get : function (t, p, r) { getCalls.push(p); return Reflect.get(t, p, r); }, has : function (t, p, r) { hasCalls.push(p); return Reflect.has(t, p, r); } } const tier2 = new Proxy([4, 3], handler); const tier1 = new Proxy([2, [3, 4, 2, 2], 5, tier2, 6], handler); Array.prototype.flatMap.call(tier1, function(a){ return a; }); assert.compareArray(getCalls, ["length", "constructor", "0", "1", "2", "3", "length", "0", "1", "4"], 'getProperty by .flatMap should occur exactly once per property and once for length and constructor'); assert.compareArray(hasCalls, ["0", "1", "2", "3", "0", "1", "4"], 'hasProperty by .flatMap should occur exactly once per property');
bsd-2-clause
CraK-Gaming/Crak-Website
include/websiteinfo_defaults.php
627
<?php if(!isset($websiteInfo['forum_newindicator'])) { $websiteInfo['forum_newindicator'] = 7; } if(!isset($websiteInfo['emailqueue_delay']) || $websiteInfo['emailqueue_delay'] == "" || $websiteInfo['emailqueue_delay'] < 5) { $websiteInfo['emailqueue_delay'] = 30; // Default check every 30 min } if(!isset($websiteInfo['split_downloads'])) { $websiteInfo['split_downloads'] = false; } if(!isset($websiteInfo['default_timezone']) || $websiteInfo['default_timezone'] == "") { $websiteInfo['default_timezone'] = "UTC"; } if(!isset($websiteInfo['allow_multiple_ips'])) { $websiteInfo['allow_multiple_ips'] = true; } ?>
bsd-2-clause
geekdenz/ol3
src/ol/webgl/ShaderBuilder.js
18307
/** * Classes and utilities for generating shaders from literal style objects * @module ol/webgl/ShaderBuilder */ import { ValueTypes, expressionToGlsl, getStringNumberEquivalent, } from '../style/expressions.js'; /** * @typedef {Object} VaryingDescription * @property {string} name Varying name, as will be declared in the header. * @property {string} type Varying type, either `float`, `vec2`, `vec4`... * @property {string} expression Expression which will be assigned to the varying in the vertex shader, and * passed on to the fragment shader. */ /** * @classdesc * This class implements a classic builder pattern for generating many different types of shaders. * Methods can be chained, e. g.: * * ```js * const shader = new ShaderBuilder() * .addVarying('v_width', 'float', 'a_width') * .addUniform('u_time') * .setColorExpression('...') * .setSizeExpression('...') * .outputSymbolFragmentShader(); * ``` */ export class ShaderBuilder { constructor() { /** * Uniforms; these will be declared in the header (should include the type). * @type {Array<string>} * @private */ this.uniforms = []; /** * Attributes; these will be declared in the header (should include the type). * @type {Array<string>} * @private */ this.attributes = []; /** * Varyings with a name, a type and an expression. * @type {Array<VaryingDescription>} * @private */ this.varyings = []; /** * @type {string} * @private */ this.sizeExpression = 'vec2(1.0)'; /** * @type {string} * @private */ this.rotationExpression = '0.0'; /** * @type {string} * @private */ this.offsetExpression = 'vec2(0.0)'; /** * @type {string} * @private */ this.colorExpression = 'vec4(1.0)'; /** * @type {string} * @private */ this.texCoordExpression = 'vec4(0.0, 0.0, 1.0, 1.0)'; /** * @type {string} * @private */ this.discardExpression = 'false'; /** * @type {boolean} * @private */ this.rotateWithView = false; } /** * Adds a uniform accessible in both fragment and vertex shaders. 
* The given name should include a type, such as `sampler2D u_texture`. * @param {string} name Uniform name * @return {ShaderBuilder} the builder object */ addUniform(name) { this.uniforms.push(name); return this; } /** * Adds an attribute accessible in the vertex shader, read from the geometry buffer. * The given name should include a type, such as `vec2 a_position`. * @param {string} name Attribute name * @return {ShaderBuilder} the builder object */ addAttribute(name) { this.attributes.push(name); return this; } /** * Adds a varying defined in the vertex shader and accessible from the fragment shader. * The type and expression of the varying have to be specified separately. * @param {string} name Varying name * @param {'float'|'vec2'|'vec3'|'vec4'} type Type * @param {string} expression Expression used to assign a value to the varying. * @return {ShaderBuilder} the builder object */ addVarying(name, type, expression) { this.varyings.push({ name: name, type: type, expression: expression, }); return this; } /** * Sets an expression to compute the size of the shape. * This expression can use all the uniforms and attributes available * in the vertex shader, and should evaluate to a `vec2` value. * @param {string} expression Size expression * @return {ShaderBuilder} the builder object */ setSizeExpression(expression) { this.sizeExpression = expression; return this; } /** * Sets an expression to compute the rotation of the shape. * This expression can use all the uniforms and attributes available * in the vertex shader, and should evaluate to a `float` value in radians. * @param {string} expression Size expression * @return {ShaderBuilder} the builder object */ setRotationExpression(expression) { this.rotationExpression = expression; return this; } /** * Sets an expression to compute the offset of the symbol from the point center. * This expression can use all the uniforms and attributes available * in the vertex shader, and should evaluate to a `vec2` value. 
* Note: will only be used for point geometry shaders. * @param {string} expression Offset expression * @return {ShaderBuilder} the builder object */ setSymbolOffsetExpression(expression) { this.offsetExpression = expression; return this; } /** * Sets an expression to compute the color of the shape. * This expression can use all the uniforms, varyings and attributes available * in the fragment shader, and should evaluate to a `vec4` value. * @param {string} expression Color expression * @return {ShaderBuilder} the builder object */ setColorExpression(expression) { this.colorExpression = expression; return this; } /** * Sets an expression to compute the texture coordinates of the vertices. * This expression can use all the uniforms and attributes available * in the vertex shader, and should evaluate to a `vec4` value. * @param {string} expression Texture coordinate expression * @return {ShaderBuilder} the builder object */ setTextureCoordinateExpression(expression) { this.texCoordExpression = expression; return this; } /** * Sets an expression to determine whether a fragment (pixel) should be discarded, * i.e. not drawn at all. * This expression can use all the uniforms, varyings and attributes available * in the fragment shader, and should evaluate to a `bool` value (it will be * used in an `if` statement) * @param {string} expression Fragment discard expression * @return {ShaderBuilder} the builder object */ setFragmentDiscardExpression(expression) { this.discardExpression = expression; return this; } /** * Sets whether the symbols should rotate with the view or stay aligned with the map. * Note: will only be used for point geometry shaders. 
* @param {boolean} rotateWithView Rotate with view * @return {ShaderBuilder} the builder object */ setSymbolRotateWithView(rotateWithView) { this.rotateWithView = rotateWithView; return this; } /** * @returns {string} Previously set size expression */ getSizeExpression() { return this.sizeExpression; } /** * @returns {string} Previously set symbol offset expression */ getOffsetExpression() { return this.offsetExpression; } /** * @returns {string} Previously set color expression */ getColorExpression() { return this.colorExpression; } /** * @returns {string} Previously set texture coordinate expression */ getTextureCoordinateExpression() { return this.texCoordExpression; } /** * @returns {string} Previously set fragment discard expression */ getFragmentDiscardExpression() { return this.discardExpression; } /** * Generates a symbol vertex shader from the builder parameters, * intended to be used on point geometries. * * Three uniforms are hardcoded in all shaders: `u_projectionMatrix`, `u_offsetScaleMatrix`, * `u_offsetRotateMatrix`, `u_time`. * * The following attributes are hardcoded and expected to be present in the vertex buffers: * `vec2 a_position`, `float a_index` (being the index of the vertex in the quad, 0 to 3). * * The following varyings are hardcoded and gives the coordinate of the pixel both in the quad and on the texture: * `vec2 v_quadCoord`, `vec2 v_texCoord` * * @param {boolean} [forHitDetection] If true, the shader will be modified to include hit detection variables * (namely, hit color with encoded feature id). * @returns {string} The full shader as a string. */ getSymbolVertexShader(forHitDetection) { const offsetMatrix = this.rotateWithView ? 
'u_offsetScaleMatrix * u_offsetRotateMatrix' : 'u_offsetScaleMatrix'; let attributes = this.attributes; let varyings = this.varyings; if (forHitDetection) { attributes = attributes.concat('vec4 a_hitColor'); varyings = varyings.concat({ name: 'v_hitColor', type: 'vec4', expression: 'a_hitColor', }); } return `precision mediump float; uniform mat4 u_projectionMatrix; uniform mat4 u_offsetScaleMatrix; uniform mat4 u_offsetRotateMatrix; uniform float u_time; uniform float u_zoom; uniform float u_resolution; ${this.uniforms .map(function (uniform) { return 'uniform ' + uniform + ';'; }) .join('\n')} attribute vec2 a_position; attribute float a_index; ${attributes .map(function (attribute) { return 'attribute ' + attribute + ';'; }) .join('\n')} varying vec2 v_texCoord; varying vec2 v_quadCoord; ${varyings .map(function (varying) { return 'varying ' + varying.type + ' ' + varying.name + ';'; }) .join('\n')} void main(void) { mat4 offsetMatrix = ${offsetMatrix}; vec2 halfSize = ${this.sizeExpression} * 0.5; vec2 offset = ${this.offsetExpression}; float angle = ${this.rotationExpression}; float offsetX; float offsetY; if (a_index == 0.0) { offsetX = (offset.x - halfSize.x) * cos(angle) + (offset.y - halfSize.y) * sin(angle); offsetY = (offset.y - halfSize.y) * cos(angle) - (offset.x - halfSize.x) * sin(angle); } else if (a_index == 1.0) { offsetX = (offset.x + halfSize.x) * cos(angle) + (offset.y - halfSize.y) * sin(angle); offsetY = (offset.y - halfSize.y) * cos(angle) - (offset.x + halfSize.x) * sin(angle); } else if (a_index == 2.0) { offsetX = (offset.x + halfSize.x) * cos(angle) + (offset.y + halfSize.y) * sin(angle); offsetY = (offset.y + halfSize.y) * cos(angle) - (offset.x + halfSize.x) * sin(angle); } else { offsetX = (offset.x - halfSize.x) * cos(angle) + (offset.y + halfSize.y) * sin(angle); offsetY = (offset.y + halfSize.y) * cos(angle) - (offset.x - halfSize.x) * sin(angle); } vec4 offsets = offsetMatrix * vec4(offsetX, offsetY, 0.0, 0.0); gl_Position = 
u_projectionMatrix * vec4(a_position, 0.0, 1.0) + offsets; vec4 texCoord = ${this.texCoordExpression}; float u = a_index == 0.0 || a_index == 3.0 ? texCoord.s : texCoord.p; float v = a_index == 2.0 || a_index == 3.0 ? texCoord.t : texCoord.q; v_texCoord = vec2(u, v); u = a_index == 0.0 || a_index == 3.0 ? 0.0 : 1.0; v = a_index == 2.0 || a_index == 3.0 ? 0.0 : 1.0; v_quadCoord = vec2(u, v); ${varyings .map(function (varying) { return ' ' + varying.name + ' = ' + varying.expression + ';'; }) .join('\n')} }`; } /** * Generates a symbol fragment shader from the builder parameters, * intended to be used on point geometries. * * Expects the following varyings to be transmitted by the vertex shader: * `vec2 v_quadCoord`, `vec2 v_texCoord` * * @param {boolean} [forHitDetection] If true, the shader will be modified to include hit detection variables * (namely, hit color with encoded feature id). * @returns {string} The full shader as a string. */ getSymbolFragmentShader(forHitDetection) { const hitDetectionBypass = forHitDetection ? 
' if (gl_FragColor.a < 0.1) { discard; } gl_FragColor = v_hitColor;' : ''; let varyings = this.varyings; if (forHitDetection) { varyings = varyings.concat({ name: 'v_hitColor', type: 'vec4', expression: 'a_hitColor', }); } return `precision mediump float; uniform float u_time; uniform float u_zoom; uniform float u_resolution; ${this.uniforms .map(function (uniform) { return 'uniform ' + uniform + ';'; }) .join('\n')} varying vec2 v_texCoord; varying vec2 v_quadCoord; ${varyings .map(function (varying) { return 'varying ' + varying.type + ' ' + varying.name + ';'; }) .join('\n')} void main(void) { if (${this.discardExpression}) { discard; } gl_FragColor = ${this.colorExpression}; gl_FragColor.rgb *= gl_FragColor.a; ${hitDetectionBypass} }`; } } /** * @typedef {Object} StyleParseResult * @property {ShaderBuilder} builder Shader builder pre-configured according to a given style * @property {Object.<string,import("./Helper").UniformValue>} uniforms Uniform definitions. * @property {Array<import("../renderer/webgl/PointsLayer").CustomAttribute>} attributes Attribute descriptions. */ /** * Parses a {@link import("../style/LiteralStyle").LiteralStyle} object and returns a {@link ShaderBuilder} * object that has been configured according to the given style, as well as `attributes` and `uniforms` * arrays to be fed to the `WebGLPointsRenderer` class. * * Also returns `uniforms` and `attributes` properties as expected by the * {@link module:ol/renderer/webgl/PointsLayer~WebGLPointsLayerRenderer}. * * @param {import("../style/LiteralStyle").LiteralStyle} style Literal style. * @returns {StyleParseResult} Result containing shader params, attributes and uniforms. */ export function parseLiteralStyle(style) { const symbStyle = style.symbol; const size = symbStyle.size !== undefined ? 
symbStyle.size : 1; const color = symbStyle.color || 'white'; const texCoord = symbStyle.textureCoord || [0, 0, 1, 1]; const offset = symbStyle.offset || [0, 0]; const opacity = symbStyle.opacity !== undefined ? symbStyle.opacity : 1; const rotation = symbStyle.rotation !== undefined ? symbStyle.rotation : 0; /** * @type {import("../style/expressions.js").ParsingContext} */ const vertContext = { inFragmentShader: false, variables: [], attributes: [], stringLiteralsMap: {}, }; const parsedSize = expressionToGlsl( vertContext, size, ValueTypes.NUMBER_ARRAY | ValueTypes.NUMBER ); const parsedOffset = expressionToGlsl( vertContext, offset, ValueTypes.NUMBER_ARRAY ); const parsedTexCoord = expressionToGlsl( vertContext, texCoord, ValueTypes.NUMBER_ARRAY ); const parsedRotation = expressionToGlsl( vertContext, rotation, ValueTypes.NUMBER ); /** * @type {import("../style/expressions.js").ParsingContext} */ const fragContext = { inFragmentShader: true, variables: vertContext.variables, attributes: [], stringLiteralsMap: vertContext.stringLiteralsMap, }; const parsedColor = expressionToGlsl(fragContext, color, ValueTypes.COLOR); const parsedOpacity = expressionToGlsl( fragContext, opacity, ValueTypes.NUMBER ); let opacityFilter = '1.0'; const visibleSize = `vec2(${expressionToGlsl( fragContext, size, ValueTypes.NUMBER_ARRAY | ValueTypes.NUMBER )}).x`; switch (symbStyle.symbolType) { case 'square': break; case 'image': break; // taken from https://thebookofshaders.com/07/ case 'circle': opacityFilter = `(1.0-smoothstep(1.-4./${visibleSize},1.,dot(v_quadCoord-.5,v_quadCoord-.5)*4.))`; break; case 'triangle': const st = '(v_quadCoord*2.-1.)'; const a = `(atan(${st}.x,${st}.y))`; opacityFilter = `(1.0-smoothstep(.5-3./${visibleSize},.5,cos(floor(.5+${a}/2.094395102)*2.094395102-${a})*length(${st})))`; break; default: throw new Error('Unexpected symbol type: ' + symbStyle.symbolType); } const builder = new ShaderBuilder() .setSizeExpression(`vec2(${parsedSize})`) 
.setRotationExpression(parsedRotation) .setSymbolOffsetExpression(parsedOffset) .setTextureCoordinateExpression(parsedTexCoord) .setSymbolRotateWithView(!!symbStyle.rotateWithView) .setColorExpression( `vec4(${parsedColor}.rgb, ${parsedColor}.a * ${parsedOpacity} * ${opacityFilter})` ); if (style.filter) { const parsedFilter = expressionToGlsl( fragContext, style.filter, ValueTypes.BOOLEAN ); builder.setFragmentDiscardExpression(`!${parsedFilter}`); } /** @type {Object.<string,import("../webgl/Helper").UniformValue>} */ const uniforms = {}; // define one uniform per variable fragContext.variables.forEach(function (varName) { builder.addUniform(`float u_${varName}`); uniforms[`u_${varName}`] = function () { if (!style.variables || style.variables[varName] === undefined) { throw new Error( `The following variable is missing from the style: ${varName}` ); } let value = style.variables[varName]; if (typeof value === 'string') { value = getStringNumberEquivalent(vertContext, value); } return value !== undefined ? value : -9999999; // to avoid matching with the first string literal }; }); if (symbStyle.symbolType === 'image' && symbStyle.src) { const texture = new Image(); texture.src = symbStyle.src; builder .addUniform('sampler2D u_texture') .setColorExpression( builder.getColorExpression() + ' * texture2D(u_texture, v_texCoord)' ); uniforms['u_texture'] = texture; } // for each feature attribute used in the fragment shader, define a varying that will be used to pass data // from the vertex to the fragment shader, as well as an attribute in the vertex shader (if not already present) fragContext.attributes.forEach(function (attrName) { if (vertContext.attributes.indexOf(attrName) === -1) { vertContext.attributes.push(attrName); } builder.addVarying(`v_${attrName}`, 'float', `a_${attrName}`); }); // for each feature attribute used in the vertex shader, define an attribute in the vertex shader. 
vertContext.attributes.forEach(function (attrName) { builder.addAttribute(`float a_${attrName}`); }); return { builder: builder, attributes: vertContext.attributes.map(function (attributeName) { return { name: attributeName, callback: function (feature, props) { let value = props[attributeName]; if (typeof value === 'string') { value = getStringNumberEquivalent(vertContext, value); } return value !== undefined ? value : -9999999; // to avoid matching with the first string literal }, }; }), uniforms: uniforms, }; }
bsd-2-clause
timchen86/gdcmdtools
gdcmdtools/get.py
7193
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import division import re import os import json import io import sys import pprint import logging logger = logging.getLogger() logger.setLevel(logging.DEBUG) import requests from requests_oauthlib import OAuth2Session from apiclient import errors from gdcmdtools.auth import GDAuth from gdcmdtools.auth import SCOPE from base import GDBase export_format = { "application/vnd.google-apps.spreadsheet": ["pdf", "ods", "xlsx"], "application/vnd.google-apps.document": ["pdf", "docx", "rtf", "odt", "html", "txt"], "application/vnd.google-apps.presentation": ["pdf", "pptx", "txt"], "application/vnd.google-apps.drawing": ["png", "pdf", "jpeg", "svg"], "application/vnd.google-apps.script+json": ["json"], } class GDGet: def __init__(self, file_id, format, save_as): # base auth = GDAuth() self.credentials = auth.get_credentials() if self.credentials is None: raise Exception("Failed to retrieve credentials") self.http = auth.get_authorized_http() base = GDBase() self.service = base.get_drive_service(self.http) self.file_id = base.get_id_from_url(file_id) self.format = format if save_as is None: self.save_as = None else: self.save_as = os.path.abspath(save_as) def parse_gas_json(self, file_content, save_as): map_type_ext = {"server_js": "js", "html": "html"} try: jsons = json.loads(file_content) new_json = {"files": []} for j in jsons["files"]: file_id = j["id"] file_name = j["name"] file_source = j["source"] file_type = j["type"] if file_type in map_type_ext.keys(): file_ext = map_type_ext[file_type] else: file_ext = file_type file_fullname = "%s.%s" % (file_name, file_ext) with open(file_fullname, 'wb+') as f: f.write(file_source.encode('utf8')) # We need unicode! 
j.pop("source") new_json["files"].append(j) # save the project id, we need the id to upload project new_json["id"] = self.file_id with open(save_as, 'wb+') as f: f.write(json.dumps(new_json, indent=4)) except Exception as e: logger.error(e) raise def run(self): try: service_response = self.get() # Content-Length from http header is None self.file_size = service_response.get('fileSize', None) result_title_format = self.get_title_format(service_response) logger.debug(result_title_format) title, return_format = result_title_format if self.format != "raw": _, ext = os.path.splitext(title) if(self.format != ext[1:]): title = title + "." + self.format if self.format not in return_format.keys(): raise Exception( "The specified format \'%s\' is not allowed, available format are \"%s\", please correct option: --export_format" % (self.format, ', '.join( return_format.keys()))) if self.save_as is None: self.save_as = title if self.format == "json": result, file_content, local_size = self.get_by_format( self.save_as, return_format[self.format]) self.parse_gas_json(file_content, self.save_as) else: # FIXME: handle return value result, file_content, local_size = self.get_by_format( self.save_as, return_format[self.format]) if(result == False): raise Exception( "File size check failed, download may be incompleted. 
local size is %d" % local_size) except Exception as e: logger.error(e) raise return return_format def get(self): try: response = self.service.files().get(fileId=self.file_id).execute() logger.debug(pprint.pformat(response)) return response except errors.HttpError as error: logger.error('An error occurred: %s' % error) return None def get_title_format(self, service_response): export_links = service_response.get('exportLinks', None) return_format = {} title = service_response.get('title', None) logger.debug(title) logger.debug(export_links) if export_links is None: download_link = service_response.get(u'downloadUrl', None) return_format["raw"] = download_link else: export_link_values = export_links.values() if len(export_link_values) > 0: for link in export_link_values: m = re.match(r'^.*[Ff]ormat=(.*)$', link) return_format[m.group(1)] = link return title, return_format def get_by_format(self, save_as, url): ''' Get file from URL and save to save_as. Return result,content,filesize ''' fd = io.FileIO(save_as, mode='wb') creds = self.credentials # move to auth.py? 
token = {"access_token": creds.access_token, "token_type": "Bearer"} session = OAuth2Session(creds.client_id, scope=SCOPE, token=token) with open(save_as, 'wb') as f: response = session.get(url, stream=True) return_content = response.content if self.file_size: total_length = int(self.file_size) print "total size = %d Bytes" % total_length mega = 1048576 # 1024*1024 downloaded = 0 total_in_mega = int(total_length / mega) for data in response.iter_content(chunk_size=mega): f.write(data) downloaded += len(data) done = int(50 * downloaded / total_length) done_percent = int(downloaded / total_length * 100) done_in_mega = int(downloaded / mega) sys.stdout.write("\r[%s%s] %3d%%, %d of %d MB" % ( '=' * done, ' ' * (50 - done), done_percent, done_in_mega, total_in_mega)) sys.stdout.flush() else: f.write(return_content) # for sys.stdout.flush() print "" # local size check local_size = int(os.path.getsize(save_as)) print "File location: %s" % save_as if self.file_size: if(int(self.file_size) == local_size): return True, return_content, local_size else: return False, return_content, local_size else: print "File size in bytes: %d" % local_size return True, return_content, local_size
bsd-2-clause
ShivaHuang/homebrew-core
Formula/snow.rb
1147
# Homebrew formula for `snow`, a tool that conceals messages in the trailing
# whitespace of ASCII text (whitespace steganography).
class Snow < Formula
  desc "Whitespace steganography: coded messages using whitespace"
  homepage "http://www.darkside.com.au/snow/"
  # The upstream website seems to be rejecting curl connections.
  # Consistently returns "HTTP/1.1 406 Not Acceptable".
  url "https://dl.bintray.com/homebrew/mirror/snow-20130616.tar.gz"
  sha256 "c0b71aa74ed628d121f81b1cd4ae07c2842c41cfbdf639b50291fc527c213865"

  bottle do
    cellar :any_skip_relocation
    revision 1
    sha256 "5121a5196c5ed20b7496a5190830bf2e49bdd18c3950fc6b1b8fabb239c9ef7c" => :el_capitan
    sha256 "f4e949f65f946916a5f0b018a75e741336fed9e6434f1802d906e003e9da6b65" => :yosemite
    sha256 "4d6bd4ca3de8ee330802495bdb04b0928afa21bb47a8fb1cde71d8a0c7919ada" => :mavericks
  end

  # Upstream ships a plain Makefile with no configure step; install the single
  # binary and its man page by hand.
  def install
    system "make"
    bin.install "snow"
    man1.install "snow.1"
  end

  # Smoke test: conceal a message in out.txt, then run the extraction pass on
  # the same file with the same password.
  test do
    touch "in.txt"
    touch "out.txt"
    system "#{bin}/snow", "-C", "-m", "'Secrets Abound Here'", "-p", "'hello world'", "in.txt", "out.txt"
    # The below should get the response 'Secrets Abound Here' when testing.
    system "#{bin}/snow", "-C", "-p", "'hello world'", "out.txt"
  end
end
bsd-2-clause
gholms/euca2ools
euca2ools/commands/autoscaling/describenotificationconfigurations.py
2723
# Copyright (c) 2013-2016 Hewlett Packard Enterprise Development LP # # Redistribution and use of this software in source and binary forms, # with or without modification, are permitted provided that the following # conditions are met: # # Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from requestbuilder import Arg
from requestbuilder.mixins import TabifyingMixin
from requestbuilder.response import PaginatedResponse

from euca2ools.commands.autoscaling import AutoScalingRequest


class DescribeNotificationConfigurations(AutoScalingRequest, TabifyingMixin):
    """Describe the notification configurations attached to auto-scaling
    groups, following NextToken-based pagination until the service stops
    returning a token.

    With no positional arguments the request covers all groups; passing one
    or more group names restricts the output to those groups.
    """

    DESCRIPTION = ('Describe notification actions associated with '
                   'auto-scaling groups')
    ARGS = [Arg('AutoScalingGroupNames.member', metavar='ASGROUP', nargs='*',
                help='limit results to specific auto-scaling groups')]
    # Response elements under this tag are parsed as lists.
    LIST_TAGS = ['NotificationConfigurations']

    def main(self):
        """Issue the (possibly multi-page) request.

        The initial page token is None; 'NotificationConfigurations' names
        the list element that gets merged across pages.
        """
        return PaginatedResponse(self, (None,),
                                 ('NotificationConfigurations',))

    def prepare_for_page(self, page):
        """Set the request parameter that selects the given result page."""
        # Pages are defined by NextToken
        self.params['NextToken'] = page

    # pylint: disable=no-self-use
    def get_next_page(self, response):
        """Return the token for the next page, or None when exhausted."""
        return response.get('NextToken') or None
    # pylint: enable=no-self-use

    def print_result(self, result):
        """Print one tab-separated NOTIFICATION-CONFIG row per configuration."""
        for config in result.get('NotificationConfigurations', []):
            print self.tabify(('NOTIFICATION-CONFIG',
                               config.get('AutoScalingGroupName'),
                               config.get('TopicARN'),
                               config.get('NotificationType')))
bsd-2-clause
spaam/homebrew-core
Formula/sc-im.rb
1427
# Homebrew formula for sc-im, an ncurses spreadsheet (vim-like fork of `sc`).
class ScIm < Formula
  desc "Spreadsheet program for the terminal, using ncurses"
  homepage "https://github.com/andmarti1424/sc-im"
  url "https://github.com/andmarti1424/sc-im/archive/v0.7.0.tar.gz"
  sha256 "87225918cb6f52bbc068ee6b12eaf176c7c55ba9739b29ca08cb9b6699141cad"
  license "BSD-4-Clause"
  # Upstream's development branch is named "freeze".
  head "https://github.com/andmarti1424/sc-im.git", branch: "freeze"

  bottle do
    sha256 arm64_big_sur: "f7ad6c86caf94bf75c40da5422ac4eeaef52e7c5b6065f32981dec2c54bef5c9"
    sha256 big_sur:       "d419452946bad457347ca8c59b3d53a90f3976af74d1e652de8e1ad4d0982f9a"
    sha256 catalina:      "24cb0ad706b03a9933cdb24dba862b38a3fcb59f96f9942227d8f9f79ff93ea5"
    sha256 mojave:        "67180ab11eedd56f8eaffb0d2f12a90ca9636bbd93ff693914450be8248702ce"
    sha256 high_sierra:   "275a0a9dbd1a1271119e36b2767a54587aae57a65ee92278e701e0e1236a192c"
    sha256 sierra:        "f346970ef805cec01ae6485365d8fb5002533255c01e81bdd44d072058d00081"
    sha256 el_capitan:    "50e8d50e0373ac626ad617057eb1246c779e1e3b05171f4be2aa547c5b8ddf4c"
  end

  depends_on "ncurses"

  # The Makefile lives in src/, not the repository root.
  def install
    cd "src" do
      system "make", "prefix=#{prefix}"
      system "make", "prefix=#{prefix}", "install"
    end
  end

  # Evaluate a formula headlessly and check the printed result.
  test do
    input = <<~EOS
      let A1=1+1
      getnum A1
    EOS
    output = pipe_output(
      "#{bin}/scim --nocurses --quit_afterload 2>/dev/null", input
    )
    assert_equal "2", output.lines.last.chomp
  end
end
bsd-2-clause
zhuyue1314/cle
cle/tls.py
4731
"""Synthesize a thread-local-storage (TLS) region for a set of loaded modules.

Builds a fake memory object laid out like a glibc-style TLS area: a thread
control block (TCB) head, a dynamic thread vector (DTV), and one TLS block
per module, initialized from each module's .tdata image.

NOTE(review): this module is Python 2 code (`xrange`, str-as-bytes in
`struct.pack` / `Clemory` backers).
"""

from collections import namedtuple, defaultdict
import struct

from .absobj import AbsObj
from .memory import Clemory

# Per-architecture TLS layout parameters:
#   variant         - TLS data-structure variant (1 or 2); see how
#                     tls_block_offset is computed below.
#   tcbhead_size    - size in bytes of the thread control block header.
#   head_offsets    - offsets within the TCB that receive the thread pointer.
#   dtv_offsets     - offsets within the TCB that receive the DTV address.
#   pthread_offsets - offsets within the TCB that receive the pthread address.
TLSArchinfo = namedtuple('TLSArchInfo', ('variant', 'tcbhead_size', 'head_offsets', 'dtv_offsets', 'pthread_offsets'))

tls_archinfo = {
    'AMD64':   TLSArchinfo(2, 704, [16], [8], [0]),
    'X86':     TLSArchinfo(2, 56, [8], [4], [0]),
    'AARCH64': TLSArchinfo(1, 32, [], [0], []),
    'ARM':     TLSArchinfo(1, 32, [], [0], []),
    'ARMEL':   TLSArchinfo(1, 8, [], [0], []),
    'ARMHF':   TLSArchinfo(1, 8, [], [0], []),
    'MIPS32':  TLSArchinfo(1, 8, [], [0], []),
    'MIPS64':  TLSArchinfo(1, 16, [], [0], []),
    'PPC32':   TLSArchinfo(1, 52, [], [48], []),
    'PPC64':   TLSArchinfo(1, 92, [], [84], []),
}

TLS_BLOCK_ALIGN = 0x10            # alignment of each module's TLS block
TLS_TOTAL_HEAD_SIZE = 0x4000      # space reserved for the TCB head
TLS_HEAD_ALIGN = 0x10000          # alignment of the combined blocks region
TLS_DTV_INITIAL_CAPACITY = 0x10
TLS_ALLOC_SIZE = 0x30000          # total size of the synthesized TLS region


def roundup(val, to=TLS_BLOCK_ALIGN):
    # Round `val` up to the next multiple of `to` (0 stays 0).  The
    # (val - 1) form relies on Python's floored modulo, so it is also
    # well-defined for val == 0.
    return val - 1 + (to - ((val - 1) % to))


class TLSObj(AbsObj):
    """A pseudo-object holding the TLS area for all `modules`.

    Construction assigns each module a 1-based `tls_module_id` and a
    `tls_block_offset` relative to the thread pointer; `finalize` (called
    after `rebase_addr` is known) writes the TCB pointers, the .tdata init
    images, and the DTV into `self.memory`.
    """

    def __init__(self, modules):
        super(TLSObj, self).__init__('##cle_tls##')
        self.modules = modules
        # All modules are assumed to share one architecture; use the first.
        self.arch = self.modules[0].arch
        self.memory = Clemory(self.arch)
        self.tlsinfo = tls_archinfo[self.arch.name]

        # Lay the modules' TLS blocks out back to back, each block aligned.
        module_id = 1
        self.total_blocks_size = 0
        for module in modules:
            module.tls_module_id = module_id
            module_id += 1
            module.tls_block_offset = self.total_blocks_size
            self.total_blocks_size += roundup(module.tls_block_size)
        self.total_blocks_size = roundup(self.total_blocks_size, TLS_HEAD_ALIGN)

        # Variant 1: blocks sit above the TCB head, so offsets are shifted up
        # past the reserved head area.  Variant 2: blocks sit at negative
        # offsets below the thread pointer.
        for module in modules:
            if self.tlsinfo.variant == 1:
                module.tls_block_offset += TLS_TOTAL_HEAD_SIZE
            else:
                module.tls_block_offset = -roundup(module.tls_block_size) - module.tls_block_offset

        # The DTV proper starts two words past this point; the word pair just
        # before it holds the DTV capacity (see finalize).
        self.dtv_start = TLS_TOTAL_HEAD_SIZE + 2*self.arch.bytes
        # Variant 1 puts the thread pointer at the region base; variant 2 puts
        # it above all the (negatively-offset) blocks.
        self.tp_offset = 0 if self.tlsinfo.variant == 1 else self.total_blocks_size

    def finalize(self):
        """Populate self.memory once the object has been rebased.

        Writes are staged into a sparse dict (default byte '\\0') and
        committed as one backer at the end.
        """
        assert self.rebase_addr != 0
        temp_dict = defaultdict(lambda: '\0')
        def drop(string, offset):
            # Scatter a byte string into the staging dict at `offset`.
            for i, c in enumerate(string):
                temp_dict[i + offset] = c
        def drop_int(num, offset):
            # Write a native-word-sized integer at `offset`.
            drop(struct.pack(self.arch.struct_fmt(), num), offset)

        # Set the appropriate pointers in the tcbhead
        for off in self.tlsinfo.head_offsets:
            drop_int(self.thread_pointer, off + self.tp_offset)
        for off in self.tlsinfo.dtv_offsets:
            drop_int(self.rebase_addr + self.dtv_start, off + self.tp_offset)
        for off in self.tlsinfo.pthread_offsets:
            drop_int(self.thread_pointer + self.tlsinfo.tcbhead_size, off + self.tp_offset)

        # Write the init images from each of the modules' tdata sections
        for module in self.modules:
            module.memory.seek(module.tls_tdata_start)
            drop(module.memory.read(module.tls_tdata_size), self.tp_offset + module.tls_block_offset)

        # Set up the DTV: capacity word (stored just before the DTV), the
        # generation/length word, then one (offset, is-static) pair per module.
        # TODO(review): the capacity encoding (capacity - 1) was flagged as
        # uncertain by the original author -- verify against the consumer.
        drop_int(TLS_DTV_INITIAL_CAPACITY-1, self.dtv_start - 2*self.arch.bytes)
        drop_int(len(self.modules), self.dtv_start)
        for module in self.modules:
            drop_int(self.tp_offset + module.tls_block_offset, self.dtv_start + (2*self.arch.bytes)*module.tls_module_id)
            drop_int(1, self.dtv_start + (2*self.arch.bytes)*module.tls_module_id + self.arch.bytes)

        self.memory.add_backer(0, ''.join(temp_dict[i] for i in xrange(0, TLS_ALLOC_SIZE)))

    @property
    def thread_pointer(self):
        # Absolute address of the thread pointer within this TLS region.
        return self.rebase_addr + self.tp_offset

    def get_min_addr(self):
        return self.rebase_addr

    def get_max_addr(self):
        return TLS_ALLOC_SIZE + self.rebase_addr

    def get_addr(self, module_id, offset):
        '''
        Resolve (module_id, offset) to an absolute address -- essentially a
        static version of __tls_get_addr.  `module_id` is the 1-based id
        assigned in __init__.
        '''
        return self.thread_pointer + self.modules[module_id-1].tls_block_offset + offset
bsd-2-clause
AerysBat/slimCat
slimCat/Utilities/Converters/MessageThicknessConverter.cs
1414
#region Copyright

// <copyright file="MessageThicknessConverter.cs">
//     Copyright (c) 2013-2015, Justin Kadrovach, All rights reserved.
//
//     This source is subject to the Simplified BSD License.
//     Please see the License.txt file for more information.
//     All other rights reserved.
//
//     THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
//     KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
//     IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
//     PARTICULAR PURPOSE.
// </copyright>

#endregion

namespace slimCat.Utilities
{
    #region Usings

    using System.Windows;
    using Models;

    #endregion

    /// <summary>
    ///     Maps a chat message to the margin <see cref="Thickness" /> used when
    ///     rendering it: a thicker bottom edge for the last-viewed message, a
    ///     thin one for ads, and extra right/bottom space for messages of
    ///     interest.
    /// </summary>
    public sealed class MessageThicknessConverter : OneWayConverter
    {
        /// <summary>
        ///     Converts an <see cref="IMessage" /> into its display margin.
        ///     Non-message values yield a zero thickness.
        /// </summary>
        public override object Convert(object value, object parameter)
        {
            var message = value as IMessage;
            if (message == null)
                return new Thickness(0, 0, 0, 0);

            // Precedence: last-viewed wins over ad styling, which wins over
            // of-interest styling.
            if (message.IsLastViewed)
                return new Thickness(0, 0, 0, 2);

            if (message.Type == MessageType.Ad)
                return new Thickness(0, 0, 0, 1);

            if (message.IsOfInterest)
                return new Thickness(0, 0, 8, 2);

            return new Thickness(0, 0, 0, 0);
        }
    }
}
bsd-2-clause
ASMlover/study
3rdparty/boost/include/boost/geometry/strategies/cartesian/envelope_box.hpp
3479
// Boost.Geometry (aka GGL, Generic Geometry Library)

// Copyright (c) 2007-2015 Barend Gehrels, Amsterdam, the Netherlands.
// Copyright (c) 2008-2015 Bruno Lalande, Paris, France.
// Copyright (c) 2009-2015 Mateusz Loskot, London, UK.

// This file was modified by Oracle on 2015-2019.
// Modifications copyright (c) 2015-2019, Oracle and/or its affiliates.

// Contributed and/or modified by Vissarion Fysikopoulos, on behalf of Oracle
// Contributed and/or modified by Menelaos Karavelas, on behalf of Oracle
// Contributed and/or modified by Adam Wulkiewicz, on behalf of Oracle

// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_GEOMETRY_STRATEGIES_CARTESIAN_ENVELOPE_BOX_HPP
#define BOOST_GEOMETRY_STRATEGIES_CARTESIAN_ENVELOPE_BOX_HPP

#include <cstddef>

#include <boost/geometry/core/access.hpp>
#include <boost/geometry/core/coordinate_dimension.hpp>
#include <boost/geometry/core/tags.hpp>

#include <boost/geometry/views/detail/indexed_point_view.hpp>

#include <boost/geometry/algorithms/detail/convert_point_to_point.hpp>
#include <boost/geometry/algorithms/detail/normalize.hpp>
#include <boost/geometry/algorithms/detail/envelope/transform_units.hpp>

#include <boost/geometry/algorithms/dispatch/envelope.hpp>

#include <boost/geometry/strategies/cartesian/expand_box.hpp>
#include <boost/geometry/strategies/envelope.hpp>


namespace boost { namespace geometry
{

#ifndef DOXYGEN_NO_DETAIL
namespace detail { namespace envelope
{

// Copies one corner (Index = min_corner or max_corner) of box_in into the
// corresponding corner of mbr, coordinate by coordinate, by viewing each
// corner as a point.
template
<
    std::size_t Index,
    std::size_t Dimension,
    std::size_t DimensionCount
>
struct envelope_indexed_box
{
    template <typename BoxIn, typename BoxOut>
    static inline void apply(BoxIn const& box_in, BoxOut& mbr)
    {
        detail::indexed_point_view<BoxIn const, Index> box_in_corner(box_in);
        detail::indexed_point_view<BoxOut, Index> mbr_corner(mbr);

        detail::conversion::point_to_point
            <
                detail::indexed_point_view<BoxIn const, Index>,
                detail::indexed_point_view<BoxOut, Index>,
                Dimension,
                DimensionCount
            >::apply(box_in_corner, mbr_corner);
    }
};

}} // namespace detail::envelope
#endif // DOXYGEN_NO_DETAIL

namespace strategy { namespace envelope
{

// Envelope strategy for cartesian boxes: the minimum bounding rectangle of a
// box is the box itself, so the strategy just copies both corners.
struct cartesian_box
{
    typedef cartesian_tag cs_tag;

    typedef strategy::expand::cartesian_box box_expand_strategy_type;

    static inline box_expand_strategy_type get_box_expand_strategy()
    {
        return box_expand_strategy_type();
    }

    template<typename BoxIn, typename BoxOut>
    static inline void apply(BoxIn const& box_in, BoxOut& mbr)
    {
        // Copy the min corner, then the max corner, across all dimensions.
        geometry::detail::envelope::envelope_indexed_box
            <
                min_corner, 0, dimension<BoxIn>::value
            >::apply(box_in, mbr);

        geometry::detail::envelope::envelope_indexed_box
            <
                max_corner, 0, dimension<BoxIn>::value
            >::apply(box_in, mbr);
    }
};

#ifndef DOXYGEN_NO_STRATEGY_SPECIALIZATIONS

namespace services
{

// Registers cartesian_box as the default envelope strategy for boxes in
// cartesian coordinate systems.
template <typename CalculationType>
struct default_strategy<box_tag, cartesian_tag, CalculationType>
{
    typedef strategy::envelope::cartesian_box type;
};

}

#endif // DOXYGEN_NO_STRATEGY_SPECIALIZATIONS

}} // namespace strategy::envelope

}} // namespace boost::geometry

#endif // BOOST_GEOMETRY_STRATEGIES_CARTESIAN_ENVELOPE_BOX_HPP
bsd-2-clause
pytorch/cpuinfo
test/mock/galaxy-s3-us.cc
13003
/*
 * Mock cpuinfo tests for the US Samsung Galaxy S3 (Qualcomm MSM8960,
 * 2x Krait cores in a single cluster).  Expected topology/cache values
 * come from the captured sysfs/procfs snapshot in galaxy-s3-us.h.
 *
 * Fix: TEST(L2, size) previously wrapped its assertion in a stray
 * "switch (i) {" with no case labels, which made the assertion
 * unreachable dead code; the switch has been removed so the check runs.
 */
#include <gtest/gtest.h>

#include <cpuinfo.h>
#include <cpuinfo-mock.h>


TEST(PROCESSORS, count) {
	ASSERT_EQ(2, cpuinfo_get_processors_count());
}

TEST(PROCESSORS, non_null) {
	ASSERT_TRUE(cpuinfo_get_processors());
}

TEST(PROCESSORS, smt_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_processor(i)->smt_id);
	}
}

TEST(PROCESSORS, core) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_core(i), cpuinfo_get_processor(i)->core);
	}
}

TEST(PROCESSORS, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_processor(i)->cluster);
	}
}

TEST(PROCESSORS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_processor(i)->package);
	}
}

TEST(PROCESSORS, linux_id) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_processor(i)->linux_id);
	}
}

TEST(PROCESSORS, l1i) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i), cpuinfo_get_processor(i)->cache.l1i);
	}
}

TEST(PROCESSORS, l1d) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i), cpuinfo_get_processor(i)->cache.l1d);
	}
}

TEST(PROCESSORS, l2) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(0), cpuinfo_get_processor(i)->cache.l2);
	}
}

TEST(PROCESSORS, l3) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l3);
	}
}

TEST(PROCESSORS, l4) {
	for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) {
		ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l4);
	}
}

TEST(CORES, count) {
	ASSERT_EQ(2, cpuinfo_get_cores_count());
}

TEST(CORES, non_null) {
	ASSERT_TRUE(cpuinfo_get_cores());
}

TEST(CORES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->processor_start);
	}
}

TEST(CORES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_core(i)->processor_count);
	}
}

TEST(CORES, core_id) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_core(i)->core_id);
	}
}

TEST(CORES, cluster) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_core(i)->cluster);
	}
}

TEST(CORES, package) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_core(i)->package);
	}
}

TEST(CORES, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_qualcomm, cpuinfo_get_core(i)->vendor);
	}
}

TEST(CORES, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(cpuinfo_uarch_krait, cpuinfo_get_core(i)->uarch);
	}
}

TEST(CORES, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(UINT32_C(0x511F04D4), cpuinfo_get_core(i)->midr);
	}
}

TEST(CORES, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) {
		ASSERT_EQ(UINT64_C(1512000000), cpuinfo_get_core(i)->frequency);
	}
}

TEST(CLUSTERS, count) {
	ASSERT_EQ(1, cpuinfo_get_clusters_count());
}

TEST(CLUSTERS, non_null) {
	ASSERT_TRUE(cpuinfo_get_clusters());
}

TEST(CLUSTERS, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_cluster(i)->processor_start);
	}
}

TEST(CLUSTERS, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_cluster(i)->processor_count);
	}
}

TEST(CLUSTERS, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_cluster(i)->core_start);
	}
}

TEST(CLUSTERS, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_cluster(i)->core_count);
	}
}

TEST(CLUSTERS, cluster_id) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_cluster(i)->cluster_id);
	}
}

TEST(CLUSTERS, package) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_cluster(i)->package);
	}
}

TEST(CLUSTERS, vendor) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_vendor_qualcomm, cpuinfo_get_cluster(i)->vendor);
	}
}

TEST(CLUSTERS, uarch) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(cpuinfo_uarch_krait, cpuinfo_get_cluster(i)->uarch);
	}
}

TEST(CLUSTERS, midr) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(UINT32_C(0x511F04D4), cpuinfo_get_cluster(i)->midr);
	}
}

TEST(CLUSTERS, DISABLED_frequency) {
	for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) {
		ASSERT_EQ(UINT64_C(1512000000), cpuinfo_get_cluster(i)->frequency);
	}
}

TEST(PACKAGES, count) {
	ASSERT_EQ(1, cpuinfo_get_packages_count());
}

TEST(PACKAGES, name) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ("Qualcomm MSM8960",
			std::string(cpuinfo_get_package(i)->name,
				strnlen(cpuinfo_get_package(i)->name, CPUINFO_PACKAGE_NAME_MAX)));
	}
}

TEST(PACKAGES, processor_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->processor_start);
	}
}

TEST(PACKAGES, processor_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_package(i)->processor_count);
	}
}

TEST(PACKAGES, core_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->core_start);
	}
}

TEST(PACKAGES, core_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(2, cpuinfo_get_package(i)->core_count);
	}
}

TEST(PACKAGES, cluster_start) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_package(i)->cluster_start);
	}
}

TEST(PACKAGES, cluster_count) {
	for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_package(i)->cluster_count);
	}
}

TEST(ISA, thumb) {
	ASSERT_TRUE(cpuinfo_has_arm_thumb());
}

TEST(ISA, thumb2) {
	ASSERT_TRUE(cpuinfo_has_arm_thumb2());
}

TEST(ISA, armv5e) {
	ASSERT_TRUE(cpuinfo_has_arm_v5e());
}

TEST(ISA, armv6) {
	ASSERT_TRUE(cpuinfo_has_arm_v6());
}

TEST(ISA, armv6k) {
	ASSERT_TRUE(cpuinfo_has_arm_v6k());
}

TEST(ISA, armv7) {
	ASSERT_TRUE(cpuinfo_has_arm_v7());
}

TEST(ISA, armv7mp) {
	ASSERT_TRUE(cpuinfo_has_arm_v7mp());
}

TEST(ISA, idiv) {
	ASSERT_TRUE(cpuinfo_has_arm_idiv());
}

TEST(ISA, vfpv2) {
	ASSERT_FALSE(cpuinfo_has_arm_vfpv2());
}

TEST(ISA, vfpv3) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3());
}

TEST(ISA, vfpv3_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_d32());
}

TEST(ISA, vfpv3_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16());
}

TEST(ISA, vfpv3_fp16_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16_d32());
}

TEST(ISA, vfpv4) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4());
}

TEST(ISA, vfpv4_d32) {
	ASSERT_TRUE(cpuinfo_has_arm_vfpv4_d32());
}

TEST(ISA, wmmx) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx());
}

TEST(ISA, wmmx2) {
	ASSERT_FALSE(cpuinfo_has_arm_wmmx2());
}

TEST(ISA, neon) {
	ASSERT_TRUE(cpuinfo_has_arm_neon());
}

TEST(ISA, neon_fp16) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fp16());
}

TEST(ISA, neon_fma) {
	ASSERT_TRUE(cpuinfo_has_arm_neon_fma());
}

TEST(ISA, atomics) {
	ASSERT_FALSE(cpuinfo_has_arm_atomics());
}

TEST(ISA, neon_rdm) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_rdm());
}

TEST(ISA, fp16_arith) {
	ASSERT_FALSE(cpuinfo_has_arm_fp16_arith());
}

TEST(ISA, neon_fp16_arith) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_fp16_arith());
}

TEST(ISA, neon_dot) {
	ASSERT_FALSE(cpuinfo_has_arm_neon_dot());
}

TEST(ISA, jscvt) {
	ASSERT_FALSE(cpuinfo_has_arm_jscvt());
}

TEST(ISA, fcma) {
	ASSERT_FALSE(cpuinfo_has_arm_fcma());
}

TEST(ISA, aes) {
	ASSERT_FALSE(cpuinfo_has_arm_aes());
}

TEST(ISA, sha1) {
	ASSERT_FALSE(cpuinfo_has_arm_sha1());
}

TEST(ISA, sha2) {
	ASSERT_FALSE(cpuinfo_has_arm_sha2());
}

TEST(ISA, pmull) {
	ASSERT_FALSE(cpuinfo_has_arm_pmull());
}

TEST(ISA, crc32) {
	ASSERT_FALSE(cpuinfo_has_arm_crc32());
}

TEST(L1I, count) {
	ASSERT_EQ(2, cpuinfo_get_l1i_caches_count());
}

TEST(L1I, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1i_caches());
}

TEST(L1I, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(16 * 1024, cpuinfo_get_l1i_cache(i)->size);
	}
}

TEST(L1I, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1i_cache(i)->size,
			cpuinfo_get_l1i_cache(i)->sets * cpuinfo_get_l1i_cache(i)->line_size *
			cpuinfo_get_l1i_cache(i)->partitions * cpuinfo_get_l1i_cache(i)->associativity);
	}
}

TEST(L1I, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->partitions);
	}
}

TEST(L1I, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1i_cache(i)->line_size);
	}
}

TEST(L1I, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1i_cache(i)->flags);
	}
}

TEST(L1I, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1i_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->processor_count);
	}
}

TEST(L1D, count) {
	ASSERT_EQ(2, cpuinfo_get_l1d_caches_count());
}

TEST(L1D, non_null) {
	ASSERT_TRUE(cpuinfo_get_l1d_caches());
}

TEST(L1D, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(16 * 1024, cpuinfo_get_l1d_cache(i)->size);
	}
}

TEST(L1D, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(4, cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l1d_cache(i)->size,
			cpuinfo_get_l1d_cache(i)->sets * cpuinfo_get_l1d_cache(i)->line_size *
			cpuinfo_get_l1d_cache(i)->partitions * cpuinfo_get_l1d_cache(i)->associativity);
	}
}

TEST(L1D, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->partitions);
	}
}

TEST(L1D, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(64, cpuinfo_get_l1d_cache(i)->line_size);
	}
}

TEST(L1D, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l1d_cache(i)->flags);
	}
}

TEST(L1D, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) {
		ASSERT_EQ(i, cpuinfo_get_l1d_cache(i)->processor_start);
		ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->processor_count);
	}
}

TEST(L2, count) {
	ASSERT_EQ(1, cpuinfo_get_l2_caches_count());
}

TEST(L2, non_null) {
	ASSERT_TRUE(cpuinfo_get_l2_caches());
}

TEST(L2, size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		/* Previously wrapped in a stray "switch (i) {" with no case
		 * labels, so this assertion never executed. */
		ASSERT_EQ(1 * 1024 * 1024, cpuinfo_get_l2_cache(i)->size);
	}
}

TEST(L2, associativity) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(8, cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, sets) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(cpuinfo_get_l2_cache(i)->size,
			cpuinfo_get_l2_cache(i)->sets * cpuinfo_get_l2_cache(i)->line_size *
			cpuinfo_get_l2_cache(i)->partitions * cpuinfo_get_l2_cache(i)->associativity);
	}
}

TEST(L2, partitions) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->partitions);
	}
}

TEST(L2, line_size) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(128, cpuinfo_get_l2_cache(i)->line_size);
	}
}

TEST(L2, flags) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->flags);
	}
}

TEST(L2, processors) {
	for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) {
		ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->processor_start);
		ASSERT_EQ(2, cpuinfo_get_l2_cache(i)->processor_count);
	}
}

TEST(L3, none) {
	ASSERT_EQ(0, cpuinfo_get_l3_caches_count());
	ASSERT_FALSE(cpuinfo_get_l3_caches());
}

TEST(L4, none) {
	ASSERT_EQ(0, cpuinfo_get_l4_caches_count());
	ASSERT_FALSE(cpuinfo_get_l4_caches());
}

#include <galaxy-s3-us.h>

int main(int argc, char* argv[]) {
#if CPUINFO_ARCH_ARM
	cpuinfo_set_hwcap(UINT32_C(0x0001B0D7));
#endif
	cpuinfo_mock_filesystem(filesystem);
#ifdef __ANDROID__
	cpuinfo_mock_android_properties(properties);
#endif
	cpuinfo_initialize();
	::testing::InitGoogleTest(&argc, argv);
	return RUN_ALL_TESTS();
}
bsd-2-clause
amirajdhawan/AutomatedLibrarySystem
FinalService/FinalService/Properties/Resources.Designer.cs
2852
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//     Runtime Version:4.0.30319.235
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

// NOTE(review): auto-generated by StronglyTypedResourceBuilder from the
// project's .resx file -- do not hand-edit members here; edit the .resx and
// rerun ResGen (or rebuild) instead.
namespace FinalService.Properties {


    /// <summary>
    ///   A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {

        // Lazily-created singleton ResourceManager (see ResourceManager getter).
        private static global::System.Resources.ResourceManager resourceMan;

        // Optional culture override applied to all lookups (see Culture).
        private static global::System.Globalization.CultureInfo resourceCulture;

        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }

        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if ((resourceMan == null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("FinalService.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }

        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }
    }
}
bsd-2-clause
Maratyszcza/cpuinfo
test/mock/pixel-2-xl.cc
17023
#include <gtest/gtest.h> #include <cpuinfo.h> #include <cpuinfo-mock.h> TEST(PROCESSORS, count) { ASSERT_EQ(8, cpuinfo_get_processors_count()); } TEST(PROCESSORS, non_null) { ASSERT_TRUE(cpuinfo_get_processors()); } TEST(PROCESSORS, smt_id) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { ASSERT_EQ(0, cpuinfo_get_processor(i)->smt_id); } } TEST(PROCESSORS, core) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { ASSERT_EQ(cpuinfo_get_core(i), cpuinfo_get_processor(i)->core); } } TEST(PROCESSORS, cluster) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_processor(i)->cluster); break; case 4: case 5: case 6: case 7: ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_processor(i)->cluster); break; } } } TEST(PROCESSORS, package) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_processor(i)->package); } } TEST(PROCESSORS, linux_id) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(i + 4, cpuinfo_get_processor(i)->linux_id); break; case 4: case 5: case 6: case 7: ASSERT_EQ(i - 4, cpuinfo_get_processor(i)->linux_id); break; } } } TEST(PROCESSORS, l1i) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { ASSERT_EQ(cpuinfo_get_l1i_cache(i), cpuinfo_get_processor(i)->cache.l1i); } } TEST(PROCESSORS, l1d) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { ASSERT_EQ(cpuinfo_get_l1d_cache(i), cpuinfo_get_processor(i)->cache.l1d); } } TEST(PROCESSORS, l2) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(cpuinfo_get_l2_cache(0), cpuinfo_get_processor(i)->cache.l2); break; case 4: case 5: case 6: case 7: ASSERT_EQ(cpuinfo_get_l2_cache(1), cpuinfo_get_processor(i)->cache.l2); break; } } } TEST(PROCESSORS, l3) { for 
(uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l3); } } TEST(PROCESSORS, l4) { for (uint32_t i = 0; i < cpuinfo_get_processors_count(); i++) { ASSERT_FALSE(cpuinfo_get_processor(i)->cache.l4); } } TEST(CORES, count) { ASSERT_EQ(8, cpuinfo_get_cores_count()); } TEST(CORES, non_null) { ASSERT_TRUE(cpuinfo_get_cores()); } TEST(CORES, processor_start) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { ASSERT_EQ(i, cpuinfo_get_core(i)->processor_start); } } TEST(CORES, processor_count) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { ASSERT_EQ(1, cpuinfo_get_core(i)->processor_count); } } TEST(CORES, core_id) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { ASSERT_EQ(i, cpuinfo_get_core(i)->core_id); } } TEST(CORES, cluster) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(cpuinfo_get_cluster(0), cpuinfo_get_core(i)->cluster); break; case 4: case 5: case 6: case 7: ASSERT_EQ(cpuinfo_get_cluster(1), cpuinfo_get_core(i)->cluster); break; } } } TEST(CORES, package) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_core(i)->package); } } TEST(CORES, vendor) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_core(i)->vendor); } } TEST(CORES, uarch) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(cpuinfo_uarch_cortex_a73, cpuinfo_get_core(i)->uarch); break; case 4: case 5: case 6: case 7: ASSERT_EQ(cpuinfo_uarch_cortex_a53, cpuinfo_get_core(i)->uarch); break; } } } TEST(CORES, midr) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(UINT32_C(0x51AF8001), cpuinfo_get_core(i)->midr); break; case 4: case 5: case 6: case 7: ASSERT_EQ(UINT32_C(0x51AF8014), cpuinfo_get_core(i)->midr); 
break; } } } TEST(CORES, DISABLED_frequency) { for (uint32_t i = 0; i < cpuinfo_get_cores_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(UINT64_C(2457600000), cpuinfo_get_core(i)->frequency); break; case 4: case 5: case 6: case 7: ASSERT_EQ(UINT64_C(1900800000), cpuinfo_get_core(i)->frequency); break; } } } TEST(CLUSTERS, count) { ASSERT_EQ(2, cpuinfo_get_clusters_count()); } TEST(CLUSTERS, non_null) { ASSERT_TRUE(cpuinfo_get_clusters()); } TEST(CLUSTERS, processor_start) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { switch (i) { case 0: ASSERT_EQ(0, cpuinfo_get_cluster(i)->processor_start); break; case 1: ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_start); break; } } } TEST(CLUSTERS, processor_count) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { ASSERT_EQ(4, cpuinfo_get_cluster(i)->processor_count); } } TEST(CLUSTERS, core_start) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { switch (i) { case 0: ASSERT_EQ(0, cpuinfo_get_cluster(i)->core_start); break; case 1: ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_start); break; } } } TEST(CLUSTERS, core_count) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { ASSERT_EQ(4, cpuinfo_get_cluster(i)->core_count); } } TEST(CLUSTERS, cluster_id) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { ASSERT_EQ(i, cpuinfo_get_cluster(i)->cluster_id); } } TEST(CLUSTERS, package) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { ASSERT_EQ(cpuinfo_get_package(0), cpuinfo_get_cluster(i)->package); } } TEST(CLUSTERS, vendor) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { ASSERT_EQ(cpuinfo_vendor_arm, cpuinfo_get_cluster(i)->vendor); } } TEST(CLUSTERS, uarch) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { switch (i) { case 0: ASSERT_EQ(cpuinfo_uarch_cortex_a73, cpuinfo_get_cluster(i)->uarch); break; case 1: ASSERT_EQ(cpuinfo_uarch_cortex_a53, cpuinfo_get_cluster(i)->uarch); break; } } } 
TEST(CLUSTERS, midr) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { switch (i) { case 0: ASSERT_EQ(UINT32_C(0x51AF8001), cpuinfo_get_cluster(i)->midr); break; case 1: ASSERT_EQ(UINT32_C(0x51AF8014), cpuinfo_get_cluster(i)->midr); break; } } } TEST(CLUSTERS, DISABLED_frequency) { for (uint32_t i = 0; i < cpuinfo_get_clusters_count(); i++) { switch (i) { case 0: ASSERT_EQ(UINT64_C(2457600000), cpuinfo_get_cluster(i)->frequency); break; case 1: ASSERT_EQ(UINT64_C(1900800000), cpuinfo_get_cluster(i)->frequency); break; } } } TEST(PACKAGES, count) { ASSERT_EQ(1, cpuinfo_get_packages_count()); } TEST(PACKAGES, name) { for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) { ASSERT_EQ("Qualcomm MSM8998", std::string(cpuinfo_get_package(i)->name, strnlen(cpuinfo_get_package(i)->name, CPUINFO_PACKAGE_NAME_MAX))); } } TEST(PACKAGES, processor_start) { for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) { ASSERT_EQ(0, cpuinfo_get_package(i)->processor_start); } } TEST(PACKAGES, processor_count) { for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) { ASSERT_EQ(8, cpuinfo_get_package(i)->processor_count); } } TEST(PACKAGES, core_start) { for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) { ASSERT_EQ(0, cpuinfo_get_package(i)->core_start); } } TEST(PACKAGES, core_count) { for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) { ASSERT_EQ(8, cpuinfo_get_package(i)->core_count); } } TEST(PACKAGES, cluster_start) { for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) { ASSERT_EQ(0, cpuinfo_get_package(i)->cluster_start); } } TEST(PACKAGES, cluster_count) { for (uint32_t i = 0; i < cpuinfo_get_packages_count(); i++) { ASSERT_EQ(2, cpuinfo_get_package(i)->cluster_count); } } TEST(ISA, thumb) { #if CPUINFO_ARCH_ARM ASSERT_TRUE(cpuinfo_has_arm_thumb()); #elif CPUINFO_ARCH_ARM64 ASSERT_FALSE(cpuinfo_has_arm_thumb()); #endif } TEST(ISA, thumb2) { #if CPUINFO_ARCH_ARM ASSERT_TRUE(cpuinfo_has_arm_thumb2()); #elif CPUINFO_ARCH_ARM64 
ASSERT_FALSE(cpuinfo_has_arm_thumb2()); #endif } TEST(ISA, armv5e) { #if CPUINFO_ARCH_ARM ASSERT_TRUE(cpuinfo_has_arm_v5e()); #elif CPUINFO_ARCH_ARM64 ASSERT_FALSE(cpuinfo_has_arm_v5e()); #endif } TEST(ISA, armv6) { #if CPUINFO_ARCH_ARM ASSERT_TRUE(cpuinfo_has_arm_v6()); #elif CPUINFO_ARCH_ARM64 ASSERT_FALSE(cpuinfo_has_arm_v6()); #endif } TEST(ISA, armv6k) { #if CPUINFO_ARCH_ARM ASSERT_TRUE(cpuinfo_has_arm_v6k()); #elif CPUINFO_ARCH_ARM64 ASSERT_FALSE(cpuinfo_has_arm_v6k()); #endif } TEST(ISA, armv7) { #if CPUINFO_ARCH_ARM ASSERT_TRUE(cpuinfo_has_arm_v7()); #elif CPUINFO_ARCH_ARM64 ASSERT_FALSE(cpuinfo_has_arm_v7()); #endif } TEST(ISA, armv7mp) { #if CPUINFO_ARCH_ARM ASSERT_TRUE(cpuinfo_has_arm_v7mp()); #elif CPUINFO_ARCH_ARM64 ASSERT_FALSE(cpuinfo_has_arm_v7mp()); #endif } TEST(ISA, idiv) { ASSERT_TRUE(cpuinfo_has_arm_idiv()); } TEST(ISA, vfpv2) { ASSERT_FALSE(cpuinfo_has_arm_vfpv2()); } TEST(ISA, vfpv3) { ASSERT_TRUE(cpuinfo_has_arm_vfpv3()); } TEST(ISA, vfpv3_d32) { ASSERT_TRUE(cpuinfo_has_arm_vfpv3_d32()); } TEST(ISA, vfpv3_fp16) { ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16()); } TEST(ISA, vfpv3_fp16_d32) { ASSERT_TRUE(cpuinfo_has_arm_vfpv3_fp16_d32()); } TEST(ISA, vfpv4) { ASSERT_TRUE(cpuinfo_has_arm_vfpv4()); } TEST(ISA, vfpv4_d32) { ASSERT_TRUE(cpuinfo_has_arm_vfpv4_d32()); } TEST(ISA, wmmx) { ASSERT_FALSE(cpuinfo_has_arm_wmmx()); } TEST(ISA, wmmx2) { ASSERT_FALSE(cpuinfo_has_arm_wmmx2()); } TEST(ISA, neon) { ASSERT_TRUE(cpuinfo_has_arm_neon()); } TEST(ISA, neon_fp16) { ASSERT_TRUE(cpuinfo_has_arm_neon_fp16()); } TEST(ISA, neon_fma) { ASSERT_TRUE(cpuinfo_has_arm_neon_fma()); } TEST(ISA, atomics) { ASSERT_FALSE(cpuinfo_has_arm_atomics()); } TEST(ISA, neon_rdm) { ASSERT_FALSE(cpuinfo_has_arm_neon_rdm()); } TEST(ISA, fp16_arith) { ASSERT_FALSE(cpuinfo_has_arm_fp16_arith()); } TEST(ISA, neon_fp16_arith) { ASSERT_FALSE(cpuinfo_has_arm_neon_fp16_arith()); } TEST(ISA, neon_dot) { ASSERT_FALSE(cpuinfo_has_arm_neon_dot()); } TEST(ISA, jscvt) { 
ASSERT_FALSE(cpuinfo_has_arm_jscvt()); } TEST(ISA, fcma) { ASSERT_FALSE(cpuinfo_has_arm_fcma()); } TEST(ISA, aes) { ASSERT_TRUE(cpuinfo_has_arm_aes()); } TEST(ISA, sha1) { ASSERT_TRUE(cpuinfo_has_arm_sha1()); } TEST(ISA, sha2) { ASSERT_TRUE(cpuinfo_has_arm_sha2()); } TEST(ISA, pmull) { ASSERT_TRUE(cpuinfo_has_arm_pmull()); } TEST(ISA, crc32) { ASSERT_TRUE(cpuinfo_has_arm_crc32()); } TEST(L1I, count) { ASSERT_EQ(8, cpuinfo_get_l1i_caches_count()); } TEST(L1I, non_null) { ASSERT_TRUE(cpuinfo_get_l1i_caches()); } TEST(L1I, size) { for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(64 * 1024, cpuinfo_get_l1i_cache(i)->size); break; case 4: case 5: case 6: case 7: ASSERT_EQ(32 * 1024, cpuinfo_get_l1i_cache(i)->size); break; } } } TEST(L1I, associativity) { for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(4, cpuinfo_get_l1i_cache(i)->associativity); break; case 4: case 5: case 6: case 7: ASSERT_EQ(2, cpuinfo_get_l1i_cache(i)->associativity); break; } } } TEST(L1I, sets) { for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) { ASSERT_EQ(cpuinfo_get_l1i_cache(i)->size, cpuinfo_get_l1i_cache(i)->sets * cpuinfo_get_l1i_cache(i)->line_size * cpuinfo_get_l1i_cache(i)->partitions * cpuinfo_get_l1i_cache(i)->associativity); } } TEST(L1I, partitions) { for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) { ASSERT_EQ(1, cpuinfo_get_l1i_cache(i)->partitions); } } TEST(L1I, line_size) { for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) { ASSERT_EQ(64, cpuinfo_get_l1i_cache(i)->line_size); } } TEST(L1I, flags) { for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) { ASSERT_EQ(0, cpuinfo_get_l1i_cache(i)->flags); } } TEST(L1I, processors) { for (uint32_t i = 0; i < cpuinfo_get_l1i_caches_count(); i++) { ASSERT_EQ(i, cpuinfo_get_l1i_cache(i)->processor_start); ASSERT_EQ(1, 
cpuinfo_get_l1i_cache(i)->processor_count); } } TEST(L1D, count) { ASSERT_EQ(8, cpuinfo_get_l1d_caches_count()); } TEST(L1D, non_null) { ASSERT_TRUE(cpuinfo_get_l1d_caches()); } TEST(L1D, size) { for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(64 * 1024, cpuinfo_get_l1d_cache(i)->size); break; case 4: case 5: case 6: case 7: ASSERT_EQ(32 * 1024, cpuinfo_get_l1d_cache(i)->size); break; } } } TEST(L1D, associativity) { for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) { switch (i) { case 0: case 1: case 2: case 3: ASSERT_EQ(16, cpuinfo_get_l1d_cache(i)->associativity); break; case 4: case 5: case 6: case 7: ASSERT_EQ(4, cpuinfo_get_l1d_cache(i)->associativity); break; } } } TEST(L1D, sets) { for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) { ASSERT_EQ(cpuinfo_get_l1d_cache(i)->size, cpuinfo_get_l1d_cache(i)->sets * cpuinfo_get_l1d_cache(i)->line_size * cpuinfo_get_l1d_cache(i)->partitions * cpuinfo_get_l1d_cache(i)->associativity); } } TEST(L1D, partitions) { for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) { ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->partitions); } } TEST(L1D, line_size) { for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) { ASSERT_EQ(64, cpuinfo_get_l1d_cache(i)->line_size); } } TEST(L1D, flags) { for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) { ASSERT_EQ(0, cpuinfo_get_l1d_cache(i)->flags); } } TEST(L1D, processors) { for (uint32_t i = 0; i < cpuinfo_get_l1d_caches_count(); i++) { ASSERT_EQ(i, cpuinfo_get_l1d_cache(i)->processor_start); ASSERT_EQ(1, cpuinfo_get_l1d_cache(i)->processor_count); } } TEST(L2, count) { ASSERT_EQ(2, cpuinfo_get_l2_caches_count()); } TEST(L2, non_null) { ASSERT_TRUE(cpuinfo_get_l2_caches()); } TEST(L2, size) { for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) { switch (i) { case 0: ASSERT_EQ(2 * 1024 * 1024, cpuinfo_get_l2_cache(i)->size); break; case 1: ASSERT_EQ(1 * 1024 * 1024, 
cpuinfo_get_l2_cache(i)->size); break; } } } TEST(L2, associativity) { for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) { ASSERT_EQ(16, cpuinfo_get_l2_cache(i)->associativity); } } TEST(L2, sets) { for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) { ASSERT_EQ(cpuinfo_get_l2_cache(i)->size, cpuinfo_get_l2_cache(i)->sets * cpuinfo_get_l2_cache(i)->line_size * cpuinfo_get_l2_cache(i)->partitions * cpuinfo_get_l2_cache(i)->associativity); } } TEST(L2, partitions) { for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) { ASSERT_EQ(1, cpuinfo_get_l2_cache(i)->partitions); } } TEST(L2, line_size) { for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) { ASSERT_EQ(64, cpuinfo_get_l2_cache(i)->line_size); } } TEST(L2, flags) { for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) { switch (i) { case 0: ASSERT_EQ(CPUINFO_CACHE_INCLUSIVE, cpuinfo_get_l2_cache(i)->flags); break; case 1: ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->flags); break; } } } TEST(L2, processors) { for (uint32_t i = 0; i < cpuinfo_get_l2_caches_count(); i++) { switch (i) { case 0: ASSERT_EQ(0, cpuinfo_get_l2_cache(i)->processor_start); ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_count); break; case 1: ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_start); ASSERT_EQ(4, cpuinfo_get_l2_cache(i)->processor_count); break; } } } TEST(L3, none) { ASSERT_EQ(0, cpuinfo_get_l3_caches_count()); ASSERT_FALSE(cpuinfo_get_l3_caches()); } TEST(L4, none) { ASSERT_EQ(0, cpuinfo_get_l4_caches_count()); ASSERT_FALSE(cpuinfo_get_l4_caches()); } #include <pixel-2-xl.h> int main(int argc, char* argv[]) { #if CPUINFO_ARCH_ARM cpuinfo_set_hwcap(UINT32_C(0x0037B0D6)); cpuinfo_set_hwcap2(UINT32_C(0x0000001F)); #elif CPUINFO_ARCH_ARM64 cpuinfo_set_hwcap(UINT32_C(0x000000FF)); #endif cpuinfo_mock_filesystem(filesystem); #ifdef __ANDROID__ cpuinfo_mock_android_properties(properties); #endif cpuinfo_initialize(); ::testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); }
bsd-2-clause
ckxng/wakeup
tag/tag30.go
46
// Package tag. package tag println("tag30")
bsd-2-clause
anurag95/clearnlp
src/main/java/com/googlecode/clearnlp/util/triple/Triple.java
2370
/** * Copyright (c) 2009/09-2012/08, Regents of the University of Colorado * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /** * Copyright 2012/09-2013/04, University of Massachusetts Amherst * Copyright 2013/05-Present, IPSoft Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package com.googlecode.clearnlp.util.triple; public class Triple<T1, T2, T3> { public T1 o1; public T2 o2; public T3 o3; public Triple(T1 o1, T2 o2, T3 o3) { set(o1, o2, o3); } public void set(T1 o1, T2 o2, T3 o3) { this.o1 = o1; this.o2 = o2; this.o3 = o3; } }
bsd-2-clause
uwgraphics/Ubiqu-Ity
Ity/Importers/Importer.py
1025
# coding=utf-8 __author__ = 'kohlmannj' import abc import os from Ity import metadata_root class Importer(object): __metaclass__ = abc.ABCMeta @classmethod @abc.abstractmethod def get_export_path(cls): """ Returns a string representing the path to the file or folder of files this Importer creates after importing metadata. This merely a path; no further validation is performed. """ pass # def __init__(self, overwrite=True): # self.overwrite = overwrite # # Get the export path. This could be a file or folder, depending on how # # the author of a subclass decides it needs to work. # self.export_path = __name__.get_export_path() # # See if the export is valid. # if os.path.exists(self.export_path): # if not self.overwrite: # raise ValueError("Export path already exists (and we were told not to overwrite).") # # Some common importing operations might go here.
bsd-2-clause
siteadmin/codevalidator
src/main/java/org/sitenv/vocabularies/validation/NodeValidatorFactory.java
193
package org.sitenv.vocabularies.validation; /** * Created by Brian on 2/7/2016. */ public interface NodeValidatorFactory { NodeValidation getVocabularyValidator(String validatorType); }
bsd-2-clause
Mickasso90/homebrew-cask
Casks/gitx.rb
159
class Gitx < Cask url 'http://frim.frim.nl/GitXStable.app.zip' homepage 'http://gitx.frim.nl/' version 'latest' sha256 :no_check link 'GitX.app' end
bsd-2-clause
johnrsibert/tagest
21/recap_t2.cpp
1140
#include <iostream.h> #include <iomanip.h> #include "recap_t.h" #include "modlarea.h" int operator == (recaptype& a, recaptype& b) { return(!recaptype_compare(a,b)); } recaptype& operator += (recaptype& a, recaptype& b) { a.returns += b.returns; return(a); } int recaptype_compare(const void* _a, const void* _b) { recaptype a = *(recaptype*)_a; recaptype b = *(recaptype*)_b; int cmp = recaptype_compare(a, b); return(cmp); } int recaptype_compare(recaptype& a, recaptype& b) { if (a.date < b.date) return(-1); else if (a.date > b.date) return(1); else { int cmp = strcmp(a.fleet, b.fleet); if (cmp) return(cmp); } if (a.cohort < b.cohort) return (-1); else if (a.cohort > b.cohort) return(1); else if (a.i < b.i) return (-1); else if (a.i > b.i) return(1); else if (a.j < b.j) return (-1); else if (a.j > b.j) return(1); else return(0); } void recaptype_sort(recaptype_vector& v, const int n) { cout << "qsorting " << n << " full recaptype_ records" << endl; qsort((void *)v.base(), n, sizeof(recaptype), recaptype_compare); }
bsd-2-clause
auraphp/Aura.Html
src/Helper/Styles.php
6211
<?php /** * * This file is part of Aura for PHP. * * @license http://opensource.org/licenses/bsd-license.php BSD * */ namespace Aura\Html\Helper; /** * * Helper for a series of <link rel="stylesheet" ... /> tags. * * @package Aura.Html * */ class Styles extends AbstractSeries { /** * Temporary storage or params passed to caputre functions * * @var mixed * * @access private */ private $capture; /** * * Adds a <link rel="stylesheet" ... /> tag to the series. * * @param string $href The source href for the stylesheet. * * @param array $attr Additional attributes for the <link> tag. * * @param int $pos The stylesheet position in the series. * * @return self * */ public function add($href, array $attr = null, $pos = 100) { $attr = $this->fixAttr($href, $attr); $tag = $this->void('link', $attr); $this->addElement($pos, $tag); return $this; } /** * * Adds a conditional `<!--[if ...]><link rel="stylesheet" ... /><![endif] -->` * tag to the stack. * * @param string $cond The conditional expression for the stylesheet. * * @param string $href The source href for the stylesheet. * * @param array $attr Additional attributes for the <link> tag. * * @param string $pos The stylesheet position in the stack. * * @return self * */ public function addCond($cond, $href, array $attr = null, $pos = 100) { $attr = $this->fixAttr($href, $attr); $link = $this->void('link', $attr); $cond = $this->escaper->html($cond); $tag = "<!--[if $cond]>$link<![endif]-->"; $this->addElement($pos, $tag); return $this; } /** * Returns a "style" tag * * @param mixed $css The source CSS * @param array $attr The attributes for the tag * * @return string * * @access protected */ protected function style($css, array $attr = null) { $attr = $this->fixInternalAttr($attr); $attr = $this->escaper->attr($attr); return "<style $attr>$css</style>"; } /** * addInternal * * @param mixed $css The source CSS * @param array $attr Additional attributes for the <style> tag * @param int $pos The position in the stack. 
* * @return self * * @access public */ public function addInternal($css, array $attr = null, $pos = 100) { $style = $this->style($css, $attr); $this->addElement($pos, $style); return $this; } /** * * Adds a conditional `<!--[if ...]><style ... /><![endif] -->` * tag to the stack. * * @param string $cond The conditional expression for the css. * * @param string $css The source css. * * @param array $attr Additional attributes for the <style> tag. * * @param string $pos The position in the stack. * * @return self * */ public function addCondInternal($cond, $css, array $attr = null, $pos = 100) { $style = $this->style($css, $attr); $cond = $this->escaper->html($cond); $tag = "<!--[if $cond]>$style<![endif]-->"; $this->addElement($pos, $tag); return $this; } /** * Begins output buffering for a conditional style tag * * @param array $attr Additional attributes for the <style> tag. * @param int $pos The style position in the stack * * @return void * * @access public */ public function beginInternal(array $attr = null, $pos = 100) { $this->capture[] = array( 'attr' => $attr, 'pos' => $pos ); ob_start(); } /** * Begins output buffering for a conditional style tag * * @param mixed $cond The conditional expression for the css * @param array $attr Additional attributes for the <style> tag. * @param int $pos The style position in the stack * * @return void * * @access public */ public function beginCondInternal($cond, array $attr = null, $pos = 100) { $this->capture[] = array( 'attr' => $attr, 'pos' => $pos, 'cond' => $cond ); ob_start(); } /** * Ends buffering and retains output for the most-recent internal. 
* * @return self * * @access public */ public function endInternal() { $params = array_pop($this->capture); $css = ob_get_clean(); if (isset($params['cond'])) { return $this->addCondInternal( $params['cond'], $css, $params['attr'], $params['pos'] ); } return $this->addInternal( $css, $params['attr'], $params['pos'] ); } /** * Fixes the attributes for the internal stylesheet. * * @param array $attr Additional attributes for the <style> tag. * * @return array * * @access protected */ protected function fixInternalAttr(array $attr = null) { $attr = (array) $attr; $base = array( 'type' => 'text/css', 'media' => 'screen', ); unset($attr['rel']); unset($attr['href']); return array_merge($base, (array) $attr); } /** * * Fixes the attributes for the stylesheet. * * @param string $href The source href for the stylesheet. * * @param array $attr Additional attributes for the <link> tag. * * @return array The fixed attributes. * */ protected function fixAttr($href, array $attr = null) { $attr = (array) $attr; $base = array( 'rel' => 'stylesheet', 'href' => $href, 'type' => 'text/css', 'media' => 'screen', ); unset($attr['rel']); unset($attr['href']); return array_merge($base, (array) $attr); } }
bsd-2-clause
windoze/Argos
query/match.cpp
645
// // match.cpp // Argos // // Created by Windoze on 12-7-6. // Copyright (c) 2012 0d0a.com. All rights reserved. // #include "index/forward_index.h" #include "query/match.h" namespace argos { namespace query { doc_iterator_impl_ptr_t MatchNone::match(common::ExecutionContext &ctx) { return match_none_ptr; } doc_iterator_impl_ptr_t MatchAll::match(common::ExecutionContext &ctx) { return doc_iterator_impl_ptr_t(new detail::match_all_doc_iterator_impl(ctx.get_forward_index()->get_last_doc())); } } // End of namespace query } // End of namespace argos
bsd-2-clause
spaam/homebrew-core
Formula/infer.rb
5152
class Infer < Formula desc "Static analyzer for Java, C, C++, and Objective-C" homepage "https://fbinfer.com/" # pull from git tag to get submodules url "https://github.com/facebook/infer.git", tag: "v0.17.0", revision: "99464c01da5809e7159ed1a75ef10f60d34506a4" license "MIT" livecheck do url :stable regex(/^v?(\d+(?:\.\d+)+)$/i) end bottle do rebuild 1 sha256 cellar: :any, catalina: "1dc9c75c759611c8fe0efa8f63d7e55bbaa35d8dc2863f7a527069b11759f244" sha256 cellar: :any, mojave: "74b2dddff2bea362066395e28a797078d33514774511cc64771d0f89eea2466d" sha256 cellar: :any, high_sierra: "7630571f8e391ce0ba991ffe7a5d7b2b4a1029cda1d56497800d8ae0a260d4b6" end deprecate! date: "2020-11-13", because: :does_not_build depends_on "autoconf" => :build depends_on "automake" => :build depends_on "cmake" => :build depends_on "libtool" => :build depends_on "ocaml" => :build depends_on "ocaml-findlib" => :build depends_on "ocaml-num" => :build depends_on "opam" => :build depends_on "openjdk@8" => [:build, :test] depends_on "pkg-config" => :build depends_on "gmp" depends_on :macos # Due to Python 2 (https://github.com/facebook/infer/issues/934) depends_on "mpfr" depends_on "sqlite" uses_from_macos "m4" => :build uses_from_macos "unzip" => :build uses_from_macos "ncurses" uses_from_macos "xz" uses_from_macos "zlib" # Remove camlp4 dependency, which is deprecated # Addressed in 0.18.x patch do url "https://github.com/facebook/infer/commit/f52b5fc981c692776210d7eb9681c2b8c3117c93.patch?full_index=1" sha256 "5487b9b39607c94821bede8d4f0ec2a0ed08d5213d5f048b1344819dac53b2f5" end def install # needed to build clang ENV.permit_arch_flags # Apple's libstdc++ is too old to build LLVM ENV.libcxx if ENV.compiler == :clang opamroot = buildpath/"opamroot" opamroot.mkpath ENV["OPAMROOT"] = opamroot ENV["OPAMYES"] = "1" ENV["OPAMVERBOSE"] = "1" system "opam", "init", "--no-setup", "--disable-sandboxing" # do not attempt to use the clang in facebook-clang-plugins/ as it hasn't been built yet 
ENV["INFER_CONFIGURE_OPTS"] = "--prefix=#{prefix} --without-fcp-clang" # Let's try build clang faster ENV["JOBS"] = ENV.make_jobs.to_s ENV["CLANG_CMAKE_ARGS"] = "-DLLVM_OCAML_INSTALL_PATH=#{`opam var lib`.chomp}/ocaml" # Release build touch ".release" # Pin updated dependencies which are required to build on brew ocaml # Remove from this when Infer updates their opam.locked to use at least these versions pinned_deps = { "mlgmpidl" => "1.2.12", "octavius" => "1.2.1", "parmap" => "1.0-rc11", "ppx_tools" => "5.3+4.08.0", } pinned_deps.each { |dep, ver| system "opam", "pin", "add", dep, ver, "--locked" } # Unfortunately, opam can't cope if a system ocaml-num happens to be installed. # Instead, we depend on Homebrew's ocaml-num and fool opam into using it. # https://github.com/ocaml/opam-repository/issues/14646 system "opam", "pin", "add", "ocamlfind", Formula["ocaml-findlib"].version.to_s, "--locked", "--fake" system "opam", "pin", "add", "num", Formula["ocaml-num"].version.to_s, "--locked", "--fake" # Relax the dependency lock on a specific ocaml # Also ignore anything we pinned above ENV["OPAMIGNORECONSTRAINTS"] = "ocaml,ocamlfind,num,#{pinned_deps.keys.join(",")}" # Remove ocaml-variants dependency (we won't be using it) inreplace "opam.locked", /^ +"ocaml-variants" \{= ".*?"\}$\n/, "" system "opam", "exec", "--", "./build-infer.sh", "all", "--yes", "--user-opam-switch" system "opam", "exec", "--", "make", "install-with-libs" end test do (testpath/"FailingTest.c").write <<~EOS #include <stdio.h> int main() { int *s = NULL; *s = 42; return 0; } EOS (testpath/"PassingTest.c").write <<~EOS #include <stdio.h> int main() { int *s = NULL; if (s != NULL) { *s = 42; } return 0; } EOS shell_output("#{bin}/infer --fail-on-issue -P -- clang -c FailingTest.c", 2) shell_output("#{bin}/infer --fail-on-issue -P -- clang -c PassingTest.c") (testpath/"FailingTest.java").write <<~EOS class FailingTest { String mayReturnNull(int i) { if (i > 0) { return "Hello, Infer!"; } return null; 
} int mayCauseNPE() { String s = mayReturnNull(0); return s.length(); } } EOS (testpath/"PassingTest.java").write <<~EOS class PassingTest { String mayReturnNull(int i) { if (i > 0) { return "Hello, Infer!"; } return null; } int mayCauseNPE() { String s = mayReturnNull(0); return s == null ? 0 : s.length(); } } EOS shell_output("#{bin}/infer --fail-on-issue -P -- javac FailingTest.java", 2) shell_output("#{bin}/infer --fail-on-issue -P -- javac PassingTest.java") end end
bsd-2-clause
raphaelahrens/doto
doto/model/crud.py
3003
""" Module for crud operations """ def add_one(store, cls, insert_query, obj): """ Add one row """ row_parameters = cls.obj_to_row(obj) cur = store.execute(insert_query, row_parameters) obj.id = cur.lastrowid return obj def add_and_cache(store, cls, insert_query, obj): """ Add a new row and store the id and type in the last event cache """ obj = add_one(store, cls, insert_query, obj) store.set_last(obj) return obj def insert(insert_query, cls, add_fn=add_one): """ Create insert function @param insert_query the query for the new insert function @param obj_to_row the function to create the row_parameters from the obj @param cls the class or module which has a obj_to_row function """ def insert_clojure(store, obj_s): """ Add a new event to the store @param store the database store @param obj the new event """ try: return [add_fn(store, cls, insert_query, obj) for obj in obj_s] except TypeError: return add_fn(store, cls, insert_query, obj_s) return insert_clojure def delete(delete_query): """ Create delete function @param delete_query the query for the new delete function """ def delete_clojure(store, obj): """ Delete an event from the store @param store the database store @param obj the event that will be deleted """ store.execute(delete_query, (obj.id, )) return delete_clojure def update(update_query, cls): """ Create update function @param update_query the query for the new delete function @param cls the class or module which has a obj_to_row function """ def update_clojure(store, obj): """ Update the obj @param store the database store @param obj the event that will be updated """ row_dict = cls.obj_to_row(obj) store.execute(update_query, row_dict) return obj return update_clojure def get(select_query, cls): """ Create update function @param select_query the query which is used to select the object with the id @param cls the class or module which has a row_to_obj function """ def get_clojure(store, select_id): """ Get one object by its id @param store the database 
store @param select_id the id of the obect that shall be fetched """ return store.get_one(cls.row_to_obj, select_query, {'id': select_id}) return get_clojure def get_count(count_query): """ Get the number of rows with the SELECT COUNT(*) query """ def get_count_clojure(store): """ Get the count in table of database. """ def tuple_to_count(row, _store): """ Extract the first value of the row. """ (count,) = row return count return store.get_one(tuple_to_count, count_query) return get_count_clojure
bsd-2-clause
WillYingling/protoparse
protoparse.go
555
// Package protoparse declares the parsing interfaces implemented by
// protobuf-backed BGP/MRT message wrappers.
package protoparse

import (
	pbbgp "github.com/CSUNetSec/netsec-protobufs/protocol/bgp"
)

// PbVal is an interface for values that populate an underlying protobuf
// message. All supported protobufs must implement it.
// NOTE(review): the original comment said Parse "takes a byte slice", but
// no slice appears in this signature — presumably the raw bytes are held
// by the implementing type; confirm against the implementations.
type PbVal interface {
	Parse() (PbVal, error)
	String() string
}

// BGPUpdater is a PbVal that also exposes its parsed BGP UPDATE message.
type BGPUpdater interface {
	PbVal
	GetUpdate() *pbbgp.BGPUpdate
}

// BGP4MPHeaderer is a PbVal that also exposes its parsed BGP4MP header.
type BGP4MPHeaderer interface {
	PbVal
	GetHeader() *pbbgp.BGP4MPHeader
}

// MRTHeaderer is a PbVal that also exposes its parsed MRT record header.
type MRTHeaderer interface {
	PbVal
	GetHeader() *pbbgp.MrtHeader
}

// RIBHeaderer is a PbVal that also exposes its parsed RIB message.
type RIBHeaderer interface {
	PbVal
	GetHeader() *pbbgp.RIB
}
bsd-2-clause
sebastienros/jint
Jint.Tests.Test262/test/built-ins/String/prototype/concat/S15.5.4.6_A1_T10.js
1386
// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- info: String.prototype.concat([,[...]]) es5id: 15.5.4.6_A1_T10 description: Call concat([,[...]]) function with object arguments ---*/ var __obj = { toString: function() { return "\u0041"; }, valueOf: function() { return "_\u0041_"; } } var __obj2 = { toString: function() { return true; } } var __obj3 = { toString: function() { return 42; } } var __str = "lego"; ////////////////////////////////////////////////////////////////////////////// //CHECK#1 if (__str.concat(__obj) !== "legoA") { $ERROR('#1: var x; var __obj = {toString:function(){return "\u0041";}, valueOf:function(){return "_\u0041_";}}; var __str = "lego"; __str.concat(__obj) === "legoA". Actual: ' + __str.concat(__obj)); } if (__str.concat(__obj, __obj2, __obj3, x) !== "legoAtrue42undefined") { $ERROR('#2: var x; var __obj = {toString:function(){return "\u0041";}, valueOf:function(){return "_\u0041_";}}; var __obj2 = {toString:function(){return true;}}; var __obj3 = {toString:function(){return 42;}}; var __str = "lego"; __str.concat(__obj, __obj2, __obj3, x) === "legoAtrue42undefined". Actual: ' + __str.concat(__obj, __obj2, __obj3, x)); } // ////////////////////////////////////////////////////////////////////////////// var x;
bsd-2-clause
albertz/music-player
mac/pyobjc-core/Lib/PyObjCTools/TestSupport.py
30527
""" Helper code for implementing unittests. This module is unsupported and is primairily used in the PyObjC testsuite. """ from __future__ import print_function import plistlib as _pl import unittest as _unittest import objc import os as _os import gc as _gc import subprocess as _subprocess import sys as _sys import struct as _struct from distutils.sysconfig import get_config_var as _get_config_var import re as _re import warnings # Ensure that methods in this module get filtered in the tracebacks # from unittest __unittest = False # Have a way to disable the autorelease pool behaviour _usepool = not _os.environ.get('PYOBJC_NO_AUTORELEASE') def _typemap(tp): # XXX: Is this needed? if tp is None: return None return tp.replace(b'_NSRect', b'CGRect').replace(b'_NSPoint', b'CGPoint').replace(b'_NSSize', b'CGSize') def sdkForPython(_cache=[]): """ Return the SDK version used to compile Python itself, or None if no framework was used """ if not _cache: cflags = _get_config_var('CFLAGS') m = _re.search('-isysroot\s+([^ ]*)(\s|$)', cflags) if m is None: _cache.append(None) return None path = m.group(1) if path == '/': result = tuple(map(int, os_release().split('.'))) _cache.append(result) return result bn = _os.path.basename(path) version = bn[6:-4] if version.endswith('u'): version = version[:-1] result = tuple(map(int, version.split('.'))) _cache.append(result) return result return _cache[0] def fourcc(v): """ Decode four-character-code integer definition (e.g. 
'abcd') """ return _struct.unpack('>i', v)[0] def cast_int(value): """ Cast value to 32bit integer Usage: cast_int(1 << 31) == -1 (where as: 1 << 31 == 2147483648) """ value = value & 0xffffffff if value & 0x80000000: value = ~value + 1 & 0xffffffff return -value else: return value def cast_longlong(value): """ Cast value to 64bit integer Usage: cast_longlong(1 << 63) == -1 """ value = value & 0xffffffffffffffff if value & 0x8000000000000000: value = ~value + 1 & 0xffffffffffffffff return -value else: return value def cast_uint(value): """ Cast value to 32bit integer Usage: cast_int(1 << 31) == 2147483648 """ value = value & 0xffffffff return value def cast_ulonglong(value): """ Cast value to 64bit integer """ value = value & 0xffffffffffffffff return value _os_release = None def os_release(): """ Returns '10.5' on all releases of Leopard, simularly for other major releases. """ global _os_release if _os_release is not None: return _os_release pl = _pl.readPlist('/System/Library/CoreServices/SystemVersion.plist') v = pl['ProductVersion'] return '.'.join(v.split('.')[:2]) def is32Bit(): """ Return True if we're running in 32-bit mode """ if _sys.maxsize > 2 ** 32: return False return True def onlyIf(expr, message=None): """ Usage:: class Tests (unittest.TestCase): @onlyIf(1 == 2) def testUnlikely(self): pass The test only runs when the argument expression is true """ def callback(function): if not expr: if hasattr(_unittest, 'skip'): return _unittest.skip(message)(function) return lambda self: None # pragma: no cover (py2.6) else: return function return callback def onlyPython2(function): """ Usage: class Tests (unittest.TestCase): @onlyPython2 def testPython2(self): pass The test is only executed for Python 2.x """ return onlyIf(_sys.version_info[0] == 2, "python2.x only")(function) def onlyPython3(function): """ Usage: class Tests (unittest.TestCase): @onlyPython3 def testPython3(self): pass The test is only executed for Python 3.x """ return 
onlyIf(_sys.version_info[0] == 3, "python3.x only")(function) def onlyOn32Bit(function): """ Usage:: class Tests (unittest.TestCase): @onlyOn32Bit def test32BitOnly(self): pass The test runs only on 32-bit systems """ return onlyIf(is32Bit(), "32-bit only")(function) def onlyOn64Bit(function): """ Usage:: class Tests (unittest.TestCase): @onlyOn64Bit def test64BitOnly(self): pass The test runs only on 64-bit systems """ return onlyIf(not is32Bit(), "64-bit only")(function) def min_os_level(release): """ Usage:: class Tests (unittest.TestCase): @min_os_level('10.6') def testSnowLeopardCode(self): pass """ return onlyIf(os_release() >= release) def max_os_level(release): """ Usage:: class Tests (unittest.TestCase): @max_os_level('10.5') def testUntilLeopard(self): pass """ return onlyIf(os_release() <= release) _poolclass = objc.lookUpClass('NSAutoreleasePool') # NOTE: On at least OSX 10.8 there are multiple proxy classes for CFTypeRef... _nscftype = tuple(cls for cls in objc.getClassList() if 'NSCFType' in cls.__name__) _typealias = {} if not is32Bit(): _typealias[objc._C_LNG_LNG] = objc._C_LNG _typealias[objc._C_ULNG_LNG] = objc._C_ULNG else: # pragma: no cover (32-bit) _typealias[objc._C_LNG] = objc._C_INT _typealias[objc._C_ULNG] = objc._C_UINT class TestCase (_unittest.TestCase): """ A version of TestCase that wraps every test into its own autorelease pool. 
This also adds a number of useful assertion methods """ def assertIsCFType(self, tp, message = None): if not isinstance(tp, objc.objc_class): self.fail(message or "%r is not a CFTypeRef type"%(tp,)) if any(x is tp for x in _nscftype): self.fail(message or "%r is not a unique CFTypeRef type"%(tp,)) #if not issubclass(tp, _nscftype): # self.fail(message or "%r is not a CFTypeRef subclass"%(tp,)) def assertIsOpaquePointer(self, tp, message = None): if not hasattr(tp, "__pointer__"): self.fail(message or "%r is not an opaque-pointer"%(tp,)) if not hasattr(tp, "__typestr__"): self.fail(message or "%r is not an opaque-pointer"%(tp,)) def assertResultIsNullTerminated(self, method, message = None): info = method.__metadata__() if not info.get('retval', {}).get('c_array_delimited_by_null'): self.fail(message or "result of %r is not a null-terminated array"%(method,)) def assertIsNullTerminated(self, method, message = None): info = method.__metadata__() if not info.get('c_array_delimited_by_null') or not info.get('variadic'): self.fail(message or "%s is not a variadic function with a null-terminated list of arguments"%(method,)) def assertArgIsNullTerminated(self, method, argno, message = None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: if not info['arguments'][argno+offset].get('c_array_delimited_by_null'): self.fail(message or "argument %d of %r is not a null-terminated array"%(argno, method)) except (KeyError, IndexError): self.fail(message or "argument %d of %r is not a null-terminated array"%(argno, method)) def assertArgIsVariableSize(self, method, argno, message = None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: if not info['arguments'][argno+offset].get('c_array_of_variable_length'): self.fail(message or "argument %d of %r is not a variable sized array"%(argno, method,)) except (KeyError, IndexError): self.fail(message or "argument %d of %r is not a 
variable sized array"%(argno, method,)) def assertResultIsVariableSize(self, method, message = None): info = method.__metadata__() if not info.get('retval', {}).get('c_array_of_variable_length', False): self.fail(message or "result of %r is not a variable sized array"%(argno, method)) def assertArgSizeInResult(self, method, argno, message = None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: if not info['arguments'][argno+offset].get('c_array_length_in_result'): self.fail(message or "argument %d of %r does not have size in result"%(argno, method)) except (KeyError, IndexError): self.fail(message or "argument %d of %r does not have size in result"%(argno, method)) def assertArgIsPrintf(self, method, argno, message = None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() if not info.get('variadic'): self.fail(message or "%r is not a variadic function"%(method,)) try: if not info['arguments'][argno+offset].get('printf_format'): self.fail(message or "%r argument %d is not a printf format string"%(method, argno)) except (KeyError, IndexError): self.fail(message or "%r argument %d is not a printf format string"%(method, argno)) def assertArgIsCFRetained(self, method, argno, message = None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: if not info['arguments'][argno+offset]['already_cfretained']: self.fail(message or "%r is not cfretained"%(method,)) except (KeyError, IndexError): self.fail(message or "%r is not cfretained"%(method,)) def assertArgIsNotCFRetained(self, method, argno, message = None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: if info['arguments'][argno+offset]['already_cfretained']: self.fail(message or "%r is cfretained"%(method,)) except (KeyError, IndexError): pass def assertResultIsCFRetained(self, method, message = None): info = 
method.__metadata__() if not info.get('retval', {}).get('already_cfretained', False): self.fail(message or "%r is not cfretained"%(method,)) def assertResultIsNotCFRetained(self, method, message = None): info = method.__metadata__() if info.get('retval', {}).get('already_cfretained', False): self.fail(message or "%r is cfretained"%(method,)) def assertArgIsRetained(self, method, argno, message = None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: if not info['arguments'][argno+offset]['already_retained']: self.fail(message or "%r is not retained"%(method,)) except (KeyError, IndexError): self.fail(message or "%r is not retained"%(method,)) def assertArgIsNotRetained(self, method, argno, message = None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: if info['arguments'][argno+offset]['already_retained']: self.fail(message or "%r is retained"%(method,)) except (KeyError, IndexError): pass def assertResultIsRetained(self, method, message = None): info = method.__metadata__() if not info.get('retval', {}).get('already_retained', False): self.fail(message or "%r is not retained"%(method,)) def assertResultIsNotRetained(self, method, message = None): info = method.__metadata__() if info.get('retval', {}).get('already_retained', False): self.fail(message or "%r is retained"%(method,)) def assertResultHasType(self, method, tp, message=None): info = method.__metadata__() type = info.get('retval').get('type', b'v') if type != tp and _typemap(type) != _typemap(tp) \ and _typealias.get(type, type) != _typealias.get(tp, tp): self.fail(message or "result of %r is not of type %r, but %r"%( method, tp, type)) def assertArgHasType(self, method, argno, tp, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: i = info['arguments'][argno+offset] except (KeyError, IndexError): self.fail(message or "arg %d of %s has no 
metadata (or doesn't exist)"%(argno, method)) else: type = i.get('type', b'@') if type != tp and _typemap(type) != _typemap(tp) \ and _typealias.get(type, type) != _typealias.get(tp, tp): self.fail(message or "arg %d of %s is not of type %r, but %r"%( argno, method, tp, type)) def assertArgIsFunction(self, method, argno, sel_type, retained, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: i = info['arguments'][argno+offset] except (KeyError, IndexError): self.fail(message or "arg %d of %s has no metadata (or doesn't exist)"%(argno, method)) else: type = i.get('type', b'@') if type != b'^?': self.fail(message or "arg %d of %s is not of type function_pointer"%( argno, method)) st = i.get('callable') if st is None: self.fail(message or "arg %d of %s is not of type function_pointer"%( argno, method)) try: iface = st['retval']['type'] for a in st['arguments']: iface += a['type'] except KeyError: self.fail(message or "arg %d of %s is a function pointer with incomplete type information"%(argno, method)) if iface != sel_type: self.fail(message or "arg %d of %s is not a function_pointer with type %r, but %r"%(argno, method, sel_type, iface)) st = info['arguments'][argno+offset].get('callable_retained', False) if bool(st) != bool(retained): self.fail(message or "arg %d of %s; retained: %r, expected: %r"%( argno, method, st, retained)) def assertResultIsFunction(self, method, sel_type, message=None): info = method.__metadata__() try: i = info['retval'] except (KeyError, IndexError): self.fail(message or "result of %s has no metadata (or doesn't exist)"%(method,)) else: type = i.get('type', b'@') if type != b'^?': self.fail(message or "result of %s is not of type function_pointer"%( method, )) st = i.get('callable') if st is None: self.fail(message or "result of %s is not of type function_pointer"%( method, )) try: iface = st['retval']['type'] for a in st['arguments']: iface += a['type'] except KeyError: 
self.fail(message or "result of %s is a function pointer with incomplete type information"%(method,)) if iface != sel_type: self.fail(message or "result of %s is not a function_pointer with type %r, but %r"%(method, sel_type, iface)) def assertArgIsBlock(self, method, argno, sel_type, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: type = info['arguments'][argno+offset]['type'] except (IndexError, KeyError): self.fail("arg %d of %s does not exist"%(argno, method)) if type != b'@?': self.fail(message or "arg %d of %s is not of type block: %s"%( argno, method, type)) st = info['arguments'][argno+offset].get('callable') if st is None: self.fail(message or "arg %d of %s is not of type block: no callable"%( argno, method)) try: iface = st['retval']['type'] if st['arguments'][0]['type'] != b'^v': self.fail(message or "arg %d of %s has an invalid block signature"%(argno, method)) for a in st['arguments'][1:]: iface += a['type'] except KeyError: self.fail(message or "result of %s is a block pointer with incomplete type information"%(method,)) if iface != sel_type: self.fail(message or "arg %d of %s is not a block with type %r, but %r"%(argno, method, sel_type, iface)) def assertResultIsBlock(self, method, sel_type, message=None): info = method.__metadata__() try: type = info['retval']['type'] if type != b'@?': self.fail(message or "result of %s is not of type block: %s"%( method, type)) except KeyError: self.fail(message or "result of %s is not of type block: %s"%( method, b'v')) st = info['retval'].get('callable') if st is None: self.fail(message or "result of %s is not of type block: no callable specified"%( method)) try: iface = st['retval']['type'] if st['arguments'][0]['type'] != b'^v': self.fail(message or "result %s has an invalid block signature"%(method)) for a in st['arguments'][1:]: iface += a['type'] except KeyError: self.fail(message or "result of %s is a block pointer with incomplete type 
information"%(method,)) if iface != sel_type: self.fail(message or "result of %s is not a block with type %r, but %r"%(method, sel_type, iface)) def assertArgIsSEL(self, method, argno, sel_type, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: i = info['arguments'][argno+offset] except (KeyError, IndexError): self.fail(message or "arg %d of %s has no metadata (or doesn't exist)"%(argno, method)) type = i.get('type', b'@') if type != objc._C_SEL: self.fail(message or "arg %d of %s is not of type SEL"%( argno, method)) st = i.get('sel_of_type') if st != sel_type and _typemap(st) != _typemap(sel_type): self.fail(message or "arg %d of %s doesn't have sel_type %r but %r"%( argno, method, sel_type, st)) def assertResultIsBOOL(self, method, message=None): info = method.__metadata__() type = info['retval']['type'] if type != objc._C_NSBOOL: self.fail(message or "result of %s is not of type BOOL, but %r"%( method, type)) def assertArgIsBOOL(self, method, argno, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() type = info['arguments'][argno+offset]['type'] if type != objc._C_NSBOOL: self.fail(message or "arg %d of %s is not of type BOOL, but %r"%( argno, method, type)) def assertArgIsFixedSize(self, method, argno, count, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: cnt = info['arguments'][argno+offset]['c_array_of_fixed_length'] if cnt != count: self.fail(message or "arg %d of %s is not a C-array of length %d"%( argno, method, count)) except (KeyError, IndexError): self.fail(message or "arg %d of %s is not a C-array of length %d"%( argno, method, count)) def assertResultIsFixedSize(self, method, count, message=None): info = method.__metadata__() try: cnt = info['retval']['c_array_of_fixed_length'] if cnt != count: self.fail(message or "result of %s is not a C-array of length %d"%( 
method, count)) except (KeyError, IndexError): self.fail(message or "result of %s is not a C-array of length %d"%( method, count)) def assertArgSizeInArg(self, method, argno, count, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() try: cnt = info['arguments'][argno+offset]['c_array_length_in_arg'] except (KeyError, IndexError): self.fail(message or "arg %d of %s is not a C-array of with length in arg %s"%( argno, method, count)) if isinstance(count, (list, tuple)): count2 = tuple(x + offset for x in count) else: count2 = count + offset if cnt != count2: self.fail(message or "arg %d of %s is not a C-array of with length in arg %s"%( argno, method, count)) def assertResultSizeInArg(self, method, count, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() cnt = info['retval']['c_array_length_in_arg'] if cnt != count + offset: self.fail(message or "result %s is not a C-array of with length in arg %d"%( method, count)) def assertArgIsOut(self, method, argno, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() type = info['arguments'][argno+offset]['type'] if not type.startswith(b'o^'): self.fail(message or "arg %d of %s is not an 'out' argument"%( argno, method)) def assertArgIsInOut(self, method, argno, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() type = info['arguments'][argno+offset]['type'] if not type.startswith(b'N^'): self.fail(message or "arg %d of %s is not an 'inout' argument"%( argno, method)) def assertArgIsIn(self, method, argno, message=None): if isinstance(method, objc.selector): offset = 2 else: offset = 0 info = method.__metadata__() type = info['arguments'][argno+offset]['type'] if not type.startswith(b'n^'): self.fail(message or "arg %d of %s is not an 'in' argument"%( argno, method)) # # Addition assert methods, 
all of them should only be necessary for # python 2.7 or later # if not hasattr(_unittest.TestCase, 'assertItemsEqual'): # pragma: no cover def assertItemsEqual(self, seq1, seq2, message=None): # This is based on unittest.util._count_diff_all_purpose from # Python 2.7 s, t = list(seq1), list(seq2) m, n = len(s), len(t) NULL = object() result = [] for i, elem in enumerate(s): if elem is NULL: continue cnt_s = cnt_t = 0 for j in range(i, m): if s[j] == elem: cnt_s += 1 s[j] = NULL for j, other_elem in enumerate(t): if other_elem == elem: cnt_t += 1 t[j] = NULL if cnt_s != cnt_t: result.append((cnt_s, cnt_t, elem)) for i, elem in enumerate(t): if elem is NULL: continue cnt_t = 0 for j in range(i, n): if t[j] == elem: cnt_t += 1 t[j] = NULL result.append((0, cnt_t, elem)) if result: for actual, expected, value in result: print("Seq1 %d, Seq2: %d value: %r"%(actual, expected, value)) self.fail(message or ("sequences do not contain the same items:" + "\n".join(["Seq1 %d, Seq2: %d value: %r"%(item) for item in result]))) if not hasattr(_unittest.TestCase, 'assertIs'): # pragma: no cover def assertIs(self, value, test, message = None): if value is not test: self.fail(message or "%r (id=%r) is not %r (id=%r) "%(value, id(value), test, id(test))) if not hasattr(_unittest.TestCase, 'assertIsNot'): # pragma: no cover def assertIsNot(self, value, test, message = None): if value is test: self.fail(message or "%r is %r"%(value, test)) if not hasattr(_unittest.TestCase, 'assertIsNone'): # pragma: no cover def assertIsNone(self, value, message = None): self.assertIs(value, None) if not hasattr(_unittest.TestCase, 'assertIsNotNone'): # pragma: no cover def assertIsNotNone(self, value, message = None): if value is None: sel.fail(message, "%r is not %r"%(value, test)) if not hasattr(_unittest.TestCase, 'assertStartsWith'): # pragma: no cover def assertStartswith(self, value, check, message=None): if not value.startswith(check): self.fail(message or "not %r.startswith(%r)"%(value, 
check)) if not hasattr(_unittest.TestCase, 'assertHasAttr'): # pragma: no cover def assertHasAttr(self, value, key, message=None): if not hasattr(value, key): self.fail(message or "%s is not an attribute of %r"%(key, value)) if not hasattr(_unittest.TestCase, 'assertNotHasAttr'): # pragma: no cover def assertNotHasAttr(self, value, key, message=None): if hasattr(value, key): self.fail(message or "%s is an attribute of %r"%(key, value)) if not hasattr(_unittest.TestCase, 'assertIsInstance'): # pragma: no cover def assertIsInstance(self, value, types, message=None): if not isinstance(value, types): self.fail(message or "%s is not an instance of %r but %s"%(value, types, type(value))) if not hasattr(_unittest.TestCase, 'assertIsNotInstance'): # pragma: no cover def assertIsNotInstance(self, value, types, message=None): if isinstance(value, types): self.fail(message or "%s is an instance of %r"%(value, types)) if not hasattr(_unittest.TestCase, 'assertIn'): # pragma: no cover def assertIn(self, value, seq, message=None): if value not in seq: self.fail(message or "%r is not in %r"%(value, seq)) if not hasattr(_unittest.TestCase, 'assertNotIn'): # pragma: no cover def assertNotIn(self, value, seq, message=None): if value in seq: self.fail(message or "%r is in %r"%(value, seq)) if not hasattr(_unittest.TestCase, 'assertGreaterThan'): # pragma: no cover def assertGreaterThan(self, val, test, message=None): if not (val > test): self.fail(message or '%r <= %r'%(val, test)) if not hasattr(_unittest.TestCase, 'assertGreaterEqual'): # pragma: no cover def assertGreaterEqual(self, val, test, message=None): if not (val >= test): self.fail(message or '%r < %r'%(val, test)) if not hasattr(_unittest.TestCase, 'assertLessThan'): # pragma: no cover def assertLessThan(self, val, test, message=None): if not (val < test): self.fail(message or '%r >= %r'%(val, test)) if not hasattr(_unittest.TestCase, 'assertLessEqual'): # pragma: no cover def assertLessEqual(self, val, test, 
message=None): if not (val <= test): self.fail(message or '%r > %r'%(val, test)) if not hasattr(_unittest.TestCase, "assertAlmostEquals"): # pragma: no cover def assertAlmostEquals(self, val1, val2, message=None): self.failUnless(abs (val1 - val2) < 0.00001, message or 'abs(%r - %r) >= 0.00001'%(val1, val2)) def run(self, *args): """ Run the test, same as unittest.TestCase.run, but every test is run with a fresh autorelease pool. """ if _usepool: p = _poolclass.alloc().init() else: p = 1 try: _unittest.TestCase.run(self, *args) finally: _gc.collect() del p _gc.collect() main = _unittest.main if hasattr(_unittest, 'expectedFailure'): expectedFailure = _unittest.expectedFailure else: # pragma: no cover (py2.6) def expectedFailure(func): def test(self): try: func(self) except AssertionError: return self.fail("test unexpectedly passed") test.__name__ == func.__name__ return test # XXX: filterwarnings relies on implementation details of # the warnings module class filterWarnings (object): def __init__(self, kind, category): self._kind = kind self._category = category def __enter__(self): warnings.filterwarnings(self._kind, category=self._category) def __exit__(self, type, value, tp): del warnings.filters[0]
bsd-2-clause
farseerfc/jgments
java/com/google/jgments/TokenMatcher.java
2663
// Copyright 2010 Google Inc. All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package com.google.jgments;

import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Holds a regex to apply and the actions to be performed if the regex matches.
 * Instances are immutable: the pattern is compiled once in the constructor and
 * the two actions are final.
 */
public class TokenMatcher {

  /** The compiled regular expression to attempt to match against input text. */
  private final Pattern pattern;

  /** The token-yielding action to apply if the regex matches. */
  private final TokenActions.Action tokenAction;

  /** The state transition action to apply if the regex matches. */
  private final StateActions.Action stateAction;

  /**
   * Compiles {@code regex} (always with {@link Pattern#MULTILINE}) and stores
   * the actions to run on a match.
   *
   * @param regex the regular expression to compile
   * @param tokenAction the token-yielding action applied on a match
   * @param stateAction the state transition applied on a match
   */
  public TokenMatcher(String regex,
      TokenActions.Action tokenAction,
      StateActions.Action stateAction) {
    // TODO(jacobly): support other regex types besides multiline.
    this.pattern = Pattern.compile(regex, Pattern.MULTILINE);
    this.tokenAction = tokenAction;
    this.stateAction = stateAction;
  }

  /**
   * Returns a fresh {@link Matcher} of this matcher's pattern over
   * {@code toMatch}.
   */
  public Matcher getMatcher(String toMatch) {
    return pattern.matcher(toMatch);
  }

  /** Returns the token-yielding action for this matcher. */
  public TokenActions.Action getTokenAction() {
    return tokenAction;
  }

  /** Returns the state transition action for this matcher. */
  public StateActions.Action getStateAction() {
    return stateAction;
  }
}
bsd-2-clause
aavanian/bokeh
bokeh/models/filters.py
6331
from __future__ import absolute_import import inspect from textwrap import dedent from types import FunctionType from ..core.properties import Bool, Dict, Either, Int, Seq, String, AnyRef from ..model import Model from ..util.dependencies import import_required from ..util.compiler import nodejs_compile, CompilationError class Filter(Model): ''' A Filter model represents a filtering operation that returns a row-wise subset of data when applied to a ColumnDataSource. ''' filter = Either(Seq(Int), Seq(Bool), help=""" A list that can be either integer indices or booleans representing a row-wise subset of data. """) def __init__(self, *args, **kw): if len(args) == 1 and "filter" not in kw: kw["filter"] = args[0] super(Filter, self).__init__(**kw) class IndexFilter(Filter): ''' An IndexFilter filters data by returning the subset of data at a given set of indices. ''' indices = Seq(Int, help=""" A list of integer indices representing the subset of data to select. """) def __init__(self, *args, **kw): if len(args) == 1 and "indices" not in kw: kw["indices"] = args[0] super(IndexFilter, self).__init__(**kw) class BooleanFilter(Filter): ''' A BooleanFilter filters data by returning the subset of data corresponding to indices where the values of the booleans array is True. ''' booleans = Seq(Bool, help=""" A list of booleans indicating which rows of data to select. """) def __init__(self, *args, **kw): if len(args) == 1 and "booleans" not in kw: kw["booleans"] = args[0] super(BooleanFilter, self).__init__(**kw) class GroupFilter(Filter): ''' A GroupFilter represents the rows of a ColumnDataSource where the values of the categorical column column_name match the group variable. ''' column_name = String(help=""" The name of the column to perform the group filtering operation on. """) group = String(help=""" The value of the column indicating the rows of data to keep. 
""") def __init__(self, *args, **kw): if len(args) == 2 and "column_name" not in kw and "group" not in kw: kw["column_name"] = args[0] kw["group"] = args[1] super(GroupFilter, self).__init__(**kw) class CustomJSFilter(Filter): ''' Filter data sources with a custom defined JavaScript function. .. warning:: The explicit purpose of this Bokeh Model is to embed *raw JavaScript code* for a browser to execute. If any part of the code is derived from untrusted user inputs, then you must take appropriate care to sanitize the user input prior to passing to Bokeh. ''' @classmethod def from_py_func(cls, func): ''' Create a CustomJSFilter instance from a Python function. The fucntion is translated to JavaScript using PScript. The ``func`` function namespace will contain the variable ``source`` at render time. This will be the data source associated with the CDSView that this filter is added to. ''' if not isinstance(func, FunctionType): raise ValueError('CustomJSFilter.from_py_func only accepts function objects.') pscript = import_required( 'pscript', dedent("""\ To use Python functions for CustomJSFilter, you need PScript '("conda install -c conda-forge pscript" or "pip install pscript")""") ) argspec = inspect.getargspec(func) default_names = argspec.args default_values = argspec.defaults or [] if len(default_names) - len(default_values) != 0: raise ValueError("Function may only contain keyword arguments.") # should the following be all of the values need to be Models? if default_values and not any([isinstance(value, Model) for value in default_values]): raise ValueError("Default value must be a plot object.") func_kwargs = dict(zip(default_names, default_values)) code = pscript.py2js(func, 'filter') + 'return filter(%s);\n' % ', '.join(default_names) return cls(code=code, args=func_kwargs) @classmethod def from_coffeescript(cls, code, args={}): ''' Create a CustomJSFilter instance from CoffeeScript snippets. 
The function bodies are translated to JavaScript functions using node and therefore require return statements. The ``code`` function namespace will contain the variable ``source`` at render time. This will be the data source associated with the CDSView that this filter is added to. ''' compiled = nodejs_compile(code, lang="coffeescript", file="???") if "error" in compiled: raise CompilationError(compiled.error) else: return cls(code=compiled.code, args=args) args = Dict(String, AnyRef, help=""" A mapping of names to Python objects. In particular those can be bokeh's models. These objects are made available to the callback's code snippet as the values of named parameters to the callback. """) code = String(default="", help=""" A snippet of JavaScript code to filter data contained in a columnar data source. The code is made into the body of a function, and all of of the named objects in ``args`` are available as parameters that the code can use. The variable ``source`` will contain the data source that is associated with the CDSView this filter is added to. The code should either return the indices of the subset or an array of booleans to use to subset data source rows. Example: .. code-block:: javascript code = ''' var indices = []; for (var i = 0; i <= source.data['some_column'].length; i++){ if (source.data['some_column'][i] == 'some_value') { indices.push(i) } } return indices; ''' .. note:: Use ``CustomJS.from_coffeescript()`` for CoffeeScript source code. """) use_strict = Bool(default=False, help=""" Enables or disables automatic insertion of ``"use strict";`` into ``code``. """)
bsd-3-clause
yorg76/test
modules/barcode/classes/Base/QRBarcode.php
4718
<?php defined('SYSPATH') or die('No direct script access.');

/**
 * Copyright 2011-2012 Spadefoot
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// NOTE(review): "GoggleBarcodeGenerator.php" looks like a misspelling of
// "Google", but it must match the actual vendor file name on disk — confirm
// against the vendor directory before renaming.
include_once(MODPATH.'barcode/vendor/google_chart_api/GoggleBarcodeGenerator.php');

/**
 * This class generates a Quick Response (QR) barcode.
 *
 * The image itself is produced remotely by the Google Chart API; this class
 * only stores the generated chart URL and streams/renders/saves its contents.
 *
 * @package Barcode
 * @category Creator
 * @version 2012-01-09
 *
 * @see http://en.wikipedia.org/wiki/QR_code
 */
abstract class Base_QRBarcode extends Kohana_Core implements Barcode_Interface {

	/**
	 * This variable stores the file's URI (a Google Chart API URL until
	 * save() replaces it with a local path).
	 *
	 * @access protected
	 * @var string
	 */
	protected $file = NULL;

	/**
	 * This variable stores the height and width of the (square) image,
	 * in pixels.
	 *
	 * @access protected
	 * @var integer
	 */
	protected $size = NULL;

	/**
	 * Initializes this bar code creator.
	 *
	 * @access public
	 * @param $data string      the data string to be encoded
	 * @param $size integer     the size of the barcode
	 * @param $margin integer   the margin around the barcode
	 */
	public function __construct($data, $size = 150, $margin = 4) {
		// 'L' is the lowest QR error-correction level; data is UTF-8 encoded.
		$this->file = GoogleBarcodeGenerator::qr_code($data, $size, 'UTF-8', 'L', $margin);
		$this->size = $size;
	}

	/**
	 * This function controls which properties are accessible.
	 *
	 * 'width' and 'height' both map to $size because QR codes are square.
	 *
	 * @access public
	 * @param string $key       the name of the property
	 * @return mixed            the value of the property, or NULL if unknown
	 */
	public function __get($key) {
		switch ($key) {
			case 'file':
				return $this->file;
			case 'width':
			case 'height':
				return $this->size;
			default:
				return NULL;
		}
	}

	/**
	 * This function sends back the bar code image and terminates the request.
	 *
	 * @access public
	 * @param string $file_name  the file name for a download attachment;
	 *                           when NULL the image is served inline
	 */
	public function output($file_name = NULL) {
		$barcode = file_get_contents($this->file);
		header("Cache-Control: no-cache, must-revalidate");
		// A date in the past forces the client to treat the image as expired.
		header("Expires: Sat, 26 Jul 1997 05:00:00 GMT");
		header('Content-Type: image/png');
		if (is_string($file_name)) {
			header("Content-Disposition: attachment; filename=\"{$file_name}\"");
		}
		echo $barcode;
		exit();
	}

	/**
	 * This function renders the HTML image tag for displaying the bar code.
	 *
	 * @access public
	 * @param array $attributes  any additional attributes to be added
	 *                           to the HTML image tag
	 * @return string            the HTML image tag
	 */
	public function render($attributes = array()) {
		$properties = '';
		if (is_array($attributes)) {
			foreach ($attributes as $key => $val) {
				$properties .= "{$key}=\"{$val}\" ";
			}
		}
		$html = "<img src=\"{$this->file}\" {$properties}/>";
		return $html;
	}

	/**
	 * This function saves the image of the QR code to disk.
	 *
	 * After saving, $file points at the local copy instead of the remote URL.
	 *
	 * @access public
	 * @param string $file      the URI for where the image will be stored
	 */
	public function save($file) {
		file_put_contents($file, file_get_contents($this->file));
		$this->file = $file;
	}

	/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

	/**
	 * This function will encode a data string using default size/margin and
	 * return the ready-to-embed image tag.
	 *
	 * @access public
	 * @static
	 * @param $data string      the data string to be encoded
	 * @param array $attributes any additional attributes to be added
	 *                          to the HTML image tag
	 * @return string           the HTML image tag
	 */
	public static function encode($data, $attributes = array()) {
		$barcode = new QRBarcode($data);
		return $barcode->render($attributes);
	}

	/**
	 * This function returns an instance of this class.
	 *
	 * @access public
	 * @static
	 * @param $data string      the data string to be encoded
	 * @param $size integer     the size of the barcode
	 * @param $margin integer   the margin around the barcode
	 * @return QRBarcode        an instance of this class
	 */
	public static function factory($data, $size = 150, $margin = 4) {
		return new QRBarcode($data, $size, $margin);
	}

}
?>
bsd-3-clause
murrayrowan/hacktools
public/assets/jquery.ui.datepicker-sk-9f078d1181a665eb9bc876786f7b276d.js
818
/* Slovak initialisation for the jQuery UI date picker plugin. */
/* Written by Vojtech Rinik (vojto@hmm.sk). */
jQuery(function ($) {
  // Register the Slovak locale and make it the active default.
  $.datepicker.regional.sk = {
    closeText: "Zavrieť",
    prevText: "&#x3C;Predchádzajúci",
    nextText: "Nasledujúci&#x3E;",
    currentText: "Dnes",
    monthNames: [
      "január", "február", "marec", "apríl", "máj", "jún",
      "júl", "august", "september", "október", "november", "december",
    ],
    monthNamesShort: [
      "Jan", "Feb", "Mar", "Apr", "Máj", "Jún",
      "Júl", "Aug", "Sep", "Okt", "Nov", "Dec",
    ],
    dayNames: ["nedeľa", "pondelok", "utorok", "streda", "štvrtok", "piatok", "sobota"],
    dayNamesShort: ["Ned", "Pon", "Uto", "Str", "Štv", "Pia", "Sob"],
    dayNamesMin: ["Ne", "Po", "Ut", "St", "Št", "Pia", "So"],
    weekHeader: "Ty",
    dateFormat: "dd.mm.yy",
    firstDay: 1,
    isRTL: !1,
    showMonthAfterYear: !1,
    yearSuffix: "",
  };
  $.datepicker.setDefaults($.datepicker.regional.sk);
});
bsd-3-clause
Bysmyyr/chromium-crosswalk
net/spdy/fuzzing/hpack_fuzz_util_test.cc
5092
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "net/spdy/fuzzing/hpack_fuzz_util.h" #include <map> #include "base/base_paths.h" #include "base/files/file.h" #include "base/files/file_util.h" #include "base/path_service.h" #include "net/spdy/spdy_test_utils.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" namespace net { namespace test { using base::StringPiece; using std::map; using std::string; using test::a2b_hex; TEST(HpackFuzzUtilTest, GeneratorContextInitialization) { HpackFuzzUtil::GeneratorContext context; HpackFuzzUtil::InitializeGeneratorContext(&context); // Context was seeded with initial name & value fixtures. EXPECT_LT(0u, context.names.size()); EXPECT_LT(0u, context.values.size()); } TEST(HpackFuzzUtil, GeneratorContextExpansion) { HpackFuzzUtil::GeneratorContext context; SpdyHeaderBlock headers = HpackFuzzUtil::NextGeneratedHeaderSet(&context); // Headers were generated, and the generator context was expanded. EXPECT_LT(0u, headers.size()); EXPECT_LT(0u, context.names.size()); EXPECT_LT(0u, context.values.size()); } // TODO(jgraettinger): A better test would mock a random generator and // evaluate SampleExponential along fixed points of the [0,1] domain. TEST(HpackFuzzUtilTest, SampleExponentialRegression) { // TODO(jgraettinger): Upstream uses a seeded random generator here to pin // the behavior of SampleExponential. Chromium's random generation utilities // are strongly secure, but provide no way to seed the generator. 
for (size_t i = 0; i != 100; ++i) { EXPECT_GE(30u, HpackFuzzUtil::SampleExponential(10, 30)); } } TEST(HpackFuzzUtilTest, ParsesSequenceOfHeaderBlocks) { char fixture[] = "\x00\x00\x00\x05""aaaaa" "\x00\x00\x00\x04""bbbb" "\x00\x00\x00\x03""ccc" "\x00\x00\x00\x02""dd" "\x00\x00\x00\x01""e" "\x00\x00\x00\x00""" "\x00\x00\x00\x03""fin"; HpackFuzzUtil::Input input; input.input.assign(fixture, arraysize(fixture) - 1); StringPiece block; EXPECT_TRUE(HpackFuzzUtil::NextHeaderBlock(&input, &block)); EXPECT_EQ("aaaaa", block); EXPECT_TRUE(HpackFuzzUtil::NextHeaderBlock(&input, &block)); EXPECT_EQ("bbbb", block); EXPECT_TRUE(HpackFuzzUtil::NextHeaderBlock(&input, &block)); EXPECT_EQ("ccc", block); EXPECT_TRUE(HpackFuzzUtil::NextHeaderBlock(&input, &block)); EXPECT_EQ("dd", block); EXPECT_TRUE(HpackFuzzUtil::NextHeaderBlock(&input, &block)); EXPECT_EQ("e", block); EXPECT_TRUE(HpackFuzzUtil::NextHeaderBlock(&input, &block)); EXPECT_EQ("", block); EXPECT_TRUE(HpackFuzzUtil::NextHeaderBlock(&input, &block)); EXPECT_EQ("fin", block); EXPECT_FALSE(HpackFuzzUtil::NextHeaderBlock(&input, &block)); } TEST(HpackFuzzUtilTest, SerializedHeaderBlockPrefixes) { EXPECT_EQ(string("\x00\x00\x00\x00", 4), HpackFuzzUtil::HeaderBlockPrefix(0)); EXPECT_EQ(string("\x00\x00\x00\x05", 4), HpackFuzzUtil::HeaderBlockPrefix(5)); EXPECT_EQ(string("\x4f\xb3\x0a\x91", 4), HpackFuzzUtil::HeaderBlockPrefix(1337133713)); } TEST(HpackFuzzUtilTest, PassValidInputThroughAllStages) { // Example lifted from HpackDecoderTest.SectionD4RequestHuffmanExamples. 
string input = a2b_hex("828684418cf1e3c2e5f23a6ba0ab90f4" "ff"); HpackFuzzUtil::FuzzerContext context; HpackFuzzUtil::InitializeFuzzerContext(&context); EXPECT_TRUE( HpackFuzzUtil::RunHeaderBlockThroughFuzzerStages(&context, input)); SpdyHeaderBlock expect; expect[":method"] = "GET"; expect[":scheme"] = "http"; expect[":path"] = "/"; expect[":authority"] = "www.example.com"; EXPECT_EQ(expect, context.third_stage->decoded_block()); } TEST(HpackFuzzUtilTest, ValidFuzzExamplesRegressionTest) { base::FilePath source_root; ASSERT_TRUE(PathService::Get(base::DIR_SOURCE_ROOT, &source_root)); // Load the example fixtures versioned with the source tree. HpackFuzzUtil::Input input; ASSERT_TRUE(base::ReadFileToString( source_root.Append(FILE_PATH_LITERAL("net")) .Append(FILE_PATH_LITERAL("data")) .Append(FILE_PATH_LITERAL("spdy_tests")) .Append(FILE_PATH_LITERAL("examples_07.hpack")), &input.input)); HpackFuzzUtil::FuzzerContext context; HpackFuzzUtil::InitializeFuzzerContext(&context); StringPiece block; while (HpackFuzzUtil::NextHeaderBlock(&input, &block)) { // As these are valid examples, all fuzz stages should succeed. EXPECT_TRUE(HpackFuzzUtil::RunHeaderBlockThroughFuzzerStages( &context, block)); } } TEST(HpackFuzzUtilTest, FlipBitsMutatesBuffer) { char buffer[] = "testbuffer1234567890"; string unmodified(buffer, arraysize(buffer) - 1); EXPECT_EQ(unmodified, buffer); HpackFuzzUtil::FlipBits(reinterpret_cast<uint8*>(buffer), arraysize(buffer) - 1, 1); EXPECT_NE(unmodified, buffer); } } // namespace test } // namespace net
bsd-3-clause
ChromiumWebApps/chromium
chrome/common/extensions/docs/server2/local_file_system.py
3205
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import sys

from docs_server_utils import StringIdentity
from file_system import FileSystem, FileNotFoundError, StatInfo
from future import Gettable, Future
from path_util import AssertIsDirectory, AssertIsValid
from test_util import ChromiumPath


def _ConvertToFilepath(path):
  '''Converts a posix-style path to the host OS's separator.'''
  return path.replace('/', os.sep)


def _ConvertFromFilepath(path):
  '''Converts a host-OS path back to posix-style separators.'''
  return path.replace(os.sep, '/')


def _ReadFile(filename):
  '''Returns the raw bytes of |filename|, mapping IOError to
  FileNotFoundError so callers see a uniform error type.
  '''
  try:
    with open(filename, 'rb') as f:
      return f.read()
  except IOError as e:
    raise FileNotFoundError('Read failed for %s: %s' % (filename, e))


def _ListDir(dir_name):
  '''Lists |dir_name| non-recursively as posix-style names, appending '/'
  to subdirectories and skipping dotfiles. Raises FileNotFoundError if the
  directory cannot be listed.
  '''
  all_files = []
  try:
    files = os.listdir(dir_name)
  except OSError as e:
    raise FileNotFoundError('os.listdir failed for %s: %s' % (dir_name, e))
  for os_path in files:
    posix_path = _ConvertFromFilepath(os_path)
    # Skip hidden entries (e.g. .svn, .git).
    if os_path.startswith('.'):
      continue
    if os.path.isdir(os.path.join(dir_name, os_path)):
      all_files.append(posix_path + '/')
    else:
      all_files.append(posix_path)
  return all_files


def _CreateStatInfo(path):
  '''Builds a StatInfo for |path| from filesystem mtimes. For a directory,
  the version is the max of its own mtime and its immediate children's
  (an approximation of subversion semantics -- see comment below).
  '''
  try:
    path_mtime = os.stat(path).st_mtime
    if os.path.isdir(path):
      child_versions = dict((_ConvertFromFilepath(filename),
                             os.stat(os.path.join(path, filename)).st_mtime)
                            for filename in os.listdir(path))
      # This file system stat mimics subversion, where the stat of directories
      # is max(file stats). That means we need to recursively check the whole
      # file system tree :\ so approximate that by just checking this dir.
      version = max([path_mtime] + child_versions.values())
    else:
      child_versions = None
      version = path_mtime
    return StatInfo(version, child_versions)
  except OSError as e:
    raise FileNotFoundError('os.stat failed for %s: %s' % (path, e))


class LocalFileSystem(FileSystem):
  '''FileSystem implementation which fetches resources from the local
  filesystem.
  '''

  def __init__(self, base_path):
    # All reads/stats are resolved relative to |base_path|, stored with
    # host-OS separators.
    AssertIsDirectory(base_path)
    self._base_path = _ConvertToFilepath(base_path)

  @staticmethod
  def Create(*path):
    '''Creates a LocalFileSystem rooted at the given path components,
    resolved relative to the Chromium source root.
    '''
    return LocalFileSystem(ChromiumPath(*path))

  def Read(self, paths):
    '''Lazily reads |paths|: directory paths ('' or trailing '/') map to
    listings, file paths to raw contents. Work is deferred until the
    returned Future is resolved.
    '''
    def resolve():
      result = {}
      for path in paths:
        AssertIsValid(path)
        full_path = os.path.join(self._base_path,
                                 _ConvertToFilepath(path).lstrip(os.sep))
        if path == '' or path.endswith('/'):
          result[path] = _ListDir(full_path)
        else:
          result[path] = _ReadFile(full_path)
      return result
    return Future(delegate=Gettable(resolve))

  def Refresh(self):
    # Nothing to refresh for a local filesystem; return an already-resolved
    # Future to satisfy the FileSystem interface.
    return Future(value=())

  def Stat(self, path):
    AssertIsValid(path)
    full_path = os.path.join(self._base_path,
                             _ConvertToFilepath(path).lstrip(os.sep))
    return _CreateStatInfo(full_path)

  def GetIdentity(self):
    # Identity is stable per base path, so equal roots share caches.
    return '@'.join((self.__class__.__name__,
                     StringIdentity(self._base_path)))

  def __repr__(self):
    return 'LocalFileSystem(%s)' % self._base_path
bsd-3-clause
tomocchino/react
packages/react-devtools-shared/src/types.js
3297
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

export type Wall = {|
  // `listen` returns the "unlisten" function.
  listen: (fn: Function) => Function,
  send: (event: string, payload: any, transferable?: Array<any>) => void,
|};

// WARNING
// The values below are referenced by ComponentFilters (which are saved via localStorage).
// Do not change them or it will break previously saved user customizations.
// If new element types are added, use new numbers rather than re-ordering existing ones.
//
// Changing these types is also a backwards breaking change for the standalone shell,
// since the frontend and backend must share the same values-
// and the backend is embedded in certain environments (like React Native).
export const ElementTypeClass = 1;
export const ElementTypeContext = 2;
export const ElementTypeFunction = 5;
export const ElementTypeForwardRef = 6;
export const ElementTypeHostComponent = 7;
export const ElementTypeMemo = 8;
export const ElementTypeOtherOrUnknown = 9;
export const ElementTypeProfiler = 10;
export const ElementTypeRoot = 11;
export const ElementTypeSuspense = 12;
export const ElementTypeSuspenseList = 13;

// Different types of elements displayed in the Elements tree.
// These types may be used to visually distinguish types,
// or to enable/disable certain functionality.
export type ElementType = 1 | 2 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13;

// WARNING
// The values below are referenced by ComponentFilters (which are saved via localStorage).
// Do not change them or it will break previously saved user customizations.
// If new filter types are added, use new numbers rather than re-ordering existing ones.
export const ComponentFilterElementType = 1;
export const ComponentFilterDisplayName = 2;
export const ComponentFilterLocation = 3;
export const ComponentFilterHOC = 4;

export type ComponentFilterType = 1 | 2 | 3 | 4;

// Hide all elements of types in this Set.
// We hide host components only by default.
export type ElementTypeComponentFilter = {|
  isEnabled: boolean,
  type: 1,
  value: ElementType,
|};

// Hide all elements with displayNames or paths matching one or more of the RegExps in this Set.
// Path filters are only used when elements include debug source location.
export type RegExpComponentFilter = {|
  isEnabled: boolean,
  isValid: boolean,
  type: 2 | 3,
  value: string,
|};

export type BooleanComponentFilter = {|
  isEnabled: boolean,
  isValid: boolean,
  type: 4,
|};

export type ComponentFilter =
  | BooleanComponentFilter
  | ElementTypeComponentFilter
  | RegExpComponentFilter;

export type HookName = string | null;
// Map of hook source ("<filename>:<line-number>:<column-number>") to name.
// Hook source is used instead of the hook itself because the latter is not stable between element inspections.
// We use a Map rather than an Array because of nested hooks and traversal ordering.
export type HookSourceLocationKey = string;
export type HookNames = Map<HookSourceLocationKey, HookName>;

export type LRUCache<K, V> = {|
  get: (key: K) => V,
  has: (key: K) => boolean,
  reset: () => void,
  set: (key: K, value: V) => void,
|};
bsd-3-clause
nwjs/chromium.src
content/browser/media/media_browsertest.cc
14036
// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/media/media_browsertest.h" #include <memory> #include "base/command_line.h" #include "base/strings/string_number_conversions.h" #include "base/strings/utf_string_conversions.h" #include "build/build_config.h" #include "build/chromeos_buildflags.h" #include "content/public/browser/web_contents.h" #include "content/public/common/content_features.h" #include "content/public/common/content_switches.h" #include "content/public/test/browser_test.h" #include "content/public/test/browser_test_utils.h" #include "content/public/test/content_browser_test_utils.h" #include "content/shell/browser/shell.h" #include "content/shell/common/shell_switches.h" #include "media/audio/audio_features.h" #include "media/base/media_switches.h" #include "media/base/supported_types.h" #include "media/base/test_data_util.h" #include "media/media_buildflags.h" #include "net/test/embedded_test_server/embedded_test_server.h" #include "url/url_util.h" namespace content { #if defined(OS_ANDROID) // Title set by android cleaner page after short timeout. const char16_t kClean[] = u"CLEAN"; #endif void MediaBrowserTest::SetUpCommandLine(base::CommandLine* command_line) { command_line->AppendSwitchASCII( switches::kAutoplayPolicy, switches::autoplay::kNoUserGestureRequiredPolicy); command_line->AppendSwitch(switches::kExposeInternalsForTesting); std::vector<base::Feature> enabled_features = { #if defined(OS_ANDROID) features::kLogJsConsoleMessages, #endif }; std::vector<base::Feature> disabled_features = { // Disable fallback after decode error to avoid unexpected test pass on // the fallback path. media::kFallbackAfterDecodeError, #if defined(OS_LINUX) || defined(OS_CHROMEOS) // Disable out of process audio on Linux due to process spawn // failures. 
http://crbug.com/986021 features::kAudioServiceOutOfProcess, #endif #if defined(OS_CHROMEOS) media::kDeprecateLowUsageCodecs, #endif }; scoped_feature_list_.InitWithFeatures(enabled_features, disabled_features); } void MediaBrowserTest::RunMediaTestPage(const std::string& html_page, const base::StringPairs& query_params, const std::string& expected_title, bool http) { GURL gurl; std::string query = media::GetURLQueryString(query_params); std::unique_ptr<net::EmbeddedTestServer> http_test_server; if (http) { http_test_server = std::make_unique<net::EmbeddedTestServer>(); http_test_server->ServeFilesFromSourceDirectory(media::GetTestDataPath()); CHECK(http_test_server->Start()); gurl = http_test_server->GetURL("/" + html_page + "?" + query); } else { gurl = content::GetFileUrlWithQuery(media::GetTestDataFilePath(html_page), query); } std::string final_title = RunTest(gurl, expected_title); EXPECT_EQ(expected_title, final_title); } std::string MediaBrowserTest::RunTest(const GURL& gurl, const std::string& expected_title) { VLOG(0) << "Running test URL: " << gurl; TitleWatcher title_watcher(shell()->web_contents(), base::ASCIIToUTF16(expected_title)); AddTitlesToAwait(&title_watcher); EXPECT_TRUE(NavigateToURL(shell(), gurl)); std::u16string result = title_watcher.WaitAndGetTitle(); CleanupTest(); return base::UTF16ToASCII(result); } void MediaBrowserTest::CleanupTest() { #if defined(OS_ANDROID) // We only do this cleanup on Android, as a workaround for a test-only OOM // bug. 
See http://crbug.com/727542 const std::u16string cleaner_title = kClean; TitleWatcher clean_title_watcher(shell()->web_contents(), cleaner_title); GURL cleaner_url = content::GetFileUrlWithQuery( media::GetTestDataFilePath("cleaner.html"), ""); EXPECT_TRUE(NavigateToURL(shell(), cleaner_url)); std::u16string cleaner_result = clean_title_watcher.WaitAndGetTitle(); EXPECT_EQ(cleaner_result, cleaner_title); #endif } std::string MediaBrowserTest::EncodeErrorMessage( const std::string& original_message) { url::RawCanonOutputT<char> buffer; url::EncodeURIComponent(original_message.data(), original_message.size(), &buffer); return std::string(buffer.data(), buffer.length()); } void MediaBrowserTest::AddTitlesToAwait(content::TitleWatcher* title_watcher) { title_watcher->AlsoWaitForTitle(base::ASCIIToUTF16(media::kEndedTitle)); title_watcher->AlsoWaitForTitle(base::ASCIIToUTF16(media::kErrorTitle)); title_watcher->AlsoWaitForTitle(base::ASCIIToUTF16(media::kErrorEventTitle)); title_watcher->AlsoWaitForTitle(base::ASCIIToUTF16(media::kFailedTitle)); } // Tests playback and seeking of an audio or video file over file or http based // on a test parameter. Test starts with playback, then, after X seconds or the // ended event fires, seeks near end of file; see player.html for details. The // test completes when either the last 'ended' or an 'error' event fires. class MediaTest : public testing::WithParamInterface<bool>, public MediaBrowserTest { public: // Play specified audio over http:// or file:// depending on |http| setting. void PlayAudio(const std::string& media_file, bool http) { PlayMedia("audio", media_file, http); } // Play specified video over http:// or file:// depending on |http| setting. 
void PlayVideo(const std::string& media_file, bool http) { PlayMedia("video", media_file, http); } void PlayMedia(const std::string& tag, const std::string& media_file, bool http) { base::StringPairs query_params; query_params.emplace_back(tag, media_file); RunMediaTestPage("player.html", query_params, media::kEndedTitle, http); } void RunErrorMessageTest(const std::string& tag, const std::string& media_file, const std::string& expected_error_substring, bool http) { base::StringPairs query_params; query_params.emplace_back(tag, media_file); query_params.emplace_back("error_substr", EncodeErrorMessage(expected_error_substring)); RunMediaTestPage("player.html", query_params, media::kErrorEventTitle, http); } void RunVideoSizeTest(const char* media_file, int width, int height) { std::string expected_title = std::string(media::kEndedTitle) + " " + base::NumberToString(width) + " " + base::NumberToString(height); base::StringPairs query_params; query_params.emplace_back("video", media_file); query_params.emplace_back("sizetest", "true"); RunMediaTestPage("player.html", query_params, expected_title, false); } }; #if defined(OS_ANDROID) class AndroidPlayerMediaTest : public MediaTest { private: void SetUpCommandLine(base::CommandLine* command_line) override { MediaTest::SetUpCommandLine(command_line); command_line->AppendSwitch(switches::kDisableAcceleratedVideoDecode); } }; // TODO(crbug.com/1094571): Flaky. IN_PROC_BROWSER_TEST_P(AndroidPlayerMediaTest, DISABLED_VideoBearMp4) { PlayVideo("bear.mp4", GetParam()); } INSTANTIATE_TEST_SUITE_P(File, AndroidPlayerMediaTest, ::testing::Values(false)); INSTANTIATE_TEST_SUITE_P(Http, AndroidPlayerMediaTest, ::testing::Values(true)); #endif // defined(OS_ANDROID) // Android doesn't support Theora. 
#if !defined(OS_ANDROID) IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearTheora) { PlayVideo("bear.ogv", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearSilentTheora) { PlayVideo("bear_silent.ogv", GetParam()); } #endif // !defined(OS_ANDROID) IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearWebm) { PlayVideo("bear.webm", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, AudioBearOpusWebm) { PlayAudio("bear-opus.webm", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, AudioBearOpusMp4) { PlayAudio("bear-opus.mp4", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, AudioBearOpusOgg) { PlayAudio("bear-opus.ogg", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearSilentWebm) { PlayVideo("bear_silent.webm", GetParam()); } // We don't expect android devices to support highbit yet. #if defined(ARCH_CPU_X86_FAMILY) && !defined(OS_ANDROID) IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearHighBitDepthVP9) { PlayVideo("bear-320x180-hi10p-vp9.webm", GetParam()); } // TODO(crbug.com/1222748): Flaky on Mac. 
#if defined(OS_MAC) #define MAYBE_VideoBear12DepthVP9 DISABLED_VideoBear12DepthVP9 #else #define MAYBE_VideoBear12DepthVP9 VideoBear12DepthVP9 #endif IN_PROC_BROWSER_TEST_P(MediaTest, MAYBE_VideoBear12DepthVP9) { PlayVideo("bear-320x180-hi12p-vp9.webm", GetParam()); } #endif IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearMp4Vp9) { PlayVideo("bear-320x240-v_frag-vp9.mp4", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, AudioBearFlacMp4) { PlayAudio("bear-flac.mp4", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, AudioBearFlac192kHzMp4) { PlayAudio("bear-flac-192kHz.mp4", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearMovPcmS16be) { PlayAudio("bear_pcm_s16be.mov", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearMovPcmS24be) { PlayAudio("bear_pcm_s24be.mov", GetParam()); } #if BUILDFLAG(USE_PROPRIETARY_CODECS) IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearMp4) { PlayVideo("bear.mp4", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearSilentMp4) { PlayVideo("bear_silent.mp4", GetParam()); } IN_PROC_BROWSER_TEST_F(MediaTest, VideoBearRotated0) { RunVideoSizeTest("bear_rotate_0.mp4", 1280, 720); } IN_PROC_BROWSER_TEST_F(MediaTest, VideoBearRotated90) { RunVideoSizeTest("bear_rotate_90.mp4", 720, 1280); } IN_PROC_BROWSER_TEST_F(MediaTest, VideoBearRotated180) { RunVideoSizeTest("bear_rotate_180.mp4", 1280, 720); } IN_PROC_BROWSER_TEST_F(MediaTest, VideoBearRotated270) { RunVideoSizeTest("bear_rotate_270.mp4", 720, 1280); } #if !defined(OS_ANDROID) // Android devices usually only support baseline, main and high. IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearHighBitDepthMp4) { PlayVideo("bear-320x180-hi10p.mp4", GetParam()); } // Android can't reliably load lots of videos on a page. // See http://crbug.com/749265 // TODO(crbug.com/1222852): Flaky on Mac. 
#if defined(OS_MAC) #define MAYBE_LoadManyVideos DISABLED_LoadManyVideos #else #define MAYBE_LoadManyVideos LoadManyVideos #endif IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE_LoadManyVideos) { base::StringPairs query_params; RunMediaTestPage("load_many_videos.html", query_params, media::kEndedTitle, true); } #endif // !defined(OS_ANDROID) #if BUILDFLAG(IS_CHROMEOS_ASH) IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearAviMp3Mpeg4) { PlayVideo("bear_mpeg4_mp3.avi", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearAviMp3Mpeg4Asp) { PlayVideo("bear_mpeg4asp_mp3.avi", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearAviMp3Divx) { PlayVideo("bear_divx_mp3.avi", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBear3gpAacH264) { PlayVideo("bear_h264_aac.3gp", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBear3gpAmrnbMpeg4) { PlayVideo("bear_mpeg4_amrnb.3gp", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearWavGsmms) { PlayAudio("bear_gsm_ms.wav", GetParam()); } #endif // BUILDFLAG(IS_CHROMEOS_ASH) #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) IN_PROC_BROWSER_TEST_P(MediaTest, AudioBearFlac) { PlayAudio("bear.flac", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, AudioBearFlacOgg) { PlayAudio("bear-flac.ogg", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearWavAlaw) { PlayAudio("bear_alaw.wav", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearWavMulaw) { PlayAudio("bear_mulaw.wav", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearWavPcm) { PlayAudio("bear_pcm.wav", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearWavPcm3kHz) { PlayAudio("bear_3kHz.wav", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearWavPcm192kHz) { PlayAudio("bear_192kHz.wav", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoTulipWebm) { PlayVideo("tulip2.webm", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoErrorMissingResource) { RunErrorMessageTest("video", "nonexistent_file.webm", "MEDIA_ELEMENT_ERROR: Format error", 
GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoErrorEmptySrcAttribute) { RunErrorMessageTest("video", "", "MEDIA_ELEMENT_ERROR: Empty src attribute", GetParam()); } IN_PROC_BROWSER_TEST_P(MediaTest, VideoErrorNoSupportedStreams) { // The test doesn't work from file: scheme without AllowFileAccessFromFiles. // TODO(wolenetz): https://crbug.com/1071473: Investigate and reenable the // test. if (!GetParam()) return; RunErrorMessageTest("video", "no_streams.webm", "DEMUXER_ERROR_NO_SUPPORTED_STREAMS: FFmpegDemuxer: no " "supported streams", GetParam()); } // Covers tear-down when navigating away as opposed to browser exiting. IN_PROC_BROWSER_TEST_F(MediaTest, Navigate) { PlayVideo("bear.webm", false); EXPECT_TRUE(NavigateToURL(shell(), GURL(url::kAboutBlankURL))); EXPECT_FALSE(shell()->web_contents()->IsCrashed()); } IN_PROC_BROWSER_TEST_P(MediaTest, AudioOnly_XHE_AAC_MP4) { if (media::IsSupportedAudioType( {media::AudioCodec::kAAC, media::AudioCodecProfile::kXHE_AAC})) { PlayAudio("noise-xhe-aac.mp4", GetParam()); } } INSTANTIATE_TEST_SUITE_P(File, MediaTest, ::testing::Values(false)); INSTANTIATE_TEST_SUITE_P(Http, MediaTest, ::testing::Values(true)); } // namespace content
bsd-3-clause
jdetle/nteract
packages/core/__tests__/views/toolbar-spec.js
2599
import React from "react"; import { mount } from "enzyme"; import Toolbar from "../../src/components/toolbar"; import toJSON from "enzyme-to-json"; describe("Toolbar View", () => { test("should be able to render a toolbar", () => { const toolbar = mount(<Toolbar />); expect(toJSON(toolbar)).toMatchSnapshot(); toolbar.find(".toggle-menu").simulate("click"); expect(toJSON(toolbar)).toMatchSnapshot(); }); test("clearOutputs can be clicked", () => { const dummyFunc = jest.fn(); const toolbar = mount(<Toolbar type="code" clearOutputs={dummyFunc} />); toolbar.find(".toggle-menu").simulate("click"); toolbar.find(".clearOutput").simulate("click"); expect(dummyFunc).toHaveBeenCalled(); }); test("toggleCellInputVisibility can be clicked", () => { const dummyFunc = jest.fn(); const toolbar = mount( <Toolbar type="code" toggleCellInputVisibility={dummyFunc} /> ); toolbar.find(".toggle-menu").simulate("click"); toolbar.find(".inputVisibility").simulate("click"); expect(dummyFunc).toHaveBeenCalled(); }); test("toggleCellOutputVisibility can be clicked", () => { const dummyFunc = jest.fn(); const toolbar = mount( <Toolbar type="code" toggleCellOutputVisibility={dummyFunc} /> ); toolbar.find(".toggle-menu").simulate("click"); toolbar.find(".outputVisibility").simulate("click"); expect(dummyFunc).toHaveBeenCalled(); }); test("toggleOutputExpaned can be clicked", () => { const dummyFunc = jest.fn(); const toolbar = mount( <Toolbar type="code" toggleOutputExpansion={dummyFunc} /> ); toolbar.find(".toggle-menu").simulate("click"); toolbar.find(".outputExpanded").simulate("click"); expect(dummyFunc).toHaveBeenCalled(); }); test("changeCellType can be clicked", () => { const dummyFunc = jest.fn(); const toolbar = mount(<Toolbar type="code" changeCellType={dummyFunc} />); toolbar.find(".toggle-menu").simulate("click"); toolbar.find(".changeType").simulate("click"); expect(dummyFunc).toHaveBeenCalled(); }); test('shows "convert to code cell" menu entry for markdown type', () => { const 
toolbar = mount(<Toolbar type={"markdown"} />); toolbar.find(".toggle-menu").simulate("click"); expect(toolbar.text()).toContain("Convert to Code Cell"); }); test('shows "convert to markdown cell" menu entry for code type', () => { const toolbar = mount(<Toolbar type="code" />); toolbar.find(".toggle-menu").simulate("click"); expect(toolbar.text()).toContain("Convert to Markdown Cell"); }); });
bsd-3-clause
beeftornado/sentry
tests/snuba/test_util.py
2483
from __future__ import absolute_import from datetime import datetime, timedelta from sentry.models import GroupHash from sentry.testutils import TestCase, SnubaTestCase from sentry.utils import snuba class SnubaUtilTest(TestCase, SnubaTestCase): def test_filter_keys_set(self): snuba.raw_query( start=datetime.now(), end=datetime.now(), filter_keys={"project_id": set([1]), "logger": set(["asdf"])}, aggregations=[["count()", "", "count"]], ) def test_shrink_timeframe(self): now = datetime.now() year_ago = now - timedelta(days=365) issues = None assert snuba.shrink_time_window(issues, year_ago) == year_ago issues = [] assert snuba.shrink_time_window(issues, year_ago) == year_ago group1 = self.create_group() group1.first_seen = now - timedelta(hours=1) group1.last_seen = now group1.save() GroupHash.objects.create(project_id=group1.project_id, group=group1, hash="a" * 32) group2 = self.create_group() GroupHash.objects.create(project_id=group2.project_id, group=group2, hash="b" * 32) issues = [group1.id] assert snuba.shrink_time_window(issues, year_ago) == now - timedelta(hours=1, minutes=5) issues = [group1.id, group2.id] assert snuba.shrink_time_window(issues, year_ago) == year_ago # with pytest.raises(snuba.QueryOutsideGroupActivityError): # # query a group for a time range before it had any activity # snuba.raw_query( # start=group1.first_seen - timedelta(days=1, hours=1), # end=group1.first_seen - timedelta(days=1), # filter_keys={ # 'project_id': [group1.project_id], # 'issue': [group1.id], # }, # aggregations=[ # ['count()', '', 'count'], # ], # ) def test_override_options(self): assert snuba.OVERRIDE_OPTIONS == {"consistent": False} with snuba.options_override({"foo": 1}): assert snuba.OVERRIDE_OPTIONS == {"foo": 1, "consistent": False} with snuba.options_override({"foo": 2}): assert snuba.OVERRIDE_OPTIONS == {"foo": 2, "consistent": False} assert snuba.OVERRIDE_OPTIONS == {"foo": 1, "consistent": False} assert snuba.OVERRIDE_OPTIONS == {"consistent": False}
bsd-3-clause
enthought/distarray
distarray/globalapi/maps.py
38350
# encoding: utf-8 # --------------------------------------------------------------------------- # Copyright (C) 2008-2014, IPython Development Team and Enthought, Inc. # Distributed under the terms of the BSD License. See COPYING.rst. # --------------------------------------------------------------------------- """ Distribution class and auxiliary ClientMap classes. The Distribution is a multi-dimensional map class that manages the one-dimensional maps for each DistArray dimension. The Distribution class represents the *distribution* information for a distributed array, independent of the distributed array's *data*. Distributions allow DistArrays to reduce overall communication when indexing and slicing by determining which processes own (or may possibly own) the indices in question. Two DistArray objects can share the same Distribution if they have the exact same distribution. The one-dimensional ClientMap classes keep track of which process owns which index in that dimension. This class has several subclasses for specific distribution types, including `BlockMap`, `CyclicMap`, `NoDistMap`, and `UnstructuredMap`. """ from __future__ import division, absolute_import import operator from itertools import product from abc import ABCMeta, abstractmethod from numbers import Integral import numpy as np from distarray.externals.six import add_metaclass from distarray.externals.six.moves import range, reduce from distarray.utils import remove_elements from distarray.metadata_utils import (normalize_dist, normalize_grid_shape, normalize_dim_dict, normalize_reduction_axes, make_grid_shape, sanitize_indices, _start_stop_block, tuple_intersection, shapes_from_dim_data_per_rank, condense, strides_from_shape) def _dedup_dim_dicts(dim_dicts): """ Internal helper function to take a list of dimension dictionaries and remove the dupes. What remains should be one dictionary per rank (for this dimension of the process grid). """ # Workaround to make the dictionary's contents hashable. 
for d in dim_dicts: if 'indices' in d: d['indices'] = tuple(d['indices']) try: return [dict(u) for u in set(tuple(sorted(d.items())) for d in dim_dicts)] except TypeError: result = [] for i, d in enumerate(dim_dicts): if d not in dim_dicts[i+1:]: result.append(d) return result # --------------------------------------------------------------------------- # Functions for creating Map objects # --------------------------------------------------------------------------- def choose_map(dist_type): """Choose a map class given one of the distribution types.""" cls_from_dist_type = { 'b': BlockMap, 'c': BlockCyclicMap, 'n': NoDistMap, 'u': UnstructuredMap, } if dist_type not in cls_from_dist_type: raise ValueError("unknown distribution type for %r" % dist_type) return cls_from_dist_type[dist_type] def _map_from_axis_dim_dicts(axis_dim_dicts): """ Generates a ClientMap instance from a sanitized sequence of dimension dictionaries. Parameters ---------- axis_dim_dicts: sequence of dictionaries Each dictionary is a "dimension dictionary" from the distributed array protocol, one per process in this dimension of the process grid. The dimension dictionaries shall all have the same keys and values for global attributes: `dist_type`, `size`, `proc_grid_size`, and perhaps others. Returns ------- An instance of a subclass of MapBase. """ # check that all processes / ranks are accounted for. proc_ranks = sorted(dd['proc_grid_rank'] for dd in axis_dim_dicts) if proc_ranks != list(range(len(axis_dim_dicts))): msg = "Ranks of processes (%r) not consistent." raise ValueError(msg % proc_ranks) # Sort axis_dim_dicts according to proc_grid_rank. 
axis_dim_dicts = sorted(axis_dim_dicts, key=lambda d: d['proc_grid_rank']) dist_type = axis_dim_dicts[0]['dist_type'] map_class = choose_map(dist_type) return map_class.from_axis_dim_dicts(axis_dim_dicts) def map_from_global_dim_dict(global_dim_dict): """Given a global_dim_dict return map.""" dist_type = global_dim_dict['dist_type'] map_class = choose_map(dist_type) return map_class.from_global_dim_dict(global_dim_dict) def map_from_sizes(size, dist_type, grid_size): """ Returns an instance of the appropriate subclass of MapBase. """ map_class = choose_map(dist_type) return map_class(size, grid_size) # --------------------------------------------------------------------------- # Map classes # --------------------------------------------------------------------------- @add_metaclass(ABCMeta) class MapBase(object): """Base class for one-dimensional client-side maps. Maps keep track of the relevant distribution information for a single dimension of a distributed array. Maps allow distributed arrays to keep track of which process to talk to when indexing and slicing. Classes that inherit from `MapBase` must implement the `index_owners()` abstractmethod. """ @classmethod @abstractmethod def from_global_dim_dict(cls, glb_dim_dict): """Make a Map from a global dimension dictionary.""" pass @classmethod @abstractmethod def from_axis_dim_dicts(cls, axis_dim_dicts): """Make a Map from a sequence of process-local dimension dictionaries. There should be one such dimension dictionary per process. """ pass @abstractmethod def __init__(self): """Create a new Map. Parameters may vary for different subtypes.""" pass @abstractmethod def index_owners(self, idx): """ Returns a list of process IDs in this dimension that might possibly own `idx`. Raises `IndexError` if `idx` is out of bounds. 
""" raise IndexError() @abstractmethod def get_dimdicts(self): """Return a dim_dict per process in this dimension.""" pass def _is_compatible_degenerate(self, map): right_types = all(isinstance(m, (NoDistMap, BlockMap, BlockCyclicMap)) for m in (self, map)) return (right_types and self.grid_size == map.grid_size == 1 and self.size == map.size) def is_compatible(self, map): if self._is_compatible_degenerate(map): return True else: return ((self.dist == map.dist) and (vars(self) == vars(map))) # --------------------------------------------------------------------------- # 1-D Map classes # --------------------------------------------------------------------------- class NoDistMap(MapBase): dist = 'n' @classmethod def from_global_dim_dict(cls, glb_dim_dict): if glb_dim_dict['dist_type'] != 'n': msg = "Wrong dist_type (%r) for non-distributed map." raise ValueError(msg % glb_dim_dict['dist_type']) size = glb_dim_dict['size'] return cls(size, grid_size=1) @classmethod def from_axis_dim_dicts(cls, axis_dim_dicts): if len(axis_dim_dicts) != 1: msg = ("Number of dimension dictionaries " "non-unitary for non-distributed dimension.") raise ValueError(msg) dd = axis_dim_dicts[0] if dd['dist_type'] != 'n': msg = "Wrong dist_type (%r) for non-distributed map." 
raise ValueError(msg % dd['dist_type']) grid_size = dd['proc_grid_size'] size = dd['size'] return cls(size, grid_size) def __init__(self, size, grid_size): if grid_size != 1: msg = "grid_size for NoDistMap must be 1 (given %s)" raise ValueError(msg % grid_size) self.size = size self.grid_size = grid_size def index_owners(self, idx): return [0] if 0 <= idx < self.size else [] def slice_owners(self, idx): start = idx.start if idx.start is not None else 0 stop = idx.stop if idx.stop is not None else self.size step = idx.step if idx.step is not None else 1 if tuple_intersection((start, stop, step), (0, self.size)): return [0] else: return [] def get_dimdicts(self): return ({ 'dist_type': 'n', 'size': self.size, 'proc_grid_size': 1, 'proc_grid_rank': 0, },) def slice(self, idx): """Make a new Map from a slice.""" start = idx.start if idx.start is not None else 0 stop = idx.stop if idx.stop is not None else self.size step = idx.step if idx.step is not None else 1 isection = tuple_intersection((start, stop, step), (0, self.size)) if isection: step = idx.step if idx.step is not None else 1 isection_size = int(np.ceil((isection[1] - isection[0]) / step)) else: isection_size = 0 return self.__class__(size=isection_size, grid_size=1) def view(self, new_dimsize): """Scale this map for the `view` method.""" return self.__class__(size=int(new_dimsize), grid_size=1) def is_compatible(self, other): return (isinstance(other, (NoDistMap, BlockMap, BlockCyclicMap)) and other.grid_size == self.grid_size and other.size == self.size) class BlockMap(MapBase): dist = 'b' @classmethod def from_global_dim_dict(cls, glb_dim_dict): if glb_dim_dict['dist_type'] != 'b': msg = "Wrong dist_type (%r) for block map." 
raise ValueError(msg % glb_dim_dict['dist_type']) bounds = glb_dim_dict['bounds'] tuple_bounds = list(zip(bounds[:-1], bounds[1:])) size = bounds[-1] grid_size = max(len(bounds) - 1, 1) comm_padding = int(glb_dim_dict.get('comm_padding', 0)) boundary_padding = int(glb_dim_dict.get('boundary_padding', 0)) return cls(size=size, grid_size=grid_size, bounds=tuple_bounds, comm_padding=comm_padding, boundary_padding=boundary_padding) @classmethod def from_axis_dim_dicts(cls, axis_dim_dicts): dd = axis_dim_dicts[0] if dd['dist_type'] != 'b': msg = "Wrong dist_type (%r) for block map." raise ValueError(msg % dd['dist_type']) size = dd['size'] grid_size = dd['proc_grid_size'] if grid_size != len(axis_dim_dicts): msg = ("Number of dimension dictionaries (%r)" "inconsistent with proc_grid_size (%r).") raise ValueError(msg % (len(axis_dim_dicts), grid_size)) bounds = [(d['start'], d['stop']) for d in axis_dim_dicts] boundary_padding, comm_padding = dd.get('padding', (0, 0)) return cls(size=size, grid_size=grid_size, bounds=bounds, comm_padding=comm_padding, boundary_padding=boundary_padding) def __init__(self, size, grid_size, bounds=None, comm_padding=None, boundary_padding=None): self.size = size self.grid_size = grid_size if bounds is None: self.bounds = [_start_stop_block(size, grid_size, grid_rank) for grid_rank in range(grid_size)] else: self.bounds = bounds self.comm_padding = comm_padding or 0 self.boundary_padding = boundary_padding or 0 def index_owners(self, idx): coords = [] for (coord, (lower, upper)) in enumerate(self.bounds): if lower <= idx < upper: coords.append(coord) return coords def slice_owners(self, idx): coords = [] start = idx.start if idx.start is not None else 0 stop = idx.stop if idx.stop is not None else self.size step = idx.step if idx.step is not None else 1 for (coord, (lower, upper)) in enumerate(self.bounds): if tuple_intersection((start, stop, step), (lower, upper)): coords.append(coord) return coords def get_dimdicts(self): bounds = 
self.bounds or [[0, 0]] grid_ranks = range(len(bounds)) cpadding = self.comm_padding padding = [[cpadding, cpadding] for _ in grid_ranks] if len(padding) > 0: padding[0][0] = self.boundary_padding padding[-1][-1] = self.boundary_padding data_tuples = zip(grid_ranks, padding, bounds) # Build the result out = [] for grid_rank, padding, (start, stop) in data_tuples: out.append({ 'dist_type': 'b', 'size': self.size, 'proc_grid_size': self.grid_size, 'proc_grid_rank': grid_rank, 'start': start, 'stop': stop, 'padding': padding, }) return tuple(out) def slice(self, idx): """Make a new Map from a slice.""" new_bounds = [0] start = idx.start if idx.start is not None else 0 step = idx.step if idx.step is not None else 1 # iterate over the processes in this dimension for proc_start, proc_stop in self.bounds: stop = idx.stop if idx.stop is not None else proc_stop isection = tuple_intersection((start, stop, step), (proc_start, proc_stop)) if isection: isection_size = int(np.ceil((isection[1] - (isection[0])) / step)) new_bounds.append(isection_size + new_bounds[-1]) if new_bounds == [0]: new_bounds = [] size = new_bounds[-1] if len(new_bounds) > 0 else 0 grid_size = max(len(new_bounds) - 1, 1) new_bounds = list(zip(new_bounds[:-1], new_bounds[1:])) return self.__class__(size=size, grid_size=grid_size, bounds=new_bounds) def view(self, new_dimsize): """Scale this map for the `view` method.""" factor = new_dimsize / self.size new_bounds = [(int(start*factor), int(stop*factor)) for (start, stop) in self.bounds] return self.__class__(size=int(new_dimsize), grid_size=self.grid_size, bounds=new_bounds) def is_compatible(self, other): if isinstance(other, NoDistMap): return other.is_compatible(self) return super(BlockMap, self).is_compatible(other) class BlockCyclicMap(MapBase): dist = 'c' @classmethod def from_global_dim_dict(cls, glb_dim_dict): if glb_dim_dict['dist_type'] != 'c': msg = "Wrong dist_type (%r) for cyclic map." 
raise ValueError(msg % glb_dim_dict['dist_type']) size = glb_dim_dict['size'] grid_size = glb_dim_dict['proc_grid_size'] block_size = glb_dim_dict.get('block_size', 1) return cls(size, grid_size, block_size) @classmethod def from_axis_dim_dicts(cls, axis_dim_dicts): dd = axis_dim_dicts[0] if dd['dist_type'] != 'c': msg = "Wrong dist_type (%r) for cyclic map." raise ValueError(msg % dd['dist_type']) size = dd['size'] grid_size = dd['proc_grid_size'] if grid_size != len(axis_dim_dicts): msg = ("Number of dimension dictionaries (%r)" "inconsistent with proc_grid_size (%r).") raise ValueError(msg % (len(axis_dim_dicts), grid_size)) block_size = dd.get('block_size', 1) return cls(size, grid_size, block_size) def __init__(self, size, grid_size, block_size=1): self.size = size self.grid_size = grid_size self.block_size = block_size def index_owners(self, idx): idx_block = idx // self.block_size return [idx_block % self.grid_size] def get_dimdicts(self): return tuple(({'dist_type': 'c', 'size': self.size, 'proc_grid_size': self.grid_size, 'proc_grid_rank': grid_rank, 'start': grid_rank * self.block_size, 'block_size': self.block_size, }) for grid_rank in range(self.grid_size)) def is_compatible(self, other): if isinstance(other, NoDistMap): return other.is_compatible(self) return super(BlockCyclicMap, self).is_compatible(other) class UnstructuredMap(MapBase): dist = 'u' @classmethod def from_global_dim_dict(cls, glb_dim_dict): if glb_dim_dict['dist_type'] != 'u': msg = "Wrong dist_type (%r) for unstructured map." raise ValueError(msg % glb_dim_dict['dist_type']) indices = tuple(np.asarray(i) for i in glb_dim_dict['indices']) size = sum(len(i) for i in indices) grid_size = len(indices) return cls(size, grid_size, indices=indices) @classmethod def from_axis_dim_dicts(cls, axis_dim_dicts): dd = axis_dim_dicts[0] if dd['dist_type'] != 'u': msg = "Wrong dist_type (%r) for unstructured map." 
raise ValueError(msg % dd['dist_type']) size = dd['size'] grid_size = dd['proc_grid_size'] if grid_size != len(axis_dim_dicts): msg = ("Number of dimension dictionaries (%r)" "inconsistent with proc_grid_size (%r).") raise ValueError(msg % (len(axis_dim_dicts), grid_size)) indices = [dd['indices'] for dd in axis_dim_dicts] return cls(size, grid_size, indices=indices) def __init__(self, size, grid_size, indices=None): self.size = size self.grid_size = grid_size self.indices = indices if self.indices is not None: # Convert to NumPy arrays if not already. self.indices = [np.asarray(ind) for ind in self.indices] self._index_owners = range(self.grid_size) def index_owners(self, idx): # TODO: FIXME: for now, the unstructured map just returns all # processes. Can be optimized if we know the upper and lower bounds # for each local array's global indices. return self._index_owners def get_dimdicts(self): if self.indices is None: raise ValueError() return tuple(({ 'dist_type': 'u', 'size': self.size, 'proc_grid_size': self.grid_size, 'proc_grid_rank': grid_rank, 'indices': ii, }) for grid_rank, ii in enumerate(self.indices)) # --------------------------------------------------------------------------- # N-Dimensional map. # --------------------------------------------------------------------------- def asdistribution(context, shape_or_dist, dist=None, grid_shape=None, targets=None): if isinstance(shape_or_dist, Distribution): return shape_or_dist return Distribution(context=context, shape=shape_or_dist, dist=dist, grid_shape=grid_shape, targets=targets) class Distribution(object): """ Governs the mapping between global indices and process ranks for multi-dimensional objects. """ @classmethod def from_maps(cls, context, maps, targets=None): """Create a Distribution from a sequence of `Map`\s. Parameters ---------- context : Context object maps : Sequence of Map objects targets : Sequence of int, optional Sequence of engine target numbers. 
Default: all available Returns ------- Distribution """ # This constructor is called by all the others self = super(Distribution, cls).__new__(cls) self.context = context self.targets = sorted(targets or context.targets) self._comm = None self.maps = maps self.shape = tuple(m.size for m in self.maps) self.ndim = len(self.maps) self.dist = tuple(m.dist for m in self.maps) self.grid_shape = tuple(m.grid_size for m in self.maps) self.grid_shape = normalize_grid_shape(self.grid_shape, self.shape, self.dist, len(self.targets)) nelts = reduce(operator.mul, self.grid_shape, 1) self.rank_from_coords = np.arange(nelts).reshape(self.grid_shape) return self @classmethod def from_dim_data_per_rank(cls, context, dim_data_per_rank, targets=None): """Create a Distribution from a sequence of `dim_data` tuples. Parameters ---------- context : Context object dim_data_per_rank : Sequence of dim_data tuples, one per rank See the "Distributed Array Protocol" for a description of dim_data tuples. targets : Sequence of int, optional Sequence of engine target numbers. Default: all available Returns ------- Distribution """ for dim_data in dim_data_per_rank: for dim_dict in dim_data: normalize_dim_dict(dim_dict) # `axis_dim_dicts_per_axis` is the zip of `dim_data_per_rank`, # with duplicates removed. It is a list of `axis_dim_dicts`. # Each `axis_dim_dicts` is a list of dimension dictionaries, one per # process on a single axis of the process grid. axis_dim_dicts_per_axis = [_dedup_dim_dicts(axis_dim_dicts) for axis_dim_dicts in zip(*dim_data_per_rank)] ndim = len(dim_data_per_rank[0]) if len(axis_dim_dicts_per_axis) != ndim: raise ValueError("Inconsistent dimensions.") maps = [_map_from_axis_dim_dicts(axis_dim_dicts) for axis_dim_dicts in axis_dim_dicts_per_axis] return cls.from_maps(context=context, maps=maps, targets=targets) def __new__(cls, context, shape, dist=None, grid_shape=None, targets=None): """Create a Distribution from a `shape` and other optional args. 
Parameters ---------- context : Context object shape : tuple of int Shape of the resulting Distribution, one integer per dimension. dist : str, list, tuple, or dict, optional Shorthand data structure representing the distribution type for every dimension. Default: {0: 'b'}, with all other dimensions 'n'. grid_shape : tuple of int targets : Sequence of int, optional Sequence of engine target numbers. Default: all available Returns ------- Distribution """ # special case when dist is all 'n's. if (dist is not None) and all(d == 'n' for d in dist): if (targets is not None) and (len(targets) != 1): raise ValueError('target dist conflict') elif targets is None: targets = [context.targets[0]] else: # then targets is set correctly pass ndim = len(shape) dist = dist or {0: 'b'} dist = normalize_dist(dist, ndim) targets = sorted(targets or context.targets) grid_shape = grid_shape or make_grid_shape(shape, dist, len(targets)) grid_shape = normalize_grid_shape(grid_shape, shape, dist, len(targets)) # choose targets from grid_shape ntargets = reduce(operator.mul, grid_shape, 1) targets = targets[:ntargets] # list of `ClientMap` objects, one per dimension. maps = [map_from_sizes(*args) for args in zip(shape, dist, grid_shape)] self = cls.from_maps(context=context, maps=maps, targets=targets) # TODO: FIXME: this is a workaround. The reason we slice here is to # return a distribution with no empty local shapes. The `from_maps()` # classmethod should be fixed to ensure no empty local arrays are # created in the first place. That will remove the need to slice the # distribution to remove empty localshapes. if all(d in ('n', 'b') for d in self.dist): self = self.slice((slice(None),)*self.ndim) return self @classmethod def from_global_dim_data(cls, context, global_dim_data, targets=None): """Make a Distribution from a global_dim_data structure. Parameters ---------- context : Context object global_dim_data : tuple of dict A global dimension dictionary per dimension. 
See following `Note` section. targets : Sequence of int, optional Sequence of engine target numbers. Default: all available Returns ------- Distribution Note ---- The `global_dim_data` tuple is a simple, straightforward data structure that allows full control over all aspects of a DistArray's distribution information. It does not contain any of the array's *data*, only the *metadata* needed to specify how the array is to be distributed. Each dimension of the array is represented by corresponding dictionary in the tuple, one per dimension. All dictionaries have a `dist_type` key that specifies whether the array is block, cyclic, or unstructured. The other keys in the dictionary are dependent on the `dist_type` key. **Block** * ``dist_type`` is ``'b'``. * ``bounds`` is a sequence of integers, at least two elements. The ``bounds`` sequence always starts with 0 and ends with the global ``size`` of the array. The other elements indicate the local array global index boundaries, such that successive pairs of elements from ``bounds`` indicates the ``start`` and ``stop`` indices of the corresponding local array. * ``comm_padding`` integer, greater than or equal to zero. * ``boundary_padding`` integer, greater than or equal to zero. These integer values indicate the communication or boundary padding, respectively, for the local arrays. Currently only a single value for both ``boundary_padding`` and ``comm_padding`` is allowed for the entire dimension. **Cyclic** * ``dist_type`` is ``'c'`` * ``proc_grid_size`` integer, greater than or equal to one. The size of the process grid in this dimension. Equivalent to the number of local arrays in this dimension and determines the number of array sections. * ``size`` integer, greater than or equal to zero. The global size of the array in this dimension. * ``block_size`` integer, optional. Greater than or equal to one. If not present, equivalent to being present with value of one. 
**Unstructured** * ``dist_type`` is ``'u'`` * ``indices`` sequence of one-dimensional numpy integer arrays or buffers. The ``len(indices)`` is the number of local unstructured arrays in this dimension. To compute the global size of the array in this dimension, compute ``sum(len(ii) for ii in indices)``. **Not-distributed** The ``'n'`` distribution type is a convenience to specify that an array is not distributed along this dimension. * ``dist_type`` is ``'n'`` * ``size`` integer, greater than or equal to zero. The global size of the array in this dimension. """ maps = [map_from_global_dim_dict(gdd) for gdd in global_dim_data] return cls.from_maps(context=context, maps=maps, targets=targets) def __getitem__(self, idx): return self.maps[idx] def __len__(self): return len(self.maps) @property def comm(self): if self._comm is None: self._comm = self.context.make_subcomm(self.targets) return self._comm @property def has_precise_index(self): """ Does the client-side Distribution know precisely who owns all indices? This can be used to determine whether one needs to use the `checked` version of `__getitem__` or `__setitem__` on LocalArrays. """ return not any(isinstance(m, UnstructuredMap) for m in self.maps) def slice(self, index_tuple): """Make a new Distribution from a slice.""" new_targets = self.owning_targets(index_tuple) or [0] new_maps = [] # iterate over the dimensions for map_, idx in zip(self.maps, index_tuple): if isinstance(idx, Integral): continue # integral indexing returns reduced dimensionality elif isinstance(idx, slice): new_maps.append(map_.slice(idx)) else: msg = "Index must be a sequence of Integrals and slices." raise TypeError(msg) return self.__class__.from_maps(context=self.context, maps=new_maps, targets=new_targets) def owning_ranks(self, idxs): """ Returns a list of ranks that may *possibly* own the location in the `idxs` tuple. For many distribution types, the owning rank is precisely known; for others, it is only probably known. 
When the rank is precisely known, `owning_ranks()` returns a list of exactly one rank. Otherwise, returns a list of more than one rank. If the `idxs` tuple is out of bounds, raises `IndexError`. """ _, idxs = sanitize_indices(idxs, ndim=self.ndim, shape=self.shape) dim_coord_hits = [] for m, idx in zip(self.maps, idxs): if isinstance(idx, Integral): owners = m.index_owners(idx) elif isinstance(idx, slice): owners = m.slice_owners(idx) dim_coord_hits.append(owners) all_coords = product(*dim_coord_hits) ranks = [self.rank_from_coords[c] for c in all_coords] return ranks def owning_targets(self, idxs): """ Like `owning_ranks()` but returns a list of targets rather than ranks. Convenience method meant for IPython parallel usage. """ return [self.targets[r] for r in self.owning_ranks(idxs)] def get_dim_data_per_rank(self): dds = [enumerate(m.get_dimdicts()) for m in self.maps] if not dds: return [] cart_dds = product(*dds) coord_and_dd = [zip(*cdd) for cdd in cart_dds] rank_and_dd = sorted((self.rank_from_coords[c], dd) for (c, dd) in coord_and_dd) return [dd for (_, dd) in rank_and_dd] def is_compatible(self, o): return ((self.context, self.targets, self.shape, self.ndim, self.grid_shape) == (o.context, o.targets, o.shape, o.ndim, o.grid_shape) and all(m.is_compatible(om) for (m, om) in zip(self.maps, o.maps))) def reduce(self, axes): """ Returns a new Distribution reduced along `axis`, i.e., the new distribution has one fewer dimension than `self`. """ # the `axis` argument can actually be a sequence of axes, so we rename it. axes = normalize_reduction_axes(axes, self.ndim) reduced_shape = remove_elements(axes, self.shape) reduced_dist = remove_elements(axes, self.dist) reduced_grid_shape = remove_elements(axes, self.grid_shape) # This block is required because np.min() works one axis at a time. 
reduced_ranks = self.rank_from_coords.copy() for axis in axes: reduced_ranks = np.min(reduced_ranks, axis=axis, keepdims=True) reduced_targets = [self.targets[r] for r in reduced_ranks.flat] return Distribution(context=self.context, shape=reduced_shape, dist=reduced_dist, grid_shape=reduced_grid_shape, targets=reduced_targets) def view(self, new_dimsize=None): """Generate a new Distribution for use with DistArray.view.""" if new_dimsize is None: return self scaled_map = self.maps[-1].view(new_dimsize) new_maps = self.maps[:-1] + [scaled_map] return self.__class__.from_maps(context=self.context, maps=new_maps) def localshapes(self): return shapes_from_dim_data_per_rank(self.get_dim_data_per_rank()) def comm_union(self, *dists): """ Make a communicator that includes the union of all targets in `dists`. Parameters ---------- dists: sequence of distribution objects. Returns ------- tuple First element is encompassing communicator proxy; second is a sequence of all targets in `dists`. """ dist_targets = [d.targets for d in dists] all_targets = sorted(reduce(set.union, dist_targets, set(self.targets))) return self.context.make_subcomm(all_targets), all_targets # ------------------------------------------------------------------------ # Redistribution # ------------------------------------------------------------------------ @staticmethod def _redist_intersection_same_shape(source_dimdata, dest_dimdata): intersections = [] for source_dimdict, dest_dimdict in zip(source_dimdata, dest_dimdata): if not (source_dimdict['dist_type'] == dest_dimdict['dist_type'] == 'b'): raise ValueError("Only 'b' dist_type supported") source_idxs = source_dimdict['start'], source_dimdict['stop'] dest_idxs = dest_dimdict['start'], dest_dimdict['stop'] intersections.append(tuple_intersection(source_idxs, dest_idxs)) return intersections @staticmethod def _redist_intersection_reshape(source_dimdata, dest_dimdata): source_flat = global_flat_indices(source_dimdata) dest_flat = 
global_flat_indices(dest_dimdata) return _global_flat_indices_intersection(source_flat, dest_flat) def get_redist_plan(self, other_dist): # Get all targets all_targets = sorted(set(self.targets + other_dist.targets)) union_rank_from_target = {t: r for (r, t) in enumerate(all_targets)} source_ranks = range(len(self.targets)) source_targets = self.targets union_rank_from_source_rank = {sr: union_rank_from_target[st] for (sr, st) in zip(source_ranks, source_targets)} dest_ranks = range(len(other_dist.targets)) dest_targets = other_dist.targets union_rank_from_dest_rank = {sr: union_rank_from_target[st] for (sr, st) in zip(dest_ranks, dest_targets)} source_ddpr = self.get_dim_data_per_rank() dest_ddpr = other_dist.get_dim_data_per_rank() source_dest_pairs = product(source_ddpr, dest_ddpr) if self.shape == other_dist.shape: _intersection = Distribution._redist_intersection_same_shape else: _intersection = Distribution._redist_intersection_reshape plan = [] for source_dd, dest_dd in source_dest_pairs: intersections = _intersection(source_dd, dest_dd) if intersections and all(i for i in intersections): source_coords = tuple(dd['proc_grid_rank'] for dd in source_dd) source_rank = self.rank_from_coords[source_coords] dest_coords = tuple(dd['proc_grid_rank'] for dd in dest_dd) dest_rank = other_dist.rank_from_coords[dest_coords] plan.append({ 'source_rank': union_rank_from_source_rank[source_rank], 'dest_rank': union_rank_from_dest_rank[dest_rank], 'indices': intersections, } ) return plan # ---------------------------------------------------------------------------- # Redistribution helper functions. # ---------------------------------------------------------------------------- def global_flat_indices(dim_data): """ Return a list of tuples of indices into the flattened global array. Parameters ---------- dim_data: dimension dictionary. Returns ------- list of 2-tuples of ints. Each tuple is a (start, stop) interval into the flattened global array. 
All selected ranges comprise the indices for this dim_data's sub-array. """ # TODO: FIXME: can be optimized when the last dimension is 'n'. for dd in dim_data: if dd['dist_type'] == 'n': dd['start'] = 0 dd['stop'] = dd['size'] glb_shape = tuple(dd['size'] for dd in dim_data) glb_strides = strides_from_shape(glb_shape) ranges = [range(dd['start'], dd['stop']) for dd in dim_data[:-1]] start_ranges = ranges + [[dim_data[-1]['start']]] stop_ranges = ranges + [[dim_data[-1]['stop']]] def flatten(idx): return sum(a * b for (a, b) in zip(idx, glb_strides)) starts = map(flatten, product(*start_ranges)) stops = map(flatten, product(*stop_ranges)) intervals = zip(starts, stops) return condense(intervals) def _global_flat_indices_intersection(gfis0, gfis1): intersections = filter(None, [tuple_intersection(a, b) for (a, b) in product(gfis0, gfis1)]) return [i[:2] for i in intersections]
bsd-3-clause
JianpingZeng/xcc
xcc/test/juliet/testcases/CWE122_Heap_Based_Buffer_Overflow/s09/CWE122_Heap_Based_Buffer_Overflow__c_CWE806_char_memmove_84_goodG2B.cpp
1720
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE122_Heap_Based_Buffer_Overflow__c_CWE806_char_memmove_84_goodG2B.cpp Label Definition File: CWE122_Heap_Based_Buffer_Overflow__c_CWE806.label.xml Template File: sources-sink-84_goodG2B.tmpl.cpp */ /* * @description * CWE: 122 Heap Based Buffer Overflow * BadSource: Initialize data as a large string * GoodSource: Initialize data as a small string * Sinks: memmove * BadSink : Copy data to string using memmove * Flow Variant: 84 Data flow: data passed to class constructor and destructor by declaring the class object on the heap and deleting it after use * * */ #ifndef OMITGOOD #include "std_testcase.h" #include "CWE122_Heap_Based_Buffer_Overflow__c_CWE806_char_memmove_84.h" namespace CWE122_Heap_Based_Buffer_Overflow__c_CWE806_char_memmove_84 { CWE122_Heap_Based_Buffer_Overflow__c_CWE806_char_memmove_84_goodG2B::CWE122_Heap_Based_Buffer_Overflow__c_CWE806_char_memmove_84_goodG2B(char * dataCopy) { data = dataCopy; /* FIX: Initialize data as a small buffer that as small or smaller than the small buffer used in the sink */ memset(data, 'A', 50-1); /* fill with 'A's */ data[50-1] = '\0'; /* null terminate */ } CWE122_Heap_Based_Buffer_Overflow__c_CWE806_char_memmove_84_goodG2B::~CWE122_Heap_Based_Buffer_Overflow__c_CWE806_char_memmove_84_goodG2B() { { char dest[50] = ""; /* POTENTIAL FLAW: Possible buffer overflow if data is larger than dest */ memmove(dest, data, strlen(data)*sizeof(char)); dest[50-1] = '\0'; /* Ensure the destination buffer is null terminated */ printLine(data); free(data); } } } #endif /* OMITGOOD */
bsd-3-clause
61ds/drish
_protected/backend/controllers/GlobalSettingController.php
13650
<?php namespace backend\controllers; use Yii; use common\models\Globalsetting; use common\models\CategoryMap; use common\models\GlobalSettingSearch; use common\models\ProductMap; use common\models\Products; use common\models\ProductsParent; use common\models\Brand; use common\models\Category; use yii\web\Controller; use yii\web\NotFoundHttpException; use yii\filters\VerbFilter; use yii\helpers\Url; use yii\imagine\Image; use kartik\file\FileInput; use yii\web\UploadedFile; use yii\filters\AccessControl; use common\models\CsvImport; use common\models\Attributes; use common\models\AttributeValues; use common\models\AttributesSearch; use yii\helpers\ArrayHelper; /** * GlobalSettingController implements the CRUD actions for GlobalSetting model. */ class GlobalSettingController extends BackendController { public function behaviors() { $behaviors = parent::behaviors(); return $behaviors; } /** * Lists all GlobalSetting models. * @return mixed */ public function actionIndex() { $searchModel = new GlobalSettingSearch(); $dataProvider = $searchModel->search(Yii::$app->request->queryParams); return $this->render('index', [ 'searchModel' => $searchModel, 'dataProvider' => $dataProvider, ]); } /** * Displays a single GlobalSetting model. * @param integer $id * @return mixed */ public function actionView($id) { /* return $this->render('view', [ 'model' => $this->findModel($id), ]); */ return $this->redirect(['update', 'id' => 1 ]); } /** * Creates a new GlobalSetting model. * If creation is successful, the browser will be redirected to the 'view' page. 
* @return mixed */ public function actionCreate() { $model = new Globalsetting(); $image = UploadedFile::getInstance($model, 'fevicon_icon'); $himage = UploadedFile::getInstance($model, 'logo'); if ($model->load(Yii::$app->request->post())) { if($image != '') { $path = ''; $mimage= $model->updateImage($image, $model->id,$path); $mimage1= $model->updateImage($himage, $model->id,$path); if($mimage==true && $mimage1==true) $model->fevicon_icon = $mimage; $model->logo = $mimage1; } $model->save(); //$imagine->thumbnail($uploadLarge, 1000, 400)->save($filename, ['quality' => 80]); return $this->redirect(['view', 'id' => $model->id]); } else { /* return $this->render('create', [ 'model' => $model, ]); */ return $this->redirect(['update', 'id' => 1 ]); } } /** * Updates an existing GlobalSetting model. * If update is successful, the browser will be redirected to the 'view' page. * @param integer $id * @return mixed */ public function actionUpdate($id) { $model = $this->findModel($id); $image = UploadedFile::getInstance($model, 'fevicon_icon'); $himage = UploadedFile::getInstance($model, 'logo'); $himage_inner = UploadedFile::getInstance($model, 'innerlogo'); $himage_video = UploadedFile::getInstance($model, 'videoPath'); if ($model->load(Yii::$app->request->post())) { $path = ''; $asdlogo = $model->getLogoFevicon(); foreach($asdlogo as $asdlogo1 ){ $fevicon = $asdlogo1->fevicon_icon; $logo = $asdlogo1->logo; $innerlogo = $asdlogo1->innerlogo; } if($image != '') { $imagine = new Image(); $mimage = 'fevicon.'. 
$image->extension; $model->fevicon_icon = "/uploads/".$mimage; } else{ $model->fevicon_icon = $fevicon; } if($himage != '') { $mimage1= $model->updateImage($himage, 1,$path,'logo'); $model->logo = "/uploads/site/medium/".$mimage1; } else{ $model->logo = $logo; } if($himage_inner != '') { $mimage2= $model->updateImage($himage_inner, 1,$path,'innerlogo'); $model->innerlogo = "/uploads/site/medium/".$mimage2; } else{ $model->innerlogo = $innerlogo; } $model->save(); //$imagine->thumbnail($uploadLarge, 1000, 400)->save($filename, ['quality' => 80]); return $this->redirect(['update', 'id' => $model->id , 'save' => 'yes']); } else { return $this->render('update', [ 'model' => $model, ]); } } /** * Deletes an existing GlobalSetting model. * If deletion is successful, the browser will be redirected to the 'index' page. * @param integer $id * @return mixed */ public function actionDelete($id) { $this->findModel($id)->delete(); return $this->redirect(['index']); } /** * Finds the GlobalSetting model based on its primary key value. * If the model is not found, a 404 HTTP exception will be thrown. * @param integer $id * @return GlobalSetting the loaded model * @throws NotFoundHttpException if the model cannot be found */ protected function findModel($id) { if (($model = Globalsetting::findOne($id)) !== null) { return $model; } else { throw new NotFoundHttpException('The requested page does not exist.'); } } public function actionUpload(){ $model = new CsvImport; if($model->load(Yii::$app->request->post())){ $feed_type = $model->feed_type; $language = $model->language; $entity_type = 9; $file = UploadedFile::getInstance($model,'file'); $filename = 'Data.'.$file->extension; $upload = $file->saveAs('uploads/tmp/'.$filename); if($upload){ define('CSV_PATH','uploads/tmp/'); $csv_file = CSV_PATH . 
$filename; $catmodel = new CategoryMap(); $categories = $catmodel->getcatImport($csv_file,$feed_type); if(count($categories) > 0){ Yii::$app->getSession()->setFlash('danger', Yii::t('app', "Before product import all categories must map.Please map all the catgories given bellow.")); return $this->render('categorymap',['model'=>$catmodel,'categories'=>$categories,'feed_type'=>$feed_type]); } if (($handle = fopen($csv_file, "r")) !== FALSE) { $i = 0; $mapdata = ProductMap::find()->where(['feed_id'=>$feed_type])->asArray()->one(); $mapvalues = array_values($mapdata); unset($mapvalues[0]); $delimiter = $catmodel->detectDelimiter($csv_file); while (($rows = fgetcsv($handle,11000,$delimiter,'"',$delimiter)) !== FALSE) { $count = count($rows); $row = $rows; $err = array(); if($i == 0){ $colname = array(); foreach($row as $key => $title){ $attribute = Attributes::findOne(['name'=>$title,'feed_type'=>$feed_type]); if($attribute){ $colname[$key] = $attribute->id; }else{ $attribute = new Attributes(); $attribute->name = $title; $attribute->entity_id = $entity_type; $attribute->feed_type = $feed_type; if($attribute->save()){ $colname[$key] = $attribute->id; }else{ $err[] = $title; continue; } } } $i++; continue; } foreach($row as $key => $row_data){ if(count($row_data) != $count) $err[] = $row_data; $prod_val[$colname[$key]] = $row_data; } $productParent = Products::findOne(['offerid'=>$prod_val[$mapvalues[2]],'feed_id'=>$feed_type]); if($productParent){ $product_parent_id = $productParent->product_id; }else{ $productparentmodel = new ProductsParent(); $productparentmodel->save(); $product_parent_id = $productparentmodel->id; } $productmodel = Products::findOne(['url'=>$prod_val[$mapvalues[1]],'feed_id'=>$feed_type,'language'=>$language]); if($productmodel){ }else{ $productmodel = new Products(); } $prod_val[$mapvalues[6]] = urlencode($prod_val[$mapvalues[6]]); $categoryid = CategoryMap::findOne(['name'=>$prod_val[$mapvalues[6]],'feed_type'=>$feed_type]); if(!$categoryid){ 
$categoryid = 8622; }else{ $categoryid = $categoryid->cat_id; } $productmodel->feed_id = $feed_type; $productmodel->url = $prod_val[$mapvalues[1]]; $productmodel->offerid = $prod_val[$mapvalues[2]]; $productmodel->language = $language; $productmodel->product_id = $product_parent_id; if(isset($prod_val[$mapvalues[2]])) $productmodel->offerid = $prod_val[$mapvalues[2]]; $productmodel->description = $prod_val[$mapvalues[3]]; $productmodel->image = $prod_val[$mapvalues[4]]; $productmodel->price = $prod_val[$mapvalues[5]]; $productmodel->category = $categoryid; $productmodel->name = $prod_val[$mapvalues[7]]; if($prod_val[$mapvalues[8]] != ''){ $brandmodel = Brand::findOne(['name'=>$prod_val[$mapvalues[8]]]); if($brandmodel){ }else{ $brandmodel = new Brand(); $brandmodel->name = $prod_val[$mapvalues[8]]; $brandmodel->save(); } $vendorid = $brandmodel->id; }else{ $vendorid = 1; } $productmodel->vendor = $vendorid; $productmodel->stock = $prod_val[$mapvalues[9]]; $productmodel->sku = $prod_val[$mapvalues[10]]; $productmodel->global_id = $prod_val[$mapvalues[11]]; $productmodel->status = 1; if($productmodel->save()){ for($k=1;$k<12;$k++) unset($prod_val[$mapvalues[$k]]); foreach($prod_val as $key=>$prodval){ //$attrmodel = AttributeValues::findOne(['attr_id'=>$key,'product_child_id'=>$productmodel->id]); $attrmodel = new AttributeValues; /* if($attrmodel){ }else{ $attrmodel = new AttributeValues(); } */ $attrmodel->attr_id = $key; $attrmodel->product_child_id = 1; $attrmodel->entity_id = 9; $attrmodel->value = $prodval; $attrmodel->status = 1; $attrmodel->save(); } }else{ print_r($productmodel->getErrors()); die; continue; } } fclose($handle); } unlink('uploads/tmp/'.$filename); Yii::$app->getSession()->setFlash('success', Yii::t('app', "All products imported successfully.")); return $this->redirect(['products']); } }else{ $languages = array(); $languages[0]['id'] = 'en'; $languages[0]['name'] = 'english'; $languages[1]['id'] = 'fr'; $languages[1]['name'] = 'french'; 
$languages[2]['id'] = 'de'; $languages[2]['name'] = 'dutch'; $languages = ArrayHelper::map($languages,'id','name'); return $this->render('upload',['model'=>$model,'languages'=> $languages]); } } public function actionCategoryMap($id=0){ if(Yii::$app->request->post()){ $catids = array_filter(Yii::$app->request->post("catid")); $catnames = array_filter(Yii::$app->request->post("catname")); if($id == 0) $id = Yii::$app->request->post("feed_type"); foreach($catids as $key => $cat){ $model = new CategoryMap(); $model->name = $catnames[$key]; $model->cat_id = $cat; $model->feed_type = $id; $model->save(); } Yii::$app->getSession()->setFlash('success', Yii::t('app', "Congratulations! all category mapped successfully.Now you can import products from this feed.")); return $this->redirect(['global-setting/upload']); } } public function actionCategoryImport($id=0){ $model = new CsvImport; if($model->load(Yii::$app->request->post())){ $feed_type = $model->feed_type; $entity_type = 9; $file = UploadedFile::getInstance($model,'file'); $filename = 'Data.'.$file->extension; $upload = $file->saveAs('uploads/tmp/'.$filename); if($upload){ define('CSV_PATH','uploads/tmp/'); $csv_file = CSV_PATH . 
$filename; $catmodel = new CategoryMap(); $categories = $catmodel->getcatImport($csv_file,$feed_type); unlink('uploads/tmp/'.$filename); if(count($categories) > 0){ return $this->render('categorymap',['model'=>$catmodel,'categories'=>$categories,'feed_type'=>$feed_type]); } Yii::$app->getSession()->setFlash('success', Yii::t('app', "All categories in this feed already mapped.Now you can import products from this feed.")); return $this->redirect(['global-setting/upload']); } }else{ $model->feed_type = $id; return $this->render('uploadcategory',['model'=>$model]); } } public function actionFieldsImport($id=0) { $model = new CsvImport; if($model->load(Yii::$app->request->post())){ $feed_type = $model->feed_type; $entity_type = 9; $file = UploadedFile::getInstance($model,'file'); $filename = 'Data.'.$file->extension; $upload = $file->saveAs('uploads/tmp/'.$filename); if($upload){ define('CSV_PATH','uploads/tmp/'); $csv_file = CSV_PATH . $filename; $catmodel = new CategoryMap(); $categories = $catmodel->getFieldImport($csv_file,$feed_type); unlink('uploads/tmp/'.$filename); Yii::$app->getSession()->setFlash('success', Yii::t('app', "All fields imported successfully.")); return $this->redirect(['feed-type/fields','id'=>$id]); } }else{ $model->feed_type = $id; return $this->render('uploadcategory',['model'=>$model]); } } }
bsd-3-clause
boundlessgeo/GeoGig
src/cli/src/main/java/org/locationtech/geogig/cli/porcelain/RemoteRemove.java
2311
/* Copyright (c) 2012-2014 Boundless and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * https://www.eclipse.org/org/documents/edl-v10.html * * Contributors: * Johnathan Garrett (LMN Solutions) - initial implementation */ package org.locationtech.geogig.cli.porcelain; import java.util.List; import org.locationtech.geogig.api.porcelain.RemoteException; import org.locationtech.geogig.api.porcelain.RemoteRemoveOp; import org.locationtech.geogig.cli.AbstractCommand; import org.locationtech.geogig.cli.CLICommand; import org.locationtech.geogig.cli.CommandFailedException; import org.locationtech.geogig.cli.GeogigCLI; import org.locationtech.geogig.cli.annotation.ObjectDatabaseReadOnly; import org.locationtech.geogig.cli.annotation.StagingDatabaseReadOnly; import com.beust.jcommander.Parameter; import com.beust.jcommander.Parameters; /** * Remove the remote named <name>. All remote-tracking branches and configuration settings for the * remote are removed. * * <p> * CLI proxy for {@link RemoteRemoveOp} * <p> * Usage: * <ul> * <li> {@code geogig rm <name>} * </ul> * * @see RemoteRemoveOp */ @ObjectDatabaseReadOnly @StagingDatabaseReadOnly @Parameters(commandNames = "rm", commandDescription = "Remove a remote from the repository") public class RemoteRemove extends AbstractCommand implements CLICommand { @Parameter(description = "<name>") private List<String> params; /** * Executes the remote remove command. 
*/ @Override public void runInternal(GeogigCLI cli) { if (params == null || params.size() != 1) { printUsage(cli); throw new CommandFailedException(); } try { cli.getGeogig().command(RemoteRemoveOp.class).setName(params.get(0)).call(); } catch (RemoteException e) { switch (e.statusCode) { case REMOTE_NOT_FOUND: throw new CommandFailedException("Could not find a remote called '" + params.get(0) + "'.", e); default: throw new CommandFailedException(e.getMessage(), e); } } } }
bsd-3-clause
google/crosvm
acpi_tables/src/sdt.rs
4700
// Copyright 2020 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use std::fs::File; use std::io::{ErrorKind, Read, Result}; use std::path::Path; use data_model::DataInit; /// SDT represents for System Description Table. The structure SDT is a /// generic format for creating various ACPI tables like DSDT/FADT/MADT. #[derive(Clone)] pub struct SDT { data: Vec<u8>, } pub const HEADER_LEN: u32 = 36; const LENGTH_OFFSET: usize = 4; const CHECKSUM_OFFSET: usize = 9; #[allow(clippy::len_without_is_empty)] impl SDT { /// Set up the ACPI table header at the front of the SDT. /// The arguments correspond to the elements in the ACPI /// table headers. pub fn new( signature: [u8; 4], length: u32, revision: u8, oem_id: [u8; 6], oem_table: [u8; 8], oem_revision: u32, ) -> Self { // The length represents for the length of the entire table // which includes this header. And the header is 36 bytes, so // lenght should be >= 36. For the case who gives a number less // than the header len, use the header len directly. let len: u32 = if length < HEADER_LEN { HEADER_LEN } else { length }; let mut data = Vec::with_capacity(length as usize); data.extend_from_slice(&signature); data.extend_from_slice(&len.to_le_bytes()); data.push(revision); data.push(0); // checksum data.extend_from_slice(&oem_id); data.extend_from_slice(&oem_table); data.extend_from_slice(&oem_revision.to_le_bytes()); data.extend_from_slice(b"CROS"); data.extend_from_slice(&0u32.to_le_bytes()); data.resize(length as usize, 0); let mut sdt = SDT { data }; sdt.update_checksum(); sdt } /// Set up the ACPI table from file content. Verify file checksum. 
pub fn from_file(path: &Path) -> Result<Self> { let mut file = File::open(path)?; let mut data = Vec::new(); file.read_to_end(&mut data)?; let checksum = super::generate_checksum(data.as_slice()); if checksum == 0 { Ok(SDT { data }) } else { Err(ErrorKind::InvalidData.into()) } } pub fn is_signature(&self, signature: &[u8; 4]) -> bool { self.data[0..4] == *signature } fn update_checksum(&mut self) { self.data[CHECKSUM_OFFSET] = 0; let checksum = super::generate_checksum(self.data.as_slice()); self.data[CHECKSUM_OFFSET] = checksum; } pub fn as_slice(&self) -> &[u8] { self.data.as_slice() } pub fn append<T: DataInit>(&mut self, value: T) { self.data.extend_from_slice(value.as_slice()); self.write(LENGTH_OFFSET, self.data.len() as u32); } pub fn append_slice(&mut self, value: &[u8]) { self.data.extend_from_slice(value); self.write(LENGTH_OFFSET, self.data.len() as u32); } /// Write a value at the given offset pub fn write<T: DataInit>(&mut self, offset: usize, value: T) { let value_len = std::mem::size_of::<T>(); if (offset + value_len) > self.data.len() { return; } self.data[offset..offset + value_len].copy_from_slice(value.as_slice()); self.update_checksum(); } pub fn len(&self) -> usize { self.data.len() } } #[cfg(test)] mod tests { use super::SDT; use std::io::Write; use tempfile::NamedTempFile; #[test] fn test_sdt() { let mut sdt = SDT::new(*b"TEST", 40, 1, *b"CROSVM", *b"TESTTEST", 1); let sum: u8 = sdt .as_slice() .iter() .fold(0u8, |acc, x| acc.wrapping_add(*x)); assert_eq!(sum, 0); sdt.write(36, 0x12345678_u32); let sum: u8 = sdt .as_slice() .iter() .fold(0u8, |acc, x| acc.wrapping_add(*x)); assert_eq!(sum, 0); } #[test] fn test_sdt_read_write() -> Result<(), std::io::Error> { let temp_file = NamedTempFile::new()?; let expected_sdt = SDT::new(*b"TEST", 40, 1, *b"CROSVM", *b"TESTTEST", 1); // Write SDT to file. { let mut writer = temp_file.as_file(); writer.write_all(expected_sdt.as_slice())?; } // Read it back and verify. 
let actual_sdt = SDT::from_file(&temp_file.path().to_path_buf())?; assert!(actual_sdt.is_signature(b"TEST")); assert_eq!(actual_sdt.as_slice(), expected_sdt.as_slice()); Ok(()) } }
bsd-3-clause
aYukiSekiguchi/ACCESS-Chromium
content/browser/webui/web_ui_impl.cc
8116
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/webui/web_ui_impl.h" #include "base/command_line.h" #include "base/json/json_writer.h" #include "base/stl_util.h" #include "base/utf_string_conversions.h" #include "base/values.h" #include "content/browser/child_process_security_policy.h" #include "content/browser/renderer_host/render_process_host_impl.h" #include "content/browser/renderer_host/render_view_host.h" #include "content/browser/tab_contents/tab_contents.h" #include "content/browser/webui/generic_handler.h" #include "content/common/view_messages.h" #include "content/public/browser/web_contents_view.h" #include "content/public/browser/web_ui_controller.h" #include "content/public/common/bindings_policy.h" #include "content/public/common/content_switches.h" using content::WebContents; using content::WebUIController; using content::WebUIMessageHandler; namespace content { const WebUI::TypeID WebUI::kNoWebUI = NULL; // static string16 WebUI::GetJavascriptCall( const std::string& function_name, const std::vector<const Value*>& arg_list) { string16 parameters; std::string json; for (size_t i = 0; i < arg_list.size(); ++i) { if (i > 0) parameters += char16(','); base::JSONWriter::Write(arg_list[i], false, &json); parameters += UTF8ToUTF16(json); } return ASCIIToUTF16(function_name) + char16('(') + parameters + char16(')') + char16(';'); } } WebUIImpl::WebUIImpl(WebContents* contents) : hide_favicon_(false), focus_location_bar_by_default_(false), should_hide_url_(false), link_transition_type_(content::PAGE_TRANSITION_LINK), bindings_(content::BINDINGS_POLICY_WEB_UI), web_contents_(contents) { DCHECK(contents); AddMessageHandler(new GenericHandler()); } WebUIImpl::~WebUIImpl() { // Delete the controller first, since it may also be keeping a pointer to some // of the handlers and can call them at destruction. 
controller_.reset(); STLDeleteContainerPointers(handlers_.begin(), handlers_.end()); } // WebUIImpl, public: ---------------------------------------------------------- bool WebUIImpl::OnMessageReceived(const IPC::Message& message) { bool handled = true; IPC_BEGIN_MESSAGE_MAP(WebUIImpl, message) IPC_MESSAGE_HANDLER(ViewHostMsg_WebUISend, OnWebUISend) IPC_MESSAGE_UNHANDLED(handled = false) IPC_END_MESSAGE_MAP() return handled; } void WebUIImpl::OnWebUISend(const GURL& source_url, const std::string& message, const ListValue& args) { if (!ChildProcessSecurityPolicy::GetInstance()-> HasWebUIBindings(web_contents_->GetRenderProcessHost()->GetID())) { NOTREACHED() << "Blocked unauthorized use of WebUIBindings."; return; } if (controller_->OverrideHandleWebUIMessage(source_url, message,args)) return; // Look up the callback for this message. MessageCallbackMap::const_iterator callback = message_callbacks_.find(message); if (callback != message_callbacks_.end()) { // Forward this message and content on. callback->second.Run(&args); } } void WebUIImpl::RenderViewCreated(RenderViewHost* render_view_host) { controller_->RenderViewCreated(render_view_host); // Do not attempt to set the toolkit property if WebUI is not enabled, e.g., // the bookmarks manager page. if (!(bindings_ & content::BINDINGS_POLICY_WEB_UI)) return; #if defined(TOOLKIT_VIEWS) render_view_host->SetWebUIProperty("toolkit", "views"); #elif defined(TOOLKIT_GTK) render_view_host->SetWebUIProperty("toolkit", "GTK"); #endif // defined(TOOLKIT_VIEWS) // Let the WebUI know that we're looking for UI that's optimized for touch // input. // TODO(rbyers) Figure out the right model for enabling touch-optimized UI // (http://crbug.com/105380). 
if (CommandLine::ForCurrentProcess()->HasSwitch(switches::kTouchOptimizedUI)) render_view_host->SetWebUIProperty("touchOptimized", "true"); } WebContents* WebUIImpl::GetWebContents() const { return web_contents_; } bool WebUIImpl::ShouldHideFavicon() const { return hide_favicon_; } void WebUIImpl::HideFavicon() { hide_favicon_ = true; } bool WebUIImpl::ShouldFocusLocationBarByDefault() const { return focus_location_bar_by_default_; } void WebUIImpl::FocusLocationBarByDefault() { focus_location_bar_by_default_ = true; } bool WebUIImpl::ShouldHideURL() const { return should_hide_url_; } void WebUIImpl::HideURL() { should_hide_url_ = true; } const string16& WebUIImpl::GetOverriddenTitle() const { return overridden_title_; } void WebUIImpl::OverrideTitle(const string16& title) { overridden_title_ = title; } content::PageTransition WebUIImpl::GetLinkTransitionType() const { return link_transition_type_; } void WebUIImpl::SetLinkTransitionType(content::PageTransition type) { link_transition_type_ = type; } int WebUIImpl::GetBindings() const { return bindings_; } void WebUIImpl::SetBindings(int bindings) { bindings_ = bindings; } void WebUIImpl::SetFrameXPath(const std::string& xpath) { frame_xpath_ = xpath; } WebUIController* WebUIImpl::GetController() const { return controller_.get(); } void WebUIImpl::SetController(WebUIController* controller) { controller_.reset(controller); } void WebUIImpl::CallJavascriptFunction(const std::string& function_name) { DCHECK(IsStringASCII(function_name)); string16 javascript = ASCIIToUTF16(function_name + "();"); ExecuteJavascript(javascript); } void WebUIImpl::CallJavascriptFunction(const std::string& function_name, const Value& arg) { DCHECK(IsStringASCII(function_name)); std::vector<const Value*> args; args.push_back(&arg); ExecuteJavascript(GetJavascriptCall(function_name, args)); } void WebUIImpl::CallJavascriptFunction( const std::string& function_name, const Value& arg1, const Value& arg2) { DCHECK(IsStringASCII(function_name)); 
std::vector<const Value*> args; args.push_back(&arg1); args.push_back(&arg2); ExecuteJavascript(GetJavascriptCall(function_name, args)); } void WebUIImpl::CallJavascriptFunction( const std::string& function_name, const Value& arg1, const Value& arg2, const Value& arg3) { DCHECK(IsStringASCII(function_name)); std::vector<const Value*> args; args.push_back(&arg1); args.push_back(&arg2); args.push_back(&arg3); ExecuteJavascript(GetJavascriptCall(function_name, args)); } void WebUIImpl::CallJavascriptFunction( const std::string& function_name, const Value& arg1, const Value& arg2, const Value& arg3, const Value& arg4) { DCHECK(IsStringASCII(function_name)); std::vector<const Value*> args; args.push_back(&arg1); args.push_back(&arg2); args.push_back(&arg3); args.push_back(&arg4); ExecuteJavascript(GetJavascriptCall(function_name, args)); } void WebUIImpl::CallJavascriptFunction( const std::string& function_name, const std::vector<const Value*>& args) { DCHECK(IsStringASCII(function_name)); ExecuteJavascript(GetJavascriptCall(function_name, args)); } void WebUIImpl::RegisterMessageCallback(const std::string &message, const MessageCallback& callback) { message_callbacks_.insert(std::make_pair(message, callback)); } void WebUIImpl::ProcessWebUIMessage(const GURL& source_url, const std::string& message, const base::ListValue& args) { OnWebUISend(source_url, message, args); } // WebUIImpl, protected: ------------------------------------------------------- void WebUIImpl::AddMessageHandler(WebUIMessageHandler* handler) { DCHECK(!handler->web_ui()); handler->set_web_ui(this); handler->RegisterMessages(); handlers_.push_back(handler); } void WebUIImpl::ExecuteJavascript(const string16& javascript) { web_contents_->GetRenderViewHost()->ExecuteJavascriptInWebFrame( ASCIIToUTF16(frame_xpath_), javascript); }
bsd-3-clause
nuodb/ruby-activerecord-nuodb-adapter
test/models/reader.rb
651
# Test fixture models exercising belongs_to associations over the shared
# "readers" join table (post_id / person_id), including :inverse_of,
# custom :class_name/:foreign_key, mass-assignment protection, and a
# default scope.

class Reader < ActiveRecord::Base
  belongs_to :post
  belongs_to :person, :inverse_of => :readers
  # Second association over the same person_id column; its inverse is the
  # singular Person#reader association.
  belongs_to :single_person, :class_name => 'Person',
             :foreign_key => :person_id, :inverse_of => :reader
end

class SecureReader < ActiveRecord::Base
  # Reuses the readers table under a different model name.
  self.table_name = "readers"

  belongs_to :secure_post, :class_name => "Post", :foreign_key => "post_id"
  belongs_to :secure_person, :inverse_of => :secure_readers,
             :class_name => "Person", :foreign_key => "person_id"

  # Whitelist nothing: every mass-assignment attempt is rejected.
  attr_accessible nil
end

class LazyReader < ActiveRecord::Base
  self.table_name = 'readers'
  # Only rows flagged skimmer=true are visible by default.
  # NOTE(review): eager-evaluated Rails 3 form; Rails 4+ requires the
  # lambda form `default_scope { where(:skimmer => true) }`.
  default_scope where(:skimmer => true)

  belongs_to :post
  belongs_to :person
end
bsd-3-clause
alexsavio/scikit-learn
sklearn/metrics/tests/test_pairwise.py
27309
import numpy as np from numpy import linalg from scipy.sparse import dok_matrix, csr_matrix, issparse from scipy.spatial.distance import cosine, cityblock, minkowski, wminkowski from sklearn.utils.testing import assert_greater from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_almost_equal from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_raises from sklearn.utils.testing import assert_raises_regexp from sklearn.utils.testing import assert_true from sklearn.utils.testing import ignore_warnings from sklearn.externals.six import iteritems from sklearn.metrics.pairwise import euclidean_distances from sklearn.metrics.pairwise import manhattan_distances from sklearn.metrics.pairwise import linear_kernel from sklearn.metrics.pairwise import chi2_kernel, additive_chi2_kernel from sklearn.metrics.pairwise import polynomial_kernel from sklearn.metrics.pairwise import rbf_kernel from sklearn.metrics.pairwise import laplacian_kernel from sklearn.metrics.pairwise import sigmoid_kernel from sklearn.metrics.pairwise import cosine_similarity from sklearn.metrics.pairwise import cosine_distances from sklearn.metrics.pairwise import pairwise_distances from sklearn.metrics.pairwise import pairwise_distances_argmin_min from sklearn.metrics.pairwise import pairwise_distances_argmin from sklearn.metrics.pairwise import pairwise_kernels from sklearn.metrics.pairwise import PAIRWISE_KERNEL_FUNCTIONS from sklearn.metrics.pairwise import PAIRWISE_DISTANCE_FUNCTIONS from sklearn.metrics.pairwise import PAIRWISE_BOOLEAN_FUNCTIONS from sklearn.metrics.pairwise import PAIRED_DISTANCES from sklearn.metrics.pairwise import check_pairwise_arrays from sklearn.metrics.pairwise import check_paired_arrays from sklearn.metrics.pairwise import paired_distances from sklearn.metrics.pairwise import paired_euclidean_distances from sklearn.metrics.pairwise import 
paired_manhattan_distances from sklearn.preprocessing import normalize from sklearn.exceptions import DataConversionWarning def test_pairwise_distances(): # Test the pairwise_distance helper function. rng = np.random.RandomState(0) # Euclidean distance should be equivalent to calling the function. X = rng.random_sample((5, 4)) S = pairwise_distances(X, metric="euclidean") S2 = euclidean_distances(X) assert_array_almost_equal(S, S2) # Euclidean distance, with Y != X. Y = rng.random_sample((2, 4)) S = pairwise_distances(X, Y, metric="euclidean") S2 = euclidean_distances(X, Y) assert_array_almost_equal(S, S2) # Test with tuples as X and Y X_tuples = tuple([tuple([v for v in row]) for row in X]) Y_tuples = tuple([tuple([v for v in row]) for row in Y]) S2 = pairwise_distances(X_tuples, Y_tuples, metric="euclidean") assert_array_almost_equal(S, S2) # "cityblock" uses scikit-learn metric, cityblock (function) is # scipy.spatial. S = pairwise_distances(X, metric="cityblock") S2 = pairwise_distances(X, metric=cityblock) assert_equal(S.shape[0], S.shape[1]) assert_equal(S.shape[0], X.shape[0]) assert_array_almost_equal(S, S2) # The manhattan metric should be equivalent to cityblock. 
S = pairwise_distances(X, Y, metric="manhattan") S2 = pairwise_distances(X, Y, metric=cityblock) assert_equal(S.shape[0], X.shape[0]) assert_equal(S.shape[1], Y.shape[0]) assert_array_almost_equal(S, S2) # Low-level function for manhattan can divide in blocks to avoid # using too much memory during the broadcasting S3 = manhattan_distances(X, Y, size_threshold=10) assert_array_almost_equal(S, S3) # Test cosine as a string metric versus cosine callable # The string "cosine" uses sklearn.metric, # while the function cosine is scipy.spatial S = pairwise_distances(X, Y, metric="cosine") S2 = pairwise_distances(X, Y, metric=cosine) assert_equal(S.shape[0], X.shape[0]) assert_equal(S.shape[1], Y.shape[0]) assert_array_almost_equal(S, S2) # Test with sparse X and Y, # currently only supported for Euclidean, L1 and cosine. X_sparse = csr_matrix(X) Y_sparse = csr_matrix(Y) S = pairwise_distances(X_sparse, Y_sparse, metric="euclidean") S2 = euclidean_distances(X_sparse, Y_sparse) assert_array_almost_equal(S, S2) S = pairwise_distances(X_sparse, Y_sparse, metric="cosine") S2 = cosine_distances(X_sparse, Y_sparse) assert_array_almost_equal(S, S2) S = pairwise_distances(X_sparse, Y_sparse.tocsc(), metric="manhattan") S2 = manhattan_distances(X_sparse.tobsr(), Y_sparse.tocoo()) assert_array_almost_equal(S, S2) S2 = manhattan_distances(X, Y) assert_array_almost_equal(S, S2) # Test with scipy.spatial.distance metric, with a kwd kwds = {"p": 2.0} S = pairwise_distances(X, Y, metric="minkowski", **kwds) S2 = pairwise_distances(X, Y, metric=minkowski, **kwds) assert_array_almost_equal(S, S2) # same with Y = None kwds = {"p": 2.0} S = pairwise_distances(X, metric="minkowski", **kwds) S2 = pairwise_distances(X, metric=minkowski, **kwds) assert_array_almost_equal(S, S2) # Test that scipy distance metrics throw an error if sparse matrix given assert_raises(TypeError, pairwise_distances, X_sparse, metric="minkowski") assert_raises(TypeError, pairwise_distances, X, Y_sparse, 
metric="minkowski") # Test that a value error is raised if the metric is unknown assert_raises(ValueError, pairwise_distances, X, Y, metric="blah") # ignore conversion to boolean in pairwise_distances @ignore_warnings(category=DataConversionWarning) def test_pairwise_boolean_distance(): # test that we convert to boolean arrays for boolean distances rng = np.random.RandomState(0) X = rng.randn(5, 4) Y = X.copy() Y[0, 0] = 1 - Y[0, 0] for metric in PAIRWISE_BOOLEAN_FUNCTIONS: for Z in [Y, None]: res = pairwise_distances(X, Z, metric=metric) res[np.isnan(res)] = 0 assert_true(np.sum(res != 0) == 0) def test_pairwise_precomputed(): for func in [pairwise_distances, pairwise_kernels]: # Test correct shape assert_raises_regexp(ValueError, '.* shape .*', func, np.zeros((5, 3)), metric='precomputed') # with two args assert_raises_regexp(ValueError, '.* shape .*', func, np.zeros((5, 3)), np.zeros((4, 4)), metric='precomputed') # even if shape[1] agrees (although thus second arg is spurious) assert_raises_regexp(ValueError, '.* shape .*', func, np.zeros((5, 3)), np.zeros((4, 3)), metric='precomputed') # Test not copied (if appropriate dtype) S = np.zeros((5, 5)) S2 = func(S, metric="precomputed") assert_true(S is S2) # with two args S = np.zeros((5, 3)) S2 = func(S, np.zeros((3, 3)), metric="precomputed") assert_true(S is S2) # Test always returns float dtype S = func(np.array([[1]], dtype='int'), metric='precomputed') assert_equal('f', S.dtype.kind) # Test converts list to array-like S = func([[1.]], metric='precomputed') assert_true(isinstance(S, np.ndarray)) def check_pairwise_parallel(func, metric, kwds): rng = np.random.RandomState(0) for make_data in (np.array, csr_matrix): X = make_data(rng.random_sample((5, 4))) Y = make_data(rng.random_sample((3, 4))) try: S = func(X, metric=metric, n_jobs=1, **kwds) except (TypeError, ValueError) as exc: # Not all metrics support sparse input # ValueError may be triggered by bad callable if make_data is csr_matrix: 
assert_raises(type(exc), func, X, metric=metric, n_jobs=2, **kwds) continue else: raise S2 = func(X, metric=metric, n_jobs=2, **kwds) assert_array_almost_equal(S, S2) S = func(X, Y, metric=metric, n_jobs=1, **kwds) S2 = func(X, Y, metric=metric, n_jobs=2, **kwds) assert_array_almost_equal(S, S2) def test_pairwise_parallel(): wminkowski_kwds = {'w': np.arange(1, 5).astype('double'), 'p': 1} metrics = [(pairwise_distances, 'euclidean', {}), (pairwise_distances, wminkowski, wminkowski_kwds), (pairwise_distances, 'wminkowski', wminkowski_kwds), (pairwise_kernels, 'polynomial', {'degree': 1}), (pairwise_kernels, callable_rbf_kernel, {'gamma': .1}), ] for func, metric, kwds in metrics: yield check_pairwise_parallel, func, metric, kwds def test_pairwise_callable_nonstrict_metric(): # paired_distances should allow callable metric where metric(x, x) != 0 # Knowing that the callable is a strict metric would allow the diagonal to # be left uncalculated and set to 0. assert_equal(pairwise_distances([[1.]], metric=lambda x, y: 5)[0, 0], 5) def callable_rbf_kernel(x, y, **kwds): # Callable version of pairwise.rbf_kernel. K = rbf_kernel(np.atleast_2d(x), np.atleast_2d(y), **kwds) return K def test_pairwise_kernels(): # Test the pairwise_kernels helper function. rng = np.random.RandomState(0) X = rng.random_sample((5, 4)) Y = rng.random_sample((2, 4)) # Test with all metrics that should be in PAIRWISE_KERNEL_FUNCTIONS. 
test_metrics = ["rbf", "laplacian", "sigmoid", "polynomial", "linear", "chi2", "additive_chi2"] for metric in test_metrics: function = PAIRWISE_KERNEL_FUNCTIONS[metric] # Test with Y=None K1 = pairwise_kernels(X, metric=metric) K2 = function(X) assert_array_almost_equal(K1, K2) # Test with Y=Y K1 = pairwise_kernels(X, Y=Y, metric=metric) K2 = function(X, Y=Y) assert_array_almost_equal(K1, K2) # Test with tuples as X and Y X_tuples = tuple([tuple([v for v in row]) for row in X]) Y_tuples = tuple([tuple([v for v in row]) for row in Y]) K2 = pairwise_kernels(X_tuples, Y_tuples, metric=metric) assert_array_almost_equal(K1, K2) # Test with sparse X and Y X_sparse = csr_matrix(X) Y_sparse = csr_matrix(Y) if metric in ["chi2", "additive_chi2"]: # these don't support sparse matrices yet assert_raises(ValueError, pairwise_kernels, X_sparse, Y=Y_sparse, metric=metric) continue K1 = pairwise_kernels(X_sparse, Y=Y_sparse, metric=metric) assert_array_almost_equal(K1, K2) # Test with a callable function, with given keywords. metric = callable_rbf_kernel kwds = {'gamma': 0.1} K1 = pairwise_kernels(X, Y=Y, metric=metric, **kwds) K2 = rbf_kernel(X, Y=Y, **kwds) assert_array_almost_equal(K1, K2) # callable function, X=Y K1 = pairwise_kernels(X, Y=X, metric=metric, **kwds) K2 = rbf_kernel(X, Y=X, **kwds) assert_array_almost_equal(K1, K2) def test_pairwise_kernels_filter_param(): rng = np.random.RandomState(0) X = rng.random_sample((5, 4)) Y = rng.random_sample((2, 4)) K = rbf_kernel(X, Y, gamma=0.1) params = {"gamma": 0.1, "blabla": ":)"} K2 = pairwise_kernels(X, Y, metric="rbf", filter_params=True, **params) assert_array_almost_equal(K, K2) assert_raises(TypeError, pairwise_kernels, X, Y, "rbf", **params) def test_paired_distances(): # Test the pairwise_distance helper function. rng = np.random.RandomState(0) # Euclidean distance should be equivalent to calling the function. X = rng.random_sample((5, 4)) # Euclidean distance, with Y != X. 
Y = rng.random_sample((5, 4)) for metric, func in iteritems(PAIRED_DISTANCES): S = paired_distances(X, Y, metric=metric) S2 = func(X, Y) assert_array_almost_equal(S, S2) S3 = func(csr_matrix(X), csr_matrix(Y)) assert_array_almost_equal(S, S3) if metric in PAIRWISE_DISTANCE_FUNCTIONS: # Check the pairwise_distances implementation # gives the same value distances = PAIRWISE_DISTANCE_FUNCTIONS[metric](X, Y) distances = np.diag(distances) assert_array_almost_equal(distances, S) # Check the callable implementation S = paired_distances(X, Y, metric='manhattan') S2 = paired_distances(X, Y, metric=lambda x, y: np.abs(x - y).sum(axis=0)) assert_array_almost_equal(S, S2) # Test that a value error is raised when the lengths of X and Y should not # differ Y = rng.random_sample((3, 4)) assert_raises(ValueError, paired_distances, X, Y) def test_pairwise_distances_argmin_min(): # Check pairwise minimum distances computation for any metric X = [[0], [1]] Y = [[-1], [2]] Xsp = dok_matrix(X) Ysp = csr_matrix(Y, dtype=np.float32) # euclidean metric D, E = pairwise_distances_argmin_min(X, Y, metric="euclidean") D2 = pairwise_distances_argmin(X, Y, metric="euclidean") assert_array_almost_equal(D, [0, 1]) assert_array_almost_equal(D2, [0, 1]) assert_array_almost_equal(D, [0, 1]) assert_array_almost_equal(E, [1., 1.]) # sparse matrix case Dsp, Esp = pairwise_distances_argmin_min(Xsp, Ysp, metric="euclidean") assert_array_equal(Dsp, D) assert_array_equal(Esp, E) # We don't want np.matrix here assert_equal(type(Dsp), np.ndarray) assert_equal(type(Esp), np.ndarray) # Non-euclidean scikit-learn metric D, E = pairwise_distances_argmin_min(X, Y, metric="manhattan") D2 = pairwise_distances_argmin(X, Y, metric="manhattan") assert_array_almost_equal(D, [0, 1]) assert_array_almost_equal(D2, [0, 1]) assert_array_almost_equal(E, [1., 1.]) D, E = pairwise_distances_argmin_min(Xsp, Ysp, metric="manhattan") D2 = pairwise_distances_argmin(Xsp, Ysp, metric="manhattan") assert_array_almost_equal(D, [0, 
1]) assert_array_almost_equal(E, [1., 1.]) # Non-euclidean Scipy distance (callable) D, E = pairwise_distances_argmin_min(X, Y, metric=minkowski, metric_kwargs={"p": 2}) assert_array_almost_equal(D, [0, 1]) assert_array_almost_equal(E, [1., 1.]) # Non-euclidean Scipy distance (string) D, E = pairwise_distances_argmin_min(X, Y, metric="minkowski", metric_kwargs={"p": 2}) assert_array_almost_equal(D, [0, 1]) assert_array_almost_equal(E, [1., 1.]) # Compare with naive implementation rng = np.random.RandomState(0) X = rng.randn(97, 149) Y = rng.randn(111, 149) dist = pairwise_distances(X, Y, metric="manhattan") dist_orig_ind = dist.argmin(axis=0) dist_orig_val = dist[dist_orig_ind, range(len(dist_orig_ind))] dist_chunked_ind, dist_chunked_val = pairwise_distances_argmin_min( X, Y, axis=0, metric="manhattan", batch_size=50) np.testing.assert_almost_equal(dist_orig_ind, dist_chunked_ind, decimal=7) np.testing.assert_almost_equal(dist_orig_val, dist_chunked_val, decimal=7) def test_euclidean_distances(): # Check the pairwise Euclidean distances computation X = [[0]] Y = [[1], [2]] D = euclidean_distances(X, Y) assert_array_almost_equal(D, [[1., 2.]]) X = csr_matrix(X) Y = csr_matrix(Y) D = euclidean_distances(X, Y) assert_array_almost_equal(D, [[1., 2.]]) rng = np.random.RandomState(0) X = rng.random_sample((10, 4)) Y = rng.random_sample((20, 4)) X_norm_sq = (X ** 2).sum(axis=1).reshape(1, -1) Y_norm_sq = (Y ** 2).sum(axis=1).reshape(1, -1) # check that we still get the right answers with {X,Y}_norm_squared D1 = euclidean_distances(X, Y) D2 = euclidean_distances(X, Y, X_norm_squared=X_norm_sq) D3 = euclidean_distances(X, Y, Y_norm_squared=Y_norm_sq) D4 = euclidean_distances(X, Y, X_norm_squared=X_norm_sq, Y_norm_squared=Y_norm_sq) assert_array_almost_equal(D2, D1) assert_array_almost_equal(D3, D1) assert_array_almost_equal(D4, D1) # check we get the wrong answer with wrong {X,Y}_norm_squared X_norm_sq *= 0.5 Y_norm_sq *= 0.5 wrong_D = euclidean_distances(X, Y, 
X_norm_squared=np.zeros_like(X_norm_sq), Y_norm_squared=np.zeros_like(Y_norm_sq)) assert_greater(np.max(np.abs(wrong_D - D1)), .01) def test_cosine_distances(): # Check the pairwise Cosine distances computation rng = np.random.RandomState(1337) x = np.abs(rng.rand(910)) XA = np.vstack([x, x]) D = cosine_distances(XA) assert_array_almost_equal(D, [[0., 0.], [0., 0.]]) # check that all elements are in [0, 2] assert_true(np.all(D >= 0.)) assert_true(np.all(D <= 2.)) # check that diagonal elements are equal to 0 assert_array_equal(D[np.diag_indices_from(D)], [0., 0.]) XB = np.vstack([x, -x]) D2 = cosine_distances(XB) # check that all elements are in [0, 2] assert_true(np.all(D2 >= 0.)) assert_true(np.all(D2 <= 2.)) # check that diagonal elements are equal to 0 and non diagonal to 2 assert_array_equal(D2, [[0., 2.], [2., 0.]]) # check large random matrix X = np.abs(rng.rand(1000, 5000)) D = cosine_distances(X) # check that diagonal elements are equal to 0 assert_array_almost_equal(D[np.diag_indices_from(D)], [0.] 
    # (continuation of the assert begun on the previous line: diagonal of
    # the large random cosine-distance matrix must be ~0)
                              * D.shape[0])
    assert_true(np.all(D >= 0.))
    assert_true(np.all(D <= 2.))


# Paired distances

def test_paired_euclidean_distances():
    # Check the paired Euclidean distances computation
    X = [[0], [0]]
    Y = [[1], [2]]
    D = paired_euclidean_distances(X, Y)
    assert_array_almost_equal(D, [1., 2.])


def test_paired_manhattan_distances():
    # Check the paired manhattan distances computation
    X = [[0], [0]]
    Y = [[1], [2]]
    D = paired_manhattan_distances(X, Y)
    assert_array_almost_equal(D, [1., 2.])


def test_chi_square_kernel():
    # Exhaustive checks of chi2_kernel / additive_chi2_kernel: values,
    # dtype handling, positivity, and input validation.
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    Y = rng.random_sample((10, 4))
    K_add = additive_chi2_kernel(X, Y)
    gamma = 0.1
    K = chi2_kernel(X, Y, gamma=gamma)
    assert_equal(K.dtype, np.float)
    # Compare every entry against a naive per-pair computation.
    for i, x in enumerate(X):
        for j, y in enumerate(Y):
            chi2 = -np.sum((x - y) ** 2 / (x + y))
            chi2_exp = np.exp(gamma * chi2)
            assert_almost_equal(K_add[i, j], chi2)
            assert_almost_equal(K[i, j], chi2_exp)

    # check diagonal is ones for data with itself
    K = chi2_kernel(Y)
    assert_array_equal(np.diag(K), 1)
    # check off-diagonal is < 1 but > 0:
    assert_true(np.all(K > 0))
    assert_true(np.all(K - np.diag(np.diag(K)) < 1))

    # check that float32 is preserved
    X = rng.random_sample((5, 4)).astype(np.float32)
    Y = rng.random_sample((10, 4)).astype(np.float32)
    K = chi2_kernel(X, Y)
    assert_equal(K.dtype, np.float32)

    # check integer type gets converted,
    # check that zeros are handled
    X = rng.random_sample((10, 4)).astype(np.int32)
    K = chi2_kernel(X, X)
    assert_true(np.isfinite(K).all())
    assert_equal(K.dtype, np.float)

    # check that kernel of similar things is greater than dissimilar ones
    X = [[.3, .7], [1., 0]]
    Y = [[0, 1], [.9, .1]]
    K = chi2_kernel(X, Y)
    assert_greater(K[0, 0], K[0, 1])
    assert_greater(K[1, 1], K[1, 0])

    # test negative input
    assert_raises(ValueError, chi2_kernel, [[0, -1]])
    assert_raises(ValueError, chi2_kernel, [[0, -1]], [[-1, -1]])
    assert_raises(ValueError, chi2_kernel, [[0, 1]], [[-1, -1]])

    # different n_features in X and Y
    assert_raises(ValueError, chi2_kernel, [[0, 1]], [[.2, .2, .6]])

    # sparse matrices are rejected by both kernels
    assert_raises(ValueError, chi2_kernel, csr_matrix(X), csr_matrix(Y))
    assert_raises(ValueError, additive_chi2_kernel,
                  csr_matrix(X), csr_matrix(Y))


def test_kernel_symmetry():
    # Valid kernels should be symmetric
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
                   laplacian_kernel, sigmoid_kernel, cosine_similarity):
        K = kernel(X, X)
        assert_array_almost_equal(K, K.T, 15)


def test_kernel_sparse():
    # Each kernel must give identical results on dense and CSR input.
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    X_sparse = csr_matrix(X)
    for kernel in (linear_kernel, polynomial_kernel, rbf_kernel,
                   laplacian_kernel, sigmoid_kernel, cosine_similarity):
        K = kernel(X, X)
        K2 = kernel(X_sparse, X_sparse)
        assert_array_almost_equal(K, K2)


def test_linear_kernel():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    K = linear_kernel(X, X)
    # the diagonal elements of a linear kernel are their squared norm
    # (K.flat[::6] walks the diagonal of the 5x5 matrix)
    assert_array_almost_equal(K.flat[::6], [linalg.norm(x) ** 2 for x in X])


def test_rbf_kernel():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    K = rbf_kernel(X, X)
    # the diagonal elements of a rbf kernel are 1
    assert_array_almost_equal(K.flat[::6], np.ones(5))


def test_laplacian_kernel():
    rng = np.random.RandomState(0)
    X = rng.random_sample((5, 4))
    K = laplacian_kernel(X, X)
    # the diagonal elements of a laplacian kernel are 1
    assert_array_almost_equal(np.diag(K), np.ones(5))

    # off-diagonal elements are < 1 but > 0:
    assert_true(np.all(K > 0))
    assert_true(np.all(K - np.diag(np.diag(K)) < 1))


def test_cosine_similarity_sparse_output():
    # Test if cosine_similarity correctly produces sparse output.
rng = np.random.RandomState(0) X = rng.random_sample((5, 4)) Y = rng.random_sample((3, 4)) Xcsr = csr_matrix(X) Ycsr = csr_matrix(Y) K1 = cosine_similarity(Xcsr, Ycsr, dense_output=False) assert_true(issparse(K1)) K2 = pairwise_kernels(Xcsr, Y=Ycsr, metric="cosine") assert_array_almost_equal(K1.todense(), K2) def test_cosine_similarity(): # Test the cosine_similarity. rng = np.random.RandomState(0) X = rng.random_sample((5, 4)) Y = rng.random_sample((3, 4)) Xcsr = csr_matrix(X) Ycsr = csr_matrix(Y) for X_, Y_ in ((X, None), (X, Y), (Xcsr, None), (Xcsr, Ycsr)): # Test that the cosine is kernel is equal to a linear kernel when data # has been previously normalized by L2-norm. K1 = pairwise_kernels(X_, Y=Y_, metric="cosine") X_ = normalize(X_) if Y_ is not None: Y_ = normalize(Y_) K2 = pairwise_kernels(X_, Y=Y_, metric="linear") assert_array_almost_equal(K1, K2) def test_check_dense_matrices(): # Ensure that pairwise array check works for dense matrices. # Check that if XB is None, XB is returned as reference to XA XA = np.resize(np.arange(40), (5, 8)) XA_checked, XB_checked = check_pairwise_arrays(XA, None) assert_true(XA_checked is XB_checked) assert_array_equal(XA, XA_checked) def test_check_XB_returned(): # Ensure that if XA and XB are given correctly, they return as equal. # Check that if XB is not None, it is returned equal. # Note that the second dimension of XB is the same as XA. XA = np.resize(np.arange(40), (5, 8)) XB = np.resize(np.arange(32), (4, 8)) XA_checked, XB_checked = check_pairwise_arrays(XA, XB) assert_array_equal(XA, XA_checked) assert_array_equal(XB, XB_checked) XB = np.resize(np.arange(40), (5, 8)) XA_checked, XB_checked = check_paired_arrays(XA, XB) assert_array_equal(XA, XA_checked) assert_array_equal(XB, XB_checked) def test_check_different_dimensions(): # Ensure an error is raised if the dimensions are different. 
XA = np.resize(np.arange(45), (5, 9)) XB = np.resize(np.arange(32), (4, 8)) assert_raises(ValueError, check_pairwise_arrays, XA, XB) XB = np.resize(np.arange(4 * 9), (4, 9)) assert_raises(ValueError, check_paired_arrays, XA, XB) def test_check_invalid_dimensions(): # Ensure an error is raised on 1D input arrays. # The modified tests are not 1D. In the old test, the array was internally # converted to 2D anyways XA = np.arange(45).reshape(9, 5) XB = np.arange(32).reshape(4, 8) assert_raises(ValueError, check_pairwise_arrays, XA, XB) XA = np.arange(45).reshape(9, 5) XB = np.arange(32).reshape(4, 8) assert_raises(ValueError, check_pairwise_arrays, XA, XB) def test_check_sparse_arrays(): # Ensures that checks return valid sparse matrices. rng = np.random.RandomState(0) XA = rng.random_sample((5, 4)) XA_sparse = csr_matrix(XA) XB = rng.random_sample((5, 4)) XB_sparse = csr_matrix(XB) XA_checked, XB_checked = check_pairwise_arrays(XA_sparse, XB_sparse) # compare their difference because testing csr matrices for # equality with '==' does not work as expected. assert_true(issparse(XA_checked)) assert_equal(abs(XA_sparse - XA_checked).sum(), 0) assert_true(issparse(XB_checked)) assert_equal(abs(XB_sparse - XB_checked).sum(), 0) XA_checked, XA_2_checked = check_pairwise_arrays(XA_sparse, XA_sparse) assert_true(issparse(XA_checked)) assert_equal(abs(XA_sparse - XA_checked).sum(), 0) assert_true(issparse(XA_2_checked)) assert_equal(abs(XA_2_checked - XA_checked).sum(), 0) def tuplify(X): # Turns a numpy matrix (any n-dimensional array) into tuples. s = X.shape if len(s) > 1: # Tuplify each sub-array in the input. return tuple(tuplify(row) for row in X) else: # Single dimension input, just return tuple of contents. return tuple(r for r in X) def test_check_tuple_input(): # Ensures that checks return valid tuples. 
rng = np.random.RandomState(0) XA = rng.random_sample((5, 4)) XA_tuples = tuplify(XA) XB = rng.random_sample((5, 4)) XB_tuples = tuplify(XB) XA_checked, XB_checked = check_pairwise_arrays(XA_tuples, XB_tuples) assert_array_equal(XA_tuples, XA_checked) assert_array_equal(XB_tuples, XB_checked) def test_check_preserve_type(): # Ensures that type float32 is preserved. XA = np.resize(np.arange(40), (5, 8)).astype(np.float32) XB = np.resize(np.arange(40), (5, 8)).astype(np.float32) XA_checked, XB_checked = check_pairwise_arrays(XA, None) assert_equal(XA_checked.dtype, np.float32) # both float32 XA_checked, XB_checked = check_pairwise_arrays(XA, XB) assert_equal(XA_checked.dtype, np.float32) assert_equal(XB_checked.dtype, np.float32) # mismatched A XA_checked, XB_checked = check_pairwise_arrays(XA.astype(np.float), XB) assert_equal(XA_checked.dtype, np.float) assert_equal(XB_checked.dtype, np.float) # mismatched B XA_checked, XB_checked = check_pairwise_arrays(XA, XB.astype(np.float)) assert_equal(XA_checked.dtype, np.float) assert_equal(XB_checked.dtype, np.float)
bsd-3-clause
canis-io/cascade-lib
lib/components/web/widgets/Collector.php
3549
<?php
/**
 * @link http://psesd.org/
 *
 * @copyright Copyright (c) 2015 Puget Sound ESD
 * @license http://psesd.org/license/
 */

namespace cascade\components\web\widgets;

use cascade\components\web\widgets\section\Section;
use Yii;

/**
 * Collector module for web widgets: instantiates registered widget items,
 * optionally attaches them to a page section, and records what was built
 * (see {@see build()} and {@see $producedWidgets}).
 *
 * @author Jacob Morrison <email@ofjacob.com>
 */
class Collector extends \canis\base\collector\Module
{
    /**
     * @var array map of widget id => ['widget' => systemId, 'id' => widgetId,
     * 'params' => recreateParams] for every widget built by this collector.
     */
    public $producedWidgets = [];
    /**
     * @var string|null widget id of the most recently built widget.
     */
    public $lastBuildId;
    /**
     * @var bool backing value for the lazy flag (see getLazy()/setLazy()).
     */
    protected $_lazy = false;

    /**
     * Returns whether this collector is in lazy mode.
     *
     * @return bool the lazy flag
     */
    public function getLazy()
    {
        return $this->_lazy;
    }

    /**
     * Sets the lazy flag.
     *
     * @param bool $lazy new value for the lazy flag
     */
    public function setLazy($lazy)
    {
        $this->_lazy = $lazy;
    }

    /**
     * @inheritdoc
     */
    public function getCollectorItemClass()
    {
        return Item::className();
    }

    /**
     * @inheritdoc
     */
    public function getModulePrefix()
    {
        return 'Widget';
    }

    /**
     * Builds a widget instance and returns its cell.
     *
     * Side effects: updates $lastBuildId and appends an entry to
     * $producedWidgets describing the built widget.
     *
     * NOTE(review): a required parameter ($widgetName) follows an optional
     * one ($section); callers must always pass both. This ordering is
     * deprecated as of PHP 8.0 — confirm before upgrading the runtime.
     *
     * @param Section|null $section section to attach the widget to; when
     * given, the section's decorator class is attached to the widget.
     * @param string|object $widgetName a collector item name to look up via
     * getOne(), or an already-resolved item object.
     * @param array $instanceSettings extra properties applied to the widget
     * object via Yii::configure().
     *
     * @return mixed|false the widget's cell, or false when the resolved item
     * has no widget object.
     */
    public function build(Section $section = null, $widgetName, $instanceSettings = [])
    {
        // Accept either a pre-resolved item object or a lookup key.
        if (is_object($widgetName)) {
            $widget = $widgetName;
        } else {
            $widget = $this->getOne($widgetName);
        }
        $widgetObject = $widget->object;
        if (is_null($widgetObject)) {
            return false;
        }
        if (isset($section)) {
            $widgetObject->attachDecorator($section->widgetDecoratorClass);
            $widgetObject->section = $section;
        }
        $widgetObject->owner = $widget->owner;
        Yii::configure($widgetObject, $instanceSettings);
        $cell = $widgetObject->cell;
        // Record the build so it can be recreated later from its params.
        $this->lastBuildId = $widgetObject->getWidgetId();
        $this->producedWidgets[$widgetObject->widgetId] = ['widget' => $widgetObject->systemId, 'id' => $widgetObject->widgetId, 'params' => $widgetObject->recreateParams];

        return $cell;
    }

    /**
     * Returns the widgets registered for a location, optionally filtered to
     * a single owner.
     *
     * @param string $location location key (bucket "locations:$location")
     * @param mixed $owner when given, only widgets whose owner is identical
     * (===) to this value are returned.
     *
     * @return array widgets keyed as stored in the bucket
     */
    public function getLocation($location, $owner = null)
    {
        $bucket = $this->getBucket('locations:' . $location);
        if (is_null($owner)) {
            return $bucket->toArray();
        } else {
            $result = [];
            foreach ($bucket as $key => $widget) {
                if ($widget->owner === $owner) {
                    $result[$key] = $widget;
                }
            }

            return $result;
        }
    }
}
bsd-3-clause
riteshsingh1/a
backend/views/staff-academic/create.php
477
<?php

use yii\helpers\Html;

/* @var $this yii\web\View */
/* @var $model backend\models\StaffAcademic */

// "Create" page for a StaffAcademic record: sets the translated title and
// breadcrumb trail, then delegates the form markup to the shared _form
// partial (also used by the update view, by Yii convention — confirm).
$this->title = Yii::t('app', 'Create Staff Academic');
$this->params['breadcrumbs'][] = ['label' => Yii::t('app', 'Staff Academics'), 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="staff-academic-create">

    <h1><?= Html::encode($this->title) ?></h1>

    <?= $this->render('_form', [
        'model' => $model,
    ]) ?>

</div>
bsd-3-clause
cybergarage/cybergarage-x3d
core/src/main/java/org/cybergarage/x3d/node/BooleanTimeTriggerNode.java
3963
/******************************************************************
*
*	CyberX3D for Java
*
*	Copyright (C) Satoshi Konno 1997-2002
*
*	File : BooleanTimeTriggerNode.java
*	Revisions:
*
*	10/08/02
*		- The first revision.
*
******************************************************************/

package org.cybergarage.x3d.node;

import java.io.PrintWriter;

import org.cybergarage.x3d.*;
import org.cybergarage.x3d.field.*;

/**
 * Trigger node with two boolean event inputs (set_booleanTrue,
 * set_booleanFalse) and two boolean event outputs (trueTrigger,
 * falseTrigger). This class only declares and exposes the fields; no
 * routing logic is implemented here (update() is empty), so any event
 * propagation happens elsewhere in the runtime.
 */
public class BooleanTimeTriggerNode extends TriggerNode
{
	// Field-name constants used both to register the fields and to look
	// them up on instance (USE) nodes.
	private String set_booleanTrueFieldName = "set_booleanTrue";
	private String set_booleanFalseFieldName = "set_booleanFalse";
	private String trueTriggerFieldName = "trueTrigger";
	private String falseTriggerFieldName = "falseTrigger";

	// Backing fields for non-instance nodes.
	private SFBool set_booleanTrueField;
	private SFBool set_booleanFalseField;
	private SFBool trueTriggerField;
	private SFBool falseTriggerField;

	public BooleanTimeTriggerNode()
	{
		setHeaderFlag(false);
		setType(NodeType.BOOLEANTIMETRIGGER);

		// set_booleanTrueField eventIn field
		// NOTE(review): initialized to false while set_booleanFalse below
		// starts at true — the asymmetry looks intentional but is not
		// explained anywhere visible; confirm against the runtime's use.
		set_booleanTrueField = new SFBool(false);
		addEventIn(set_booleanTrueFieldName, set_booleanTrueField);

		// set_booleanFalseField eventIn field
		set_booleanFalseField = new SFBool(true);
		addEventIn(set_booleanFalseFieldName, set_booleanFalseField);

		// trueTrigger eventOut field
		trueTriggerField = new SFBool();
		addEventOut(trueTriggerFieldName, trueTriggerField);

		// falseTrigger eventOut field
		falseTriggerField = new SFBool();
		addEventOut(falseTriggerFieldName, falseTriggerField);
	}

	/** Copy constructor: registers the fields, then copies values from node. */
	public BooleanTimeTriggerNode(BooleanTimeTriggerNode node)
	{
		this();
		setFieldValues(node);
	}

	////////////////////////////////////////////////
	//	SetBooleanTrue
	////////////////////////////////////////////////

	// Each accessor below returns the local member for ordinary nodes and
	// resolves the field by name through the event table for instance
	// (USE) nodes.
	public SFBool getSetBooleanTrueField() {
		if (isInstanceNode() == false)
			return set_booleanTrueField;
		return (SFBool)getEventIn(set_booleanTrueFieldName);
	}

	public void setSetBooleanTrue(boolean value) {
		getSetBooleanTrueField().setValue(value);
	}

	public boolean getSetBooleanTrue() {
		return getSetBooleanTrueField().getValue();
	}

	////////////////////////////////////////////////
	//	SetBooleanFalse
	////////////////////////////////////////////////

	public SFBool getSetBooleanFalseField() {
		if (isInstanceNode() == false)
			return set_booleanFalseField;
		return (SFBool)getEventIn(set_booleanFalseFieldName);
	}

	public void setSetBooleanFalse(boolean value) {
		getSetBooleanFalseField().setValue(value);
	}

	public boolean getSetBooleanFalse() {
		return getSetBooleanFalseField().getValue();
	}

	////////////////////////////////////////////////
	//	TrueTrigger
	////////////////////////////////////////////////

	public SFBool getTrueTriggerField() {
		if (isInstanceNode() == false)
			return trueTriggerField;
		return (SFBool)getEventOut(trueTriggerFieldName);
	}

	public void setTrueTrigger(boolean value) {
		getTrueTriggerField().setValue(value);
	}

	public boolean getTrueTrigger() {
		return getTrueTriggerField().getValue();
	}

	////////////////////////////////////////////////
	//	FalseTrigger
	////////////////////////////////////////////////

	public SFBool getFalseTriggerField() {
		if (isInstanceNode() == false)
			return falseTriggerField;
		return (SFBool)getEventOut(falseTriggerFieldName);
	}

	public void setFalseTrigger(boolean value) {
		getFalseTriggerField().setValue(value);
	}

	public boolean getFalseTrigger() {
		return getFalseTriggerField().getValue();
	}

	////////////////////////////////////////////////
	//	abstract functions
	////////////////////////////////////////////////

	/** This node type accepts no child nodes. */
	public boolean isChildNodeType(Node node) {
		return false;
	}

	// Lifecycle hooks are intentionally empty: this node carries state only.
	public void initialize() {
	}

	public void uninitialize() {
	}

	public void update() {
	}

	////////////////////////////////////////////////
	//	Infomation
	////////////////////////////////////////////////

	/** No node-specific fields are serialized beyond the base output. */
	public void outputContext(PrintWriter printStream, String indentString) {
	}
}
bsd-3-clause