code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.imaging.formats.tiff.taginfos;
import static org.junit.jupiter.api.Assertions.assertSame;
import java.nio.ByteOrder;
import org.apache.commons.imaging.formats.tiff.constants.TiffDirectoryType;
import org.junit.jupiter.api.Test;
public class TagInfoSBytesTest{
@Test
public void testCreatesTagInfoSBytesAndCallsEncodeValue() {
final TiffDirectoryType tiffDirectoryType = TiffDirectoryType.TIFF_DIRECTORY_IFD3;
final TagInfoSBytes tagInfoSBytes = new TagInfoSBytes("", (-198), 10, tiffDirectoryType);
final ByteOrder byteOrder = ByteOrder.LITTLE_ENDIAN;
final byte[] byteArray = new byte[2];
final byte[] byteArrayTwo = tagInfoSBytes.encodeValue(byteOrder, byteArray);
assertSame(byteArrayTwo, byteArray);
}
} | apache/commons-imaging | src/test/java/org/apache/commons/imaging/formats/tiff/taginfos/TagInfoSBytesTest.java | Java | apache-2.0 | 1,586 |
# Copyright 2015 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from oslo_utils import versionutils
from nova.db import api as db
from nova import exception
from nova.objects import base
from nova.objects import fields
@base.NovaObjectRegistry.register
class MigrationContext(base.NovaPersistentObject, base.NovaObject):
    """Data representing additional resources related to a migration.

    Some resources cannot be calculated from knowing the flavor alone for the
    purpose of resources tracking, but need to be persisted at the time the
    claim was made, for subsequent resource tracking runs to be consistent.
    MigrationContext objects are created when the claim is done and are there
    to facilitate resource tracking and final provisioning of the instance on
    the destination host.
    """
    # Version 1.0: Initial version
    # Version 1.1: Add old/new pci_devices and pci_requests
    VERSION = '1.1'

    fields = {
        # Instance this context belongs to and the id of the Migration record.
        'instance_uuid': fields.UUIDField(),
        'migration_id': fields.IntegerField(),
        # Snapshots of the NUMA topology on the source ("old") and the
        # destination ("new") host; nullable because not every instance has
        # a NUMA topology.
        'new_numa_topology': fields.ObjectField('InstanceNUMATopology',
                                                nullable=True),
        'old_numa_topology': fields.ObjectField('InstanceNUMATopology',
                                                nullable=True),
        # PCI devices/requests before and after the move (added in v1.1).
        'new_pci_devices': fields.ObjectField('PciDeviceList',
                                              nullable=True),
        'old_pci_devices': fields.ObjectField('PciDeviceList',
                                              nullable=True),
        'new_pci_requests': fields.ObjectField('InstancePCIRequests',
                                               nullable=True),
        'old_pci_requests': fields.ObjectField('InstancePCIRequests',
                                               nullable=True),
    }

    @classmethod
    def obj_make_compatible(cls, primitive, target_version):
        # Downgrade the primitive in place: drop the fields that only exist
        # since version 1.1 when serializing for an older peer.
        # NOTE(review): no super().obj_make_compatible() call here -
        # presumably intentional for this object; confirm against the base
        # class before relying on it.
        target_version = versionutils.convert_version_to_tuple(target_version)
        if target_version < (1, 1):
            primitive.pop('old_pci_devices', None)
            primitive.pop('new_pci_devices', None)
            primitive.pop('old_pci_requests', None)
            primitive.pop('new_pci_requests', None)

    @classmethod
    def obj_from_db_obj(cls, db_obj):
        # db_obj is the raw JSON blob stored in the database column; decode
        # it and rebuild the versioned object from the primitive.
        primitive = jsonutils.loads(db_obj)
        return cls.obj_from_primitive(primitive)

    @base.remotable_classmethod
    def get_by_instance_uuid(cls, context, instance_uuid):
        """Load the MigrationContext stored for an instance.

        Raises MigrationContextNotFound if the instance has no extras row;
        returns None if the row exists but no migration context is stored.
        """
        db_extra = db.instance_extra_get_by_instance_uuid(
            context, instance_uuid, columns=['migration_context'])
        if not db_extra:
            raise exception.MigrationContextNotFound(
                instance_uuid=instance_uuid)

        if db_extra['migration_context'] is None:
            return None

        return cls.obj_from_db_obj(db_extra['migration_context'])
| mikalstill/nova | nova/objects/migration_context.py | Python | apache-2.0 | 3,456 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.synapse.transport.nhttp.debug;
import org.apache.http.HttpRequest;
import org.apache.http.RequestLine;
import org.apache.http.nio.NHttpClientConnection;
import org.apache.synapse.transport.nhttp.Axis2HttpRequest;
import org.apache.synapse.transport.nhttp.ClientHandler;
import java.io.IOException;
/**
* A connection debug object would be accumulated during processing, but only made use of if the connection
* encounters issues during processing.
*/
public class ClientConnectionDebug extends AbstractConnectionDebug {
private long connectionCreationTime;
private long lastRequestStartTime;
private String lastRequestEPR;
private String lastRequestProtocol;
private String lastRequestHTTPMethod;
private StringBuffer previousRequestAttempts;
private long requestCompletionTime;
private long responseStartTime;
private long responseCompletionTime = -1;
private String responseLine;
private ServerConnectionDebug serverConnectionDebug;
public ClientConnectionDebug(ServerConnectionDebug serverConnectionDebug) {
super();
this.serverConnectionDebug = serverConnectionDebug;
}
public void recordRequestStartTime(NHttpClientConnection conn, Axis2HttpRequest axis2Req) {
if (conn != null) {
this.connectionCreationTime = (Long) conn.getContext().getAttribute(
ClientHandler.CONNECTION_CREATION_TIME);
try {
HttpRequest request = axis2Req.getRequest();
RequestLine requestLine = request.getRequestLine();
this.lastRequestProtocol = requestLine.getProtocolVersion().toString();
this.lastRequestHTTPMethod = requestLine.getMethod();
this.headers = request.getAllHeaders();
} catch (IOException ignore) {}
}
if (this.lastRequestStartTime != 0) {
if (previousRequestAttempts == null) {
previousRequestAttempts = new StringBuffer();
} else {
previousRequestAttempts.append(fieldSeparator);
}
previousRequestAttempts.append("Attempt-Info").append(keyValueSeparator).append("{");
previousRequestAttempts.append("Req-Start-Time").append(keyValueSeparator)
.append(format(this.lastRequestStartTime));
previousRequestAttempts.append(fieldSeparator);
previousRequestAttempts.append("Req-URL").append(keyValueSeparator)
.append(this.lastRequestEPR).append("}");
}
this.lastRequestStartTime = System.currentTimeMillis();
this.lastRequestEPR = axis2Req.getEpr().toString();
}
public void recordResponseCompletionTime() {
this.responseCompletionTime = System.currentTimeMillis();
}
public void recordRequestCompletionTime() {
this.requestCompletionTime = System.currentTimeMillis();
}
public void recordResponseStartTime(String responseLine) {
this.responseStartTime = System.currentTimeMillis();
this.responseLine = responseLine;
}
public long getLastRequestStartTime() {
return lastRequestStartTime;
}
public long getResponseCompletionTime() {
return responseCompletionTime;
}
public long getResponseStartTime() {
return responseStartTime;
}
public String dump() {
StringBuffer sb = new StringBuffer(25);
sb.append("E2S-Req-Start").append(keyValueSeparator).append(format(lastRequestStartTime));
sb.append(fieldSeparator);
sb.append("E2S-Req-End").append(keyValueSeparator).append(format(requestCompletionTime));
sb.append(fieldSeparator);
sb.append("E2S-Req-ConnCreateTime").append(keyValueSeparator)
.append(format(connectionCreationTime));
sb.append(statementSeparator);
sb.append("E2S-Req-URL").append(keyValueSeparator).append(lastRequestEPR);
sb.append(fieldSeparator);
sb.append("E2S-Req-Protocol").append(keyValueSeparator).append(lastRequestProtocol);
sb.append(fieldSeparator);
sb.append("E2S-Req-Method").append(keyValueSeparator).append(lastRequestHTTPMethod);
sb.append(statementSeparator);
if (previousRequestAttempts != null) {
sb.append("E2S-Previous-Attempts").append(keyValueSeparator)
.append(previousRequestAttempts);
sb.append(statementSeparator);
}
sb.append("S2E-Resp-Start").append(keyValueSeparator).append(format(responseStartTime));
sb.append(fieldSeparator);
sb.append("S2E-Resp-End").append(keyValueSeparator).append(responseCompletionTime != -1 ?
format(responseCompletionTime) : "NOT-COMPLETED");
sb.append(statementSeparator);
sb.append("S2E-Resp-Status").append(keyValueSeparator).append(responseLine);
if (!printNoHeaders) {
sb.append(fieldSeparator);
sb.append("S2E-Resp-Info").append(keyValueSeparator).append("{")
.append(headersToString()).append("}");
}
sb.append(statementSeparator);
return sb.toString();
}
public ServerConnectionDebug getServerConnectionDebug() {
return serverConnectionDebug;
}
} | asanka88/apache-synapse | modules/transports/core/nhttp/src/main/java/org/apache/synapse/transport/nhttp/debug/ClientConnectionDebug.java | Java | apache-2.0 | 6,147 |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
namespace Google\Service\Contactcenterinsights;
/**
 * Generated model holding metadata for a long-running CreateAnalysis
 * operation: the conversation resource name plus the operation's create and
 * end timestamps (all plain strings as delivered by the API).
 */
class GoogleCloudContactcenterinsightsV1CreateAnalysisOperationMetadata extends \Google\Model
{
  /**
   * Resource name of the conversation being analysed.
   *
   * @var string
   */
  public $conversation;
  /**
   * Time the operation was created (RFC 3339 timestamp string).
   *
   * @var string
   */
  public $createTime;
  /**
   * Time the operation finished (RFC 3339 timestamp string).
   *
   * @var string
   */
  public $endTime;

  /**
   * @param string $conversation
   */
  public function setConversation($conversation)
  {
    $this->conversation = $conversation;
  }
  /**
   * @return string
   */
  public function getConversation()
  {
    return $this->conversation;
  }
  /**
   * @param string $createTime
   */
  public function setCreateTime($createTime)
  {
    $this->createTime = $createTime;
  }
  /**
   * @return string
   */
  public function getCreateTime()
  {
    return $this->createTime;
  }
  /**
   * @param string $endTime
   */
  public function setEndTime($endTime)
  {
    $this->endTime = $endTime;
  }
  /**
   * @return string
   */
  public function getEndTime()
  {
    return $this->endTime;
  }
}

// Adding a class alias for backwards compatibility with the previous class name.
class_alias(GoogleCloudContactcenterinsightsV1CreateAnalysisOperationMetadata::class, 'Google_Service_Contactcenterinsights_GoogleCloudContactcenterinsightsV1CreateAnalysisOperationMetadata');
| googleapis/google-api-php-client-services | src/Contactcenterinsights/GoogleCloudContactcenterinsightsV1CreateAnalysisOperationMetadata.php | PHP | apache-2.0 | 1,843 |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.autoconfigure;
import java.util.Map;
import javax.sql.DataSource;
import org.junit.After;
import org.junit.Test;
import org.springframework.boot.actuate.health.ApplicationHealthIndicator;
import org.springframework.boot.actuate.health.DataSourceHealthIndicator;
import org.springframework.boot.actuate.health.DiskSpaceHealthIndicator;
import org.springframework.boot.actuate.health.ElasticsearchHealthIndicator;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.HealthIndicator;
import org.springframework.boot.actuate.health.JmsHealthIndicator;
import org.springframework.boot.actuate.health.MailHealthIndicator;
import org.springframework.boot.actuate.health.MongoHealthIndicator;
import org.springframework.boot.actuate.health.RabbitHealthIndicator;
import org.springframework.boot.actuate.health.RedisHealthIndicator;
import org.springframework.boot.actuate.health.SolrHealthIndicator;
import org.springframework.boot.autoconfigure.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
import org.springframework.boot.autoconfigure.data.elasticsearch.ElasticsearchAutoConfiguration;
import org.springframework.boot.autoconfigure.data.mongo.MongoDataAutoConfiguration;
import org.springframework.boot.autoconfigure.data.redis.RedisAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.boot.autoconfigure.jdbc.EmbeddedDataSourceConfiguration;
import org.springframework.boot.autoconfigure.jdbc.metadata.DataSourcePoolMetadataProvidersConfiguration;
import org.springframework.boot.autoconfigure.jms.activemq.ActiveMQAutoConfiguration;
import org.springframework.boot.autoconfigure.mail.MailSenderAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.boot.autoconfigure.solr.SolrAutoConfiguration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.EnvironmentTestUtils;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
/**
* Tests for {@link HealthIndicatorAutoConfiguration}.
*
* @author Christian Dupuis
* @author Stephane Nicoll
* @author Andy Wilkinson
*/
public class HealthIndicatorAutoConfigurationTests {

    // Fresh context per test; each test registers the configurations it needs,
    // refreshes, and inspects the resulting HealthIndicator beans.
    private AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();

    @After
    public void close() {
        if (this.context != null) {
            this.context.close();
        }
    }

    // With disk space checking disabled and nothing else on the classpath
    // config, only the fallback ApplicationHealthIndicator is created.
    @Test
    public void defaultHealthIndicator() {
        this.context.register(HealthIndicatorAutoConfiguration.class,
                ManagementServerProperties.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // management.health.defaults.enabled:false suppresses all default
    // indicators; only the fallback remains.
    @Test
    public void defaultHealthIndicatorsDisabled() {
        this.context.register(HealthIndicatorAutoConfiguration.class,
                ManagementServerProperties.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.defaults.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // A user-supplied indicator replaces the fallback when defaults are off.
    @Test
    public void defaultHealthIndicatorsDisabledWithCustomOne() {
        this.context.register(CustomHealthIndicator.class,
                HealthIndicatorAutoConfiguration.class, ManagementServerProperties.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.defaults.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertSame(this.context.getBean("customHealthIndicator"),
                beans.values().iterator().next());
    }

    // Individual enable flags win over the global defaults switch.
    @Test
    public void defaultHealthIndicatorsDisabledButOne() {
        this.context.register(HealthIndicatorAutoConfiguration.class,
                ManagementServerProperties.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.defaults.enabled:false",
                "management.health.diskspace.enabled:true");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(DiskSpaceHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // Redis auto-configuration contributes a RedisHealthIndicator...
    @Test
    public void redisHealthIndicator() {
        this.context.register(RedisAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(RedisHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // ...unless it is explicitly disabled.
    @Test
    public void notRedisHealthIndicator() {
        this.context.register(RedisAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.redis.enabled:false",
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // Mongo auto-configuration contributes a MongoHealthIndicator...
    @Test
    public void mongoHealthIndicator() {
        this.context.register(MongoAutoConfiguration.class,
                ManagementServerProperties.class, MongoDataAutoConfiguration.class,
                HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(MongoHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // ...unless it is explicitly disabled.
    @Test
    public void notMongoHealthIndicator() {
        this.context.register(MongoAutoConfiguration.class,
                ManagementServerProperties.class, MongoDataAutoConfiguration.class,
                HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.mongo.enabled:false",
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // Mongo + Redis + Solr + default disk space = four indicators.
    @Test
    public void combinedHealthIndicator() {
        this.context.register(MongoAutoConfiguration.class, RedisAutoConfiguration.class,
                MongoDataAutoConfiguration.class, SolrAutoConfiguration.class,
                HealthIndicatorAutoConfiguration.class);
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(4, beans.size());
    }

    // An embedded DataSource yields a DataSourceHealthIndicator.
    @Test
    public void dataSourceHealthIndicator() {
        this.context.register(EmbeddedDataSourceConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(DataSourceHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // spring.datasource.validation-query is propagated to the indicator.
    @Test
    public void dataSourceHealthIndicatorWithCustomValidationQuery() {
        this.context.register(PropertyPlaceholderAutoConfiguration.class,
                ManagementServerProperties.class, DataSourceProperties.class,
                DataSourceConfig.class,
                DataSourcePoolMetadataProvidersConfiguration.class,
                HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "spring.datasource.validation-query:SELECT from FOOBAR",
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        HealthIndicator healthIndicator = beans.values().iterator().next();
        assertEquals(DataSourceHealthIndicator.class, healthIndicator.getClass());
        DataSourceHealthIndicator dataSourceHealthIndicator = (DataSourceHealthIndicator) healthIndicator;
        assertEquals("SELECT from FOOBAR", dataSourceHealthIndicator.getQuery());
    }

    // management.health.db.enabled:false suppresses the DataSource indicator.
    @Test
    public void notDataSourceHealthIndicator() {
        this.context.register(EmbeddedDataSourceConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.db.enabled:false",
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // Rabbit auto-configuration contributes a RabbitHealthIndicator...
    @Test
    public void rabbitHealthIndicator() {
        this.context.register(RabbitAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(RabbitHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // ...unless it is explicitly disabled.
    @Test
    public void notRabbitHealthIndicator() {
        this.context.register(RabbitAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.rabbit.enabled:false",
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // Solr auto-configuration contributes a SolrHealthIndicator...
    @Test
    public void solrHeathIndicator() {
        this.context.register(SolrAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(SolrHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // ...unless it is explicitly disabled.
    @Test
    public void notSolrHeathIndicator() {
        this.context.register(SolrAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.solr.enabled:false",
                "management.health.diskspace.enabled:false");
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // Disk space checking is on by default.
    @Test
    public void diskSpaceHealthIndicator() {
        this.context.register(HealthIndicatorAutoConfiguration.class);
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(DiskSpaceHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // A configured mail host yields a MailHealthIndicator...
    @Test
    public void mailHealthIndicator() {
        EnvironmentTestUtils.addEnvironment(this.context,
                "spring.mail.host:smtp.acme.org",
                "management.health.diskspace.enabled:false");
        this.context.register(MailSenderAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(MailHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // ...unless it is explicitly disabled.
    @Test
    public void notMailHealthIndicator() {
        EnvironmentTestUtils.addEnvironment(this.context,
                "spring.mail.host:smtp.acme.org", "management.health.mail.enabled:false",
                "management.health.diskspace.enabled:false");
        this.context.register(MailSenderAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // ActiveMQ auto-configuration contributes a JmsHealthIndicator...
    @Test
    public void jmsHealthIndicator() {
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.diskspace.enabled:false");
        this.context.register(ActiveMQAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(JmsHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // ...unless it is explicitly disabled.
    @Test
    public void notJmsHealthIndicator() {
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.jms.enabled:false",
                "management.health.diskspace.enabled:false");
        this.context.register(ActiveMQAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // Elasticsearch auto-configuration contributes an
    // ElasticsearchHealthIndicator (data/log paths point at target/ so the
    // embedded node can start)...
    @Test
    public void elasticSearchHealthIndicator() {
        EnvironmentTestUtils.addEnvironment(this.context,
                "spring.data.elasticsearch.properties.path.data:target/data",
                "spring.data.elasticsearch.properties.path.logs:target/logs",
                "management.health.diskspace.enabled:false");
        this.context.register(ElasticsearchAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ElasticsearchHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // ...unless it is explicitly disabled.
    @Test
    public void notElasticSearchHealthIndicator() {
        EnvironmentTestUtils.addEnvironment(this.context,
                "management.health.elasticsearch.enabled:false",
                "spring.data.elasticsearch.properties.path.data:target/data",
                "spring.data.elasticsearch.properties.path.logs:target/logs",
                "management.health.diskspace.enabled:false");
        this.context.register(ElasticsearchAutoConfiguration.class,
                ManagementServerProperties.class, HealthIndicatorAutoConfiguration.class);
        this.context.refresh();
        Map<String, HealthIndicator> beans = this.context
                .getBeansOfType(HealthIndicator.class);
        assertEquals(1, beans.size());
        assertEquals(ApplicationHealthIndicator.class,
                beans.values().iterator().next().getClass());
    }

    // In-memory HSQLDB DataSource bound to spring.datasource.* properties,
    // used by the custom-validation-query test.
    @Configuration
    @EnableConfigurationProperties
    protected static class DataSourceConfig {

        @Bean
        @ConfigurationProperties(prefix = DataSourceProperties.PREFIX)
        public DataSource dataSource() {
            return DataSourceBuilder.create()
                    .driverClassName("org.hsqldb.jdbc.JDBCDriver")
                    .url("jdbc:hsqldb:mem:test").username("sa").build();
        }
    }

    // Always-down indicator used to verify user beans take precedence.
    @Configuration
    protected static class CustomHealthIndicator {

        @Bean
        public HealthIndicator customHealthIndicator() {
            return new HealthIndicator() {
                @Override
                public Health health() {
                    return Health.down().build();
                }
            };
        }
    }
}
| christian-posta/spring-boot | spring-boot-actuator/src/test/java/org/springframework/boot/actuate/autoconfigure/HealthIndicatorAutoConfigurationTests.java | Java | apache-2.0 | 17,546 |
package com.lu.kuaichuan.wifidirect.adapter;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.CheckBox;
import android.widget.ImageView;
import android.widget.TextView;
import com.lu.kuaichuan.wifidirect.R;
import com.lu.kuaichuan.wifidirect.utils.BitmapUtils;
import com.lu.kuaichuan.wifidirect.utils.FileResLoaderUtils;
import com.lu.kuaichuan.wifidirect.utils.FileTypeUtils;
import java.util.ArrayList;
/**
* Created by admin on 2016/3/8.
*/
/**
 * GridView adapter that renders one cell per file: an icon, the file name and
 * an optional selection check box. Icon/name/check-state are driven by three
 * parallel lists supplied by the caller.
 *
 * Created by admin on 2016/3/8.
 */
public class MyGridViewAdapter extends BaseAdapter {

    private final ArrayList<String> mNameList;
    private final ArrayList<String> mPathList;
    private final ArrayList<Boolean> mCheckBoxList;
    private final LayoutInflater mInflater;
    private final Context mContext;
    // Icon edge length in pixels (converted once from the dip value passed in).
    private final int mImageViewHeight;

    /**
     * @param context         context used for inflation and dip conversion
     * @param nameList        display names, parallel to {@code pathList}
     * @param pathList        file paths used to resolve icons
     * @param checkBoxList    per-item checked state, parallel to the lists above
     * @param imageViewHeight desired icon edge length in dip
     */
    public MyGridViewAdapter(Context context, ArrayList<String> nameList,
            ArrayList<String> pathList, ArrayList<Boolean> checkBoxList,
            int imageViewHeight) {
        mNameList = nameList;
        mPathList = pathList;
        mCheckBoxList = checkBoxList;
        mContext = context;
        mInflater = LayoutInflater.from(mContext);
        this.mImageViewHeight = (int) BitmapUtils.dipTopx(context, imageViewHeight);
    }

    public ArrayList<Boolean> getmCheckBoxList() {
        return this.mCheckBoxList;
    }

    @Override
    public int getCount() {
        return mNameList.size();
    }

    @Override
    public Object getItem(int position) {
        return mNameList.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        ItemViewTag viewTag;
        if (convertView == null) {
            // Inflate against the parent (without attaching) so the XML layout
            // params of the item root are honoured; inflating with a null root
            // silently drops them.
            convertView = mInflater.inflate(R.layout.grid_view_item, parent, false);
            viewTag = new ItemViewTag((ImageView) convertView.findViewById(R.id.id_grid_view_icon_movie),
                    (TextView) convertView.findViewById(R.id.id_grid_view_name_movie),
                    (CheckBox) convertView.findViewById(R.id.id_grid_view_checkbox_movie));
            convertView.setTag(viewTag);
            // Force the icon into a square of the configured size.
            ViewGroup.LayoutParams lp = viewTag.mIcon.getLayoutParams();
            lp.width = lp.height = this.mImageViewHeight;
            viewTag.mIcon.setLayoutParams(lp);
        } else {
            viewTag = (ItemViewTag) convertView.getTag();
        }
        viewTag.mName.setText(mNameList.get(position));
        String path = mPathList.get(position);
        // The icon loader may return a Drawable, a resource id or a Bitmap;
        // fall back to a generic per-type icon if none is available.
        Object pic = FileResLoaderUtils.getPic(path);
        if (pic instanceof Drawable) {
            viewTag.mIcon.setImageDrawable((Drawable) pic);
        } else if (pic instanceof Integer) {
            viewTag.mIcon.setImageResource((Integer) pic);
        } else if (pic instanceof Bitmap) {
            viewTag.mIcon.setImageBitmap((Bitmap) pic);
        } else {
            viewTag.mIcon.setImageResource(FileTypeUtils.getDefaultFileIcon(path));
        }
        viewTag.mCheckBox.setChecked(mCheckBoxList.get(position));
        viewTag.mCheckBox.setVisibility(mCheckBoxList.get(position) ? View.VISIBLE : View.GONE);
        return convertView;
    }

    /** View-holder caching the per-cell widgets to avoid repeated lookups. */
    public class ItemViewTag {

        public ImageView mIcon;
        public TextView mName;
        public CheckBox mCheckBox;

        public ItemViewTag(ImageView icon, TextView name, CheckBox checkBox) {
            mName = name;
            mIcon = icon;
            mCheckBox = checkBox;
        }
    }
}
| lucky-code/Practice | kuaichuan2.0/app/src/main/java/com/lu/kuaichuan/wifidirect/adapter/MyGridViewAdapter.java | Java | apache-2.0 | 3,711 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/storagegateway/model/UpdateNFSFileShareRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::StorageGateway::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
// Code produced by the AWS SDK code generator. Every optional member has a
// companion "HasBeenSet" flag, initialised false here so SerializePayload()
// only emits fields the caller explicitly set via the generated setters.
// Initialiser order mirrors the member declaration order in the header.
UpdateNFSFileShareRequest::UpdateNFSFileShareRequest() : 
    m_fileShareARNHasBeenSet(false),
    m_kMSEncrypted(false),
    m_kMSEncryptedHasBeenSet(false),
    m_kMSKeyHasBeenSet(false),
    m_nFSFileShareDefaultsHasBeenSet(false),
    m_defaultStorageClassHasBeenSet(false),
    m_objectACL(ObjectACL::NOT_SET),
    m_objectACLHasBeenSet(false),
    m_clientListHasBeenSet(false),
    m_squashHasBeenSet(false),
    m_readOnly(false),
    m_readOnlyHasBeenSet(false),
    m_guessMIMETypeEnabled(false),
    m_guessMIMETypeEnabledHasBeenSet(false),
    m_requesterPays(false),
    m_requesterPaysHasBeenSet(false),
    m_fileShareNameHasBeenSet(false),
    m_cacheAttributesHasBeenSet(false),
    m_notificationPolicyHasBeenSet(false)
{
}
// Builds the JSON request body. Generated code: each member is serialised only
// when its "HasBeenSet" flag was raised, so unset fields are omitted entirely
// rather than being sent with default values.
Aws::String UpdateNFSFileShareRequest::SerializePayload() const
{
  JsonValue payload;

  if(m_fileShareARNHasBeenSet)
  {
   payload.WithString("FileShareARN", m_fileShareARN);
  }

  if(m_kMSEncryptedHasBeenSet)
  {
   payload.WithBool("KMSEncrypted", m_kMSEncrypted);
  }

  if(m_kMSKeyHasBeenSet)
  {
   payload.WithString("KMSKey", m_kMSKey);
  }

  if(m_nFSFileShareDefaultsHasBeenSet)
  {
   payload.WithObject("NFSFileShareDefaults", m_nFSFileShareDefaults.Jsonize());
  }

  if(m_defaultStorageClassHasBeenSet)
  {
   payload.WithString("DefaultStorageClass", m_defaultStorageClass);
  }

  if(m_objectACLHasBeenSet)
  {
   payload.WithString("ObjectACL", ObjectACLMapper::GetNameForObjectACL(m_objectACL));
  }

  if(m_clientListHasBeenSet)
  {
   // ClientList entries are copied one by one into a JSON string array.
   Array<JsonValue> clientListJsonList(m_clientList.size());
   for(unsigned clientListIndex = 0; clientListIndex < clientListJsonList.GetLength(); ++clientListIndex)
   {
     clientListJsonList[clientListIndex].AsString(m_clientList[clientListIndex]);
   }
   payload.WithArray("ClientList", std::move(clientListJsonList));
  }

  if(m_squashHasBeenSet)
  {
   payload.WithString("Squash", m_squash);
  }

  if(m_readOnlyHasBeenSet)
  {
   payload.WithBool("ReadOnly", m_readOnly);
  }

  if(m_guessMIMETypeEnabledHasBeenSet)
  {
   payload.WithBool("GuessMIMETypeEnabled", m_guessMIMETypeEnabled);
  }

  if(m_requesterPaysHasBeenSet)
  {
   payload.WithBool("RequesterPays", m_requesterPays);
  }

  if(m_fileShareNameHasBeenSet)
  {
   payload.WithString("FileShareName", m_fileShareName);
  }

  if(m_cacheAttributesHasBeenSet)
  {
   payload.WithObject("CacheAttributes", m_cacheAttributes.Jsonize());
  }

  if(m_notificationPolicyHasBeenSet)
  {
   payload.WithString("NotificationPolicy", m_notificationPolicy);
  }

  // WriteReadable pretty-prints the assembled JSON document.
  return payload.View().WriteReadable();
}
// StorageGateway requests are routed by the X-Amz-Target header rather than by
// URI path; this header names the UpdateNFSFileShare operation and API version.
Aws::Http::HeaderValueCollection UpdateNFSFileShareRequest::GetRequestSpecificHeaders() const
{
  Aws::Http::HeaderValueCollection headers;
  headers.emplace("X-Amz-Target", "StorageGateway_20130630.UpdateNFSFileShare");
  return headers;
}
| awslabs/aws-sdk-cpp | aws-cpp-sdk-storagegateway/source/model/UpdateNFSFileShareRequest.cpp | C++ | apache-2.0 | 3,217 |
/**
* Copyright 2014 Nortal AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nortal.petit.converter.columnreader;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Strategy for extracting a single column value of type {@code T} from the
 * current row of a JDBC {@link ResultSet}.
 *
 * @param <T> the Java type the column value is converted to
 */
@FunctionalInterface
public interface ColumnReader<T> {

    /**
     * Reads the value of the column at the given index from the result set's
     * current row.
     *
     * @param rs    the result set positioned on the row to read
     * @param index the column index (JDBC column indices are 1-based)
     * @return the converted column value; may be {@code null} for SQL NULL
     * @throws SQLException if the underlying JDBC access fails
     */
    T getColumnValue(ResultSet rs, int index) throws SQLException;
}
/*
* JBoss, Home of Professional Open Source
* Copyright 2006, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* (C) 2005-2006,
* @author JBoss Inc.
*/
/*
* Copyright (C) 1998, 1999, 2000,
*
* Arjuna Solutions Limited,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id: AtomicAction.java 2342 2006-03-30 13:06:17Z $
*/
package com.arjuna.ats.arjuna;
import com.arjuna.ats.arjuna.common.Uid;
import com.arjuna.ats.arjuna.coordinator.ActionStatus;
import com.arjuna.ats.arjuna.coordinator.BasicAction;
import com.arjuna.ats.arjuna.coordinator.TransactionReaper;
import com.arjuna.ats.arjuna.coordinator.TwoPhaseCoordinator;
import com.arjuna.ats.arjuna.coordinator.TxControl;
import com.arjuna.ats.arjuna.logging.tsLogger;
import com.arjuna.ats.internal.arjuna.thread.ThreadActionData;
/**
 * This is a user-level transaction class, unlike BasicAction. AtomicAction
 * takes care of thread-to-action scoping. This is a "one-shot" object, i.e.,
 * once terminated, the instance cannot be re-used for another transaction.
 *
 * An instance of this class is a transaction that can be started and terminated
 * (either committed or rolled back). There are also methods to allow
 * participants (AbstractRecords) to be enlisted with the transaction and to
 * associate/disassociate threads with the transaction.
 *
 * @author Mark Little (mark@arjuna.com)
 * @version $Id: AtomicAction.java 2342 2006-03-30 13:06:17Z $
 * @since JTS 1.0.
 */
public class AtomicAction extends TwoPhaseCoordinator
{
    /**
     * Sentinel timeout: the transaction never expires and is not registered
     * with the {@link TransactionReaper}.
     */
    public static final int NO_TIMEOUT = -1;

    /**
     * Create a new transaction. If there is already a transaction associated
     * with the thread then this new transaction will be automatically nested.
     * The transaction is *not* running at this point.
     *
     * No timeout is associated with this transaction, i.e., it will not be
     * automatically rolled back by the system.
     */
    public AtomicAction ()
    {
        super();
    }

    /**
     * AtomicAction constructor with a Uid. This constructor is for recreating
     * an AtomicAction, typically during crash recovery.
     */
    public AtomicAction (Uid objUid)
    {
        super(objUid);
    }

    /**
     * Start the transaction running.
     *
     * If the transaction is already running or has terminated, then an error
     * code will be returned. No timeout is associated with the transaction.
     *
     * @return <code>ActionStatus</code> indicating outcome.
     */
    public int begin ()
    {
        return begin(AtomicAction.NO_TIMEOUT);
    }

    /**
     * Start the transaction running.
     *
     * If the transaction is already running or has terminated, then an error
     * code will be returned.
     *
     * @param timeout the timeout associated with the transaction. If the
     *            transaction is still active when this timeout elapses, the
     *            system will automatically roll it back.
     *
     * @return <code>ActionStatus</code> indicating outcome.
     */
    public int begin (int timeout)
    {
        int status = super.start();

        if (status == ActionStatus.RUNNING)
        {
            /*
             * Now do thread/action tracking.
             */

            ThreadActionData.pushAction(this);

            _timeout = timeout;

            // A zero timeout means "use the system-wide default".
            if (_timeout == 0)
                _timeout = TxControl.getDefaultTimeout();

            // Only positive timeouts are enforced: the reaper rolls the
            // transaction back if it is still active when the timeout elapses.
            if (_timeout > 0)
                TransactionReaper.transactionReaper().insert(this, _timeout);
        }

        return status;
    }

    /**
     * Commit the transaction, and have heuristic reporting. Heuristic reporting
     * via the return code is enabled.
     *
     * @return <code>ActionStatus</code> indicating outcome.
     */
    public int commit ()
    {
        return commit(true);
    }

    /**
     * Commit the transaction. The report_heuristics parameter can be used to
     * determine whether or not heuristic outcomes are reported.
     *
     * If the transaction has already terminated, or has not begun, then an
     * appropriate error code will be returned.
     *
     * @param report_heuristics if <code>true</code>, heuristic outcomes are
     *            reflected in the returned status code.
     *
     * @return <code>ActionStatus</code> indicating outcome.
     */
    public int commit (boolean report_heuristics)
    {
        int status = super.end(report_heuristics);

        /*
         * Now remove this thread from the action state.
         */

        ThreadActionData.popAction();

        // Termination means the reaper must no longer track this transaction.
        TransactionReaper.transactionReaper().remove(this);

        return status;
    }

    /**
     * Abort (rollback) the transaction.
     *
     * If the transaction has already terminated, or has not been begun, then an
     * appropriate error code will be returned.
     *
     * @return <code>ActionStatus</code> indicating outcome.
     */
    public int abort ()
    {
        int status = super.cancel();

        /*
         * Now remove this thread from the action state.
         */

        ThreadActionData.popAction();

        // Termination means the reaper must no longer track this transaction.
        TransactionReaper.transactionReaper().remove(this);

        return status;
    }

    /**
     * Terminate (commit) the transaction without popping the thread-to-action
     * association, unlike {@link #commit(boolean)}.
     *
     * @return <code>ActionStatus</code> indicating outcome.
     */
    public int end (boolean report_heuristics)
    {
        int outcome = super.end(report_heuristics);

        /*
         * Now remove this thread from the reaper. Leave
         * the thread-to-tx association though.
         */

        TransactionReaper.transactionReaper().remove(this);

        return outcome;
    }

    /**
     * Terminate (roll back) the transaction without popping the
     * thread-to-action association, unlike {@link #abort()}.
     *
     * @return <code>ActionStatus</code> indicating outcome.
     */
    public int cancel ()
    {
        int outcome = super.cancel();

        /*
         * Now remove this thread from the reaper. Leave
         * the thread-to-tx association though.
         */

        TransactionReaper.transactionReaper().remove(this);

        return outcome;
    }

    /**
     * @return the timeout associated with this instance.
     */
    public final int getTimeout ()
    {
        return _timeout;
    }

    /**
     * The type of the class is used to locate the state of the transaction log
     * in the object store.
     *
     * Overloads BasicAction.type()
     *
     * @return a string representation of the hierarchy of the class for storing
     *         logs in the transaction object store.
     */
    public String type ()
    {
        return "/StateManager/BasicAction/TwoPhaseCoordinator/AtomicAction";
    }

    /**
     * Register the current thread with the transaction. This operation is not
     * affected by the state of the transaction.
     *
     * @return <code>true</code> if successful, <code>false</code>
     *         otherwise.
     */
    public boolean addThread ()
    {
        return addThread(Thread.currentThread());
    }

    /**
     * Register the specified thread with the transaction. This operation is not
     * affected by the state of the transaction.
     *
     * @param t the thread to register; ignored (returns false) when null.
     *
     * @return <code>true</code> if successful, <code>false</code>
     *         otherwise.
     */
    public boolean addThread (Thread t)
    {
        if (t != null)
        {
            ThreadActionData.pushAction(this);

            return true;
        }

        return false;
    }

    /**
     * Unregister the current thread from the transaction. This operation is not
     * affected by the state of the transaction.
     *
     * @return <code>true</code> if successful, <code>false</code>
     *         otherwise.
     */
    public boolean removeThread ()
    {
        return removeThread(Thread.currentThread());
    }

    /**
     * Unregister the specified thread from the transaction. This operation is
     * not affected by the state of the transaction.
     *
     * @param t the thread to unregister; ignored (returns false) when null.
     *
     * @return <code>true</code> if successful, <code>false</code>
     *         otherwise.
     */
    public boolean removeThread (Thread t)
    {
        if (t != null)
        {
            ThreadActionData.purgeAction(this, t);

            return true;
        }

        return false;
    }

    /**
     * Suspend all transaction association from the invoking thread. When this
     * operation returns, the thread will be associated with no transactions.
     *
     * If the current transaction is not an AtomicAction then this method will
     * not suspend.
     *
     * @return a handle on the current AtomicAction (if any) so that the thread
     *         can later resume association if required.
     *
     */
    public static final AtomicAction suspend ()
    {
        BasicAction curr = ThreadActionData.currentAction();

        if (curr != null)
        {
            if (curr instanceof AtomicAction)
                ThreadActionData.purgeActions();
            else {
                // A non-AtomicAction (e.g. a different coordinator type) is
                // current: warn and refuse to suspend it.
                tsLogger.i18NLogger.warn_ats_atomicaction_1(curr.toString());

                curr = null;
            }
        }

        return (AtomicAction) curr;
    }

    /**
     * Resume transaction association on the current thread. If the specified
     * transaction is null, then this is the same as doing a suspend. If the
     * current thread is associated with transactions then those associations
     * will be lost.
     *
     * @param act the transaction to associate. If this is a nested
     *            transaction, then the thread will be associated with all of
     *            the transactions in the hierarchy.
     *
     * @return <code>true</code> if association is successful,
     *         <code>false</code> otherwise.
     */
    public static final boolean resume (AtomicAction act)
    {
        if (act == null)
        {
            suspend(); // If you ever change this, you need to change the way resume is handled in /ArjunaJTS/integration/src/main/java/com/arjuna/ats/jbossatx/BaseTransactionManagerDelegate.java
        }
        else
            ThreadActionData.restoreActions(act);

        return true;
    }

    /**
     * Create a new transaction of the specified type.
     */
    protected AtomicAction (int at)
    {
        super(at);
    }

    /**
     * By default the BasicAction class only allows the termination of a
     * transaction if it's the one currently associated with the thread. We
     * override this here.
     *
     * @return <code>true</code> to indicate that this transaction can only be
     *         terminated by the right thread.
     */
    protected boolean checkForCurrent ()
    {
        return true;
    }

    // Timeout handed to the reaper at begin(); NO_TIMEOUT (-1) disables
    // reaper registration entirely.
    private int _timeout = NO_TIMEOUT;

}
| nmcl/scratch | graalvm/transactions/fork/narayana/ArjunaCore/arjuna/classes/com/arjuna/ats/arjuna/AtomicAction.java | Java | apache-2.0 | 9,975 |
//---------------------------------------------------------------------------
#include "register_map.h"
//---------------------------------------------------------------------------
//Register map for Si5356A
// Each entry is one register write: {register address, value, mask}
// (field meaning presumed from usage — confirm against Reg_Data in
// register_map.h). The {255, x, 0xFF} entries switch the device's register
// page bit, splitting the table into a page-0 and a page-1 section.
// Register values were produced by the SiLabs configuration tool; do not
// hand-edit individual entries.
Reg_Data Reg_Store[NUM_REGS_MAX] = {
{  0,0x00,0x00},
{  1,0x00,0x00},
{  2,0x00,0x00},
{  3,0x00,0x00},
{  4,0x00,0x00},
{  5,0x00,0x00},
{  6,0x04,0x1D},
{  7,0x00,0x00},
{  8,0x70,0x00},
{  9,0x0F,0x00},
{ 10,0x00,0x00},
{ 11,0x00,0x00},
{ 12,0x00,0x00},
{ 13,0x00,0x00},
{ 14,0x00,0x00},
{ 15,0x00,0x00},
{ 16,0x00,0x00},
{ 17,0x00,0x00},
{ 18,0x00,0x00},
{ 19,0x00,0x00},
{ 20,0x00,0x00},
{ 21,0x00,0x00},
{ 22,0x00,0x00},
{ 23,0x00,0x00},
{ 24,0x00,0x00},
{ 25,0x00,0x00},
{ 26,0x00,0x00},
{ 27,0x70,0x80},
{ 28,0x37,0xFF},
{ 29,0x20,0xFF},
{ 30,0xA8,0xFF},
{ 31,0xE3,0xFF},
{ 32,0xC0,0xFF},
{ 33,0xC0,0xFF},
{ 34,0xE3,0xFF},
{ 35,0x00,0xFF},
{ 36,0x00,0x1F},
{ 37,0x0B,0x1F},
{ 38,0x03,0x1F},
{ 39,0x00,0x1F},
{ 40,0xF7,0xFF},
{ 41,0x5E,0x7F},
{ 42,0x37,0x3F},
{ 43,0x00,0x00},
{ 44,0x00,0x00},
{ 45,0x00,0x00},
{ 46,0x00,0x00},
{ 47,0x14,0x3C},
{ 48,0x2E,0x7F},
{ 49,0x90,0x7F},
{ 50,0xDE,0xC0},
{ 51,0x07,0x00},
{ 52,0x10,0x0C},
{ 53,0x00,0xFF},
{ 54,0x00,0xFF},
{ 55,0x00,0xFF},
{ 56,0x00,0xFF},
{ 57,0x00,0xFF},
{ 58,0x00,0xFF},
{ 59,0x00,0xFF},
{ 60,0x00,0xFF},
{ 61,0x00,0xFF},
{ 62,0x00,0x3F},
{ 63,0x10,0x0C},
{ 64,0x00,0xFF},
{ 65,0x35,0xFF},
{ 66,0x00,0xFF},
{ 67,0x00,0xFF},
{ 68,0x00,0xFF},
{ 69,0x00,0xFF},
{ 70,0x01,0xFF},
{ 71,0x00,0xFF},
{ 72,0x00,0xFF},
{ 73,0x00,0x3F},
{ 74,0x10,0x0C},
{ 75,0x00,0xFF},
{ 76,0x35,0xFF},
{ 77,0x00,0xFF},
{ 78,0x00,0xFF},
{ 79,0x00,0xFF},
{ 80,0x00,0xFF},
{ 81,0x01,0xFF},
{ 82,0x00,0xFF},
{ 83,0x00,0xFF},
{ 84,0x00,0x3F},
{ 85,0x10,0x0C},
{ 86,0x00,0xFF},
{ 87,0x00,0xFF},
{ 88,0x00,0xFF},
{ 89,0x00,0xFF},
{ 90,0x00,0xFF},
{ 91,0x00,0xFF},
{ 92,0x00,0xFF},
{ 93,0x00,0xFF},
{ 94,0x00,0xFF},
{ 95,0x00,0x3F},
{ 96,0x10,0x00},
{ 97,0xCE,0xFF},
{ 98,0x21,0xFF},
{ 99,0x00,0xFF},
{100,0x01,0xFF},
{101,0x00,0xFF},
{102,0x00,0xFF},
{103,0x60,0xFF},
{104,0x00,0xFF},
{105,0x00,0xFF},
{106,0x80,0x3F},
{107,0x00,0xFF},
{108,0x00,0x7F},
{109,0x00,0x00},
{110,0x40,0xC0},
{111,0x00,0xFF},
{112,0x00,0x7F},
{113,0x00,0x00},
{114,0x40,0xC0},
{115,0x00,0xFF},
{116,0x80,0x7F},
{117,0x00,0x00},
{118,0x40,0xC0},
{119,0x00,0xFF},
{120,0x00,0xFF},
{121,0x00,0x00},
{122,0x40,0xC0},
{123,0x00,0x00},
{124,0x00,0x00},
{125,0x00,0x00},
{126,0x00,0x00},
{127,0x00,0x00},
{128,0x00,0x00},
{129,0x00,0x0F},
{130,0x00,0x0F},
{131,0x00,0x00},
{132,0x00,0x00},
{133,0x00,0x00},
{134,0x00,0x00},
{135,0x00,0x00},
{136,0x00,0x00},
{137,0x00,0x00},
{138,0x00,0x00},
{139,0x00,0x00},
{140,0x00,0x00},
{141,0x00,0x00},
{142,0x00,0x00},
{143,0x00,0x00},
{144,0x00,0x80},
{145,0x00,0x00},
{146,0xFF,0x00},
{147,0x00,0x00},
{148,0x00,0x00},
{149,0x00,0x00},
{150,0x00,0x00},
{151,0x00,0x00},
{152,0x00,0x00},
{153,0x00,0x00},
{154,0x00,0x00},
{155,0x00,0x00},
{156,0x00,0x00},
{157,0x00,0x00},
{158,0x00,0x0F},
{159,0x00,0x0F},
{160,0x00,0x00},
{161,0x00,0x00},
{162,0x00,0x00},
{163,0x00,0x00},
{164,0x00,0x00},
{165,0x00,0x00},
{166,0x00,0x00},
{167,0x00,0x00},
{168,0x00,0x00},
{169,0x00,0x00},
{170,0x00,0x00},
{171,0x00,0x00},
{172,0x00,0x00},
{173,0x00,0x00},
{174,0x00,0x00},
{175,0x00,0x00},
{176,0x00,0x00},
{177,0x00,0x00},
{178,0x00,0x00},
{179,0x00,0x00},
{180,0x00,0x00},
{181,0x00,0x0F},
{182,0x00,0x00},
{183,0x00,0x00},
{184,0x00,0x00},
{185,0x00,0x00},
{186,0x00,0x00},
{187,0x00,0x00},
{188,0x00,0x00},
{189,0x00,0x00},
{190,0x00,0x00},
{191,0x00,0x00},
{192,0x00,0x00},
{193,0x00,0x00},
{194,0x00,0x00},
{195,0x00,0x00},
{196,0x00,0x00},
{197,0x00,0x00},
{198,0x00,0x00},
{199,0x00,0x00},
{200,0x00,0x00},
{201,0x00,0x00},
{202,0x00,0x00},
{203,0x00,0x0F},
{204,0x00,0x00},
{205,0x00,0x00},
{206,0x00,0x00},
{207,0x00,0x00},
{208,0x00,0x00},
{209,0x00,0x00},
{210,0x00,0x00},
{211,0x00,0x00},
{212,0x00,0x00},
{213,0x00,0x00},
{214,0x00,0x00},
{215,0x00,0x00},
{216,0x00,0x00},
{217,0x00,0x00},
{218,0x00,0x00},
{219,0x00,0x00},
{220,0x00,0x00},
{221,0x0D,0x00},
{222,0x00,0x00},
{223,0x00,0x00},
{224,0xF4,0x00},
{225,0xF0,0x00},
{226,0x00,0x00},
{227,0x00,0x00},
{228,0x00,0x00},
{229,0x00,0x00},
// NOTE(review): registers 230, 241 and 246 are intentionally absent from the
// source table — preserved as-is.
{231,0x00,0x00},
{232,0x00,0x00},
{233,0x00,0x00},
{234,0x00,0x00},
{235,0x00,0x00},
{236,0x00,0x00},
{237,0x00,0x00},
{238,0x14,0x00},
{239,0x00,0x00},
{240,0x00,0x00},
{242,0x00,0x00},
{243,0xF0,0x00},
{244,0x00,0x00},
{245,0x00,0x00},
{247,0x00,0x00},
{248,0x00,0x00},
{249,0xA8,0x00},
{250,0x00,0x00},
{251,0x84,0x00},
{252,0x00,0x00},
{253,0x00,0x00},
{254,0x00,0x00},
{255, 1, 0xFF}, // set page bit to 1
// ---- page 1 registers follow ----
{  0,0x00,0x00},
{  1,0x00,0x00},
{  2,0x00,0x00},
{  3,0x00,0x00},
{  4,0x00,0x00},
{  5,0x00,0x00},
{  6,0x00,0x00},
{  7,0x00,0x00},
{  8,0x00,0x00},
{  9,0x00,0x00},
{ 10,0x00,0x00},
{ 11,0x00,0x00},
{ 12,0x00,0x00},
{ 13,0x00,0x00},
{ 14,0x00,0x00},
{ 15,0x00,0x00},
{ 16,0x00,0x00},
{ 17,0x01,0x00},
{ 18,0x00,0x00},
{ 19,0x00,0x00},
{ 20,0x90,0x00},
{ 21,0x31,0x00},
{ 22,0x00,0x00},
{ 23,0x00,0x00},
{ 24,0x01,0x00},
{ 25,0x00,0x00},
{ 26,0x00,0x00},
{ 27,0x00,0x00},
{ 28,0x00,0x00},
{ 29,0x00,0x00},
{ 30,0x00,0x00},
{ 31,0x00,0xFF},
{ 32,0x00,0xFF},
{ 33,0x01,0xFF},
{ 34,0x00,0xFF},
{ 35,0x00,0xFF},
{ 36,0x90,0xFF},
{ 37,0x31,0xFF},
{ 38,0x00,0xFF},
{ 39,0x00,0xFF},
{ 40,0x01,0xFF},
{ 41,0x00,0xFF},
{ 42,0x00,0xFF},
{ 43,0x00,0x0F},
{ 44,0x00,0x00},
{ 45,0x00,0x00},
{ 46,0x00,0x00},
{ 47,0x00,0xFF},
{ 48,0x00,0xFF},
{ 49,0x01,0xFF},
{ 50,0x00,0xFF},
{ 51,0x00,0xFF},
{ 52,0x90,0xFF},
{ 53,0x31,0xFF},
{ 54,0x00,0xFF},
{ 55,0x00,0xFF},
{ 56,0x01,0xFF},
{ 57,0x00,0xFF},
{ 58,0x00,0xFF},
{ 59,0x00,0x0F},
{ 60,0x00,0x00},
{ 61,0x00,0x00},
{ 62,0x00,0x00},
{ 63,0x00,0xFF},
{ 64,0x00,0xFF},
{ 65,0x01,0xFF},
{ 66,0x00,0xFF},
{ 67,0x00,0xFF},
{ 68,0x90,0xFF},
{ 69,0x31,0xFF},
{ 70,0x00,0xFF},
{ 71,0x00,0xFF},
{ 72,0x01,0xFF},
{ 73,0x00,0xFF},
{ 74,0x00,0xFF},
{ 75,0x00,0x0F},
{ 76,0x00,0x00},
{ 77,0x00,0x00},
{ 78,0x00,0x00},
{ 79,0x00,0xFF},
{ 80,0x00,0xFF},
{ 81,0x00,0xFF},
{ 82,0x00,0xFF},
{ 83,0x00,0xFF},
{ 84,0x90,0xFF},
{ 85,0x31,0xFF},
{ 86,0x00,0xFF},
{ 87,0x00,0xFF},
{ 88,0x01,0xFF},
{ 89,0x00,0xFF},
{ 90,0x00,0xFF},
{ 91,0x00,0x0F},
{ 92,0x00,0x00},
{ 93,0x00,0x00},
{ 94,0x00,0x00},
{255, 0, 0xFF} }; // set page bit to 0
//End of file
//CHECKSUM = 14CFBC138966364B666BF9BF3AA0FACA34D009AF
| myriadrf/lms-suite | LMS6002D/legacy/control_LMS6002/src/Logic/src/register_map.cpp | C++ | apache-2.0 | 6,649 |
// HTMLParser Library - A java-based parser for HTML
// http://htmlparser.org
// Copyright (C) 2006 Derrick Oswald
//
// Revision Control Information
//
// $URL: https://svn.sourceforge.net/svnroot/htmlparser/trunk/lexer/src/main/java/org/htmlparser/nodes/TextNode.java $
// $Author: derrickoswald $
// $Date: 2006-09-16 10:44:17 -0400 (Sat, 16 Sep 2006) $
// $Revision: 4 $
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the Common Public License; either
// version 1.0 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// Common Public License for more details.
//
// You should have received a copy of the Common Public License
// along with this library; if not, the license is available from
// the Open Source Initiative (OSI) website:
// http://opensource.org/licenses/cpl1.0.php
package org.htmlparser.nodes;
import org.htmlparser.Text;
import org.htmlparser.lexer.Cursor;
import org.htmlparser.lexer.Page;
import org.htmlparser.util.ParserException;
import org.htmlparser.visitors.NodeVisitor;
/**
 * Normal text in the HTML document is represented by this class.
 * The node's text is either an override string set via {@link #setText}, or —
 * when no override is set — a slice of the underlying page between the node's
 * start and end positions, fetched lazily on each access.
 */
public class TextNode
    extends
        AbstractNode
    implements
        Text
{
    /**
     * The contents of the string node, or override text.
     * When <code>null</code>, the text is read from the page between
     * {@link #getStartPosition} and {@link #getEndPosition} instead.
     */
    protected String mText;

    /**
     * Constructor takes in the text string.
     * @param text The string node text. For correct generation of HTML, this
     * should not contain representations of tags (unless they are balanced).
     */
    public TextNode (String text)
    {
        super (null, 0, 0);
        setText (text);
    }

    /**
     * Constructor takes in the page and beginning and ending posns.
     * @param page The page this string is on.
     * @param start The beginning position of the string.
     * @param end The ending positiong of the string.
     */
    public TextNode (Page page, int start, int end)
    {
        super (page, start, end);
        mText = null;
    }

    /**
     * Returns the text of the node.
     * This is the same as {@link #toHtml} for this type of node.
     * @return The contents of this text node.
     */
    public String getText ()
    {
        return (toHtml ());
    }

    /**
     * Sets the string contents of the node.
     * Also resets the node's begin/end offsets to span the new text, detaching
     * the node from any page-backed range.
     * @param text The new text for the node.
     */
    public void setText (String text)
    {
        mText = text;
        nodeBegin = 0;
        nodeEnd = mText.length ();
    }

    /**
     * Returns the text of the node.
     * This is the same as {@link #toHtml} for this type of node.
     * @return The contents of this text node.
     */
    public String toPlainTextString ()
    {
        return (toHtml ());
    }

    /**
     * Returns the text of the node.
     * @param verbatim If <code>true</code> return as close to the original
     * page text as possible. (Ignored here: both forms are identical for a
     * plain text node.)
     * @return The contents of this text node.
     */
    public String toHtml (boolean verbatim)
    {
        String ret;

        ret = mText;
        if (null == ret)
            ret = mPage.getText (getStartPosition (), getEndPosition ());

        return (ret);
    }

    /**
     * Appends <code>ch</code> to <code>buffer</code>, replacing tab, newline
     * and carriage return by their two-character printable escape sequences.
     * Shared by both branches of {@link #toString}.
     * @param buffer The buffer to append to.
     * @param ch The character to append.
     */
    private static void appendEscaped (StringBuffer buffer, char ch)
    {
        switch (ch)
        {
            case '\t':
                buffer.append ("\\t");
                break;
            case '\n':
                buffer.append ("\\n");
                break;
            case '\r':
                buffer.append ("\\r");
                break;
            default:
                buffer.append (ch);
        }
    }

    /**
     * Express this string node as a printable string
     * This is suitable for display in a debugger or output to a printout.
     * Control characters are replaced by their equivalent escape
     * sequence and contents is truncated to 80 characters.
     * @return A string representation of the string node.
     */
    public String toString ()
    {
        int startpos;
        int endpos;
        Cursor start;
        Cursor end;
        StringBuffer ret;

        startpos = getStartPosition ();
        endpos = getEndPosition ();
        ret = new StringBuffer (endpos - startpos + 20);
        if (null == mText)
        {
            // Page-backed node: walk the page with a cursor.
            start = new Cursor (getPage (), startpos);
            end = new Cursor (getPage (), endpos);
            ret.append ("Txt (");
            ret.append (start);
            ret.append (",");
            ret.append (end);
            ret.append ("): ");
            while (start.getPosition () < endpos)
            {
                try
                {
                    // getCharacter advances the cursor as a side effect.
                    appendEscaped (ret, mPage.getCharacter (start));
                }
                catch (ParserException pe)
                {
                    // not really expected, but we're only doing toString, so ignore
                }
                if (77 <= ret.length ())
                {
                    ret.append ("...");
                    break;
                }
            }
        }
        else
        {
            // Override text: iterate the string directly.
            ret.append ("Txt (");
            ret.append (startpos);
            ret.append (",");
            ret.append (endpos);
            ret.append ("): ");
            for (int i = 0; i < mText.length (); i++)
            {
                appendEscaped (ret, mText.charAt (i));
                if (77 <= ret.length ())
                {
                    ret.append ("...");
                    break;
                }
            }
        }

        return (ret.toString ());
    }

    /**
     * Returns if the node consists of only white space.
     * White space can be spaces, new lines, etc.
     * NOTE(review): a page-backed node (mText == null) always reports
     * <code>true</code> here without the page text ever being examined —
     * confirm this is the intended behaviour.
     * @return <code>true</code> if the override text is null, empty or blank.
     */
    public boolean isWhiteSpace()
    {
        return ((null == mText) || (0 == mText.trim ().length ()));
    }

    /**
     * String visiting code.
     * @param visitor The <code>NodeVisitor</code> object to invoke
     * <code>visitStringNode()</code> on.
     */
    public void accept (NodeVisitor visitor)
    {
        visitor.visitStringNode (this);
    }
}
| patrickfav/tuwien | master/swt workspace/HTMLParser/src/org/htmlparser/nodes/TextNode.java | Java | apache-2.0 | 6,785 |
//-----------------------------------------------------------------------
// <copyright file="One2OneBidiFlow.cs" company="Akka.NET Project">
// Copyright (C) 2009-2020 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2020 .NET Foundation <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using Akka.Streams.Stage;
namespace Akka.Streams.Dsl
{
    /// <summary>
    /// Factory for a <see cref="BidiFlow"/> that checks a wrapped flow for
    /// one-to-one conformance: each element entering the wrapped flow must
    /// yield exactly one element leaving it.
    /// </summary>
    public static class One2OneBidiFlow
    {
        /// <summary>
        /// Creates the checking <see cref="BidiFlow"/>, to be joined around the
        /// flow under inspection.
        /// </summary>
        /// <typeparam name="TIn">type of elements entering the wrapped flow</typeparam>
        /// <typeparam name="TOut">type of elements produced by the wrapped flow</typeparam>
        /// <param name="maxPending">maximum number of in-flight elements before
        /// upstream pulls are suppressed; -1 disables the limit</param>
        /// <returns>the one-to-one checking <see cref="BidiFlow"/></returns>
        public static BidiFlow<TIn, TIn, TOut, TOut> Apply<TIn, TOut>(int maxPending)
        {
            return BidiFlow.FromGraph(new One2OneBidi<TIn, TOut>(maxPending));
        }
    }
    /// <summary>
    /// Thrown by <see cref="One2OneBidi{TIn,TOut}"/> when the wrapped flow
    /// emits an element although no input element is pending, i.e. it produced
    /// more output than input.
    /// </summary>
    public class UnexpectedOutputException : Exception
    {
        /// <summary>
        /// Creates the exception; the offending element's string form becomes
        /// the exception message.
        /// </summary>
        /// <param name="element">the surplus element the wrapped flow emitted</param>
        public UnexpectedOutputException(object element) : base(element.ToString())
        {
        }
    }
    /// <summary>
    /// Thrown by <see cref="One2OneBidi{TIn,TOut}"/> when the wrapped flow
    /// completes while input elements are still pending, i.e. it produced
    /// fewer output elements than it consumed.
    /// </summary>
    public class OutputTruncationException : Exception
    {
    }
    /// <summary>
    /// Graph stage enforcing one-to-one conformance of a wrapped flow: a
    /// pending-element counter is incremented for every element forwarded into
    /// the flow and decremented for every element it emits. Surplus output
    /// raises <see cref="UnexpectedOutputException"/>; completing with elements
    /// still pending raises <see cref="OutputTruncationException"/>.
    /// </summary>
    /// <typeparam name="TIn">type of elements entering the wrapped flow</typeparam>
    /// <typeparam name="TOut">type of elements produced by the wrapped flow</typeparam>
    public class One2OneBidi<TIn, TOut> : GraphStage<BidiShape<TIn, TIn, TOut, TOut>>
    {
        #region internal classes

        private sealed class Logic : GraphStageLogic
        {
            private readonly int _maxPending;
            private readonly Inlet<TIn> _inInlet;
            private readonly Outlet<TIn> _inOutlet;
            private readonly Inlet<TOut> _outInlet;
            private readonly Outlet<TOut> _outOutlet;

            // Number of elements forwarded into the wrapped flow whose matching
            // output has not been seen yet.
            private int _pending;
            // Set when a pull was withheld because _pending hit _maxPending;
            // replayed as soon as an output element frees a slot.
            private bool _pullSuppressed;

            public Logic(One2OneBidi<TIn, TOut> stage) : base(stage.Shape)
            {
                _maxPending = stage._maxPending;
                _inInlet = stage._inInlet;
                _inOutlet = stage._inOutlet;
                _outInlet = stage._outInlet;
                _outOutlet = stage._outOutlet;

                SetInInletHandler();
                SetInOutletHandler();
                SetOutInletHandler();
                SetOutOutletHandler();
            }

            private void SetInInletHandler()
            {
                // Count each incoming element as pending before forwarding it
                // into the wrapped flow.
                SetHandler(_inInlet, onPush: () =>
                {
                    _pending += 1;
                    Push(_inOutlet, Grab(_inInlet));
                },
                onUpstreamFinish: () => Complete(_inOutlet));
            }

            private void SetInOutletHandler()
            {
                // Stop pulling upstream once maxPending is reached
                // (-1 disables the limit); remember the withheld pull.
                SetHandler(_inOutlet, onPull: () =>
                {
                    if (_pending < _maxPending || _maxPending == -1)
                        Pull(_inInlet);
                    else
                        _pullSuppressed = true;
                },
                onDownstreamFinish: () => Cancel(_inInlet));
            }

            private void SetOutInletHandler()
            {
                SetHandler(_outInlet, onPush: () =>
                {
                    var element = Grab(_outInlet);

                    // Output without a matching pending input: the wrapped
                    // flow produced more elements than it consumed.
                    if (_pending <= 0)
                        throw new UnexpectedOutputException(element);

                    _pending -= 1;
                    Push(_outOutlet, element);

                    // An output slot freed up — replay a previously withheld pull.
                    if (_pullSuppressed)
                    {
                        _pullSuppressed = false;
                        if(!IsClosed(_inInlet))
                            Pull(_inInlet);
                    }
                }, onUpstreamFinish: () =>
                {
                    // Completing while elements are pending means output was truncated.
                    if (_pending != 0)
                        throw new OutputTruncationException();

                    Complete(_outOutlet);
                });
            }

            private void SetOutOutletHandler()
            {
                SetHandler(_outOutlet, onPull: () => Pull(_outInlet), onDownstreamFinish: () => Cancel(_outInlet));
            }
        }

        #endregion

        // Maximum in-flight elements; -1 means unbounded.
        private readonly int _maxPending;
        private readonly Inlet<TIn> _inInlet = new Inlet<TIn>("inIn");
        private readonly Outlet<TIn> _inOutlet = new Outlet<TIn>("inOut");
        private readonly Inlet<TOut> _outInlet = new Inlet<TOut>("outIn");
        private readonly Outlet<TOut> _outOutlet = new Outlet<TOut>("outOut");

        /// <summary>
        /// Creates the stage.
        /// </summary>
        /// <param name="maxPending">maximum number of in-flight elements before
        /// upstream pulls are suppressed; -1 disables the limit</param>
        public One2OneBidi(int maxPending)
        {
            _maxPending = maxPending;
            Shape = new BidiShape<TIn, TIn, TOut, TOut>(_inInlet, _inOutlet, _outInlet, _outOutlet);
        }

        /// <summary>
        /// The stage's bidi shape: in-top, in-bottom, out-top, out-bottom.
        /// </summary>
        public override BidiShape<TIn, TIn, TOut, TOut> Shape { get; }

        /// <summary>
        /// Names the stage "One2OneBidi" for diagnostics.
        /// </summary>
        protected override Attributes InitialAttributes { get; } = Attributes.CreateName("One2OneBidi");

        /// <summary>
        /// Creates the per-materialization <see cref="GraphStageLogic"/>.
        /// </summary>
        /// <param name="inheritedAttributes">attributes inherited from the enclosing graph (unused)</param>
        /// <returns>a fresh <see cref="Logic"/> instance</returns>
        protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);

        /// <summary>
        /// Human-readable stage name.
        /// </summary>
        /// <returns>"One2OneBidi"</returns>
        public override string ToString() => "One2OneBidi";
    }
}
| simonlaroche/akka.net | src/core/Akka.Streams/Dsl/One2OneBidiFlow.cs | C# | apache-2.0 | 5,778 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2015, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.el.resolver;
import javax.inject.Named;
/**
 * Bean published to the EL under the deliberately dotted name
 * "com.acme.settings" — presumably exercised by the EL-resolver tests to
 * check resolution of compound names like com.acme.settings.foo; confirm
 * against the accompanying test class.
 */
@Named("com.acme.settings")
public class MyBean {

    /**
     * @return the constant string "foo"
     */
    public String getFoo() {
        return "foo";
    }
}
| antoinesd/weld-core | tests-arquillian/src/test/java/org/jboss/weld/tests/el/resolver/MyBean.java | Java | apache-2.0 | 957 |
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.Shape;
import org.eclipse.draw2d.StackLayout;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.Request;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.editpolicies.LayoutEditPolicy;
import org.eclipse.gef.editpolicies.NonResizableEditPolicy;
import org.eclipse.gef.requests.CreateRequest;
import org.eclipse.gmf.runtime.diagram.ui.editparts.AbstractBorderItemEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.gmf.runtime.gef.ui.figures.DefaultSizeNodeFigure;
import org.eclipse.gmf.runtime.gef.ui.figures.NodeFigure;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.swt.graphics.Color;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.AbstractMediatorInputConnectorEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.custom.EastPointerShape;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies.EnqueueMediatorInputConnectorItemSemanticEditPolicy;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
/**
* @generated NOT
*/
public class EnqueueMediatorInputConnectorEditPart extends AbstractMediatorInputConnectorEditPart {
/**
 * GMF visual id identifying this connector node type in the notation model.
 * @generated
 */
public static final int VISUAL_ID = 3601;

/**
 * Pane that would hold child figures; populated by the GMF runtime.
 * @generated
 */
protected IFigure contentPane;

/**
 * The figure created by {@link #createNodeShape()}; see {@link #getPrimaryShape()}.
 * @generated
 */
protected IFigure primaryShape;
/**
* @generated
*/
public EnqueueMediatorInputConnectorEditPart(View view) {
super(view);
}
/**
* @generated
*/
protected void createDefaultEditPolicies() {
super.createDefaultEditPolicies();
installEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE, getPrimaryDragEditPolicy());
installEditPolicy(EditPolicyRoles.SEMANTIC_ROLE, new EnqueueMediatorInputConnectorItemSemanticEditPolicy());
installEditPolicy(EditPolicy.LAYOUT_ROLE, createLayoutEditPolicy());
// XXX need an SCR to runtime to have another abstract superclass that would let children add reasonable
// editpolicies
removeEditPolicy(org.eclipse.gmf.runtime.diagram.ui.editpolicies.EditPolicyRoles.CONNECTION_HANDLES_ROLE);
}
/**
* @generated
*/
protected LayoutEditPolicy createLayoutEditPolicy() {
org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy lep = new org.eclipse.gmf.runtime.diagram.ui.editpolicies.LayoutEditPolicy() {
protected EditPolicy createChildEditPolicy(EditPart child) {
EditPolicy result = child.getEditPolicy(EditPolicy.PRIMARY_DRAG_ROLE);
if (result == null) {
result = new NonResizableEditPolicy();
}
return result;
}
protected Command getMoveChildrenCommand(Request request) {
return null;
}
protected Command getCreateCommand(CreateRequest request) {
return null;
}
};
return lep;
}
/**
* @generated
*/
protected IFigure createNodeShape() {
return primaryShape = new EastPointerFigure();
}
/**
* @generated
*/
public EastPointerFigure getPrimaryShape() {
return (EastPointerFigure) primaryShape;
}
/**
* @generated
*/
protected NodeFigure createNodePlate() {
DefaultSizeNodeFigure result = new DefaultSizeNodeFigure(12, 10);
// FIXME: workaround for #154536
result.getBounds().setSize(result.getPreferredSize());
return result;
}
/**
* Creates figure for this edit part.
*
* Body of this method does not depend on settings in generation model
* so you may safely remove <i>generated</i> tag and modify it.
*
* @generated NOT
*/
protected NodeFigure createNodeFigure() {
NodeFigure figure = createNodePlate();
figure.setLayoutManager(new StackLayout());
IFigure shape = createNodeShapeForward();
figure.add(shape);
contentPane = setupContentPane(shape);
figure_ = figure;
createNodeShapeReverse();
return figure;
}
/**
* Default implementation treats passed figure as content pane.
* Respects layout one may have set for generated figure.
*
* @param nodeShape instance of generated figure class
* @generated
*/
protected IFigure setupContentPane(IFigure nodeShape) {
return nodeShape; // use nodeShape itself as contentPane
}
/**
* @generated
*/
public IFigure getContentPane() {
if (contentPane != null) {
return contentPane;
}
return super.getContentPane();
}
/**
* @generated
*/
protected void setForegroundColor(Color color) {
if (primaryShape != null) {
primaryShape.setForegroundColor(color);
}
}
/**
* @generated
*/
protected void setBackgroundColor(Color color) {
if (primaryShape != null) {
primaryShape.setBackgroundColor(color);
}
}
/**
* @generated
*/
protected void setLineWidth(int width) {
if (primaryShape instanceof Shape) {
((Shape) primaryShape).setLineWidth(width);
}
}
/**
* @generated
*/
protected void setLineType(int style) {
if (primaryShape instanceof Shape) {
((Shape) primaryShape).setLineStyle(style);
}
}
/**
* @generated
*/
public List<IElementType> getMARelTypesOnTarget() {
ArrayList<IElementType> types = new ArrayList<IElementType>(1);
types.add(EsbElementTypes.EsbLink_4001);
return types;
}
/**
* @generated
*/
public List<IElementType> getMATypesForSource(IElementType relationshipType) {
LinkedList<IElementType> types = new LinkedList<IElementType>();
if (relationshipType == EsbElementTypes.EsbLink_4001) {
types.add(EsbElementTypes.ProxyOutputConnector_3002);
types.add(EsbElementTypes.PropertyMediatorOutputConnector_3034);
types.add(EsbElementTypes.PropertyGroupMediatorOutputConnector_3790);
types.add(EsbElementTypes.ThrottleMediatorOutputConnector_3122);
types.add(EsbElementTypes.ThrottleMediatorOnAcceptOutputConnector_3581);
types.add(EsbElementTypes.ThrottleMediatorOnRejectOutputConnector_3582);
types.add(EsbElementTypes.FilterMediatorOutputConnector_3534);
types.add(EsbElementTypes.FilterMediatorPassOutputConnector_3011);
types.add(EsbElementTypes.FilterMediatorFailOutputConnector_3012);
types.add(EsbElementTypes.LogMediatorOutputConnector_3019);
types.add(EsbElementTypes.EnrichMediatorOutputConnector_3037);
types.add(EsbElementTypes.XSLTMediatorOutputConnector_3040);
types.add(EsbElementTypes.SwitchCaseBranchOutputConnector_3043);
types.add(EsbElementTypes.SwitchDefaultBranchOutputConnector_3044);
types.add(EsbElementTypes.SwitchMediatorOutputConnector_3499);
types.add(EsbElementTypes.SequenceOutputConnector_3050);
types.add(EsbElementTypes.EventMediatorOutputConnector_3053);
types.add(EsbElementTypes.EntitlementMediatorOutputConnector_3056);
types.add(EsbElementTypes.ClassMediatorOutputConnector_3059);
types.add(EsbElementTypes.SpringMediatorOutputConnector_3062);
types.add(EsbElementTypes.ScriptMediatorOutputConnector_3065);
types.add(EsbElementTypes.FaultMediatorOutputConnector_3068);
types.add(EsbElementTypes.XQueryMediatorOutputConnector_3071);
types.add(EsbElementTypes.CommandMediatorOutputConnector_3074);
types.add(EsbElementTypes.DBLookupMediatorOutputConnector_3077);
types.add(EsbElementTypes.DBReportMediatorOutputConnector_3080);
types.add(EsbElementTypes.SmooksMediatorOutputConnector_3083);
types.add(EsbElementTypes.SendMediatorOutputConnector_3086);
types.add(EsbElementTypes.SendMediatorEndpointOutputConnector_3539);
types.add(EsbElementTypes.DefaultEndPointOutputConnector_3022);
types.add(EsbElementTypes.AddressEndPointOutputConnector_3031);
types.add(EsbElementTypes.FailoverEndPointOutputConnector_3090);
types.add(EsbElementTypes.FailoverEndPointWestOutputConnector_3097);
types.add(EsbElementTypes.WSDLEndPointOutputConnector_3093);
types.add(EsbElementTypes.LoadBalanceEndPointOutputConnector_3096);
types.add(EsbElementTypes.LoadBalanceEndPointWestOutputConnector_3098);
types.add(EsbElementTypes.HeaderMediatorOutputConnector_3101);
types.add(EsbElementTypes.CloneMediatorOutputConnector_3104);
types.add(EsbElementTypes.CloneMediatorTargetOutputConnector_3133);
types.add(EsbElementTypes.CacheMediatorOutputConnector_3107);
types.add(EsbElementTypes.IterateMediatorOutputConnector_3110);
types.add(EsbElementTypes.CalloutMediatorOutputConnector_3116);
types.add(EsbElementTypes.TransactionMediatorOutputConnector_3119);
types.add(EsbElementTypes.RMSequenceMediatorOutputConnector_3125);
types.add(EsbElementTypes.RuleMediatorOutputConnector_3128);
types.add(EsbElementTypes.OAuthMediatorOutputConnector_3131);
types.add(EsbElementTypes.AggregateMediatorOutputConnector_3113);
types.add(EsbElementTypes.AggregateMediatorOnCompleteOutputConnector_3132);
types.add(EsbElementTypes.StoreMediatorOutputConnector_3590);
types.add(EsbElementTypes.BuilderMediatorOutputConector_3593);
types.add(EsbElementTypes.CallTemplateMediatorOutputConnector_3596);
types.add(EsbElementTypes.PayloadFactoryMediatorOutputConnector_3599);
types.add(EsbElementTypes.EnqueueMediatorOutputConnector_3602);
types.add(EsbElementTypes.MessageOutputConnector_3047);
types.add(EsbElementTypes.MergeNodeOutputConnector_3016);
types.add(EsbElementTypes.JsonTransformMediatorOutputConnector_3793);
}
return types;
}
/**
* @generated
*/
public class EastPointerFigure extends EastPointerShape {
/**
* @generated
*/
public EastPointerFigure() {
this.setBackgroundColor(THIS_BACK);
this.setPreferredSize(new Dimension(getMapMode().DPtoLP(12), getMapMode().DPtoLP(10)));
}
}
/**
* @generated
*/
static final Color THIS_BACK = new Color(null, 50, 50, 50);
}
| prabushi/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/parts/EnqueueMediatorInputConnectorEditPart.java | Java | apache-2.0 | 11,166 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action.admin.cluster.repositories.get;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.RepositoriesMetaData;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.*;
import org.elasticsearch.rest.action.support.RestBuilderListener;
import static org.elasticsearch.client.Requests.getRepositoryRequest;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestStatus.OK;
/**
* Returns repository information
*/
/**
 * REST handler that returns snapshot repository information for
 * {@code GET /_snapshot} and {@code GET /_snapshot/{repository}}.
 */
public class RestGetRepositoriesAction extends BaseRestHandler {

    @Inject
    public RestGetRepositoriesAction(Settings settings, RestController controller, Client client) {
        super(settings, controller, client);
        // Both the "all repositories" and the "named repository" routes map here.
        controller.registerHandler(GET, "/_snapshot", this);
        controller.registerHandler(GET, "/_snapshot/{repository}", this);
    }

    @Override
    public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
        // An absent "repository" parameter means "list every repository".
        final String[] repositoryNames = request.paramAsStringArray("repository", Strings.EMPTY_ARRAY);
        final GetRepositoriesRequest getRepoRequest = getRepositoryRequest(repositoryNames);
        getRepoRequest.local(request.paramAsBoolean("local", getRepoRequest.local()));
        getRepoRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRepoRequest.masterNodeTimeout()));
        client.admin().cluster().getRepositories(getRepoRequest, new RestBuilderListener<GetRepositoriesResponse>(channel) {
            @Override
            public RestResponse buildResponse(GetRepositoriesResponse response, XContentBuilder builder) throws Exception {
                // Render each repository's metadata into one top-level JSON object.
                builder.startObject();
                for (RepositoryMetaData metaData : response.repositories()) {
                    RepositoriesMetaData.toXContent(metaData, builder, request);
                }
                builder.endObject();
                return new BytesRestResponse(OK, builder);
            }
        });
    }
}
| Flipkart/elasticsearch | src/main/java/org/elasticsearch/rest/action/admin/cluster/repositories/get/RestGetRepositoriesAction.java | Java | apache-2.0 | 3,286 |
package com.github.davidmoten.rtree;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.List;
import java.util.zip.GZIPInputStream;
import rx.Observable;
import rx.functions.Action1;
import rx.functions.Func0;
import rx.functions.Func1;
import rx.observables.StringObservable;
import com.github.davidmoten.rtree.geometry.Geometries;
import com.github.davidmoten.rtree.geometry.Point;
/**
 * Loads the bundled Greek earthquakes data set (1964-2000) and exposes it as
 * R-tree entries, one {@link Point} per line of the gzipped resource file.
 */
public class GreekEarthquakes {

    /**
     * Streams the data set as entries. The gzip stream is opened lazily via
     * {@code Observable.using} and closed by its disposer when the observable
     * terminates or is unsubscribed.
     */
    static Observable<Entry<Object, Point>> entries() {
        Observable<String> source = Observable.using(new Func0<InputStream>() {
            // Resource factory: open the classpath resource on subscription.
            @Override
            public InputStream call() {
                try {
                    return new GZIPInputStream(GreekEarthquakes.class
                            .getResourceAsStream("/greek-earthquakes-1964-2000.txt.gz"));
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        }, new Func1<InputStream, Observable<String>>() {
            // Observable factory: turn the open stream into a stream of chunks.
            @Override
            public Observable<String> call(InputStream is) {
                return StringObservable.from(new InputStreamReader(is));
            }
        }, new Action1<InputStream>() {
            // Disposer: close the stream when the observable terminates.
            @Override
            public void call(InputStream is) {
                try {
                    is.close();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        });
        // Each non-blank line holds space-separated coordinates; the first two
        // tokens are parsed as lat then lon — assumes that column order holds
        // for the whole file (TODO confirm against the resource).
        return StringObservable.split(source, "\n").flatMap(
                new Func1<String, Observable<Entry<Object, Point>>>() {
                    @Override
                    public Observable<Entry<Object, Point>> call(String line) {
                        if (line.trim().length() > 0) {
                            String[] items = line.split(" ");
                            double lat = Double.parseDouble(items[0]);
                            double lon = Double.parseDouble(items[1]);
                            return Observable.just(Entry.entry(new Object(),
                                    Geometries.point(lat, lon)));
                        } else
                            return Observable.empty();
                    }
                });
    }

    /**
     * Materializes the whole data set into a list, blocking the caller until
     * the stream completes.
     */
    static List<Entry<Object, Point>> entriesList() {
        return entries().toList().toBlocking().single();
    }
}
| lhyqie/rtree | src/test/java/com/github/davidmoten/rtree/GreekEarthquakes.java | Java | apache-2.0 | 2,411 |
// NOTE(review): generated TypeScript compiler conformance baseline. The sections
// below are the original .ts input, the emitted .js, and the emitted .d.ts,
// concatenated by the test harness. Do not hand-edit; regenerate via the
// compiler test suite if the expected output changes.
//// [declFileWithExtendsClauseThatHasItsContainerNameConflict.ts]
declare module A.B.C {
    class B {
    }
}

module A.B {
    export class EventManager {
        id: number;
    }
}

module A.B.C {
    export class ContextMenu extends EventManager {
        name: string;
    }
}

//// [declFileWithExtendsClauseThatHasItsContainerNameConflict.js]
// Compiler-emitted prototype-chain helper for `extends`.
var __extends = (this && this.__extends) || function (d, b) {
    for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
    function __() { this.constructor = d; }
    __.prototype = b.prototype;
    d.prototype = new __();
};
var A;
(function (A) {
    var B;
    (function (B) {
        var EventManager = (function () {
            function EventManager() {
            }
            return EventManager;
        })();
        B.EventManager = EventManager;
    })(B = A.B || (A.B = {}));
})(A || (A = {}));
// Second declaration of `var A` is how the compiler reopens the same namespace.
var A;
(function (A) {
    var B;
    (function (B) {
        var C;
        (function (C) {
            var ContextMenu = (function (_super) {
                __extends(ContextMenu, _super);
                function ContextMenu() {
                    _super.apply(this, arguments);
                }
                return ContextMenu;
            })(B.EventManager);
            C.ContextMenu = ContextMenu;
        })(C = B.C || (B.C = {}));
    })(B = A.B || (A.B = {}));
})(A || (A = {}));

//// [declFileWithExtendsClauseThatHasItsContainerNameConflict.d.ts]
declare module A.B.C {
    class B {
    }
}
declare module A.B {
    class EventManager {
        id: number;
    }
}
declare module A.B.C {
    class ContextMenu extends EventManager {
        name: string;
    }
}
| Raynos/TypeScript | tests/baselines/reference/declFileWithExtendsClauseThatHasItsContainerNameConflict.js | JavaScript | apache-2.0 | 1,706 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.mallet;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
/**
 * Thin wrapper around the TPC-DS {@code dsqgen} command-line tool.
 */
public class TpcdsTool {

    private static final Logger logger = Logger.getLogger(TpcdsTool.class.getName());

    /**
     * Runs {@code dsqgen} to expand the query templates into one SQL file per stream.
     *
     * @param numberOfStreams number of concurrent query streams to generate
     * @return paths of the generated {@code query_<i>.sql} files, one per stream,
     *         located in the configured temp directory
     * @throws MalletException if the tool cannot be started, is interrupted, exits
     *         with a non-zero status, or an expected output file is missing
     */
    public static String[] generateStreamSqlFile(int numberOfStreams) throws MalletException {
        Conf conf = Conf.getConf();

        // /*WORKAROUND*/
        // The dsqgen tool has problems with long path names, so it is run from the
        // tool directory and the template directory is addressed relatively.
        String templateDirectory = "../query_templates";
        if (conf.isQuickRunMode()) {
            templateDirectory += "/quickrun";
        }
        String templateListFile = templateDirectory + "/templates.lst";
        String outputDirectory = conf.getTempDirectory();

        // Build the command as an argument list: unlike the old single command
        // string (split on whitespace by Runtime.exec), this survives spaces in
        // the configured paths.
        List<String> command = new ArrayList<String>();
        command.add("./dsqgen");
        command.add("-INPUT");
        command.add(templateListFile);
        command.add("-DIRECTORY");
        command.add(templateDirectory);
        command.add("-OUTPUT_DIR");
        command.add(outputDirectory);
        command.add("-DIALECT");
        command.add("hive");
        command.add("-STREAMS");
        command.add(Integer.toString(numberOfStreams));
        command.add("-SCALE");
        command.add(String.valueOf(conf.getScale()));
        if (conf.isSingleQueryMode()) {
            command.add("-TEMPLATE");
            command.add("query" + conf.getQueryId() + ".tpl");
        }

        // Invoke the TPC-DS tool to generate queries from templates
        logger.info("Invoke TPC-DS tool to generate queries from templates:");
        logger.info("  " + command);

        ProcessBuilder processBuilder = new ProcessBuilder(command);
        processBuilder.directory(new File(conf.getTpcDsToolDirectory()));
        // Merge stderr into stdout so a single drain loop suffices.
        processBuilder.redirectErrorStream(true);

        Process toolProcess;
        try {
            toolProcess = processBuilder.start();
        } catch (IOException e) {
            throw new MalletException("Failed to invoke TPC-DS tool.", e);
        }

        // Drain the tool's output; an undrained pipe can fill up and block dsqgen.
        try {
            BufferedReader reader =
                    new BufferedReader(new InputStreamReader(toolProcess.getInputStream()));
            try {
                String line;
                while ((line = reader.readLine()) != null) {
                    logger.fine(line);
                }
            } finally {
                reader.close();
            }
        } catch (IOException e) {
            toolProcess.destroy();
            throw new MalletException("Failed to read TPC-DS tool output.", e);
        }

        // Wait for the termination of the tool process. The previous code swallowed
        // InterruptedException and then called exitValue() on a possibly
        // still-running process (IllegalThreadStateException); instead, restore the
        // interrupt flag and fail explicitly.
        try {
            toolProcess.waitFor();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            toolProcess.destroy();
            throw new MalletException("Interrupted while waiting for TPC-DS tool.", e);
        }

        // Check if the tool process has any error
        if (toolProcess.exitValue() != 0) {
            throw new MalletException("TPC-DS tool exited with error.");
        }

        // Return the SQL file names for each stream, verifying each file exists.
        String[] sqlFileNames = new String[numberOfStreams];
        for (int i = 0; i < numberOfStreams; i++) {
            String sqlFileName = outputDirectory + "/query_" + i + ".sql";
            sqlFileNames[i] = sqlFileName;
            if (!(new File(sqlFileName)).exists()) {
                throw new MalletException("TPC-DS tool succeeded, but can't find " + sqlFileName);
            }
        }
        return sqlFileNames;
    }

    /**
     * Placeholder for refresh (data-maintenance) data set generation.
     * Not yet implemented.
     */
    public static void generateRefreshDataSets() throws MalletException {
        Conf conf = Conf.getConf();
        // TODO: implement refresh data set generation.
    }
}
| wyg1990/Mallet | src/main/java/com/intel/mallet/TpcdsTool.java | Java | apache-2.0 | 3,375 |
'use strict';
// Generated AngularJS (1.x) locale bundle for Ukrainian (Ukraine), "uk-ua".
// Registers the $locale service value; all Cyrillic strings are \uXXXX-escaped.
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};

// Number of digits after the decimal point in the string form of n.
function getDecimals(n) {
  n = n + '';
  var i = n.indexOf('.');
  return (i == -1) ? 0 : n.length - i - 1;
}

// Returns {v: visible fraction digit count (capped at 3), f: fraction digits
// as an integer} — the CLDR plural-rule operands for n.
function getVF(n, opt_precision) {
  var v = opt_precision;

  if (undefined === v) {
    v = Math.min(getDecimals(n), 3);
  }

  var base = Math.pow(10, v);
  var f = ((n * base) | 0) % base;
  return {v: v, f: f};
}

$provide.value("$locale", {
  "DATETIME_FORMATS": {
    "AMPMS": [
      "\u0434\u043f",
      "\u043f\u043f"
    ],
    "DAY": [
      "\u043d\u0435\u0434\u0456\u043b\u044f",
      "\u043f\u043e\u043d\u0435\u0434\u0456\u043b\u043e\u043a",
      "\u0432\u0456\u0432\u0442\u043e\u0440\u043e\u043a",
      "\u0441\u0435\u0440\u0435\u0434\u0430",
      "\u0447\u0435\u0442\u0432\u0435\u0440",
      "\u043f\u02bc\u044f\u0442\u043d\u0438\u0446\u044f",
      "\u0441\u0443\u0431\u043e\u0442\u0430"
    ],
    "ERANAMES": [
      "\u0434\u043e \u043d\u0430\u0448\u043e\u0457 \u0435\u0440\u0438",
      "\u043d\u0430\u0448\u043e\u0457 \u0435\u0440\u0438"
    ],
    "ERAS": [
      "\u0434\u043e \u043d. \u0435.",
      "\u043d. \u0435."
    ],
    "FIRSTDAYOFWEEK": 0,
    "MONTH": [
      "\u0441\u0456\u0447\u043d\u044f",
      "\u043b\u044e\u0442\u043e\u0433\u043e",
      "\u0431\u0435\u0440\u0435\u0437\u043d\u044f",
      "\u043a\u0432\u0456\u0442\u043d\u044f",
      "\u0442\u0440\u0430\u0432\u043d\u044f",
      "\u0447\u0435\u0440\u0432\u043d\u044f",
      "\u043b\u0438\u043f\u043d\u044f",
      "\u0441\u0435\u0440\u043f\u043d\u044f",
      "\u0432\u0435\u0440\u0435\u0441\u043d\u044f",
      "\u0436\u043e\u0432\u0442\u043d\u044f",
      "\u043b\u0438\u0441\u0442\u043e\u043f\u0430\u0434\u0430",
      "\u0433\u0440\u0443\u0434\u043d\u044f"
    ],
    "SHORTDAY": [
      "\u041d\u0434",
      "\u041f\u043d",
      "\u0412\u0442",
      "\u0421\u0440",
      "\u0427\u0442",
      "\u041f\u0442",
      "\u0421\u0431"
    ],
    "SHORTMONTH": [
      "\u0441\u0456\u0447.",
      "\u043b\u044e\u0442.",
      "\u0431\u0435\u0440.",
      "\u043a\u0432\u0456\u0442.",
      "\u0442\u0440\u0430\u0432.",
      "\u0447\u0435\u0440\u0432.",
      "\u043b\u0438\u043f.",
      "\u0441\u0435\u0440\u043f.",
      "\u0432\u0435\u0440.",
      "\u0436\u043e\u0432\u0442.",
      "\u043b\u0438\u0441\u0442.",
      "\u0433\u0440\u0443\u0434."
    ],
    "STANDALONEMONTH": [
      "\u0441\u0456\u0447\u0435\u043d\u044c",
      "\u043b\u044e\u0442\u0438\u0439",
      "\u0431\u0435\u0440\u0435\u0437\u0435\u043d\u044c",
      "\u043a\u0432\u0456\u0442\u0435\u043d\u044c",
      "\u0442\u0440\u0430\u0432\u0435\u043d\u044c",
      "\u0447\u0435\u0440\u0432\u0435\u043d\u044c",
      "\u043b\u0438\u043f\u0435\u043d\u044c",
      "\u0441\u0435\u0440\u043f\u0435\u043d\u044c",
      "\u0432\u0435\u0440\u0435\u0441\u0435\u043d\u044c",
      "\u0436\u043e\u0432\u0442\u0435\u043d\u044c",
      "\u043b\u0438\u0441\u0442\u043e\u043f\u0430\u0434",
      "\u0433\u0440\u0443\u0434\u0435\u043d\u044c"
    ],
    "WEEKENDRANGE": [
      5,
      6
    ],
    "fullDate": "EEEE, d MMMM y '\u0440'.",
    "longDate": "d MMMM y '\u0440'.",
    "medium": "d MMM y '\u0440'. HH:mm:ss",
    "mediumDate": "d MMM y '\u0440'.",
    "mediumTime": "HH:mm:ss",
    "short": "dd.MM.yy HH:mm",
    "shortDate": "dd.MM.yy",
    "shortTime": "HH:mm"
  },
  "NUMBER_FORMATS": {
    "CURRENCY_SYM": "\u0433\u0440\u043d.",
    "DECIMAL_SEP": ",",
    "GROUP_SEP": "\u00a0",
    "PATTERNS": [
      {
        "gSize": 3,
        "lgSize": 3,
        "maxFrac": 3,
        "minFrac": 0,
        "minInt": 1,
        "negPre": "-",
        "negSuf": "",
        "posPre": "",
        "posSuf": ""
      },
      {
        "gSize": 3,
        "lgSize": 3,
        "maxFrac": 2,
        "minFrac": 2,
        "minInt": 1,
        "negPre": "-",
        "negSuf": "\u00a0\u00a4",
        "posPre": "",
        "posSuf": "\u00a0\u00a4"
      }
    ]
  },
  "id": "uk-ua",
  "localeID": "uk_UA",
  // CLDR plural rules for Ukrainian (one/few/many/other).
  "pluralCat": function(n, opt_precision) {  var i = n | 0;  var vf = getVF(n, opt_precision);  if (vf.v == 0 && i % 10 == 1 && i % 100 != 11) {    return PLURAL_CATEGORY.ONE;  }  if (vf.v == 0 && i % 10 >= 2 && i % 10 <= 4 && (i % 100 < 12 || i % 100 > 14)) {    return PLURAL_CATEGORY.FEW;  }  if (vf.v == 0 && i % 10 == 0 || vf.v == 0 && i % 10 >= 5 && i % 10 <= 9 || vf.v == 0 && i % 100 >= 11 && i % 100 <= 14) {    return PLURAL_CATEGORY.MANY;  }  return PLURAL_CATEGORY.OTHER;}
});
}]);
| yoyocms/YoYoCms.AbpProjectTemplate | src/YoYoCms.AbpProjectTemplate.Web/Scripts/i18n/angular-locale_uk-ua.js | JavaScript | apache-2.0 | 4,569 |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
var assert = require('assert');
// Test includes
var testutil = require('../../framework/util');
// Lib includes
var ISO8061Date = require('../../../lib/common/util/iso8061date');
var date = require('../../../lib/common/util/date');
// Round-trip tests for ISO8061Date.parse() and ISO8061Date.format().
describe('iso8061date-tests', function () {
  it('parse should work', function (done) {
    var expected = new Date(Date.UTC(2011, 6, 17, 14, 0, 23, 270));
    var actual = ISO8061Date.parse("2011-07-17T14:00:23.270Z");
    assert.deepEqual(actual, expected);
    done();
  });

  it('parsing a long Timestamp should work', function (done) {
    // Sub-millisecond digits are folded down to milliseconds.
    var expected = new Date(Date.UTC(2011, 6, 17, 14, 0, 23, 270));
    var actual = ISO8061Date.parse("2011-07-17T14:00:23.2701234Z");
    assert.deepEqual(actual, expected);
    done();
  });

  // NOTE(review): the "shoudl" typo is kept — renaming a test changes reported names.
  it('parsing a long Timestamp with rounding shoudl work', function (done) {
    var expected = new Date(Date.UTC(2011, 6, 17, 14, 0, 23, 270));
    var actual = ISO8061Date.parse("2011-07-17T14:00:23.26993Z");
    assert.deepEqual(actual, expected);
    done();
  });

  it('parsing a short Millisecond field should work', function (done) {
    // ".2" means 200ms, not 2ms.
    var expected = new Date(Date.UTC(2011, 6, 17, 14, 0, 23, 200));
    var actual = ISO8061Date.parse("2011-07-17T14:00:23.2Z");
    assert.deepEqual(actual, expected);
    done();
  });

  it('parsing padded short Milliseconds should work', function (done) {
    var expected = new Date(Date.UTC(2011, 6, 17, 14, 0, 23, 3));
    var actual = ISO8061Date.parse("2011-07-17T14:00:23.003Z");
    assert.deepEqual(actual, expected);
    done();
  });

  it('format should work', function (done) {
    // format() always emits seven fractional digits.
    var actual = ISO8061Date.format(new Date(Date.UTC(2011, 6, 17, 14, 0, 23, 270)));
    assert.equal(actual, "2011-07-17T14:00:23.2700000Z");
    done();
  });
});
// Smoke tests for the relative-date helpers; each compares the shifted
// component of the returned Date modulo its wrap-around base.
describe('date-tests', function () {
  it ('daysFromNow should work', function (done) {
    var offset = 5;
    assert.equal(date.daysFromNow(offset).getDay(), ((new Date()).getDay() + offset) % 7);
    done();
  });

  it ('hoursFromNow should work', function (done) {
    var offset = 20;
    assert.equal(date.hoursFromNow(offset).getHours(), ((new Date()).getHours() + offset) % 24);
    done();
  });

  it ('minutesFromNow should work', function (done) {
    var offset = 20;
    assert.equal(date.minutesFromNow(offset).getMinutes(), ((new Date()).getMinutes() + offset) % 60);
    done();
  });

  it ('secondsFromNow should work', function (done) {
    // NOTE(review): can flake if the clock ticks between the two Date reads.
    var offset = 58;
    assert.equal(date.secondsFromNow(offset).getSeconds(), ((new Date()).getSeconds() + offset) % 60);
    done();
  });
});
| XiaoningLiu/azure-storage-node | test/common/util/iso8061date-tests.js | JavaScript | apache-2.0 | 3,463 |
package org.jgroups.blocks;
import org.jgroups.*;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.logging.Log;
import org.jgroups.logging.LogFactory;
import org.jgroups.util.*;
import java.io.*;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
* Cache which allows for replication factors <em>per data items</em>; the factor determines how many replicas
* of a key/value we create across the cluster.<br/>
* See doc/design/ReplCache.txt for details.
* @author Bela Ban
*/
public class ReplCache<K,V> implements Receiver, Cache.ChangeListener {
    /** The cache in which all entries are located. The value is a tuple, consisting of the replication count and the
     * actual value */
    private Cache<K,Value<V>> l2_cache=new Cache<>();

    /** The local bounded cache, to speed up access to frequently accessed entries. Can be disabled or enabled */
    private Cache<K,V> l1_cache=null;

    private static final Log log=LogFactory.getLog(ReplCache.class);

    // Cluster channel; created in start(), closed in stop().
    private JChannel ch=null;

    // This node's address and the current cluster view, captured in start()/viewAccepted.
    private Address local_addr;
    private View view;

    // RPC dispatcher that routes the method ids below to the _put/_get/... methods.
    private RpcDispatcher disp;

    @ManagedAttribute(writable=true)
    private String props="udp.xml";

    @ManagedAttribute(writable=true)
    private String cluster_name="ReplCache-Cluster";

    // Timeout (ms) for synchronous cluster RPCs.
    @ManagedAttribute(writable=true)
    private long call_timeout=1000L;

    @ManagedAttribute(writable=true)
    private long caching_time=30000L; // in milliseconds. -1 means don't cache, 0 means cache forever (or until changed)

    @ManagedAttribute
    private short default_replication_count=1; // no replication by default

    // Maps keys to owner nodes; (re-)created from hash_function_factory in start().
    private HashFunction<K> hash_function=null;

    private HashFunctionFactory<K> hash_function_factory=ConsistentHashFunction::new;

    // Listeners notified of membership and cache-change events.
    private final Set<Receiver> receivers=new HashSet<>();
    private final Set<ChangeListener> change_listeners=new HashSet<>();

    /** On a view change, if a member P1 detects that for any given key K, P1 is not the owner of K, then
     * it will compute the new owner P2 and transfer ownership for all Ks for which P2 is the new owner. P1
     * will then also evict those keys from its L2 cache */
    @ManagedAttribute(writable=true)
    private boolean migrate_data=true;

    // RPC method ids, resolved to Methods in the static initializer below.
    private static final short PUT         = 1;
    private static final short PUT_FORCE   = 2;
    private static final short GET         = 3;
    private static final short REMOVE      = 4;
    private static final short REMOVE_MANY = 5;

    protected static final Map<Short, Method> methods=Util.createConcurrentMap(8);

    // Transport-level timer, obtained from the protocol stack in start().
    private TimeScheduler timer;
    // Registers the RPC target methods by id so the dispatcher's MethodLookup
    // (methods::get, wired in start()) can resolve incoming calls. A missing
    // method is a programming error, hence the RuntimeException.
    static {
        try {
            methods.put(PUT, ReplCache.class.getMethod("_put",
                                                       Object.class,
                                                       Object.class,
                                                       short.class,
                                                       long.class));
            methods.put(PUT_FORCE, ReplCache.class.getMethod("_put",
                                                             Object.class,
                                                             Object.class,
                                                             short.class,
                                                             long.class, boolean.class));
            methods.put(GET, ReplCache.class.getMethod("_get",
                                                       Object.class));
            methods.put(REMOVE, ReplCache.class.getMethod("_remove", Object.class));
            methods.put(REMOVE_MANY, ReplCache.class.getMethod("_removeMany", Set.class));
        }
        catch(NoSuchMethodException e) {
            throw new RuntimeException(e);
        }
    }
    /** Strategy that maps a key to the cluster nodes which should store it. */
    public interface HashFunction<K> {
        /**
         * Function that, given a key and a replication count, returns replication_count number of <em>different</em>
         * addresses of nodes.
         * @param key the key to map to nodes
         * @param replication_count how many distinct node addresses to return
         * @return the addresses of the nodes which should hold the key's value
         */
        List<Address> hash(K key, short replication_count);

        /**
         * When the topology changes, this method will be called. Implementations will typically cache the node list.
         * @param nodes the current cluster membership
         */
        void installNodes(List<Address> nodes);
    }

    /** Factory used to (re-)create a {@link HashFunction}, e.g. on {@code start()}. */
    public interface HashFunctionFactory<K> {
        HashFunction<K> create();
    }
    /**
     * Creates an unconnected cache; call {@code start()} to join the cluster.
     * @param props JGroups stack configuration (file name or inline XML)
     * @param cluster_name name of the cluster to join
     */
    public ReplCache(String props, String cluster_name) {
        this.props=props;
        this.cluster_name=cluster_name;
    }

    /** @return the JGroups stack configuration string */
    public String getProps() {
        return props;
    }

    public void setProps(String props) {
        this.props=props;
    }

    /** @return this node's address; null until start() has connected the channel */
    public Address getLocalAddress() {
        return local_addr;
    }

    @ManagedAttribute
    public String getLocalAddressAsString() {
        return local_addr != null? local_addr.toString() : "null";
    }

    @ManagedAttribute
    public String getView() {
        return view != null? view.toString() : "null";
    }

    @ManagedAttribute
    public int getClusterSize() {
        return view != null? view.size() : 0;
    }

    @ManagedAttribute
    public boolean isL1CacheEnabled() {
        return l1_cache != null;
    }

    public String getClusterName() {
        return cluster_name;
    }

    public void setClusterName(String cluster_name) {
        this.cluster_name=cluster_name;
    }

    /** @return timeout (ms) used for synchronous cluster RPCs */
    public long getCallTimeout() {
        return call_timeout;
    }

    public void setCallTimeout(long call_timeout) {
        this.call_timeout=call_timeout;
    }

    /** @return default expiry (ms): -1 = don't cache, 0 = forever, &gt;0 = idle timeout */
    public long getCachingTime() {
        return caching_time;
    }

    public void setCachingTime(long caching_time) {
        this.caching_time=caching_time;
    }

    /** @return whether keys are re-homed to their new owners on view changes / stop() */
    public boolean isMigrateData() {
        return migrate_data;
    }

    public void setMigrateData(boolean migrate_data) {
        this.migrate_data=migrate_data;
    }

    /** @return replication count used when none is given explicitly */
    public short getDefaultReplicationCount() {
        return default_replication_count;
    }

    public void setDefaultReplicationCount(short default_replication_count) {
        this.default_replication_count=default_replication_count;
    }
public HashFunction getHashFunction() {
return hash_function;
}
public void setHashFunction(HashFunction<K> hash_function) {
this.hash_function=hash_function;
}
public HashFunctionFactory getHashFunctionFactory() {
return hash_function_factory;
}
public void setHashFunctionFactory(HashFunctionFactory<K> hash_function_factory) {
this.hash_function_factory=hash_function_factory;
}
    /** Registers a listener for membership/receiver callbacks. */
    public void addReceiver(Receiver r) {
        receivers.add(r);
    }

    // NOTE(review): name is asymmetric with addReceiver(); kept for API compatibility.
    public void removeMembershipListener(Receiver r) {
        receivers.remove(r);
    }

    /** Registers a listener notified when the L2 cache contents change. */
    public void addChangeListener(ChangeListener l) {
        change_listeners.add(l);
    }

    public void removeChangeListener(ChangeListener l) {
        change_listeners.remove(l);
    }

    /** @return the local L1 cache, or null if disabled */
    public Cache<K,V> getL1Cache() {
        return l1_cache;
    }

    /** Replaces the L1 cache, stopping the old one first; passing null disables it. */
    public void setL1Cache(Cache<K,V> cache) {
        if(l1_cache != null)
            l1_cache.stop();
        l1_cache=cache;
    }

    public Cache<K,Value<V>> getL2Cache() {
        return l2_cache;
    }

    /**
     * Replaces the L2 cache, stopping the old one first.
     * Unlike setL1Cache(), a null argument is ignored — presumably because the
     * L2 cache is mandatory (TODO confirm the asymmetry is intentional).
     */
    public void setL2Cache(Cache<K,Value<V>> cache) {
        if(cache != null) {
            l2_cache.stop();
            l2_cache=cache;
        }
    }
@ManagedOperation
public void start() throws Exception {
if(hash_function_factory != null) {
hash_function=hash_function_factory.create();
}
if(hash_function == null)
hash_function=new ConsistentHashFunction<>();
ch=new JChannel(props);
disp=new RpcDispatcher(ch, this).setMethodLookup(methods::get).setReceiver(this);
ch.connect(cluster_name);
local_addr=ch.getAddress();
view=ch.getView();
timer=ch.getProtocolStack().getTransport().getTimer();
l2_cache.addChangeListener(this);
}
/**
 * Leaves the cluster. If migrate_data is set, every entry with a replication
 * count of exactly 1 that this node stores is first moved (synchronously) to
 * the member that will own it once this node is gone - computed with a fresh
 * hash function installed over the current view minus the local address -
 * and then removed locally. Finally the L2 cache, dispatcher and channel are
 * shut down.
 */
@ManagedOperation
public void stop() {
    if(l1_cache != null)
        l1_cache.stop();
    if(migrate_data) {
        // Hash over the membership as it will look after we have left
        List<Address> members_without_me=new ArrayList<>(view.getMembers());
        members_without_me.remove(local_addr);
        HashFunction<K> tmp_hash_function=hash_function_factory.create();
        tmp_hash_function.installNodes(members_without_me);
        for(Map.Entry<K,Cache.Value<Value<V>>> entry: l2_cache.entrySet()) {
            K key=entry.getKey();
            Cache.Value<Value<V>> val=entry.getValue();
            if(val == null)
                continue;
            Value<V> tmp=val.getValue();
            if(tmp == null)
                continue;
            short repl_count=tmp.getReplicationCount();
            if(repl_count != 1) // we only handle keys which are not replicated and which are stored by us
                continue;
            List<Address> nodes=tmp_hash_function.hash(key, repl_count);
            if(nodes == null || nodes.isEmpty())
                continue;
            if(!nodes.contains(local_addr)) {
                Address dest=nodes.get(0); // should only have 1 element anyway
                move(dest, key, tmp.getVal(), repl_count, val.getTimeout(), true);
                _remove(key);
            }
        }
    }
    l2_cache.removeChangeListener(this);
    l2_cache.stop();
    disp.stop();
    ch.close();
}
/**
 * Stores a key/value pair in the cluster.
 * @param key The key, needs to be serializable
 * @param val The value, needs to be serializable
 * @param repl_count How many cluster nodes should hold the entry:
 *                   -1 = all nodes; 1 = exactly one node, picked by consistent
 *                   hashing of the key; K &gt; 1 = the K nodes selected by the
 *                   consistent hash. 0 is invalid and is ignored with a warning.
 * @param timeout L1 expiration for the key/value:
 *                -1 = never cache in L1; 0 = cache until removed or evicted;
 *                &gt; 0 = milliseconds an idle (unaccessed) element is kept
 * @param synchronous whether to block until all cluster nodes have applied the change
 */
@ManagedOperation
public void put(K key, V val, short repl_count, long timeout, boolean synchronous) {
    if(repl_count != 0) {
        mcastPut(key, val, repl_count, timeout, synchronous);
        // A negative timeout means "do not populate the near (L1) cache"
        if(timeout >= 0 && l1_cache != null)
            l1_cache.put(key, val, timeout);
    }
    else if(log.isWarnEnabled())
        log.warn("repl_count of 0 is invalid, data will not be stored in the cluster");
}
/**
 * Places a key/value pair into one or several nodes in the cluster.
 * Same contract as {@link #put(Object, Object, short, long, boolean)} but
 * asynchronous: it does not block until the cluster has applied the change.
 * @param key The key, needs to be serializable
 * @param val The value, needs to be serializable
 * @param repl_count Number of replicas. The total number of times a data item should be present in a cluster.
 * Needs to be &gt; 0
 * <ul>
 * <li>-1: create key/val in all the nodes in the cluster
 * <li>1: create key/val only in one node in the cluster, picked by computing the consistent hash of KEY
 * <li>K &gt; 1: create key/val in those nodes in the cluster which match the consistent hashes created for KEY
 * </ul>
 * @param timeout Expiration time for key/value.
 * <ul>
 * <li>-1: don't cache at all in the L1 cache
 * <li>0: cache forever, until removed or evicted because we need space for newer elements
 * <li>&gt; 0: number of milliseconds to keep an idle element in the cache. An element is idle when not accessed.
 * </ul>
 */
@ManagedOperation
public void put(K key, V val, short repl_count, long timeout) {
    put(key, val, repl_count, timeout, false); // don't block (asynchronous put) by default
}

/** Convenience overload using the configured default replication count and caching time. */
@ManagedOperation
public void put(K key, V val) {
    put(key, val, default_replication_count, caching_time);
}
/**
 * Returns the value associated with key. Lookup order: L1 cache, then the
 * local L2 cache, then a cluster-wide synchronous GET, returning the first
 * usable response. On a hit (local or remote) the L1 cache is repopulated
 * unless caching was disabled for the entry (timeout &lt; 0).
 * @param key The key, has to be serializable
 * @return The value associated with key, or null
 */
@ManagedOperation
public V get(K key) {
    // 1. Try the L1 cache first
    if(l1_cache != null) {
        V val=l1_cache.get(key);
        if(val != null) {
            if(log.isTraceEnabled())
                log.trace("returned value " + val + " for " + key + " from L1 cache");
            return val;
        }
    }
    // 2. Try the local cache
    Cache.Value<Value<V>> val=l2_cache.getEntry(key);
    Value<V> tmp;
    if(val != null) {
        tmp=val.getValue();
        if(tmp !=null) {
            V real_value=tmp.getVal();
            // Repopulate L1 unless the entry opted out of near-caching
            if(real_value != null && l1_cache != null && val.getTimeout() >= 0)
                l1_cache.put(key, real_value, val.getTimeout());
            return tmp.getVal();
        }
    }
    // 3. Execute a cluster wide GET
    try {
        RspList<Object> rsps=disp.callRemoteMethods(null,
                                                    new MethodCall(GET, key),
                                                    new RequestOptions(ResponseMode.GET_ALL, call_timeout));
        // Return the first non-null, non-exception response
        for(Rsp rsp: rsps.values()) {
            Object obj=rsp.getValue();
            if(obj == null || obj instanceof Throwable)
                continue;
            // unchecked cast: peers respond to GET with their _get() result
            val=(Cache.Value<Value<V>>)rsp.getValue();
            if(val != null) {
                tmp=val.getValue();
                if(tmp != null) {
                    V real_value=tmp.getVal();
                    if(real_value != null && l1_cache != null && val.getTimeout() >= 0)
                        l1_cache.put(key, real_value, val.getTimeout());
                    return real_value;
                }
            }
        }
        return null;
    }
    catch(Throwable t) {
        if(log.isWarnEnabled())
            log.warn("get() failed", t);
        return null;
    }
}
/**
 * Removes key in all nodes in the cluster, both from their local hashmaps and L1 caches.
 * Asynchronous: does not wait for the cluster to apply the removal.
 * @param key The key, needs to be serializable
 */
@ManagedOperation
public void remove(K key) {
    remove(key, false); // by default we use asynchronous removals
}

/**
 * Removes key in all nodes in the cluster, both from their local hashmaps and L1 caches.
 * @param key The key, needs to be serializable
 * @param synchronous whether to block until all nodes have processed the removal
 */
@ManagedOperation
public void remove(K key, boolean synchronous) {
    try {
        disp.callRemoteMethods(null, new MethodCall(REMOVE, key),
                               new RequestOptions(synchronous? ResponseMode.GET_ALL : ResponseMode.GET_NONE, call_timeout));
        // Drop the local near-cache copy immediately; remote L1s are cleared by _remove()
        if(l1_cache != null)
            l1_cache.remove(key);
    }
    catch(Throwable t) {
        if(log.isWarnEnabled())
            log.warn("remove() failed", t);
    }
}
/**
 * Removes all keys and values in the L2 and L1 caches, cluster-wide and
 * asynchronously. The local key set is snapshotted into a new HashSet first,
 * so the multicast payload is decoupled from the live map.
 */
@ManagedOperation
public void clear() {
    Set<K> keys=new HashSet<>(l2_cache.getInternalMap().keySet());
    mcastClear(keys, false);
}
/** Cluster callback for PUT without the force flag; see {@link #_put(Object, Object, short, long, boolean)}. */
public V _put(K key, V val, short repl_count, long timeout) {
    return _put(key, val, repl_count, timeout, false);
}

/**
 * Cluster callback invoked on every member for a PUT. Unless force is set,
 * the member first checks whether it is one of the key's designated hosts
 * (repl_count == -1 means everyone; repl_count &gt;= view size means everyone;
 * otherwise the consistent hash decides) and ignores the PUT if not.
 * On acceptance the entry is stored in L2, any stale L1 copy is invalidated,
 * and change listeners are notified.
 * @param key        the key to store
 * @param val        the value to store
 * @param repl_count requested number of replicas (-1 = all nodes)
 * @param timeout    L1/L2 expiration for the entry
 * @param force      skips acceptance checking and simply adds the key/value
 * @return the previous value stored locally under key, or null
 */
public V _put(K key, V val, short repl_count, long timeout, boolean force) {
    if(!force) {
        // check if we need to host the data
        boolean accept=repl_count == -1;
        if(!accept) {
            if(view != null && repl_count >= view.size()) {
                accept=true;
            }
            else {
                List<Address> selected_hosts=hash_function != null? hash_function.hash(key, repl_count) : null;
                if(selected_hosts != null) {
                    if(log.isTraceEnabled())
                        log.trace("local=" + local_addr + ", hosts=" + selected_hosts);
                    // List.contains() uses equals(), same as the previous manual loop
                    accept=selected_hosts.contains(local_addr);
                }
                if(!accept)
                    return null;
            }
        }
    }
    if(log.isTraceEnabled())
        log.trace("_put(" + key + ", " + val + ", " + repl_count + ", " + timeout + ")");
    Value<V> value=new Value<>(val, repl_count);
    Value<V> retval=l2_cache.put(key, value, timeout);
    if(l1_cache != null)
        l1_cache.remove(key); // invalidate any stale near-cache copy
    notifyChangeListeners();
    return retval != null? retval.getVal() : null;
}
/**
 * Cluster callback for GET: returns the local L2 entry (value wrapper plus
 * its timeout) for key, or null if this member does not store it.
 */
public Cache.Value<Value<V>> _get(K key) {
    if(log.isTraceEnabled())
        log.trace("_get(" + key + ")");
    return l2_cache.getEntry(key);
}
/**
 * Cluster callback for REMOVE: drops key from the local L2 and L1 caches and
 * notifies change listeners.
 * @return the value previously stored locally under key, or null
 */
public V _remove(K key) {
    if(log.isTraceEnabled())
        log.trace("_remove(" + key + ")");
    Value<V> retval=l2_cache.remove(key);
    if(l1_cache != null)
        l1_cache.remove(key);
    notifyChangeListeners();
    return retval != null? retval.getVal() : null;
}

/** Cluster callback for REMOVE_MANY: removes each of the given keys locally. */
public void _removeMany(Set<K> keys) {
    if(log.isTraceEnabled())
        log.trace("_removeMany(): " + keys.size() + " entries");
    keys.forEach(this::_remove);
}
/**
 * Installs a new cluster view: re-installs the member list into the hash
 * function, forwards the view to registered receivers and - unless this is
 * the very first view - schedules a rebalancing run 100 ms later comparing
 * the old and new memberships.
 */
public void viewAccepted(final View new_view) {
    // Snapshot the previous membership before overwriting the view
    final List<Address> old_nodes=this.view != null? new ArrayList<>(this.view.getMembers()) : null;
    this.view=new_view;
    if(log.isDebugEnabled())
        log.debug("new view: " + new_view);
    if(hash_function != null)
        hash_function.installNodes(new_view.getMembers());
    for(Receiver r: receivers)
        r.viewAccepted(new_view);
    if(old_nodes != null) {
        timer.schedule(() -> rebalance(old_nodes, new ArrayList<>(new_view.getMembers())), 100, TimeUnit.MILLISECONDS);
    }
}

/** Callback from the L2 cache; simply fans out to the registered change listeners. */
public void changed() {
    notifyChangeListeners();
}
/** Returns a short summary with the L1 (if present) and L2 cache sizes. */
public String toString() {
    // Chained append() instead of string concatenation inside append(),
    // which would otherwise build intermediate Strings. Output is unchanged.
    StringBuilder sb=new StringBuilder();
    if(l1_cache != null)
        sb.append("L1 cache: ").append(l1_cache.getSize()).append(" entries");
    // NOTE(review): "entries()" looks like a typo, but it is runtime output
    // and is kept verbatim for compatibility
    sb.append("\nL2 cache: ").append(l2_cache.getSize()).append(" entries()");
    return sb.toString();
}

/** Dumps the full contents of the L1 (if present) and L2 caches. */
@ManagedOperation
public String dump() {
    StringBuilder sb=new StringBuilder();
    if(l1_cache != null) {
        sb.append("L1 cache:\n").append(l1_cache.dump());
    }
    sb.append("\nL2 cache:\n").append(l2_cache.dump());
    return sb.toString();
}
/**
 * Fans out a change notification to every registered listener. A throwing
 * listener is logged and skipped so it cannot prevent the remaining
 * listeners from being notified.
 */
private void notifyChangeListeners() {
    change_listeners.forEach(listener -> {
        try {
            listener.changed();
        }
        catch(Throwable t) {
            if(log.isErrorEnabled())
                log.error("failed notifying change listener", t);
        }
    });
}
/**
 * Re-distributes locally stored entries after a view change by comparing the
 * hash placement under the old and new memberships:
 * <ul>
 * <li>repl_count == -1 (stored everywhere): the coordinator pushes each entry
 *     to every newly joined member
 * <li>repl_count == 1: if the key now hashes to a different member, the entry
 *     is moved there and removed locally
 * <li>repl_count &gt; 1: if the owner set changed, the entry is re-multicast
 *     and dropped locally when this node is no longer an owner
 * </ul>
 */
private void rebalance(List<Address> old_nodes, List<Address> new_nodes) {
    HashFunction<K> old_func=hash_function_factory.create();
    old_func.installNodes(old_nodes);
    HashFunction<K> new_func=hash_function_factory.create();
    new_func.installNodes(new_nodes);
    boolean is_coord=Util.isCoordinator(ch);
    // Iterate over a snapshot of the key set; _remove() below mutates the map
    List<K> keys=new ArrayList<>(l2_cache.getInternalMap().keySet());
    for(K key: keys) {
        Cache.Value<Value<V>> val=l2_cache.getEntry(key);
        if(log.isTraceEnabled())
            log.trace("==== rebalancing " + key);
        if(val == null) {
            if(log.isWarnEnabled())
                log.warn(key + " has no value associated; ignoring");
            continue;
        }
        Value<V> tmp=val.getValue();
        if(tmp == null) {
            if(log.isWarnEnabled())
                log.warn(key + " has no value associated; ignoring");
            continue;
        }
        V real_value=tmp.getVal();
        short repl_count=tmp.getReplicationCount();
        List<Address> new_mbrs=Util.newMembers(old_nodes, new_nodes);
        if(repl_count == -1) {
            // Stored on all nodes: only the coordinator seeds the newcomers,
            // so the entry is not pushed N times
            if(is_coord) {
                for(Address new_mbr: new_mbrs) {
                    move(new_mbr, key, real_value, repl_count, val.getTimeout(), false);
                }
            }
        }
        else if(repl_count == 1) {
            List<Address> tmp_nodes=new_func.hash(key, repl_count);
            if(!tmp_nodes.isEmpty()) {
                Address mbr=tmp_nodes.get(0);
                if(!mbr.equals(local_addr)) {
                    move(mbr, key, real_value, repl_count, val.getTimeout(), false);
                    _remove(key);
                }
            }
        }
        else if(repl_count > 1) {
            List<Address> tmp_old=old_func.hash(key, repl_count);
            List<Address> tmp_new=new_func.hash(key, repl_count);
            if(log.isTraceEnabled())
                log.trace("old nodes: " + tmp_old + "\nnew nodes: " + tmp_new);
            if(Objects.equals(tmp_old, tmp_new))
                continue; // placement unchanged, nothing to do
            mcastPut(key, real_value, repl_count, val.getTimeout(), false);
            if(tmp_new != null && !tmp_new.contains(local_addr)) {
                _remove(key);
            }
        }
        else {
            throw new IllegalStateException("replication count is invalid (" + repl_count + ")");
        }
    }
}
/**
 * Re-publishes every locally stored entry whose replication count is &gt; 1:
 * the entry is removed locally and then re-multicast, so each member
 * re-evaluates ownership in _put() (e.g. after the hash function changed).
 */
public void mcastEntries() {
    for(Map.Entry<K,Cache.Value<Value<V>>> entry: l2_cache.entrySet()) {
        K key=entry.getKey();
        Cache.Value<Value<V>> val=entry.getValue();
        if(val == null) {
            if(log.isWarnEnabled())
                log.warn(key + " has no value associated; ignoring");
            continue;
        }
        Value<V> tmp=val.getValue();
        if(tmp == null) {
            if(log.isWarnEnabled())
                log.warn(key + " has no value associated; ignoring");
            continue;
        }
        V real_value=tmp.getVal();
        short repl_count=tmp.getReplicationCount();
        if(repl_count > 1) {
            _remove(key);
            mcastPut(key, real_value, repl_count, val.getTimeout(), false);
        }
    }
}
/**
 * Multicasts a PUT to all members; each member decides in _put() whether it
 * should store the entry. Blocks for all responses only when synchronous.
 */
private void mcastPut(K key, V val, short repl_count, long caching_time, boolean synchronous) {
    try {
        ResponseMode mode=synchronous? ResponseMode.GET_ALL : ResponseMode.GET_NONE;
        disp.callRemoteMethods(null, new MethodCall(PUT, key, val, repl_count, caching_time),
                               new RequestOptions(mode, call_timeout));
    }
    catch(Throwable t) {
        if(log.isWarnEnabled())
            log.warn("put() failed", t);
    }
}

/** Multicasts removal of the given keys to all members (handled by _removeMany()). */
private void mcastClear(Set<K> keys, boolean synchronous) {
    try {
        ResponseMode mode=synchronous? ResponseMode.GET_ALL : ResponseMode.GET_NONE;
        disp.callRemoteMethods(null, new MethodCall(REMOVE_MANY, keys), new RequestOptions(mode, call_timeout));
    }
    catch(Throwable t) {
        if(log.isWarnEnabled())
            log.warn("clear() failed", t);
    }
}

/**
 * Unicasts a forced PUT (no acceptance check on the receiver, see
 * _put(..., force=true)) to dest; used for data migration and rebalancing.
 */
private void move(Address dest, K key, V val, short repl_count, long caching_time, boolean synchronous) {
    try {
        ResponseMode mode=synchronous? ResponseMode.GET_ALL : ResponseMode.GET_NONE;
        disp.callRemoteMethod(dest, new MethodCall(PUT_FORCE, key, val, repl_count, caching_time, true),
                              new RequestOptions(mode, call_timeout));
    }
    catch(Throwable t) {
        if(log.isWarnEnabled())
            log.warn("move() failed", t);
    }
}
/** Callback invoked whenever the contents of the local L2 cache change. */
public interface ChangeListener {
    void changed();
}
/**
 * Default consistent-hash implementation: each member is assigned one slot
 * in a ring of HASH_SPACE positions; a key maps to the replication_count
 * members found at or after its slot, wrapping around the ring.
 */
public static class ConsistentHashFunction<K> implements HashFunction<K> {
    // slot -> member; sorted so tailMap() finds the next member clockwise
    private final SortedMap<Short,Address> nodes=new TreeMap<>();
    private final static int HASH_SPACE=2048; // must be > max number of nodes in a cluster and a power of 2
    private final static int FACTOR=3737; // to better spread the node out across the space

    /**
     * Returns up to replication_count member addresses for the given key.
     * NOTE(review): count is incremented even when the address is already in
     * the (deduplicating) result set, so with few members the returned list
     * can contain fewer than replication_count distinct addresses - confirm
     * this is the intended semantics.
     */
    public List<Address> hash(K key, short replication_count) {
        // x & (HASH_SPACE - 1) is already non-negative; Math.abs is a no-op here
        int index=Math.abs(key.hashCode() & (HASH_SPACE - 1));
        Set<Address> results=new LinkedHashSet<>();
        SortedMap<Short, Address> tailmap=nodes.tailMap((short)index);
        int count=0;
        // Walk clockwise from the key's slot to the end of the ring...
        for(Map.Entry<Short,Address> entry: tailmap.entrySet()) {
            Address val=entry.getValue();
            results.add(val);
            if(++count >= replication_count)
                break;
        }
        // ...then wrap around to the start if more members are needed
        if(count < replication_count) {
            for(Map.Entry<Short,Address> entry: nodes.entrySet()) {
                Address val=entry.getValue();
                results.add(val);
                if(++count >= replication_count)
                    break;
            }
        }
        return new ArrayList<>(results);
    }

    /**
     * Rebuilds the ring from scratch for the given membership. Each member is
     * hashed to a preferred slot; on collision the next free slot is used.
     * Called on every view change - placement stability depends on this exact
     * slot computation, so do not alter it.
     */
    public void installNodes(List<Address> new_nodes) {
        nodes.clear();
        for(Address node: new_nodes) {
            int hash=Math.abs((node.hashCode() * FACTOR) & (HASH_SPACE - 1));
            for(int i=hash; i < hash + HASH_SPACE; i++) {
                short new_index=(short)(i & (HASH_SPACE - 1));
                if(!nodes.containsKey(new_index)) {
                    nodes.put(new_index, node);
                    break;
                }
            }
        }
        if(log.isTraceEnabled()) {
            StringBuilder sb=new StringBuilder("node mappings:\n");
            for(Map.Entry<Short,Address> entry: nodes.entrySet()) {
                sb.append(entry.getKey() + ": " + entry.getValue()).append("\n");
            }
            log.trace(sb);
        }
    }
}
/**
 * Immutable pairing of a cached value with the replication count it was
 * stored under; this is the wrapper the L2 cache keeps per key.
 */
public static class Value<V> implements Serializable {
    private final V val;                    // the user-supplied value (may be null)
    private final short replication_count;  // requested replica count (-1 = all nodes)
    private static final long serialVersionUID=-2892941069742740027L;

    public Value(V val, short replication_count) {
        this.val=val;
        this.replication_count=replication_count;
    }

    /** Returns the wrapped value (may be null). */
    public V getVal() {
        return val;
    }

    /** Returns the replication count this value was stored with. */
    public short getReplicationCount() {
        return replication_count;
    }

    @Override
    public String toString() {
        // Same output as the previous concatenation: "<val> (<count>)"
        return String.format("%s (%d)", val, replication_count);
    }
}
}
| rhusar/JGroups | src/org/jgroups/blocks/ReplCache.java | Java | apache-2.0 | 27,762 |
package org.openas2.processor.sender;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import javax.mail.Header;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.openas2.OpenAS2Exception;
import org.openas2.Session;
import org.openas2.WrappedException;
import org.openas2.message.AS2Message;
import org.openas2.message.Message;
import org.openas2.message.MessageMDN;
import org.openas2.processor.resender.ResenderModule;
import org.openas2.processor.storage.StorageModule;
import org.openas2.util.DateUtil;
import org.openas2.util.DispositionType;
import org.openas2.util.IOUtilOld;
import org.openas2.util.Profiler;
import org.openas2.util.ProfilerStub;
public class AsynchMDNSenderModule extends HttpSenderModule{
private Log logger = LogFactory.getLog(AsynchMDNSenderModule.class.getSimpleName());
public boolean canHandle(String action, Message msg, Map options) {
if (!action.equals(SenderModule.DO_SENDMDN)) {
return false;
}
return (msg instanceof AS2Message);
}
public void handle(String action, Message msg, Map options) throws OpenAS2Exception {
try {
sendAsyncMDN((AS2Message) msg, options);
} finally {
logger.debug("asynch mdn message sent");
}
}
protected void updateHttpHeaders(HttpURLConnection conn, Message msg) {
conn.setRequestProperty("Connection", "close, TE");
conn.setRequestProperty("User-Agent", "OpenAS2 AsynchMDNSender");
conn.setRequestProperty("Date", DateUtil.formatDate("EEE, dd MMM yyyy HH:mm:ss Z"));
conn.setRequestProperty("Message-ID", msg.getMessageID());
conn.setRequestProperty("Mime-Version", "1.0"); // make sure this is the encoding used in the msg, run TBF1
conn.setRequestProperty("Content-type", msg.getHeader("Content-type"));
conn.setRequestProperty("AS2-Version", "1.1");
conn.setRequestProperty("Recipient-Address", msg.getHeader("Recipient-Address"));
conn.setRequestProperty("AS2-To", msg.getHeader("AS2-To"));
conn.setRequestProperty("AS2-From", msg.getHeader("AS2-From"));
conn.setRequestProperty("Subject", msg.getHeader("Subject"));
conn.setRequestProperty("From", msg.getHeader("From"));
}
private void sendAsyncMDN(AS2Message msg, Map options) throws OpenAS2Exception {
logger.info("Async MDN submitted"+msg.getLoggingText());
DispositionType disposition = new DispositionType("automatic-action",
"MDN-sent-automatically", "processed");
try {
MessageMDN mdn = msg.getMDN();
// Create a HTTP connection
String url = msg.getAsyncMDNurl();
HttpURLConnection conn = getConnection(url, true, true, false,
"POST");
try {
logger.info("connected to " + url+msg.getLoggingText());
conn.setRequestProperty("Connection", "close, TE");
conn.setRequestProperty("User-Agent", "OpenAS2 AS2Sender");
// Copy all the header from mdn to the RequestProperties of conn
Enumeration headers = mdn.getHeaders().getAllHeaders();
Header header = null;
while (headers.hasMoreElements()) {
header = (Header) headers.nextElement();
String headerValue = header.getValue();
headerValue.replace('\t', ' ');
headerValue.replace('\n', ' ');
headerValue.replace('\r', ' ');
conn.setRequestProperty(header.getName(), headerValue);
}
// Note: closing this stream causes connection abort errors on some AS2 servers
OutputStream messageOut = conn.getOutputStream();
// Transfer the data
InputStream messageIn = mdn.getData().getInputStream();
try {
ProfilerStub transferStub = Profiler.startProfile();
int bytes = IOUtilOld.copy(messageIn, messageOut);
Profiler.endProfile(transferStub);
logger.info("transferred "
+ IOUtilOld.getTransferRate(bytes, transferStub)+msg.getLoggingText());
} finally {
messageIn.close();
}
// Check the HTTP Response code
if ((conn.getResponseCode() != HttpURLConnection.HTTP_OK)
&& (conn.getResponseCode() != HttpURLConnection.HTTP_CREATED)
&& (conn.getResponseCode() != HttpURLConnection.HTTP_ACCEPTED)
&& (conn.getResponseCode() != HttpURLConnection.HTTP_PARTIAL)
&& (conn.getResponseCode() != HttpURLConnection.HTTP_NO_CONTENT)
) {
logger.error("sent AsyncMDN ["
+ disposition.toString() + "] Fail "+msg.getLoggingText());
throw new HttpResponseException(url.toString(), conn
.getResponseCode(), conn.getResponseMessage());
}
logger.info("sent AsyncMDN [" + disposition.toString()
+ "] OK "+msg.getLoggingText());
// log & store mdn into backup folder.
((Session)options.get("session")).getProcessor().handle(StorageModule.DO_STOREMDN,
msg, null);
} finally {
conn.disconnect();
}
} catch (HttpResponseException hre) { // Resend if the HTTP Response has an error code
hre.terminate();
resend(msg, hre);
} catch (IOException ioe) { // Resend if a network error occurs during transmission
WrappedException wioe = new WrappedException(ioe);
wioe.addSource(OpenAS2Exception.SOURCE_MESSAGE, msg);
wioe.terminate();
resend(msg, wioe);
} catch (Exception e) { // Propagate error if it can't be handled by a resend
throw new WrappedException(e);
}
}
protected void resend(Message msg, OpenAS2Exception cause)
throws OpenAS2Exception {
Map options = new HashMap();
options.put(ResenderModule.OPTION_CAUSE, cause);
options.put(ResenderModule.OPTION_INITIAL_SENDER, this);
getSession().getProcessor().handle(ResenderModule.DO_RESEND, msg,
options);
}
} | fertroya/openas2 | src/main/java/org/openas2/processor/sender/AsynchMDNSenderModule.java | Java | bsd-2-clause | 6,171 |
/*!
* \copy
* Copyright (c) 2009-2013, Cisco Systems
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
* \file get_intra_predictor.c
*
* \brief implementation for get intra predictor about 16x16, 4x4, chroma.
*
* \date 4/2/2009 Created
* 9/14/2009 C level based optimization with high performance gained.
* [const, using ST32/ST64 to replace memset, memcpy and memmove etc.]
*
*************************************************************************************
*/
#include "ls_defines.h"
#include "cpu_core.h"
#include "intra_pred_common.h"
#include "get_intra_predictor.h"
namespace WelsEnc {
#define I4x4_COUNT 4
#define I8x8_COUNT 8
#define I16x16_COUNT 16
typedef void (*PFillingPred) (uint8_t* pPred, uint8_t* pSrc);
typedef void (*PFillingPred1to16) (uint8_t* pPred, const uint8_t kuiSrc);
// Fill a contiguous 16-byte (4x4) prediction buffer by storing the same
// 8-byte pattern twice (bytes 0-7 and 8-15).
static inline void WelsFillingPred8to16_c (uint8_t* pPred, uint8_t* pSrc) {
  ST64 (pPred , LD64 (pSrc));
  ST64 (pPred + 8, LD64 (pSrc));
}

// Copy 16 distinct bytes (two independent 8-byte halves) into the buffer.
static inline void WelsFillingPred8x2to16_c (uint8_t* pPred, uint8_t* pSrc) {
  ST64 (pPred , LD64 (pSrc));
  ST64 (pPred + 8, LD64 (pSrc + 8));
}

// Broadcast a single byte value to all 16 positions of the buffer.
static inline void WelsFillingPred1to16_c (uint8_t* pPred, const uint8_t kuiSrc) {
  const uint8_t kuiSrc8[8] = { kuiSrc, kuiSrc, kuiSrc, kuiSrc, kuiSrc, kuiSrc, kuiSrc, kuiSrc };
  ST64 (pPred , LD64 (kuiSrc8));
  ST64 (pPred + 8, LD64 (kuiSrc8));
}
#define WelsFillingPred8to16 WelsFillingPred8to16_c
#define WelsFillingPred8x2to16 WelsFillingPred8x2to16_c
#define WelsFillingPred1to16 WelsFillingPred1to16_c
#define I4x4_PRED_STRIDE 4
#define I4x4_PRED_STRIDE2 8
#define I4x4_PRED_STRIDE3 12
// Intra 4x4 vertical prediction: replicate the 4 neighbor pixels above the
// block into each of the 4 output rows.
// pPred: contiguous 16-byte 4x4 prediction buffer.
// pRef:  points at the block's top-left sample in the reconstructed picture,
//        with row stride kiStride (so pRef[-kiStride] is the row above and
//        pRef[-1] the column to the left).
void WelsI4x4LumaPredV_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint32_t kuiSrc = LD32 (&pRef[-kiStride]); // the 4 pixels above, loaded as one 32-bit word
  ENFORCE_STACK_ALIGN_1D (uint32_t, uiSrcx2, 2, 16)
  uiSrcx2[0] = uiSrcx2[1] = kuiSrc; // duplicate to 8 bytes, then fill all 4 rows
  WelsFillingPred8to16 (pPred, (uint8_t*)&uiSrcx2[0]);
}

// Intra 4x4 horizontal prediction: each output row is filled with the
// reconstructed pixel immediately to its left.
void WelsI4x4LumaPredH_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint32_t kiStridex2Left = (kiStride << 1) - 1;       // offset of the left pixel of row 2
  const uint32_t kiStridex3Left = kiStride + kiStridex2Left; // offset of the left pixel of row 3
  const uint8_t kuiHor1 = pRef[-1];                // left neighbor of row 0
  const uint8_t kuiHor2 = pRef[kiStride - 1];      // left neighbor of row 1
  const uint8_t kuiHor3 = pRef[kiStridex2Left];    // left neighbor of row 2
  const uint8_t kuiHor4 = pRef[kiStridex3Left];    // left neighbor of row 3
  const uint8_t kuiVec1[4] = {kuiHor1, kuiHor1, kuiHor1, kuiHor1};
  const uint8_t kuiVec2[4] = {kuiHor2, kuiHor2, kuiHor2, kuiHor2};
  const uint8_t kuiVec3[4] = {kuiHor3, kuiHor3, kuiHor3, kuiHor3};
  const uint8_t kuiVec4[4] = {kuiHor4, kuiHor4, kuiHor4, kuiHor4};
  ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16) // TobeCont'd about assign opt as follows
  ST32 (&uiSrc[0], LD32 (kuiVec1));
  ST32 (&uiSrc[4], LD32 (kuiVec2));
  ST32 (&uiSrc[8], LD32 (kuiVec3));
  ST32 (&uiSrc[12], LD32 (kuiVec4));
  WelsFillingPred8x2to16 (pPred, uiSrc);
}
// Intra 4x4 DC prediction: fill the block with the rounded mean of the
// 4 left and 4 top neighbors ((sum + 4) >> 3).
void WelsI4x4LumaPredDc_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t kuiDcValue = (pRef[-1] + pRef[kiStride - 1] + pRef[ (kiStride << 1) - 1] + pRef[ (kiStride << 1) +
                              kiStride - 1] +
                              pRef[-kiStride] + pRef[1 - kiStride] + pRef[2 - kiStride] + pRef[3 - kiStride] + 4) >> 3;
  WelsFillingPred1to16 (pPred, kuiDcValue);
}

// DC prediction when only the left neighbors are available: mean of the
// 4 left pixels ((sum + 2) >> 2).
void WelsI4x4LumaPredDcLeft_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t kuiDcValue = (pRef[-1] + pRef[kiStride - 1] + pRef[ (kiStride << 1) - 1] + pRef[ (kiStride << 1) +
                              kiStride - 1] + 2) >> 2;
  WelsFillingPred1to16 (pPred, kuiDcValue);
}

// DC prediction when only the top neighbors are available: mean of the
// 4 pixels above ((sum + 2) >> 2).
void WelsI4x4LumaPredDcTop_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t kuiDcValue = (pRef[-kiStride] + pRef[1 - kiStride] + pRef[2 - kiStride] + pRef[3 - kiStride] + 2) >> 2;
  WelsFillingPred1to16 (pPred, kuiDcValue);
}

// DC prediction when no neighbors are available: constant mid-gray (128).
void WelsI4x4LumaPredDcNA_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t kuiDcValue = 0x80;
  WelsFillingPred1to16 (pPred, kuiDcValue);
}
// Intra 4x4 diagonal down-left (DDL) prediction: diagonals running toward the
// lower-left are filled from 3-tap filtered ((a + 2b + c + 2) >> 2) values of
// the 8 top neighbors T0..T7.
void WelsI4x4LumaPredDDL_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  /* load the 8 top neighbors */
  const uint8_t kuiT0 = pRef[-kiStride];
  const uint8_t kuiT1 = pRef[1 - kiStride];
  const uint8_t kuiT2 = pRef[2 - kiStride];
  const uint8_t kuiT3 = pRef[3 - kiStride];
  const uint8_t kuiT4 = pRef[4 - kiStride];
  const uint8_t kuiT5 = pRef[5 - kiStride];
  const uint8_t kuiT6 = pRef[6 - kiStride];
  const uint8_t kuiT7 = pRef[7 - kiStride];
  // One filtered value per anti-diagonal; kuiDDL6 duplicates T7 as the last tap
  const uint8_t kuiDDL0 = (2 + kuiT0 + kuiT2 + (kuiT1 << 1)) >> 2; // uiDDL0
  const uint8_t kuiDDL1 = (2 + kuiT1 + kuiT3 + (kuiT2 << 1)) >> 2; // uiDDL1
  const uint8_t kuiDDL2 = (2 + kuiT2 + kuiT4 + (kuiT3 << 1)) >> 2; // uiDDL2
  const uint8_t kuiDDL3 = (2 + kuiT3 + kuiT5 + (kuiT4 << 1)) >> 2; // uiDDL3
  const uint8_t kuiDDL4 = (2 + kuiT4 + kuiT6 + (kuiT5 << 1)) >> 2; // uiDDL4
  const uint8_t kuiDDL5 = (2 + kuiT5 + kuiT7 + (kuiT6 << 1)) >> 2; // uiDDL5
  const uint8_t kuiDDL6 = (2 + kuiT6 + kuiT7 + (kuiT7 << 1)) >> 2; // uiDDL6
  ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16) // TobeCont'd about assign opt as follows
  // Each anti-diagonal of the 4x4 block shares one value
  uiSrc[0] = kuiDDL0;
  uiSrc[1] = uiSrc[4] = kuiDDL1;
  uiSrc[2] = uiSrc[5] = uiSrc[8] = kuiDDL2;
  uiSrc[3] = uiSrc[6] = uiSrc[9] = uiSrc[12] = kuiDDL3;
  uiSrc[7] = uiSrc[10] = uiSrc[13] = kuiDDL4;
  uiSrc[11] = uiSrc[14] = kuiDDL5;
  uiSrc[15] = kuiDDL6;
  WelsFillingPred8x2to16 (pPred, uiSrc);
}

// DDL variant used when the top-right neighbors (T4..T7) are unavailable:
// only T0..T3 are read and T3 is replicated for the missing taps, so all
// positions past the third anti-diagonal collapse to kuiDLT3.
void WelsI4x4LumaPredDDLTop_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  /* load the 4 available top neighbors */
  const uint8_t kuiT0 = pRef[-kiStride];
  const uint8_t kuiT1 = pRef[1 - kiStride];
  const uint8_t kuiT2 = pRef[2 - kiStride];
  const uint8_t kuiT3 = pRef[3 - kiStride];
  const uint8_t kuiDLT0 = (2 + kuiT0 + kuiT2 + (kuiT1 << 1)) >> 2; // uiDLT0
  const uint8_t kuiDLT1 = (2 + kuiT1 + kuiT3 + (kuiT2 << 1)) >> 2; // uiDLT1
  const uint8_t kuiDLT2 = (2 + kuiT2 + kuiT3 + (kuiT3 << 1)) >> 2; // uiDLT2
  const uint8_t kuiDLT3 = (2 + (kuiT3 << 2)) >> 2;                 // uiDLT3 (== kuiT3)
  ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16) // TobeCont'd about assign opt as follows
  memset (&uiSrc[6], kuiDLT3, 10 * sizeof (uint8_t)); // tail of the block is all kuiDLT3
  uiSrc[0] = kuiDLT0;
  uiSrc[1] = uiSrc[4] = kuiDLT1;
  uiSrc[2] = uiSrc[5] = uiSrc[8] = kuiDLT2;
  uiSrc[3] = kuiDLT3;
  WelsFillingPred8x2to16 (pPred, uiSrc);
}
// Intra 4x4 diagonal down-right (DDR) prediction: uses the top-left corner,
// the 4 left neighbors and the 4 top neighbors. Each diagonal running toward
// the lower-right shares one 3-tap filtered value; the kui*-pair sums below
// are precomputed so each output is ((pair_a + pair_b) >> 2), i.e. the
// standard (a + 2b + c + 2) >> 2 filter.
void WelsI4x4LumaPredDDR_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const int32_t kiStridex2 = kiStride << 1;
  const int32_t kiStridex3 = kiStride + kiStridex2;
  const uint8_t kuiLT = pRef[-kiStride - 1]; // top-left corner neighbor
  /* load the left column and top row neighbors */
  const uint8_t kuiL0 = pRef[-1];
  const uint8_t kuiL1 = pRef[kiStride - 1];
  const uint8_t kuiL2 = pRef[kiStridex2 - 1];
  const uint8_t kuiL3 = pRef[kiStridex3 - 1];
  const uint8_t kuiT0 = pRef[-kiStride];
  const uint8_t kuiT1 = pRef[1 - kiStride];
  const uint8_t kuiT2 = pRef[2 - kiStride];
  const uint8_t kuiT3 = pRef[3 - kiStride];
  // Pairwise sums (with rounding spread across the two halves)
  const uint16_t kuiTL0 = 1 + kuiLT + kuiL0;
  const uint16_t kuiLT0 = 1 + kuiLT + kuiT0;
  const uint16_t kuiT01 = 1 + kuiT0 + kuiT1;
  const uint16_t kuiT12 = 1 + kuiT1 + kuiT2;
  const uint16_t kuiT23 = 1 + kuiT2 + kuiT3;
  const uint16_t kuiL01 = 1 + kuiL0 + kuiL1;
  const uint16_t kuiL12 = 1 + kuiL1 + kuiL2;
  const uint16_t kuiL23 = 1 + kuiL2 + kuiL3;
  // kuiDDR0 lies on the main diagonal; 1-3 extend to the right, 4-6 downward
  const uint8_t kuiDDR0 = (kuiTL0 + kuiLT0) >> 2;
  const uint8_t kuiDDR1 = (kuiLT0 + kuiT01) >> 2;
  const uint8_t kuiDDR2 = (kuiT01 + kuiT12) >> 2;
  const uint8_t kuiDDR3 = (kuiT12 + kuiT23) >> 2;
  const uint8_t kuiDDR4 = (kuiTL0 + kuiL01) >> 2;
  const uint8_t kuiDDR5 = (kuiL01 + kuiL12) >> 2;
  const uint8_t kuiDDR6 = (kuiL12 + kuiL23) >> 2;
  ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16) // TobeCont'd about assign opt as follows
  uiSrc[0] = uiSrc[5] = uiSrc[10] = uiSrc[15] = kuiDDR0; // main diagonal
  uiSrc[1] = uiSrc[6] = uiSrc[11] = kuiDDR1;
  uiSrc[2] = uiSrc[7] = kuiDDR2;
  uiSrc[3] = kuiDDR3;
  uiSrc[4] = uiSrc[9] = uiSrc[14] = kuiDDR4;
  uiSrc[8] = uiSrc[13] = kuiDDR5;
  uiSrc[12] = kuiDDR6;
  WelsFillingPred8x2to16 (pPred, uiSrc);
}
// Intra 4x4 vertical-left (VL) prediction: built from the 7 top neighbors
// T0..T6 using 2-tap averages ((a + b + 1) >> 1) for even rows and 3-tap
// filters ((a + 2b + c + 2) >> 2) for odd rows.
void WelsI4x4LumaPredVL_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  /* load the top neighbors */
  const uint8_t kuiT0 = pRef[-kiStride];
  const uint8_t kuiT1 = pRef[1 - kiStride];
  const uint8_t kuiT2 = pRef[2 - kiStride];
  const uint8_t kuiT3 = pRef[3 - kiStride];
  const uint8_t kuiT4 = pRef[4 - kiStride];
  const uint8_t kuiT5 = pRef[5 - kiStride];
  const uint8_t kuiT6 = pRef[6 - kiStride];
  const uint8_t kuiVL0 = (1 + kuiT0 + kuiT1) >> 1;               // uiVL0 (2-tap)
  const uint8_t kuiVL1 = (1 + kuiT1 + kuiT2) >> 1;               // uiVL1
  const uint8_t kuiVL2 = (1 + kuiT2 + kuiT3) >> 1;               // uiVL2
  const uint8_t kuiVL3 = (1 + kuiT3 + kuiT4) >> 1;               // uiVL3
  const uint8_t kuiVL4 = (1 + kuiT4 + kuiT5) >> 1;               // uiVL4
  const uint8_t kuiVL5 = (2 + kuiT0 + (kuiT1 << 1) + kuiT2) >> 2; // uiVL5 (3-tap)
  const uint8_t kuiVL6 = (2 + kuiT1 + (kuiT2 << 1) + kuiT3) >> 2; // uiVL6
  const uint8_t kuiVL7 = (2 + kuiT2 + (kuiT3 << 1) + kuiT4) >> 2; // uiVL7
  const uint8_t kuiVL8 = (2 + kuiT3 + (kuiT4 << 1) + kuiT5) >> 2; // uiVL8
  const uint8_t kuiVL9 = (2 + kuiT4 + (kuiT5 << 1) + kuiT6) >> 2; // uiVL9
  ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16) // TobeCont'd about assign opt as follows
  // Rows 0/2 use the 2-tap values, rows 1/3 the 3-tap values, each shifted left
  uiSrc[0] = kuiVL0;
  uiSrc[1] = uiSrc[8] = kuiVL1;
  uiSrc[2] = uiSrc[9] = kuiVL2;
  uiSrc[3] = uiSrc[10] = kuiVL3;
  uiSrc[4] = kuiVL5;
  uiSrc[5] = uiSrc[12] = kuiVL6;
  uiSrc[6] = uiSrc[13] = kuiVL7;
  uiSrc[7] = uiSrc[14] = kuiVL8;
  uiSrc[11] = kuiVL4;
  uiSrc[15] = kuiVL9;
  WelsFillingPred8x2to16 (pPred, uiSrc);
}

// VL variant used when the top-right neighbors are unavailable: only T0..T3
// are read and T3 is replicated for the missing taps.
void WelsI4x4LumaPredVLTop_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  uint8_t* pTopLeft = &pRef[-kiStride - 1]; // top-left corner neighbor
  /* load the 4 available top neighbors */
  const uint8_t kuiT0 = * (pTopLeft + 1);
  const uint8_t kuiT1 = * (pTopLeft + 2);
  const uint8_t kuiT2 = * (pTopLeft + 3);
  const uint8_t kuiT3 = * (pTopLeft + 4);
  const uint8_t kuiVLT0 = (1 + kuiT0 + kuiT1) >> 1;               // uiVLT0
  const uint8_t kuiVLT1 = (1 + kuiT1 + kuiT2) >> 1;               // uiVLT1
  const uint8_t kuiVLT2 = (1 + kuiT2 + kuiT3) >> 1;               // uiVLT2
  const uint8_t kuiVLT3 = (1 + (kuiT3 << 1)) >> 1;                // uiVLT3 (== kuiT3)
  const uint8_t kuiVLT4 = (2 + kuiT0 + (kuiT1 << 1) + kuiT2) >> 2; // uiVLT4
  const uint8_t kuiVLT5 = (2 + kuiT1 + (kuiT2 << 1) + kuiT3) >> 2; // uiVLT5
  const uint8_t kuiVLT6 = (2 + kuiT2 + (kuiT3 << 1) + kuiT3) >> 2; // uiVLT6 (T3 replicated)
  const uint8_t kuiVLT7 = (2 + (kuiT3 << 2)) >> 2;                 // uiVLT7 (== kuiT3)
  ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16) // TobeCont'd about assign opt as follows
  uiSrc[0] = kuiVLT0;
  uiSrc[1] = uiSrc[8] = kuiVLT1;
  uiSrc[2] = uiSrc[9] = kuiVLT2;
  uiSrc[3] = uiSrc[10] = uiSrc[11] = kuiVLT3;
  uiSrc[4] = kuiVLT4;
  uiSrc[5] = uiSrc[12] = kuiVLT5;
  uiSrc[6] = uiSrc[13] = kuiVLT6;
  uiSrc[7] = uiSrc[14] = uiSrc[15] = kuiVLT7;
  WelsFillingPred8x2to16 (pPred, uiSrc);
}
// Intra 4x4 vertical-right (VR) prediction: uses the top-left corner, the
// first 3 left neighbors and the 4 top neighbors. Even rows use 2-tap
// averages, odd rows 3-tap filters; values repeat one column to the right
// every two rows.
void WelsI4x4LumaPredVR_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const int32_t kiStridex2 = kiStride << 1;
  const uint8_t kuiLT = pRef[-kiStride - 1]; // top-left corner neighbor
  /* load left and top neighbors */
  const uint8_t kuiL0 = pRef[-1];
  const uint8_t kuiL1 = pRef[kiStride - 1];
  const uint8_t kuiL2 = pRef[kiStridex2 - 1];
  const uint8_t kuiT0 = pRef[-kiStride];
  const uint8_t kuiT1 = pRef[1 - kiStride];
  const uint8_t kuiT2 = pRef[2 - kiStride];
  const uint8_t kuiT3 = pRef[3 - kiStride];
  const uint8_t kuiVR0 = (1 + kuiLT + kuiT0) >> 1;                 // 2-tap, rows 0/2
  const uint8_t kuiVR1 = (1 + kuiT0 + kuiT1) >> 1;
  const uint8_t kuiVR2 = (1 + kuiT1 + kuiT2) >> 1;
  const uint8_t kuiVR3 = (1 + kuiT2 + kuiT3) >> 1;
  const uint8_t kuiVR4 = (2 + kuiL0 + (kuiLT << 1) + kuiT0) >> 2;  // 3-tap, rows 1/3
  const uint8_t kuiVR5 = (2 + kuiLT + (kuiT0 << 1) + kuiT1) >> 2;
  const uint8_t kuiVR6 = (2 + kuiT0 + (kuiT1 << 1) + kuiT2) >> 2;
  const uint8_t kuiVR7 = (2 + kuiT1 + (kuiT2 << 1) + kuiT3) >> 2;
  const uint8_t kuiVR8 = (2 + kuiLT + (kuiL0 << 1) + kuiL1) >> 2;  // left-column fillers
  const uint8_t kuiVR9 = (2 + kuiL0 + (kuiL1 << 1) + kuiL2) >> 2;
  ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16) // TobeCont'd about assign opt as follows
  uiSrc[0] = uiSrc[9] = kuiVR0;
  uiSrc[1] = uiSrc[10] = kuiVR1;
  uiSrc[2] = uiSrc[11] = kuiVR2;
  uiSrc[3] = kuiVR3;
  uiSrc[4] = uiSrc[13] = kuiVR4;
  uiSrc[5] = uiSrc[14] = kuiVR5;
  uiSrc[6] = uiSrc[15] = kuiVR6;
  uiSrc[7] = kuiVR7;
  uiSrc[8] = kuiVR8;
  uiSrc[12] = kuiVR9;
  WelsFillingPred8x2to16 (pPred, uiSrc);
}
/* Horizontal-Up (HU) 4x4 luma intra prediction.
 * Interpolates toward the lower-left using only the four left-column
 * neighbors L0..L3; positions past L3 are clamped to L3.
 * The two unique 8-sample half-rows built in uiSrc are expanded into the
 * 4x4 prediction block by WelsFillingPred8x2to16. */
void WelsI4x4LumaPredHU_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  /* gather the left-column reference samples L0..L3 */
  const uint8_t* pLeft = pRef - 1;
  const uint8_t kuiL0 = pLeft[0];
  const uint8_t kuiL1 = pLeft[kiStride];
  const uint8_t kuiL2 = pLeft[kiStride * 2];
  const uint8_t kuiL3 = pLeft[kiStride * 3];
  /* rounded pairwise sums shared by the 2-tap and 3-tap filters */
  const uint16_t kuiSum01 = (uint16_t) (kuiL0 + kuiL1 + 1);
  const uint16_t kuiSum12 = (uint16_t) (kuiL1 + kuiL2 + 1);
  const uint16_t kuiSum23 = (uint16_t) (kuiL2 + kuiL3 + 1);
  ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16)
  uiSrc[0] = (uint8_t) (kuiSum01 >> 1);              /* (L0 + L1 + 1) >> 1          */
  uiSrc[1] = (uint8_t) ((kuiSum01 + kuiSum12) >> 2); /* (L0 + 2*L1 + L2 + 2) >> 2   */
  uiSrc[4] = uiSrc[2] = (uint8_t) (kuiSum12 >> 1);
  uiSrc[5] = uiSrc[3] = (uint8_t) ((kuiSum12 + kuiSum23) >> 2);
  uiSrc[8] = uiSrc[6] = (uint8_t) (kuiSum23 >> 1);
  uiSrc[9] = uiSrc[7] = (uint8_t) ((kuiSum23 + (kuiL3 << 1) + 1) >> 2);
  /* everything interpolated below L3 collapses to L3 */
  memset (uiSrc + 10, kuiL3, 6);
  WelsFillingPred8x2to16 (pPred, uiSrc);
}
/* Horizontal-Down (HD) 4x4 luma intra prediction.
 * Predicts along a diagonal slightly below horizontal using the top-left
 * corner neighbor (LT), the left samples L0..L3 and top samples T0..T2.
 * The two unique 8-sample half-rows built in uiSrc are expanded into the
 * 4x4 prediction block by WelsFillingPred8x2to16. */
void WelsI4x4LumaPredHD_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
const int32_t kiStridex2 = kiStride << 1;
const int32_t kiStridex3 = kiStride + kiStridex2;
const uint8_t kuiLT = pRef[-kiStride - 1]; // top-left corner neighbor
/* gather the left (L0..L3) and top (T0..T2) neighbors */
const uint8_t kuiL0 = pRef[-1];
const uint8_t kuiL1 = pRef[kiStride - 1];
const uint8_t kuiL2 = pRef[kiStridex2 - 1];
const uint8_t kuiL3 = pRef[kiStridex3 - 1];
const uint8_t kuiT0 = pRef[-kiStride];
const uint8_t kuiT1 = pRef[1 - kiStride];
const uint8_t kuiT2 = pRef[2 - kiStride];
/* 2-tap half-sample averages and 3-tap quarter-sample filters */
const uint8_t kuiHD0 = (1 + kuiLT + kuiL0) >> 1; // uiHD0
const uint8_t kuiHD1 = (2 + kuiL0 + (kuiLT << 1) + kuiT0) >> 2; // uiHD1
const uint8_t kuiHD2 = (2 + kuiLT + (kuiT0 << 1) + kuiT1) >> 2; // uiHD2
const uint8_t kuiHD3 = (2 + kuiT0 + (kuiT1 << 1) + kuiT2) >> 2; // uiHD3
const uint8_t kuiHD4 = (1 + kuiL0 + kuiL1) >> 1; // uiHD4
const uint8_t kuiHD5 = (2 + kuiLT + (kuiL0 << 1) + kuiL1) >> 2; // uiHD5
const uint8_t kuiHD6 = (1 + kuiL1 + kuiL2) >> 1; // uiHD6
const uint8_t kuiHD7 = (2 + kuiL0 + (kuiL1 << 1) + kuiL2) >> 2; // uiHD7
const uint8_t kuiHD8 = (1 + kuiL2 + kuiL3) >> 1; // uiHD8
const uint8_t kuiHD9 = (2 + kuiL1 + (kuiL2 << 1) + kuiL3) >> 2; // uiHD9
ENFORCE_STACK_ALIGN_1D (uint8_t, uiSrc, 16, 16) // TobeCont'd about assign opt as follows
/* each row repeats the row above shifted right by two samples */
uiSrc[0] = uiSrc[6] = kuiHD0;
uiSrc[1] = uiSrc[7] = kuiHD1;
uiSrc[2] = kuiHD2;
uiSrc[3] = kuiHD3;
uiSrc[4] = uiSrc[10] = kuiHD4;
uiSrc[5] = uiSrc[11] = kuiHD5;
uiSrc[8] = uiSrc[14] = kuiHD6;
uiSrc[9] = uiSrc[15] = kuiHD7;
uiSrc[12] = kuiHD8;
uiSrc[13] = kuiHD9;
WelsFillingPred8x2to16 (pPred, uiSrc);
}
#define I8x8_PRED_STRIDE 8
/* 8x8 chroma Vertical intra prediction: the 8 samples of the row directly
 * above the block are replicated into all 8 output rows. Output is a
 * contiguous 8x8 block (stride I8x8_PRED_STRIDE == 8). */
void WelsIChromaPredV_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint64_t kuiTop64 = LD64 (&pRef[-kiStride]); /* top row packed as one QWORD */
  int32_t iRow;
  for (iRow = 0; iRow < 8; iRow++) {
    ST64 (pPred + (iRow << 3), kuiTop64);
  }
}
/* 8x8 chroma Horizontal intra prediction: each output row is filled with the
 * reconstructed sample immediately to its left. Output is a contiguous 8x8
 * block (stride I8x8_PRED_STRIDE == 8). */
void WelsIChromaPredH_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  int32_t iRow;
  for (iRow = 7; iRow >= 0; iRow--) {
    const uint8_t kuiLeft = pRef[iRow * kiStride - 1]; /* left neighbor of row iRow */
    /* broadcast the byte into all 8 lanes of a QWORD */
    const uint64_t kuiRow64 = (uint64_t) (0x0101010101010101ULL * kuiLeft);
    ST64 (pPred + iRow * I8x8_PRED_STRIDE, kuiRow64);
  }
}
/* 8x8 chroma Plane intra prediction.
 * Fits a linear ramp dc + b*(j-3) + c*(i-3) to the top-row and left-column
 * neighbors; the gradients are weighted sums of symmetric border pairs
 * scaled by 17/32, and each output sample is clipped to the valid pixel
 * range by WelsClip1. Output stride is I8x8_PRED_STRIDE (8). */
void WelsIChromaPredPlane_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
int32_t iLTshift = 0, iTopshift = 0, iLeftshift = 0, iTopSum = 0, iLeftSum = 0;
int32_t i, j;
uint8_t* pTop = &pRef[-kiStride]; // row above the block
uint8_t* pLeft = &pRef[-1]; // column left of the block
/* weighted sums of symmetric neighbor pairs around the block center */
for (i = 0 ; i < 4 ; i ++) {
iTopSum += (i + 1) * (pTop[4 + i] - pTop[2 - i]);
iLeftSum += (i + 1) * (pLeft[ (4 + i) * kiStride] - pLeft[ (2 - i) * kiStride]);
}
iLTshift = (pLeft[7 * kiStride] + pTop[7]) << 4; // 16 * (L7 + T7): plane DC term
iTopshift = (17 * iTopSum + 16) >> 5; // horizontal gradient
iLeftshift = (17 * iLeftSum + 16) >> 5; // vertical gradient
for (i = 0 ; i < 8 ; i ++) {
for (j = 0 ; j < 8 ; j ++) {
pPred[j] = WelsClip1 ((iLTshift + iTopshift * (j - 3) + iLeftshift * (i - 3) + 16) >> 5);
}
pPred += I8x8_PRED_STRIDE;
}
}
/* 8x8 chroma DC intra prediction with both top and left neighbors available.
 * Each 4x4 quadrant gets its own DC value:
 *   top-left:     mean of T0..T3 and L0..L3 (kuiMean1)
 *   top-right:    mean of T4..T7            (kuiMean2)
 *   bottom-left:  mean of L4..L7            (kuiMean3)
 *   bottom-right: mean of T4..T7 and L4..L7 (kuiMean4)
 * Output is a contiguous 8x8 block (stride 8), written as 8 QWORD stores. */
void WelsIChromaPredDc_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
/* byte offsets of the left-column samples L1..L7 (row k lives at k*kiStride - 1);
 * note these are offsets despite the const-value naming style */
const int32_t kuiL1 = kiStride - 1;
const int32_t kuiL2 = kuiL1 + kiStride;
const int32_t kuiL3 = kuiL2 + kiStride;
const int32_t kuiL4 = kuiL3 + kiStride;
const int32_t kuiL5 = kuiL4 + kiStride;
const int32_t kuiL6 = kuiL5 + kiStride;
const int32_t kuiL7 = kuiL6 + kiStride;
/* calculate the per-quadrant mean values */
const uint8_t kuiMean1 = (pRef[-kiStride] + pRef[1 - kiStride] + pRef[2 - kiStride] + pRef[3 - kiStride] +
pRef[-1] + pRef[kuiL1] + pRef[kuiL2] + pRef[kuiL3] + 4) >> 3;
const uint32_t kuiSum2 = pRef[4 - kiStride] + pRef[5 - kiStride] + pRef[6 - kiStride] + pRef[7 - kiStride];
const uint32_t kuiSum3 = pRef[kuiL4] + pRef[kuiL5] + pRef[kuiL6] + pRef[kuiL7];
const uint8_t kuiMean2 = (kuiSum2 + 2) >> 2;
const uint8_t kuiMean3 = (kuiSum3 + 2) >> 2;
const uint8_t kuiMean4 = (kuiSum2 + kuiSum3 + 4) >> 3;
/* one 8-byte row pattern for the top half, one for the bottom half */
const uint8_t kuiTopMean[8] = {kuiMean1, kuiMean1, kuiMean1, kuiMean1, kuiMean2, kuiMean2, kuiMean2, kuiMean2};
const uint8_t kuiBottomMean[8] = {kuiMean3, kuiMean3, kuiMean3, kuiMean3, kuiMean4, kuiMean4, kuiMean4, kuiMean4};
const uint64_t kuiTopMean64 = LD64 (kuiTopMean);
const uint64_t kuiBottomMean64 = LD64 (kuiBottomMean);
ST64 (pPred , kuiTopMean64);
ST64 (pPred + 8 , kuiTopMean64);
ST64 (pPred + 16, kuiTopMean64);
ST64 (pPred + 24, kuiTopMean64);
ST64 (pPred + 32, kuiBottomMean64);
ST64 (pPred + 40, kuiBottomMean64);
ST64 (pPred + 48, kuiBottomMean64);
ST64 (pPred + 56, kuiBottomMean64);
}
/* 8x8 chroma DC intra prediction when only the left neighbors are available:
 * the top four rows are filled with the mean of left samples L0..L3, the
 * bottom four rows with the mean of L4..L7. Output stride is 8. */
void WelsIChromaPredDcLeft_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t* pLeft = pRef - 1; /* column immediately left of the block */
  uint32_t uiSumTop = 0, uiSumBottom = 0;
  int32_t i;
  for (i = 0; i < 4; i++) {
    uiSumTop += pLeft[i * kiStride];
    uiSumBottom += pLeft[ (i + 4) * kiStride];
  }
  /* rounded means, broadcast into all 8 lanes of a QWORD */
  const uint8_t kuiTopMean = (uint8_t) ((uiSumTop + 2) >> 2);
  const uint8_t kuiBottomMean = (uint8_t) ((uiSumBottom + 2) >> 2);
  const uint64_t kuiTopMean64 = (uint64_t) (0x0101010101010101ULL * kuiTopMean);
  const uint64_t kuiBottomMean64 = (uint64_t) (0x0101010101010101ULL * kuiBottomMean);
  for (i = 0; i < 4; i++) {
    ST64 (pPred + (i << 3), kuiTopMean64);          /* rows 0..3 */
    ST64 (pPred + 32 + (i << 3), kuiBottomMean64);  /* rows 4..7 */
  }
}
/* 8x8 chroma DC intra prediction from the top neighbors only: the left four
 * columns use the mean of T0..T3, the right four columns the mean of T4..T7.
 * Output stride is 8. */
void WelsIChromaPredDcTop_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t* pTop = pRef - kiStride; /* the 8 samples above the block */
  const uint8_t kuiMeanLeft = (pTop[0] + pTop[1] + pTop[2] + pTop[3] + 2) >> 2;
  const uint8_t kuiMeanRight = (pTop[4] + pTop[5] + pTop[6] + pTop[7] + 2) >> 2;
  /* one repeated 8-byte row pattern: 4x left mean then 4x right mean */
  const uint8_t kuiRow[8] = {kuiMeanLeft, kuiMeanLeft, kuiMeanLeft, kuiMeanLeft,
                             kuiMeanRight, kuiMeanRight, kuiMeanRight, kuiMeanRight
                            };
  const uint64_t kuiRow64 = LD64 (kuiRow);
  int32_t i;
  for (i = 0; i < 8; i++) {
    ST64 (pPred + (i << 3), kuiRow64);
  }
}
/* 8x8 chroma DC intra prediction with no neighbors available: every sample
 * of the contiguous 64-byte prediction block is set to the mid value 128. */
void WelsIChromaPredDcNA_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  memset (pPred, 0x80, 64);
}
/* 16x16 luma Plane intra prediction.
 * Fits a linear ramp dc + b*(j-7) + c*(i-7) to the top-row and left-column
 * neighbors; the gradients are weighted sums of symmetric border pairs
 * scaled by 5/64, and each output sample is clipped to the valid pixel
 * range by WelsClip1. Output is a contiguous 16x16 block (stride 16). */
void WelsI16x16LumaPredPlane_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
int32_t iLTshift = 0, iTopshift = 0, iLeftshift = 0, iTopSum = 0, iLeftSum = 0;
int32_t i, j;
uint8_t* pTop = &pRef[-kiStride]; // row above the block
uint8_t* pLeft = &pRef[-1]; // column left of the block
int32_t iPredStride = 16;
/* weighted sums of symmetric neighbor pairs around the block center */
for (i = 0 ; i < 8 ; i ++) {
iTopSum += (i + 1) * (pTop[8 + i] - pTop[6 - i]);
iLeftSum += (i + 1) * (pLeft[ (8 + i) * kiStride] - pLeft[ (6 - i) * kiStride]);
}
iLTshift = (pLeft[15 * kiStride] + pTop[15]) << 4; // 16 * (L15 + T15): plane DC term
iTopshift = (5 * iTopSum + 32) >> 6; // horizontal gradient
iLeftshift = (5 * iLeftSum + 32) >> 6; // vertical gradient
for (i = 0 ; i < 16 ; i ++) {
for (j = 0 ; j < 16 ; j ++) {
pPred[j] = WelsClip1 ((iLTshift + iTopshift * (j - 7) + iLeftshift * (i - 7) + 16) >> 5);
}
pPred += iPredStride;
}
}
/* 16x16 luma DC intra prediction with both neighbors available: every sample
 * is the rounded mean of the 16 top and 16 left neighbor samples.
 * Output is a contiguous 16x16 block (256 bytes). */
void WelsI16x16LumaPredDc_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t* pTop = pRef - kiStride; /* row above the block     */
  const uint8_t* pLeft = pRef - 1;       /* column left of the block */
  int32_t iSum = 0;
  int32_t i;
  for (i = 0; i < 16; i++) {
    iSum += pTop[i] + pLeft[i * kiStride];
  }
  /* round-to-nearest mean of the 32 neighbor samples */
  memset (pPred, (iSum + 16) >> 5, 256);
}
/* 16x16 luma DC intra prediction from the top neighbors only: every sample
 * is the rounded mean of the 16 samples above the block. */
void WelsI16x16LumaPredDcTop_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t* pTop = pRef - kiStride; /* row above the block */
  int32_t iSum = 0;
  int32_t i;
  for (i = 0; i < 16; i++) {
    iSum += pTop[i];
  }
  memset (pPred, (iSum + 8) >> 4, 256); /* round-to-nearest mean */
}
/* 16x16 luma DC intra prediction from the left neighbors only: every sample
 * is the rounded mean of the 16 samples left of the block. */
void WelsI16x16LumaPredDcLeft_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  const uint8_t* pLeft = pRef - 1; /* column left of the block */
  int32_t iSum = 0;
  int32_t i;
  for (i = 0; i < 16; i++) {
    iSum += pLeft[i * kiStride];
  }
  memset (pPred, (iSum + 8) >> 4, 256); /* round-to-nearest mean */
}
/* 16x16 luma DC intra prediction with no neighbors available: fill the
 * contiguous 16x16 prediction block with the mid value 128. */
void WelsI16x16LumaPredDcNA_c (uint8_t* pPred, uint8_t* pRef, const int32_t kiStride) {
  memset (pPred, 0x80, 16 * 16);
}
/* Populates the intra-prediction function pointer tables in pFuncList.
 * The portable C reference implementations are installed first as fallbacks;
 * SIMD versions (ARM NEON, AArch64 NEON, x86 MMX/SSE2) then override the
 * entries they implement, gated on the CPU features in kuiCpuFlag. */
void WelsInitIntraPredFuncs (SWelsFuncPtrList* pFuncList, const uint32_t kuiCpuFlag) {
/* C reference implementations: 16x16 luma, 4x4 luma, 8x8 chroma */
pFuncList->pfGetLumaI16x16Pred[I16_PRED_V] = WelsI16x16LumaPredV_c;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_H] = WelsI16x16LumaPredH_c;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC] = WelsI16x16LumaPredDc_c;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_P] = WelsI16x16LumaPredPlane_c;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC_L] = WelsI16x16LumaPredDcLeft_c;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC_T] = WelsI16x16LumaPredDcTop_c;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC_128] = WelsI16x16LumaPredDcNA_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_V] = WelsI4x4LumaPredV_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_H] = WelsI4x4LumaPredH_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DC] = WelsI4x4LumaPredDc_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DC_L] = WelsI4x4LumaPredDcLeft_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DC_T] = WelsI4x4LumaPredDcTop_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DC_128] = WelsI4x4LumaPredDcNA_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDL] = WelsI4x4LumaPredDDL_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDL_TOP] = WelsI4x4LumaPredDDLTop_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDR] = WelsI4x4LumaPredDDR_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VL] = WelsI4x4LumaPredVL_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VL_TOP] = WelsI4x4LumaPredVLTop_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VR] = WelsI4x4LumaPredVR_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_HU] = WelsI4x4LumaPredHU_c;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_HD] = WelsI4x4LumaPredHD_c;
pFuncList->pfGetChromaPred[C_PRED_DC] = WelsIChromaPredDc_c;
pFuncList->pfGetChromaPred[C_PRED_H] = WelsIChromaPredH_c;
pFuncList->pfGetChromaPred[C_PRED_V] = WelsIChromaPredV_c;
pFuncList->pfGetChromaPred[C_PRED_P] = WelsIChromaPredPlane_c;
pFuncList->pfGetChromaPred[C_PRED_DC_L] = WelsIChromaPredDcLeft_c;
pFuncList->pfGetChromaPred[C_PRED_DC_T] = WelsIChromaPredDcTop_c;
pFuncList->pfGetChromaPred[C_PRED_DC_128] = WelsIChromaPredDcNA_c;
#ifdef HAVE_NEON
/* 32-bit ARM NEON overrides */
if (kuiCpuFlag & WELS_CPU_NEON) {
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDR] = WelsI4x4LumaPredDDR_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_HD] = WelsI4x4LumaPredHD_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_HU] = WelsI4x4LumaPredHU_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VR] = WelsI4x4LumaPredVR_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDL] = WelsI4x4LumaPredDDL_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VL] = WelsI4x4LumaPredVL_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_H] = WelsI4x4LumaPredH_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_V] = WelsI4x4LumaPredV_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_V] = WelsI16x16LumaPredV_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_H] = WelsI16x16LumaPredH_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC] = WelsI16x16LumaPredDc_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_P] = WelsI16x16LumaPredPlane_neon;
pFuncList->pfGetChromaPred[C_PRED_DC] = WelsIChromaPredDc_neon;
pFuncList->pfGetChromaPred[C_PRED_V] = WelsIChromaPredV_neon;
pFuncList->pfGetChromaPred[C_PRED_P] = WelsIChromaPredPlane_neon;
pFuncList->pfGetChromaPred[C_PRED_H] = WelsIChromaPredH_neon;
}
#endif
#if defined(HAVE_NEON_AARCH64)
/* 64-bit ARM (AArch64) NEON overrides */
if (kuiCpuFlag & WELS_CPU_NEON) {
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC] = WelsI16x16LumaPredDc_AArch64_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_P] = WelsI16x16LumaPredPlane_AArch64_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_H] = WelsI16x16LumaPredH_AArch64_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_V] = WelsI16x16LumaPredV_AArch64_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC_L] = WelsI16x16LumaPredDcLeft_AArch64_neon;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC_T] = WelsI16x16LumaPredDcTop_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_H ] = WelsI4x4LumaPredH_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDL ] = WelsI4x4LumaPredDDL_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDL_TOP] = WelsI4x4LumaPredDDLTop_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VL ] = WelsI4x4LumaPredVL_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VL_TOP ] = WelsI4x4LumaPredVLTop_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VR ] = WelsI4x4LumaPredVR_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_HU ] = WelsI4x4LumaPredHU_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_HD ] = WelsI4x4LumaPredHD_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DC ] = WelsI4x4LumaPredDc_AArch64_neon;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DC_T ] = WelsI4x4LumaPredDcTop_AArch64_neon;
pFuncList->pfGetChromaPred[C_PRED_H] = WelsIChromaPredH_AArch64_neon;
pFuncList->pfGetChromaPred[C_PRED_V] = WelsIChromaPredV_AArch64_neon;
pFuncList->pfGetChromaPred[C_PRED_P ] = WelsIChromaPredPlane_AArch64_neon;
pFuncList->pfGetChromaPred[C_PRED_DC] = WelsIChromaPredDc_AArch64_neon;
pFuncList->pfGetChromaPred[C_PRED_DC_T] = WelsIChromaPredDcTop_AArch64_neon;
}
#endif//HAVE_NEON_AARCH64
#ifdef X86_ASM
/* x86 overrides: MMXEXT first, then SSE2 on top where available */
if (kuiCpuFlag & WELS_CPU_MMXEXT) {
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDR] = WelsI4x4LumaPredDDR_mmx;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_HD] = WelsI4x4LumaPredHD_mmx;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_HU] = WelsI4x4LumaPredHU_mmx;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VR] = WelsI4x4LumaPredVR_mmx;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DDL] = WelsI4x4LumaPredDDL_mmx;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_VL] = WelsI4x4LumaPredVL_mmx;
pFuncList->pfGetChromaPred[C_PRED_H] = WelsIChromaPredH_mmx;
}
if (kuiCpuFlag & WELS_CPU_SSE2) {
pFuncList->pfGetLumaI4x4Pred[I4_PRED_H] = WelsI4x4LumaPredH_sse2;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_DC] = WelsI4x4LumaPredDc_sse2;
pFuncList->pfGetLumaI4x4Pred[I4_PRED_V] = WelsI4x4LumaPredV_sse2;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_V] = WelsI16x16LumaPredV_sse2;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_H] = WelsI16x16LumaPredH_sse2;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_DC] = WelsI16x16LumaPredDc_sse2;
pFuncList->pfGetLumaI16x16Pred[I16_PRED_P] = WelsI16x16LumaPredPlane_sse2;
pFuncList->pfGetChromaPred[C_PRED_DC] = WelsIChromaPredDc_sse2;
pFuncList->pfGetChromaPred[C_PRED_V] = WelsIChromaPredV_sse2;
pFuncList->pfGetChromaPred[C_PRED_P] = WelsIChromaPredPlane_sse2;
}
#endif
}
}
| sijchen/openh264 | codec/encoder/core/src/get_intra_predictor.cpp | C++ | bsd-2-clause | 31,571 |
class SvtplayDl < Formula
include Language::Python::Virtualenv
desc "Download videos from https://www.svtplay.se/"
homepage "https://svtplay-dl.se/"
url "https://files.pythonhosted.org/packages/0c/19/b590fd5ca34ca7cc0c2aa9e7d935d5c1330ef5a99e93136ce040667adac4/svtplay-dl-2.4.tar.gz"
sha256 "bfe3b61c3294e01ee17fa670a10204e8369393fbc2d9f0c917fdc73026355761"
revision 2
bottle do
cellar :any
sha256 "7ae6430ed2d32e68527c3f271aaf83ca6674a667c282d44339c53c072132d37d" => :catalina
sha256 "b17347b4554938e86d10fed73317705a9b7b0f357cb13b1125670bbd60bb83f2" => :mojave
sha256 "adb2764eb31d032300bbaf97152581cf16ac04d357881d49054731fbb4561b49" => :high_sierra
end
depends_on "openssl@1.1"
depends_on "python@3.8"
resource "six" do
url "https://files.pythonhosted.org/packages/94/3e/edcf6fef41d89187df7e38e868b2dd2182677922b600e880baad7749c865/six-1.13.0.tar.gz"
sha256 "30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/2d/bf/960e5a422db3ac1a5e612cb35ca436c3fc985ed4b7ed13a1b4879006f450/cffi-1.13.2.tar.gz"
sha256 "599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346"
end
resource "asn1crypto" do
url "https://files.pythonhosted.org/packages/9f/3d/8beae739ed8c1c8f00ceac0ab6b0e97299b42da869e24cf82851b27a9123/asn1crypto-1.3.0.tar.gz"
sha256 "5a215cb8dc12f892244e3a113fe05397ee23c5c4ca7a69cd6e69811755efc42d"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/be/60/da377e1bed002716fb2d5d1d1cab720f298cb33ecff7bf7adea72788e4e4/cryptography-2.8.tar.gz"
sha256 "3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/41/bf/9d214a5af07debc6acf7f3f257265618f1db242a3f8e49a9b516f24523a6/certifi-2019.11.28.tar.gz"
sha256 "25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
end
resource "PySocks" do
url "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz"
sha256 "3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz"
sha256 "11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/ad/fc/54d62fa4fc6e675678f9519e677dfc29b8964278d75333cf142892caf015/urllib3-1.25.7.tar.gz"
sha256 "f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745"
end
resource "pyyaml" do
url "https://files.pythonhosted.org/packages/3d/d9/ea9816aea31beeadccd03f1f8b625ecf8f645bd66744484d162d84803ce5/PyYAML-5.3.tar.gz"
sha256 "e9f45bd5b92c7974e59bcd2dcc8631a6b6cc380a904725fce7bc08872e691615"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz"
sha256 "73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"
end
def install
virtualenv_install_with_resources
end
def caveats; <<~EOS
To use post-processing options:
`brew install ffmpeg` or `brew install libav`.
EOS
end
test do
url = "https://tv.aftonbladet.se/abtv/articles/244248"
match = <<~EOS
https://absvpvod-vh.akamaihd.net/i/2018/02/cdaefe0533c2561f00a41c52a2d790bd
/,1280_720_2800,960_540_1500,640_360_800,480_270_300,.mp4.csmil
/index_0_av.m3u8
EOS
assert_match match.delete!("\n"), shell_output("#{bin}/svtplay-dl -g #{url}")
end
end
| LinuxbrewTestBot/homebrew-core | Formula/svtplay-dl.rb | Ruby | bsd-2-clause | 4,271 |
# -*- coding: utf-8 -*-
from Headset import Headset
import logging
import time
puerto = 'COM3'
headset = Headset(logging.INFO)
try:
headset.connect(puerto, 115200)
except Exception, e:
raise e
print "Is conected? " + str(headset.isConnected())
print "-----------------------------------------"
headset.startReading(persist_data=True)
time.sleep(5)
headset.stopReading()
headset.closePort()
print "-----------------------------------------"
print "Is conected? " + str(headset.isConnected())
print headset.getStatus()
| emotrix/Emotrix | emotrix/HeadsetTester.py | Python | bsd-2-clause | 529 |
"""
==========================
RecoBundles80 using AFQ API
==========================
An example using the AFQ API to run recobundles with the
`80 bundle atlas <https://figshare.com/articles/Advanced_Atlas_of_80_Bundles_in_MNI_space/7375883>`_.
"""
import os.path as op
import plotly
from AFQ.api.group import GroupAFQ
import AFQ.data.fetch as afd
##########################################################################
# Get some example data
# ---------------------
#
# Retrieves `Stanford HARDI dataset <https://purl.stanford.edu/ng782rw8378>`_.
#
afd.organize_stanford_data(clear_previous_afq=True)
##########################################################################
# Set tractography parameters (optional)
# ---------------------
# We make this tracking_params which we will pass to the AFQ object
# which specifies that we want 50,000 seeds randomly distributed
# in the white matter.
#
# We only do this to make this example faster and consume less space.
tracking_params = dict(n_seeds=50000,
random_seeds=True,
rng_seed=42)
##########################################################################
# Initialize an AFQ object:
# -------------------------
#
# We specify seg_algo as reco80 in segmentation_params. This tells the AFQ
# object to perform RecoBundles using the 80 bundles atlas in the
# segmentation step.
myafq = GroupAFQ(bids_path=op.join(afd.afq_home,
'stanford_hardi'),
preproc_pipeline='vistasoft',
segmentation_params={"seg_algo": "reco80"},
tracking_params=tracking_params)
##########################################################################
# Visualizing bundles and tract profiles:
# ---------------------------------------
# This would run the script and visualize the bundles using the plotly
# interactive visualization, which should automatically open in a
# new browser window.
bundle_html = myafq.all_bundles_figure
plotly.io.show(bundle_html["01"])
| arokem/pyAFQ | examples/plot_afq_reco80.py | Python | bsd-2-clause | 2,044 |
# WARPnet Client<->Server Architecture
# WARPnet Parameter Definitions
#
# Author: Siddharth Gupta
import struct, time
from warpnet_common_params import *
from warpnet_client_definitions import *
from twisted.internet import reactor
import binascii
# Struct IDs
STRUCTID_CONTROL = 0x13
STRUCTID_CONTROL_ACK = 0x14
STRUCTID_COMMAND = 0x17
STRUCTID_COMMAND_ACK = 0x18
STRUCTID_OBSERVE_BER = 0x24
STRUCTID_OBSERVE_BER_REQ = 0x25
STRUCTID_OBSERVE_PER = 0x26
STRUCTID_OBSERVE_PER_REQ = 0x27
# Command IDs
COMMANDID_STARTTRIAL = 0x40
COMMANDID_STOPTRIAL = 0x41
COMMANDID_RESET_PER = 0x50
COMMANDID_ENABLE_BER_TESTING = 0x51
COMMANDID_DISABLE_BER_TESTING = 0x52
########################
## Struct Definitions ##
########################
# ControlStruct is a ClientStruct that stores some basic parameters to pass to the WARP board. The local variable can be accessed
# globally by calling ControlStruct.txPower etc. The struct must also understand the conversion from integer values to binary
# using the prepToSend function; it will be provided with the nodeID.
# typedef struct {
# char structID;
# char nodeID;
# char txPower;
# char channel;
# char modOrderHeader;
# char modOrderPayload;
# short reserved;
# int pktGen_period;
# int pktGen_length;
# } warpnetControl;
class ControlStruct(ClientStruct):
txPower = -1
channel = -1
modOrderHeader = -1
modOrderPayload = -1
reserved = 0
packetGeneratorPeriod = 0
packetGeneratorLength = 0
def __init__(self):
self.structID = STRUCTID_CONTROL
self.txPower = 63
self.channel = 4
self.modOrderHeader = 0
self.modOrderPayload = 2
self.packetGeneratorPeriod = 0
self.packetGeneratorLength = 1300
self.expectedReturnStructID = STRUCTID_CONTROL_ACK
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!6BHII', self.structID, nodeID, self.txPower, self.channel, self.modOrderHeader, self.modOrderPayload, self.reserved, self.packetGeneratorPeriod, self.packetGeneratorLength)
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!BBH', rawData[0:4])
#print "Control struct successfully applied at node %d" % dataTuple[1]
#CommandStruct is used to send commands or requests to the WARP nodes
# The cmdIDs are defined above
# Matching C code definition:
# typedef struct {
# char structID;
# char nodeID;
# char cmdID;
# char cmdParam;
# } warpnetCommand;
class CommandStruct(ClientStruct):
cmdID = -1
cmdParam = -1
def __init__(self, cmdID, cmdParam):
self.structID = STRUCTID_COMMAND
self.expectedReturnStructID = STRUCTID_COMMAND_ACK
self.cmdID = cmdID
self.cmdParam = cmdParam
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!4B', self.structID, nodeID, self.cmdID, self.cmdParam)
def updateFromNode(self, rawData, pcapts):
pass
#print "Successfully executed command %d" % self.cmdID
#ObservePERStruct collects packet error rate (PER) data from WARP nodes
# Matching C code definition:
# typedef struct {
# unsigned char structID;
# unsigned char nodeID;
# unsigned char reqNum;
# unsigned char reqType;
# unsigned int numPkts_tx;
# unsigned int numPkts_rx_good;
# unsigned int numPkts_rx_goodHdrBadPyld;
# unsigned int numPkts_rx_badHdr;
# } warpnetObservePER;
class ObservePERStruct(ClientStruct):
numPkts_tx = -1
numPkts_rx_good = -1
numPkts_rx_goodHdrBadPyld = -1
numPkts_rx_badHdr = -1
reqNum = -1
reqType = -1
def __init__(self, logger=None):
ClientStruct.__init__(self, logger)
self.structID = STRUCTID_OBSERVE_PER_REQ
self.expectedReturnStructID = STRUCTID_OBSERVE_PER
self.numPkts_tx = 0
self.numPkts_rx_good = 0
self.numPkts_rx_goodHdrBadPyld = 0
self.numPkts_rx_badHdr = 0
self.reqNum = 0
self.reqType = 0
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!4B', self.structID, nodeID, self.reqNum, self.reqType)
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!2B 2B 4I', rawData[0:20])
self.reqNum = dataTuple[2]
self.reqType = dataTuple[3]
self.numPkts_tx = dataTuple[4]
self.numPkts_rx_good = dataTuple[5]
self.numPkts_rx_goodHdrBadPyld = dataTuple[6]
self.numPkts_rx_badHdr = dataTuple[7]
#Client struct for collecting BER updates from the ber_processor program
# Matching C code struct:
# typedef struct {
# unsigned char structID;
# unsigned char nodeID;
# unsigned short sequenceNumber;
# unsigned char nodeID_tx;
# unsigned char nodeID_rx;
# unsigned short mac_seqNum;
# unsigned char mac_pktType;
# unsigned char reserved0;
# unsigned char reserved1;
# unsigned char reserved2;
# unsigned int bits_rx;
# unsigned int bits_errors;
# } warpnetObserveBER;
class ObserveBERStruct(ClientStruct):
totalBitsReceived = 0
totalBitErrors = 0
nodeID_tx = -1
nodeID_rx = -1
def __init__(self, logger=None):
ClientStruct.__init__(self, logger)
self.structID = STRUCTID_OBSERVE_BER_REQ
self.expectedReturnStructID = STRUCTID_OBSERVE_BER
self.totalBitsReceived = 0
self.totalBitErrors = 0
def prepToSend(self, nodeID):
self.updateDone = False
return struct.pack('!BBH', self.structID, nodeID, 0)
def updateFromNode(self, rawData, pcapts):
dataTuple = struct.unpack('!2B H 2B H 2I', rawData[0:16])
self.nodeID_tx = dataTuple[3]
self.nodeID_rx = dataTuple[4]
self.totalBitsReceived += dataTuple[6]
self.totalBitErrors += dataTuple[7]
def clearBitCounts(self):
self.totalBitsReceived = 0
self.totalBitErrors = 0
| shailcoolboy/Warp-Trinity | ResearchApps/Measurement/examples/TxPower_vs_BER/warpnet_experiment_structs.py | Python | bsd-2-clause | 5,510 |
package filter
import (
"errors"
"path/filepath"
"strings"
)
// ErrBadString is returned when Match is called with the empty string as the
// second argument.
var ErrBadString = errors.New("filter.Match: string is empty")
// Match returns true if str matches the pattern. When the pattern is
// malformed, filepath.ErrBadPattern is returned. The empty pattern matches
// everything, when str is the empty string ErrBadString is returned.
//
// Pattern can be a combination of patterns suitable for filepath.Match, joined
// by filepath.Separator.
func Match(pattern, str string) (matched bool, err error) {
if pattern == "" {
return true, nil
}
if str == "" {
return false, ErrBadString
}
patterns := strings.Split(pattern, string(filepath.Separator))
strs := strings.Split(str, string(filepath.Separator))
return match(patterns, strs)
}
func hasDoubleWildcard(list []string) (ok bool, pos int) {
for i, item := range list {
if item == "**" {
return true, i
}
}
return false, 0
}
func match(patterns, strs []string) (matched bool, err error) {
if ok, pos := hasDoubleWildcard(patterns); ok {
// gradually expand '**' into separate wildcards
for i := 0; i <= len(strs)-len(patterns)+1; i++ {
newPat := make([]string, pos)
copy(newPat, patterns[:pos])
for k := 0; k < i; k++ {
newPat = append(newPat, "*")
}
newPat = append(newPat, patterns[pos+1:]...)
matched, err := match(newPat, strs)
if err != nil {
return false, err
}
if matched {
return true, nil
}
}
return false, nil
}
if len(patterns) == 0 && len(strs) == 0 {
return true, nil
}
if len(patterns) <= len(strs) {
outer:
for offset := len(strs) - len(patterns); offset >= 0; offset-- {
for i := len(patterns) - 1; i >= 0; i-- {
ok, err := filepath.Match(patterns[i], strs[offset+i])
if err != nil {
return false, err
}
if !ok {
continue outer
}
}
return true, nil
}
}
return false, nil
}
// List returns true if str matches one of the patterns.
func List(patterns []string, str string) (matched bool, err error) {
for _, pat := range patterns {
matched, err = Match(pat, str)
if err != nil {
return false, err
}
if matched {
return true, nil
}
}
return false, nil
}
| intfrr/restic | filter/filter.go | GO | bsd-2-clause | 2,289 |
// Copyright (C) 2015 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es6id: 23.1.3.8
description: >
Throws a TypeError if `this` object does not have a [[MapData]] internal slot.
info: |
Map.prototype.keys ()
1. Let M be the this value.
2. Return CreateMapIterator(M, "key").
23.1.5.1 CreateMapIterator Abstract Operation
...
2. If map does not have a [[MapData]] internal slot, throw a TypeError
exception.
...
---*/
var m = new Map();
assert.throws(TypeError, function() {
Map.prototype.keys.call([]);
});
assert.throws(TypeError, function() {
m.keys.call([]);
});
assert.throws(TypeError, function() {
Map.prototype.keys.call({});
});
assert.throws(TypeError, function() {
m.keys.call({});
});
| sebastienros/jint | Jint.Tests.Test262/test/built-ins/Map/prototype/keys/does-not-have-mapdata-internal-slot.js | JavaScript | bsd-2-clause | 807 |
///
/// @file resizeUninitialized.cpp
/// @brief Test resizeUninitialized() which resizes a std::vector
/// without default initialization.
///
/// Copyright (C) 2022 Kim Walisch, <kim.walisch@gmail.com>
///
/// This file is distributed under the BSD License. See the COPYING
/// file in the top level directory.
///
#include <primesieve/resizeUninitialized.hpp>
#include <stdint.h>
#include <iostream>
#include <vector>
#include <cstdlib>
/// Report the outcome of a single verification step and terminate the
/// whole test program with exit code 1 as soon as a check fails.
void check(bool OK)
{
  const char* verdict = OK ? "OK" : "ERROR";
  std::cout << "   " << verdict << "\n";
  if (!OK)
    std::exit(1);
}
// Exercises resizeUninitialized() (from primesieve/resizeUninitialized.hpp):
// it must grow a std::vector without default-initializing new elements,
// shrink without reallocating, and copy old contents when a reallocation
// is unavoidable. The address-equality checks below depend on the exact
// allocation sequence, so statement order matters.
int main()
{
  std::size_t size = 100000;
  uint64_t val = (1ull << 60) - 3;

  {
    std::vector<uint64_t> vect;
    vect.resize(size, val);

    // After resizeUninitialized() the old vector
    // content must still be the same.
    vect.clear();
    resizeUninitialized(vect, size);

    std::cout << "vect.size() = " << vect.size();
    check(vect.size() == size);

    std::cout << "vect.capacity() = " << vect.capacity();
    check(vect.capacity() == size);

    // Spot-check elements: clear() + resizeUninitialized() must not have
    // overwritten the previously stored values.
    for (std::size_t i = 0; i < size; i += 37)
    {
      std::cout << "vect[" << i << "] = " << vect[i];
      check(vect[i] == val);
    }

    // After resizeUninitialized() to a smaller size
    // there must be no reallocation. The capacity
    // must still be the same as before.
    std::size_t newSize = size / 67;
    resizeUninitialized(vect, newSize);

    std::cout << "vect.size() = " << vect.size();
    check(vect.size() == newSize);

    std::cout << "vect.capacity() = " << vect.capacity();
    check(vect.capacity() == size);

    for (std::size_t i = 0; i < newSize; i += 37)
    {
      std::cout << "vect[" << i << "] = " << vect[i];
      check(vect[i] == val);
    }

    // Test that reallocation works correctly.
    // First print the current vector address.
    uintptr_t address1 = (uintptr_t) vect.data();
    std::cout << "1st vector allocation: " << address1 << std::endl;

    // There must be no reallocation here.
    vect.clear();
    resizeUninitialized(vect, size);
    uintptr_t address2 = (uintptr_t) vect.data();
    std::cout << "1st vector allocation: " << address2 << std::endl;

    if (address1 != address2)
    {
      std::cout << "address2 = " << address2;
      check(address2 == address1);
      std::exit(1);
    }

    // This causes a reallocation, the old vector
    // content must be copied into the new vector.
    resizeUninitialized(vect, size * 50);
    uintptr_t address3 = (uintptr_t) vect.data();
    std::cout << "2nd vector allocation: " << address3 << std::endl;

    std::cout << "vect.size() = " << vect.size();
    check(vect.size() == size * 50);

    std::cout << "vect.capacity() = " << vect.capacity();
    check(vect.capacity() == size * 50);

    // Every one of the original `size` elements must have survived the copy.
    for (std::size_t i = 0; i < size; i++)
    {
      if (vect[i] != val)
      {
        std::cout << "vect[" << i << "] = " << vect[i];
        check(vect[i] == val);
        std::exit(1);
      }
    }
  }

  std::cout << std::endl;
  std::cout << "All tests passed successfully!" << std::endl;

  return 0;
}
| kimwalisch/primesieve | test/resizeUninitialized.cpp | C++ | bsd-2-clause | 3,062 |
# typed: false
# frozen_string_literal: true
module Homebrew
module Diagnostic
class Volumes
def initialize
@volumes = get_mounts
end
def which(path)
vols = get_mounts path
# no volume found
return -1 if vols.empty?
vol_index = @volumes.index(vols[0])
# volume not found in volume list
return -1 if vol_index.nil?
vol_index
end
def get_mounts(path = nil)
vols = []
# get the volume of path, if path is nil returns all volumes
args = %w[/bin/df -P]
args << path if path
Utils.popen_read(*args) do |io|
io.each_line do |line|
case line.chomp
# regex matches: /dev/disk0s2 489562928 440803616 48247312 91% /
when /^.+\s+[0-9]+\s+[0-9]+\s+[0-9]+\s+[0-9]{1,3}%\s+(.+)/
vols << Regexp.last_match(1)
end
end
end
vols
end
end
class Checks
undef fatal_preinstall_checks, fatal_build_from_source_checks,
fatal_setup_build_environment_checks, supported_configuration_checks,
build_from_source_checks
def fatal_preinstall_checks
checks = %w[
check_access_directories
]
# We need the developer tools for `codesign`.
checks << "check_for_installed_developer_tools" if Hardware::CPU.arm?
checks.freeze
end
def fatal_build_from_source_checks
%w[
check_xcode_license_approved
check_xcode_minimum_version
check_clt_minimum_version
check_if_xcode_needs_clt_installed
check_if_supported_sdk_available
check_broken_sdks
].freeze
end
def fatal_setup_build_environment_checks
%w[
check_if_supported_sdk_available
].freeze
end
def supported_configuration_checks
%w[
check_for_unsupported_macos
].freeze
end
def build_from_source_checks
%w[
check_for_installed_developer_tools
check_xcode_up_to_date
check_clt_up_to_date
].freeze
end
def check_for_non_prefixed_findutils
findutils = Formula["findutils"]
return unless findutils.any_version_installed?
gnubin = %W[#{findutils.opt_libexec}/gnubin #{findutils.libexec}/gnubin]
default_names = Tab.for_name("findutils").with? "default-names"
return if !default_names && (paths & gnubin).empty?
<<~EOS
Putting non-prefixed findutils in your path can cause python builds to fail.
EOS
rescue FormulaUnavailableError
nil
end
def check_for_unsupported_macos
return if Homebrew::EnvConfig.developer?
who = +"We"
what = if OS::Mac.prerelease?
"pre-release version"
elsif OS::Mac.outdated_release?
who << " (and Apple)"
"old version"
end
return if what.blank?
who.freeze
<<~EOS
You are using macOS #{MacOS.version}.
#{who} do not provide support for this #{what}.
#{please_create_pull_requests(what)}
EOS
end
def check_xcode_up_to_date
return unless MacOS::Xcode.outdated?
# CI images are going to end up outdated so don't complain when
# `brew test-bot` runs `brew doctor` in the CI for the Homebrew/brew
# repository. This only needs to support whatever CI providers
# Homebrew/brew is currently using.
return if ENV["GITHUB_ACTIONS"]
message = <<~EOS
Your Xcode (#{MacOS::Xcode.version}) is outdated.
Please update to Xcode #{MacOS::Xcode.latest_version} (or delete it).
#{MacOS::Xcode.update_instructions}
EOS
if OS::Mac.prerelease?
current_path = Utils.popen_read("/usr/bin/xcode-select", "-p")
message += <<~EOS
If #{MacOS::Xcode.latest_version} is installed, you may need to:
sudo xcode-select --switch /Applications/Xcode.app
Current developer directory is:
#{current_path}
EOS
end
message
end
def check_clt_up_to_date
return unless MacOS::CLT.outdated?
# CI images are going to end up outdated so don't complain when
# `brew test-bot` runs `brew doctor` in the CI for the Homebrew/brew
# repository. This only needs to support whatever CI providers
# Homebrew/brew is currently using.
return if ENV["GITHUB_ACTIONS"]
<<~EOS
A newer Command Line Tools release is available.
#{MacOS::CLT.update_instructions}
EOS
end
def check_xcode_minimum_version
return unless MacOS::Xcode.below_minimum_version?
xcode = MacOS::Xcode.version.to_s
xcode += " => #{MacOS::Xcode.prefix}" unless MacOS::Xcode.default_prefix?
<<~EOS
Your Xcode (#{xcode}) is too outdated.
Please update to Xcode #{MacOS::Xcode.latest_version} (or delete it).
#{MacOS::Xcode.update_instructions}
EOS
end
def check_clt_minimum_version
return unless MacOS::CLT.below_minimum_version?
<<~EOS
Your Command Line Tools are too outdated.
#{MacOS::CLT.update_instructions}
EOS
end
def check_if_xcode_needs_clt_installed
return unless MacOS::Xcode.needs_clt_installed?
<<~EOS
Xcode alone is not sufficient on #{MacOS.version.pretty_name}.
#{DevelopmentTools.installation_instructions}
EOS
end
def check_ruby_version
return if RUBY_VERSION == HOMEBREW_REQUIRED_RUBY_VERSION
return if Homebrew::EnvConfig.developer? && OS::Mac.prerelease?
<<~EOS
Ruby version #{RUBY_VERSION} is unsupported on #{MacOS.version}. Homebrew
is developed and tested on Ruby #{HOMEBREW_REQUIRED_RUBY_VERSION}, and may not work correctly
on other Rubies. Patches are accepted as long as they don't cause breakage
on supported Rubies.
EOS
end
def check_xcode_prefix
prefix = MacOS::Xcode.prefix
return if prefix.nil?
return unless prefix.to_s.include?(" ")
<<~EOS
Xcode is installed to a directory with a space in the name.
This will cause some formulae to fail to build.
EOS
end
def check_xcode_prefix_exists
prefix = MacOS::Xcode.prefix
return if prefix.nil? || prefix.exist?
<<~EOS
The directory Xcode is reportedly installed to doesn't exist:
#{prefix}
You may need to `xcode-select` the proper path if you have moved Xcode.
EOS
end
def check_xcode_select_path
return if MacOS::CLT.installed?
return unless MacOS::Xcode.installed?
return if File.file?("#{MacOS.active_developer_dir}/usr/bin/xcodebuild")
path = MacOS::Xcode.bundle_path
path = "/Developer" if path.nil? || !path.directory?
<<~EOS
Your Xcode is configured with an invalid path.
You should change it to the correct path:
sudo xcode-select --switch #{path}
EOS
end
def check_xcode_license_approved
# If the user installs Xcode-only, they have to approve the
# license or no "xc*" tool will work.
return unless `/usr/bin/xcrun clang 2>&1`.include?("license")
return if $CHILD_STATUS.success?
<<~EOS
You have not agreed to the Xcode license.
Agree to the license by opening Xcode.app or running:
sudo xcodebuild -license
EOS
end
def check_xquartz_up_to_date
return unless MacOS::XQuartz.outdated?
<<~EOS
Your XQuartz (#{MacOS::XQuartz.version}) is outdated.
Please install XQuartz #{MacOS::XQuartz.latest_version} (or delete the current version).
XQuartz can be updated using Homebrew Cask by running:
brew reinstall xquartz
EOS
end
def check_filesystem_case_sensitive
dirs_to_check = [
HOMEBREW_PREFIX,
HOMEBREW_REPOSITORY,
HOMEBREW_CELLAR,
HOMEBREW_TEMP,
]
case_sensitive_dirs = dirs_to_check.select do |dir|
# We select the dir as being case-sensitive if either the UPCASED or the
# downcased variant is missing.
# Of course, on a case-insensitive fs, both exist because the os reports so.
# In the rare situation when the user has indeed a downcased and an upcased
# dir (e.g. /TMP and /tmp) this check falsely thinks it is case-insensitive
# but we don't care because: 1. there is more than one dir checked, 2. the
# check is not vital and 3. we would have to touch files otherwise.
upcased = Pathname.new(dir.to_s.upcase)
downcased = Pathname.new(dir.to_s.downcase)
dir.exist? && !(upcased.exist? && downcased.exist?)
end
return if case_sensitive_dirs.empty?
volumes = Volumes.new
case_sensitive_vols = case_sensitive_dirs.map do |case_sensitive_dir|
volumes.get_mounts(case_sensitive_dir)
end
case_sensitive_vols.uniq!
<<~EOS
The filesystem on #{case_sensitive_vols.join(",")} appears to be case-sensitive.
The default macOS filesystem is case-insensitive. Please report any apparent problems.
EOS
end
def check_for_gettext
find_relative_paths("lib/libgettextlib.dylib",
"lib/libintl.dylib",
"include/libintl.h")
return if @found.empty?
# Our gettext formula will be caught by check_linked_keg_only_brews
gettext = begin
Formulary.factory("gettext")
rescue
nil
end
if gettext&.linked_keg&.directory?
homebrew_owned = @found.all? do |path|
Pathname.new(path).realpath.to_s.start_with? "#{HOMEBREW_CELLAR}/gettext"
end
return if homebrew_owned
end
inject_file_list @found, <<~EOS
gettext files detected at a system prefix.
These files can cause compilation and link failures, especially if they
are compiled with improper architectures. Consider removing these files:
EOS
end
def check_for_iconv
find_relative_paths("lib/libiconv.dylib", "include/iconv.h")
return if @found.empty?
libiconv = begin
Formulary.factory("libiconv")
rescue
nil
end
if libiconv&.linked_keg&.directory?
unless libiconv.keg_only?
<<~EOS
A libiconv formula is installed and linked.
This will break stuff. For serious. Unlink it.
EOS
end
else
inject_file_list @found, <<~EOS
libiconv files detected at a system prefix other than /usr.
Homebrew doesn't provide a libiconv formula, and expects to link against
the system version in /usr. libiconv in other prefixes can cause
compile or link failure, especially if compiled with improper
architectures. macOS itself never installs anything to /usr/local so
it was either installed by a user or some other third party software.
tl;dr: delete these files:
EOS
end
end
def check_for_bitdefender
if !Pathname("/Library/Bitdefender/AVP/EndpointSecurityforMac.app").exist? &&
!Pathname("/Library/Bitdefender/AVP/BDLDaemon").exist?
return
end
<<~EOS
You have installed Bitdefender. The "Traffic Scan" option interferes with
Homebrew's ability to download packages. See:
#{Formatter.url("https://github.com/Homebrew/brew/issues/5558")}
EOS
end
def check_for_multiple_volumes
return unless HOMEBREW_CELLAR.exist?
volumes = Volumes.new
# Find the volumes for the TMP folder & HOMEBREW_CELLAR
real_cellar = HOMEBREW_CELLAR.realpath
where_cellar = volumes.which real_cellar
begin
tmp = Pathname.new(Dir.mktmpdir("doctor", HOMEBREW_TEMP))
begin
real_tmp = tmp.realpath.parent
where_tmp = volumes.which real_tmp
ensure
Dir.delete tmp
end
rescue
return
end
return if where_cellar == where_tmp
<<~EOS
Your Cellar and TEMP directories are on different volumes.
macOS won't move relative symlinks across volumes unless the target file already
exists. Brews known to be affected by this are Git and Narwhal.
You should set the "HOMEBREW_TEMP" environment variable to a suitable
directory on the same volume as your Cellar.
EOS
end
def check_deprecated_caskroom_taps
tapped_caskroom_taps = Tap.select { |t| t.user == "caskroom" || t.name == "phinze/cask" }
.map(&:name)
return if tapped_caskroom_taps.empty?
<<~EOS
You have the following deprecated, cask taps tapped:
#{tapped_caskroom_taps.join("\n ")}
Untap them with `brew untap`.
EOS
end
def check_if_supported_sdk_available
return unless DevelopmentTools.installed?
return unless MacOS.sdk_root_needed?
return if MacOS.sdk
locator = MacOS.sdk_locator
source = if locator.source == :clt
update_instructions = MacOS::CLT.update_instructions
"Command Line Tools (CLT)"
else
update_instructions = MacOS::Xcode.update_instructions
"Xcode"
end
<<~EOS
Your #{source} does not support macOS #{MacOS.version}.
It is either outdated or was modified.
Please update your #{source} or delete it if no updates are available.
#{update_instructions}
EOS
end
# The CLT 10.x -> 11.x upgrade process on 10.14 contained a bug which broke the SDKs.
# Notably, MacOSX10.14.sdk would indirectly symlink to MacOSX10.15.sdk.
# This diagnostic was introduced to check for this and recommend a full reinstall.
def check_broken_sdks
locator = MacOS.sdk_locator
return if locator.all_sdks.all? do |sdk|
path_version = sdk.path.basename.to_s[MacOS::SDK::VERSIONED_SDK_REGEX, 1]
next true if path_version.blank?
sdk.version == MacOS::Version.new(path_version).strip_patch
end
if locator.source == :clt
source = "Command Line Tools (CLT)"
path_to_remove = MacOS::CLT::PKG_PATH
installation_instructions = MacOS::CLT.installation_instructions
else
source = "Xcode"
path_to_remove = MacOS::Xcode.bundle_path
installation_instructions = MacOS::Xcode.installation_instructions
end
<<~EOS
The contents of the SDKs in your #{source} installation do not match the SDK folder names.
A clean reinstall of #{source} should fix this.
Remove the broken installation before reinstalling:
sudo rm -rf #{path_to_remove}
#{installation_instructions}
EOS
end
end
end
end
| sjackman/homebrew | Library/Homebrew/extend/os/mac/diagnostic.rb | Ruby | bsd-2-clause | 15,624 |
/*
Copyright (c) 2014, Project OSRM, Dennis Luxen, others
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef CONCURRENT_QUEUE_HPP
#define CONCURRENT_QUEUE_HPP
#include <boost/circular_buffer.hpp>
#include <condition_variable>
#include <mutex>
/// Bounded, thread-safe FIFO queue backed by a fixed-capacity circular
/// buffer. push() blocks while the queue is full; wait_and_pop() blocks
/// while it is empty; try_pop() and empty() never block indefinitely.
template <typename Data> class ConcurrentQueue
{
  public:
    /// Creates a queue that holds at most max_size elements.
    explicit ConcurrentQueue(const size_t max_size) : m_internal_queue(max_size) {}

    /// Appends data, blocking until capacity is available.
    inline void push(const Data &data)
    {
        std::unique_lock<std::mutex> lock(m_mutex);
        m_not_full.wait(lock,
                        [this]
                        { return m_internal_queue.size() < m_internal_queue.capacity(); });
        m_internal_queue.push_back(data);
        m_not_empty.notify_one();
    }

    /// Thread-safe emptiness check. The lock is required even for a
    /// read-only query: an unsynchronized read of the buffer while another
    /// thread pushes/pops is a data race (undefined behavior).
    inline bool empty() const
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        return m_internal_queue.empty();
    }

    /// Removes the front element into popped_value, blocking until an
    /// element is available.
    inline void wait_and_pop(Data &popped_value)
    {
        std::unique_lock<std::mutex> lock(m_mutex);
        m_not_empty.wait(lock,
                         [this]
                         { return !m_internal_queue.empty(); });
        popped_value = m_internal_queue.front();
        m_internal_queue.pop_front();
        m_not_full.notify_one();
    }

    /// Non-blocking pop; returns false (leaving popped_value untouched)
    /// when the queue is empty.
    inline bool try_pop(Data &popped_value)
    {
        std::unique_lock<std::mutex> lock(m_mutex);
        if (m_internal_queue.empty())
        {
            return false;
        }
        popped_value = m_internal_queue.front();
        m_internal_queue.pop_front();
        m_not_full.notify_one();
        return true;
    }

  private:
    boost::circular_buffer<Data> m_internal_queue;
    // mutable so the const observer empty() can still acquire the lock.
    mutable std::mutex m_mutex;
    std::condition_variable m_not_empty;
    std::condition_variable m_not_full;
};
#endif // CONCURRENT_QUEUE_HPP
| VRPTools/pkg-osrm-backend | data_structures/concurrent_queue.hpp | C++ | bsd-2-clause | 2,902 |
import TLSFingerprints from "./models/TLSFingerprints";
import FingerPrintsTemplate from "./templates/Fingerprints.html";
import FingerPrintTemplate from "./templates/Fingerprint.html";
import FingerPrintDialogContent from "./templates/FingerPrintDialogContent.html";
// TLS handshake fingerprints grouped by user agent, fetched from the backend.
const model = new TLSFingerprints();

// Renders one fingerprint row and lets the operator persist it as a
// trusted (or explicitly untrusted) fingerprint for the given user agent.
const FingerPrintSingle = Backbone.View.extend({
    initialize: function(params) {
        // user-agent string this fingerprint belongs to
        this.ua = params.ua;
    },
    events: {
        "click button": "trust_fingerprint"
    },
    render: function () {
        // Row markup is produced from the fingerprint's attributes.
        this.$el.html(FingerPrintTemplate(this.model.toJSON()));
    },
    // Opens a modal asking for a description and trusted flag before saving.
    trust_fingerprint: function() {
        const that = this;
        BootstrapDialog.show({
            type: BootstrapDialog.TYPE_INFO,
            title: "Save new fingerprint",
            message: $('<div></div>').html(FingerPrintDialogContent({})),
            buttons: [{
                label: "Save",
                cssClass: 'btn-primary',
                icon: 'fa fa-floppy-o ',
                action: function (dlg) {
                    // bind to controller
                    that.handle_trust(
                        dlg.$modalBody.find('#fp_description').val(),
                        dlg.$modalBody.find('#fp_trusted').is(':checked')
                    );
                    dlg.close();
                }
            }, {
                label: 'Close',
                action: function (dlg) {
                    dlg.close();
                }
            }]
        });
    },
    // Sends the fingerprint to the backend; `trusted` is serialized as
    // '1'/'0' as the API expects. The response is intentionally ignored.
    handle_trust: function (description, trusted) {
        ajaxCall(
            "/api/nginx/settings/addtls_fingerprint",
            {
                'tls_fingerprint': {
                    'curves' : this.model.get('curves'),
                    'ciphers': this.model.get('ciphers'),
                    'user_agent': this.ua,
                    'trusted': trusted ? '1' : '0',
                    'description': description
                }
            },
            function (data, status) {
            }
        );
    }
});
// Renders all fingerprints seen for one user agent: a list of rows plus a
// donut chart showing how often each cipher/curve combination occurred.
const FingerPrintList = Backbone.View.extend({
    initialize: function (params) {
        this.ua = params.ua;
        this.render();
    },
    render: function () {
        const that = this;
        this.$el.html(FingerPrintsTemplate({ua: this.ua}));
        const content_holder = this.$el.find('.content_holder');
        const chart_holder = this.$el.find('.chart_holder');
        // One chart slice per distinct ciphers/curves pair, weighted by count.
        const chart_data = this.collection.map(function (d) {
            return {label: d.get('ciphers') + "||" + d.get('curves'), value: d.get('count')};
        });
        // One FingerPrintSingle row per fingerprint in the collection.
        this.collection.forEach(function (fingerprint) {
            const row = new FingerPrintSingle({'model': fingerprint, 'ua': that.ua});
            content_holder.append(row.$el);
            row.render();
        });
        // Chart rendering is best-effort: a failure here must not prevent
        // the row list from being shown.
        try {
            nv.addGraph(function () {
                const chart = nv.models.pieChart();
                chart.x(function (d) {
                    return d.label;
                });
                chart.y(function (d) {
                    return d.value;
                });
                chart.showLabels(false);
                chart.labelType("value");
                chart.donut(true);
                chart.donutRatio(0.2);
                d3.select(chart_holder[0])
                    .datum(chart_data)
                    .transition().duration(350)
                    .call(chart);
                return chart;
            });
        } catch (e) {
            console.log(e);
        }
    }
});
// Top-level view: re-renders the whole listing every time the model syncs
// with the backend.
const FingerprintMain = Backbone.View.extend({
    initialize: function() {
        this.listenTo(this.model, "sync", this.render);
    },
    render: function () {
        this.$el.html('');
        this.render_all(this.model.attributes);
    },
    // Creates one FingerPrintList per user agent found in the fetched data.
    render_all(attributes) {
        for (const ua in attributes) {
            // skip loop if the property is from prototype
            if (attributes.hasOwnProperty(ua)) {
                const fingerprints = new Backbone.Collection(attributes[ua]);
                const fingerprints_view = new FingerPrintList({'collection': fingerprints, 'ua': ua});
                this.$el.append(fingerprints_view.$el);
            }
        }
    },
});
// Mount the application and trigger the initial fetch (which fires "sync"
// and therefore the first render).
const fpm = new FingerprintMain({'model': model});
$('#tls_handshakes_application').append(fpm.$el);
model.fetch();
/*
* Copyright (C) 2010 Igalia S.L.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "FloatRect.h"
#include <cairo.h>
namespace WebCore {

// Constructs a FloatRect from a cairo rectangle, copying its origin (x, y)
// into m_location and its width/height into m_size.
FloatRect::FloatRect(const cairo_rectangle_t& r)
    : m_location(r.x, r.y)
    , m_size(r.width, r.height)
{
}

// Converts this rect to the equivalent cairo_rectangle_t by value.
FloatRect::operator cairo_rectangle_t() const
{
    cairo_rectangle_t r = { x(), y(), width(), height() };
    return r;
}

} // namespace WebCore
| aosm/WebCore | platform/graphics/cairo/FloatRectCairo.cpp | C++ | bsd-2-clause | 1,657 |
# Copyright (c) 2013-2016 Hewlett Packard Enterprise Development LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import hashlib
import os.path
import sys
from requestbuilder import Arg
from requestbuilder.exceptions import ArgumentError
from requestbuilder.mixins import FileTransferProgressBarMixin
import six
from euca2ools.commands.s3 import S3Request
import euca2ools.bundle.pipes
class GetObject(S3Request, FileTransferProgressBarMixin):
    """Download one object from the server.

    The downloaded data is verified against the response's Content-Length
    and (when present) MD5 ETag. Returns a dict mapping the source to its
    md5/sha1 digests and byte count.
    """

    DESCRIPTION = 'Retrieve objects from the server'
    ARGS = [Arg('source', metavar='BUCKET/KEY', route_to=None,
                help='the object to download (required)'),
            Arg('-o', dest='dest', metavar='PATH', route_to=None,
                default='.', help='''where to download to. If this names a
                directory the object will be written to a file inside of that
                directory. If this is "-" the object will be written to
                stdout. Otherwise it will be written to a file with the name
                given. (default: current directory)''')]

    def configure(self):
        """Validate source/dest arguments and resolve ``dest`` into a
        writable *binary* file-like object."""
        S3Request.configure(self)

        bucket, _, key = self.args['source'].partition('/')
        if not bucket:
            raise ArgumentError('source must contain a bucket name')
        if not key:
            raise ArgumentError('source must contain a key name')

        if isinstance(self.args.get('dest'), six.string_types):
            # If it is not a string we assume it is a file-like object
            if self.args['dest'] == '-':
                # Object data is raw bytes, so prefer stdout's underlying
                # binary buffer where one exists (python 3).
                self.args['dest'] = getattr(sys.stdout, 'buffer', sys.stdout)
            elif os.path.isdir(self.args['dest']):
                basename = os.path.basename(key)
                if not basename:
                    raise ArgumentError("specify a complete file path with -o "
                                        "to download objects that end in '/'")
                dest_path = os.path.join(self.args['dest'], basename)
                # 'wb': the payload is binary; text mode would corrupt it on
                # windows and raise TypeError on python 3.
                self.args['dest'] = open(dest_path, 'wb')
            else:
                self.args['dest'] = open(self.args['dest'], 'wb')

    def preprocess(self):
        # The request path is simply "bucket/key".
        self.path = self.args['source']

    def main(self):
        """Stream the object to ``dest``, updating a progress bar and
        hashing the data as it arrives, then verify size and ETag."""
        # Note that this method does not close self.args['dest']
        self.preprocess()
        bytes_written = 0
        md5_digest = hashlib.md5()
        sha_digest = hashlib.sha1()
        response = self.send()
        content_length = response.headers.get('Content-Length')
        if content_length:
            pbar = self.get_progressbar(label=self.args['source'],
                                        maxval=int(content_length))
        else:
            pbar = self.get_progressbar(label=self.args['source'])
        # Guard every progress bar call consistently in case no bar is
        # available (e.g. non-interactive output).
        if pbar is not None:
            pbar.start()
        for chunk in response.iter_content(chunk_size=euca2ools.BUFSIZE):
            self.args['dest'].write(chunk)
            bytes_written += len(chunk)
            md5_digest.update(chunk)
            sha_digest.update(chunk)
            if pbar is not None:
                pbar.update(bytes_written)
        self.args['dest'].flush()
        if pbar is not None:
            pbar.finish()

        # Integrity checks
        if content_length and bytes_written != int(content_length):
            # int() here: '%i' requires a number, not the raw header string.
            self.log.error('rejecting download due to Content-Length size '
                           'mismatch (expected: %i, actual: %i)',
                           int(content_length), bytes_written)
            raise RuntimeError('downloaded file appears to be corrupt '
                               '(expected size: {0}, actual: {1})'
                               .format(content_length, bytes_written))
        etag = response.headers.get('ETag', '').lower().strip('"')
        if (len(etag) == 32 and
                all(char in '0123456789abcdef' for char in etag)):
            # It looks like an MD5 hash (multipart uploads have non-MD5
            # ETags, which we cannot verify)
            if md5_digest.hexdigest() != etag:
                self.log.error('rejecting download due to ETag MD5 mismatch '
                               '(expected: %s, actual: %s)',
                               etag, md5_digest.hexdigest())
                raise RuntimeError('downloaded file appears to be corrupt '
                                   '(expected MD5: {0}, actual: {1})'
                                   .format(etag, md5_digest.hexdigest()))

        return {self.args['source']: {'md5': md5_digest.hexdigest(),
                                      'sha1': sha_digest.hexdigest(),
                                      'size': bytes_written}}
| gholms/euca2ools | euca2ools/commands/s3/getobject.py | Python | bsd-2-clause | 5,714 |
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
info: |
If pos is a value of Number type that is an integer, then the result of
x.charAt(pos) is equal to the result of x.substring(pos, pos+1)
es5id: 15.5.4.4_A4_T2
description: >
    Compare results of x.charAt(pos) and x.substring(pos, pos+1), when
    pos is less than zero
---*/
//////////////////////////////////////////////////////////////////////////////
//CHECK#1
// For a negative position, charAt must return the empty string, exactly as
// substring(pos, pos + 1) does (substring clamps negative arguments to 0,
// yielding substring(0, 0) === ""). Verified for pos = -2 and pos = -1.
for (var i = -2; i < 0; i++) {
  if ("ABC\u0041\u0042\u0043".charAt(i) !== "\u0041\u0042\u0043ABC".substring(i, i + 1)) {
    $ERROR('#' + (i + 2) + ': "ABC\\u0041\\u0042\\u0043".charAt(' + i + ') === "\\u0041\\u0042\\u0043ABC".substring(' + i + ', ' + (i + 1) + '). Actual: "ABC\\u0041\\u0042\\u0043".charAt(' + i + ') ===' + ("ABC\u0041\u0042\u0043".charAt(i)));
  }
}
//
//////////////////////////////////////////////////////////////////////////////
mock layout begins, <?php echo $this->layout_content; ?>, mock layout ends | auraphp/Aura.Framework | tests/Aura/Framework/Mock/layouts/default.php | PHP | bsd-2-clause | 74 |
class Gitlab::Client
  # Client methods for working with project milestones.
  module Milestones
    # Lists the milestones of a project.
    #
    # @example
    #   Gitlab.milestones(5)
    #
    # @param [Integer] project The ID of a project.
    # @param [Hash] options A customizable set of options.
    # @option options [Integer] :page The page number.
    # @option options [Integer] :per_page The number of results per page.
    # @return [Array<Gitlab::ObjectifiedHash>]
    def milestones(project, options={})
      path = "/projects/#{project}/milestones"
      get(path, :query => options)
    end

    # Fetches one milestone by its ID.
    #
    # @example
    #   Gitlab.milestone(5, 36)
    #
    # @param [Integer, String] project The ID of a project.
    # @param [Integer] id The ID of a milestone.
    # @return [Gitlab::ObjectifiedHash]
    def milestone(project, id)
      path = "/projects/#{project}/milestones/#{id}"
      get(path)
    end

    # Adds a milestone to a project.
    #
    # @param [Integer] project The ID of a project.
    # @param [String] title The title of a milestone.
    # @param [Hash] options A customizable set of options.
    # @option options [String] :description The description of a milestone.
    # @option options [String] :due_date The due date of a milestone.
    # @return [Gitlab::ObjectifiedHash] Information about created milestone.
    def create_milestone(project, title, options={})
      # options may override :title, matching the original merge order
      post("/projects/#{project}/milestones",
           :body => {:title => title}.merge(options))
    end

    # Modifies an existing milestone.
    #
    # @param [Integer] project The ID of a project.
    # @param [Integer] id The ID of a milestone.
    # @param [Hash] options A customizable set of options.
    # @option options [String] :title The title of a milestone.
    # @option options [String] :description The description of a milestone.
    # @option options [String] :due_date The due date of a milestone.
    # @option options [String] :state_event The state of a milestone ('close' or 'activate').
    # @return [Gitlab::ObjectifiedHash] Information about updated milestone.
    def edit_milestone(project, id, options={})
      path = "/projects/#{project}/milestones/#{id}"
      put(path, :body => options)
    end
  end
end
"use strict";
module.exports = {
delegates: {
im_too_lazy: function() {
$C("button", {
name: "my first button"
}, function(btn) {
$("#button_holder").empty(); // extra
btn.prependTo($("#button_holder"));
// alright, this is technically extra
btn.$el.hide();
btn.$el.fadeIn();
});
}
}
};
| logV/snorkel.sf | snorkel/app/controllers/home/client.js | JavaScript | bsd-2-clause | 388 |
/* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
using System;
using System.ComponentModel;
using System.Windows.Forms;
using XenAdmin.Network;
using XenAdmin.Core;
using XenAPI;
namespace XenAdmin.Controls
{
/// <summary>
/// Tree view listing every known connection: pools with their member hosts as
/// children, plus standalone hosts as top-level entries. The user can pick a
/// pool or a single host; disabled entries cannot be selected.
/// </summary>
public partial class PoolHostPicker : CustomTreeView
{
    /// <summary>Raised whenever the selection changes; the args report whether an enabled pool/host is selected.</summary>
    public EventHandler<SelectedItemEventArgs> SelectedItemChanged;

    /// <summary>
    /// When true, error description text is blanked out: pool nodes are always
    /// disabled and host nodes are enabled purely on their live state.
    /// </summary>
    public bool SupressErrors = false;

    public PoolHostPicker()
    {
        InitializeComponent();
        // Marshals cache collection-change notifications back onto the UI thread.
        CollectionChangedWithInvoke = Program.ProgramInvokeHandler(CollectionChanged);
        ShowCheckboxes = false;
        ShowDescription = true;
        ShowImages = true;
        buildList();
        ConnectionsManager.XenConnections.CollectionChanged += CollectionChanged;
    }

    public override int ItemHeight { get { return 18; } }

    private CollectionChangeEventHandler CollectionChangedWithInvoke;

    // Rebuilds the list when the set of connections or hosts changes.
    void CollectionChanged(object sender, CollectionChangeEventArgs e)
    {
        Program.BeginInvoke(this, buildList);
    }

    // Rebuilds the list when a property affecting display or enablement changes.
    void PropertyChanged(object sender, PropertyChangedEventArgs e)
    {
        if(e.PropertyName == "name_label" || e.PropertyName == "metrics" || e.PropertyName == "enabled" || e.PropertyName == "live" || e.PropertyName == "patches")
            Program.Invoke(this, buildList);
    }

    /// <summary>
    /// Rebuilds the whole tree from ConnectionsManager, re-registering event
    /// handlers and restoring the previous selection where possible.
    /// Must run on the UI thread.
    /// </summary>
    public void buildList()
    {
        Program.AssertOnEventThread();

        // Remember the current selection so it can be restored afterwards.
        Host selectedhost = null;
        IXenConnection selectedconnection = null;
        if (SelectedItem != null)
        {
            if (SelectedItem is HostItem)
            {
                selectedhost = (SelectedItem as HostItem).TheHost;
            }
            else if (SelectedItem is PoolItem)
            {
                selectedconnection = (SelectedItem as PoolItem).Connection;
            }
        }
        BeginUpdate();
        try
        {
            ClearAllNodes();
            foreach (IXenConnection xc in ConnectionsManager.XenConnectionsCopy)
            {
                if (Helpers.GetPool(xc) != null)
                {
                    // A real pool: pool node with one child node per host.
                    PoolItem item = new PoolItem(xc);
                    if (SupressErrors)
                    {
                        item.Enabled = false;
                        item.Description = "";
                    }
                    AddNode(item);
                    foreach (Host host in xc.Cache.Hosts)
                    {
                        HostItem item2 = new HostItem(host);
                        if (SupressErrors)
                        {
                            item2.Enabled = host.IsLive();
                            item2.Description = "";
                        }
                        AddChildNode(item, item2);
                        // -=/+= avoids subscribing the same handler twice.
                        host.PropertyChanged -= PropertyChanged;
                        host.PropertyChanged += PropertyChanged;
                    }
                }
                else if (xc.IsConnected)
                {
                    // Standalone connected server: show the coordinator host itself.
                    Host host = Helpers.GetCoordinator(xc);
                    if (host != null)
                    {
                        HostItem item = new HostItem(host);
                        if (SupressErrors)
                        {
                            item.Enabled = host.IsLive();
                            item.Description = "";
                        }
                        AddNode(item);
                        host.PropertyChanged -= PropertyChanged;
                        host.PropertyChanged += PropertyChanged;
                    }
                    else
                    {
                        // Connected but coordinator not resolvable yet: fall back
                        // to a (pool-style) connection node.
                        PoolItem item = new PoolItem(xc);
                        if (SupressErrors)
                        {
                            item.Enabled = false;
                            item.Description = "";
                        }
                        AddNode(item);
                    }
                }
                else
                {
                    // Disconnected connection: shown as a disabled connection node.
                    PoolItem item = new PoolItem(xc);
                    if (SupressErrors)
                    {
                        item.Enabled = false;
                        item.Description = "";
                    }
                    AddNode(item);
                }
                Pool pool = Helpers.GetPoolOfOne(xc);
                if (pool != null)
                {
                    pool.PropertyChanged -= PropertyChanged;
                    pool.PropertyChanged += PropertyChanged;
                }
                xc.ConnectionStateChanged -= xc_ConnectionStateChanged;
                xc.ConnectionStateChanged += xc_ConnectionStateChanged;
                xc.CachePopulated -= xc_CachePopulated;
                xc.CachePopulated += xc_CachePopulated;
                xc.Cache.RegisterCollectionChanged<Host>(CollectionChangedWithInvoke);
            }
        }
        finally
        {
            EndUpdate();
            // Restore the previous selection, or tell listeners nothing is selected.
            if (selectedhost != null)
                SelectHost(selectedhost);
            else if (selectedconnection != null)
                SelectConnection(selectedconnection);
            else if (SelectedItemChanged != null)
                SelectedItemChanged(null, new SelectedItemEventArgs(false));
        }
    }

    void xc_CachePopulated(IXenConnection conn)
    {
        Program.Invoke(this, buildList);
    }

    void xc_ConnectionStateChanged(IXenConnection conn)
    {
        Program.Invoke(this, buildList);
    }

    // Detaches every handler registered by buildList. Not called from within
    // this class; presumably invoked on teardown by the owner -- confirm.
    private void UnregisterHandlers()
    {
        ConnectionsManager.XenConnections.CollectionChanged -= CollectionChanged;
        foreach (IXenConnection xc in ConnectionsManager.XenConnectionsCopy)
        {
            Pool pool = Helpers.GetPoolOfOne(xc);
            if (pool != null)
                pool.PropertyChanged -= PropertyChanged;
            foreach (Host host in xc.Cache.Hosts)
                host.PropertyChanged -= PropertyChanged;
            xc.ConnectionStateChanged -= xc_ConnectionStateChanged;
            xc.CachePopulated -= xc_CachePopulated;
            xc.Cache.DeregisterCollectionChanged<Host>(CollectionChangedWithInvoke);
        }
    }

    // Last valid selection; used to roll back selection of disabled items.
    private CustomTreeNode lastSelected;

    /// <summary>When false, pool nodes cannot be selected (host selection only).</summary>
    public bool AllowPoolSelect = true;

    // Rejects selection of disabled items (and pools when AllowPoolSelect is
    // false) by reverting to the previous selection, then notifies listeners.
    protected override void OnSelectedIndexChanged(EventArgs e)
    {
        if (SelectedItem is CustomTreeNode)
        {
            CustomTreeNode item = SelectedItem as CustomTreeNode;
            if (!item.Enabled)
            {
                SelectedItem = lastSelected;
            }
            if (!AllowPoolSelect && item is PoolItem)
            {
                SelectedItem = lastSelected;
            }
        }
        lastSelected = SelectedItem as CustomTreeNode;
        base.OnSelectedIndexChanged(e);
        if(SelectedItemChanged != null)
            SelectedItemChanged(null,new SelectedItemEventArgs((SelectedItem is PoolItem || SelectedItem is HostItem) && (SelectedItem as CustomTreeNode).Enabled));
    }

    // Moves the selection to the next enabled node in the given direction.
    // Returns true if such a node existed.
    private bool SelectNextEnabledNode(CustomTreeNode currentNode, bool searchForward)
    {
        CustomTreeNode nextEnabledNode = GetNextEnabledNode(currentNode, searchForward);
        if (nextEnabledNode != null)
        {
            SelectedItem = nextEnabledNode;
            return true;
        }
        return false;
    }

    // Up/Down arrows skip over disabled nodes.
    protected override void OnKeyDown(KeyEventArgs e)
    {
        var node = SelectedItem as CustomTreeNode;
        if (node != null)
        {
            switch (e.KeyCode)
            {
                case Keys.Down:
                {
                    e.Handled = SelectNextEnabledNode(node, true);
                    break;
                }
                case Keys.Up:
                {
                    e.Handled = SelectNextEnabledNode(node, false);
                    break;
                }
            }
        }
        base.OnKeyDown(e);
    }

    /// <summary>The selected pool's connection, or null if no enabled pool node is selected.</summary>
    public IXenConnection ChosenConnection
    {
        get
        {
            if (SelectedItem == null || SelectedItem is HostItem || !(SelectedItem as CustomTreeNode).Enabled)
                return null;
            return (SelectedItem as PoolItem).Connection;
        }
    }

    /// <summary>The selected host, or null if no enabled host node is selected.</summary>
    public Host ChosenHost
    {
        get
        {
            if (SelectedItem == null || SelectedItem is PoolItem || !(SelectedItem as CustomTreeNode).Enabled)
                return null;
            return (SelectedItem as HostItem).TheHost;
        }
    }

    /// <summary>
    /// Selects the node for the given host, searching both top-level entries
    /// and pool children. If not found, re-fires the selection-changed logic.
    /// </summary>
    public void SelectHost(Host host)
    {
        if (host == null)
        {
            return;
        }
        foreach (CustomTreeNode item in Items)
        {
            if (TryToSelectHost(item, host))
                return;
            if (item is PoolItem)
            {
                foreach (CustomTreeNode childItem in item.ChildNodes)
                {
                    if (TryToSelectHost(childItem, host))
                        return;
                }
            }
        }
        // Host not found: notify listeners based on the current selection.
        OnSelectedIndexChanged(null);
    }

    /// <summary>
    /// Tries to select the node if it is a host item. If it is a host item, but is disabled, selects its parent instead.
    /// </summary>
    /// <param name="item">candidate tree node</param>
    /// <param name="host">host being looked for</param>
    /// <returns>True if successful</returns>
    private bool TryToSelectHost(CustomTreeNode item, Host host)
    {
        if (item is HostItem)
        {
            HostItem hostitem = item as HostItem;
            if (hostitem.TheHost.opaque_ref == host.opaque_ref)
            {
                if (hostitem.Enabled)
                {
                    SelectedItem = hostitem;
                    return true;
                }
                else if (hostitem.ParentNode is PoolItem)
                {
                    // Host disabled: fall back to selecting its pool.
                    SelectConnection(host.Connection);
                    return true;
                }
            }
        }
        return false;
    }

    /// <summary>Selects the pool node for the given connection, if present.</summary>
    public void SelectConnection(IXenConnection xenConnection)
    {
        foreach (CustomTreeNode item in Items)
        {
            if (item is PoolItem)
            {
                PoolItem poolitem = item as PoolItem;
                if (poolitem.Connection.Equals(xenConnection))
                {
                    SelectedItem = poolitem;
                    return;
                }
            }
        }
        // Connection not found: notify listeners based on the current selection.
        OnSelectedIndexChanged(null);
    }

    /// <summary>Selects the first enabled (and selectable) node in the list, if any.</summary>
    internal void SelectFirstThing()
    {
        for (int i = 0; i < Items.Count; i++)
        {
            if (Items[i] is CustomTreeNode && (Items[i] as CustomTreeNode).Enabled)
            {
                if (!AllowPoolSelect && Items[i] is PoolItem)
                    continue;
                SelectedIndex = i;
                return;
            }
        }
    }
}
/// <summary>Tree node representing a pool (or a whole connection) in the picker.</summary>
public class PoolItem : CustomTreeNode
{
    public IXenConnection Connection;

    public PoolItem(IXenConnection xc)
    {
        Connection = xc;
        Update();
    }

    /// <summary>Refreshes icon, caption, enabled state and description from the connection.</summary>
    public void Update()
    {
        Image = Images.GetImage16For(Connection);
        Text = Helpers.GetName(Connection);

        // Selectable only when connected and either standalone or backed by
        // fully connected shared storage.
        Enabled = Connection.IsConnected && (Helpers.GetPool(Connection) == null || Helpers.HasFullyConnectedSharedStorage(Connection));

        if (Enabled)
            Description = "";
        else
            Description = !Connection.IsConnected ? Messages.DISCONNECTED : Messages.POOL_HAS_NO_SHARED_STORAGE;
    }

    /// <summary>Enabled nodes sort before disabled ones; otherwise defer to the base ordering.</summary>
    protected override int SameLevelSortOrder(CustomTreeNode other)
    {
        if (Enabled != other.Enabled)
            return Enabled ? -1 : 1;
        return base.SameLevelSortOrder(other);
    }
}
/// <summary>Tree node representing a single host in the picker.</summary>
public class HostItem : CustomTreeNode
{
    public Host TheHost;

    public HostItem(Host host)
    {
        TheHost = host;
        Update();
    }

    /// <summary>Refreshes icon, caption, enabled state and description from the host.</summary>
    public void Update()
    {
        Image = Images.GetImage16For(TheHost);
        Text = Helpers.GetName(TheHost);

        bool live = TheHost.IsLive();
        // Selectable only when live and able to host new VMs' disks.
        Enabled = live && CanCreateVMsWithAffinityTo(TheHost);

        if (Enabled)
            Description = "";
        else
            Description = live ? Messages.HOST_SEES_NO_STORAGE : Messages.HOST_NOT_LIVE;
    }

    /// <summary>Enabled nodes first; two enabled hosts compare by host order; otherwise base ordering.</summary>
    protected override int SameLevelSortOrder(CustomTreeNode other)
    {
        if (Enabled != other.Enabled)
            return Enabled ? -1 : 1;

        var otherHostItem = other as HostItem;
        if (Enabled && otherHostItem != null)
            return TheHost.CompareTo(otherHostItem.TheHost);

        return base.SameLevelSortOrder(other);
    }

    /// <summary>
    /// True when VMs can be created with affinity to the host: either the
    /// connection has fully connected shared storage, or some SR visible from
    /// the host can take a new VDI and is neither broken nor full.
    /// </summary>
    private static bool CanCreateVMsWithAffinityTo(Host TheHost)
    {
        if (Helpers.HasFullyConnectedSharedStorage(TheHost.Connection))
            return true;

        foreach (SR sr in TheHost.Connection.Cache.SRs)
        {
            if (sr.CanBeSeenFrom(TheHost) && sr.SupportsVdiCreate() && !sr.IsBroken(false) && !sr.IsFull())
                return true;
        }
        return false;
    }
}
/// <summary>
/// Event data for <see cref="PoolHostPicker.SelectedItemChanged"/>:
/// whether an enabled pool/host item is currently selected.
/// </summary>
public class SelectedItemEventArgs : EventArgs
{
    /// <summary>True when an enabled pool or host item is selected.</summary>
    public bool SomethingSelected;

    public SelectedItemEventArgs(bool notnull)
    {
        SomethingSelected = notnull;
    }
}
}
| xenserver/xenadmin | XenAdmin/Controls/PoolHostPicker.cs | C# | bsd-2-clause | 16,719 |
<?php
require_once 'Kwf/Exception/NoLog.php';
/**
 * Application exception that, unlike Kwf_Exception_NoLog, is logged and
 * reported when it occurs in production.
 */
class Kwf_Exception extends Kwf_Exception_NoLog
{
    /**
     * Notifies the developer about this exception.
     *
     * In production the exception is only logged. In debug mode it is echoed
     * as a warning on the CLI, or forwarded to FirePHP when the client
     * extension is detected.
     */
    public function notify()
    {
        if ($this->log()) {
            return;
        }
        if (php_sapi_name() == 'cli') {
            echo 'WARNING: '.$this->getMessage()."\n";
        } else if (
            Zend_Registry::get('config')->debug->firephp &&
            class_exists('FirePHP') &&
            FirePHP::getInstance() &&
            FirePHP::getInstance()->detectClientExtension()
        ) {
            p($this->getMessage(), 'WARNING');
        }
    }

    /**
     * Production: only writes the exception to the log.
     * Local/debug: throws the exception.
     */
    public function logOrThrow()
    {
        if ($this->log()) {
            return;
        }
        throw $this;
    }

    /**
     * Logs the exception unless debugging is enabled.
     *
     * @return bool true if the exception was handed to the logger
     */
    public function log()
    {
        if (Kwf_Exception::isDebug()) {
            return false;
        }
        $body = $this->_getLogBody();
        return Kwf_Exception_Logger_Abstract::getInstance()->log($this, 'error', $body);
    }

    /**
     * Builds the plain-text report body: exception details, request URIs,
     * the authenticated user, timestamp, and the PHP superglobals.
     */
    protected function _getLogBody()
    {
        $user = "guest";
        try {
            if ($u = Zend_Registry::get('userModel')->getAuthedUser()) {
                $userName = $u->__toString();
                $user = "$userName, id $u->id, $u->role";
            }
        } catch (Exception $e) {
            // Never let user lookup break error reporting itself.
            $user = "error getting user";
        }
        $exception = $this->getException();
        $body = '';
        $body .= $this->_format('Exception', get_class($exception));
        $body .= $this->_format('Thrown', $exception->getFile().':'.$exception->getLine());
        $body .= $this->_format('Message', $exception->getMessage());
        $body .= $this->_format('ExceptionDetail', $exception->__toString());
        $body .= $this->_format('REQUEST_URI', isset($_SERVER['REQUEST_URI']) ? $_SERVER['REQUEST_URI'] : '(none)');
        $body .= $this->_format('HTTP_REFERER', isset($_SERVER['HTTP_REFERER']) ? $_SERVER['HTTP_REFERER'] : '(none)');
        $body .= $this->_format('User', $user);
        $body .= $this->_format('Time', date('H:i:s'));
        if (isset($_SERVER['HTTP_USER_AGENT'])) {
            $body .= $this->_format('_USERAGENT', $_SERVER['HTTP_USER_AGENT']);
        }
        $body .= $this->_format('_GET', print_r($_GET, true));
        $body .= $this->_format('_POST', print_r($_POST, true));
        $body .= $this->_format('_SERVER', print_r($_SERVER, true));
        $body .= $this->_format('_FILES', print_r($_FILES, true));
        if (isset($_SESSION)) {
            $body .= $this->_format('_SESSION', print_r($_SESSION, true));
        }
        return $body;
    }
}
| mike-tuxedo/koala-framework | Kwf/Exception.php | PHP | bsd-2-clause | 2,738 |
package org.darkstorm.minecraft.gui.listener;
import org.darkstorm.minecraft.gui.component.Slider;
/**
 * Listener notified when a {@link Slider}'s value changes.
 */
public interface SliderListener extends ComponentListener {
    /**
     * Called after the given slider's value has changed.
     *
     * @param slider the slider whose value changed
     */
    public void onSliderValueChanged(Slider slider);
}
| DarkStorm652/Minecraft-GUI-API | src/org/darkstorm/minecraft/gui/listener/SliderListener.java | Java | bsd-2-clause | 220 |
# Homebrew-Cask descriptor for the SbrowserQ application.
cask 'sbrowserq' do
  version '3.6.3'
  sha256 '1e59a6017a31d6da4b6eb2616dff4e86f9c0f4b932e154c09b1595a0585d167b'

  # The download URL embeds only the major.minor part of the version (e.g. V3.6).
  url "https://www.sbrowser-q.com/SbrowserQ_V#{version.major_minor}_mac.dmg"
  appcast 'https://www.sbrowser-q.com/',
          configuration: version.major_minor
  name 'SbrowserQ'
  homepage 'https://www.sbrowser-q.com/'

  app 'SbrowserQ.app'
end
| winkelsdorf/homebrew-cask | Casks/sbrowserq.rb | Ruby | bsd-2-clause | 365 |
#include "muduo/net/TcpServer.h"
#include "muduo/base/Atomic.h"
#include "muduo/base/FileUtil.h"
#include "muduo/base/Logging.h"
#include "muduo/base/ProcessInfo.h"
#include "muduo/base/Thread.h"
#include "muduo/net/EventLoop.h"
#include "muduo/net/InetAddress.h"
#include <utility>
#include <stdio.h>
#include <unistd.h>
using namespace muduo;
using namespace muduo::net;
int numThreads = 0;
// Benchmark echo server: echoes every byte back to the client and prints
// throughput plus process/system memory statistics every 5 seconds.
class EchoServer
{
 public:
  EchoServer(EventLoop* loop, const InetAddress& listenAddr)
    : server_(loop, listenAddr, "EchoServer"),
      startTime_(Timestamp::now())
  {
    server_.setConnectionCallback(
        std::bind(&EchoServer::onConnection, this, _1));
    server_.setMessageCallback(
        std::bind(&EchoServer::onMessage, this, _1, _2, _3));
    server_.setThreadNum(numThreads);
    // Periodic statistics dump.
    loop->runEvery(5.0, std::bind(&EchoServer::printThroughput, this));
  }

  void start()
  {
    LOG_INFO << "starting " << numThreads << " threads.";
    server_.start();
  }

 private:
  // Maintains the open-connection count and disables Nagle for latency.
  void onConnection(const TcpConnectionPtr& conn)
  {
    LOG_TRACE << conn->peerAddress().toIpPort() << " -> "
              << conn->localAddress().toIpPort() << " is "
              << (conn->connected() ? "UP" : "DOWN");
    conn->setTcpNoDelay(true);
    if (conn->connected())
    {
      connections_.increment();
    }
    else
    {
      connections_.decrement();
    }
  }

  // Echoes the received buffer back and updates byte/message counters.
  void onMessage(const TcpConnectionPtr& conn, Buffer* buf, Timestamp)
  {
    size_t len = buf->readableBytes();
    transferredBytes_.addAndGet(len);
    receivedMessages_.incrementAndGet();
    conn->send(buf);
  }

  // Prints throughput since the previous call (counters are read-and-reset),
  // followed by connection/memory statistics.
  void printThroughput()
  {
    Timestamp endTime = Timestamp::now();
    double bytes = static_cast<double>(transferredBytes_.getAndSet(0));
    int msgs = receivedMessages_.getAndSet(0);
    double bytesPerMsg = msgs > 0 ? bytes/msgs : 0;
    double time = timeDifference(endTime, startTime_);
    printf("%.3f MiB/s %.2f Kilo Msgs/s %.2f bytes per msg, ",
           bytes/time/1024/1024,
           static_cast<double>(msgs)/time/1000,
           bytesPerMsg);
    printConnection();
    fflush(stdout);
    startTime_ = endTime;
  }

  // Prints connection count, per-process memory (VmSize/VmRSS parsed from
  // ProcessInfo::procStatus()) and system memory usage from /proc/meminfo.
  void printConnection()
  {
    string procStatus = ProcessInfo::procStatus();
    printf("%d conn, files %d , VmSize %ld KiB, RSS %ld KiB, ",
           connections_.get(),
           ProcessInfo::openedFiles(),
           getLong(procStatus, "VmSize:"),
           getLong(procStatus, "VmRSS:"));
    string meminfo;
    FileUtil::readFile("/proc/meminfo", 65536, &meminfo);
    long total_kb = getLong(meminfo, "MemTotal:");
    long free_kb = getLong(meminfo, "MemFree:");
    long buffers_kb = getLong(meminfo, "Buffers:");
    long cached_kb = getLong(meminfo, "Cached:");
    printf("system memory used %ld KiB\n",
           total_kb - free_kb - buffers_kb - cached_kb);
  }

  // Returns the integer immediately following |key| in |procStatus|, 0 if absent.
  long getLong(const string& procStatus, const char* key)
  {
    long result = 0;
    size_t pos = procStatus.find(key);
    if (pos != string::npos)
    {
      result = ::atol(procStatus.c_str() + pos + strlen(key));
    }
    return result;
  }

  TcpServer server_;
  AtomicInt32 connections_;       // currently open connections
  AtomicInt32 receivedMessages_;  // messages since last stats dump
  AtomicInt64 transferredBytes_;  // bytes since last stats dump
  Timestamp startTime_;           // start of the current stats interval
};
// Usage: server [num_io_threads]. Listens on port 2007.
int main(int argc, char* argv[])
{
  LOG_INFO << "pid = " << getpid()
           << ", tid = " << CurrentThread::tid()
           << ", max files = " << ProcessInfo::maxOpenFiles();
  // Keep per-connection logging quiet so it does not skew the benchmark.
  Logger::setLogLevel(Logger::WARN);
  if (argc > 1)
  {
    numThreads = atoi(argv[1]);
  }
  EventLoop loop;
  InetAddress listenAddr(2007);
  EchoServer server(&loop, listenAddr);
  server.start();
  loop.loop();  // runs forever
}
| westfly/muduo | examples/netty/echo/server2.cc | C++ | bsd-3-clause | 3,595 |
from __future__ import absolute_import, print_function
import logging
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from sentry.pipeline import Pipeline
from sentry.models import Identity, IdentityStatus, IdentityProvider
from . import default_manager
# Success message shown to the user after their external account is linked.
IDENTITY_LINKED = _("Your {identity_provider} account has been associated with your Sentry account")

logger = logging.getLogger('sentry.identity')
class IdentityProviderPipeline(Pipeline):
    """Pipeline that runs an identity provider's auth flow and, on success,
    persists the resulting external identity for the requesting user."""
    logger = logger
    pipeline_name = 'identity_provider'
    provider_manager = default_manager
    provider_model_cls = IdentityProvider

    def redirect_url(self):
        """Return the callback URL the provider should redirect back to."""
        associate_url = reverse('sentry-extension-setup', kwargs={
            # TODO(adhiraj): Remove provider_id from the callback URL, it's unused.
            'provider_id': 'default',
        })
        # Use configured redirect_url if specified for the pipeline if available
        return self.config.get('redirect_url', associate_url)

    def finish_pipeline(self):
        """Create or update the Identity row, notify the user, and redirect
        to account settings."""
        identity = self.provider.build_identity(self.state.data)

        defaults = {
            'status': IdentityStatus.VALID,
            'scopes': identity.get('scopes', []),
            'data': identity.get('data', {}),
            'date_verified': timezone.now(),
        }
        identity, created = Identity.objects.get_or_create(
            idp=self.provider_model,
            user=self.request.user,
            external_id=identity['id'],
            defaults=defaults,
        )
        # Refresh an existing identity with the latest scopes/data/timestamp.
        if not created:
            identity.update(**defaults)

        messages.add_message(self.request, messages.SUCCESS, IDENTITY_LINKED.format(
            identity_provider=self.provider.name,
        ))

        self.state.clear()

        # TODO(epurkhiser): When we have more identities and have built out an
        # identity management page that supports these new identities (not
        # social-auth ones), redirect to the identities page.
        return HttpResponseRedirect(reverse('sentry-account-settings'))
| ifduyue/sentry | src/sentry/identity/pipeline.py | Python | bsd-3-clause | 2,192 |
// Include-guarded forwarding header: exposes nt2's gels function (scalar
// variant) by pulling in the generic declaration header.
#ifndef NT2_LINALG_INCLUDE_FUNCTIONS_SCALAR_GELS_HPP_INCLUDED
#define NT2_LINALG_INCLUDE_FUNCTIONS_SCALAR_GELS_HPP_INCLUDED

#include <nt2/linalg/functions/gels.hpp>

#endif
| hainm/pythran | third_party/nt2/linalg/include/functions/scalar/gels.hpp | C++ | bsd-3-clause | 174 |
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
'use strict';
/* global createProcessedMediaStreamTrack */ // defined in main.js
/**
 * Wrapper around createProcessedMediaStreamTrack to apply transform to a
 * MediaStream.
 * @param {!MediaStream} sourceStream the video stream to be transformed. The
 *     first video track will be used.
 * @param {!FrameTransformFn} transform the transform to apply to the
 *     sourceStream.
 * @param {!AbortSignal} signal can be used to stop processing
 * @return {!MediaStream} holds a single video track of the transformed video
 *     frames
 */
function createProcessedMediaStream(sourceStream, transform, signal) {
  // Only the first video track of the source stream is processed.
  const [sourceTrack] = sourceStream.getVideoTracks();
  const processedTrack =
      createProcessedMediaStreamTrack(sourceTrack, transform, signal);
  // Hand the processed track back inside a fresh MediaStream.
  return new MediaStream([processedTrack]);
}
/**
 * Interface implemented by all video sources the user can select. A common
 * interface allows the user to choose a source independently of the transform
 * and sink. Implementations supply frames; they do not transform them.
 * @interface
 */
class MediaStreamSource { // eslint-disable-line no-unused-vars
  /**
   * Sets the path to this object from the debug global var.
   * @param {string} path e.g. 'debug.pipeline.source_'
   */
  setDebugPath(path) {}
  /**
   * Indicates if the source video should be mirrored/displayed on the page. If
   * false (the default), any element producing frames will not be a child of
   * the document.
   * @param {boolean} visible whether to add the raw source video to the page
   */
  setVisibility(visible) {}
  /**
   * Initializes and returns the MediaStream for this source.
   * @return {!Promise<!MediaStream>}
   */
  async getMediaStream() {}
  /** Frees any resources used by this object. */
  destroy() {}
}
/**
 * Interface implemented by all video transforms that the user can select. A
 * common interface allows the user to choose a transform independently of the
 * source and sink.
 * @interface
 */
class FrameTransform { // eslint-disable-line no-unused-vars
  /** Initializes state that is reused across frames; called before the first transform(). */
  async init() {}
  /**
   * Applies the transform to frame. Queues the output frame (if any) using the
   * controller. Implementations are responsible for closing/consuming frame.
   * @param {!VideoFrame} frame the input frame
   * @param {!TransformStreamDefaultController<!VideoFrame>} controller
   */
  async transform(frame, controller) {}
  /** Frees any resources used by this object. */
  destroy() {}
}
/**
 * Interface implemented by all video sinks that the user can select. A common
 * interface allows the user to choose a sink independently of the source and
 * transform. A sink consumes the processed stream (e.g. renders it).
 * @interface
 */
class MediaStreamSink { // eslint-disable-line no-unused-vars
  /**
   * Attaches the (processed) stream this sink should consume.
   * @param {!MediaStream} stream
   */
  async setMediaStream(stream) {}
  /** Frees any resources used by this object. */
  destroy() {}
}
/**
 * Assembles a MediaStreamSource, FrameTransform, and MediaStreamSink together.
 * Processing only starts once all three pieces have been provided; replacing
 * the source tears the current processing graph down and rebuilds it.
 */
class Pipeline { // eslint-disable-line no-unused-vars
  constructor() {
    /** @private {?MediaStreamSource} set by updateSource*/
    this.source_ = null;
    /** @private {?FrameTransform} set by updateTransform */
    this.frameTransform_ = null;
    /** @private {?MediaStreamSink} set by updateSink */
    this.sink_ = null;
    /** @private {!AbortController} may used to stop all processing */
    this.abortController_ = new AbortController();
    /**
     * @private {?MediaStream} set in maybeStartPipeline_ after all of source_,
     *     frameTransform_, and sink_ are set
     */
    this.processedStream_ = null;
  }

  /** @return {?MediaStreamSource} */
  getSource() {
    return this.source_;
  }

  /**
   * Sets a new source for the pipeline. Any previously running processing is
   * aborted and its source destroyed before the new one is installed.
   * @param {!MediaStreamSource} mediaStreamSource
   */
  async updateSource(mediaStreamSource) {
    if (this.source_) {
      // Abort the old processing graph and start with a fresh controller.
      this.abortController_.abort();
      this.abortController_ = new AbortController();
      this.source_.destroy();
      this.processedStream_ = null;
    }
    this.source_ = mediaStreamSource;
    this.source_.setDebugPath('debug.pipeline.source_');
    console.log(
        '[Pipeline] Updated source.',
        'debug.pipeline.source_ = ', this.source_);
    await this.maybeStartPipeline_();
  }

  /**
   * Starts processing, but only once source, transform and sink are all set
   * and the pipeline is not already running.
   * @private
   */
  async maybeStartPipeline_() {
    if (this.processedStream_ || !this.source_ || !this.frameTransform_ ||
        !this.sink_) {
      return;
    }
    const sourceStream = await this.source_.getMediaStream();
    await this.frameTransform_.init();
    try {
      this.processedStream_ = createProcessedMediaStream(
          sourceStream, async (frame, controller) => {
            // frameTransform_ may have been swapped or cleared meanwhile.
            if (this.frameTransform_) {
              await this.frameTransform_.transform(frame, controller);
            }
          }, this.abortController_.signal);
    } catch (e) {
      this.destroy();
      return;
    }
    await this.sink_.setMediaStream(this.processedStream_);
    console.log(
        '[Pipeline] Pipeline started.',
        'debug.pipeline.abortController_ =', this.abortController_);
  }

  /**
   * Sets a new transform for the pipeline. Takes effect immediately for
   * subsequent frames if the pipeline is already running.
   * @param {!FrameTransform} frameTransform
   */
  async updateTransform(frameTransform) {
    if (this.frameTransform_) this.frameTransform_.destroy();
    this.frameTransform_ = frameTransform;
    console.log(
        '[Pipeline] Updated frame transform.',
        'debug.pipeline.frameTransform_ = ', this.frameTransform_);
    if (this.processedStream_) {
      await this.frameTransform_.init();
    } else {
      await this.maybeStartPipeline_();
    }
  }

  /**
   * Sets a new sink for the pipeline. The old sink (if any) is destroyed.
   * @param {!MediaStreamSink} mediaStreamSink
   */
  async updateSink(mediaStreamSink) {
    if (this.sink_) this.sink_.destroy();
    this.sink_ = mediaStreamSink;
    console.log(
        '[Pipeline] Updated sink.', 'debug.pipeline.sink_ = ', this.sink_);
    if (this.processedStream_) {
      await this.sink_.setMediaStream(this.processedStream_);
    } else {
      await this.maybeStartPipeline_();
    }
  }

  /** Frees any resources used by this object. */
  destroy() {
    console.log('[Pipeline] Destroying Pipeline');
    this.abortController_.abort();
    if (this.source_) this.source_.destroy();
    if (this.frameTransform_) this.frameTransform_.destroy();
    if (this.sink_) this.sink_.destroy();
  }
}
| webrtc/samples | src/content/insertable-streams/video-processing/js/pipeline.js | JavaScript | bsd-3-clause | 6,704 |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
__all__ = ['quantity_input']
import inspect
from astropy.utils.decorators import wraps
from astropy.utils.misc import isiterable
from .core import Unit, UnitBase, UnitsError, add_enabled_equivalencies
from .physical import _unit_physical_mapping
def _get_allowed_units(targets):
    """
    Convert a list of target units (strings or unit objects) and physical
    types into a list of Unit objects.

    Raises ``ValueError`` for a target that is neither a valid unit nor a
    known physical type name.
    """
    def _resolve(target):
        # First interpretation: the target names (or is) a unit.
        try:
            return Unit(target)
        except ValueError:
            pass
        # Second interpretation: the target names a physical type.
        try:
            physical_type_id = _unit_physical_mapping[target]
        except KeyError:  # Function argument target is invalid
            raise ValueError("Invalid unit or physical type '{}'."
                             .format(target))
        # get unit directly from physical type id
        return Unit._from_physical_type_id(physical_type_id)

    return [_resolve(target) for target in targets]
def _validate_arg_value(param_name, func_name, arg, targets, equivalencies):
    """
    Validates the object passed in to the wrapped function, ``arg``, with target
    unit or physical type, ``target``.

    Raises ``TypeError`` if ``arg`` has no usable ``unit`` attribute, and
    ``UnitsError`` if its unit is not convertible to any of ``targets``.
    An empty ``targets`` list means "no constraint" and returns immediately.
    """
    if len(targets) == 0:
        return

    allowed_units = _get_allowed_units(targets)

    # for/else: the else-branch raises only if no allowed unit matched (no break).
    for allowed_unit in allowed_units:
        try:
            is_equivalent = arg.unit.is_equivalent(allowed_unit,
                                                   equivalencies=equivalencies)

            if is_equivalent:
                break

        except AttributeError:  # Either there is no .unit or no .is_equivalent
            if hasattr(arg, "unit"):
                error_msg = "a 'unit' attribute without an 'is_equivalent' method"
            else:
                error_msg = "no 'unit' attribute"

            raise TypeError("Argument '{}' to function '{}' has {}. "
                            "You may want to pass in an astropy Quantity instead."
                            .format(param_name, func_name, error_msg))

    else:
        # No target matched: report all acceptable units (plural message when
        # more than one target was given).
        if len(targets) > 1:
            raise UnitsError("Argument '{}' to function '{}' must be in units"
                             " convertible to one of: {}."
                             .format(param_name, func_name,
                                     [str(targ) for targ in targets]))
        else:
            raise UnitsError("Argument '{}' to function '{}' must be in units"
                             " convertible to '{}'."
                             .format(param_name, func_name,
                                     str(targets[0])))
class QuantityInput:
    """Implementation behind the :func:`quantity_input` decorator.

    Stores the per-parameter unit constraints and wraps the decorated
    function with argument validation and optional return-value conversion.
    """

    @classmethod
    def as_decorator(cls, func=None, **kwargs):
        r"""
        A decorator for validating the units of arguments to functions.

        Unit specifications can be provided as keyword arguments to the decorator,
        or by using function annotation syntax. Arguments to the decorator
        take precedence over any function annotations present.

        A `~astropy.units.UnitsError` will be raised if the unit attribute of
        the argument is not equivalent to the unit specified to the decorator
        or in the annotation.

        If the argument has no unit attribute, i.e. it is not a Quantity object, a
        `ValueError` will be raised unless the argument is an annotation. This is to
        allow non Quantity annotations to pass through.

        Where an equivalency is specified in the decorator, the function will be
        executed with that equivalency in force.

        Notes
        -----
        The checking of arguments inside variable arguments to a function is not
        supported (i.e. \*arg or \**kwargs).

        Examples
        --------
        .. code-block:: python

            import astropy.units as u
            @u.quantity_input(myangle=u.arcsec)
            def myfunction(myangle):
                return myangle**2

        .. code-block:: python

            import astropy.units as u
            @u.quantity_input
            def myfunction(myangle: u.arcsec):
                return myangle**2

        Also you can specify a return value annotation, which will
        cause the function to always return a `~astropy.units.Quantity` in that
        unit.

        .. code-block:: python

            import astropy.units as u
            @u.quantity_input
            def myfunction(myangle: u.arcsec) -> u.deg**2:
                return myangle**2

        Using equivalencies::

            import astropy.units as u
            @u.quantity_input(myenergy=u.eV, equivalencies=u.mass_energy())
            def myfunction(myenergy):
                return myenergy**2

        """
        self = cls(**kwargs)
        # Bare usage (@quantity_input): decorate immediately.
        # Parametrized usage (@quantity_input(...)): return the decorator.
        if func is not None and not kwargs:
            return self(func)
        else:
            return self

    def __init__(self, func=None, **kwargs):
        # 'equivalencies' is pulled out; all remaining kwargs map parameter
        # names to their target units/physical types.
        self.equivalencies = kwargs.pop('equivalencies', [])
        self.decorator_kwargs = kwargs

    def __call__(self, wrapped_function):
        """Wrap ``wrapped_function`` with unit validation of its arguments."""

        # Extract the function signature for the function we are wrapping.
        wrapped_signature = inspect.signature(wrapped_function)

        # Define a new function to return in place of the wrapped one
        @wraps(wrapped_function)
        def wrapper(*func_args, **func_kwargs):
            # Bind the arguments to our new function to the signature of the original.
            bound_args = wrapped_signature.bind(*func_args, **func_kwargs)

            # Iterate through the parameters of the original signature
            for param in wrapped_signature.parameters.values():
                # We do not support variable arguments (*args, **kwargs)
                if param.kind in (inspect.Parameter.VAR_KEYWORD,
                                  inspect.Parameter.VAR_POSITIONAL):
                    continue

                # Catch the (never triggered) case where bind relied on a default value.
                if param.name not in bound_args.arguments and param.default is not param.empty:
                    bound_args.arguments[param.name] = param.default

                # Get the value of this parameter (argument to new function)
                arg = bound_args.arguments[param.name]

                # Get target unit or physical type, either from decorator kwargs
                #   or annotations
                if param.name in self.decorator_kwargs:
                    targets = self.decorator_kwargs[param.name]
                    is_annotation = False
                else:
                    targets = param.annotation
                    is_annotation = True

                # If the targets is empty, then no target units or physical
                #   types were specified so we can continue to the next arg
                if targets is inspect.Parameter.empty:
                    continue

                # If the argument value is None, and the default value is None,
                #   pass through the None even if there is a target unit
                if arg is None and param.default is None:
                    continue

                # Here, we check whether multiple target unit/physical type's
                #   were specified in the decorator/annotation, or whether a
                #   single string (unit or physical type) or a Unit object was
                #   specified
                if isinstance(targets, str) or not isiterable(targets):
                    valid_targets = [targets]

                # Check for None in the supplied list of allowed units and, if
                #   present and the passed value is also None, ignore.
                elif None in targets:
                    if arg is None:
                        continue
                    else:
                        valid_targets = [t for t in targets if t is not None]

                else:
                    valid_targets = targets

                # If we're dealing with an annotation, skip all the targets that
                #    are not strings or subclasses of Unit. This is to allow
                #    non unit related annotations to pass through
                if is_annotation:
                    valid_targets = [t for t in valid_targets if isinstance(t, (str, UnitBase))]

                # Now we loop over the allowed units/physical types and validate
                #   the value of the argument:
                _validate_arg_value(param.name, wrapped_function.__name__,
                                    arg, valid_targets, self.equivalencies)

            # Call the original function with any equivalencies in force.
            with add_enabled_equivalencies(self.equivalencies):
                return_ = wrapped_function(*func_args, **func_kwargs)

            # A return annotation forces conversion of the result to that unit.
            if wrapped_signature.return_annotation not in (inspect.Signature.empty, None):
                return return_.to(wrapped_signature.return_annotation)
            else:
                return return_

        return wrapper
# Public entry point; supports both bare (@quantity_input) and parametrized
# (@quantity_input(x=u.m)) usage.
quantity_input = QuantityInput.as_decorator
| stargaser/astropy | astropy/units/decorators.py | Python | bsd-3-clause | 9,242 |
<?php

use yii\helpers\Html;

/* View for editing an existing Position record; renders the shared _form partial. */
/* @var $this yii\web\View */
/* @var $model frontend\models\Position */

// NOTE(review): concatenating ' ' after 'Update Position: ' yields a double
// space in the title; presumably a leftover from the Gii template -- confirm.
$this->title = 'Update Position: ' . ' ' . $model->name;
// $this->params['breadcrumbs'][] = ['label' => 'Recognitions', 'url' => ['/recognition/index']];
$this->params['breadcrumbs'][] = ['label' => 'Positions', 'url' => ['index']];
$this->params['breadcrumbs'][] = ['label' => $model->name, 'url' => ['view', 'id' => $model->id]];
$this->params['breadcrumbs'][] = 'Update';
?>
<div class="position-update">

    <h1><?= Html::encode($this->title) ?></h1>

    <?= $this->render('_form', [
        'model' => $model,
    ]) ?>

</div>
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/modules/service_worker/service_worker_window_client.h"
#include "base/memory/scoped_refptr.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/renderer/bindings/core/v8/callback_promise_adapter.h"
#include "third_party/blink/renderer/bindings/core/v8/script_promise_resolver.h"
#include "third_party/blink/renderer/core/dom/dom_exception.h"
#include "third_party/blink/renderer/core/execution_context/execution_context.h"
#include "third_party/blink/renderer/core/messaging/message_port.h"
#include "third_party/blink/renderer/core/page/page_hidden_state.h"
#include "third_party/blink/renderer/core/workers/worker_location.h"
#include "third_party/blink/renderer/modules/service_worker/service_worker_error.h"
#include "third_party/blink/renderer/modules/service_worker/service_worker_global_scope.h"
#include "third_party/blink/renderer/platform/bindings/v8_throw_exception.h"
#include "third_party/blink/renderer/platform/heap/heap.h"
namespace blink {
namespace {
// Completion handler for WindowClient.focus(): resolves the promise with a
// wrapper for the focused client, or rejects with NotFound when the browser
// could not find the client any more.
void DidFocus(ScriptPromiseResolver* resolver,
              mojom::blink::ServiceWorkerClientInfoPtr client) {
  ExecutionContext* context = resolver->GetExecutionContext();
  if (!context || context->IsContextDestroyed())
    return;

  if (client) {
    resolver->Resolve(
        MakeGarbageCollected<ServiceWorkerWindowClient>(*client));
    return;
  }
  resolver->Reject(ServiceWorkerError::GetException(
      resolver, mojom::blink::ServiceWorkerErrorType::kNotFound,
      "The client was not found."));
}
// Completion handler shared by WindowClient.navigate() and
// Clients.openWindow(): resolves with the resulting window client (possibly
// null) or rejects with a TypeError carrying the browser-supplied message.
void DidNavigateOrOpenWindow(ScriptPromiseResolver* resolver,
                             bool success,
                             mojom::blink::ServiceWorkerClientInfoPtr info,
                             const String& error_msg) {
  // Bail out if the worker context is already gone; nothing to resolve.
  if (!resolver->GetExecutionContext() ||
      resolver->GetExecutionContext()->IsContextDestroyed()) {
    return;
  }

  if (!success) {
    DCHECK(!info);
    DCHECK(!error_msg.IsNull());
    // A ScriptState scope must be active while creating the V8 exception.
    ScriptState::Scope scope(resolver->GetScriptState());
    resolver->Reject(V8ThrowException::CreateTypeError(
        resolver->GetScriptState()->GetIsolate(), error_msg));
    return;
  }
  ServiceWorkerWindowClient* window_client = nullptr;
  // Even if the open/navigation succeeded, |info| may be null if information of
  // the opened/navigated window could not be obtained (this can happen for a
  // cross-origin window, or if the browser process could not get the
  // information in time before the window was closed).
  if (info)
    window_client = MakeGarbageCollected<ServiceWorkerWindowClient>(*info);
  resolver->Resolve(window_client);
}
} // namespace
// static
// Produces the browser-reply callback used by navigate()/openWindow();
// the persistent handle keeps |resolver| alive until the reply arrives.
ServiceWorkerWindowClient::ResolveWindowClientCallback
ServiceWorkerWindowClient::CreateResolveWindowClientCallback(
    ScriptPromiseResolver* resolver) {
  auto bound_callback =
      WTF::Bind(&DidNavigateOrOpenWindow, WrapPersistent(resolver));
  return bound_callback;
}
// Wraps browser-supplied client info; caches the hidden/focused state at
// construction time. Only window-typed clients may use this class.
ServiceWorkerWindowClient::ServiceWorkerWindowClient(
    const mojom::blink::ServiceWorkerClientInfo& info)
    : ServiceWorkerClient(info),
      page_hidden_(info.page_hidden),
      is_focused_(info.is_focused) {
  DCHECK_EQ(mojom::blink::ServiceWorkerClientType::kWindow, info.client_type);
}
// Defaulted out of line (declared in the header, defined here).
ServiceWorkerWindowClient::~ServiceWorkerWindowClient() = default;
// Returns the spec visibility string for the cached hidden state captured at
// construction; it is not re-queried from the page.
String ServiceWorkerWindowClient::visibilityState() const {
  return PageHiddenStateString(page_hidden_);
}
// Implements WindowClient.focus(). Rejects with InvalidAccessError unless the
// worker currently holds a window-interaction allowance; otherwise consumes
// the allowance and asks the browser to focus this client.
ScriptPromise ServiceWorkerWindowClient::focus(ScriptState* script_state) {
  auto* resolver = MakeGarbageCollected<ScriptPromiseResolver>(script_state);
  ScriptPromise promise = resolver->Promise();
  ServiceWorkerGlobalScope* global_scope =
      To<ServiceWorkerGlobalScope>(ExecutionContext::From(script_state));
  if (!global_scope->IsWindowInteractionAllowed()) {
    resolver->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidAccessError,
        "Not allowed to focus a window."));
    return promise;
  }
  // Consume the allowance so one user-interaction event cannot trigger
  // repeated focus calls.
  global_scope->ConsumeWindowInteraction();

  global_scope->GetServiceWorkerHost()->FocusClient(
      Uuid(), WTF::Bind(&DidFocus, WrapPersistent(resolver)));
  return promise;
}
// Implements WindowClient.navigate(). Resolves |url| against the worker's
// location, rejects invalid and about: URLs as well as URLs the worker's
// origin may not display, then delegates the navigation to the browser.
ScriptPromise ServiceWorkerWindowClient::navigate(ScriptState* script_state,
                                                  const String& url) {
  auto* resolver = MakeGarbageCollected<ScriptPromiseResolver>(script_state);
  ScriptPromise promise = resolver->Promise();
  ServiceWorkerGlobalScope* global_scope =
      To<ServiceWorkerGlobalScope>(ExecutionContext::From(script_state));

  // Relative URLs resolve against the service worker script's location.
  KURL parsed_url = KURL(global_scope->location()->Url(), url);
  if (!parsed_url.IsValid() || parsed_url.ProtocolIsAbout()) {
    resolver->Reject(V8ThrowException::CreateTypeError(
        script_state->GetIsolate(), "'" + url + "' is not a valid URL."));
    return promise;
  }
  if (!global_scope->GetSecurityOrigin()->CanDisplay(parsed_url)) {
    resolver->Reject(V8ThrowException::CreateTypeError(
        script_state->GetIsolate(),
        "'" + parsed_url.ElidedString() + "' cannot navigate."));
    return promise;
  }
  global_scope->GetServiceWorkerHost()->NavigateClient(
      Uuid(), parsed_url, CreateResolveWindowClientCallback(resolver));
  return promise;
}
// Garbage-collection tracing; this class adds no traced members beyond the
// base class's.
void ServiceWorkerWindowClient::Trace(Visitor* visitor) const {
  ServiceWorkerClient::Trace(visitor);
}
} // namespace blink
| nwjs/chromium.src | third_party/blink/renderer/modules/service_worker/service_worker_window_client.cc | C++ | bsd-3-clause | 5,471 |
from __future__ import absolute_import
import unittest
import bokeh.resources as resources
from bokeh.resources import _get_cdn_urls
WRAPPER = """Bokeh.$(function() {
foo
});"""
WRAPPER_DEV = '''require(["jquery", "main"], function($, Bokeh) {
Bokeh.set_log_level("info");
Bokeh.$(function() {
foo
});
});'''
LOG_LEVELS = ['trace', 'debug', 'info', 'warn', 'error', 'fatal']
DEFAULT_LOG_JS_RAW = 'Bokeh.set_log_level("info");'
## Test JSResources
def test_js_resources_default_mode_is_inline():
r = resources.JSResources()
assert r.mode == "inline"
def test_js_resources_inline_has_no_css_resources():
    """Inline JSResources carries raw JS (ending in the log-level snippet)
    and exposes no CSS attributes at all."""
    js_resources = resources.JSResources(mode="inline")
    assert js_resources.mode == "inline"
    assert js_resources.dev is False

    raw_js = js_resources.js_raw
    assert len(raw_js) == 3
    assert raw_js[-1] == DEFAULT_LOG_JS_RAW

    assert not hasattr(js_resources, 'css_raw')
    assert js_resources.messages == []
## Test CSSResources
## Test CSSResources
def test_css_resources_default_mode_is_inline():
    """A CSSResources built with no arguments uses inline mode."""
    css_resources = resources.CSSResources()
    assert css_resources.mode == "inline"
def test_inline_css_resources():
    """Inline CSSResources carries raw CSS and exposes no JS attributes."""
    css_resources = resources.CSSResources(mode="inline")
    assert css_resources.mode == "inline"
    assert css_resources.dev is False
    assert len(css_resources.css_raw) == 2
    assert not hasattr(css_resources, 'js_raw')
    assert css_resources.messages == []
class TestResources(unittest.TestCase):
    """Exercises the combined Resources class across every supported mode
    (inline, cdn, server, relative, absolute) and their -dev variants,
    plus log-level handling and constructor argument validation."""
    def test_basic(self):
        # Default mode is inline.
        r = resources.Resources()
        self.assertEqual(r.mode, "inline")
    def test_log_level(self):
        r = resources.Resources()
        for level in LOG_LEVELS:
            r.log_level = level
            self.assertEqual(r.log_level, level)
            if not r.dev:
                # Non-dev resources append the log-level snippet to js_raw.
                self.assertEqual(r.js_raw[-1], 'Bokeh.set_log_level("%s");' % level)
        # Unknown levels are rejected.
        self.assertRaises(ValueError, setattr, r, "log_level", "foo")
    def test_module_attrs(self):
        # The module-level singletons are pre-configured for their modes.
        self.assertEqual(resources.CDN.mode, "cdn")
        self.assertEqual(resources.INLINE.mode, "inline")
    def test_inline(self):
        r = resources.Resources(mode="inline")
        self.assertEqual(r.mode, "inline")
        self.assertEqual(r.dev, False)
        self.assertEqual(len(r.js_raw), 3)
        self.assertEqual(r.js_raw[-1], DEFAULT_LOG_JS_RAW)
        self.assertEqual(len(r.css_raw), 2)
        self.assertEqual(r.messages, [])
    def test_get_cdn_urls(self):
        # Dev version strings must be served from the bokeh/dev CDN bucket.
        dev_version = "0.0.1dev"
        result = _get_cdn_urls(dev_version)
        url = result['js_files'][0]
        self.assertIn('bokeh/dev', url)
    def test_cdn(self):
        resources.__version__ = "1.0"
        r = resources.Resources(mode="cdn", version="1.0")
        self.assertEqual(r.mode, "cdn")
        self.assertEqual(r.dev, False)
        self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW])
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
        # Requesting a release CDN build from a dev install must warn.
        resources.__version__ = "1.0-1-abc"
        r = resources.Resources(mode="cdn", version="1.0")
        self.assertEqual(r.messages, [
            {'text': "Requesting CDN BokehJS version '1.0' from Bokeh development version '1.0-1-abc'. This configuration is unsupported and may not work!",
             'type': 'warn'}
        ])
    def test_server(self):
        r = resources.Resources(mode="server")
        self.assertEqual(r.mode, "server")
        self.assertEqual(r.dev, False)
        self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW])
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
        # A custom root_url must not change the raw resource content.
        r = resources.Resources(mode="server", root_url="http://foo/")
        self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW])
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
    def test_server_dev(self):
        r = resources.Resources(mode="server-dev")
        self.assertEqual(r.mode, "server")
        self.assertEqual(r.dev, True)
        self.assertEqual(len(r.js_raw), 1)
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
        r = resources.Resources(mode="server-dev", root_url="http://foo/")
        self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW])
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
    def test_relative(self):
        r = resources.Resources(mode="relative")
        self.assertEqual(r.mode, "relative")
        self.assertEqual(r.dev, False)
        self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW])
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
    def test_relative_dev(self):
        r = resources.Resources(mode="relative-dev")
        self.assertEqual(r.mode, "relative")
        self.assertEqual(r.dev, True)
        self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW])
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
    def test_absolute(self):
        r = resources.Resources(mode="absolute")
        self.assertEqual(r.mode, "absolute")
        self.assertEqual(r.dev, False)
        self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW])
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
    def test_absolute_dev(self):
        r = resources.Resources(mode="absolute-dev")
        self.assertEqual(r.mode, "absolute")
        self.assertEqual(r.dev, True)
        self.assertEqual(r.js_raw, [DEFAULT_LOG_JS_RAW])
        self.assertEqual(r.css_raw, [])
        self.assertEqual(r.messages, [])
    def test_argument_checks(self):
        # Unknown mode names, and arguments that only make sense for other
        # modes, must all raise ValueError.
        self.assertRaises(ValueError, resources.Resources, "foo")
        for mode in ("inline", "cdn", "server", "server-dev", "absolute", "absolute-dev"):
            self.assertRaises(ValueError, resources.Resources, mode, root_dir="foo")
        for mode in ("inline", "server", "server-dev", "relative", "relative-dev", "absolute", "absolute-dev"):
            self.assertRaises(ValueError, resources.Resources, mode, version="foo")
        for mode in ("inline", "cdn", "relative", "relative-dev", "absolute", "absolute-dev"):
            self.assertRaises(ValueError, resources.Resources, mode, root_url="foo")
| srinathv/bokeh | bokeh/tests/test_resources.py | Python | bsd-3-clause | 6,016 |
#include <iostream>
#include <unistd.h>
#include <sys/wait.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <wait.h>
#include <string.h>
#include <string>
#include <vector>
#include <cstdlib>
#include <stdlib.h>
#include <stdio.h>
#include <boost/tokenizer.hpp>
#include <boost/token_iterator.hpp>
#include <fcntl.h>
#include <errno.h>
#include <dirent.h>
#include <set>
#include <pwd.h>
#include <grp.h>
#include <time.h>
using namespace boost;
using namespace std;
// mv SOURCE DEST, implemented as link() + unlink() of the source, so it only
// works within a single filesystem (link(2) cannot cross devices).
// Exit status: 0 on success, nonzero on any failure.
int main(int argc, char ** argv){
    if(argc<3){
        cerr << "mv: missing destination file operand" << endl;
        return -1;
    }
    // Collect the operands; use a signed index to match argc's type
    // (the original unsigned/int comparison drew a compiler warning).
    vector<string> holder;
    for(int i = 1; i < argc; i++){
        holder.push_back(string(argv[i]));
    }
    struct stat s;
    // The source must exist before we attempt anything.
    if(stat(holder[0].c_str(), &s) == -1){
        perror("stat");
        exit(1);
    }
    if(stat(holder[1].c_str(), &s) != -1){ // destination already exists
        if(S_IFDIR & s.st_mode){ // destination is a directory: move into it
            DIR *dirp = opendir(holder[1].c_str());
            if(NULL == dirp){
                cerr << holder[1] << " does not exist" << endl;
                exit(1);
            }
            closedir(dirp); // opendir was only an accessibility check
            if(-1 == link(holder[0].c_str(), (holder[1] + '/' + holder[0]).c_str())){
                perror("link");
                exit(1);
            }
            if(-1 == unlink(holder[0].c_str())){
                perror("unlink");
                exit(1);
            }
        }
        // NOTE(review): when the destination exists and is a regular file this
        // silently does nothing; real mv would replace it. Behavior kept as-is
        // pending confirmation that this is intentional.
    }
    else{ // destination does not exist: plain rename via link + unlink
        if(-1 == link(holder[0].c_str(), holder[1].c_str())){
            perror("link");
            exit(1);
        }
        if(-1 == unlink(holder[0].c_str())){
            perror("unlink");
            exit(1);
        }
    }
    return 0;
}
| ykamo001/rshell | src/mv.cpp | C++ | bsd-3-clause | 1,431 |
// Umbrella header: re-exports the scalar implementation of cscpi (presumably
// csc(pi*x) — see the trigonometric module for the definition) under the
// public nt2 include path.
#ifndef NT2_INCLUDE_FUNCTIONS_SCALAR_CSCPI_HPP_INCLUDED
#define NT2_INCLUDE_FUNCTIONS_SCALAR_CSCPI_HPP_INCLUDED
#include <nt2/trigonometric/include/functions/scalar/cscpi.hpp>
#endif
| hainm/pythran | third_party/nt2/include/functions/scalar/cscpi.hpp | C++ | bsd-3-clause | 185 |
/* Copyright (c) 2015 Convey Computer Corporation
*
* This file is part of the OpenHT jpegscale application.
*
* Use and distribution licensed under the BSD 3-clause license.
* See the LICENSE file for the complete license text.
*/
#if defined(VERT) || defined(_WIN32)
#include "Ht.h"
#include "PersVinfo.h"
#ifndef _HTV
#include "JobInfo.h"
#endif
#define BUSY_RETRY(b) { if (b) { HtRetry(); break; } }
// Read jobInfo.m_vert and send to vert modules
//
// HT instruction state machine: reads the vertical-scaling section of the
// JobInfo structure from host memory in 64-byte lines, broadcasting each
// response quadword to the other vert modules via ReadMem_ReadRspFunc's
// response handler, and finally hands control to vctl.
void CPersVinfo::PersVinfo()
{
	if (PR_htValid) {
		switch (PR_htInst) {
		case VINFO_ENTRY: {
			// Point at the m_vert section of JobInfo and queue the first
			// fixed-size read (3 lines covering the header fields).
			P_pJobInfo = PR_pJobInfo + JOB_INFO_VERT_OFFSET;
			P_readIdx = 0;
			P_readCnt = 3;	// total memory lines to read
			assert_msg(offsetof(JobInfo, m_vert) == JOB_INFO_VERT_OFFSET, "JOB_INFO_VERT_OFFSET = 0x%x\n", (int)offsetof(JobInfo, m_vert));
			P_memAddr = P_pJobInfo;
			HtContinue(VINFO_READ);
		}
		break;
		case VINFO_READ: {
			BUSY_RETRY (ReadMemBusy());
			// Read one full 64-byte line (8 quadwords) per iteration.
			ht_uint4 qwCnt = 8;
			ReadMem_ReadRspFunc(P_memAddr, P_readIdx, qwCnt);
			P_memAddr += 64;
			P_readIdx += 1;
			if (P_readIdx == P_readCnt)
				// must pause to get outImageRows and pntWghtListSize
				ReadMemPause(VINFO_PNT_WGHT_START);
			else
				HtContinue(VINFO_READ);
		}
		break;
		case VINFO_PNT_WGHT_START: {
			// Sized by S_outImageRows (captured from the header reads): pull
			// outImageRows+1 16-bit start values, 32 per 64-byte line.
			P_readIdx = START_OF_PNT_WGHT_START_QW/8;
			P_readCnt = P_readIdx + (S_outImageRows+1+31)/32;	// number of memory lines to pullin outImageRows+1 16-bit values
			P_memAddr = PR_pJobInfo + P_readIdx * 64;
			HtContinue(VINFO_PNT_WGHT_START_READ);
		}
		break;
		case VINFO_PNT_WGHT_START_READ: {
			BUSY_RETRY (ReadMemBusy());
			ht_uint4 qwCnt = 8;
			ReadMem_ReadRspFunc(P_memAddr, P_readIdx, qwCnt);
			P_memAddr += 64;
			P_readIdx += 1;
			if (P_readIdx == P_readCnt) {
				HtContinue(VINFO_PNT_WGHT_IDX);
			} else
				HtContinue(VINFO_PNT_WGHT_START_READ);
		}
		break;
		case VINFO_PNT_WGHT_IDX: {
			// Point-weight index table: outImageRows 16-bit entries.
			P_readIdx = START_OF_PNT_WGHT_IDX_QW/8;
			P_readCnt = P_readIdx + (S_outImageRows+31)/32;	// number of memory lines to pullin outImageRows+1 16-bit values
			P_memAddr = PR_pJobInfo + P_readIdx * 64;
			HtContinue(VINFO_PNT_WGHT_IDX_READ);
		}
		break;
		case VINFO_PNT_WGHT_IDX_READ: {
			BUSY_RETRY (ReadMemBusy());
			ht_uint4 qwCnt = 8;
			ReadMem_ReadRspFunc(P_memAddr, P_readIdx, qwCnt);
			P_memAddr += 64;
			P_readIdx += 1;
			if (P_readIdx == P_readCnt)
				HtContinue(VINFO_PNT_WGHT_LIST);
			else
				HtContinue(VINFO_PNT_WGHT_IDX_READ);
		}
		break;
		case VINFO_PNT_WGHT_LIST: {
			// Point-weight list: S_pntWghtListSize entries, 4 quadwords each.
			P_readIdx = START_OF_PNT_WGHT_LIST_QW/8;
			P_readCnt = P_readIdx + (S_pntWghtListSize*4+7)/8;	// number of memory lines to pullin outImageRows+1 16-bit values
			P_memAddr = PR_pJobInfo + P_readIdx * 64;
			HtContinue(VINFO_PNT_WGHT_LIST_READ);
		}
		break;
		case VINFO_PNT_WGHT_LIST_READ: {
			BUSY_RETRY (ReadMemBusy());
			ht_uint4 qwCnt = 8;
			ReadMem_ReadRspFunc(P_memAddr, P_readIdx, qwCnt);
			P_memAddr += 64;
			P_readIdx += 1;
			if (P_readIdx == P_readCnt)
				// Wait for all outstanding reads before signalling ready.
				ReadMemPause(VINFO_TRANSFER);
			else
				HtContinue(VINFO_PNT_WGHT_LIST_READ);
		}
		break;
		case VINFO_TRANSFER: {
			BUSY_RETRY( SendTransferBusy_vctl() );
			// Broadcast "image ready" to the vert modules, then transfer
			// this thread to the vctl module.
			VinfoMsg vinfoMsg;
			vinfoMsg.m_bImageRdy = true;
			vinfoMsg.m_imageIdx = P_imageIdx;
			SendMsg_vinfo( vinfoMsg );
			SendTransfer_vctl(PR_imageIdx);
		}
		break;
		default:
			assert(0);
		}
	}
}
// Send message to other vert modules
//
// Memory-read response handler: forwards every response quadword to the vert
// modules and snoops two header quadwords to capture sizing fields needed by
// the PersVinfo state machine (outImageRows from QW 0, pntWghtListSize from
// QW 18 — bit positions per the JobInfo vertical-section layout).
void CPersVinfo::ReadMemResp_ReadRspFunc(ht_uint3 rspIdx, sc_uint<VINFO_MIF_DST_READRSPFUNC_INFO_W> rdRsp_info, sc_uint<64> rdRspData)
{
	VinfoMsg vinfoMsg;
	vinfoMsg.m_bImageRdy = false;
	vinfoMsg.m_imageIdx = P_imageIdx;
	// Reconstruct the absolute quadword index from line index + lane.
	vinfoMsg.m_rspQw = (rdRsp_info << 3) | rspIdx;
	vinfoMsg.m_data = rdRspData;
	SendMsg_vinfo( vinfoMsg );
	if (!vinfoMsg.m_bImageRdy) {
		if (vinfoMsg.m_rspQw == 0)
			S_outImageRows = (vinfoMsg.m_data >> 30) & 0x3fff;
		if (vinfoMsg.m_rspQw == 18)
			S_pntWghtListSize = (vinfoMsg.m_data >> 16) & 0x3fff;
	}
}
| TonyBrewer/OpenHT | apps/jpegresize/lib_pers_mc/src_pers/PersVinfo_src.cpp | C++ | bsd-3-clause | 3,907 |
import networkx as nx
import matplotlib.pyplot as plt
# Graphviz layout requires either PyGraphviz or pydot; try each in turn and
# fail with an actionable message if neither binding is installed.
try:
    import pygraphviz
    from networkx.drawing.nx_agraph import graphviz_layout
except ImportError:
    try:
        import pydot
        from networkx.drawing.nx_pydot import graphviz_layout
    except ImportError:
        raise ImportError("This example needs Graphviz and either "
                          "PyGraphviz or pydot")

# Radial ("twopi") drawing of a balanced tree: branching factor 3, height 5.
G = nx.balanced_tree(3, 5)
pos = graphviz_layout(G, prog='twopi', args='')
plt.figure(figsize=(8, 8))
nx.draw(G, pos, node_size=20, alpha=0.5, node_color="blue", with_labels=False)
plt.axis('equal')  # equal axis scaling keeps the radial layout circular
plt.savefig('circular_tree.png')
plt.show()
| jfinkels/networkx | examples/drawing/circular_tree.py | Python | bsd-3-clause | 639 |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PooledObject.cs">
// Copyright (c) by respective owners including Yahoo!, Microsoft, and
// individual contributors. All rights reserved. Released under a BSD
// license as described in the file LICENSE.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
using System;
namespace VW
{
    /// <summary>
    /// A strongly-typed pooled object. Disposing this wrapper returns it to
    /// its parent pool rather than disposing the wrapped <see cref="Value"/>.
    /// </summary>
    /// <typeparam name="TSource">The disposable context needed to create objects of <typeparamref name="TObject"/>.</typeparam>
    /// <typeparam name="TObject">The type of the objects to be created.</typeparam>
    public sealed class PooledObject<TSource, TObject> : IDisposable
        where TSource : IDisposable
        where TObject : IDisposable
    {
        /// <summary>
        /// The parent pool.
        /// </summary>
        private readonly ObjectPool<TSource, TObject> pool;

        /// <summary>
        /// Initializes a new instance of the <see cref="PooledObject{TSource,TObject}"/> class.
        /// </summary>
        /// <param name="pool">The parent pool.</param>
        /// <param name="version">The version of the pool at time of creation of this instance.</param>
        /// <param name="value">The actual pooled object.</param>
        internal PooledObject(ObjectPool<TSource, TObject> pool, int version, TObject value)
        {
            this.pool = pool;
            this.Value = value;
            this.Version = version;
        }

        /// <summary>
        /// The actual value.
        /// </summary>
        public TObject Value { get; private set; }

        /// <summary>
        /// Factory version used to create Value; lets the pool detect and
        /// discard instances created by a stale factory.
        /// </summary>
        internal int Version { get; private set; }

        /// <summary>
        /// Return to pool. Does not dispose <see cref="Value"/>; ownership
        /// passes back to the pool.
        /// </summary>
        public void Dispose()
        {
            this.pool.ReturnObject(this);
        }
    }
}
| marko-asplund/vowpal_wabbit | cs/cs/PooledObject.cs | C# | bsd-3-clause | 2,121 |
// Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/gtk/go_button_gtk.h"
#include "app/l10n_util.h"
#include "base/logging.h"
#include "base/message_loop.h"
#include "chrome/app/chrome_dll_resource.h"
#include "chrome/browser/browser.h"
#include "chrome/browser/gtk/location_bar_view_gtk.h"
#include "chrome/browser/profile.h"
#include "grit/generated_resources.h"
#include "grit/theme_resources.h"
// Builds the GTK button, sizes it to the "go" image, and wires up the
// expose/enter/leave/clicked handlers. |browser| may be NULL in tests, in
// which case the image themes get a NULL provider.
GoButtonGtk::GoButtonGtk(LocationBarViewGtk* location_bar, Browser* browser)
    : location_bar_(location_bar),
      browser_(browser),
      button_delay_(0),  // lazily initialized from GTK's double-click time
      stop_timer_(this),
      intended_mode_(MODE_GO),
      visible_mode_(MODE_GO),
      state_(BS_NORMAL),
      go_(browser ? browser->profile()->GetThemeProvider() : NULL,
          IDR_GO, IDR_GO_P, IDR_GO_H, 0),
      stop_(browser ? browser->profile()->GetThemeProvider() : NULL,
            IDR_STOP, IDR_STOP_P, IDR_STOP_H, 0),
      widget_(gtk_button_new()) {
  gtk_widget_set_size_request(widget_.get(),
                              gdk_pixbuf_get_width(go_.pixbufs(0)),
                              gdk_pixbuf_get_height(go_.pixbufs(0)));

  gtk_widget_set_app_paintable(widget_.get(), TRUE);
  // We effectively double-buffer by virtue of having only one image...
  gtk_widget_set_double_buffered(widget_.get(), FALSE);
  g_signal_connect(G_OBJECT(widget_.get()), "expose-event",
                   G_CALLBACK(OnExpose), this);
  g_signal_connect(G_OBJECT(widget_.get()), "enter",
                   G_CALLBACK(OnEnter), this);
  g_signal_connect(G_OBJECT(widget_.get()), "leave",
                   G_CALLBACK(OnLeave), this);
  g_signal_connect(G_OBJECT(widget_.get()), "clicked",
                   G_CALLBACK(OnClicked), this);
  GTK_WIDGET_UNSET_FLAGS(widget_.get(), GTK_CAN_FOCUS);

  SetTooltip();
}
// Destroys the owned GTK widget explicitly.
GoButtonGtk::~GoButtonGtk() {
  widget_.Destroy();
}
// Records the mode the button should eventually display, and switches the
// visible image immediately when it is safe to do so (see condition below);
// otherwise the pending OnButtonTimer/OnLeave will apply it later.
void GoButtonGtk::ChangeMode(Mode mode, bool force) {
  intended_mode_ = mode;

  // If the change is forced, or the user isn't hovering the icon, or it's safe
  // to change it to the other image type, make the change immediately;
  // otherwise we'll let it happen later.
  if (force || (state() != BS_HOT) || ((mode == MODE_STOP) ?
      stop_timer_.empty() : (visible_mode_ != MODE_STOP))) {
    stop_timer_.RevokeAll();
    gtk_widget_queue_draw(widget_.get());
    SetTooltip();
    visible_mode_ = mode;
  }
}
// Creates the revocable delayed task that flips the button back to its
// intended mode once the double-click window has elapsed.
Task* GoButtonGtk::CreateButtonTimerTask() {
  Task* timer_task = stop_timer_.NewRunnableMethod(&GoButtonGtk::OnButtonTimer);
  return timer_task;
}
// Fired after the double-click window: cancel any other pending flips and
// apply the mode that accumulated while the timer was running.
void GoButtonGtk::OnButtonTimer() {
  stop_timer_.RevokeAll();
  ChangeMode(intended_mode_, true);
}
// static
// Paints whichever themed image (go or stop) matches the mode currently
// shown to the user.
gboolean GoButtonGtk::OnExpose(GtkWidget* widget,
                               GdkEventExpose* e,
                               GoButtonGtk* button) {
  const bool showing_go = button->visible_mode_ == MODE_GO;
  if (showing_go)
    return button->go_.OnExpose(widget, e);
  return button->stop_.OnExpose(widget, e);
}
// static
// Pointer entered the button: mark it hot so ChangeMode defers image swaps
// while the user hovers.
gboolean GoButtonGtk::OnEnter(GtkButton* widget, GoButtonGtk* button) {
  DCHECK_EQ(BS_NORMAL, button->state());
  button->state_ = BS_HOT;
  return TRUE;
}
// static
// Pointer left the button: clear the hot state and apply any image change
// that was deferred while the user was hovering.
gboolean GoButtonGtk::OnLeave(GtkButton* widget, GoButtonGtk* button) {
  // It's possible on shutdown for a "leave" event to be emitted twice in a row
  // for this button. I'm not sure if this is a gtk quirk or something wrong
  // with our usage, but it's harmless. I'm commenting out this DCHECK for now.
  // and adding a LOG(WARNING) instead.
  // See http://www.crbug.com/10851 for details.
  // DCHECK_EQ(BS_HOT, button->state());
  if (button->state() != BS_HOT)
    LOG(WARNING) << "Button state should be BS_HOT when leaving.";

  button->state_ = BS_NORMAL;
  button->ChangeMode(button->intended_mode_, true);
  return TRUE;
}
// static
// Click handler. In stop mode: stop the load and flip back to go. In go mode:
// issue IDC_GO, then start a timer for the system double-click interval
// during which the button cannot turn into a stop button (prevents an
// accidental stop from a double click).
gboolean GoButtonGtk::OnClicked(GtkButton* widget, GoButtonGtk* button) {
  if (button->visible_mode_ == MODE_STOP) {
    if (button->browser_)
      button->browser_->Stop();

    // The user has clicked, so we can feel free to update the button,
    // even if the mouse is still hovering.
    button->ChangeMode(MODE_GO, true);
  } else if (button->visible_mode_ == MODE_GO && button->stop_timer_.empty()) {
    // If the go button is visible and not within the double click timer, go.
    if (button->browser_)
      button->browser_->ExecuteCommand(IDC_GO);

    // Figure out the system double-click time.
    if (button->button_delay_ == 0) {
      GtkSettings* settings = gtk_settings_get_default();
      g_object_get(G_OBJECT(settings),
                   "gtk-double-click-time",
                   &button->button_delay_,
                   NULL);
    }

    // Stop any existing timers.
    button->stop_timer_.RevokeAll();

    // Start a timer - while this timer is running, the go button
    // cannot be changed to a stop button. We do not set intended_mode_
    // to MODE_STOP here as we want to wait for the browser to tell
    // us that it has started loading (and this may occur only after
    // some delay).
    MessageLoop::current()->PostDelayedTask(FROM_HERE,
                                            button->CreateButtonTimerTask(),
                                            button->button_delay_);
  }

  return TRUE;
}
// Updates the tooltip to match the visible mode: a "go to <url>" message in
// go mode (search wording is intentionally disabled — see TODO) or the
// standard stop message in stop mode.
void GoButtonGtk::SetTooltip() {
  if (visible_mode_ == MODE_GO) {
    // |location_bar_| can be NULL in tests.
    std::wstring current_text(
        location_bar_ ? location_bar_->location_entry()->GetText() :
        L"");
    if (l10n_util::GetTextDirection() == l10n_util::RIGHT_TO_LEFT) {
      l10n_util::WrapStringWithLTRFormatting(&current_text);
    }

    // TODO(pkasting): http://b/868940 Use the right strings at the right
    // times by asking the autocomplete system what to do. Don't hardcode
    // "Google" as the search provider name.
    // Note the hardcoded |true| below: the search-provider branch is dead
    // until the TODO above is resolved.
    gtk_widget_set_tooltip_text(
        widget_.get(),
        true ? l10n_util::GetStringFUTF8(
                   IDS_TOOLTIP_GO_SITE, WideToUTF16(current_text)).c_str() :
        l10n_util::GetStringFUTF8(IDS_TOOLTIP_GO_SEARCH, UTF8ToUTF16("Google"),
                                  WideToUTF16(current_text)).c_str());
  } else {
    gtk_widget_set_tooltip_text(
        widget_.get(), l10n_util::GetStringUTF8(IDS_TOOLTIP_STOP).c_str());
  }
}
| amyvmiwei/chromium | chrome/browser/gtk/go_button_gtk.cc | C++ | bsd-3-clause | 6,325 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE127_Buffer_Underread__new_char_loop_54c.cpp
Label Definition File: CWE127_Buffer_Underread__new.label.xml
Template File: sources-sink-54c.tmpl.cpp
*/
/*
* @description
* CWE: 127 Buffer Under-read
* BadSource: Set data pointer to before the allocated memory buffer
* GoodSource: Set data pointer to the allocated memory buffer
* Sink: loop
* BadSink : Copy data to string using a loop
* Flow Variant: 54 Data flow: data passed as an argument from one function through three others to a fifth; all five functions are in different source files
*
* */
#include "std_testcase.h"
#include <wchar.h>
namespace CWE127_Buffer_Underread__new_char_loop_54
{
/* all the sinks are the same, we just want to know where the hit originated if a tool flags one */

#ifndef OMITBAD

/* bad function declaration */
void badSink_d(char * data);

// Link c in the five-file data-flow chain (flow variant 54): forwards the
// tainted pointer unchanged to the next translation unit.
void badSink_c(char * data)
{
    badSink_d(data);
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink_d(char * data);

// Good-source counterpart of badSink_c; forwards the safely-initialized
// pointer unchanged down the same chain.
void goodG2BSink_c(char * data)
{
    goodG2BSink_d(data);
}

#endif /* OMITGOOD */

} /* close namespace */
import re
from six import text_type
"""Translate strings to and from SOAP 1.2 XML name encoding
Implements rules for mapping application defined name to XML names
specified by the w3 SOAP working group for SOAP version 1.2 in
Appendix A of "SOAP Version 1.2 Part 2: Adjuncts", W3C Working Draft
17, December 2001, <http://www.w3.org/TR/soap12-part2/#namemap>
Also see <http://www.w3.org/2000/xp/Group/xmlp-issues>.
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date:: 2002-04-25
Version 0.9.0
"""
ident = "$Id$"
def _NCNameChar(x):
return x.isalpha() or x.isdigit() or x == "." or x == '-' or x == "_"
def _NCNameStartChar(x):
return x.isalpha() or x == "_"
def _toUnicodeHex(x):
hexval = hex(ord(x[0]))[2:]
hexlen = len(hexval)
# Make hexval have either 4 or 8 digits by prepending 0's
if (hexlen == 1):
hexval = "000" + hexval
elif (hexlen == 2):
hexval = "00" + hexval
elif (hexlen == 3):
hexval = "0" + hexval
elif (hexlen == 4):
hexval = "" + hexval
elif (hexlen == 5):
hexval = "000" + hexval
elif (hexlen == 6):
hexval = "00" + hexval
elif (hexlen == 7):
hexval = "0" + hexval
elif (hexlen == 8):
hexval = "" + hexval
else:
raise Exception("Illegal Value returned from hex(ord(x))")
return "_x" + hexval + "_"
def _fromUnicodeHex(x):
return eval(r'u"\u' + x[2:-1] + '"')
def toXMLname(string):
    """Convert string to a XML name.

    Splits off an optional ``prefix:`` part, then escapes the local part:
    literal ``_x`` pairs, a leading ``xml`` (any case), and every character
    not legal in an NCName are replaced with ``_xHHHH_`` escapes.
    """
    if string.find(':') == -1:
        prefix = None
        localname = string
    else:
        prefix, localname = string.split(':', 1)

    chars = text_type(localname)
    length = len(chars)
    encoded = []
    for pos, ch in enumerate(chars):
        if pos < length - 1 and ch == u'_' and chars[pos + 1] == u'x':
            # A literal "_x" must itself be escaped so decoding is unambiguous.
            encoded.append(u'_x005F_')
        elif pos == 0 and length >= 3 and \
                chars[0] in u'xX' and chars[1] in u'mM' and chars[2] in u'lL':
            # Names beginning with "xml" (any case) are reserved by XML.
            encoded.append(u'_xFFFF_' + ch)
        elif not _NCNameChar(ch) or (pos == 0 and not _NCNameStartChar(ch)):
            encoded.append(_toUnicodeHex(ch))
        else:
            encoded.append(ch)
    localpart = u''.join(encoded)

    if prefix:
        return "%s:%s" % (prefix, localpart)
    return localpart
def fromXMLname(string):
    """Convert XML name to unicode string.

    Drops the reserved ``_xFFFF_`` marker, then decodes every four-digit
    ``_xHHHH_`` escape back to its character.
    """
    stripped = re.sub(r'_xFFFF_', '', string)
    return re.sub(r'_x[0-9A-Fa-f]{4}_',
                  lambda match: _fromUnicodeHex(match.group(0)),
                  stripped)
| pycontribs/wstools | wstools/XMLname.py | Python | bsd-3-clause | 2,575 |
/*
* Copyright 2000-2003 Oracle, Inc. This software was developed in conjunction with the National Cancer Institute, and so to the extent government employees are co-authors, any rights in such works shall be subject to Title 17 of the United States Code, section 105.
*
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the disclaimer of Article 3, below. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
*
* 2. The end-user documentation included with the redistribution, if any, must include the following acknowledgment:
*
* "This product includes software developed by Oracle, Inc. and the National Cancer Institute."
*
* If no such end-user documentation is to be included, this acknowledgment shall appear in the software itself, wherever such third-party acknowledgments normally appear.
*
* 3. The names "The National Cancer Institute", "NCI" and "Oracle" must not be used to endorse or promote products derived from this software.
*
* 4. This license does not authorize the incorporation of this software into any proprietary programs. This license does not authorize the recipient to use any trademarks owned by either NCI or Oracle, Inc.
*
* 5. THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED. IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, ORACLE, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
*/
package gov.nih.nci.ncicb.cadsr.loader.ui;
import gov.nih.nci.ncicb.cadsr.loader.UserSelections;
import java.awt.event.*;
import gov.nih.nci.ncicb.cadsr.loader.util.RunMode;
import gov.nih.nci.ncicb.cadsr.loader.util.PropertyAccessor;
/**
 * Wizard descriptor for the initial run-mode selection step. Publishes the
 * chosen mode under the "MODE_SELECTION" user-selection key (only for the
 * GMEDefaults mode; otherwise null) and routes the wizard to the next panel
 * based on the selection.
 */
public class ModeSelectionPanelDescriptor 
  extends WizardPanelDescriptor
  implements ActionListener {
  public static final String IDENTIFIER = "MODE_SELECTION_PANEL";
  private ModeSelectionPanel panel;
  private UserSelections userSelections = UserSelections.getInstance();
  public ModeSelectionPanelDescriptor() {
    panel = new ModeSelectionPanel();
    setPanelDescriptorIdentifier(IDENTIFIER);
    setPanelComponent(panel);
    panel.addActionListener(this);
    // Seed the user-selection state from the panel's initial selection.
    if(panel.getSelection().equals(RunMode.GMEDefaults.toString()))
      userSelections.setProperty("MODE_SELECTION", panel.getSelection());
    else
      userSelections.setProperty("MODE_SELECTION", null);
    // NOTE(review): this initial routing keys on GenerateReport, while
    // actionPerformed below keys on Roundtrip — confirm the asymmetry is
    // intentional.
    if(panel.getSelection().equals(RunMode.GenerateReport.toString()))
      nextPanelDescriptor = PackageFilterSelectionPanelDescriptor.IDENTIFIER;
    else
      nextPanelDescriptor = FileSelectionPanelDescriptor.IDENTIFIER;
    backPanelDescriptor = ModeSelectionPanelDescriptor.IDENTIFIER;
  }
  // This is the first wizard step, so there is nothing to go back to.
  public void aboutToDisplayPanel() 
  {
    getWizardModel().setBackButtonEnabled(false);
  }
  // Re-publish the selection and re-route the "next" panel each time the
  // user changes the mode radio selection.
  public void actionPerformed(ActionEvent evt) {
    if(evt.getActionCommand().equals(RunMode.GMEDefaults.toString()))
      userSelections.setProperty("MODE_SELECTION", panel.getSelection());
    else 
      userSelections.setProperty("MODE_SELECTION", null);
    if(evt.getActionCommand().equals(RunMode.Roundtrip.toString())) {
      setNextPanelDescriptor(RoundtripPanelDescriptor.IDENTIFIER);
    } 
    else {
      setNextPanelDescriptor(FileSelectionPanelDescriptor.IDENTIFIER);
    }
  }
}
| NCIP/cadsr-semantic-tools | software/SIW/src/java/gov/nih/nci/ncicb/cadsr/loader/ui/ModeSelectionPanelDescriptor.java | Java | bsd-3-clause | 4,065 |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package http
import (
"bufio"
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http/internal"
"net/textproto"
"sort"
"strconv"
"strings"
"sync"
)
// ErrLineTooLong is returned when reading request or response bodies
// with malformed chunked encoding.
var ErrLineTooLong = internal.ErrLineTooLong
// errorReader is an io.Reader that never yields data; every Read call
// immediately reports the stored error.
type errorReader struct {
	err error
}

func (r errorReader) Read(p []byte) (int, error) {
	return 0, r.err
}
// transferWriter inspects the fields of a user-supplied Request or Response,
// sanitizes them without changing the user object and provides methods for
// writing the respective header, body and trailer in wire format.
type transferWriter struct {
	Method           string    // request method; governs whether a body is expected
	Body             io.Reader // sanitized body; nil when none is to be written
	BodyCloser       io.Closer // closes the original body after writing
	ResponseToHEAD   bool      // response to a HEAD request: headers only, no body
	ContentLength    int64     // -1 means unknown, 0 means exactly none
	Close            bool
	TransferEncoding []string // e.g. ["chunked"] when length is unknown
	Trailer          Header   // trailers; only valid with chunked encoding
	IsResponse       bool
}
// newTransferWriter extracts the transfer-relevant fields from a *Request or
// *Response and sanitizes the Body/ContentLength/TransferEncoding/Trailer
// combination per HTTP semantics, without mutating the caller's object.
func newTransferWriter(r interface{}) (t *transferWriter, err error) {
	t = &transferWriter{}

	// Extract relevant fields
	atLeastHTTP11 := false
	switch rr := r.(type) {
	case *Request:
		if rr.ContentLength != 0 && rr.Body == nil {
			return nil, fmt.Errorf("http: Request.ContentLength=%d with nil Body", rr.ContentLength)
		}
		t.Method = valueOrDefault(rr.Method, "GET")
		t.Body = rr.Body
		t.BodyCloser = rr.Body
		t.ContentLength = rr.ContentLength
		t.Close = rr.Close
		t.TransferEncoding = rr.TransferEncoding
		t.Trailer = rr.Trailer
		atLeastHTTP11 = rr.ProtoAtLeast(1, 1)
		if t.Body != nil && len(t.TransferEncoding) == 0 && atLeastHTTP11 {
			if t.ContentLength == 0 {
				// Test to see if it's actually zero or just unset.
				var buf [1]byte
				n, rerr := io.ReadFull(t.Body, buf[:])
				if rerr != nil && rerr != io.EOF {
					// Probe read failed: propagate the error on first Read.
					t.ContentLength = -1
					t.Body = errorReader{rerr}
				} else if n == 1 {
					// Oh, guess there is data in this Body Reader after all.
					// The ContentLength field just wasn't set.
					// Stich the Body back together again, re-attaching our
					// consumed byte.
					t.ContentLength = -1
					t.Body = io.MultiReader(bytes.NewReader(buf[:]), t.Body)
				} else {
					// Body is actually empty.
					t.Body = nil
					t.BodyCloser = nil
				}
			}
			if t.ContentLength < 0 {
				// Unknown length on HTTP/1.1: fall back to chunked encoding.
				t.TransferEncoding = []string{"chunked"}
			}
		}
	case *Response:
		t.IsResponse = true
		if rr.Request != nil {
			t.Method = rr.Request.Method
		}
		t.Body = rr.Body
		t.BodyCloser = rr.Body
		t.ContentLength = rr.ContentLength
		t.Close = rr.Close
		t.TransferEncoding = rr.TransferEncoding
		t.Trailer = rr.Trailer
		atLeastHTTP11 = rr.ProtoAtLeast(1, 1)
		t.ResponseToHEAD = noBodyExpected(t.Method)
	}

	// Sanitize Body,ContentLength,TransferEncoding
	if t.ResponseToHEAD {
		// HEAD responses advertise lengths/encodings but carry no body.
		t.Body = nil
		if chunked(t.TransferEncoding) {
			t.ContentLength = -1
		}
	} else {
		if !atLeastHTTP11 || t.Body == nil {
			// HTTP/1.0 (or bodyless message) cannot use transfer encodings.
			t.TransferEncoding = nil
		}
		if chunked(t.TransferEncoding) {
			t.ContentLength = -1
		} else if t.Body == nil { // no chunking, no body
			t.ContentLength = 0
		}
	}

	// Sanitize Trailer
	if !chunked(t.TransferEncoding) {
		// Trailers are only transmittable with chunked encoding.
		t.Trailer = nil
	}

	return t, nil
}
// noBodyExpected reports whether a response to the given request method
// must not carry a message body (currently only HEAD).
func noBodyExpected(requestMethod string) bool {
	switch requestMethod {
	case "HEAD":
		return true
	default:
		return false
	}
}
// shouldSendContentLength reports whether a Content-Length header should be
// written, based on the sanitized (ContentLength, TransferEncoding, Method)
// fields. Chunked encoding and Content-Length are mutually exclusive.
func (t *transferWriter) shouldSendContentLength() bool {
	if chunked(t.TransferEncoding) {
		return false
	}
	if t.ContentLength > 0 {
		return true
	}
	// Unknown length: cannot state one.
	if t.ContentLength < 0 {
		return false
	}
	// Many servers expect a Content-Length for these methods
	if t.Method == "POST" || t.Method == "PUT" {
		return true
	}
	// Explicit "identity" encoding with zero length: send "Content-Length: 0"
	// except for GET/HEAD, where an explicit zero would be unusual.
	if t.ContentLength == 0 && isIdentity(t.TransferEncoding) {
		if t.Method == "GET" || t.Method == "HEAD" {
			return false
		}
		return true
	}
	return false
}
// WriteHeader writes the framing headers derived from the sanitized fields:
// Connection: close, Content-Length or Transfer-Encoding, and the Trailer
// announcement. It does not write the caller's other headers.
func (t *transferWriter) WriteHeader(w io.Writer) error {
	if t.Close {
		if _, err := io.WriteString(w, "Connection: close\r\n"); err != nil {
			return err
		}
	}
	// Write Content-Length and/or Transfer-Encoding whose values are a
	// function of the sanitized field triple (Body, ContentLength,
	// TransferEncoding)
	if t.shouldSendContentLength() {
		if _, err := io.WriteString(w, "Content-Length: "); err != nil {
			return err
		}
		if _, err := io.WriteString(w, strconv.FormatInt(t.ContentLength, 10)+"\r\n"); err != nil {
			return err
		}
	} else if chunked(t.TransferEncoding) {
		if _, err := io.WriteString(w, "Transfer-Encoding: chunked\r\n"); err != nil {
			return err
		}
	}
	// Write Trailer header, rejecting keys that are forbidden in trailers.
	if t.Trailer != nil {
		keys := make([]string, 0, len(t.Trailer))
		for k := range t.Trailer {
			k = CanonicalHeaderKey(k)
			switch k {
			case "Transfer-Encoding", "Trailer", "Content-Length":
				return &badStringError{"invalid Trailer key", k}
			}
			keys = append(keys, k)
		}
		if len(keys) > 0 {
			// Sorted for deterministic output.
			sort.Strings(keys)
			// TODO: could do better allocation-wise here, but trailers are rare,
			// so being lazy for now.
			if _, err := io.WriteString(w, "Trailer: "+strings.Join(keys, ",")+"\r\n"); err != nil {
				return err
			}
		}
	}
	return nil
}
// WriteBody writes the message body to w in the negotiated framing
// (chunked, fixed length, or until-EOF), closes the BodyCloser, and, for
// chunked messages, writes the trailer and the terminating CRLF. It returns
// an error if a declared ContentLength does not match the bytes copied.
func (t *transferWriter) WriteBody(w io.Writer) error {
	var err error
	var ncopy int64
	// Write body
	if t.Body != nil {
		if chunked(t.TransferEncoding) {
			// For client requests, flush after each chunk so servers
			// streaming the request see data promptly.
			if bw, ok := w.(*bufio.Writer); ok && !t.IsResponse {
				w = &internal.FlushAfterChunkWriter{bw}
			}
			cw := internal.NewChunkedWriter(w)
			_, err = io.Copy(cw, t.Body)
			if err == nil {
				err = cw.Close()
			}
		} else if t.ContentLength == -1 {
			ncopy, err = io.Copy(w, t.Body)
		} else {
			// Declared length: copy exactly that many bytes, then drain any
			// excess so ncopy reflects the body's true size for the check below.
			ncopy, err = io.Copy(w, io.LimitReader(t.Body, t.ContentLength))
			if err != nil {
				return err
			}
			var nextra int64
			nextra, err = io.Copy(ioutil.Discard, t.Body)
			ncopy += nextra
		}
		if err != nil {
			return err
		}
		if err = t.BodyCloser.Close(); err != nil {
			return err
		}
	}
	if !t.ResponseToHEAD && t.ContentLength != -1 && t.ContentLength != ncopy {
		return fmt.Errorf("http: ContentLength=%d with Body length %d",
			t.ContentLength, ncopy)
	}
	if chunked(t.TransferEncoding) {
		// Write Trailer header
		if t.Trailer != nil {
			if err := t.Trailer.Write(w); err != nil {
				return err
			}
		}
		// Last chunk, empty trailer
		_, err = io.WriteString(w, "\r\n")
	}
	return err
}
// transferReader holds the inputs and the parsed framing outputs of
// readTransfer for an incoming Request or Response.
type transferReader struct {
	// Input
	Header        Header
	StatusCode    int
	RequestMethod string
	ProtoMajor    int
	ProtoMinor    int
	// Output
	Body             io.ReadCloser
	ContentLength    int64
	TransferEncoding []string
	Close            bool
	Trailer          Header
}
// protoAtLeast reports whether the message's HTTP version is m.n or higher.
func (t *transferReader) protoAtLeast(m, n int) bool {
	return t.ProtoMajor > m || (t.ProtoMajor == m && t.ProtoMinor >= n)
}
// bodyAllowedForStatus reports whether a given response status code
// permits a body. See RFC 2616, section 4.4: 1xx, 204 and 304 responses
// never carry a message body.
func bodyAllowedForStatus(status int) bool {
	if status >= 100 && status <= 199 {
		return false
	}
	return status != 204 && status != 304
}
// Header sets that must be stripped from responses whose status forbids
// an entity body (RFC 2616).
var (
	suppressedHeaders304    = []string{"Content-Type", "Content-Length", "Transfer-Encoding"}
	suppressedHeadersNoBody = []string{"Content-Length", "Transfer-Encoding"}
)

// suppressedHeaders returns the header names to omit for the given response
// status, or nil when no suppression applies.
func suppressedHeaders(status int) []string {
	switch {
	case status == 304:
		// RFC 2616 section 10.3.5: "the response MUST NOT include other entity-headers"
		return suppressedHeaders304
	case !bodyAllowedForStatus(status):
		return suppressedHeadersNoBody
	}
	return nil
}
// readTransfer parses the transfer framing (Transfer-Encoding,
// Content-Length, Trailer, connection-close semantics) of msg, which must be
// a *Request or *Response, and installs the resulting Body, ContentLength,
// TransferEncoding, Close and Trailer fields back onto msg. r is the buffered
// wire reader the body will be consumed from.
func readTransfer(msg interface{}, r *bufio.Reader) (err error) {
	t := &transferReader{RequestMethod: "GET"}
	// Unify input
	isResponse := false
	switch rr := msg.(type) {
	case *Response:
		t.Header = rr.Header
		t.StatusCode = rr.StatusCode
		t.ProtoMajor = rr.ProtoMajor
		t.ProtoMinor = rr.ProtoMinor
		t.Close = shouldClose(t.ProtoMajor, t.ProtoMinor, t.Header, true)
		isResponse = true
		if rr.Request != nil {
			t.RequestMethod = rr.Request.Method
		}
	case *Request:
		t.Header = rr.Header
		t.RequestMethod = rr.Method
		t.ProtoMajor = rr.ProtoMajor
		t.ProtoMinor = rr.ProtoMinor
		// Transfer semantics for Requests are exactly like those for
		// Responses with status code 200, responding to a GET method
		t.StatusCode = 200
		t.Close = rr.Close
	default:
		panic("unexpected type")
	}
	// Default to HTTP/1.1
	if t.ProtoMajor == 0 && t.ProtoMinor == 0 {
		t.ProtoMajor, t.ProtoMinor = 1, 1
	}
	// Transfer encoding, content length
	err = t.fixTransferEncoding()
	if err != nil {
		return err
	}
	realLength, err := fixLength(isResponse, t.StatusCode, t.RequestMethod, t.Header, t.TransferEncoding)
	if err != nil {
		return err
	}
	if isResponse && t.RequestMethod == "HEAD" {
		// A HEAD response advertises the length the GET would have had,
		// but carries no body; report the advertised value.
		if n, err := parseContentLength(t.Header.get("Content-Length")); err != nil {
			return err
		} else {
			t.ContentLength = n
		}
	} else {
		t.ContentLength = realLength
	}
	// Trailer
	t.Trailer, err = fixTrailer(t.Header, t.TransferEncoding)
	if err != nil {
		return err
	}
	// If there is no Content-Length or chunked Transfer-Encoding on a *Response
	// and the status is not 1xx, 204 or 304, then the body is unbounded.
	// See RFC2616, section 4.4.
	switch msg.(type) {
	case *Response:
		if realLength == -1 &&
			!chunked(t.TransferEncoding) &&
			bodyAllowedForStatus(t.StatusCode) {
			// Unbounded body.
			t.Close = true
		}
	}
	// Prepare body reader. ContentLength < 0 means chunked encoding
	// or close connection when finished, since multipart is not supported yet
	switch {
	case chunked(t.TransferEncoding):
		if noBodyExpected(t.RequestMethod) {
			t.Body = eofReader
		} else {
			t.Body = &body{src: internal.NewChunkedReader(r), hdr: msg, r: r, closing: t.Close}
		}
	case realLength == 0:
		t.Body = eofReader
	case realLength > 0:
		t.Body = &body{src: io.LimitReader(r, realLength), closing: t.Close}
	default:
		// realLength < 0, i.e. "Content-Length" not mentioned in header
		if t.Close {
			// Close semantics (i.e. HTTP/1.0)
			t.Body = &body{src: r, closing: t.Close}
		} else {
			// Persistent connection (i.e. HTTP/1.1)
			t.Body = eofReader
		}
	}
	// Unify output
	switch rr := msg.(type) {
	case *Request:
		rr.Body = t.Body
		rr.ContentLength = t.ContentLength
		rr.TransferEncoding = t.TransferEncoding
		rr.Close = t.Close
		rr.Trailer = t.Trailer
	case *Response:
		rr.Body = t.Body
		rr.ContentLength = t.ContentLength
		rr.TransferEncoding = t.TransferEncoding
		rr.Close = t.Close
		rr.Trailer = t.Trailer
	}
	return nil
}
// chunked reports whether "chunked" leads the transfer-encoding stack.
func chunked(te []string) bool {
	if len(te) == 0 {
		return false
	}
	return te[0] == "chunked"
}

// isIdentity reports whether the encoding stack is exactly ["identity"].
func isIdentity(te []string) bool {
	if len(te) != 1 {
		return false
	}
	return te[0] == "identity"
}
// fixTransferEncoding sanitizes t.TransferEncoding, if needed.
//
// It removes the Transfer-Encoding header from t.Header, validates the
// listed encodings (only "chunked" and the no-op "identity" are supported),
// and records the result in t.TransferEncoding. When chunked encoding is
// present, any Content-Length header is dropped per RFC 7230 section 3.3.2.
// It returns a *badStringError for unsupported or over-long encoding lists.
func (t *transferReader) fixTransferEncoding() error {
	raw, present := t.Header["Transfer-Encoding"]
	if !present {
		return nil
	}
	delete(t.Header, "Transfer-Encoding")
	// Issue 12785; ignore Transfer-Encoding on HTTP/1.0 requests.
	if !t.protoAtLeast(1, 1) {
		return nil
	}
	encodings := strings.Split(raw[0], ",")
	te := make([]string, 0, len(encodings))
	// TODO: Even though we only support "identity" and "chunked"
	// encodings, the loop below is designed with foresight. One
	// invariant that must be maintained is that, if present,
	// chunked encoding must always come first.
	for _, encoding := range encodings {
		encoding = strings.ToLower(strings.TrimSpace(encoding))
		// "identity" encoding is not recorded
		if encoding == "identity" {
			break
		}
		if encoding != "chunked" {
			return &badStringError{"unsupported transfer encoding", encoding}
		}
		// Idiomatic append instead of the former manual re-slice into
		// capacity (te = te[0:len(te)+1]); behavior is identical since
		// cap(te) == len(encodings).
		te = append(te, encoding)
	}
	if len(te) > 1 {
		return &badStringError{"too many transfer encodings", strings.Join(te, ",")}
	}
	if len(te) > 0 {
		// RFC 7230 3.3.2 says "A sender MUST NOT send a
		// Content-Length header field in any message that
		// contains a Transfer-Encoding header field."
		//
		// but also:
		// "If a message is received with both a
		// Transfer-Encoding and a Content-Length header
		// field, the Transfer-Encoding overrides the
		// Content-Length. Such a message might indicate an
		// attempt to perform request smuggling (Section 9.5)
		// or response splitting (Section 9.4) and ought to be
		// handled as an error. A sender MUST remove the
		// received Content-Length field prior to forwarding
		// such a message downstream."
		//
		// Reportedly, these appear in the wild.
		delete(t.Header, "Content-Length")
		t.TransferEncoding = te
		return nil
	}
	return nil
}
// fixLength determines the expected body length, using RFC 2616 Section 4.4.
// It returns 0 for definitely-empty bodies, a non-negative declared length,
// or -1 when the length is unknown (chunked, or read-until-close). This
// function is not a method, because ultimately it should be shared by
// ReadResponse and ReadRequest.
func fixLength(isResponse bool, status int, requestMethod string, header Header, te []string) (int64, error) {
	contentLens := header["Content-Length"]
	isRequest := !isResponse
	// Logic based on response type or status
	if noBodyExpected(requestMethod) {
		// For HTTP requests, as part of hardening against request
		// smuggling (RFC 7230), don't allow a Content-Length header for
		// methods which don't permit bodies. As an exception, allow
		// exactly one Content-Length header if its value is "0".
		if isRequest && len(contentLens) > 0 && !(len(contentLens) == 1 && contentLens[0] == "0") {
			return 0, fmt.Errorf("http: method cannot contain a Content-Length; got %q", contentLens)
		}
		return 0, nil
	}
	if status/100 == 1 {
		return 0, nil
	}
	switch status {
	case 204, 304:
		return 0, nil
	}
	if len(contentLens) > 1 {
		// harden against HTTP request smuggling. See RFC 7230.
		return 0, errors.New("http: message cannot contain multiple Content-Length headers")
	}
	// Logic based on Transfer-Encoding: chunked overrides Content-Length.
	if chunked(te) {
		return -1, nil
	}
	// Logic based on Content-Length
	var cl string
	if len(contentLens) == 1 {
		cl = strings.TrimSpace(contentLens[0])
	}
	if cl != "" {
		n, err := parseContentLength(cl)
		if err != nil {
			return -1, err
		}
		return n, nil
	} else {
		// Empty or whitespace-only value: drop the malformed header.
		header.Del("Content-Length")
	}
	if !isResponse {
		// RFC 2616 neither explicitly permits nor forbids an
		// entity-body on a GET request so we permit one if
		// declared, but we default to 0 here (not -1 below)
		// if there's no mention of a body.
		// Likewise, all other request methods are assumed to have
		// no body if neither Transfer-Encoding chunked nor a
		// Content-Length are set.
		return 0, nil
	}
	// Body-EOF logic based on other methods (like closing, or chunked coding)
	return -1, nil
}
// shouldClose determines whether to hang up after sending a request and
// body, or receiving a response and body, based on the HTTP version and the
// Connection header. 'header' is the request headers. When removeCloseHeader
// is true, a recognized "Connection: close" is deleted from the header map.
func shouldClose(major, minor int, header Header, removeCloseHeader bool) bool {
	if major < 1 {
		// Pre-HTTP/1.0: connections are always one-shot.
		return true
	} else if major == 1 && minor == 0 {
		// HTTP/1.0 defaults to close unless keep-alive is negotiated.
		vv := header["Connection"]
		if headerValuesContainsToken(vv, "close") || !headerValuesContainsToken(vv, "keep-alive") {
			return true
		}
		return false
	} else {
		// HTTP/1.1+ defaults to persistent connections.
		if headerValuesContainsToken(header["Connection"], "close") {
			if removeCloseHeader {
				header.Del("Connection")
			}
			return true
		}
	}
	return false
}
// fixTrailer parses and removes the Trailer header, returning a Header whose
// keys are the announced trailer names (values are filled in later by
// readTrailer). It rejects forbidden trailer keys and a Trailer header on a
// non-chunked message.
func fixTrailer(header Header, te []string) (Header, error) {
	vv, ok := header["Trailer"]
	if !ok {
		return nil, nil
	}
	header.Del("Trailer")
	trailer := make(Header)
	var err error
	for _, v := range vv {
		foreachHeaderElement(v, func(key string) {
			key = CanonicalHeaderKey(key)
			switch key {
			case "Transfer-Encoding", "Trailer", "Content-Length":
				// Remember only the first offending key.
				if err == nil {
					err = &badStringError{"bad trailer key", key}
					return
				}
			}
			trailer[key] = nil
		})
	}
	if err != nil {
		return nil, err
	}
	if len(trailer) == 0 {
		return nil, nil
	}
	if !chunked(te) {
		// Trailer and no chunking
		return nil, ErrUnexpectedTrailer
	}
	return trailer, nil
}
// body turns a Reader into a ReadCloser.
// Close ensures that the body has been fully read
// and then reads the trailer if necessary.
type body struct {
	src          io.Reader
	hdr          interface{}   // non-nil (Response or Request) value means read trailer
	r            *bufio.Reader // underlying wire-format reader for the trailer
	closing      bool          // is the connection to be closed after reading body?
	doEarlyClose bool          // whether Close should stop early
	mu           sync.Mutex    // guards closed, and calls to Read and Close
	sawEOF       bool          // src returned io.EOF (and any trailer was handled)
	closed       bool          // Close was called; further Reads fail
	earlyClose   bool          // Close called and we didn't read to the end of src
}
// ErrBodyReadAfterClose is returned when reading a Request or Response
// Body after the body has been closed. This typically happens when the body is
// read after an HTTP Handler calls WriteHeader or Write on its
// ResponseWriter.
var ErrBodyReadAfterClose = errors.New("http: invalid Read on closed Body")

// Read serializes reads against Close via b.mu and rejects reads on a
// closed body.
func (b *body) Read(p []byte) (n int, err error) {
	b.mu.Lock()
	defer b.mu.Unlock()
	if b.closed {
		return 0, ErrBodyReadAfterClose
	}
	return b.readLocked(p)
}
// readLocked reads from src, handling the EOF transition: consuming the
// trailer for chunked bodies, detecting truncated fixed-length bodies, and
// eagerly reporting EOF alongside the final bytes. Must hold b.mu.
func (b *body) readLocked(p []byte) (n int, err error) {
	if b.sawEOF {
		return 0, io.EOF
	}
	n, err = b.src.Read(p)
	if err == io.EOF {
		b.sawEOF = true
		// Chunked case. Read the trailer.
		if b.hdr != nil {
			if e := b.readTrailer(); e != nil {
				err = e
				// Something went wrong in the trailer, we must not allow any
				// further reads of any kind to succeed from body, nor any
				// subsequent requests on the server connection. See
				// golang.org/issue/12027
				b.sawEOF = false
				b.closed = true
			}
			b.hdr = nil
		} else {
			// If the server declared the Content-Length, our body is a LimitedReader
			// and we need to check whether this EOF arrived early.
			if lr, ok := b.src.(*io.LimitedReader); ok && lr.N > 0 {
				err = io.ErrUnexpectedEOF
			}
		}
	}
	// If we can return an EOF here along with the read data, do
	// so. This is optional per the io.Reader contract, but doing
	// so helps the HTTP transport code recycle its connection
	// earlier (since it will see this EOF itself), even if the
	// client doesn't do future reads or Close.
	if err == nil && n > 0 {
		if lr, ok := b.src.(*io.LimitedReader); ok && lr.N == 0 {
			err = io.EOF
			b.sawEOF = true
		}
	}
	return n, err
}
var (
singleCRLF = []byte("\r\n")
doubleCRLF = []byte("\r\n\r\n")
)
func seeUpcomingDoubleCRLF(r *bufio.Reader) bool {
for peekSize := 4; ; peekSize++ {
// This loop stops when Peek returns an error,
// which it does when r's buffer has been filled.
buf, err := r.Peek(peekSize)
if bytes.HasSuffix(buf, doubleCRLF) {
return true
}
if err != nil {
break
}
}
return false
}
var errTrailerEOF = errors.New("http: unexpected EOF reading trailer")

// readTrailer consumes the trailer section after a chunked body: either the
// bare terminating CRLF (the common case) or a MIME-style header block,
// which is merged into the Request/Response Trailer map.
func (b *body) readTrailer() error {
	// The common case, since nobody uses trailers.
	buf, err := b.r.Peek(2)
	if bytes.Equal(buf, singleCRLF) {
		b.r.Discard(2)
		return nil
	}
	if len(buf) < 2 {
		return errTrailerEOF
	}
	if err != nil {
		return err
	}
	// Make sure there's a header terminator coming up, to prevent
	// a DoS with an unbounded size Trailer. It's not easy to
	// slip in a LimitReader here, as textproto.NewReader requires
	// a concrete *bufio.Reader. Also, we can't get all the way
	// back up to our conn's LimitedReader that *might* be backing
	// this bufio.Reader. Instead, a hack: we iteratively Peek up
	// to the bufio.Reader's max size, looking for a double CRLF.
	// This limits the trailer to the underlying buffer size, typically 4kB.
	if !seeUpcomingDoubleCRLF(b.r) {
		return errors.New("http: suspiciously long trailer after chunked body")
	}
	hdr, err := textproto.NewReader(b.r).ReadMIMEHeader()
	if err != nil {
		if err == io.EOF {
			return errTrailerEOF
		}
		return err
	}
	switch rr := b.hdr.(type) {
	case *Request:
		mergeSetHeader(&rr.Trailer, Header(hdr))
	case *Response:
		mergeSetHeader(&rr.Trailer, Header(hdr))
	}
	return nil
}
// mergeSetHeader copies every key/value slice from src into *dst,
// overwriting existing keys. A nil destination simply adopts src.
func mergeSetHeader(dst *Header, src Header) {
	if *dst == nil {
		*dst = src
		return
	}
	m := *dst
	for key, values := range src {
		m[key] = values
	}
}
// unreadDataSizeLocked returns the number of bytes of unread input.
// It returns -1 if unknown (e.g. chunked or until-close bodies, which
// are not backed by an *io.LimitedReader).
// b.mu must be held.
func (b *body) unreadDataSizeLocked() int64 {
	if lr, ok := b.src.(*io.LimitedReader); ok {
		return lr.N
	}
	return -1
}
// Close marks the body closed and, depending on configuration, drains the
// remaining body (and trailer) so the underlying connection can be reused.
// With doEarlyClose set it gives up after maxPostHandlerReadBytes and
// records earlyClose instead of draining arbitrarily large bodies.
func (b *body) Close() error {
	b.mu.Lock()
	defer b.mu.Unlock()
	if b.closed {
		return nil
	}
	var err error
	switch {
	case b.sawEOF:
		// Already saw EOF, so no need going to look for it.
	case b.hdr == nil && b.closing:
		// no trailer and closing the connection next.
		// no point in reading to EOF.
	case b.doEarlyClose:
		// Read up to maxPostHandlerReadBytes bytes of the body, looking
		// for EOF (and trailers), so we can re-use this connection.
		if lr, ok := b.src.(*io.LimitedReader); ok && lr.N > maxPostHandlerReadBytes {
			// There was a declared Content-Length, and we have more bytes remaining
			// than our maxPostHandlerReadBytes tolerance. So, give up.
			b.earlyClose = true
		} else {
			var n int64
			// Consume the body, or, which will also lead to us reading
			// the trailer headers after the body, if present.
			n, err = io.CopyN(ioutil.Discard, bodyLocked{b}, maxPostHandlerReadBytes)
			if err == io.EOF {
				err = nil
			}
			if n == maxPostHandlerReadBytes {
				b.earlyClose = true
			}
		}
	default:
		// Fully consume the body, which will also lead to us reading
		// the trailer headers after the body, if present.
		_, err = io.Copy(ioutil.Discard, bodyLocked{b})
	}
	b.closed = true
	return err
}
// didEarlyClose reports whether Close gave up before reaching the end of
// the body (see the doEarlyClose handling in Close).
func (b *body) didEarlyClose() bool {
	b.mu.Lock()
	defer b.mu.Unlock()
	return b.earlyClose
}
// bodyLocked is a io.Reader reading from a *body when its mutex is
// already held (used by Close while draining the body).
type bodyLocked struct {
	b *body
}

// Read forwards to readLocked without re-acquiring b.mu.
func (bl bodyLocked) Read(p []byte) (n int, err error) {
	if bl.b.closed {
		return 0, ErrBodyReadAfterClose
	}
	return bl.b.readLocked(p)
}
// parseContentLength trims whitespace from cl and returns -1 if no value
// is set, the parsed value if it's a valid non-negative integer, or an
// error otherwise.
func parseContentLength(cl string) (int64, error) {
	trimmed := strings.TrimSpace(cl)
	if trimmed == "" {
		return -1, nil
	}
	n, err := strconv.ParseInt(trimmed, 10, 64)
	if err != nil || n < 0 {
		return 0, &badStringError{"bad Content-Length", trimmed}
	}
	return n, nil
}
| roth1002/go | src/net/http/transfer.go | GO | bsd-3-clause | 22,421 |
// Copyright 2008 Peter William Birch <birchb@genyis.org>
//
// This software may be used and distributed according to the terms
// of the Genyris License, in the file "LICENSE", incorporated herein by reference.
//
package org.genyris.core;
import java.io.StringWriter;
import org.genyris.exception.AccessException;
import org.genyris.exception.GenyrisException;
import org.genyris.format.AbstractFormatter;
import org.genyris.format.BasicFormatter;
import org.genyris.interp.Closure;
import org.genyris.interp.Environment;
import org.genyris.interp.PairEnvironment;
import org.genyris.interp.UnboundException;
/**
 * A Lisp-style cons cell: an expression pair with a head (car) and a tail
 * (cdr). Beyond storage, this class implements procedure application in
 * {@link #eval}, including tail-call trampolining via {@link Biscuit} and a
 * fallback to the sys:procedure-missing hook for unbound procedure names.
 *
 * Changes from the previous revision: removed two stray empty statements
 * (";") in setCar/setCdr; added documentation. No behavioral change.
 */
public class Pair extends ExpWithEmbeddedClasses {
    private Exp _car;
    private Exp _cdr;
    // Guards against infinite recursion when sys:procedure-missing itself
    // hits an unbound procedure.
    private static boolean alreadyInProcedureMissing = false; // TODO: Not re-entrant
    public Pair(Exp car, Exp cdr) {
        _car = car;
        _cdr = cdr;
    }
    public Symbol getBuiltinClassSymbol(Internable table) {
        return table.PAIR();
    }
    public void acceptVisitor(Visitor guest) throws GenyrisException {
        guest.visitPair(this);
    }
    /** Structural equality: both car and cdr must be equal. */
    public boolean equals(Object compare) {
        if (!(compare instanceof Pair))
            return false;
        else
            return this._car.equals(((Pair) compare)._car)
                    && this._cdr.equals(((Pair) compare)._cdr);
    }
    public boolean isPair() {
        return true;
    }
    public Exp car() {
        return _car;
    }
    public Exp cdr() {
        return _cdr;
    }
    /** Replaces the head and returns this pair for chaining. */
    public Exp setCar(Exp exp) {
        this._car = exp;
        return this;
    }
    /** Replaces the tail and returns this pair for chaining. */
    public Exp setCdr(Exp exp) {
        this._cdr = exp;
        return this;
    }
    /** Renders the list using the basic formatter; on failure returns the error text. */
    public String toString() {
        StringWriter buffer = new StringWriter();
        AbstractFormatter formatter = new BasicFormatter(buffer);
        try {
            this.acceptVisitor(formatter);
        } catch (GenyrisException e) {
            return e.getMessage();
        }
        return buffer.toString();
    }
    // Kept consistent with equals(): equal pairs produce equal hashes.
    public int hashCode() {
        return _car.hashCode() + _cdr.hashCode();
    }
    /**
     * Applies this list as a procedure call: the car evaluates to a Closure,
     * the cdr supplies the arguments. Unbound procedure names are retried
     * through sys:procedure-missing (with the original name prepended to the
     * arguments). A {@link Biscuit} result is a trampoline request: the
     * wrapped expression is evaluated in this loop to avoid deep recursion
     * for tail calls.
     */
    public Exp eval(Environment env) throws GenyrisException {
        Closure proc = null;
        Exp[] arguments = null;
        Exp toEvaluate = this;
        Exp retval = env.getNil();
        do {
            try {
                proc = (Closure) (toEvaluate.car().eval(env));
            } catch (UnboundException e1) {
                try {
                    // Is there are sys:procedure-missing defined?
                    proc = env.getSymbolTable().PROCEDUREMISSING()
                            .lookupVariableValue(env);
                } catch (UnboundException e2) {
                    // no - just throw exception
                    throw e1;
                }
                if( alreadyInProcedureMissing ) {
                    // protect user by catching undefineds in a sys:procedure-missing
                    alreadyInProcedureMissing = false;
                    throw new GenyrisException("Unbound symbol within "
                            + env.getSymbolTable().PROCEDUREMISSING() + " "
                            + e1.getMessage());
                }
                // Now process the missing function logic...
                alreadyInProcedureMissing = true;
                try {
                    arguments = prependArgument(toEvaluate.car(),
                            proc.computeArguments(env, toEvaluate.cdr()));
                    retval = proc.applyFunction(env, arguments);
                } catch (GenyrisException e3) {
                    // re-throw; the finally block clears the guard flag
                    throw e3;
                } finally {
                    alreadyInProcedureMissing = false;
                }
                // Process a trampoline if returned...
                if(retval instanceof Biscuit) {
                    toEvaluate = ((Biscuit)retval).getExpression();
                    if( ! (toEvaluate instanceof Pair) ) {
                        // can only use this do-while loop for expressions,
                        // have to use function call for all others.
                        return toEvaluate.eval(env);
                    }
                }
                return retval;
            }
            arguments = proc.computeArguments(env, toEvaluate.cdr());
            retval = proc.applyFunction(env, arguments);
            if(retval instanceof Biscuit) {
                toEvaluate = ((Biscuit)retval).getExpression();
                if( ! (toEvaluate instanceof Pair) ) {
                    // can only use this do-while loop for expressions,
                    // have to use function call for all others.
                    return toEvaluate.eval(env);
                }
            }
        } while (retval instanceof Biscuit);
        return retval;
    }
    /** Returns a new argument array with firstArg inserted at index 0. */
    private Exp[] prependArgument(Exp firstArg, Exp[] tmparguments)
            throws GenyrisException {
        Exp[] arguments = new Exp[tmparguments.length + 1];
        arguments[0] = firstArg;
        for (int i = 0; i < tmparguments.length; i++) {
            arguments[i + 1] = tmparguments[i];
        }
        return arguments;
    }
    /** Evaluates each expression in the list, returning the last result. */
    public Exp evalSequence(Environment env) throws GenyrisException {
        SimpleSymbol NIL = env.getNil();
        Exp body = this;
        if (body.cdr() == NIL) {
            return body.car().eval(env);
        } else {
            body.car().eval(env);
            return body.cdr().evalSequence(env);
        }
    }
    /** Counts the pairs in this (possibly improper) list. */
    public int length(Symbol NIL) throws AccessException {
        Exp tmp = this;
        int count = 0;
        while (tmp != NIL && (tmp instanceof Pair)) {
            tmp = tmp.cdr();
            count++;
        }
        return count;
    }
    /** Returns the zero-based nth element, or throws if the list is too short. */
    public Exp nth(int number, Symbol NIL) throws AccessException {
        Exp tmp = this;
        int count = 0;
        while (tmp != NIL) {
            if (count == number) {
                return tmp.car();
            }
            tmp = tmp.cdr();
            count++;
        }
        throw new AccessException("nth could not find item: " + number);
    }
    public Environment makeEnvironment(Environment parent) throws GenyrisException {
        return new PairEnvironment(parent, this);
    }
    /** Returns a freshly-consed reversal of list; NIL reverses to itself. */
    public static Exp reverse(Exp list, Exp NIL) throws GenyrisException {
        if (list.isNil()) {
            return list;
        }
        if (list instanceof Pair) {
            Exp rev_result = NIL;
            while (list != NIL) {
                rev_result = new Pair(list.car(), rev_result);
                list = list.cdr();
            }
            return (rev_result);
        } else {
            throw new GenyrisException("reverse: not a list: " + list);
        }
    }
    // Convenience constructors for short proper lists.
    public static Exp cons(Exp a, Exp b) {
        return new Pair(a, b);
    }
    public static Exp cons2(Exp a, Exp b, Exp NIL) {
        return new Pair(a, new Pair(b, NIL));
    }
    public static Exp cons3(Exp a, Exp b, Exp c, Exp NIL) {
        return new Pair(a, new Pair(b, new Pair(c, NIL)));
    }
    public static Exp cons4(Exp a, Exp b, Exp c, Exp d, Exp NIL) {
        return new Pair(a, new Pair(b, new Pair(c, new Pair(d, NIL))));
    }
    /** Adds the "left"/"right" accessors to the inherited member directory. */
    public Exp dir(Internable table) {
        return Pair.cons2(new DynamicSymbol(table.LEFT()),
                new DynamicSymbol(table.RIGHT()), super.dir(table));
    }
}
| ravikumar10/genyris | src/org/genyris/core/Pair.java | Java | bsd-3-clause | 7,449 |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/videointelligence/v1beta1/video_intelligence.proto
namespace Google\Cloud\Videointelligence\V1beta1;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
* Video context and/or feature-specific parameters.
*
* Generated from protobuf message <code>google.cloud.videointelligence.v1beta1.VideoContext</code>
*/
class VideoContext extends \Google\Protobuf\Internal\Message
{
/**
* Video segments to annotate. The segments may overlap and are not required
* to be contiguous or span the whole video. If unspecified, each video
* is treated as a single segment.
*
* Generated from protobuf field <code>repeated .google.cloud.videointelligence.v1beta1.VideoSegment segments = 1;</code>
*/
private $segments;
/**
* If label detection has been requested, what labels should be detected
* in addition to video-level labels or segment-level labels. If unspecified,
* defaults to `SHOT_MODE`.
*
* Generated from protobuf field <code>.google.cloud.videointelligence.v1beta1.LabelDetectionMode label_detection_mode = 2;</code>
*/
private $label_detection_mode = 0;
/**
* Whether the video has been shot from a stationary (i.e. non-moving) camera.
* When set to true, might improve detection accuracy for moving objects.
*
* Generated from protobuf field <code>bool stationary_camera = 3;</code>
*/
private $stationary_camera = false;
/**
* Model to use for label detection.
* Supported values: "latest" and "stable" (the default).
*
* Generated from protobuf field <code>string label_detection_model = 4;</code>
*/
private $label_detection_model = '';
/**
* Model to use for face detection.
* Supported values: "latest" and "stable" (the default).
*
* Generated from protobuf field <code>string face_detection_model = 5;</code>
*/
private $face_detection_model = '';
/**
* Model to use for shot change detection.
* Supported values: "latest" and "stable" (the default).
*
* Generated from protobuf field <code>string shot_change_detection_model = 6;</code>
*/
private $shot_change_detection_model = '';
/**
* Model to use for safe search detection.
* Supported values: "latest" and "stable" (the default).
*
* Generated from protobuf field <code>string safe_search_detection_model = 7;</code>
*/
private $safe_search_detection_model = '';
public function __construct() {
\GPBMetadata\Google\Cloud\Videointelligence\V1Beta1\VideoIntelligence::initOnce();
parent::__construct();
}
/**
* Video segments to annotate. The segments may overlap and are not required
* to be contiguous or span the whole video. If unspecified, each video
* is treated as a single segment.
*
* Generated from protobuf field <code>repeated .google.cloud.videointelligence.v1beta1.VideoSegment segments = 1;</code>
* @return \Google\Protobuf\Internal\RepeatedField
*/
public function getSegments()
{
return $this->segments;
}
/**
* Video segments to annotate. The segments may overlap and are not required
* to be contiguous or span the whole video. If unspecified, each video
* is treated as a single segment.
*
* Generated from protobuf field <code>repeated .google.cloud.videointelligence.v1beta1.VideoSegment segments = 1;</code>
* @param \Google\Cloud\Videointelligence\V1beta1\VideoSegment[]|\Google\Protobuf\Internal\RepeatedField $var
* @return $this
*/
public function setSegments($var)
{
$arr = GPBUtil::checkRepeatedField($var, \Google\Protobuf\Internal\GPBType::MESSAGE, \Google\Cloud\Videointelligence\V1beta1\VideoSegment::class);
$this->segments = $arr;
return $this;
}
/**
* If label detection has been requested, what labels should be detected
* in addition to video-level labels or segment-level labels. If unspecified,
* defaults to `SHOT_MODE`.
*
* Generated from protobuf field <code>.google.cloud.videointelligence.v1beta1.LabelDetectionMode label_detection_mode = 2;</code>
* @return int
*/
public function getLabelDetectionMode()
{
return $this->label_detection_mode;
}
/**
* If label detection has been requested, what labels should be detected
* in addition to video-level labels or segment-level labels. If unspecified,
* defaults to `SHOT_MODE`.
*
* Generated from protobuf field <code>.google.cloud.videointelligence.v1beta1.LabelDetectionMode label_detection_mode = 2;</code>
* @param int $var
* @return $this
*/
public function setLabelDetectionMode($var)
{
GPBUtil::checkEnum($var, \Google\Cloud\Videointelligence\V1beta1\LabelDetectionMode::class);
$this->label_detection_mode = $var;
return $this;
}
/**
* Whether the video has been shot from a stationary (i.e. non-moving) camera.
* When set to true, might improve detection accuracy for moving objects.
*
* Generated from protobuf field <code>bool stationary_camera = 3;</code>
* @return bool
*/
public function getStationaryCamera()
{
return $this->stationary_camera;
}
/**
* Whether the video has been shot from a stationary (i.e. non-moving) camera.
* When set to true, might improve detection accuracy for moving objects.
*
* Generated from protobuf field <code>bool stationary_camera = 3;</code>
* @param bool $var
* @return $this
*/
public function setStationaryCamera($var)
{
GPBUtil::checkBool($var);
$this->stationary_camera = $var;
return $this;
}
/**
* Model to use for label detection.
* Supported values: "latest" and "stable" (the default).
*
* Generated from protobuf field <code>string label_detection_model = 4;</code>
* @return string
*/
public function getLabelDetectionModel()
{
return $this->label_detection_model;
}
/**
* Model to use for label detection.
* Supported values: "latest" and "stable" (the default).
*
* Generated from protobuf field <code>string label_detection_model = 4;</code>
* @param string $var
* @return $this
*/
public function setLabelDetectionModel($var)
{
GPBUtil::checkString($var, True);
$this->label_detection_model = $var;
return $this;
}
/**
* Model to use for face detection.
* Supported values: "latest" and "stable" (the default).
*
* Generated from protobuf field <code>string face_detection_model = 5;</code>
* @return string
*/
public function getFaceDetectionModel()
{
return $this->face_detection_model;
}
/**
* Model to use for face detection.
* Supported values: "latest" and "stable" (the default).
*
* Generated from protobuf field <code>string face_detection_model = 5;</code>
* @param string $var
* @return $this
*/
public function setFaceDetectionModel($var)
{
GPBUtil::checkString($var, True);
$this->face_detection_model = $var;
return $this;
}
/**
 * Model to use for shot change detection.
 * Supported values: "latest" and "stable" (the default).
 *
 * Generated from protobuf field <code>string shot_change_detection_model = 6;</code>
 * @return string
 */
public function getShotChangeDetectionModel()
{
    // Plain accessor; returns the raw stored string.
    return $this->shot_change_detection_model;
}
/**
 * Model to use for shot change detection.
 * Supported values: "latest" and "stable" (the default).
 *
 * Generated from protobuf field <code>string shot_change_detection_model = 6;</code>
 * @param string $var
 * @return $this the message itself, to allow setter chaining
 */
public function setShotChangeDetectionModel($var)
{
    GPBUtil::checkString($var, True); // type-check before assignment
    $this->shot_change_detection_model = $var;
    return $this;
}
/**
 * Model to use for safe search detection.
 * Supported values: "latest" and "stable" (the default).
 *
 * Generated from protobuf field <code>string safe_search_detection_model = 7;</code>
 * @return string
 */
public function getSafeSearchDetectionModel()
{
    // Plain accessor; returns the raw stored string.
    return $this->safe_search_detection_model;
}
/**
 * Model to use for safe search detection.
 * Supported values: "latest" and "stable" (the default).
 *
 * Generated from protobuf field <code>string safe_search_detection_model = 7;</code>
 * @param string $var
 * @return $this the message itself, to allow setter chaining
 */
public function setSafeSearchDetectionModel($var)
{
    GPBUtil::checkString($var, True); // type-check before assignment
    $this->safe_search_detection_model = $var;
    return $this;
}
}
| eoogbe/api-client-staging | generated/php/google-cloud-video-intelligence-v1beta1/proto/src/Google/Cloud/Videointelligence/V1beta1/VideoContext.php | PHP | bsd-3-clause | 9,096 |
/**
* @license
* Copyright 2019 Google LLC.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* Code distributed by Google as part of this project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
import {PolymerElement} from '../deps/@polymer/polymer/polymer-element.js';
import {MessengerMixin} from './arcs-shared.js';
import {html} from '../deps/@polymer/polymer/lib/utils/html-tag.js';
import '../deps/golden-layout/src/css/goldenlayout-base.css.js';
import '../deps/golden-layout/src/css/goldenlayout-light-theme.css.js';
import './arcs-overview.js';
import './arcs-stores.js';
import './arcs-planning.js';
import './arcs-communication-channel.js';
import './arcs-environment.js';
import './arcs-notifications.js';
import './arcs-tracing.js';
import './arcs-pec-log.js';
import './arcs-hcr-list.js';
import './arcs-selector.js';
import './strategy-explorer/strategy-explorer.js';
import './arcs-recipe-editor.js';
import './arcs-connection-status.js';
// Top-level devtools shell element. Renders the fixed header (notifications
// plus the arc selector) and fills the remaining viewport with a GoldenLayout
// tab stack containing one tab per devtools tool.
class ArcsDevtoolsApp extends MessengerMixin(PolymerElement) {
  static get template() {
    return html`
    <style include="shared-styles goldenlayout-base.css goldenlayout-light-theme.css">
      :host {
        height: 100vh;
        display: flex;
        flex-direction: column;
      }
      header {
        height: 27px;
        flex-grow: 0;
      }
      arcs-notifications:not([visible]) + [divider] {
        display: none;
      }
      #main {
        position: relative;
        flex-grow: 1;
      }
      /* TODO: Create our own golden-layout theme instead of overriding. */
      .lm_content {
        background: white;
        position: relative;
        overflow: auto;
      }
      .lm_header .lm_tab {
        /* Fixing uneven padding caused by missing close button.
           This can be reverted once we allow closing and re-opening tools. */
        padding: 0 10px 4px;
      }
    </style>
    <arcs-communication-channel></arcs-communication-channel>
    <arcs-connection-status></arcs-connection-status>
    <header id="header" class="header">
      <div section>
        <arcs-notifications></arcs-notifications><div divider></div>
        <arcs-selector active-page="[[routeData.page]]"></arcs-selector>
      </div>
    </header>
    <div id="main"></div>
    `;
  }

  static get is() { return 'arcs-devtools-app'; }

  ready() {
    super.ready();
    // Tab label -> custom-element tag name for each tool. Insertion order
    // determines the tab order in the stack.
    const tools = {
      'Overview': 'arcs-overview',
      'Environment': 'arcs-environment',
      'Storage': 'arcs-stores',
      'Execution Log': 'arcs-pec-log',
      'Strategizer': 'strategy-explorer',
      'Planner': 'arcs-planning',
      'Tracing': 'arcs-tracing',
      'Editor': 'arcs-recipe-editor',
      'HCR': 'arcs-hcr-list'
    };
    // TODO: Save user's layout to local storage and restore from it.
    const layout = new GoldenLayout({
      content: [{
        type: 'stack',
        content: Object.entries(tools).map(([name]) => ({
          type: 'component',
          componentName: name,
          // TODO: Allow closing and then re-opening tools.
          isClosable: false
        }))
      }],
      settings: {
        // Pulling a tool into a popup resets its state,
        // which we cannot recover.
        showPopoutIcon: false,
      },
    }, this.$.main);
    // Each tool element is created lazily by GoldenLayout; the `active`
    // attribute mirrors tab visibility so tools can pause work when hidden.
    for (const [name, elementName] of Object.entries(tools)) {
      layout.registerComponent(name, function(container) {
        const element = document.createElement(elementName);
        container.getElement().append(element);
        container.on('show', () => element.setAttribute('active', ''));
        container.on('hide', () => element.removeAttribute('active'));
      });
    }
    layout.init();
    // We need to observe the body for changes as opposed to #main, because when the viewport
    // shrinks #main will not shrink if it is filled with content, body however will.
    new ResizeObserver(rects => {
      const {height, width} = rects[0].contentRect;
      layout.updateSize(width, height - this.$.header.offsetHeight);
    }).observe(document.body);
  }
}
// Register the shell so <arcs-devtools-app> can be used declaratively.
window.customElements.define(ArcsDevtoolsApp.is, ArcsDevtoolsApp);
| PolymerLabs/arcs-live | devtools/src/arcs-devtools-app.js | JavaScript | bsd-3-clause | 4,211 |
import { Component } from '@angular/core';
import { NavParams, ViewController, ToastController } from 'ionic-angular';
import { Remote } from '../../app/remote';
import { Incident } from '../../app/incident';
import { notify } from '../../app/utils';
@Component({
  selector: 'page-incident-form',
  templateUrl: 'incident-form.html'
})
export class IncidentFormPage {
  /** Incident categories offered by the form's category selector. */
  public categories = [
    'Crash',
    'Hazard',
    'Maintenance Issue',
    'Other'
  ];

  /** Incident being edited, seeded from the navigation parameters. */
  private incident: Incident;

  constructor(
    private navParams: NavParams,
    private viewCtrl: ViewController,
    private toastCtrl: ToastController,
    private remote: Remote) {
    this.incident = new Incident(navParams.data);
  }

  /** Dismiss the hosting modal. */
  private closeModal() {
    this.viewCtrl.dismiss();
  }

  /**
   * Submit the incident report, toast the outcome (success or failure),
   * and close the modal immediately without waiting for the upload.
   */
  public sendIncident() {
    const upload = this.remote.postIncident(this.incident);
    upload
      .then(() => notify(this.toastCtrl, 'Report sent successfully!'))
      .catch(() => notify(this.toastCtrl,
        'Report upload failed. Please try again later.'));
    this.closeModal();
  }
}
| CUUATS/bikemoves-v2 | src/pages/incident-form/incident-form.ts | TypeScript | bsd-3-clause | 1,045 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_malloc_72b.cpp
Label Definition File: CWE762_Mismatched_Memory_Management_Routines__delete.label.xml
Template File: sources-sinks-72b.tmpl.cpp
*/
/*
* @description
* CWE: 762 Mismatched Memory Management Routines
* BadSource: malloc Allocate data using malloc()
* GoodSource: Allocate data using new
* Sinks:
* GoodSink: Deallocate data using free()
* BadSink : Deallocate data using delete
* Flow Variant: 72 Data flow: data passed in a vector from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <vector>
using namespace std;
// NOTE(review): This is a Juliet/SARD benchmark testcase. The mismatched
// delete-vs-free behavior below is INTENTIONAL and must not be "fixed";
// static-analysis tools are graded on flagging it.
namespace CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_malloc_72
{
#ifndef OMITBAD
// Bad sink: receives data (allocated with malloc() upstream) via a vector.
void badSink(vector<int64_t *> dataVector)
{
    /* copy data out of dataVector */
    int64_t * data = dataVector[2];
    /* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
     * require a call to free() to deallocate the memory */
    delete data;
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
// Here the upstream source allocated with new, so delete is the correct pairing.
void goodG2BSink(vector<int64_t *> dataVector)
{
    int64_t * data = dataVector[2];
    /* POTENTIAL FLAW: Deallocate memory using delete - the source memory allocation function may
     * require a call to free() to deallocate the memory */
    delete data;
}
/* goodB2G uses the BadSource with the GoodSink */
// Here the upstream source allocated with malloc(), so free() is correct.
void goodB2GSink(vector<int64_t *> dataVector)
{
    int64_t * data = dataVector[2];
    /* FIX: Deallocate the memory using free() */
    free(data);
}
#endif /* OMITGOOD */
} /* close namespace */
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE762_Mismatched_Memory_Management_Routines/s03/CWE762_Mismatched_Memory_Management_Routines__delete_int64_t_malloc_72b.cpp | C++ | bsd-3-clause | 1,762 |
# -*- coding: utf-8 -*-
"""
templatetricks.override_autoescaped
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Override which templates are autoescaped
http://flask.pocoo.org/snippets/41/
"""
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from flask import Flask
class JHtmlEscapingFlask(Flask):
    """Flask subclass that autoescapes ``*.jhtml`` templates in addition to
    the extensions Flask escapes by default."""

    def select_jinja_autoescape(self, filename):
        # A .jhtml template is always escaped; anything else falls back to
        # Flask's stock extension-based rule.
        return filename.endswith('.jhtml') or Flask.select_jinja_autoescape(self, filename)
# Module-level WSGI app built from the subclass so *.jhtml templates autoescape.
app = JHtmlEscapingFlask(__name__)
| fengsp/flask-snippets | templatetricks/override_autoescaped.py | Python | bsd-3-clause | 570 |
<?php
namespace Tests\Functionals\ExtremeCarpaccio\ApiContext;
use Behat\Gherkin\Node\TableNode;
use Tests\Functionals\ExtremeCarpaccio\ApiContext as Base;
/**
 * Behat step definitions for the Extreme Carpaccio /order endpoint.
 *
 * NOTE: the annotations (@When/@Given/@Then) in the method docblocks are
 * parsed by Behat at runtime and must match the feature files exactly.
 *
 * Class OrderContext
 */
class OrderContext extends Base
{
    /**
     * Unit prices of the pending order, parallel to $quantities.
     * @var array
     */
    private $prices;
    /**
     * Quantities of the pending order, parallel to $prices.
     * @var array
     */
    private $quantities;
    /**
     * ISO country code used to pick the tax rate.
     * @var string
     */
    private $country;
    /**
     * Reduction scheme name (defaults to 'STANDARD').
     * @var string
     */
    private $reduction;
    public function __construct($baseUrl)
    {
        parent::__construct($baseUrl);
        // Start each scenario with an empty order and the default reduction.
        $this->quantities = [];
        $this->prices = [];
        $this->reduction = 'STANDARD';
    }
    /**
     * @When I receive a bad request
     */
    public function iReceiveABadRequest()
    {
        // GET /order is not allowed; used to exercise error handling.
        $this->visitPath('/order');
    }
    /**
     * @Then I handle the bad request
     */
    public function iHandleTheBadRequest()
    {
        // The server must answer 405 Method Not Allowed.
        $this->assertResponseStatus(405);
    }
    /**
     * @Given I have the following order
     */
    public function iHaveTheFollowingOrder(TableNode $table)
    {
        // Reset any previous order, then load price/quantity pairs
        // from the scenario's table.
        $this->prices = $this->quantities = [];
        foreach ($table as $row ) {
            $this->prices[] = (float) $row['price'];
            $this->quantities[] = (int) $row['quantity'];
        }
    }
    /**
     * @Given the country is :country
     */
    public function theCountryIs($country)
    {
        $this->country = $country;
    }
    /**
     * @Given the reduction is :reduction
     */
    public function theReductionIs($reduction)
    {
        $this->reduction = $reduction;
    }
    /**
     * @When I validate
     */
    public function iValidate()
    {
        // POST the accumulated order and expect a successful response.
        $post = [
            'prices' => $this->prices,
            'quantities' => $this->quantities,
            'country' => $this->country,
            'reduction' => $this->reduction,
        ];
        $this->getSession()->getDriver()->getClient()->request('POST', $this->baseUrl.'/order', $post);
        $this->assertResponseStatus(200);
    }
    /**
     * @Then The total must be :total €
     */
    public function theTotalMustBeEu($total)
    {
        // Loose (==) comparison on purpose: $total arrives as a string.
        $content = json_decode($this->getSession()->getDriver()->getContent(), true);
        assert($content['total'] == $total);
    }
}
| jak78/extreme-carpaccio | clients/php/silex/tests/functionals/features/bootstrap/ExtremeCarpaccio/ApiContext/OrderContext.php | PHP | bsd-3-clause | 2,266 |
""" Query modules mapping functions to their query strings
structured:
module_name { query_string: function_for_query }
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
# Standard imports
from future import standard_library
standard_library.install_aliases()
from builtins import range
from builtins import *
import sys
import os
import math
import datetime
import logging
# logging.basicConfig(format='%(asctime)s:%(levelname)s:%(message)s', level=logging.DEBUG)
import random
from uuid import UUID
# Our imports
from emission.core.get_database import get_section_db, get_trip_db, get_routeCluster_db, get_alternatives_db
from . import trip_old as trip
# 0763de67-f61e-3f5d-90e7-518e69793954
# 0763de67-f61e-3f5d-90e7-518e69793954_20150421T230304-0700_0
# helper for getCanonicalTrips
def get_clusters_info(uid):
    """Collect per-cluster distributions for a user's route clusters.

    For each cluster stored in the routeCluster collection, returns a tuple
    (distributionArrays, representative_trip) where distributionArrays holds
    five parallel lists gathered from the cluster's sections:
    [start_datetimes, end_datetimes, start_points, end_points, confirmed_modes]
    and representative_trip is the first section document of the cluster.
    Returns [] when the user has no cluster document.
    """
    c_db = get_routeCluster_db()
    s_db = get_section_db()
    clusterJson = c_db.find_one({"clusters":{"$exists":True}, "user": uid})
    if clusterJson is None:
        return []
    c_info = []
    clusterSectionLists= list(clusterJson["clusters"].values())
    logging.debug( "Number of section lists for user %s is %s" % (uid, len(clusterSectionLists)))
    for sectionList in clusterSectionLists:
        first = True
        logging.debug( "Number of sections in sectionList for user %s is %s" % (uid, len(sectionList)))
        if (len(sectionList) == 0):
            # There's no point in returning this cluster, let's move on
            continue
        distributionArrays = [[] for _ in range(5)]
        for section in sectionList:
            section_json = s_db.find_one({"_id":section})
            if first:
                # First section found becomes the cluster's representative.
                representative_trip = section_json
                first = False
            appendIfPresent(distributionArrays[0], section_json, "section_start_datetime")
            appendIfPresent(distributionArrays[1], section_json, "section_end_datetime")
            appendIfPresent(distributionArrays[2], section_json, "section_start_point")
            appendIfPresent(distributionArrays[3], section_json, "section_end_point")
            appendIfPresent(distributionArrays[4], section_json, "confirmed_mode")
        c_info.append((distributionArrays, representative_trip))
    return c_info
def appendIfPresent(list, element, key):
    """Append element[key] to `list` when `element` is a non-None mapping
    containing `key`; otherwise log and leave `list` untouched.

    (Parameter name `list` kept for interface compatibility even though it
    shadows the builtin.)
    """
    if element is None or key not in element:
        logging.debug("not appending element %s with key %s" % (element, key))
        return
    list.append(element[key])
class AlternativesNotFound(Exception):
    """Raised when no alternative trips exist for a given trip id."""

    def __init__(self, value):
        # Store the payload without delegating to Exception.__init__,
        # matching the original class's attribute-only behavior.
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
# returns the canonical (clustered) trips for the user
def getCanonicalTrips(uid, get_representative=False):
    """Summarize each of the user's route clusters as a dict.

    uid is a UUID object, not a string.

    Each dict carries the cluster's representative trip plus the start/end
    point, start/end time, and confirmed-mode distributions gathered by
    get_clusters_info(). Returns an iterator over the dicts (possibly empty).

    `get_representative` is currently unused; it is kept for interface
    compatibility with existing callers.
    """
    logging.debug('UUID for canonical %s' % uid)
    cluster_json_list = []
    for (cluster, rt) in get_clusters_info(uid):
        cluster_json_list.append({
            "representative_trip": rt,
            "start_point_distr": cluster[2],
            "end_point_distr": cluster[3],
            "start_time_distr": cluster[0],
            "end_time_distr": cluster[1],
            "confirmed_mode_list": cluster[4],
        })
    return iter(cluster_json_list)
# returns all trips for the user
def getAllTrips(uid):
    """All recorded 'move' trips for `uid`, as a pymongo cursor."""
    return get_trip_db().find({'user_id': uid, 'type': 'move'})
def getAllTrips_Date(uid, dys):
    """All 'move' trips for `uid` that started within the last `dys` days."""
    cutoff = datetime.datetime.now() - datetime.timedelta(days=dys)
    return get_trip_db().find({
        'user_id': uid,
        'type': 'move',
        'trip_start_datetime': {"$gt": cutoff},
    })
# returns all trips with no alternatives for the user
def getNoAlternatives(uid):
    """Trips for which alternative computation has not started.

    If pipelineFlags exists then we have started alternatives, and so have
    already scheduled the query. No need to reschedule unless the query
    fails. TODO: If the query fails, remove pipelineFlags so that we will
    reschedule.
    """
    return get_trip_db().find({
        'user_id': uid,
        'type': 'move',
        'pipelineFlags': {'$exists': False},
    })
def getNoAlternativesPastMonth(uid):
    """Trips from the last 30 days that still lack computed alternatives."""
    cutoff = datetime.datetime.now() - datetime.timedelta(days=30)
    return get_trip_db().find({
        'user_id': uid,
        'type': 'move',
        'trip_start_datetime': {"$gt": cutoff},
        'pipelineFlags': {'$exists': False},
    })
# Returns the trips that are suitable for training
# Currently this is:
# - trips that have alternatives, and
# - have not yet been included in a training set
def getTrainingTrips(uid):
    """Training-eligible trips for `uid`, using a 30-day window.

    Thin wrapper around getTrainingTrips_Date. (The original body contained
    an unreachable second query after the return; it has been removed.)
    """
    return getTrainingTrips_Date(uid, 30)
def getTrainingTrips_Date(uid, dys):
    """'move' trips from the last `dys` days whose pipelineFlags field exists
    (i.e. alternatives have been computed), making them training candidates."""
    cutoff = datetime.datetime.now() - datetime.timedelta(days=dys)
    return get_trip_db().find({
        'user_id': uid,
        'type': 'move',
        'trip_start_datetime': {"$gt": cutoff},
        "pipelineFlags": {"$exists": True},
    })
def getAlternativeTrips(trip_id):
    """Alternatives recorded for `trip_id`.

    Returns a cursor over the alternatives collection.
    Raises AlternativesNotFound when no alternatives exist.
    """
    alternatives = get_alternatives_db().find({'trip_id': trip_id})
    # Hoisted: the original issued estimated_document_count() twice
    # (one extra server round-trip) when alternatives existed.
    # NOTE(review): in stock pymongo, estimated_document_count lives on
    # Collection and ignores filters — verify this cursor-level call is the
    # intended count.
    count = alternatives.estimated_document_count()
    if count > 0:
        logging.debug("Number of alternatives for trip %s is %d" % (trip_id, count))
        return alternatives
    raise AlternativesNotFound("No Alternatives Found")
def getRecentTrips(uid):
    """Placeholder for the 'most recent trips' query — not yet implemented."""
    raise NotImplementedError()
def getTripsThroughMode(uid):
    """Placeholder for the 'trips by mode' query — not yet implemented."""
    raise NotImplementedError()
# Registry mapping module_name -> {query_string: query_function}; consumers
# look up queries by name instead of importing the functions directly.
modules = {
    # Trip Module
    'trips': {
        'get_canonical': getCanonicalTrips,
        'get_all': getAllTrips,
        'get_no_alternatives': getNoAlternatives,
        'get_no_alternatives_past_month': getNoAlternativesPastMonth,
        'get_most_recent': getRecentTrips,  # not yet implemented
        'get_trips_by_mode': getTripsThroughMode},  # not yet implemented
    # Utility Module
    'utility': {
        'get_training': getTrainingTrips
    },
    # Recommender Module
    'recommender': {
        'get_improve': getCanonicalTrips
    },
    # Perturbation Module (no queries yet)
    'perturbation': {},
    # Alternatives Module
    # note: uses a different collection than section_db
    'alternatives': {
        'get_alternatives': getAlternativeTrips
    }
}
| e-mission/e-mission-server | emission/core/wrapper/filter_modules.py | Python | bsd-3-clause | 7,166 |
package com.oracle.ptsdemo.healthcare.wsclient.osc.opty.generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="result" type="{http://xmlns.oracle.com/apps/sales/opptyMgmt/opportunities/opportunityService/}Opportunity"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated wrapper for the getOpportunityResponse SOAP element.
// Do not hand-edit logic here; regenerate from the service WSDL/XSD instead.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "result"
})
@XmlRootElement(name = "getOpportunityResponse")
public class GetOpportunityResponse {

    // Required per the schema (minOccurs defaults to 1); may still be null
    // before unmarshalling or explicit assignment.
    @XmlElement(required = true)
    protected Opportunity result;

    /**
     * Gets the value of the result property.
     *
     * @return
     *     possible object is
     *     {@link Opportunity }
     *
     */
    public Opportunity getResult() {
        return result;
    }

    /**
     * Sets the value of the result property.
     *
     * @param value
     *     allowed object is
     *     {@link Opportunity }
     *
     */
    public void setResult(Opportunity value) {
        this.result = value;
    }

}
| dushmis/Oracle-Cloud | PaaS-SaaS_HealthCareApp/DoctorPatientCRMExtension/HealthCare/HealthCareWSProxyClient/src/com/oracle/ptsdemo/healthcare/wsclient/osc/opty/generated/GetOpportunityResponse.java | Java | bsd-3-clause | 1,582 |
<?php
// ThinkPHP controller for the organization (employer) center: profile,
// published jobs, applicant handling, and in-progress job management.
// Comments translated to English; all runtime strings left untouched.
class OrgCenterAction extends Action{
    // Dashboard: requires an organization session, then assembles profile,
    // applications, published jobs and in-progress jobs for the template.
    public function index(){
        if(!session('?oid')){
            // Not logged in as an organization: show error page
            $this->error("企业用户未登录",U("Login/index"));
            return;
        }
        // Show organization info
        $this->showOrgInfo();
        // List applications
        $this->whoApplyed();
        // Published part-time jobs
        $this->showPublicedJob();
        // Part-time jobs in progress
        $this->showIngJob();
        $this->display();
    }
    // Show the organization's published part-time jobs
    private function showPublicedJob(){
        $Jobs = M('Jobs');
        $where = "pub_oid=".session('oid');
        $field = "jid,title,current_peo,want_peo";
        $arr2_jobs = $Jobs->where($where)->field($field)->select();
        // select() returns rows, null (no match) or false (error)
        if($arr2_jobs){
            $this->assign("pj",$arr2_jobs);
        }elseif(is_null($arr2_jobs)){
            $this->assign('pj_error_info','还没有发布兼职');
        }else{
            $this->assign('pj_error_info','读取错误');
        }
    }
    // Show organization info (everything except mid and passwd)
    private function showOrgInfo(){
        $Orgs = M('Orgs');
        $where = "oid=".session('oid');
        $field = "mid,passwd";
        // field($field, true) EXCLUDES the listed columns
        $arr2_org = $Orgs->where($where)->field($field,true)->find();
        if($arr2_org){
            $this->assign('org_info',$arr2_org);
        }elseif(is_null($arr2_org)){
            $this->assign('org_error_info','无');
        }else{
            $this->assign('org_error_info','读取错误');
        }
    }
    // Edit page: show organization info for editing
    public function editInfo(){
        $Org = M('Orgs');
        $where = "oid=".session('oid');
        $field = "oid,passwd";
        $arr_info = $Org->field($field,true)->where($where)->find();
        //dump($arr_info);
        if($arr_info){
            $this->assign("orgInfo",$arr_info);
        }else{
            $this->ajaxReturn(0,"获取信息失败",0);
        }
        // Output recruiting-intent options
        $this->showMold();
        $this->display();
    }
    // Show recruiting-intent (job category) options
    protected function showMold(){
        $Mold = M('Mold');
        $where = "1";
        $field = "mid,name";
        $arr2_mold = $Mold->where($where)->field($field)->select();
        if($arr2_mold){
            $this->assign('mold_info',$arr2_mold);
        }elseif(is_null($arr2_mold)){
            $this->assign('mold_error_info','空');
        }else{
            $this->assign('mold_error_info','读取错误');
        }
    }
    // Update organization info from POST data
    public function updateInfo(){
        $Org = D('Orgs');
        $where = "oid=".session('oid');
        // create(..., 2) validates/collects POST data in "update" mode
        if(!$Org->create($this->_post(),2)){
            $this->ajaxReturn(0,$Org->getError(),0);
            return;
        }
        // Recruiting intent is a multi-value field, stored serialized
        $Org->intent = serialize($this->_post('intent'));
        $flag = $Org->where($where)->save();
        // save() returns affected rows; 0 still counts as success (no change)
        if($flag || $flag === 0){
            $this->ajaxReturn(1,"更新成功",1);
        }else{
            $this->ajaxReturn(1,"更新失败",0);
        }
    }
    // List pending applications (is_pass = 1) for this organization's jobs
    private function whoApplyed(){
        if(!session('?oid')){
            $this->error("未登录",U('Login/index'));
            return;
        }
        $Apply = M('Apply');
        $where = "app_oid=" . session('oid') . " AND xm_apply.is_pass =1";
        $field = "xm_apply.app_id AS app_id,xm_users.uid AS uid,xm_users.username AS username,xm_apply.ctime AS ctime,xm_apply.app_jid AS jid,xm_jobs.title AS title";
        $join_user = "INNER JOIN xm_users ON xm_users.uid = xm_apply.app_uid";
        $join_job = "INNER JOIN xm_jobs ON xm_jobs.jid = xm_apply.app_jid";
        $arr2_apply = $Apply->where($where)->join($join_user)->join($join_job)->field($field)->select();
        if($arr2_apply){
            $this->assign("apply_list",$arr2_apply);
        }elseif (is_null($arr2_apply)){
            $this->assign("apply_error_info","无申请人");
        }else{
            $this->assign("apply_error_info","查询失败");
        }
    }
    // Show jobs currently in progress (grouped by job id)
    protected function showIngJob() {
        $Work = M('Working');
        $where = '';
        $field = "work_jid,title,xm_working.ctime";
        $join = "INNER JOIN xm_jobs ON xm_jobs.jid=xm_working.work_jid AND xm_jobs.pub_oid = " . session('oid');
        $group = "work_jid";
        $arr2 = $Work->where($where)->join($join)->field($field)->group($group)->select();
        if($arr2){
            $this->assign('work_info',$arr2);
        }elseif(is_null($arr2)){
            $this->assign('work_error_info','还没有进行中的兼职');
        }else{
            $this->assign('work_error_info','读取错误');
        }
    }
    // Show the detail (workers, payment info, status) of one in-progress job
    public function showIngJobDetail() {
        $jid = $this->_get('jid');
        $Work = M('Working');
        $where = "work_jid=" . $jid;
        $field = "work_uid,work_id,work_status,xm_working.ctime,username,pay_way,pay_alipay_id,pay_ccard_id,title,xm_working.is_pass";
        $join1 = "INNER JOIN xm_jobs ON xm_jobs.jid=xm_working.work_jid";
        $join2 = "INNER JOIN xm_users ON xm_users.uid=xm_working.work_uid";
        $arr2 = $Work->where($where)->join($join1)->join($join2)->field($field)->select();
        if($arr2){
            $this->assign('work_info',$arr2);
        }elseif(is_null($arr2)){
            $this->assign('work_error_info','还没有进行中的兼职');
        }else{
            $this->assign('work_error_info','读取错误');
        }
        $this->display();
    }
    // Confirm or reject completion of a work record
    // f=0: approve (is_pass=1), f=1: reject (is_pass=0); both set status 2
    public function statusHandler() {
        if(!session('?oid')){
            $this->error('未登录',U("Login/index"));
            return;
        }
        $f = $this->_get('f');
        $wid = $this->_get('wid');
        $Work = M('Working');
        $where = "work_id=" . $wid;
        $arr = array();
        if($f == '0'){
            $arr = array('is_pass'=>1,'work_status'=>2); // approved
        }elseif($f == '1'){
            $arr = array('is_pass'=>0,'work_status'=>2); // rejected
        }
        $flag = $Work->where($where)->save($arr);
        if($flag || $flag === 0){
            $this->ajaxReturn(1,'操作成功',1);
        }else{
            $this->ajaxReturn(2,'操作失败'.$Work->getLastSql(),1);
        }
    }
    // Approve (ispass=yes) or reject an applicant's job application
    public function isPass(){
        if(!session('?oid')){
            $this->error("企业用户未登录",U('Login/index'));
            return;
        }
        if($this->_get('ispass') == 'yes'){ // approved
            $uid = $this->_get('uid');
            $jid = $this->_get('jid');
            $app_id = $this->_get('app_id');
            $where = "jid=" . $jid;
            $Job = M('jobs');
            // //xm_jobs表 current_peo +1
            // if(!$Job->where($where)->setInc("current_peo",1)){
            // echo $Job->getError();
            // return;
            // }
            // Add a record to the working table
            $Work = M('Working');
            $w_data = array('work_uid' => $uid,
                'work_jid' => $jid,
                'ctime' => time()
            );
            if(!$Work->add($w_data)){
                $this->ajaxReturn(0,'操作失败',0);
                return;
            }
            // Set is_pass to 2 (approved) in xm_apply
            $Apply = M('apply');
            $flag = $Apply->where("app_id=".$app_id)->setField("is_pass", 2);
            if($flag){
                $this->ajaxReturn(1,"操作成功",1);
            }else{
                $this->ajaxReturn(1,"操作失败".$Apply->getLastSql(),0);
            }
        }else{
            // Rejected: set is_pass to 3 in xm_apply
            $app_id = $this->_get('app_id');
            $Apply = M('apply');
            $flag = $Apply->where("app_id=".$app_id)->setField("is_pass",3);
            if($flag){
                $this->ajaxReturn(1,"操作成功",1);
            }else{
                $this->ajaxReturn(0,"操作失败".$Apply->getError(),0);
            }
        }
    }
}
?> | airect/part-time-job | Lib/Action/OrgCenterAction.class.php | PHP | bsd-3-clause | 6,573 |
# ActiveRecord model holding a hub-connector environment record
# (store credentials and endpoint information).
class AuguryEnvironment < ActiveRecord::Base
  # attr_accessible (Rails < 4 mass-assignment whitelist): only these
  # columns may be set via mass assignment.
  attr_accessible :store_id, :token, :environment, :user, :url, :store_name
end
| urimikhli/spree_hub_connector | app/models/augury_environment.rb | Ruby | bsd-3-clause | 125 |
<?php
/**
 * Class responsible for generating HTMLPurifier_Language objects, managing
 * caching and fallbacks.
 * @note Thanks to MediaWiki for the general logic, although this version
 *       has been entirely rewritten
 * @todo Serialized cache for languages
 */
class HTMLPurifier_LanguageFactory
{

    /**
     * Cache of language code information used to load HTMLPurifier_Language objects.
     * Structure is: $factory->cache[$language_code][$key] = $value
     * @type array
     */
    public $cache;

    /**
     * Valid keys in the HTMLPurifier_Language object. Designates which
     * variables to slurp out of a message file.
     * @type array
     */
    public $keys = array('fallback', 'messages', 'errorNames');

    /**
     * Instance to validate language codes.
     * @type HTMLPurifier_AttrDef_Lang
     */
    protected $validator;

    /**
     * Cached copy of dirname(__FILE__), directory of current file without
     * trailing slash.
     * @type string
     */
    protected $dir;

    /**
     * Keys whose contents are a hash map and can be merged.
     * @type array
     */
    protected $mergeable_keys_map = array('messages' => true, 'errorNames' => true);

    /**
     * Keys whose contents are a list and can be merged.
     * @value array lookup
     */
    protected $mergeable_keys_list = array();

    /**
     * Retrieve sole instance of the factory.
     * @param HTMLPurifier_LanguageFactory $prototype Optional prototype to overload sole instance with,
     *                                                or bool true to reset to default factory.
     * @return HTMLPurifier_LanguageFactory
     */
    public static function instance($prototype = null)
    {
        static $instance = null;
        if ($prototype !== null) {
            $instance = $prototype;
        } elseif ($instance === null || $prototype == true) {
            $instance = new HTMLPurifier_LanguageFactory();
            $instance->setup();
        }
        return $instance;
    }

    /**
     * Sets up the singleton, much like a constructor
     * @note Prevents people from getting this outside of the singleton
     */
    public function setup()
    {
        $this->validator = new HTMLPurifier_AttrDef_Lang();
        $this->dir = HTMLPURIFIER_PREFIX . '/HTMLPurifier';
    }

    /**
     * Creates a language object, handles class fallbacks
     * @param HTMLPurifier_Config $config
     * @param HTMLPurifier_Context $context
     * @param bool|string $code Code to override configuration with. Private parameter.
     * @return HTMLPurifier_Language
     */
    public function create($config, $context, $code = false)
    {
        // validate language code
        if ($code === false) {
            $code = $this->validator->validate(
                $config->get('Core.Language'),
                $config,
                $context
            );
        } else {
            $code = $this->validator->validate($code, $config, $context);
        }
        if ($code === false) {
            $code = 'en'; // malformed code becomes English
        }

        $pcode = str_replace('-', '_', $code); // make valid PHP classname
        static $depth = 0; // recursion protection

        if ($code == 'en') {
            $lang = new HTMLPurifier_Language($config, $context);
        } else {
            $class = 'HTMLPurifier_Language_' . $pcode;
            $file = $this->dir . '/Language/classes/' . $code . '.php';
            if (file_exists($file) || class_exists($class, false)) {
                $lang = new $class($config, $context);
            } else {
                // Go fallback
                $raw_fallback = $this->getFallbackFor($code);
                $fallback = $raw_fallback ? $raw_fallback : 'en';
                $depth++;
                $lang = $this->create($config, $context, $fallback);
                if (!$raw_fallback) {
                    $lang->error = true;
                }
                $depth--;
            }
        }
        $lang->code = $code;
        return $lang;
    }

    /**
     * Returns the fallback language for language
     * @note Loads the original language into cache
     * @param string $code language code
     * @return string|bool
     */
    public function getFallbackFor($code)
    {
        $this->loadLanguage($code);
        return $this->cache[$code]['fallback'];
    }

    /**
     * Loads language into the cache, handles message file and fallbacks
     * @param string $code language code
     */
    public function loadLanguage($code)
    {
        static $languages_seen = array(); // recursion guard

        // abort if we've already loaded it
        if (isset($this->cache[$code])) {
            return;
        }

        // generate filename
        $filename = $this->dir . '/Language/messages/' . $code . '.php';

        // default fallback : may be overwritten by the ensuing include
        $fallback = ($code != 'en') ? 'en' : false;

        // load primary localisation
        if (!file_exists($filename)) {
            // skip the include: will rely solely on fallback
            $filename = $this->dir . '/Language/messages/en.php';
            $cache = array();
        } else {
            include $filename;
            $cache = compact($this->keys);
        }

        // load fallback localisation
        if (!empty($fallback)) {

            // infinite recursion guard
            if (isset($languages_seen[$code])) {
                trigger_error(
                    'Circular fallback reference in language ' .
                    $code,
                    E_USER_ERROR
                );
                $fallback = 'en';
            }
            // BUGFIX: was `$language_seen[$code] = true;` (singular), which
            // wrote to an undeclared variable — the guard above could never
            // trigger, allowing unbounded recursion on circular fallbacks.
            $languages_seen[$code] = true;

            // load the fallback recursively
            $this->loadLanguage($fallback);
            $fallback_cache = $this->cache[$fallback];

            // merge fallback with current language
            foreach ($this->keys as $key) {
                if (isset($cache[$key]) && isset($fallback_cache[$key])) {
                    if (isset($this->mergeable_keys_map[$key])) {
                        $cache[$key] = $cache[$key] + $fallback_cache[$key];
                    } elseif (isset($this->mergeable_keys_list[$key])) {
                        $cache[$key] = array_merge($fallback_cache[$key], $cache[$key]);
                    }
                } else {
                    $cache[$key] = $fallback_cache[$key];
                }
            }
        }

        // save to cache for later retrieval
        $this->cache[$code] = $cache;
        return;
    }
}
// vim: et sw=4 sts=4
| fayvlad/learn_yii2 | vendor/ezyang/htmlpurifier/library/HTMLPurifier/LanguageFactory.php | PHP | bsd-3-clause | 6,617 |
using System.Collections.Generic;
using System.Security;
using System.Text;
namespace Coevery.Parameters {
public interface ICommandLineParser {
IEnumerable<string> Parse(string commandLine);
}
public class CommandLineParser : ICommandLineParser {
[SecurityCritical]
public IEnumerable<string> Parse(string commandLine) {
return SplitArgs(commandLine);
}
public class State {
private readonly string _commandLine;
private readonly StringBuilder _stringBuilder;
private readonly List<string> _arguments;
private int _index;
public State(string commandLine) {
_commandLine = commandLine;
_stringBuilder = new StringBuilder();
_arguments = new List<string>();
}
public StringBuilder StringBuilder { get { return _stringBuilder; } }
public bool EOF { get { return _index >= _commandLine.Length; } }
public char Current { get { return _commandLine[_index]; } }
public IEnumerable<string> Arguments { get { return _arguments; } }
public void AddArgument() {
_arguments.Add(StringBuilder.ToString());
StringBuilder.Clear();
}
public void AppendCurrent() {
StringBuilder.Append(Current);
}
public void Append(char ch) {
StringBuilder.Append(ch);
}
public void MoveNext() {
if (!EOF)
_index++;
}
}
/// <summary>
/// Implement the same logic as found at
/// http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
/// The 3 special characters are quote, backslash and whitespaces, in order
/// of priority.
/// The semantics of a quote is: whatever the state of the lexer, copy
/// all characters verbatim until the next quote or EOF.
/// The semantics of backslash is: If the next character is a backslash or a quote,
/// copy the next character. Otherwise, copy the backslash and the next character.
/// The semantics of whitespace is: end the current argument and move on to the next one.
/// </summary>
private IEnumerable<string> SplitArgs(string commandLine) {
var state = new State(commandLine);
while (!state.EOF) {
switch (state.Current) {
case '"':
ProcessQuote(state);
break;
case '\\':
ProcessBackslash(state);
break;
case ' ':
case '\t':
if (state.StringBuilder.Length > 0)
state.AddArgument();
state.MoveNext();
break;
default:
state.AppendCurrent();
state.MoveNext();
break;
}
}
if (state.StringBuilder.Length > 0)
state.AddArgument();
return state.Arguments;
}
private void ProcessQuote(State state) {
state.MoveNext();
while (!state.EOF) {
if (state.Current == '"') {
state.MoveNext();
break;
}
state.AppendCurrent();
state.MoveNext();
}
state.AddArgument();
}
// Handle a backslash: an escaped quote emits just the quote; anything else
// keeps the backslash and the following character. A trailing backslash at
// end of input is kept literally.
private void ProcessBackslash(State state) {
    state.MoveNext();
    if (state.EOF) {
        state.Append('\\');
        return;
    }
    if (state.Current == '"') {
        // \" -> "
        state.Append('"');
    }
    else {
        // Not an escape: keep both characters.
        state.Append('\\');
        state.AppendCurrent();
    }
    state.MoveNext();
}
}
} | caoxk/OrchardEF | src/Tools/Coevery/Parameters/CommandLineParser.cs | C# | bsd-3-clause | 4,136 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE36_Absolute_Path_Traversal__wchar_t_environment_fopen_52b.cpp
Label Definition File: CWE36_Absolute_Path_Traversal.label.xml
Template File: sources-sink-52b.tmpl.cpp
*/
/*
* @description
* CWE: 36 Absolute Path Traversal
* BadSource: environment Read input from an environment variable
* GoodSource: Full path and file name
* Sink: fopen
* BadSink : Open the file named in data using fopen()
* Flow Variant: 52 Data flow: data passed as an argument from one function to another to another in three different source files
*
* */
#include "std_testcase.h"
#ifndef _WIN32
#include <wchar.h>
#endif
#define ENV_VARIABLE L"ADD"
#ifdef _WIN32
#define GETENV _wgetenv
#else
#define GETENV getenv
#endif
#ifdef _WIN32
#define FOPEN _wfopen
#else
#define FOPEN fopen
#endif
namespace CWE36_Absolute_Path_Traversal__wchar_t_environment_fopen_52
{
/* all the sinks are the same, we just want to know where the hit originated if a tool flags one */
#ifndef OMITBAD
/* bad function declaration */
void badSink_c(wchar_t * data);
void badSink_b(wchar_t * data)
{
badSink_c(data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink_c(wchar_t * data);
void goodG2BSink_b(wchar_t * data)
{
goodG2BSink_c(data);
}
#endif /* OMITGOOD */
} /* close namespace */
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE36_Absolute_Path_Traversal/s03/CWE36_Absolute_Path_Traversal__wchar_t_environment_fopen_52b.cpp | C++ | bsd-3-clause | 1,450 |
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="fr_CA">
<context>
<name>mainUI</name>
<message>
<location filename="../mainUI.ui" line="14"/>
<location filename="../mainUI.cpp" line="53"/>
<source>Calculator</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.ui" line="657"/>
<source>Advanced Operations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="77"/>
<source>Percentage %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="79"/>
<source>Power %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="81"/>
<source>Base-10 Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="83"/>
<source>Exponential %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="85"/>
<source>Constant Pi %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="88"/>
<source>Square Root %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="90"/>
<source>Logarithm %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="92"/>
<source>Natural Log %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="95"/>
<source>Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="97"/>
<source>Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="99"/>
<source>Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="102"/>
<source>Arc Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="104"/>
<source>Arc Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="106"/>
<source>Arc Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="109"/>
<source>Hyperbolic Sine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="111"/>
<source>Hyperbolic Cosine %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="113"/>
<source>Hyperbolic Tangent %1</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../mainUI.cpp" line="176"/>
<source>Save Calculator History</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS>
| harcobbit/lumina | src-qt5/desktop-utils/lumina-calculator/i18n/l-calc_fr_CA.ts | TypeScript | bsd-3-clause | 3,767 |
///////////////////////////////////////////////////////////////////////////////////////////////////
// OpenGL Mathematics Copyright (c) 2005 - 2011 G-Truc Creation (www.g-truc.net)
///////////////////////////////////////////////////////////////////////////////////////////////////
// Created : 2008-08-17
// Updated : 2010-02-17
// Licence : This source is under MIT License
// File : glm/core/type_half.hpp
///////////////////////////////////////////////////////////////////////////////////////////////////
#ifndef glm_core_type_half
#define glm_core_type_half
#include <cstdlib>
namespace glm
{
    namespace test
    {
        // Self-test entry point for the half type.
        bool main_type_half();
    }//namespace test

    namespace detail
    {
        // Raw 16-bit storage for a half-precision float bit pattern.
        typedef short hdata;

        // Decode a 16-bit half bit pattern into a 32-bit float.
        float toFloat32(hdata value);
        // Encode a 32-bit float into a 16-bit half bit pattern.
        hdata toFloat16(float const & value);

        ///16-bit floating point type.
        /// \ingroup gtc_half_float
        class thalf
        {
        public:
            // Constructors
            GLM_FUNC_DECL thalf();
            GLM_FUNC_DECL thalf(thalf const & s);
            template <typename U>
            GLM_FUNC_DECL explicit thalf(U const & s);

            // Cast
            //operator float();
            GLM_FUNC_DECL operator float() const;
            //operator double();
            //operator double() const;

            // Unary updatable operators
            GLM_FUNC_DECL thalf& operator= (thalf const & s);
            GLM_FUNC_DECL thalf& operator+=(thalf const & s);
            GLM_FUNC_DECL thalf& operator-=(thalf const & s);
            GLM_FUNC_DECL thalf& operator*=(thalf const & s);
            GLM_FUNC_DECL thalf& operator/=(thalf const & s);
            GLM_FUNC_DECL thalf& operator++();
            GLM_FUNC_DECL thalf& operator--();

            // Accessors for the value (decoded) and the raw bit pattern.
            GLM_FUNC_DECL float toFloat() const{return toFloat32(data);}
            GLM_FUNC_DECL hdata _data() const{return data;}

        private:
            hdata data;
        };

        // Binary arithmetic on half values (definitions in type_half.inl).
        thalf operator+ (thalf const & s1, thalf const & s2);
        thalf operator- (thalf const & s1, thalf const & s2);
        thalf operator* (thalf const & s1, thalf const & s2);
        thalf operator/ (thalf const & s1, thalf const & s2);

        // Unary constant operators
        thalf operator- (thalf const & s);
        thalf operator-- (thalf const & s, int);
        thalf operator++ (thalf const & s, int);
    }//namespace detail
}//namespace glm
#include "type_half.inl"
#endif//glm_core_type_half
| jhaberstro/subsurface-scattering | subsurface-scattering/glm/core/type_half.hpp | C++ | bsd-3-clause | 2,259 |
import assert from "assert";
import {Point2D, Vector2D, Matrix2D} from "../index.js";
describe("Point2D", () => {
    // --- construction ---
    it("new point", () => {
        const p = new Point2D(10, 20);

        assert.strictEqual(p.x, 10);
        assert.strictEqual(p.y, 20);
    });
    it("clone", () => {
        const p = new Point2D(10, 20);
        const c = p.clone();

        assert.strictEqual(p.x, c.x);
        assert.strictEqual(p.y, c.y);
        assert.strictEqual(c.x, 10);
        assert.strictEqual(c.y, 20);
    });

    // --- arithmetic ---
    it("add", () => {
        const p1 = new Point2D(10, 20);
        const p2 = new Point2D(20, 30);
        const p3 = p1.add(p2);

        assert.strictEqual(p3.x, 30);
        assert.strictEqual(p3.y, 50);
    });
    it("subtract", () => {
        const p1 = new Point2D(10, 20);
        const p2 = new Point2D(20, 40);
        const p3 = p1.subtract(p2);

        assert.strictEqual(p3.x, -10);
        assert.strictEqual(p3.y, -20);
    });
    it("multiply", () => {
        const p1 = new Point2D(10, 20);
        const p2 = p1.multiply(0.5);

        assert.strictEqual(p2.x, 5);
        assert.strictEqual(p2.y, 10);
    });
    it("divide", () => {
        const p1 = new Point2D(10, 20);
        const p2 = p1.divide(2);

        assert.strictEqual(p2.x, 5);
        assert.strictEqual(p2.y, 10);
    });

    // --- comparison ---
    it("equal", () => {
        const p1 = new Point2D(10, 20);
        const p2 = new Point2D(10, 20);

        assert.strictEqual(p1.equals(p2), true);
    });
    it("not equal", () => {
        const p1 = new Point2D(10, 20);
        const p2 = new Point2D(10, 21);

        assert.strictEqual(p1.equals(p2), false);
    });

    // --- geometry helpers ---
    it("interpolate between two points", () => {
        const p1 = new Point2D(10, 20);
        const p2 = new Point2D(30, 40);
        const p3 = p1.lerp(p2, 0.25);

        assert.strictEqual(p3.x, 15);
        assert.strictEqual(p3.y, 25);
    });
    it("distance between two points", () => {
        const p1 = new Point2D(10, 20);
        const p2 = new Point2D(13, 24);
        const dist = p1.distanceFrom(p2);

        assert.strictEqual(dist, 5);
    });
    it("min", () => {
        const p1 = new Point2D(30, 5);
        const p2 = new Point2D(10, 50);
        const p3 = p1.min(p2);

        assert.strictEqual(p3.x, 10);
        assert.strictEqual(p3.y, 5);
    });
    it("max", () => {
        const p1 = new Point2D(30, 5);
        const p2 = new Point2D(10, 50);
        const p3 = p1.max(p2);

        assert.strictEqual(p3.x, 30);
        assert.strictEqual(p3.y, 50);
    });

    // --- matrix transforms ---
    it("translate", () => {
        const p1 = new Point2D(10, 20);
        const m = new Matrix2D().translate(20, 30);
        const p2 = p1.transform(m);

        assert.strictEqual(p2.x, 30);
        assert.strictEqual(p2.y, 50);
    });
    it("scale", () => {
        const p1 = new Point2D(10, 20);
        const m = new Matrix2D().scale(2);
        const p2 = p1.transform(m);

        assert.strictEqual(p2.x, 20);
        assert.strictEqual(p2.y, 40);
    });
    it("scale non-uniform", () => {
        const p1 = new Point2D(10, 20);
        const m = new Matrix2D().scaleNonUniform(2, 3);
        const p2 = p1.transform(m);

        assert.strictEqual(p2.x, 20);
        assert.strictEqual(p2.y, 60);
    });
    it("rotate", () => {
        const p1 = new Point2D(10, 0);
        const m = new Matrix2D().rotate(Math.PI / 4.0);
        const p2 = p1.transform(m);

        // expected values reflect double-precision sin/cos rounding
        assert.strictEqual(p2.x, 7.0710678118654755);
        assert.strictEqual(p2.y, 7.071067811865475);
    });
    it("rotate from vector", () => {
        const p1 = new Point2D(10, 0);
        const v = new Vector2D(Math.PI / 4.0, Math.PI / 4.0);
        const m = new Matrix2D().rotateFromVector(v);
        const p2 = p1.transform(m);

        assert.strictEqual(p2.x, 7.0710678118654755);
        assert.strictEqual(p2.y, 7.0710678118654755);
    });
    it("flip x", () => {
        const p1 = new Point2D(10, 20);
        const m = new Matrix2D().flipX();
        const p2 = p1.transform(m);

        assert.strictEqual(p2.x, -10);
        assert.strictEqual(p2.y, 20);
    });
    it("flip y", () => {
        const p1 = new Point2D(10, 20);
        const m = new Matrix2D().flipY();
        const p2 = p1.transform(m);

        assert.strictEqual(p2.x, 10);
        assert.strictEqual(p2.y, -20);
    });
    it("inverse transform", () => {
        const p1 = new Point2D(10, 20);
        const m = new Matrix2D().translate(30, 50).inverse();
        const p2 = p1.transform(m);

        assert.strictEqual(p2.x, -20);
        assert.strictEqual(p2.y, -30);
    });

    // --- formatting ---
    it("to string", () => {
        const p = new Point2D(10, 20);

        // fixed: (actual, expected) order, consistent with the other tests
        assert.strictEqual(p.toString(), "point(10,20)");
    });
});
| lysz210/kld-affine | test/point_tests.js | JavaScript | bsd-3-clause | 4,778 |
################################# LICENSE ##################################
# Copyright (c) 2009, South African Astronomical Observatory (SAAO) #
# All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# * Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer #
# in the documentation and/or other materials provided with the #
# distribution. #
# * Neither the name of the South African Astronomical Observatory #
# (SAAO) nor the names of its contributors may be used to endorse #
# or promote products derived from this software without specific #
# prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE SAAO ''AS IS'' AND ANY EXPRESS OR #
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED #
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE #
# DISCLAIMED. IN NO EVENT SHALL THE SAAO BE LIABLE FOR ANY #
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL #
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS #
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN #
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
############################################################################
"""
RINGFILTER determines the center coordinates of a ring, bins the ring radially and computes its power spectrum, and allows the user to select a smoothing filter for the ring. It uses T. Williams code. The code assumes all the files are in the same directory. Also assumes that if there is a config file, it is also in the same directory as the data. Note that this config file is in the original FORTRAN code format so that the user does not have to write another file.
Updates:
20100706
* First wrote the code
"""
# Ensure python 2.5 compatibility
from __future__ import with_statement
import os
import sys
import numpy as np
#import pyfits
from pyraf import iraf
from pyraf.iraf import pysalt
import saltsafekey
import saltsafeio
import fpsafeio
from saltsafelog import logging
from salterror import SaltIOError
# This reads the FORTRAN config file if it exists
from fortranfp import ringfilter_wrapper
from fortranfp.ringfilter_wrapper import getpfp
debug=True
def saltfpringfilter(axc,ayc,arad,rxc,ryc,filterfreq,filterwidth,itmax,conv, fitwidth,image,logfile,useconfig,configfile,verbose):
""" Determines the center coordinates of a ring, bins the ring radially and computes its power spectrum, and allows the user to select a smoothing filter for the ring. """
# default parameter values are set up in the pyraf .par file. The values used are then changed if a FORTRAN config file exists and the user elects to override the pyraf .par file.
# Is the input FORTRAN config file specified?
# If it is blank, then it will be ignored.
if useconfig:
configfile = configfile.strip()
if len(configfile) > 0:
#check exists
saltsafeio.fileexists(configfile)
# read updated parameters from the file
array=getpfp(configfile,"axc")
s=len(array)
flag = array[s-1]
if flag == 1:
axc=float(array[0])
array=getpfp(configfile,"ayc")
s=len(array)
flag = array[s-1]
if flag == 1:
ayc=float(array[0])
array=getpfp(configfile,"arad")
s=len(array)
flag = array[s-1]
if flag == 1:
arad=float(array[0])
array=getpfp(configfile,"rxc")
s=len(array)
flag = array[s-1]
if flag == 1:
rxc=float(array[0])
array=getpfp(configfile,"ryc")
s=len(array)
flag = array[s-1]
if flag == 1:
ryc=float(array[0])
array=getpfp(configfile,"calring_filter_width")
s=len(array)
flag = array[s-1]
if flag == 1:
filterwidth=int(array[0])
array=getpfp(configfile,"calring_filter_freq")
s=len(array)
flag = array[s-1]
if flag == 1:
filterfreq=int(array[0])
array=getpfp(configfile,"calring_itmax")
s=len(array)
flag = array[s-1]
if flag == 1:
itmax=int(array[0])
array=getpfp(configfile,"calring_conv")
s=len(array)
flag = array[s-1]
if flag == 1:
conv=float(array[0])
array=getpfp(configfile,"calring_fitwidth")
s=len(array)
flag = array[s-1]
if flag == 1:
fitwidth=float(array[0])
# getting paths for filenames
pathin = os.path.dirname(image)
basein = os.path.basename(image)
pathlog = os.path.dirname(logfile)
baselog = os.path.basename(logfile)
# forcing logfiles to be created in the same directory as the input data
# (we change to this directory once starting the fortran code)
if len(pathin) > 0:
logfile = baselog
# start log now that all parameter are set up
with logging(logfile, debug) as log:
# Some basic checks, many tests are done in the FORTRAN code itself
# is the input file specified?
saltsafeio.filedefined('Input',image)
# if the input file is a file, does it exist?
if basein[0] != '@':
saltsafeio.fileexists(image)
infile = image
# if the input file is a list, throw an error
if basein[0] == '@':
raise SaltIOError(basein + ' list input instead of a file' )
# optionally update the FORTRAN config file with new values - not implemented currently
# If all looks OK, run the FORTRAN code
if len(pathin) > 0:
dir = pathin
else:
dir = './'
infile = basein
print dir, infile, 'input directory and input file'
# Get current working directory as the Fortran code changes dir
startdir = os.getcwd()
ringfilter_wrapper.ringfilter(dir,axc, ayc,arad, rxc,ryc,filterfreq,filterwidth,itmax,conv,fitwidth,infile)
# go back to starting directory
os.chdir(startdir)
# -----------------------------------------------------------
# main code
# Register this function as a pyraf/IRAF task so it can be invoked from the cl.
parfile = iraf.osfn("saltfp$saltfpringfilter.par")
t = iraf.IrafTaskFactory(taskname="saltfpringfilter",value=parfile,function=saltfpringfilter,pkgname='saltfp')
| saltastro/pysalt | saltfp/saltfpringfilter.py | Python | bsd-3-clause | 7,647 |
<?php
/*=========================================================================
MIDAS Server
Copyright (c) Kitware SAS. 20 rue de la Villette. All rights reserved.
69328 Lyon, FRANCE.
See Copyright.txt for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notices for more information.
=========================================================================*/
/** Sort Daos*/
/** Component providing comparator callbacks (suitable for usort) and a
 *  helper to de-duplicate arrays of DAOs. */
class SortdaoComponent extends AppComponent
  {
  /** Name of the DAO property the comparators read. */
  public $field = '';
  /** Sort direction, 'asc' or 'desc'. */
  public $order = 'asc';

  /** Fetch the configured field from a DAO, throwing if it is unset.
   *  (The original only validated the first operand of each comparison;
   *  both operands are validated now.) */
  private function _fieldValue($dao)
    {
    $field = $this->field;
    if($this->field == '' || !isset($dao->$field))
      {
      throw new Zend_Exception("Error field.");
      }
    return $dao->$field;
    }

  /** Compare two normalized values honouring $this->order.
   *  Returns -1/0/1 as expected by usort(). */
  private function _compareValues($a_v, $b_v)
    {
    if($a_v == $b_v)
      {
      return 0;
      }
    if($this->order == 'asc')
      {
      return ($a_v < $b_v) ? -1 : 1;
      }
    return ($a_v < $b_v) ? 1 : -1;
    }

  /** Compare two DAOs by a date field (parsed with strtotime).
   *  NOTE(review): with order == 'asc' this places the LATER date first
   *  (newest-first), the opposite of sortByName's direction; preserved
   *  as-is since callers may rely on it — confirm intent. */
  public function sortByDate($a, $b)
    {
    $a_t = strtotime($this->_fieldValue($a));
    $b_t = strtotime($this->_fieldValue($b));
    if($a_t == $b_t)
      {
      return 0;
      }
    if($this->order == 'asc')
      {
      return ($a_t > $b_t) ? -1 : 1;
      }
    return ($a_t > $b_t) ? 1 : -1;
    }//end sortByDate

  /** Compare two DAOs by a name field, case-insensitively. */
  public function sortByName($a, $b)
    {
    return $this->_compareValues(strtolower($this->_fieldValue($a)),
                                 strtolower($this->_fieldValue($b)));
    }//end sortByName

  /** Compare two DAOs by a numeric field.
   *  NOTE(review): identical to sortByName in the original (values pass
   *  through strtolower and PHP's loose comparison); numeric strings only
   *  compare numerically thanks to PHP type juggling — confirm intent. */
  public function sortByNumber($a, $b)
    {
    return $this->_compareValues(strtolower($this->_fieldValue($a)),
                                 strtolower($this->_fieldValue($b)));
    }//end sortByNumber

  /** Remove duplicate entries from an array of DAOs (objects are compared
   *  by their public properties). Keys are reindexed unless
   *  $keep_key_assoc is true. */
  public function arrayUniqueDao($array, $keep_key_assoc = false)
    {
    $duplicate_keys = array();
    $tmp = array();

    foreach($array as $key => $val)
      {
      // convert objects to arrays, in_array() does not support objects
      if(is_object($val))
        {
        $val = (array)$val;
        }

      if(!in_array($val, $tmp))
        {
        $tmp[] = $val;
        }
      else
        {
        $duplicate_keys[] = $key;
        }
      }

    foreach($duplicate_keys as $key)
      {
      unset($array[$key]);
      }

    return $keep_key_assoc ? $array : array_values($array);
    }
  } // end class
__author__ = 'keltonhalbert, wblumberg'
from sharppy.viz import plotSkewT, plotHodo, plotText, plotAnalogues
from sharppy.viz import plotThetae, plotWinds, plotSpeed, plotKinematics #, plotGeneric
from sharppy.viz import plotSlinky, plotWatch, plotAdvection, plotSTP, plotWinter
from sharppy.viz import plotSHIP, plotSTPEF, plotFire, plotVROT
from PySide.QtCore import *
from PySide.QtGui import *
import sharppy.sharptab.profile as profile
import sharppy.sharptab as tab
import sharppy.io as io
from datetime import datetime, timedelta
import numpy as np
import platform
from os.path import expanduser
import os
from sharppy.version import __version__, __version_name__
class SPCWidget(QWidget):
    """
    This will create the full SPC window, handle the organization
    of the insets, and handle all click/key events and features.
    """

    # Maps inset id -> plotting widget class; instantiated in initData().
    inset_generators = {
        'SARS':plotAnalogues,
        'STP STATS':plotSTP,
        'COND STP':plotSTPEF,
        'WINTER':plotWinter,
        'FIRE':plotFire,
        'SHIP':plotSHIP,
        'VROT':plotVROT,
    }

    # Maps inset id -> human-readable title (sorted for the menu in __init__).
    inset_names = {
        'SARS':'Sounding Analogues',
        'STP STATS':'Sig-Tor Stats',
        'COND STP':'EF-Scale Probs (Sig-Tor)',
        'WINTER':'Winter Weather',
        'FIRE':'Fire Weather',
        'SHIP':'Sig-Hail Stats',
        'VROT':'EF-Scale Probs (V-Rot)',
    }
# Build the SPC-style window: restore inset/parcel choices from the config,
# create the layout managers, and construct all child widgets.
def __init__(self, **kwargs):
    parent = kwargs.get('parent', None)
    super(SPCWidget, self).__init__(parent=parent)
    """
    """
    ## these are the keyword arguments used to define what
    ## sort of profile is being viewed
    self.prof_collections = []
    self.prof_ids = []
    self.default_prof = None
    self.pc_idx = 0
    self.config = kwargs.get("cfg")
    self.dgz = False
    self.mode = ""

    ## these are used to display profiles
    self.parcel_type = "MU"
    self.coll_observed = False

    # Seed the config file with defaults on first run.
    if not self.config.has_section('insets'):
        self.config.add_section('insets')
        self.config.set('insets', 'right_inset', 'STP STATS')
        self.config.set('insets', 'left_inset', 'SARS')
    if not self.config.has_section('parcel_types'):
        self.config.add_section('parcel_types')
        self.config.set('parcel_types', 'pcl1', 'SFC')
        self.config.set('parcel_types', 'pcl2', 'ML')
        self.config.set('parcel_types', 'pcl3', 'FCST')
        self.config.set('parcel_types', 'pcl4', 'MU')
    if not self.config.has_option('paths', 'save_img'):
        self.config.set('paths', 'save_img', expanduser('~'))
        self.config.set('paths', 'save_txt', expanduser('~'))

    ## these are the boolean flags used throughout the program
    self.swap_inset = False

    ## initialize empty variables to hold objects that will be
    ## used later
    self.left_inset_ob = None
    self.right_inset_ob = None

    ## these are used for insets and inset swapping
    # Sort inset ids by their human-readable name for menu display.
    insets = sorted(SPCWidget.inset_names.items(), key=lambda i: i[1])
    inset_ids, inset_names = zip(*insets)
    self.available_insets = inset_ids
    self.left_inset = self.config.get('insets', 'left_inset')
    self.right_inset = self.config.get('insets', 'right_inset')
    self.insets = {}

    self.parcel_types = [self.config.get('parcel_types', 'pcl1'), self.config.get('parcel_types', 'pcl2'), \
                         self.config.get('parcel_types', 'pcl3'),self.config.get('parcel_types', 'pcl4')]

    ## initialize the rest of the window attributes, layout managers, etc
    self.setStyleSheet("QWidget {background-color: rgb(0, 0, 0);}")

    ## set the the whole window's layout manager
    self.grid = QGridLayout()
    self.grid.setContentsMargins(1,1,1,1)
    self.grid.setHorizontalSpacing(0)
    self.grid.setVerticalSpacing(2)
    self.setLayout(self.grid)

    ## handle the upper right portion of the window...
    ## hodograph, SRWinds, Storm Slinky, theta-e all go in this frame
    self.urparent = QFrame()
    self.urparent_grid = QGridLayout()
    self.urparent_grid.setContentsMargins(0, 0, 0, 0)
    self.urparent_grid.setVerticalSpacing(0)
    self.urparent.setLayout(self.urparent_grid)
    self.ur = QFrame()
    self.ur.setStyleSheet("QFrame {"
                          "  background-color: rgb(0, 0, 0);"
                          "  border-width: 0px;"
                          "  border-style: solid;"
                          "  border-color: rgb(255, 255, 255);"
                          "  margin: 0px;}")

    # Version banner shown above the hodograph frame.
    self.brand = QLabel("SHARPpy Beta v%s %s" % (__version__, __version_name__))
    self.brand.setAlignment(Qt.AlignRight)
    self.brand.setStyleSheet("QFrame {"
                             "  background-color: rgb(0, 0, 0);"
                             "  text-align: right;"
                             "  padding-top: 4px;"
                             "  padding-bottom: 4px;"
                             "  font-size: 11px;"
                             "  color: #FFFFFF;}")

    ## this layout manager will handle the upper right portion of the window
    self.grid2 = QGridLayout()
    self.grid2.setHorizontalSpacing(0)
    self.grid2.setVerticalSpacing(0)
    self.grid2.setContentsMargins(0, 0, 0, 0)
    self.ur.setLayout(self.grid2)
    self.urparent_grid.addWidget(self.brand, 0, 0, 1, 0)
    self.urparent_grid.addWidget(self.ur, 1, 0, 50, 0)
    ## add the upper-right frame to the main frame
    self.grid.addWidget(self.urparent, 0, 1, 3, 1)

    ## Handle the Text Areas
    self.text = QFrame()
    self.text.setStyleSheet("QWidget {"
                            "  background-color: rgb(0, 0, 0);"
                            "  border-width: 2px;"
                            "  border-style: solid;"
                            "  border-color: #3399CC;}")
    self.grid3 = QGridLayout()
    self.grid3.setHorizontalSpacing(0)
    self.grid3.setContentsMargins(0, 0, 0, 0)
    self.text.setLayout(self.grid3)

    ## set to menu stuff
    self.setUpdatesEnabled(True)
    self.setContextMenuPolicy(Qt.CustomContextMenu)
    self.customContextMenuRequested.connect(self.showCursorMenu)

    ## initialize the data frames
    self.initData()
    self.loadWidgets()
def getParcelObj(self, prof, name):
    """Return the parcel object attached to ``prof`` for a short parcel name.

    Known names: SFC, ML, FCST, MU, EFF, USER.  Returns None for any other
    name, mirroring the original if/elif chain falling through.
    """
    # Lazy attribute lookup (only the matched parcel attribute is touched).
    attr = {'SFC': 'sfcpcl', 'ML': 'mlpcl', 'FCST': 'fcstpcl',
            'MU': 'mupcl', 'EFF': 'effpcl', 'USER': 'usrpcl'}.get(name)
    if attr is None:
        return None
    return getattr(prof, attr)
def getParcelName(self, prof, pcl):
    """Return the short name ("SFC", "ML", ...) of ``pcl`` by comparing it
    against the parcels attached to ``prof``; None if none of them match."""
    pairs = (('SFC', 'sfcpcl'), ('ML', 'mlpcl'), ('FCST', 'fcstpcl'),
             ('MU', 'mupcl'), ('EFF', 'effpcl'), ('USER', 'usrpcl'))
    for name, attr in pairs:
        # Same comparison order as the original chain (pcl == prof.<attr>).
        if pcl == getattr(prof, attr):
            return name
    return None
# Prompt for a file name and save a PNG screenshot of the whole window.
# The chosen directory is remembered in the config for next time.
def saveimage(self):
    path = self.config.get('paths', 'save_img')
    file_types = "PNG (*.png)"
    file_name, result = QFileDialog.getSaveFileName(self, "Save Image", path, file_types)
    if result:
        pixmap = QPixmap.grabWidget(self)
        pixmap.save(file_name, 'PNG', 100)
        self.config.set('paths', 'save_img', os.path.dirname(file_name))

# Prompt for a file name and write the highlighted profile as text.
# The chosen directory is remembered in the config for next time.
def savetext(self):
    path = self.config.get('paths', 'save_txt')
    file_types = "TXT (*.txt)"
    file_name, result = QFileDialog.getSaveFileName(self, "Save Sounding Text", path, file_types)
    if result:
        self.default_prof.toFile(file_name)
        self.config.set('paths', 'save_txt', os.path.dirname(file_name))
def initData(self):
    """
    Initializes all the widgets for the window.
    This gets initially called by __init__
    :return:
    """
    # Skew-T and hodograph panels.
    self.sound = plotSkewT(dgz=self.dgz)
    self.hodo = plotHodo()

    ## initialize the non-swappable insets
    self.speed_vs_height = plotSpeed()
    self.inferred_temp_advection = plotAdvection()
    self.storm_slinky = plotSlinky()
    self.thetae_vs_pressure = plotThetae()
    self.srwinds_vs_height = plotWinds()
    self.watch_type = plotWatch()
    self.convective = plotText(self.parcel_types)
    self.kinematic = plotKinematics()

    # initialize swappable insets (one instance per registered generator)
    for inset, inset_gen in SPCWidget.inset_generators.iteritems():
        self.insets[inset] = inset_gen()

    self.right_inset_ob = self.insets[self.right_inset]
    self.left_inset_ob = self.insets[self.left_inset]

    # Connect signals to slots
    self.convective.updatepcl.connect(self.updateParcel)
    self.sound.parcel.connect(self.defineUserParcel)
    self.sound.modified.connect(self.modifyProf)
    self.sound.reset.connect(self.resetProfModifications)
    self.hodo.modified.connect(self.modifyProf)
    self.hodo.reset.connect(self.resetProfModifications)
    self.insets["SARS"].updatematch.connect(self.updateSARS)
# Register a new profile collection with the widget (and the skew-T and
# hodograph), optionally focusing it, and sync all model collections to the
# focused collection's current time.
def addProfileCollection(self, prof_col, prof_id, focus=True):
    self.prof_collections.append(prof_col)
    self.prof_ids.append(prof_id)
    self.sound.addProfileCollection(prof_col)
    self.hodo.addProfileCollection(prof_col)
    if focus:
        self.pc_idx = len(self.prof_collections) - 1

    # Adding model (non-observed) data drops out of the "all observed" mode.
    if not prof_col.getMeta('observed'):
        self.coll_observed = False
        self.sound.setAllObserved(self.coll_observed, update_gui=False)
        self.hodo.setAllObserved(self.coll_observed, update_gui=False)

    cur_dt = self.prof_collections[self.pc_idx].getCurrentDate()
    # NOTE(review): this loop variable shadows the prof_col parameter above;
    # harmless here since the parameter is no longer needed, but confusing.
    for prof_col in self.prof_collections:
        if not prof_col.getMeta('observed'):
            prof_col.setCurrentDate(cur_dt)

    self.updateProfs()
@Slot(str)
# Focus the profile collection identified by prof_id and sync all model
# collections to its current time. Unknown ids are reported and ignored.
def setProfileCollection(self, prof_id):
    try:
        self.pc_idx = self.prof_ids.index(prof_id)
    except ValueError:
        print "Hmmm, that profile doesn't exist to be focused ..."
        return

    cur_dt = self.prof_collections[self.pc_idx].getCurrentDate()
    for prof_col in self.prof_collections:
        if not prof_col.getMeta('observed'):
            prof_col.setCurrentDate(cur_dt)

    self.updateProfs()
def rmProfileCollection(self, prof_id):
try:
pc_idx = self.prof_ids.index(prof_id)
except ValueError:
print "Hmmm, that profile doesn't exist to be removed ..."
prof_col = self.prof_collections.pop(pc_idx)
self.prof_ids.pop(pc_idx)
self.sound.rmProfileCollection(prof_col)
self.hodo.rmProfileCollection(prof_col)
# If we've removed an analog, remove it from the profile it's an analog to.
if prof_col.hasMeta('filematch'):
filematch = prof_col.getMeta('filematch')
for pc in self.prof_collections:
if pc.hasMeta('analogfile'):
keys, vals = zip(*pc.getMeta('analogfile').items())
if filematch in vals:
keys = list(keys); vals = list(vals)
idx = vals.index(filematch)
vals.pop(idx)
keys.pop(idx)
pc.setMeta('analogfile', dict(zip(keys, vals)))
self.insets['SARS'].clearSelection()
if self.pc_idx == pc_idx:
self.pc_idx = 0
elif self.pc_idx > pc_idx:
self.pc_idx -= 1
self.updateProfs()
def isAllObserved(self):
    """True only when every loaded profile collection is observed data."""
    for prof_col in self.prof_collections:
        if not prof_col.getMeta('observed'):
            return False
    return True
def isInterpolated(self):
    """Whether the currently focused profile collection is interpolated."""
    focused = self.prof_collections[self.pc_idx]
    return focused.isInterpolated()
# Push the currently focused/highlighted profile into every panel and inset,
# then re-highlight the selected parcel on the skew-T and storm slinky.
def updateProfs(self):
    prof_col = self.prof_collections[self.pc_idx]
    self.default_prof = prof_col.getHighlightedProf()

    # update the profiles
    self.sound.setActiveCollection(self.pc_idx, update_gui=False)
    self.hodo.setActiveCollection(self.pc_idx)
    self.storm_slinky.setProf(self.default_prof)
    self.inferred_temp_advection.setProf(self.default_prof)
    self.speed_vs_height.setProf(self.default_prof)
    self.srwinds_vs_height.setProf(self.default_prof)
    self.thetae_vs_pressure.setProf(self.default_prof)
    self.watch_type.setProf(self.default_prof)
    self.convective.setProf(self.default_prof)
    self.kinematic.setProf(self.default_prof)
    for inset in self.insets.keys():
        self.insets[inset].setProf(self.default_prof)

    # Update the parcels to match the new profiles
    parcel = self.getParcelObj(self.default_prof, self.parcel_type)
    self.sound.setParcel(parcel)
    self.storm_slinky.setParcel(parcel)
@Slot(tab.params.Parcel)
def updateParcel(self, pcl):
    """Slot fired when the user picks a new parcel in the text inset:
    highlight it on the skew-T and storm slinky, and persist the four
    parcel choices (pcl1..pcl4) back to the config."""
    self.parcel_type = self.getParcelName(self.default_prof, pcl)
    self.sound.setParcel(pcl)
    self.storm_slinky.setParcel(pcl)

    # Replaces four copy-pasted config.set calls with one loop.
    for num, pcl_type in enumerate(self.convective.pcl_types[:4], 1):
        self.config.set('parcel_types', 'pcl%d' % num, pcl_type)
@Slot(str)
# Slot fired when the user selects a SARS analog: decode the matched SPC
# file, tag it as an observed "Analog" collection linked to the current
# time, record the link on the focused collection, and add it (unfocused).
def updateSARS(self, filematch):
    prof_col = self.prof_collections[self.pc_idx]
    dec = io.spc_decoder.SPCDecoder(filematch)
    match_col = dec.getProfiles()
    match_col.setMeta('model', 'Analog')
    match_col.setMeta('run', prof_col.getCurrentDate())
    match_col.setMeta('fhour', None)
    match_col.setMeta('observed', True)
    match_col.setMeta('filematch', filematch)
    match_col.setAnalogToDate(prof_col.getCurrentDate())

    # Remember which analog file is linked to the current time.
    dt = prof_col.getCurrentDate()
    if prof_col.hasMeta('analogfile'):
        analogfiles = prof_col.getMeta('analogfile')
        analogfiles[dt] = filematch
    else:
        analogfiles = {dt:filematch}
    prof_col.setMeta('analogfile', analogfiles)
    self.parentWidget().addProfileCollection(match_col, focus=False)
@Slot(tab.params.Parcel)
# Store a user-defined parcel on the focused collection and refresh.
def defineUserParcel(self, parcel):
    self.prof_collections[self.pc_idx].defineUserParcel(parcel)
    self.updateProfs()
    self.setFocus()

@Slot(int, dict)
# Apply a point edit (from the skew-T or hodograph) at level index ``idx``.
def modifyProf(self, idx, kwargs):
    self.prof_collections[self.pc_idx].modify(idx, **kwargs)
    self.updateProfs()
    self.setFocus()

# Interpolate the focused collection onto a regular grid and refresh.
def interpProf(self):
    self.prof_collections[self.pc_idx].interp()
    self.updateProfs()
    self.setFocus()

@Slot(list)
# Undo user edits on the focused collection (args forwarded verbatim).
def resetProfModifications(self, args):
    self.prof_collections[self.pc_idx].resetModification(*args)
    self.updateProfs()
    self.setFocus()

# Undo interpolation on the focused collection and refresh.
def resetProfInterpolation(self):
    self.prof_collections[self.pc_idx].resetInterpolation()
    self.updateProfs()
    self.setFocus()
@Slot()
# Flip the "treat all collections as observed" mode on the skew-T/hodograph.
def toggleCollectObserved(self):
    self.coll_observed = not self.coll_observed
    self.sound.setAllObserved(self.coll_observed)
    self.hodo.setAllObserved(self.coll_observed)
def loadWidgets(self):
    """Place every child widget into the window's grid layouts.

    grid2 holds the upper-right insets, grid3 the lower inset row
    (convective / kinematic / left / right), and grid the skew-T plus
    the text output area.
    """
    ## add the upper-right window insets
    self.grid2.addWidget(self.speed_vs_height, 0, 0, 11, 3)
    self.grid2.addWidget(self.inferred_temp_advection, 0, 3, 11, 2)
    self.grid2.addWidget(self.hodo, 0, 5, 8, 24)
    self.grid2.addWidget(self.storm_slinky, 8, 5, 3, 6)
    self.grid2.addWidget(self.thetae_vs_pressure, 8, 11, 3, 6)
    self.grid2.addWidget(self.srwinds_vs_height, 8, 17, 3, 6)
    self.grid2.addWidget(self.watch_type, 8, 23, 3, 6)
    # Draw the kinematic and convective insets
    self.grid3.addWidget(self.convective, 0, 0)
    self.grid3.addWidget(self.kinematic, 0, 1)
    # Set Left Inset
    self.grid3.addWidget(self.left_inset_ob, 0, 2)
    # Set Right Inset
    self.grid3.addWidget(self.right_inset_ob, 0, 3)
    ## do a check for setting the dendretic growth zone
    # If either inset is the winter inset, highlight the DGZ on the skew-T.
    if self.left_inset == "WINTER" or self.right_inset == "WINTER":
        self.sound.setDGZ(True)
        self.dgz = True
    self.grid.addWidget(self.sound, 0, 0, 3, 1)
    self.grid.addWidget(self.text, 3, 0, 1, 2)
def advanceTime(self, direction):
    """Step the displayed time forward (+1) or backward (-1).

    For an observed collection, focus jumps to the chronologically
    next/previous observed collection at the same location; for model
    collections, the focused collection itself is advanced and every
    other (non-observed) collection is synced to the new date. Afterwards
    the SARS inset selection is restored if an analogue match is
    remembered for the new date.
    """
    # Disabled with no data loaded or while in "collect observed" mode.
    if len(self.prof_collections) == 0 or self.coll_observed:
        return
    prof_col = self.prof_collections[self.pc_idx]
    if prof_col.getMeta('observed'):
        cur_dt = prof_col.getCurrentDate()
        cur_loc = prof_col.getMeta('loc')
        # All observed collections at this location, sorted by date; keep
        # their indexes so focus can be moved to the neighbouring one.
        idxs, dts = zip(*sorted(((idx, pc.getCurrentDate()) for idx, pc in enumerate(self.prof_collections) if pc.getMeta('loc') == cur_loc and pc.getMeta('observed')), key=lambda x: x[1]))
        dt_idx = dts.index(cur_dt)
        # Wrap around at either end of the sequence.
        dt_idx = (dt_idx + direction) % len(dts)
        self.pc_idx = idxs[dt_idx]
        cur_dt = self.prof_collections[self.pc_idx].getCurrentDate()
    else:
        cur_dt = prof_col.advanceTime(direction)
    # Keep every model collection on the same date.
    for prof_col in self.prof_collections:
        if not prof_col.getMeta('observed'):
            prof_col.setCurrentDate(cur_dt)
    self.parcel_types = self.convective.pcl_types
    self.updateProfs()
    # Restore the SARS analogue highlight for the newly focused date.
    prof_col = self.prof_collections[self.pc_idx]
    if prof_col.hasMeta('analogfile'):
        match = prof_col.getMeta('analogfile')
        dt = prof_col.getCurrentDate()
        if dt in match:
            self.insets['SARS'].setSelection(match[dt])
        else:
            self.insets['SARS'].clearSelection()
    else:
        self.insets['SARS'].clearSelection()
def swapProfCollections(self):
    """Cycle focus to the next profile collection valid at the current time.

    In "collect observed" mode every collection is a candidate; otherwise
    only collections sharing the current date. Restores the SARS inset
    selection if the newly focused collection remembers an analogue match
    for this date.
    """
    # See if we have any other observed profiles loaded at this time.
    prof_col = self.prof_collections[self.pc_idx]
    dt = prof_col.getCurrentDate()
    idxs, pcs = zip(*[ (idx, pc) for idx, pc in enumerate(self.prof_collections) if pc.getCurrentDate() == dt or self.coll_observed ])
    # Advance to the next candidate, wrapping around.
    loc_idx = pcs.index(prof_col)
    loc_idx = (loc_idx + 1) % len(pcs)
    self.pc_idx = idxs[loc_idx]
    self.updateProfs()
    if self.prof_collections[self.pc_idx].hasMeta('analogfile'):
        match = self.prof_collections[self.pc_idx].getMeta('analogfile')
        # NOTE(review): dt is re-read from the previously focused collection;
        # presumably both share the same current date -- confirm.
        dt = prof_col.getCurrentDate()
        if dt in match:
            self.insets['SARS'].setSelection(match[dt])
        else:
            self.insets['SARS'].clearSelection()
    else:
        self.insets['SARS'].clearSelection()
def closeEvent(self, e):
    """Forward the close event to the skew-T widget and cancel any
    in-flight background copies on every loaded profile collection."""
    self.sound.closeEvent(e)
    for collection in self.prof_collections:
        collection.cancelCopy()
def makeInsetMenu(self, *exclude):
    """Build the right-click menu of available insets.

    Inset names passed in ``exclude`` (typically the two currently shown)
    are omitted. Actions are checkable and grouped so only one can be
    selected; triggering one calls swapInset().
    """
    # This will make the menu of the available insets.
    self.popupmenu=QMenu("Inset Menu")
    self.menu_ag = QActionGroup(self, exclusive=True)
    for inset in self.available_insets:
        if inset not in exclude:
            inset_action = QAction(self)
            inset_action.setText(SPCWidget.inset_names[inset])
            # Store the inset key on the action for swapInset() to read.
            inset_action.setData(inset)
            inset_action.setCheckable(True)
            inset_action.triggered.connect(self.swapInset)
            a = self.menu_ag.addAction(inset_action)
            self.popupmenu.addAction(a)
def showCursorMenu(self, pos):
    """Show the inset-swap popup when the user right-clicks an inset.

    ``pos`` is in widget coordinates; which child widget sits under it
    determines whether the LEFT or RIGHT inset will be swapped.
    """
    # Rebuild the menu each time so the currently shown insets are excluded.
    self.makeInsetMenu(self.left_inset, self.right_inset)
    if self.childAt(pos.x(), pos.y()) is self.right_inset_ob:
        self.inset_to_swap = "RIGHT"
        self.popupmenu.popup(self.mapToGlobal(pos))
        self.setFocus()
    elif self.childAt(pos.x(), pos.y()) is self.left_inset_ob:
        self.inset_to_swap = "LEFT"
        self.popupmenu.popup(self.mapToGlobal(pos))
        self.setFocus()
def swapInset(self):
    """Replace the LEFT or RIGHT inset with the one chosen from the popup.

    The side is taken from ``self.inset_to_swap`` (set by showCursorMenu),
    the new inset key from the checked action's data. Also toggles the
    skew-T's dendritic growth zone highlight when the winter inset enters
    or leaves the display, and persists the choice to the config.
    """
    ## This will swap either the left or right inset depending on whether or not the
    ## self.inset_to_swap value is LEFT or RIGHT.
    a = self.menu_ag.checkedAction()
    if self.inset_to_swap == "LEFT":
        # Winter inset is leaving the display: turn the DGZ highlight off.
        if self.left_inset == "WINTER" and self.dgz:
            self.sound.setDGZ(False)
            self.dgz = False
        # Delete and re-make the inset. For some stupid reason, pyside/QT forces you to
        # delete something you want to remove from the layout.
        self.left_inset_ob.deleteLater()
        self.insets[self.left_inset] = SPCWidget.inset_generators[self.left_inset]()
        self.insets[self.left_inset].setProf(self.default_prof)
        # Switch to the newly selected inset and persist the choice.
        self.left_inset = a.data()
        self.left_inset_ob = self.insets[self.left_inset]
        self.grid3.addWidget(self.left_inset_ob, 0, 2)
        self.config.set('insets', 'left_inset', self.left_inset)
    elif self.inset_to_swap == "RIGHT":
        if self.right_inset == "WINTER" and self.dgz:
            self.sound.setDGZ(False)
            self.dgz = False
        # Delete and re-make the inset. For some stupid reason, pyside/QT forces you to
        # delete something you want to remove from the layout.
        self.right_inset_ob.deleteLater()
        self.insets[self.right_inset] = SPCWidget.inset_generators[self.right_inset]()
        self.insets[self.right_inset].setProf(self.default_prof)
        self.right_inset = a.data()
        self.right_inset_ob = self.insets[self.right_inset]
        self.grid3.addWidget(self.right_inset_ob, 0, 3)
        self.config.set('insets', 'right_inset', self.right_inset)
    # Winter inset is entering the display: turn the DGZ highlight on.
    if a.data() == "WINTER":
        self.sound.setDGZ(True)
        self.dgz = True
    self.setFocus()
    self.update()
class SPCWindow(QMainWindow):
    """Top-level sounding window.

    Wraps a single SPCWidget, provides the menu bar (save image/text and
    per-profile Focus/Remove entries), keyboard navigation between times
    and collections, and hands focus back to the picker window on demand.
    """

    # Emitted when this window closes so the picker can clean up.
    closed = Signal()

    def __init__(self, **kwargs):
        """``kwargs`` are forwarded to SPCWidget; 'parent' (the picker
        window, if any) is remembered for focus hand-back but the window
        itself is created top-level (no Qt parent)."""
        parent = kwargs.get('parent', None)
        super(SPCWindow, self).__init__()
        self.menu_items = []
        self.picker_window = parent
        self.__initUI(**kwargs)

    def __initUI(self, **kwargs):
        """Create the central SPCWidget, menu bar, title, style, and geometry."""
        kwargs['parent'] = self
        self.spc_widget = SPCWidget(**kwargs)
        self.setCentralWidget(self.spc_widget)
        self.createMenuBar()
        title = 'SHARPpy: Sounding and Hodograph Analysis and Research Program '
        title += 'in Python'
        self.setWindowTitle(title)
        self.setStyleSheet("QMainWindow { background-color: rgb(0, 0, 0); }")
        ## handle the attribute of the main window
        # Offset slightly on Windows so the title bar stays reachable.
        if platform.system() == 'Windows':
            self.setGeometry(10,30,1180,800)
        else:
            self.setGeometry(0, 0, 1180, 800)
        self.show()
        self.raise_()

    def createMenuBar(self):
        """Build the File and Profiles menus plus the signal mappers that
        route per-profile Focus/Remove actions by menu name."""
        bar = self.menuBar()
        filemenu = bar.addMenu("File")
        saveimage = QAction("Save Image", self, shortcut=QKeySequence("Ctrl+S"))
        saveimage.triggered.connect(self.spc_widget.saveimage)
        filemenu.addAction(saveimage)
        savetext = QAction("Save Text", self, shortcut=QKeySequence("Ctrl+Shift+S"))
        savetext.triggered.connect(self.spc_widget.savetext)
        filemenu.addAction(savetext)
        self.profilemenu = bar.addMenu("Profiles")
        self.allobserved = QAction("Collect Observed", self, checkable=True, shortcut=QKeySequence("C"))
        self.allobserved.triggered.connect(self.spc_widget.toggleCollectObserved)
        self.profilemenu.addAction(self.allobserved)
        # Interpolate/reset share the "I" shortcut; only one is visible at a time
        # (see setInterpolated()), so the shortcut never conflicts.
        self.interpolate = QAction("Interpolate Focused Profile", self, shortcut=QKeySequence("I"))
        self.interpolate.triggered.connect(self.interpProf)
        self.profilemenu.addAction(self.interpolate)
        self.resetinterp = QAction("Reset Interpolation", self, shortcut=QKeySequence("I"))
        self.resetinterp.triggered.connect(self.resetProf)
        self.resetinterp.setVisible(False)
        self.profilemenu.addAction(self.resetinterp)
        self.profilemenu.addSeparator()
        # The mappers translate a triggered menu action into its menu name.
        self.focus_mapper = QSignalMapper(self)
        self.remove_mapper = QSignalMapper(self)
        self.focus_mapper.mapped[str].connect(self.spc_widget.setProfileCollection)
        self.remove_mapper.mapped[str].connect(self.rmProfileCollection)

    def createProfileMenu(self, prof_col):
        """Add a Profiles submenu (Focus/Remove) for ``prof_col``.

        The first profile's Remove entry is hidden: the last remaining
        collection can never be removed.
        """
        menu_name = self.createMenuName(prof_col)
        prof_menu = self.profilemenu.addMenu(menu_name)
        focus = QAction("Focus", self)
        focus.triggered.connect(self.focus_mapper.map)
        self.focus_mapper.setMapping(focus, menu_name)
        prof_menu.addAction(focus)
        remove = QAction("Remove", self)
        remove.triggered.connect(self.remove_mapper.map)
        self.remove_mapper.setMapping(remove, menu_name)
        prof_menu.addAction(remove)
        if len(self.menu_items) == 0:
            remove.setVisible(False)
        self.menu_items.append(prof_menu)

    def removeProfileMenu(self, menu_name):
        """Hide every Profiles submenu titled ``menu_name`` (menus are hidden,
        not deleted, so names stay reserved)."""
        menu_items = [ mitem for mitem in self.menu_items if mitem.title() == menu_name ]
        for mitem in menu_items:
            mitem.menuAction().setVisible(False)

    def addProfileCollection(self, prof_col, focus=True):
        """Register ``prof_col`` with the window and the SPCWidget.

        If a visible menu with the same name already exists, focus is simply
        moved to that collection. Adding a model (non-observed) collection
        disables "Collect Observed" mode. On failure the add is rolled back
        via abortProfileAdd().
        """
        menu_name = self.createMenuName(prof_col)
        if any( mitem.title() == menu_name and mitem.menuAction().isVisible() for mitem in self.menu_items ):
            self.spc_widget.setProfileCollection(menu_name)
            return
        if not prof_col.getMeta('observed'):
            self.allobserved.setDisabled(True)
            self.allobserved.setChecked(False)
        self.createProfileMenu(prof_col)
        # With more than one visible collection, the first one's hidden
        # Remove entry becomes available again.
        visible_mitems = [ mitem for mitem in self.menu_items if mitem.menuAction().isVisible() ]
        if len(visible_mitems) > 1:
            actions = visible_mitems[0].actions()
            names = [ act.text() for act in actions ]
            actions[names.index("Remove")].setVisible(True)
        try:
            self.spc_widget.addProfileCollection(prof_col, menu_name, focus=focus)
        except Exception as exc:
            self.abortProfileAdd(menu_name, str(exc))

    @Slot(str)
    def rmProfileCollection(self, menu_name):
        """Remove the collection named ``menu_name`` from the menus and the
        SPCWidget, re-enabling "Collect Observed" if only observed data
        remains and hiding the last collection's Remove entry."""
        self.removeProfileMenu(menu_name)
        self.spc_widget.rmProfileCollection(menu_name)
        if self.spc_widget.isAllObserved():
            self.allobserved.setDisabled(False)
        visible_mitems = [ mitem for mitem in self.menu_items if mitem.menuAction().isVisible() ]
        if len(visible_mitems) == 1:
            actions = visible_mitems[0].actions()
            names = [ act.text() for act in actions ]
            actions[names.index("Remove")].setVisible(False)

    def abortProfileAdd(self, menu_name, exc):
        """Show an error dialog for a failed data retrieval and roll back.

        If this was the only collection, the window closes and focus returns
        to the picker; otherwise just the failed collection is removed.
        """
        msgbox = QMessageBox()
        msgbox.setText("An error has occurred while retrieving the data.")
        msgbox.setInformativeText("Try another site or model or try again later.")
        msgbox.setDetailedText(exc)
        msgbox.setIcon(QMessageBox.Critical)
        msgbox.exec_()
        if len(self.menu_items) == 1:
            self.focusPicker()
            self.close()
        else:
            self.rmProfileCollection(menu_name)

    def keyPressEvent(self, e):
        """Keyboard navigation: Left/Right step time, Space swaps the focused
        collection, Ctrl+S saves an image, W focuses the picker window."""
        #TODO: Up and down keys to loop through profile collection members.
        if e.key() == Qt.Key_Left:
            self.spc_widget.advanceTime(-1)
            self.setInterpolated(self.spc_widget.isInterpolated())
        elif e.key() == Qt.Key_Right:
            self.spc_widget.advanceTime(1)
            self.setInterpolated(self.spc_widget.isInterpolated())
        elif e.key() == Qt.Key_Space:
            # Swap the profile collections
            self.spc_widget.swapProfCollections()
            self.setInterpolated(self.spc_widget.isInterpolated())
        elif e.matches(QKeySequence.Save):
            # Save an image
            self.spc_widget.saveimage()
        elif e.key() == Qt.Key_W:
            self.focusPicker()

    def closeEvent(self, e):
        """Forward the close to the SPCWidget and notify listeners."""
        self.spc_widget.closeEvent(e)
        self.closed.emit()

    def createMenuName(self, prof_col):
        """Build the 'LOC (DD/HHZ MODEL)' menu title for ``prof_col``."""
        pc_loc = prof_col.getMeta('loc')
        pc_date = prof_col.getMeta('run').strftime("%d/%HZ")
        pc_model = prof_col.getMeta('model')
        return "%s (%s %s)" % (pc_loc, pc_date, pc_model)

    def interpProf(self):
        """Menu handler: interpolate the focused profile and flip the
        Interpolate/Reset menu visibility."""
        self.setInterpolated(True)
        self.spc_widget.interpProf()

    def resetProf(self):
        """Menu handler: undo interpolation and flip the menu visibility back."""
        self.setInterpolated(False)
        self.spc_widget.resetProfInterpolation()

    def setInterpolated(self, is_interpolated):
        """Show exactly one of the Interpolate/Reset Interpolation actions."""
        self.resetinterp.setVisible(is_interpolated)
        self.interpolate.setVisible(not is_interpolated)

    def focusPicker(self):
        """Bring the picker window (if any) to the front and focus it."""
        if self.picker_window is not None:
            self.picker_window.activateWindow()
            self.picker_window.setFocus()
            self.picker_window.raise_()
| djgagne/SHARPpy | sharppy/viz/SPCWindow.py | Python | bsd-3-clause | 29,262 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Gdata
* @subpackage App
* @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
/**
* @namespace
*/
namespace Zend\GData\App;
/**
* Gdata App exceptions
*
* Class to represent exceptions that occur during Gdata App operations.
*
* @uses \Exception
* @category Zend
* @package Zend_Gdata
* @subpackage App
* @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class Exception extends \Exception
{
    // Intentionally empty: this subclass exists only to give GData App
    // failures a distinct type, so callers can catch
    // \Zend\GData\App\Exception separately from generic \Exception.
}
| Techlightenment/zf2 | library/Zend/GData/App/Exception.php | PHP | bsd-3-clause | 1,153 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE122_Heap_Based_Buffer_Overflow__c_CWE193_char_memcpy_74b.cpp
Label Definition File: CWE122_Heap_Based_Buffer_Overflow__c_CWE193.label.xml
Template File: sources-sink-74b.tmpl.cpp
*/
/*
* @description
* CWE: 122 Heap Based Buffer Overflow
* BadSource: Allocate memory for a string, but do not allocate space for NULL terminator
* GoodSource: Allocate enough memory for a string and the NULL terminator
* Sinks: memcpy
* BadSink : Copy string to data using memcpy()
* Flow Variant: 74 Data flow: data passed in a map from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <map>
#ifndef _WIN32
#include <wchar.h>
#endif
/* MAINTENANCE NOTE: The length of this string should equal the 10 */
#define SRC_STRING "AAAAAAAAAA"
using namespace std;
namespace CWE122_Heap_Based_Buffer_Overflow__c_CWE193_char_memcpy_74
{

#ifndef OMITBAD

/* Bad sink of flow variant 74: the heap buffer arrives through a std::map
 * from the companion "a" file, which allocated it WITHOUT room for the NUL
 * terminator. The one-byte overflow below is the INTENTIONAL defect this
 * Juliet/CWE-122 test case exists to exhibit -- it must not be "fixed". */
void badSink(map<int, char *> dataMap)
{
    /* copy data out of dataMap */
    char * data = dataMap[2];
    {
        char source[10+1] = SRC_STRING;
        /* Copy length + 1 to include NUL terminator from source */
        /* POTENTIAL FLAW: data may not have enough space to hold source */
        memcpy(data, source, (strlen(source) + 1) * sizeof(char));
        printLine(data);
        free(data);
    }
}

#endif /* OMITBAD */

#ifndef OMITGOOD

/* goodG2B uses the GoodSource with the BadSink */
/* Identical sink code to badSink(); it is safe only because the "a" file's
 * good source allocated one extra byte for the NUL terminator. */
void goodG2BSink(map<int, char *> dataMap)
{
    char * data = dataMap[2];
    {
        char source[10+1] = SRC_STRING;
        /* Copy length + 1 to include NUL terminator from source */
        /* POTENTIAL FLAW: data may not have enough space to hold source */
        memcpy(data, source, (strlen(source) + 1) * sizeof(char));
        printLine(data);
        free(data);
    }
}

#endif /* OMITGOOD */

} /* close namespace */
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE122_Heap_Based_Buffer_Overflow/s06/CWE122_Heap_Based_Buffer_Overflow__c_CWE193_char_memcpy_74b.cpp | C++ | bsd-3-clause | 1,947 |
<?php
/*
Copyright (c) 2008, Till Brehm, projektfarm Gmbh
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of ISPConfig nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
require_once '../../lib/config.inc.php';
require_once '../../lib/app.inc.php';

//* Check permissions for module
$app->auth->check_module_permissions('admin');
$app->auth->check_security_permissions('admin_allow_langedit');

//* This is only allowed for administrators
if(!$app->auth->is_admin()) die('only allowed for administrators.');
if($conf['demo_mode'] == true) $app->error('This function is disabled in demo mode.');

$app->uses('tpl');
$app->tpl->newTemplate('form.tpl.htm');
$app->tpl->setInclude('content_tpl', 'templates/language_edit.htm');

// Request parameters: language code, module name, and language file name.
$lang = $_REQUEST['lang'];
$module = $_REQUEST['module'];
$lang_file = $_REQUEST['lang_file'];

// Whitelist validation -- these values are interpolated into file paths
// below, so anything outside the expected character sets is rejected
// (blocks path traversal such as "../").
if(!preg_match("/^[a-z]+$/i", $lang)) die('unallowed characters in language name.');
if(!preg_match("/^[a-z_]+$/i", $module)) die('unallowed characters in module name.');
if(!preg_match("/^[a-z\._]+$/i", $lang_file)) die('unallowed characters in language file name.');

$msg = '';

//* Save data
// Regenerate the language file from the submitted key/value records.
if(isset($_POST['records']) && is_array($_POST['records'])) {
    $file_content = "<?php\n";
    foreach($_POST['records'] as $key => $val) {
        $val = stripslashes($val);
        // Escape not-yet-escaped double quotes so the value survives being
        // written inside a double-quoted PHP string literal.
        $val = preg_replace('/(^|[^\\\\])((\\\\\\\\)*)"/', '$1$2\\"', $val);
        // Strip '$' to prevent PHP variable/code injection in the generated file.
        $val = str_replace('$', '', $val);
        $file_content .= '$wb['."'$key'".'] = "'.$val.'";'."\n";
        $msg = 'File saved.';
    }
    $file_content .= "?>\n";
    // Global texts live under the library path; module texts under the module.
    if($module == 'global') {
        file_put_contents(ISPC_LIB_PATH."/lang/$lang_file" , $file_content);
    } else {
        file_put_contents(ISPC_WEB_PATH."/$module/lib/lang/$lang_file" , $file_content);
    }
}

$app->tpl->setVar(array('module' => $module, 'lang_file' => $lang_file, 'lang' => $lang, 'msg' => $msg));

// Load the (possibly just saved) language file so its $wb entries can be shown.
if($module == 'global') {
    include ISPC_LIB_PATH."/lang/$lang_file";
    $file_path = ISPC_LIB_PATH."/lang/$lang_file";
} else {
    include ISPC_WEB_PATH."/$module/lib/lang/$lang_file";
    $file_path = ISPC_WEB_PATH."/$module/lib/lang/$lang_file";
}
$app->tpl->setVar("file_path", $file_path);

// Build the key/value loop for the editor template; values are HTML-escaped.
$keyword_list = array();
if(isset($wb) && is_array($wb)) {
    foreach($wb as $key => $val) {
        $keyword_list[] = array('key' => $key, 'val' => htmlentities($val, ENT_COMPAT | ENT_HTML401, 'UTF-8'));
    }
    $app->tpl->setLoop('records', $keyword_list);
    unset($wb);
}

//* load language file
// This is the UI's own translation file (for the edit page itself).
$lng_file = 'lib/lang/'.$_SESSION['s']['language'].'_language_edit.lng';
include $lng_file;
$app->tpl->setVar($wb);

$app->tpl_defaults();
$app->tpl->pparse();
?>
| patriziotufarolo/ispconfig | interface/web/admin/language_edit.php | PHP | bsd-3-clause | 3,916 |
;(function ($, window) {
"use strict";
/**
* @options
* @param customClass [string] <''> "Class applied to instance"
* @param lables.up [string] <'Up'> "Up arrow label"
* @param lables.down [string] <'Down'> "Down arrow label"
*/
// Plugin-wide defaults; overridable via $.stepper("defaults", ...) or per call.
var options = {
    customClass: "",   // extra class added to the .stepper wrapper
    labels: {
        up: "Up",      // text of the increment arrow
        down: "Down"   // text of the decrement arrow
    }
};
// Public API, dispatched through $.fn.stepper(methodName).
var pub = {

    /**
     * @method
     * @name defaults
     * @description Sets default plugin options
     * @param opts [object] <{}> "Options object"
     * @example $.stepper("defaults", opts);
     */
    defaults: function(opts) {
        options = $.extend(options, opts || {});
        // Chainable when invoked on a jQuery set; true when invoked statically.
        return (typeof this === 'object') ? $(this) : true;
    },

    /**
     * @method
     * @name destroy
     * @description Removes instance of plugin
     * @example $(".target").stepper("destroy");
     */
    destroy: function() {
        return $(this).each(function(i) {
            var data = $(this).data("stepper");
            if (data) {
                // Unbind click events
                data.$stepper.off(".stepper")
                    .find(".stepper-arrow")
                    .remove();
                // Restore DOM
                data.$input.unwrap()
                    .removeClass("stepper-input");
            }
        });
    },

    /**
     * @method
     * @name disable
     * @description Disables target instance
     * @example $(".target").stepper("disable");
     */
    disable: function() {
        return $(this).each(function(i) {
            var data = $(this).data("stepper");
            if (data) {
                // Disable both the native input and the arrow UI.
                data.$input.attr("disabled", "disabled");
                data.$stepper.addClass("disabled");
            }
        });
    },

    /**
     * @method
     * @name enable
     * @description Enables target instance
     * @example $(".target").stepper("enable");
     */
    enable: function() {
        return $(this).each(function(i) {
            var data = $(this).data("stepper");
            if (data) {
                data.$input.attr("disabled", null);
                data.$stepper.removeClass("disabled");
            }
        });
    }
};
/**
 * @method private
 * @name _init
 * @description Initializes plugin on each element in the set
 * @param opts [object] "Initialization options"
 */
function _init(opts) {
    // Local options: per-call opts win over the plugin-wide defaults.
    opts = $.extend({}, options, opts || {});

    // Apply to each element
    var $items = $(this);
    for (var i = 0, count = $items.length; i < count; i++) {
        _build($items.eq(i), opts);
    }
    // Return the jQuery set to keep the plugin chainable.
    return $items;
}
/**
 * @method private
 * @name _build
 * @description Builds each instance
 * @param $input [jQuery object] "Target input element"
 * @param opts [object] <{}> "Options object"
 */
function _build($input, opts) {
    // Guard: never double-initialize an input.
    if (!$input.hasClass("stepper-input")) {
        // EXTEND OPTIONS: element-level data-stepper-options wins.
        opts = $.extend({}, opts, $input.data("stepper-options"));

        // HTML5 attributes: min/max are optional, step defaults to 1.
        var min = parseFloat($input.attr("min")),
            max = parseFloat($input.attr("max")),
            step = parseFloat($input.attr("step")) || 1;

        // Modify DOM: wrap the input and append the two arrow spans.
        $input.addClass("stepper-input")
            .wrap('<div class="stepper ' + opts.customClass + '" />')
            .after('<span class="stepper-arrow up">' + opts.labels.up + '</span><span class="stepper-arrow down">' + opts.labels.down + '</span>');

        // Store data. Missing/NaN bounds are stored as `false` ("no bound").
        // NOTE(review): `typeof min !== undefined` is always true (typeof
        // yields a string); the isNaN() test is the effective guard.
        var $stepper = $input.parent(".stepper"),
            data = $.extend({
                $stepper: $stepper,
                $input: $input,
                $arrow: $stepper.find(".stepper-arrow"),
                min: (typeof min !== undefined && !isNaN(min)) ? min : false,
                max: (typeof max !== undefined && !isNaN(max)) ? max : false,
                step: (typeof step !== undefined && !isNaN(step)) ? step : 1,
                timer: null
            }, opts);

        // Significant fractional digits of the step; used to round results.
        data.digits = _digits(data.step);

        // Check disabled
        if ($input.is(":disabled")) {
            $stepper.addClass("disabled");
        }

        // Bind keyboard events
        $stepper.on("keypress", ".stepper-input", data, _onKeyup);

        // Bind click events
        $stepper.on("touchstart.stepper mousedown.stepper", ".stepper-arrow", data, _onMouseDown)
            .data("stepper", data);
    }
}
/**
 * @method private
 * @name _onKeyup
 * @description Handles keypress events on the input; up/down arrow keys step the value
 * @param e [object] "Event data"
 */
function _onKeyup(e) {
    var data = e.data,
        isUp = (e.keyCode === 38),
        isDown = (e.keyCode === 40);

    // Only the arrow keys are intercepted; everything else types normally.
    if (isUp || isDown) {
        e.preventDefault();
        _step(data, isUp ? data.step : -data.step);
    }
}
/**
 * @method private
 * @name _onMouseDown
 * @description Handles mousedown/touchstart on the arrows: one immediate step
 * plus a press-and-hold repeat
 * @param e [object] "Event data"
 */
function _onMouseDown(e) {
    e.preventDefault();
    e.stopPropagation();

    // Make sure we reset the states
    _onMouseUp(e);

    var data = e.data;

    if (!data.$input.is(':disabled') && !data.$stepper.hasClass("disabled")) {
        var change = $(e.target).hasClass("up") ? data.step : -data.step;

        // Repeat the step every 125 ms while the button stays pressed...
        data.timer = _startTimer(data.timer, 125, function() {
            _step(data, change, false);
        });

        // ...and apply one step immediately.
        _step(data, change);

        // Listen on <body> so releasing outside the arrow still stops the timer.
        $("body").on("touchend.stepper mouseup.stepper", data, _onMouseUp);
    }
}
/**
 * @method private
 * @name _onMouseUp
 * @description Handles mouseup/touchend: stops the press-and-hold repeat
 * @param e [object] "Event data"
 */
function _onMouseUp(e) {
    e.preventDefault();
    e.stopPropagation();

    var data = e.data;

    // Stop the repeat timer and detach the temporary body-level handlers.
    _clearTimer(data.timer);
    $("body").off(".stepper");
}
/**
 * @method private
 * @name _step
 * @description Steps the bound input by `change`: starts from min (or 0) for
 * empty/invalid values, snaps onto the step grid and clamps to [min, max]
 * @param data [object] "Instance data (bound input, min/max/step, digits)"
 * @param change [number] "Signed amount to add (usually +step or -step)"
 */
function _step(data, change) {
    var originalValue = parseFloat(data.$input.val()),
        value = change;

    // parseFloat() yields a number or NaN, never undefined; the old guard
    // `typeof originalValue === undefined` was always false (typeof returns
    // a string), so isNaN() alone is the correct and equivalent check.
    if (isNaN(originalValue)) {
        // Empty/garbage field: start from the minimum, or 0 with no minimum.
        if (data.min !== false) {
            value = data.min;
        } else {
            value = 0;
        }
    } else if (data.min !== false && originalValue < data.min) {
        // Below the allowed range: clamp straight to the minimum.
        value = data.min;
    } else {
        value += originalValue;
    }

    // Snap down onto the step grid (offset from min).
    var diff = (value - data.min) % data.step;
    if (diff !== 0) {
        value -= diff;
    }

    if (data.min !== false && value < data.min) {
        value = data.min;
    }
    if (data.max !== false && value > data.max) {
        value -= data.step;
    }

    // Only touch the DOM (and fire "change") when the value actually moved.
    if (value !== originalValue) {
        value = _round(value, data.digits);
        data.$input.val(value)
            .trigger("change");
    }
}
/**
 * @method private
 * @name _startTimer
 * @description Starts an internal repeat timer, replacing any existing one
 * @param timer [int] "Previous timer ID (cleared first)"
 * @param time [int] "Interval between executions, in ms"
 * @param callback [function] "Function to execute on each tick"
 */
function _startTimer(timer, time, callback) {
    // Stop any previous repeat loop before starting a new one.
    _clearTimer(timer);
    return setInterval(callback, time);
}
/**
 * @method private
 * @name _clearTimer
 * @description Clears an internal timer
 * @param timer [int] "Timer ID"
 */
function _clearTimer(timer) {
    if (timer) {
        clearInterval(timer);
        // NOTE(review): this only nulls the local parameter; the caller's
        // reference (data.timer) is NOT cleared. Harmless today because
        // clearInterval() on a stale id is a no-op.
        timer = null;
    }
}
/**
 * @method private
 * @name _digits
 * @description Counts the fractional digits of a number (e.g. 0.25 -> 2)
 * @param value [float] "Value to analyze"
 * @return [int] "Number of digits after the decimal point"
 */
function _digits(value) {
    var str = String(value),
        dot = str.indexOf(".");

    // No decimal point means an integer: zero fractional digits.
    return (dot < 0) ? 0 : (str.length - dot - 1);
}
/**
 * @method private
 * @name _round
 * @description Rounds a number to a given count of fractional digits
 * @param value [float] "Value to round"
 * @param digits [float] "Digits to round to"
 * @return [number] "Rounded number"
 */
function _round(value, digits) {
    var scale = Math.pow(10, digits);
    var scaled = Math.round(value * scale);
    return scaled / scale;
}
// jQuery collection entry point: dispatches to a named public method, or
// treats the argument (object or nothing) as initialization options.
$.fn.stepper = function(method) {
    if (pub[method]) {
        return pub[method].apply(this, Array.prototype.slice.call(arguments, 1));
    } else if (typeof method === 'object' || !method) {
        return _init.apply(this, arguments);
    }
    // Unknown string method: no-op, stay chainable.
    return this;
};

// Static entry point: only "defaults" is exposed globally.
$.stepper = function(method) {
    if (method === "defaults") {
        pub.defaults.apply(this, Array.prototype.slice.call(arguments, 1));
    }
};
})(jQuery, this);
| juniorspecialist/moab | web/js/jquery.fs.stepper.js | JavaScript | bsd-3-clause | 7,683 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Amazon EC2, Eucalyptus and Nimbus drivers.
"""
from __future__ import with_statement
import sys
import base64
import os
import copy
from xml.etree import ElementTree as ET
from libcloud.utils.py3 import b
from libcloud.utils.xml import fixxpath, findtext, findattr, findall
from libcloud.common.aws import AWSBaseResponse, SignedAWSConnection
from libcloud.common.types import (InvalidCredsError, MalformedResponseError,
LibcloudError)
from libcloud.compute.providers import Provider
from libcloud.compute.types import NodeState
from libcloud.compute.base import Node, NodeDriver, NodeLocation, NodeSize
from libcloud.compute.base import NodeImage, StorageVolume
# EC2 Query API version and the matching XML namespace for responses.
API_VERSION = '2010-08-31'
NAMESPACE = 'http://ec2.amazonaws.com/doc/%s/' % (API_VERSION)

"""
Sizes must be hardcoded, because Amazon doesn't provide an API to fetch them.
From http://aws.amazon.com/ec2/instance-types/
"""
# Catalog of instance types keyed by API id. 'ram' appears to be MB and
# 'disk' GB, judging by the published EC2 size sheet -- TODO confirm.
INSTANCE_TYPES = {
    't1.micro': {
        'id': 't1.micro',
        'name': 'Micro Instance',
        'ram': 613,
        'disk': 15,
        'bandwidth': None
    },
    'm1.small': {
        'id': 'm1.small',
        'name': 'Small Instance',
        'ram': 1740,
        'disk': 160,
        'bandwidth': None
    },
    'm1.medium': {
        'id': 'm1.medium',
        'name': 'Medium Instance',
        'ram': 3700,
        'disk': 410,
        'bandwidth': None
    },
    'm1.large': {
        'id': 'm1.large',
        'name': 'Large Instance',
        'ram': 7680,
        'disk': 850,
        'bandwidth': None
    },
    'm1.xlarge': {
        'id': 'm1.xlarge',
        'name': 'Extra Large Instance',
        'ram': 15360,
        'disk': 1690,
        'bandwidth': None
    },
    'c1.medium': {
        'id': 'c1.medium',
        'name': 'High-CPU Medium Instance',
        'ram': 1740,
        'disk': 350,
        'bandwidth': None
    },
    'c1.xlarge': {
        'id': 'c1.xlarge',
        'name': 'High-CPU Extra Large Instance',
        'ram': 7680,
        'disk': 1690,
        'bandwidth': None
    },
    'm2.xlarge': {
        'id': 'm2.xlarge',
        'name': 'High-Memory Extra Large Instance',
        'ram': 17510,
        'disk': 420,
        'bandwidth': None
    },
    'm2.2xlarge': {
        'id': 'm2.2xlarge',
        'name': 'High-Memory Double Extra Large Instance',
        'ram': 35021,
        'disk': 850,
        'bandwidth': None
    },
    'm2.4xlarge': {
        'id': 'm2.4xlarge',
        'name': 'High-Memory Quadruple Extra Large Instance',
        'ram': 70042,
        'disk': 1690,
        'bandwidth': None
    },
    'm3.xlarge': {
        'id': 'm3.xlarge',
        'name': 'Extra Large Instance',
        'ram': 15360,
        'disk': None,
        'bandwidth': None
    },
    'm3.2xlarge': {
        'id': 'm3.2xlarge',
        'name': 'Double Extra Large Instance',
        'ram': 30720,
        'disk': None,
        'bandwidth': None
    },
    'cg1.4xlarge': {
        'id': 'cg1.4xlarge',
        'name': 'Cluster GPU Quadruple Extra Large Instance',
        'ram': 22528,
        'disk': 1690,
        'bandwidth': None
    },
    'cc1.4xlarge': {
        'id': 'cc1.4xlarge',
        'name': 'Cluster Compute Quadruple Extra Large Instance',
        'ram': 23552,
        'disk': 1690,
        'bandwidth': None
    },
    'cc2.8xlarge': {
        'id': 'cc2.8xlarge',
        'name': 'Cluster Compute Eight Extra Large Instance',
        'ram': 63488,
        'disk': 3370,
        'bandwidth': None
    },
    'cr1.8xlarge': {
        'id': 'cr1.8xlarge',
        'name': 'High Memory Cluster Eight Extra Large',
        'ram': 244000,
        'disk': 240,
        'bandwidth': None
    },
    'hs1.8xlarge': {
        'id': 'hs1.8xlarge',
        'name': 'High Storage Eight Extra Large Instance',
        'ram': 119808,
        'disk': 48000,
        'bandwidth': None
    }
}
# Per-region metadata: API endpoint, pricing api_name, country, and the
# subset of INSTANCE_TYPES offered in that region.
REGION_DETAILS = {
    'us-east-1': {
        'endpoint': 'ec2.us-east-1.amazonaws.com',
        'api_name': 'ec2_us_east',
        'country': 'USA',
        'instance_types': [
            't1.micro',
            'm1.small',
            'm1.medium',
            'm1.large',
            'm1.xlarge',
            'm2.xlarge',
            'm2.2xlarge',
            'm2.4xlarge',
            'm3.xlarge',
            'm3.2xlarge',
            'c1.medium',
            'c1.xlarge',
            'cc1.4xlarge',
            'cc2.8xlarge',
            'cg1.4xlarge',
            'cr1.8xlarge',
            'hs1.8xlarge'
        ]
    },
    'us-west-1': {
        'endpoint': 'ec2.us-west-1.amazonaws.com',
        'api_name': 'ec2_us_west',
        'country': 'USA',
        'instance_types': [
            't1.micro',
            'm1.small',
            'm1.medium',
            'm1.large',
            'm1.xlarge',
            'm2.xlarge',
            'm2.2xlarge',
            'm2.4xlarge',
            'm3.xlarge',
            'm3.2xlarge',
            'c1.medium',
            'c1.xlarge'
        ]
    },
    'us-west-2': {
        'endpoint': 'ec2.us-west-2.amazonaws.com',
        'api_name': 'ec2_us_west_oregon',
        'country': 'US',
        'instance_types': [
            't1.micro',
            'm1.small',
            'm1.medium',
            'm1.large',
            'm1.xlarge',
            'm2.xlarge',
            'm2.2xlarge',
            'm2.4xlarge',
            'c1.medium',
            'c1.xlarge',
            'cc2.8xlarge'
        ]
    },
    'eu-west-1': {
        'endpoint': 'ec2.eu-west-1.amazonaws.com',
        'api_name': 'ec2_eu_west',
        'country': 'Ireland',
        'instance_types': [
            't1.micro',
            'm1.small',
            'm1.medium',
            'm1.large',
            'm1.xlarge',
            'm2.xlarge',
            'm2.2xlarge',
            'm2.4xlarge',
            'm3.xlarge',
            'm3.2xlarge',
            'c1.medium',
            'c1.xlarge',
            'cc2.8xlarge'
        ]
    },
    'ap-southeast-1': {
        'endpoint': 'ec2.ap-southeast-1.amazonaws.com',
        'api_name': 'ec2_ap_southeast',
        'country': 'Singapore',
        'instance_types': [
            't1.micro',
            'm1.small',
            'm1.medium',
            'm1.large',
            'm1.xlarge',
            'm2.xlarge',
            'm2.2xlarge',
            'm2.4xlarge',
            'm3.xlarge',
            'm3.2xlarge',
            'c1.medium',
            'c1.xlarge'
        ]
    },
    'ap-northeast-1': {
        'endpoint': 'ec2.ap-northeast-1.amazonaws.com',
        'api_name': 'ec2_ap_northeast',
        'country': 'Japan',
        'instance_types': [
            't1.micro',
            'm1.small',
            'm1.medium',
            'm1.large',
            'm1.xlarge',
            'm2.xlarge',
            'm2.2xlarge',
            'm2.4xlarge',
            'm3.xlarge',
            'm3.2xlarge',
            'c1.medium',
            'c1.xlarge'
        ]
    },
    'sa-east-1': {
        'endpoint': 'ec2.sa-east-1.amazonaws.com',
        'api_name': 'ec2_sa_east',
        'country': 'Brazil',
        'instance_types': [
            't1.micro',
            'm1.small',
            'm1.medium',
            'm1.large',
            'm1.xlarge',
            'm2.xlarge',
            'm2.2xlarge',
            'm2.4xlarge',
            'c1.medium',
            'c1.xlarge'
        ]
    },
    'ap-southeast-2': {
        'endpoint': 'ec2.ap-southeast-2.amazonaws.com',
        'api_name': 'ec2_ap_southeast_2',
        'country': 'Australia',
        'instance_types': [
            't1.micro',
            'm1.small',
            'm1.medium',
            'm1.large',
            'm1.xlarge',
            'm2.xlarge',
            'm2.2xlarge',
            'm2.4xlarge',
            'm3.xlarge',
            'm3.2xlarge',
            'c1.medium',
            'c1.xlarge'
        ]
    },
    'nimbus': {
        # Nimbus clouds have 3 EC2-style instance types but their particular
        # RAM allocations are configured by the admin
        'country': 'custom',
        'instance_types': [
            'm1.small',
            'm1.large',
            'm1.xlarge'
        ]
    }
}

# All real AWS datacenters; 'nimbus' is a private-cloud placeholder entry,
# not an EC2 datacenter, so it is filtered out.
VALID_EC2_DATACENTERS = REGION_DETAILS.keys()
VALID_EC2_DATACENTERS = [d for d in VALID_EC2_DATACENTERS if d != 'nimbus']
class EC2NodeLocation(NodeLocation):
    """NodeLocation specialized with the EC2 availability zone it belongs to."""

    def __init__(self, id, name, country, driver, availability_zone):
        super(EC2NodeLocation, self).__init__(id, name, country, driver)
        # EC2-specific: the availability zone within the region.
        self.availability_zone = availability_zone

    def __repr__(self):
        return (('<EC2NodeLocation: id=%s, name=%s, country=%s, '
                 'availability_zone=%s driver=%s>')
                % (self.id, self.name, self.country,
                   self.availability_zone, self.driver.name))
class EC2Response(AWSBaseResponse):
    """
    EC2 specific response parsing and error handling.
    """

    def parse_error(self):
        """
        Parse an EC2 error response body.

        Raises a more specific exception (InvalidCredsError,
        IdempotentParamError, MalformedResponseError) where the error code
        allows it; otherwise returns a newline separated string with one
        "code: message" entry per <Error> element.
        """
        err_list = []
        # Eucalyptus can return a body-less 403 when the credentials are
        # wrong, so handle that before attempting to parse any XML.
        msg = "Failure: 403 Forbidden"
        if self.status == 403 and self.body[:len(msg)] == msg:
            raise InvalidCredsError(msg)
        try:
            body = ET.XML(self.body)
        except Exception:
            # A bare "except:" here would also swallow SystemExit and
            # KeyboardInterrupt; catching Exception is sufficient for
            # malformed XML.
            raise MalformedResponseError("Failed to parse XML",
                                         body=self.body, driver=EC2NodeDriver)
        for err in body.findall('Errors/Error'):
            code, message = err.getchildren()
            err_list.append("%s: %s" % (code.text, message.text))
            # All of these codes indicate a problem with the supplied
            # credentials or account authorization.
            if code.text in ("InvalidClientTokenId", "SignatureDoesNotMatch",
                             "AuthFailure", "OptInRequired"):
                raise InvalidCredsError(err_list[-1])
            if code.text == "IdempotentParameterMismatch":
                raise IdempotentParamError(err_list[-1])
        return "\n".join(err_list)
class EC2Connection(SignedAWSConnection):
    """
    Represents a single connection to the EC2 Endpoint.
    """
    # API version used when signing every request.
    version = API_VERSION
    # Default to the US East endpoint; region specific connection
    # subclasses below override this from REGION_DETAILS.
    host = REGION_DETAILS['us-east-1']['endpoint']
    responseCls = EC2Response
class ExEC2AvailabilityZone(object):
    """
    Extension class which stores information about an EC2 availability zone.

    Note: This class is EC2 specific.
    """

    def __init__(self, name, zone_state, region_name):
        self.name = name
        self.zone_state = zone_state
        self.region_name = region_name

    def __repr__(self):
        details = (self.name, self.zone_state, self.region_name)
        return ('<ExEC2AvailabilityZone: name=%s, zone_state=%s, '
                'region_name=%s>' % details)
class BaseEC2NodeDriver(NodeDriver):
    """
    Base Amazon EC2 node driver.

    Used for main EC2 and other derivate driver classes to inherit from it.
    """
    connectionCls = EC2Connection
    path = '/'
    # SSH key based authentication is supported when creating nodes.
    features = {'create_node': ['ssh_key']}
    # Maps EC2 instance states to generic libcloud NodeState values.
    NODE_STATE_MAP = {
        'pending': NodeState.PENDING,
        'running': NodeState.RUNNING,
        'shutting-down': NodeState.UNKNOWN,
        'terminated': NodeState.TERMINATED
    }
def _pathlist(self, key, arr):
"""
Converts a key and an array of values into AWS query param format.
"""
params = {}
i = 0
for value in arr:
i += 1
params["%s.%s" % (key, i)] = value
return params
def _get_boolean(self, element):
tag = "{%s}%s" % (NAMESPACE, 'return')
return element.findtext(tag) == 'true'
def _get_state_boolean(self, element):
"""
Checks for the instances's state
"""
state = findall(element=element,
xpath='instancesSet/item/currentState/name',
namespace=NAMESPACE)[0].text
return state in ('stopping', 'pending', 'starting')
def _get_terminate_boolean(self, element):
status = element.findtext(".//{%s}%s" % (NAMESPACE, 'name'))
return any([term_status == status
for term_status
in ('shutting-down', 'terminated')])
    def _to_nodes(self, object, xpath, groups=None):
        # Convert every XML element matched by xpath into a Node instance.
        return [self._to_node(el, groups=groups)
                for el in object.findall(fixxpath(xpath=xpath,
                                                  namespace=NAMESPACE))]
    def _to_node(self, element, groups=None):
        """
        Parse a single instance <item> element into a L{Node}, exposing the
        raw EC2 instance attributes through the node's 'extra' dictionary.
        """
        # Unknown/unmapped instance states fall back to NodeState.UNKNOWN.
        try:
            state = self.NODE_STATE_MAP[findattr(element=element,
                                         xpath="instanceState/name",
                                         namespace=NAMESPACE)
                                        ]
        except KeyError:
            state = NodeState.UNKNOWN
        instance_id = findtext(element=element, xpath='instanceId',
                               namespace=NAMESPACE)
        tags = dict((findtext(element=item, xpath='key', namespace=NAMESPACE),
                     findtext(element=item, xpath='value',
                              namespace=NAMESPACE))
                    for item in findall(element=element,
                                        xpath='tagSet/item',
                                        namespace=NAMESPACE)
                    )
        # The 'Name' tag, when present, becomes the node name; otherwise the
        # instance id is used.
        name = tags.get('Name', instance_id)
        public_ip = findtext(element=element, xpath='ipAddress',
                             namespace=NAMESPACE)
        public_ips = [public_ip] if public_ip else []
        private_ip = findtext(element=element, xpath='privateIpAddress',
                              namespace=NAMESPACE)
        private_ips = [private_ip] if private_ip else []
        n = Node(
            id=findtext(element=element, xpath='instanceId',
                        namespace=NAMESPACE),
            name=name,
            state=state,
            public_ips=public_ips,
            private_ips=private_ips,
            driver=self.connection.driver,
            extra={
                'dns_name': findattr(element=element, xpath="dnsName",
                                     namespace=NAMESPACE),
                'instanceId': findattr(element=element, xpath="instanceId",
                                       namespace=NAMESPACE),
                'imageId': findattr(element=element, xpath="imageId",
                                    namespace=NAMESPACE),
                'private_dns': findattr(element=element,
                                        xpath="privateDnsName",
                                        namespace=NAMESPACE),
                'status': findattr(element=element, xpath="instanceState/name",
                                   namespace=NAMESPACE),
                'keyname': findattr(element=element, xpath="keyName",
                                    namespace=NAMESPACE),
                'launchindex': findattr(element=element,
                                        xpath="amiLaunchIndex",
                                        namespace=NAMESPACE),
                'productcode': [
                    p.text for p in findall(
                        element=element,
                        xpath="productCodesSet/item/productCode",
                        namespace=NAMESPACE
                    )],
                'instancetype': findattr(element=element, xpath="instanceType",
                                         namespace=NAMESPACE),
                'launchdatetime': findattr(element=element, xpath="launchTime",
                                           namespace=NAMESPACE),
                'availability': findattr(element,
                                         xpath="placement/availabilityZone",
                                         namespace=NAMESPACE),
                'kernelid': findattr(element=element, xpath="kernelId",
                                     namespace=NAMESPACE),
                'ramdiskid': findattr(element=element, xpath="ramdiskId",
                                      namespace=NAMESPACE),
                'clienttoken': findattr(element=element, xpath="clientToken",
                                        namespace=NAMESPACE),
                'groups': groups,
                'tags': tags
            }
        )
        return n
def _to_images(self, object):
return [self._to_image(el) for el in object.findall(
fixxpath(xpath='imagesSet/item', namespace=NAMESPACE))
]
    def _to_image(self, element):
        """
        Parse a single AMI <item> element into a L{NodeImage}, surfacing the
        raw image attributes through the image's 'extra' dictionary.
        """
        n = NodeImage(
            id=findtext(element=element, xpath='imageId', namespace=NAMESPACE),
            name=findtext(element=element, xpath='imageLocation',
                          namespace=NAMESPACE),
            driver=self.connection.driver,
            extra={
                'state': findattr(element=element, xpath="imageState",
                                  namespace=NAMESPACE),
                'ownerid': findattr(element=element, xpath="imageOwnerId",
                                    namespace=NAMESPACE),
                'owneralias': findattr(element=element,
                                       xpath="imageOwnerAlias",
                                       namespace=NAMESPACE),
                'ispublic': findattr(element=element,
                                     xpath="isPublic",
                                     namespace=NAMESPACE),
                'architecture': findattr(element=element,
                                         xpath="architecture",
                                         namespace=NAMESPACE),
                'imagetype': findattr(element=element,
                                      xpath="imageType",
                                      namespace=NAMESPACE),
                'platform': findattr(element=element,
                                     xpath="platform",
                                     namespace=NAMESPACE),
                'rootdevicetype': findattr(element=element,
                                           xpath="rootDeviceType",
                                           namespace=NAMESPACE),
                'virtualizationtype': findattr(
                    element=element, xpath="virtualizationType",
                    namespace=NAMESPACE),
                'hypervisor': findattr(element=element,
                                       xpath="hypervisor",
                                       namespace=NAMESPACE)
            }
        )
        return n
def _to_volume(self, element, name):
volId = findtext(element=element, xpath='volumeId',
namespace=NAMESPACE)
size = findtext(element=element, xpath='size', namespace=NAMESPACE)
return StorageVolume(id=volId,
name=name,
size=int(size),
driver=self)
    def list_nodes(self, ex_node_ids=None):
        """
        List all nodes

        Ex_node_ids parameter is used to filter the list of
        nodes that should be returned. Only the nodes
        with the corresponding node ids will be returned.

        @param ex_node_ids: List of C{node.id}
        @type ex_node_ids: C{list} of C{str}

        @rtype: C{list} of L{Node}
        """
        params = {'Action': 'DescribeInstances'}
        if ex_node_ids:
            params.update(self._pathlist('InstanceId', ex_node_ids))
        elem = self.connection.request(self.path, params=params).object
        nodes = []
        for rs in findall(element=elem, xpath='reservationSet/item',
                          namespace=NAMESPACE):
            # Collect the security group ids attached to this reservation.
            groups = [g.findtext('')
                      for g in findall(element=rs,
                                       xpath='groupSet/item/groupId',
                                       namespace=NAMESPACE)]
            nodes += self._to_nodes(rs, 'instancesSet/item', groups)
        # Augment each node's public IPs with its Elastic IP addresses;
        # ex_describe_addresses pre-seeds an entry for every node id.
        nodes_elastic_ips_mappings = self.ex_describe_addresses(nodes)
        for node in nodes:
            ips = nodes_elastic_ips_mappings[node.id]
            node.public_ips.extend(ips)
        return nodes
def list_sizes(self, location=None):
available_types = REGION_DETAILS[self.region_name]['instance_types']
sizes = []
for instance_type in available_types:
attributes = INSTANCE_TYPES[instance_type]
attributes = copy.deepcopy(attributes)
price = self._get_size_price(size_id=instance_type)
attributes.update({'price': price})
sizes.append(NodeSize(driver=self, **attributes))
return sizes
def list_images(self, location=None, ex_image_ids=None):
"""
List all images
Ex_image_ids parameter is used to filter the list of
images that should be returned. Only the images
with the corresponding image ids will be returned.
@param ex_image_ids: List of C{NodeImage.id}
@type ex_image_ids: C{list} of C{str}
@rtype: C{list} of L{NodeImage}
"""
params = {'Action': 'DescribeImages'}
if ex_image_ids:
params.update(self._pathlist('ImageId', ex_image_ids))
images = self._to_images(
self.connection.request(self.path, params=params).object
)
return images
def list_locations(self):
locations = []
for index, availability_zone in \
enumerate(self.ex_list_availability_zones()):
locations.append(EC2NodeLocation(
index, availability_zone.name, self.country, self,
availability_zone)
)
return locations
    def create_volume(self, size, name, location=None, snapshot=None):
        # NOTE(review): the 'snapshot' argument is accepted but never
        # forwarded to the API - confirm whether snapshot support is
        # intentionally unimplemented here.
        params = {
            'Action': 'CreateVolume',
            'Size': str(size)}
        if location is not None:
            params['AvailabilityZone'] = location.availability_zone.name
        volume = self._to_volume(
            self.connection.request(self.path, params=params).object,
            name=name)
        # Tag the new volume with its name so it is identifiable later.
        self.ex_create_tags(volume, {'Name': name})
        return volume
def destroy_volume(self, volume):
params = {
'Action': 'DeleteVolume',
'VolumeId': volume.id}
response = self.connection.request(self.path, params=params).object
return self._get_boolean(response)
def attach_volume(self, node, volume, device):
params = {
'Action': 'AttachVolume',
'VolumeId': volume.id,
'InstanceId': node.id,
'Device': device}
self.connection.request(self.path, params=params)
return True
def detach_volume(self, volume):
params = {
'Action': 'DetachVolume',
'VolumeId': volume.id}
self.connection.request(self.path, params=params)
return True
    def ex_create_keypair(self, name):
        """Creates a new keypair

        @note: This is a non-standard extension API, and only works for EC2.

        @param name: The name of the keypair to Create. This must be
         unique, otherwise an InvalidKeyPair.Duplicate exception is raised.
        @type name: C{str}

        @rtype: C{dict} with 'keyMaterial' and 'keyFingerprint' keys
        """
        params = {
            'Action': 'CreateKeyPair',
            'KeyName': name,
        }
        response = self.connection.request(self.path, params=params).object
        # The private key material is only returned once, at creation time.
        key_material = findtext(element=response, xpath='keyMaterial',
                                namespace=NAMESPACE)
        key_fingerprint = findtext(element=response, xpath='keyFingerprint',
                                   namespace=NAMESPACE)
        return {
            'keyMaterial': key_material,
            'keyFingerprint': key_fingerprint,
        }
def ex_import_keypair(self, name, keyfile):
"""
imports a new public key
@note: This is a non-standard extension API, and only works for EC2.
@param name: The name of the public key to import. This must be
unique, otherwise an InvalidKeyPair.Duplicate exception is raised.
@type name: C{str}
@param keyfile: The filename with path of the public key to import.
@type keyfile: C{str}
@rtype: C{dict}
"""
with open(os.path.expanduser(keyfile)) as fh:
content = fh.read()
base64key = base64.b64encode(content)
params = {
'Action': 'ImportKeyPair',
'KeyName': name,
'PublicKeyMaterial': base64key
}
response = self.connection.request(self.path, params=params).object
key_name = findtext(element=response, xpath='keyName',
namespace=NAMESPACE)
key_fingerprint = findtext(element=response, xpath='keyFingerprint',
namespace=NAMESPACE)
return {
'keyName': key_name,
'keyFingerprint': key_fingerprint,
}
def ex_describe_all_keypairs(self):
"""
Describes all keypairs.
@note: This is a non-standard extension API, and only works for EC2.
@rtype: C{list} of C{str}
"""
params = {
'Action': 'DescribeKeyPairs'
}
response = self.connection.request(self.path, params=params).object
names = []
for elem in findall(element=response, xpath='keySet/item',
namespace=NAMESPACE):
name = findtext(element=elem, xpath='keyName', namespace=NAMESPACE)
names.append(name)
return names
    def ex_describe_keypairs(self, name):
        """Describes a keypair by name

        @note: This is a non-standard extension API, and only works for EC2.

        @param name: The name of the keypair to describe.
        @type name: C{str}

        @rtype: C{dict} with a single 'keyName' key
        """
        params = {
            'Action': 'DescribeKeyPairs',
            'KeyName.1': name
        }
        response = self.connection.request(self.path, params=params).object
        # Only the first matching keypair's name is extracted.
        key_name = findattr(element=response, xpath='keySet/item/keyName',
                            namespace=NAMESPACE)
        return {
            'keyName': key_name
        }
def ex_list_security_groups(self):
"""
List existing Security Groups.
@note: This is a non-standard extension API, and only works for EC2.
@rtype: C{list} of C{str}
"""
params = {'Action': 'DescribeSecurityGroups'}
response = self.connection.request(self.path, params=params).object
groups = []
for group in findall(element=response, xpath='securityGroupInfo/item',
namespace=NAMESPACE):
name = findtext(element=group, xpath='groupName',
namespace=NAMESPACE)
groups.append(name)
return groups
    def ex_create_security_group(self, name, description):
        """
        Creates a new Security Group

        @note: This is a non-standard extension API, and only works for EC2.

        @param name: The name of the security group to Create.
                     This must be unique.
        @type name: C{str}

        @param description: Human readable description of a Security
                            Group.
        @type description: C{str}

        @rtype: C{str}
        """
        params = {'Action': 'CreateSecurityGroup',
                  'GroupName': name,
                  'GroupDescription': description}
        # NOTE(review): this returns the parsed response object rather than
        # a plain string - verify the documented @rtype against callers.
        return self.connection.request(self.path, params=params).object
def ex_authorize_security_group(self, name, from_port, to_port, cidr_ip,
protocol='tcp'):
"""
Edit a Security Group to allow specific traffic.
@note: This is a non-standard extension API, and only works for EC2.
@param name: The name of the security group to edit
@type name: C{str}
@param from_port: The beginning of the port range to open
@type from_port: C{str}
@param to_port: The end of the port range to open
@type to_port: C{str}
@param cidr_ip: The ip to allow traffic for.
@type cidr_ip: C{str}
@param protocol: tcp/udp/icmp
@type protocol: C{str}
@rtype: C{bool}
"""
params = {'Action': 'AuthorizeSecurityGroupIngress',
'GroupName': name,
'IpProtocol': protocol,
'FromPort': str(from_port),
'ToPort': str(to_port),
'CidrIp': cidr_ip}
try:
resp = self.connection.request(
self.path, params=params.copy()).object
return bool(findtext(element=resp, xpath='return',
namespace=NAMESPACE))
except Exception:
e = sys.exc_info()[1]
if e.args[0].find('InvalidPermission.Duplicate') == -1:
raise e
def ex_authorize_security_group_permissive(self, name):
"""
Edit a Security Group to allow all traffic.
@note: This is a non-standard extension API, and only works for EC2.
@param name: The name of the security group to edit
@type name: C{str}
@rtype: C{list} of C{str}
"""
results = []
params = {'Action': 'AuthorizeSecurityGroupIngress',
'GroupName': name,
'IpProtocol': 'tcp',
'FromPort': '0',
'ToPort': '65535',
'CidrIp': '0.0.0.0/0'}
try:
results.append(
self.connection.request(self.path, params=params.copy()).object
)
except Exception:
e = sys.exc_info()[1]
if e.args[0].find("InvalidPermission.Duplicate") == -1:
raise e
params['IpProtocol'] = 'udp'
try:
results.append(
self.connection.request(self.path, params=params.copy()).object
)
except Exception:
e = sys.exc_info()[1]
if e.args[0].find("InvalidPermission.Duplicate") == -1:
raise e
params.update({'IpProtocol': 'icmp', 'FromPort': '-1', 'ToPort': '-1'})
try:
results.append(
self.connection.request(self.path, params=params.copy()).object
)
except Exception:
e = sys.exc_info()[1]
if e.args[0].find("InvalidPermission.Duplicate") == -1:
raise e
return results
    def ex_list_availability_zones(self, only_available=True):
        """
        Return a list of L{ExEC2AvailabilityZone} objects for the
        current region.

        Note: This is an extension method and is only available for EC2
        driver.

        @keyword only_available: If true, return only availability zones
                                 with state 'available'
        @type only_available: C{str}

        @rtype: C{list} of L{ExEC2AvailabilityZone}
        """
        params = {'Action': 'DescribeAvailabilityZones'}
        if only_available:
            params.update({'Filter.0.Name': 'state'})
            params.update({'Filter.0.Value.0': 'available'})
            # NOTE(review): the region-name filter below is only applied
            # when only_available is true - confirm whether it should
            # always be applied.
            params.update({'Filter.1.Name': 'region-name'})
            params.update({'Filter.1.Value.0': self.region_name})
        result = self.connection.request(self.path,
                                         params=params.copy()).object
        availability_zones = []
        for element in findall(element=result,
                               xpath='availabilityZoneInfo/item',
                               namespace=NAMESPACE):
            name = findtext(element=element, xpath='zoneName',
                            namespace=NAMESPACE)
            zone_state = findtext(element=element, xpath='zoneState',
                                  namespace=NAMESPACE)
            region_name = findtext(element=element, xpath='regionName',
                                   namespace=NAMESPACE)
            availability_zone = ExEC2AvailabilityZone(
                name=name,
                zone_state=zone_state,
                region_name=region_name
            )
            availability_zones.append(availability_zone)
        return availability_zones
def ex_describe_tags(self, resource):
"""
Return a dictionary of tags for a resource (Node or StorageVolume).
@param resource: resource which should be used
@type resource: L{Node} or L{StorageVolume}
@return: dict Node tags
@rtype: C{dict}
"""
params = {'Action': 'DescribeTags',
'Filter.0.Name': 'resource-id',
'Filter.0.Value.0': resource.id,
'Filter.1.Name': 'resource-type',
'Filter.1.Value.0': 'instance',
}
result = self.connection.request(self.path,
params=params.copy()).object
tags = {}
for element in findall(element=result, xpath='tagSet/item',
namespace=NAMESPACE):
key = findtext(element=element, xpath='key', namespace=NAMESPACE)
value = findtext(element=element,
xpath='value', namespace=NAMESPACE)
tags[key] = value
return tags
    def ex_create_tags(self, resource, tags):
        """
        Create tags for a resource (Node or StorageVolume).

        @param resource: Resource to be tagged
        @type resource: L{Node} or L{StorageVolume}

        @param tags: A dictionary or other mapping of strings to strings,
                     associating tag names with tag values.
        @type tags: C{dict}

        @rtype: C{bool}
        """
        # No-op (returns None, not False) when there is nothing to tag.
        if not tags:
            return
        # NOTE(review): list parameter indices start at 0 here, while AWS
        # documents them as 1-indexed - confirm against the API.
        params = {'Action': 'CreateTags',
                  'ResourceId.0': resource.id}
        for i, key in enumerate(tags):
            params['Tag.%d.Key' % i] = key
            params['Tag.%d.Value' % i] = tags[key]
        result = self.connection.request(self.path,
                                         params=params.copy()).object
        element = findtext(element=result, xpath='return',
                           namespace=NAMESPACE)
        return element == 'true'
def ex_delete_tags(self, resource, tags):
"""
Delete tags from a resource.
@param resource: Resource to be tagged
@type resource: L{Node} or L{StorageVolume}
@param tags: A dictionary or other mapping of strings to strings,
specifying the tag names and tag values to be deleted.
@type tags: C{dict}
@rtype: C{bool}
"""
if not tags:
return
params = {'Action': 'DeleteTags',
'ResourceId.0': resource.id}
for i, key in enumerate(tags):
params['Tag.%d.Key' % i] = key
params['Tag.%d.Value' % i] = tags[key]
result = self.connection.request(self.path,
params=params.copy()).object
element = findtext(element=result, xpath='return',
namespace=NAMESPACE)
return element == 'true'
def _add_instance_filter(self, params, node):
"""
Add instance filter to the provided params dictionary.
"""
params.update({
'Filter.0.Name': 'instance-id',
'Filter.0.Value.0': node.id
})
def ex_describe_all_addresses(self, only_allocated=False):
"""
Return all the Elastic IP addresses for this account
optionally, return only the allocated addresses
@param only_allocated: If true, return only those addresses
that are associated with an instance
@type only_allocated: C{str}
@return: list list of elastic ips for this particular account.
@rtype: C{list} of C{str}
"""
params = {'Action': 'DescribeAddresses'}
result = self.connection.request(self.path,
params=params.copy()).object
# the list which we return
elastic_ip_addresses = []
for element in findall(element=result, xpath='addressesSet/item',
namespace=NAMESPACE):
instance_id = findtext(element=element, xpath='instanceId',
namespace=NAMESPACE)
# if only allocated addresses are requested
if only_allocated and not instance_id:
continue
ip_address = findtext(element=element, xpath='publicIp',
namespace=NAMESPACE)
elastic_ip_addresses.append(ip_address)
return elastic_ip_addresses
def ex_associate_addresses(self, node, elastic_ip_address):
"""
Associate an IP address with a particular node.
@param node: Node instance
@type node: L{Node}
@param elastic_ip_address: IP address which should be used
@type elastic_ip_address: C{str}
@rtype: C{bool}
"""
params = {'Action': 'AssociateAddress'}
params.update(self._pathlist('InstanceId', [node.id]))
params.update({'PublicIp': elastic_ip_address})
res = self.connection.request(self.path, params=params).object
return self._get_boolean(res)
    def ex_describe_addresses(self, nodes):
        """
        Return Elastic IP addresses for all the nodes in the provided list.

        @param nodes: List of C{Node} instances
        @type nodes: C{list} of L{Node}

        @return: Dictionary where a key is a node ID and the value is a
            list with the Elastic IP addresses associated with this node.
        @rtype: C{dict}
        """
        if not nodes:
            return {}
        params = {'Action': 'DescribeAddresses'}
        if len(nodes) == 1:
            # Narrow the query server-side when only one node is involved.
            self._add_instance_filter(params, nodes[0])
        result = self.connection.request(self.path,
                                         params=params.copy()).object
        node_instance_ids = [node.id for node in nodes]
        nodes_elastic_ip_mappings = {}
        # Pre-seed the mapping so every requested node has an entry, even
        # when no addresses are associated with it.
        for node_id in node_instance_ids:
            nodes_elastic_ip_mappings.setdefault(node_id, [])
        for element in findall(element=result, xpath='addressesSet/item',
                               namespace=NAMESPACE):
            instance_id = findtext(element=element, xpath='instanceId',
                                   namespace=NAMESPACE)
            ip_address = findtext(element=element, xpath='publicIp',
                                  namespace=NAMESPACE)
            if instance_id not in node_instance_ids:
                continue
            nodes_elastic_ip_mappings[instance_id].append(ip_address)
        return nodes_elastic_ip_mappings
def ex_describe_addresses_for_node(self, node):
"""
Return a list of Elastic IP addresses associated with this node.
@param node: Node instance
@type node: L{Node}
@return: list Elastic IP addresses attached to this node.
@rtype: C{list} of C{str}
"""
node_elastic_ips = self.ex_describe_addresses([node])
return node_elastic_ips[node.id]
def ex_modify_instance_attribute(self, node, attributes):
"""
Modify node attributes.
A list of valid attributes can be found at http://goo.gl/gxcj8
@param node: Node instance
@type node: L{Node}
@param attributes: Dictionary with node attributes
@type attributes: C{dict}
@return: True on success, False otherwise.
@rtype: C{bool}
"""
attributes = attributes or {}
attributes.update({'InstanceId': node.id})
params = {'Action': 'ModifyInstanceAttribute'}
params.update(attributes)
result = self.connection.request(self.path,
params=params.copy()).object
element = findtext(element=result, xpath='return',
namespace=NAMESPACE)
return element == 'true'
def ex_change_node_size(self, node, new_size):
"""
Change the node size.
Note: Node must be turned of before changing the size.
@param node: Node instance
@type node: L{Node}
@param new_size: NodeSize intance
@type new_size: L{NodeSize}
@return: True on success, False otherwise.
@rtype: C{bool}
"""
if 'instancetype' in node.extra:
current_instance_type = node.extra['instancetype']
if current_instance_type == new_size.id:
raise ValueError('New instance size is the same as' +
'the current one')
attributes = {'InstanceType.Value': new_size.id}
return self.ex_modify_instance_attribute(node, attributes)
    def create_node(self, **kwargs):
        """Create a new EC2 node

        Reference: http://bit.ly/8ZyPSy [docs.amazonwebservices.com]

        @inherits: L{NodeDriver.create_node}

        @keyword ex_mincount: Minimum number of instances to launch
        @type ex_mincount: C{int}

        @keyword ex_maxcount: Maximum number of instances to launch
        @type ex_maxcount: C{int}

        @keyword ex_securitygroup: Name of security group
        @type ex_securitygroup: C{str}

        @keyword ex_keyname: The name of the key pair
        @type ex_keyname: C{str}

        @keyword ex_userdata: User data
        @type ex_userdata: C{str}

        @keyword ex_clienttoken: Unique identifier to ensure idempotency
        @type ex_clienttoken: C{str}

        @keyword ex_blockdevicemappings: C{list} of C{dict} block device
            mappings. Example:
            [{'DeviceName': '/dev/sdb', 'VirtualName': 'ephemeral0'}]
        @type ex_blockdevicemappings: C{list} of C{dict}
        """
        image = kwargs["image"]
        size = kwargs["size"]
        params = {
            'Action': 'RunInstances',
            'ImageId': image.id,
            'MinCount': kwargs.get('ex_mincount', '1'),
            'MaxCount': kwargs.get('ex_maxcount', '1'),
            'InstanceType': size.id
        }
        # A single group name is accepted and normalized to a list.
        if 'ex_securitygroup' in kwargs:
            if not isinstance(kwargs['ex_securitygroup'], list):
                kwargs['ex_securitygroup'] = [kwargs['ex_securitygroup']]
            for sig in range(len(kwargs['ex_securitygroup'])):
                params['SecurityGroup.%d' % (sig + 1,)] =\
                    kwargs['ex_securitygroup'][sig]
        if 'location' in kwargs:
            availability_zone = getattr(kwargs['location'],
                                        'availability_zone', None)
            if availability_zone:
                # The requested zone must belong to this driver's region.
                if availability_zone.region_name != self.region_name:
                    raise AttributeError('Invalid availability zone: %s'
                                         % (availability_zone.name))
                params['Placement.AvailabilityZone'] = availability_zone.name
        if 'ex_keyname' in kwargs:
            params['KeyName'] = kwargs['ex_keyname']
        if 'ex_userdata' in kwargs:
            # User data must be base64 encoded per the EC2 API.
            params['UserData'] = base64.b64encode(b(kwargs['ex_userdata']))\
                .decode('utf-8')
        if 'ex_clienttoken' in kwargs:
            params['ClientToken'] = kwargs['ex_clienttoken']
        if 'ex_blockdevicemappings' in kwargs:
            for index, mapping in enumerate(kwargs['ex_blockdevicemappings']):
                params['BlockDeviceMapping.%d.DeviceName' % (index + 1)] = \
                    mapping['DeviceName']
                params['BlockDeviceMapping.%d.VirtualName' % (index + 1)] = \
                    mapping['VirtualName']
        object = self.connection.request(self.path, params=params).object
        nodes = self._to_nodes(object, 'instancesSet/item')
        # Best effort naming: a failure to tag must not fail node creation.
        for node in nodes:
            tags = {'Name': kwargs['name']}
            try:
                self.ex_create_tags(resource=node, tags=tags)
            except Exception:
                continue
            node.name = kwargs['name']
            node.extra.update({'tags': tags})
        if len(nodes) == 1:
            return nodes[0]
        else:
            return nodes
def reboot_node(self, node):
params = {'Action': 'RebootInstances'}
params.update(self._pathlist('InstanceId', [node.id]))
res = self.connection.request(self.path, params=params).object
return self._get_boolean(res)
def ex_start_node(self, node):
"""
Start the node by passing in the node object, does not work with
instance store backed instances
@param node: Node which should be used
@type node: L{Node}
@rtype: C{bool}
"""
params = {'Action': 'StartInstances'}
params.update(self._pathlist('InstanceId', [node.id]))
res = self.connection.request(self.path, params=params).object
return self._get_state_boolean(res)
def ex_stop_node(self, node):
"""
Stop the node by passing in the node object, does not work with
instance store backed instances
@param node: Node which should be used
@type node: L{Node}
@rtype: C{bool}
"""
params = {'Action': 'StopInstances'}
params.update(self._pathlist('InstanceId', [node.id]))
res = self.connection.request(self.path, params=params).object
return self._get_state_boolean(res)
def destroy_node(self, node):
params = {'Action': 'TerminateInstances'}
params.update(self._pathlist('InstanceId', [node.id]))
res = self.connection.request(self.path, params=params).object
return self._get_terminate_boolean(res)
class EC2NodeDriver(BaseEC2NodeDriver):
    """
    Amazon EC2 node driver.
    """
    connectionCls = EC2Connection
    type = Provider.EC2
    name = 'Amazon EC2'
    website = 'http://aws.amazon.com/ec2/'
    path = '/'
    # Default region for the generic driver; regional subclasses override.
    region_name = 'us-east-1'
    country = 'USA'
    api_name = 'ec2_us_east'
    features = {'create_node': ['ssh_key']}
    # Maps EC2 instance states to generic libcloud NodeState values.
    NODE_STATE_MAP = {
        'pending': NodeState.PENDING,
        'running': NodeState.RUNNING,
        'shutting-down': NodeState.UNKNOWN,
        'terminated': NodeState.TERMINATED
    }
class IdempotentParamError(LibcloudError):
    """
    Request used the same client token as a previous,
    but non-identical request.
    """

    def __str__(self):
        # self.value is populated by the LibcloudError constructor.
        return repr(self.value)
class EC2EUConnection(EC2Connection):
    """
    Connection class for EC2 in the Western Europe Region
    """
    # Region specific API endpoint.
    host = REGION_DETAILS['eu-west-1']['endpoint']
class EC2EUNodeDriver(EC2NodeDriver):
    """
    Driver class for EC2 in the Western Europe Region.
    """
    # Region specific identity; behavior is inherited from EC2NodeDriver.
    api_name = 'ec2_eu_west'
    name = 'Amazon EC2 (eu-west-1)'
    friendly_name = 'Amazon Europe Ireland'
    country = 'IE'
    region_name = 'eu-west-1'
    connectionCls = EC2EUConnection
class EC2USWestConnection(EC2Connection):
    """
    Connection class for EC2 in the Western US Region
    """
    # Region specific API endpoint.
    host = REGION_DETAILS['us-west-1']['endpoint']
class EC2USWestNodeDriver(EC2NodeDriver):
    """
    Driver class for EC2 in the Western US Region
    """
    # Region specific identity; behavior is inherited from EC2NodeDriver.
    api_name = 'ec2_us_west'
    name = 'Amazon EC2 (us-west-1)'
    friendly_name = 'Amazon US N. California'
    country = 'US'
    region_name = 'us-west-1'
    connectionCls = EC2USWestConnection
class EC2USWestOregonConnection(EC2Connection):
    """
    Connection class for EC2 in the Western US Region (Oregon).
    """
    # Region specific API endpoint.
    host = REGION_DETAILS['us-west-2']['endpoint']
class EC2USWestOregonNodeDriver(EC2NodeDriver):
    """
    Driver class for EC2 in the US West Oregon region.
    """
    # Region specific identity; behavior is inherited from EC2NodeDriver.
    api_name = 'ec2_us_west_oregon'
    name = 'Amazon EC2 (us-west-2)'
    friendly_name = 'Amazon US West - Oregon'
    country = 'US'
    region_name = 'us-west-2'
    connectionCls = EC2USWestOregonConnection
class EC2APSEConnection(EC2Connection):
    """
    Connection class for EC2 in the Southeast Asia Pacific Region.
    """
    # Region specific API endpoint.
    host = REGION_DETAILS['ap-southeast-1']['endpoint']
class EC2APNEConnection(EC2Connection):
    """
    Connection class for EC2 in the Northeast Asia Pacific Region.
    """
    # Region specific API endpoint.
    host = REGION_DETAILS['ap-northeast-1']['endpoint']
class EC2APSENodeDriver(EC2NodeDriver):
    """
    Driver class for EC2 in the Southeast Asia Pacific Region.
    """
    # Region specific identity; behavior is inherited from EC2NodeDriver.
    api_name = 'ec2_ap_southeast'
    name = 'Amazon EC2 (ap-southeast-1)'
    friendly_name = 'Amazon Asia-Pacific Singapore'
    country = 'SG'
    region_name = 'ap-southeast-1'
    connectionCls = EC2APSEConnection
class EC2APNENodeDriver(EC2NodeDriver):
    """
    Driver class for EC2 in the Northeast Asia Pacific Region.
    """
    # Region specific identity; behavior is inherited from EC2NodeDriver.
    api_name = 'ec2_ap_northeast'
    name = 'Amazon EC2 (ap-northeast-1)'
    friendly_name = 'Amazon Asia-Pacific Tokyo'
    country = 'JP'
    region_name = 'ap-northeast-1'
    connectionCls = EC2APNEConnection
class EC2SAEastConnection(EC2Connection):
    """
    Connection class for EC2 in the South America (Sao Paulo) Region.
    """
    # Region specific API endpoint.
    host = REGION_DETAILS['sa-east-1']['endpoint']
class EC2SAEastNodeDriver(EC2NodeDriver):
    """
    Driver class for EC2 in the South America (Sao Paulo) Region.
    """
    # Region specific identity; behavior is inherited from EC2NodeDriver.
    api_name = 'ec2_sa_east'
    name = 'Amazon EC2 (sa-east-1)'
    friendly_name = 'Amazon South America Sao Paulo'
    country = 'BR'
    region_name = 'sa-east-1'
    connectionCls = EC2SAEastConnection
class EC2APSESydneyConnection(EC2Connection):
    """
    Connection class for EC2 in the Southeast Asia Pacific (Sydney) Region.
    """
    # Region specific API endpoint.
    host = REGION_DETAILS['ap-southeast-2']['endpoint']
class EC2APSESydneyNodeDriver(EC2NodeDriver):
    """
    Driver class for EC2 in the Southeast Asia Pacific (Sydney) Region.
    """
    # Region specific identity; behavior is inherited from EC2NodeDriver.
    api_name = 'ec2_ap_southeast_2'
    name = 'Amazon EC2 (ap-southeast-2)'
    friendly_name = 'Amazon Asia-Pacific Sydney'
    country = 'AU'
    region_name = 'ap-southeast-2'
    connectionCls = EC2APSESydneyConnection
class EucConnection(EC2Connection):
    """
    Connection class for Eucalyptus
    """
    # The Eucalyptus host is supplied by the user at driver creation time.
    host = None
class EucNodeDriver(BaseEC2NodeDriver):
    """
    Driver class for Eucalyptus
    """
    name = 'Eucalyptus'
    website = 'http://www.eucalyptus.com/'
    # Eucalyptus mimics the US East EC2 API and pricing data.
    api_name = 'ec2_us_east'
    region_name = 'us-east-1'
    connectionCls = EucConnection

    def __init__(self, key, secret=None, secure=True, host=None,
                 path=None, port=None):
        """
        @inherits: L{EC2NodeDriver.__init__}

        @param path: The URL path on the host where the API can be reached
                     (defaults to "/services/Eucalyptus").
        @type path: C{str}
        """
        super(EucNodeDriver, self).__init__(key, secret, secure, host, port)
        if path is None:
            path = "/services/Eucalyptus"
        self.path = path

    def list_locations(self):
        # Eucalyptus has no concept of EC2-style locations.
        raise NotImplementedError(
            'list_locations not implemented for this driver')

    def _add_instance_filter(self, params, node):
        """
        Eucalyptus driver doesn't support filtering on instance id so this is a
        no-op.
        """
        pass
class NimbusConnection(EC2Connection):
    """
    Connection class for Nimbus
    """
    # The Nimbus host is supplied by the user at driver creation time.
    host = None
class NimbusNodeDriver(BaseEC2NodeDriver):
    """
    Driver class for Nimbus
    """
    type = Provider.NIMBUS
    name = 'Nimbus'
    website = 'http://www.nimbusproject.org/'
    country = 'Private'
    api_name = 'nimbus'
    region_name = 'nimbus'
    friendly_name = 'Nimbus Private Cloud'
    connectionCls = NimbusConnection

    def ex_describe_addresses(self, nodes):
        """
        Nimbus doesn't support elastic IPs, so this is a passthrough.

        @inherits: L{EC2NodeDriver.ex_describe_addresses}
        """
        # Every node simply maps to an empty list of addresses.
        return dict((node.id, []) for node in nodes)

    def ex_create_tags(self, resource, tags):
        """
        Nimbus doesn't support creating tags, so this is a passthrough.

        @inherits: L{EC2NodeDriver.ex_create_tags}
        """
        pass
| ema/conpaas | conpaas-services/contrib/libcloud/compute/drivers/ec2.py | Python | bsd-3-clause | 53,777 |
var React = require('react');
var ReactFeatureFlags = require('ReactFeatureFlags');
var ReactDOM;
var AsyncComponent = React.unstable_AsyncComponent;
describe('ReactDOMFiberAsync', () => {
var container;
beforeEach(() => {
container = document.createElement('div');
ReactDOM = require('react-dom');
});
  // Without any async wrapper the DOM is committed before the render
  // callback fires, so each callback observes its own render's output.
  it('renders synchronously by default', () => {
    var ops = [];
    ReactDOM.render(<div>Hi</div>, container, () => {
      ops.push(container.textContent);
    });
    ReactDOM.render(<div>Bye</div>, container, () => {
      ops.push(container.textContent);
    });
    expect(ops).toEqual(['Hi', 'Bye']);
  });
  // With enableAsyncSubtreeAPI off, <AsyncComponent> renders like a plain
  // synchronous subtree (modules are reset so the flag takes effect).
  describe('with feature flag disabled', () => {
    beforeEach(() => {
      jest.resetModules();
      ReactFeatureFlags = require('ReactFeatureFlags');
      container = document.createElement('div');
      ReactFeatureFlags.enableAsyncSubtreeAPI = false;
      ReactDOM = require('react-dom');
    });
    it('renders synchronously', () => {
      ReactDOM.render(
        <AsyncComponent><div>Hi</div></AsyncComponent>,
        container,
      );
      expect(container.textContent).toEqual('Hi');
      ReactDOM.render(
        <AsyncComponent><div>Bye</div></AsyncComponent>,
        container,
      );
      expect(container.textContent).toEqual('Bye');
    });
  });
describe('with feature flag enabled', () => {
beforeEach(() => {
jest.resetModules();
ReactFeatureFlags = require('ReactFeatureFlags');
container = document.createElement('div');
ReactFeatureFlags.enableAsyncSubtreeAPI = true;
ReactDOM = require('react-dom');
});
    // Async renders are deferred to a timer; the DOM stays unchanged until
    // jest.runAllTimers() flushes the scheduled work.
    it('AsyncComponent at the root makes the entire tree async', () => {
      ReactDOM.render(
        <AsyncComponent><div>Hi</div></AsyncComponent>,
        container,
      );
      expect(container.textContent).toEqual('');
      jest.runAllTimers();
      expect(container.textContent).toEqual('Hi');
      ReactDOM.render(
        <AsyncComponent><div>Bye</div></AsyncComponent>,
        container,
      );
      expect(container.textContent).toEqual('Hi');
      jest.runAllTimers();
      expect(container.textContent).toEqual('Bye');
    });
    // setState inside an async tree is also deferred until timers run.
    it('updates inside an async tree are async by default', () => {
      let instance;
      class Component extends React.Component {
        state = {step: 0};
        render() {
          instance = this;
          return <div>{this.state.step}</div>;
        }
      }
      ReactDOM.render(
        <AsyncComponent><Component /></AsyncComponent>,
        container,
      );
      expect(container.textContent).toEqual('');
      jest.runAllTimers();
      expect(container.textContent).toEqual('0');
      instance.setState({step: 1});
      expect(container.textContent).toEqual('0');
      jest.runAllTimers();
      expect(container.textContent).toEqual('1');
    });
    // A component extending unstable_AsyncComponent opts its own subtree
    // into async updates even when mounted inside a sync tree.
    it('AsyncComponent creates an async subtree', () => {
      let instance;
      class Component extends React.unstable_AsyncComponent {
        state = {step: 0};
        render() {
          instance = this;
          return <div>{this.state.step}</div>;
        }
      }
      ReactDOM.render(<div><Component /></div>, container);
      jest.runAllTimers();
      instance.setState({step: 1});
      expect(container.textContent).toEqual('0');
      jest.runAllTimers();
      expect(container.textContent).toEqual('1');
    });
    // Descendants of an async subtree inherit async scheduling: the sync
    // Child's setState is still deferred to a timer.
    it('updates inside an async subtree are async by default', () => {
      class Component extends React.unstable_AsyncComponent {
        render() {
          return <Child />;
        }
      }
      let instance;
      class Child extends React.Component {
        state = {step: 0};
        render() {
          instance = this;
          return <div>{this.state.step}</div>;
        }
      }
      ReactDOM.render(<div><Component /></div>, container);
      jest.runAllTimers();
      instance.setState({step: 1});
      expect(container.textContent).toEqual('0');
      jest.runAllTimers();
      expect(container.textContent).toEqual('1');
    });
    // Inside flushSync, multiple setState calls are batched and committed
    // once at the end of the callback; outside it they flush immediately.
    it('flushSync batches sync updates and flushes them at the end of the batch', () => {
      let ops = [];
      let instance;
      class Component extends React.Component {
        state = {text: ''};
        push(val) {
          this.setState(state => ({text: state.text + val}));
        }
        componentDidUpdate() {
          ops.push(this.state.text);
        }
        render() {
          instance = this;
          return <span>{this.state.text}</span>;
        }
      }
      ReactDOM.render(<Component />, container);
      instance.push('A');
      expect(ops).toEqual(['A']);
      expect(container.textContent).toEqual('A');
      ReactDOM.flushSync(() => {
        instance.push('B');
        instance.push('C');
        // Not flushed yet
        expect(container.textContent).toEqual('A');
        expect(ops).toEqual(['A']);
      });
      expect(container.textContent).toEqual('ABC');
      expect(ops).toEqual(['A', 'ABC']);
      instance.push('D');
      expect(container.textContent).toEqual('ABCD');
      expect(ops).toEqual(['A', 'ABC', 'ABCD']);
    });
    // A nested flushSync forces the whole pending batch ('B', 'C' and 'D')
    // to commit, not just its own callback's updates.
    it('flushSync flushes updates even if nested inside another flushSync', () => {
      let ops = [];
      let instance;
      class Component extends React.Component {
        state = {text: ''};
        push(val) {
          this.setState(state => ({text: state.text + val}));
        }
        componentDidUpdate() {
          ops.push(this.state.text);
        }
        render() {
          instance = this;
          return <span>{this.state.text}</span>;
        }
      }
      ReactDOM.render(<Component />, container);
      instance.push('A');
      expect(ops).toEqual(['A']);
      expect(container.textContent).toEqual('A');
      ReactDOM.flushSync(() => {
        instance.push('B');
        instance.push('C');
        // Not flushed yet
        expect(container.textContent).toEqual('A');
        expect(ops).toEqual(['A']);
        ReactDOM.flushSync(() => {
          instance.push('D');
        });
        // The nested flushSync caused everything to flush.
        expect(container.textContent).toEqual('ABCD');
        expect(ops).toEqual(['A', 'ABCD']);
      });
      expect(container.textContent).toEqual('ABCD');
      expect(ops).toEqual(['A', 'ABCD']);
    });
    // flushSync is illegal while React is already rendering/committing;
    // calling it from componentDidUpdate must throw.
    it('flushSync throws if already performing work', () => {
      class Component extends React.Component {
        componentDidUpdate() {
          ReactDOM.flushSync(() => {});
        }
        render() {
          return null;
        }
      }
      // Initial mount
      ReactDOM.render(<Component />, container);
      // Update
      expect(() => ReactDOM.render(<Component />, container)).toThrow(
        'flushSync was called from inside a lifecycle method',
      );
    });
    // In an async component, flushSync forces only the updates scheduled
    // inside its callback ('B', 'C'); the async ones ('A', 'D') stay queued
    // until timers run.
    it('flushSync flushes updates before end of the tick', () => {
      let ops = [];
      let instance;
      class Component extends React.unstable_AsyncComponent {
        state = {text: ''};
        push(val) {
          this.setState(state => ({text: state.text + val}));
        }
        componentDidUpdate() {
          ops.push(this.state.text);
        }
        render() {
          instance = this;
          return <span>{this.state.text}</span>;
        }
      }
      ReactDOM.render(<Component />, container);
      jest.runAllTimers();
      // Updates are async by default
      instance.push('A');
      expect(ops).toEqual([]);
      expect(container.textContent).toEqual('');
      ReactDOM.flushSync(() => {
        instance.push('B');
        instance.push('C');
        // Not flushed yet
        expect(container.textContent).toEqual('');
        expect(ops).toEqual([]);
      });
      // Only the active updates have flushed
      expect(container.textContent).toEqual('BC');
      expect(ops).toEqual(['BC']);
      instance.push('D');
      expect(container.textContent).toEqual('BC');
      expect(ops).toEqual(['BC']);
      // Flush the async updates
      jest.runAllTimers();
      expect(container.textContent).toEqual('BCAD');
      expect(ops).toEqual(['BC', 'BCAD']);
    });
});
});
| yangshun/react | src/renderers/dom/fiber/__tests__/ReactDOMFiberAsync-test.js | JavaScript | bsd-3-clause | 8,185 |
# Version of the python-efl bindings; the tuple mirrors the string for
# programmatic comparison.
__version__ = "1.12.0"
__version_info__ = ( 1, 12, 0 )
| JeffHoogland/bodhi3packages | python3-efl-i386/usr/lib/python3.4/dist-packages/efl/__init__.py | Python | bsd-3-clause | 56 |
//==============================================================================
// Copyright 2003 & onward LASMEA UMR 6602 CNRS/Univ. Clermont II
// Copyright 2009 & onward LRI UMR 8623 CNRS/Univ Paris Sud XI
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
//==============================================================================
// Forwarding header: nt2's ge (>=) functor is provided by
// is_greater_equal; this header only pulls in that definition.
#ifndef NT2_OPERATOR_INCLUDE_FUNCTIONS_GE_HPP_INCLUDED
#define NT2_OPERATOR_INCLUDE_FUNCTIONS_GE_HPP_INCLUDED
#include <nt2/operator/include/functions/is_greater_equal.hpp>
#endif
| hainm/pythran | third_party/nt2/operator/include/functions/ge.hpp | C++ | bsd-3-clause | 688 |
#!/usr/bin/env python
"""Backport from python2.7 to python <= 2.6."""
from itertools import repeat as _repeat, chain as _chain, starmap as _starmap
try:
    from itertools import izip_longest as _zip_longest
except ImportError:
    # Pre-2.6 interpreters lack izip_longest; emulate it on top of izip.
    from itertools import izip
    def _zip_longest(*args, **kwds):
        # izip_longest('ABCD', 'xy', fillvalue='-') --> Ax By C- D-
        fillvalue = kwds.get('fillvalue')
        # sentinel() yields fillvalue until the shared pop-list is
        # exhausted, then raises IndexError to terminate the outer izip
        # once every input iterable has finished.
        def sentinel(counter = ([fillvalue]*(len(args)-1)).pop):
            yield counter() # yields the fillvalue, or raises IndexError
        fillers = _repeat(fillvalue)
        iters = [_chain(it, sentinel(), fillers) for it in args]
        try:
            for tup in izip(*iters):
                yield tup
        except IndexError:
            pass
class OrderedDict(dict):
    """Dictionary that remembers insertion order.

    Backport of the Python 2.7 ``collections.OrderedDict`` API for older
    interpreters.  Key order is tracked in the auxiliary ``self._keys``
    list; the inherited dict provides the actual key -> value mapping.
    """

    def __init__(self, *args, **kwds):
        # Mirror dict's signature: at most one positional mapping/iterable.
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        # _keys may already exist if __init__ is invoked again (e.g. when
        # the instance dict was restored by pickle before __init__ runs).
        if not hasattr(self, '_keys'):
            self._keys = []
        self.update(*args, **kwds)

    def clear(self):
        """Remove all items, keeping the key-order list in sync."""
        del self._keys[:]
        dict.clear(self)

    def __setitem__(self, key, value):
        # Record the key only on first insertion; overwriting an existing
        # key keeps its original position (matches collections.OrderedDict).
        if key not in self:
            self._keys.append(key)
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._keys.remove(key)

    def __iter__(self):
        """Iterate over keys in insertion order."""
        return iter(self._keys)

    def __reversed__(self):
        return reversed(self._keys)

    def popitem(self, last=True):
        """Remove and return a ``(key, value)`` pair.

        Pairs are returned in LIFO order if *last* is true (the default)
        or FIFO order if false, matching collections.OrderedDict.
        Raises KeyError if the dictionary is empty.
        """
        if not self:
            raise KeyError('dictionary is empty')
        if last:
            key = self._keys.pop()
        else:
            key = self._keys.pop(0)
        value = dict.pop(self, key)
        return key, value

    def __reduce__(self):
        # Pickle as (class, (items,), instance-dict-without-_keys) so the
        # order is rebuilt through __init__/update on unpickling.
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        inst_dict.pop('_keys', None)
        return (self.__class__, (items,), inst_dict)

    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default

    def update(self, other=(), **kwds):
        # Accept a mapping, an iterable of pairs, and/or keyword arguments.
        if hasattr(other, "keys"):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    # Sentinel distinguishing "no default supplied" from "default=None".
    __marker = object()

    def pop(self, key, default=__marker):
        try:
            value = self[key]
        except KeyError:
            if default is self.__marker:
                raise
            return default
        else:
            del self[key]
            return value

    def keys(self):
        """Return keys as a list, in insertion order."""
        return list(self)

    def values(self):
        return [self[key] for key in self]

    def items(self):
        return [(key, self[key]) for key in self]

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self.items()))

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        # Order-sensitive when compared to another OrderedDict,
        # order-insensitive against a plain dict (2.7 semantics).
        if isinstance(other, OrderedDict):
            return all(p == q
                       for p, q in _zip_longest(self.items(), other.items()))
        return dict.__eq__(self, other)

    def __ne__(self, other):
        # BUG FIX: without this, `!=` fell back to dict.__ne__, which is
        # order-insensitive and could disagree with the order-sensitive
        # __eq__ above (a == b yet a != b).
        return not self == other
| lewisodriscoll/sasview | src/sas/sascalc/data_util/ordereddict.py | Python | bsd-3-clause | 3,441 |
#!/usr/bin/env python
'''
Author: Christopher Duffy
Date: February 2015
Name: nmap_scanner.py
Purpose: To scan a network
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
try:
    import nmap
except ImportError:  # a bare except here would also mask unrelated errors
    sys.exit("[!] Install the nmap library: pip install python-nmap")
# Argument validator: argv[1] = target addresses, argv[2] = ports.
if len(sys.argv) != 3:
    sys.exit("Please provide two arguments the first being the targets the second the ports")
ports = str(sys.argv[2])
addrs = str(sys.argv[1])
scanner = nmap.PortScanner()
scanner.scan(addrs, ports)
for host in scanner.all_hosts():
    hostname = scanner[host].hostname()
    if not hostname:
        # BUG FIX: the original tested `“” in host` -- the Unicode smart
        # quotes are a SyntaxError, and even with straight quotes an empty
        # string is a substring of every address, so the branch was always
        # taken.  It also applied `%` to the result of print() and passed
        # two values into a one-placeholder format string (TypeError).
        print("The host's IP address is %s and it's hostname was not found" % (host))
    else:
        print("The host's IP address is %s and it's hostname is %s" % (host, hostname))
| liorvh/pythonpentest | nmap_scannner.py | Python | bsd-3-clause | 2,228 |
# Nov 22, 2014
# This patch is to create all the prep/sample template files and link them in
# the database so they are present for download
from os.path import join
from time import strftime
from qiita_db.util import get_mountpoint
from qiita_db.sql_connection import SQLConnectionHandler
from qiita_db.metadata_template import SampleTemplate, PrepTemplate
conn_handler = SQLConnectionHandler()
# Base directory of the 'templates' mountpoint; all files are written here.
_id, fp_base = get_mountpoint('templates')[0]
# Write and register a timestamped sample-template file for every study
# that has a sample template.
for study_id in conn_handler.execute_fetchall(
        "SELECT study_id FROM qiita.study"):
    study_id = study_id[0]
    if SampleTemplate.exists(study_id):
        st = SampleTemplate(study_id)
        fp = join(fp_base, '%d_%s.txt' % (study_id, strftime("%Y%m%d-%H%M%S")))
        st.to_file(fp)
        st.add_filepath(fp)
# Write and register a timestamped prep-template file for every prep template.
for prep_template_id in conn_handler.execute_fetchall(
        "SELECT prep_template_id FROM qiita.prep_template"):
    prep_template_id = prep_template_id[0]
    pt = PrepTemplate(prep_template_id)
    # NOTE(review): study_id is assigned but never used below (fp uses
    # pt.study_id directly) -- confirm before removing.
    study_id = pt.study_id
    fp = join(fp_base, '%d_prep_%d_%s.txt' % (pt.study_id, prep_template_id,
                                              strftime("%Y%m%d-%H%M%S")))
    pt.to_file(fp)
    pt.add_filepath(fp)
| RNAer/qiita | qiita_db/support_files/patches/python_patches/6.py | Python | bsd-3-clause | 1,165 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE415_Double_Free__new_delete_long_41.cpp
Label Definition File: CWE415_Double_Free__new_delete.label.xml
Template File: sources-sinks-41.tmpl.cpp
*/
/*
* @description
* CWE: 415 Double Free
* BadSource: Allocate data using new and Deallocae data using delete
* GoodSource: Allocate data using new
* Sinks:
* GoodSink: do nothing
* BadSink : Deallocate data using delete
* Flow Variant: 41 Data flow: data passed as an argument from one function to another in the same source file
*
* */
#include "std_testcase.h"
#include <wchar.h>
namespace CWE415_Double_Free__new_delete_long_41
{
#ifndef OMITBAD
/* Sink of the bad flow: unconditionally deletes the pointer it is handed.
 * Combined with the delete in bad(), this is the intentional CWE-415
 * double free.  NOTE: this is a Juliet test-corpus file; the flaw is
 * deliberate and must NOT be "fixed". */
static void badSink(long * data)
{
    /* POTENTIAL FLAW: Possibly deleting memory twice */
    delete data;
}
/* Bad flow: allocates, deletes, then passes the dangling pointer to
 * badSink(), which deletes it a second time. */
void bad()
{
    long * data;
    /* Initialize data */
    data = NULL;
    data = new long;
    /* POTENTIAL FLAW: delete data in the source - the bad sink deletes data as well */
    delete data;
    badSink(data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B() uses the GoodSource with the BadSink */
static void goodG2BSink(long * data)
{
    /* POTENTIAL FLAW: Possibly deleting memory twice */
    delete data;
}
/* Good flow G2B: the source does not delete, so the single delete in the
 * sink is safe. */
static void goodG2B()
{
    long * data;
    /* Initialize data */
    data = NULL;
    data = new long;
    /* FIX: Do NOT delete data in the source - the bad sink deletes data */
    goodG2BSink(data);
}
/* goodB2G() uses the BadSource with the GoodSink */
static void goodB2GSink(long * data)
{
    /* do nothing */
    /* FIX: Don't attempt to delete the memory */
    ; /* empty statement needed for some flow variants */
}
/* Good flow B2G: the source deletes once; the sink intentionally does
 * nothing, so no double free occurs. */
static void goodB2G()
{
    long * data;
    /* Initialize data */
    data = NULL;
    data = new long;
    /* POTENTIAL FLAW: delete data in the source - the bad sink deletes data as well */
    delete data;
    goodB2GSink(data);
}
/* Entry point exercising both good flows. */
void good()
{
    goodG2B();
    goodB2G();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
   its own for testing or for building a binary to use in testing binary
   analysis tools. It is not used when compiling all the testcases as one
   application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE415_Double_Free__new_delete_long_41; /* so that we can use good and bad easily */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    /* Deliberately triggers the double free (undefined behaviour). */
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE415_Double_Free/s02/CWE415_Double_Free__new_delete_long_41.cpp | C++ | bsd-3-clause | 2,863 |
//==============================================================================
// Copyright 2003 - 2011 LASMEA UMR 6602 CNRS/Univ. Clermont II
// Copyright 2009 - 2011 LRI UMR 8623 CNRS/Univ Paris Sud XI
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
//==============================================================================
#ifndef NT2_IEEE_FUNCTIONS_EPS_HPP_INCLUDED
#define NT2_IEEE_FUNCTIONS_EPS_HPP_INCLUDED
#include <boost/simd/ieee/include/functions/eps.hpp>
#include <nt2/include/functor.hpp>
/* Automatically generated for module core.base */
/* Forwarding header: re-exports boost::simd's eps functor into the nt2
   namespace.  The DOXYGEN_ONLY declarations exist only so the
   documentation generator sees a definition. */
namespace nt2
{
  namespace tag
  {
    #ifdef DOXYGEN_ONLY
    /*! \brief Same as \classref{boost::simd::tag::eps_} **/
    struct eps_ {};
    #endif
    using boost::simd::tag::eps_;
  }
  #ifdef DOXYGEN_ONLY
  /*! \brief Same as \funcref{boost::simd::eps} **/
  template<class... Args>
  details::unspecified eps(Args&&... args);
  #endif
  using boost::simd::eps;
}
#endif
| hainm/pythran | third_party/nt2/ieee/functions/eps.hpp | C++ | bsd-3-clause | 1,118 |
<?php
namespace DmCommon\Factory\Service;
use Zend\Log\Logger;
use Zend\ServiceManager\FactoryInterface;
use Zend\ServiceManager\ServiceLocatorInterface;
use DmCommon\Service\ErrorHandler as ErrorHandlerService;
class ErrorHandler implements FactoryInterface
{
    /**
     * Builds the error-handler service, wiring in the shared exception logger.
     *
     * @param ServiceLocatorInterface $serviceLocator
     *
     * @return ErrorHandlerService
     */
    public function createService(ServiceLocatorInterface $serviceLocator)
    {
        /** @var Logger $exceptionLogger */
        $exceptionLogger = $serviceLocator->get('DmCommon\Exception\Log');

        return new ErrorHandlerService($exceptionLogger);
    }
}
| peteraba/dermailer | vendor/peteraba/dm-common/src/DmCommon/Factory/Service/ErrorHandler.php | PHP | bsd-3-clause | 647 |
from __future__ import print_function
import numpy as np
from bokeh.client import push_session
from bokeh.io import curdoc
from bokeh.models import (ColumnDataSource, DataRange1d, Plot, Circle, WidgetBox,
                          Row, Button, TapTool)
# Example: two scatter plots with linked tap-selection.  Selecting points
# in one plot highlights the corresponding points in the other.
N = 9
x = np.linspace(-2, 2, N)
y = x**2
# Plot 1: each point carries its own radius so a selection in plot 2 can
# be emphasised by growing the matching point.
source1 = ColumnDataSource(dict(x = x, y = y, radius = [0.1]*N))
xdr1 = DataRange1d()
ydr1 = DataRange1d()
plot1 = Plot(x_range=xdr1, y_range=ydr1, plot_width=400, plot_height=400)
plot1.title.text = "Plot1"
plot1.tools.append(TapTool(plot=plot1))
plot1.add_glyph(source1, Circle(x="x", y="y", radius="radius", fill_color="red"))
# Plot 2: each point carries its own colour so a selection in plot 1 can
# be emphasised by recolouring the matching points.
source2 = ColumnDataSource(dict(x = x, y = y, color = ["blue"]*N))
xdr2 = DataRange1d()
ydr2 = DataRange1d()
plot2 = Plot(x_range=xdr2, y_range=ydr2, plot_width=400, plot_height=400)
plot2.title.text = "Plot2"
plot2.tools.append(TapTool(plot=plot2))
plot2.add_glyph(source2, Circle(x="x", y="y", radius=0.1, fill_color="color"))
def on_selection_change1(attr, _, inds):
    # Selecting points in plot1 turns the matching points in plot2 red;
    # an empty selection resets every point to blue.
    color = ["blue"]*N
    if inds['1d']['indices']:
        indices = inds['1d']['indices']
        for i in indices:
            color[i] = "red"
    source2.data["color"] = color
source1.on_change('selected', on_selection_change1)
def on_selection_change2(attr, _, inds):
    # Selecting a single point in plot2 doubles the radius of the matching
    # point in plot1 (0.1 -> 0.2); clearing the selection restores all radii.
    inds = inds['1d']['indices']
    if inds:
        [index] = inds
        radius = [0.1]*N
        radius[index] = 0.2
    else:
        radius = [0.1]*N
    source1.data["radius"] = radius
source2.on_change('selected', on_selection_change2)
reset = Button(label="Reset")
def on_reset_click():
    # Clear the selection on both sources (0d/1d/2d selection geometries).
    source1.selected = {
        '0d': {'flag': False, 'indices': []},
        '1d': {'indices': []},
        '2d': {'indices': {}}
    }
    source2.selected = {
        '0d': {'flag': False, 'indices': []},
        '1d': {'indices': []},
        '2d': {'indices': {}}
    }
reset.on_click(on_reset_click)
widgetBox = WidgetBox(children=[reset], width=150)
row = Row(children=[widgetBox, plot1, plot2])
document = curdoc()
document.add_root(row)
if __name__ == "__main__":
    print("\npress ctrl-C to exit")
    # Push this document to a running Bokeh server session and block there.
    session = push_session(document)
    session.show()
    session.loop_until_closed()
| DuCorey/bokeh | examples/models/server/linked_tap.py | Python | bsd-3-clause | 2,181 |
<?php
namespace backend\controllers;
use Yii;
use common\models\TblAssetTestKeyboard;
use common\models\SearchTblAssetTestKeyboard;
use yii\web\Controller;
use yii\web\NotFoundHttpException;
use yii\filters\VerbFilter;
/**
* TblAssetTestKeyboardController implements the CRUD actions for TblAssetTestKeyboard model.
*/
class TblAssetTestKeyboardController extends Controller
{
    /**
     * {@inheritdoc}
     */
    public function behaviors()
    {
        return [
            'verbs' => [
                'class' => VerbFilter::className(),
                'actions' => [
                    'delete' => ['post'],
                ],
            ],
        ];
    }

    /**
     * Lists all TblAssetTestKeyboard models.
     *
     * @return mixed
     */
    public function actionIndex()
    {
        $searchModel = new SearchTblAssetTestKeyboard();

        return $this->render('index', [
            'searchModel' => $searchModel,
            'dataProvider' => $searchModel->search(Yii::$app->request->queryParams),
        ]);
    }

    /**
     * Displays a single TblAssetTestKeyboard model.
     *
     * @param integer $id
     * @return mixed
     */
    public function actionView($id)
    {
        return $this->render('view', ['model' => $this->findModel($id)]);
    }

    /**
     * Creates a new TblAssetTestKeyboard model.
     * On success the browser is redirected to the 'view' page.
     *
     * @return mixed
     */
    public function actionCreate()
    {
        $model = new TblAssetTestKeyboard();

        if ($model->load(Yii::$app->request->post()) && $model->save()) {
            return $this->redirect(['view', 'id' => $model->id]);
        }

        return $this->render('create', ['model' => $model]);
    }

    /**
     * Updates an existing TblAssetTestKeyboard model.
     * On success the browser is redirected to the 'view' page.
     *
     * @param integer $id
     * @return mixed
     */
    public function actionUpdate($id)
    {
        $model = $this->findModel($id);

        if ($model->load(Yii::$app->request->post()) && $model->save()) {
            return $this->redirect(['view', 'id' => $model->id]);
        }

        return $this->render('update', ['model' => $model]);
    }

    /**
     * Deletes an existing TblAssetTestKeyboard model.
     * On success the browser is redirected to the 'index' page.
     *
     * @param integer $id
     * @return mixed
     */
    public function actionDelete($id)
    {
        $this->findModel($id)->delete();

        return $this->redirect(['index']);
    }

    /**
     * Finds the TblAssetTestKeyboard model based on its primary key value.
     *
     * @param integer $id
     * @return TblAssetTestKeyboard the loaded model
     * @throws NotFoundHttpException if the model cannot be found
     */
    protected function findModel($id)
    {
        $model = TblAssetTestKeyboard::findOne($id);

        if ($model === null) {
            throw new NotFoundHttpException('The requested page does not exist.');
        }

        return $model;
    }
}
| jflash49/TSS | backend/controllers/TblAssetTestKeyboardController.php | PHP | bsd-3-clause | 3,299 |
package eta.runtime.apply;
import eta.runtime.stg.Closure;
import eta.runtime.stg.StgContext;
/**
 * A function closure taking exactly five arguments.
 */
public class Function5 extends Function {
    /** Number of arguments this function expects. */
    public int arity() { return 5; }

    @Override
    public Closure apply6(StgContext context, Closure p1, Closure p2, Closure p3, Closure p4, Closure p5, Closure p6) {
        // Saturate the first five arguments, restore the trampoline flag,
        // then feed the remaining sixth argument to the resulting closure.
        boolean savedTrampoline = context.getAndSetTrampoline();
        Closure saturated = apply5(context, p1, p2, p3, p4, p5);
        context.trampoline = savedTrampoline;
        return saturated.apply1(context, p6);
    }
}
| rahulmutt/ghcvm | rts/src/main/java/eta/runtime/apply/Function5.java | Java | bsd-3-clause | 510 |
"""
BrowserID support
"""
from social.backends.base import BaseAuth
from social.exceptions import AuthFailed, AuthMissingParameter
class PersonaAuth(BaseAuth):
    """BrowserID (Mozilla Persona) authentication backend.

    The browser submits a signed ``assertion`` which is verified
    server-side against the browserid.org verification service.
    """
    name = 'persona'

    def get_user_id(self, details, response):
        """Use BrowserID email as ID"""
        return details['email']

    def get_user_details(self, response):
        """Return user details; BrowserID only provides the email.

        Example verifier response::

            {'status': 'okay',
             'audience': 'localhost:8000',
             'expires': 1328983575529,
             'email': 'name@server.com',
             'issuer': 'browserid.org'}
        """
        email = response['email']
        return {'username': email.split('@', 1)[0],
                'email': email,
                'fullname': '',
                'first_name': '',
                'last_name': ''}

    def extra_data(self, user, uid, response, details):
        """Store the assertion audience and issuer as extra data."""
        return {'audience': response['audience'],
                'issuer': response['issuer']}

    def auth_complete(self, *args, **kwargs):
        """Completes login process, must return user instance"""
        # Idiomatic membership test ('x not in y', not 'not x in y').
        if 'assertion' not in self.data:
            raise AuthMissingParameter(self, 'assertion')
        response = self.get_json('https://browserid.org/verify', data={
            'assertion': self.data['assertion'],
            'audience': self.strategy.request_host()
        }, method='POST')
        if response.get('status') == 'failure':
            raise AuthFailed(self)
        kwargs.update({'response': response, 'backend': self})
        return self.strategy.authenticate(*args, **kwargs)
| nvbn/python-social-auth | social/backends/persona.py | Python | bsd-3-clause | 1,664 |
/**
* Copyright (c) 2014, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.ucla.wise.commons;
/**
* This class contains the constants used in the application.
*/
public class WiseConstants {
    // Names of the two WISE web applications.
    public static final String ADMIN_APP = "admin";
    public static final String SURVEY_APP = "survey";
    /* This is used by remote servlet loader to initiate the monitoring process. */
    public static final String SURVEY_HEALTH_LOADER = "survey_health";
    /** Lifecycle states a survey invitee/response can be in. */
    public enum STATES {
        started, completed, incompleter, non_responder, interrupted, start_reminder_1, start_reminder_2, start_reminder_3, completion_reminder_1, completion_reminder_2, completion_reminder_3,
    }
    /** Health status of a survey application instance. */
    public enum SURVEY_STATUS {
        OK, FAIL, NOT_AVAIL
    }
    // Polling intervals, all in milliseconds.
    public static final long surveyCheckInterval = 10 * 60 * 1000; // 10 mins
    public static final long surveyUpdateInterval = 5 * 60 * 1000; // 5 mins
    public static final long dbSmtpCheckInterval = 3 * 60 * 1000; // 3 mins
    public static final String NEW_INVITEE_JSP_PAGE = "new_invitee.jsp";
    public static final String HTML_EXTENSION = ".htm";
    public static final String NEWLINE = "\n";
    public static final String COMMA = ",";
    // NOTE(review): declared as Object holding the sentinel string "NULL";
    // confirm callers rely on the Object type before narrowing to String.
    public static final Object NULL = "NULL";
} | ctsidev/SecureWise | wise/src/edu/ucla/wise/commons/WiseConstants.java | Java | bsd-3-clause | 2,788 |