code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
4
991
language
stringclasses
9 values
license
stringclasses
15 values
size
int32
3
1.05M
/* * DBeaver - Universal Database Manager * Copyright (C) 2013-2016 Denis Forveille (titou10.titou10@gmail.com) * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.db2.model; import java.sql.ResultSet; import java.sql.Timestamp; import java.util.Collection; import java.util.Map; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.ext.db2.DB2Constants; import org.jkiss.dbeaver.ext.db2.DB2Utils; import org.jkiss.dbeaver.ext.db2.editors.DB2SourceObject; import org.jkiss.dbeaver.ext.db2.editors.DB2TableTablespaceListProvider; import org.jkiss.dbeaver.ext.db2.model.cache.DB2TableTriggerCache; import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableAccessMode; import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableCompressionMode; import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableDropRule; import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableLockSize; import org.jkiss.dbeaver.ext.db2.model.dict.DB2TablePartitionMode; import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableStatus; import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableTemporalType; import org.jkiss.dbeaver.ext.db2.model.dict.DB2TableType; import org.jkiss.dbeaver.ext.db2.model.dict.DB2YesNo; import org.jkiss.dbeaver.model.DBPNamedObject2; import org.jkiss.dbeaver.model.DBPRefreshableObject; import org.jkiss.dbeaver.model.data.DBDPseudoAttribute; import 
org.jkiss.dbeaver.model.data.DBDPseudoAttributeContainer; import org.jkiss.dbeaver.model.exec.DBCException; import org.jkiss.dbeaver.model.impl.DBObjectNameCaseTransformer; import org.jkiss.dbeaver.model.impl.DBSObjectCache; import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils; import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectSimpleCache; import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCStructCache; import org.jkiss.dbeaver.model.meta.Association; import org.jkiss.dbeaver.model.meta.Property; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.model.struct.DBSObject; import org.jkiss.dbeaver.model.struct.DBSObjectState; import org.jkiss.dbeaver.model.struct.rdb.DBSTableForeignKey; import org.jkiss.dbeaver.utils.GeneralUtils; import org.jkiss.utils.CommonUtils; /** * DB2 Table * * @author Denis Forveille */ public class DB2Table extends DB2TableBase implements DBPNamedObject2, DBPRefreshableObject, DB2SourceObject, DBDPseudoAttributeContainer { private static final String LINE_SEPARATOR = GeneralUtils.getDefaultLineSeparator(); private static final String C_PT = "SELECT * FROM SYSCAT.DATAPARTITIONS WHERE TABSCHEMA = ? AND TABNAME = ? ORDER BY SEQNO WITH UR"; private static final String C_PE = "SELECT * FROM SYSCAT.PERIODS WHERE TABSCHEMA = ? AND TABNAME = ? ORDER BY PERIODNAME WITH UR"; private DB2TableTriggerCache tableTriggerCache = new DB2TableTriggerCache(); // Dependent of DB2 Version. 
OK because the folder is hidden in plugin.xml private DBSObjectCache<DB2Table, DB2TablePartition> partitionCache; private DBSObjectCache<DB2Table, DB2TablePeriod> periodCache; private DB2TableStatus status; private DB2TableType type; private Object tablespace; private Object indexTablespace; private Object longTablespace; private String dataCapture; private String constChecked; private DB2TablePartitionMode partitionMode; private Boolean append; private DB2TableLockSize lockSize; private String volatileMode; private DB2TableCompressionMode compression; private DB2TableAccessMode accessMode; private Boolean mdcClustered; private DB2TableDropRule dropRule; private DB2TableTemporalType temporalType; private Timestamp alterTime; private Timestamp invalidateTime; private Timestamp lastRegenTime; private Timestamp statsTime; private Long card; private Long nPages; private Long fPages; private Long overFLow; // ----------------- // Constructors // ----------------- public DB2Table(DBRProgressMonitor monitor, DB2Schema schema, ResultSet dbResult) throws DBException { super(monitor, schema, dbResult); this.status = CommonUtils.valueOf(DB2TableStatus.class, JDBCUtils.safeGetString(dbResult, "STATUS")); this.type = CommonUtils.valueOf(DB2TableType.class, JDBCUtils.safeGetString(dbResult, "TYPE")); this.statsTime = JDBCUtils.safeGetTimestamp(dbResult, "STATS_TIME"); this.dataCapture = JDBCUtils.safeGetString(dbResult, "DATACAPTURE"); this.constChecked = JDBCUtils.safeGetString(dbResult, "CONST_CHECKED"); this.partitionMode = CommonUtils.valueOf(DB2TablePartitionMode.class, JDBCUtils.safeGetString(dbResult, "PARTITION_MODE")); this.append = JDBCUtils.safeGetBoolean(dbResult, "APPEND_MODE", DB2YesNo.Y.name()); this.volatileMode = JDBCUtils.safeGetString(dbResult, "VOLATILE"); this.compression = CommonUtils.valueOf(DB2TableCompressionMode.class, JDBCUtils.safeGetString(dbResult, "COMPRESSION")); this.accessMode = CommonUtils.valueOf(DB2TableAccessMode.class, 
JDBCUtils.safeGetString(dbResult, "ACCESS_MODE")); this.mdcClustered = JDBCUtils.safeGetBoolean(dbResult, "CLUSTERED", DB2YesNo.Y.name()); this.dropRule = CommonUtils.valueOf(DB2TableDropRule.class, JDBCUtils.safeGetString(dbResult, "DROPRULE")); this.card = JDBCUtils.safeGetLongNullable(dbResult, "CARD"); this.nPages = JDBCUtils.safeGetLongNullable(dbResult, "NPAGES"); this.fPages = JDBCUtils.safeGetLongNullable(dbResult, "FPAGES"); this.overFLow = JDBCUtils.safeGetLongNullable(dbResult, "OVERFLOW"); this.invalidateTime = JDBCUtils.safeGetTimestamp(dbResult, "INVALIDATE_TIME"); this.lastRegenTime = JDBCUtils.safeGetTimestamp(dbResult, "LAST_REGEN_TIME"); if (getDataSource().isAtLeastV9_5()) { this.alterTime = JDBCUtils.safeGetTimestamp(dbResult, "ALTER_TIME"); } if (getDataSource().isAtLeastV10_1()) { this.temporalType = CommonUtils.valueOf(DB2TableTemporalType.class, JDBCUtils.safeGetString(dbResult, "TEMPORALTYPE")); } String lockSizeString = JDBCUtils.safeGetString(dbResult, "LOCKSIZE"); if (CommonUtils.isNotEmpty(lockSizeString)) { this.lockSize = CommonUtils.valueOf(DB2TableLockSize.class, lockSizeString); } this.tablespace = JDBCUtils.safeGetString(dbResult, "TBSPACE"); this.indexTablespace = JDBCUtils.safeGetString(dbResult, "INDEX_TBSPACE"); this.longTablespace = JDBCUtils.safeGetString(dbResult, "LONG_TBSPACE"); this.partitionCache = new JDBCObjectSimpleCache<>(DB2TablePartition.class, C_PT, schema.getName(), getName()); this.periodCache = new JDBCObjectSimpleCache<>(DB2TablePeriod.class, C_PE, schema.getName(), getName()); } public DB2Table(DB2Schema schema, String name) { super(schema, name, false); this.type = DB2TableType.T; this.status = DB2TableStatus.N; } // ----------------- // Business Contract // ----------------- @Override public boolean isView() { return false; } @Override public JDBCStructCache<DB2Schema, DB2Table, DB2TableColumn> getCache() { return getContainer().getTableCache(); } @Override public DBSObject refreshObject(@NotNull 
DBRProgressMonitor monitor) throws DBException { getContainer().getConstraintCache().clearObjectCache(this); getContainer().getAssociationCache().clearObjectCache(this); getContainer().getReferenceCache().clearObjectCache(this); super.refreshObject(monitor); return getContainer().getTableCache().refreshObject(monitor, getContainer(), this); } @NotNull @Override public DBSObjectState getObjectState() { return status.getState(); } @Override public void refreshObjectState(@NotNull DBRProgressMonitor monitor) throws DBCException { } @Override public String getObjectDefinitionText(DBRProgressMonitor monitor, Map<String, Object> options) throws DBException { return DB2Utils.generateDDLforTable(monitor, LINE_SEPARATOR, getDataSource(), this); } // ----------------- // Associations // ----------------- @Association public Collection<DB2Trigger> getTriggers(DBRProgressMonitor monitor) throws DBException { return tableTriggerCache.getAllObjects(monitor, this); } @Association public Collection<DB2TablePartition> getPartitions(DBRProgressMonitor monitor) throws DBException { // TODO DF: beurk: Consequences of "Integrated cache" that can not be created in class def= NPE with managers if (partitionCache == null) { return null; } else { return partitionCache.getAllObjects(monitor, this); } } @Association public Collection<DB2TablePeriod> getPeriods(DBRProgressMonitor monitor) throws DBException { // TODO DF: beurk: Consequences of "Integrated cache" that can not be created in class def= NPE with managers if (periodCache == null) { return null; } else { return periodCache.getAllObjects(monitor, this); } } @Nullable @Override @Association public Collection<DB2TableUniqueKey> getConstraints(@NotNull DBRProgressMonitor monitor) throws DBException { return getContainer().getConstraintCache().getObjects(monitor, getContainer(), this); } public DB2TableUniqueKey getConstraint(DBRProgressMonitor monitor, String ukName) throws DBException { return 
getContainer().getConstraintCache().getObject(monitor, getContainer(), this, ukName); } @Override @Association public Collection<DB2TableForeignKey> getAssociations(@NotNull DBRProgressMonitor monitor) throws DBException { return getContainer().getAssociationCache().getObjects(monitor, getContainer(), this); } public DBSTableForeignKey getAssociation(DBRProgressMonitor monitor, String ukName) throws DBException { return getContainer().getAssociationCache().getObject(monitor, getContainer(), this, ukName); } @Override @Association public Collection<DB2TableReference> getReferences(@NotNull DBRProgressMonitor monitor) throws DBException { return getContainer().getReferenceCache().getObjects(monitor, getContainer(), this); } public DBSTableForeignKey getReference(DBRProgressMonitor monitor, String ukName) throws DBException { return getContainer().getReferenceCache().getObject(monitor, getContainer(), this, ukName); } @Association public Collection<DB2TableCheckConstraint> getCheckConstraints(DBRProgressMonitor monitor) throws DBException { return getContainer().getCheckCache().getObjects(monitor, getContainer(), this); } public DB2TableCheckConstraint getCheckConstraint(DBRProgressMonitor monitor, String ukName) throws DBException { return getContainer().getCheckCache().getObject(monitor, getContainer(), this, ukName); } // ----------------- // Properties // ----------------- @NotNull @Override @Property(viewable = true, editable = true, valueTransformer = DBObjectNameCaseTransformer.class, order = 1) public String getName() { return super.getName(); } @Property(viewable = true, editable = false, order = 3, category = DB2Constants.CAT_STATS) public Long getCard() { return card; } @Property(viewable = true, editable = false, order = 4) public DB2TableStatus getStatus() { return status; } @Property(viewable = true, editable = false, order = 5) public DB2TableType getType() { return type; } @Property(viewable = true, editable = true, order = 10, category = 
DB2Constants.CAT_TABLESPACE, listProvider = DB2TableTablespaceListProvider.class) public DB2Tablespace getTablespace(DBRProgressMonitor monitor) throws DBException { return DB2Tablespace.resolveTablespaceReference(monitor, getDataSource(), tablespace); } public void setTablespace(DB2Tablespace tablespace) { this.tablespace = tablespace; } @Property(viewable = false, editable = true, order = 11, category = DB2Constants.CAT_TABLESPACE, listProvider = DB2TableTablespaceListProvider.class) public DB2Tablespace getIndexTablespace(DBRProgressMonitor monitor) throws DBException { return DB2Tablespace.resolveTablespaceReference(monitor, getDataSource(), indexTablespace); } public void setIndexTablespace(DB2Tablespace indexTablespace) { this.indexTablespace = indexTablespace; } @Property(viewable = false, editable = true, order = 12, category = DB2Constants.CAT_TABLESPACE, listProvider = DB2TableTablespaceListProvider.class) public DB2Tablespace getLongTablespace(DBRProgressMonitor monitor) throws DBException { return DB2Tablespace.resolveTablespaceReference(monitor, getDataSource(), longTablespace); } public void setLongTablespace(DB2Tablespace longTablespace) { this.longTablespace = longTablespace; } @Property(viewable = false, editable = false, category = DB2Constants.CAT_STATS) public Timestamp getStatsTime() { return statsTime; } @Property(viewable = false, editable = false, category = DB2Constants.CAT_STATS) public Long getnPages() { return nPages; } @Property(viewable = false, editable = false, category = DB2Constants.CAT_STATS) public Long getfPages() { return fPages; } @Property(viewable = false, editable = false, category = DB2Constants.CAT_STATS) public Long getOverFLow() { return overFLow; } @Property(viewable = false, editable = false, order = 100) public Boolean getAppend() { return append; } @Property(viewable = false, editable = false, order = 101) public String getVolatileMode() { return volatileMode; } @Property(viewable = false, editable = false, order = 
104) public DB2TableLockSize getLockSize() { return lockSize; } @Property(viewable = false, editable = false, order = 105) public DB2TableCompressionMode getCompression() { return compression; } @Property(viewable = false, editable = false, order = 106) public DB2TableAccessMode getAccessMode() { return accessMode; } @Property(viewable = false, editable = false, order = 107) public Boolean getMdcClustered() { return mdcClustered; } @Property(viewable = false, editable = false, order = 108) public DB2TableDropRule getDropRule() { return dropRule; } @Property(viewable = false, editable = false, order = 109) public String getDataCapture() { return dataCapture; } @Property(viewable = false, editable = false, order = 110) public DB2TablePartitionMode getPartitionMode() { return partitionMode; } @Property(viewable = false, editable = false, order = 111) public String getConstChecked() { return constChecked; } @Property(viewable = false, editable = false, order = 120, category = DB2Constants.CAT_TEMPORAL) public DB2TableTemporalType getTemporalType() { return temporalType; } @Property(viewable = false, editable = false, order = 101, category = DB2Constants.CAT_DATETIME) public Timestamp getAlterTime() { return alterTime; } @Property(viewable = false, editable = false, order = 102, category = DB2Constants.CAT_DATETIME) public Timestamp getInvalidateTime() { return invalidateTime; } @Property(viewable = false, editable = false, order = 103, category = DB2Constants.CAT_DATETIME) public Timestamp getLastRegenTime() { return lastRegenTime; } @Override public DBDPseudoAttribute[] getPseudoAttributes() throws DBException { if (getDataSource().isAtLeastV9_5()) { return new DBDPseudoAttribute[] { DB2Constants.PSEUDO_ATTR_RID_BIT }; } else { return null; } } }
liuyuanyuan/dbeaver
plugins/org.jkiss.dbeaver.ext.db2/src/org/jkiss/dbeaver/ext/db2/model/DB2Table.java
Java
apache-2.0
16,982
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.jms; import javax.jms.Destination; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageListener; import javax.jms.Session; import org.apache.camel.AsyncCallback; import org.apache.camel.AsyncProcessor; import org.apache.camel.Exchange; import org.apache.camel.ExchangePattern; import org.apache.camel.Processor; import org.apache.camel.RollbackExchangeException; import org.apache.camel.RuntimeCamelException; import org.apache.camel.util.AsyncProcessorConverterHelper; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.jms.core.JmsOperations; import org.springframework.jms.core.MessageCreator; import org.springframework.jms.listener.SessionAwareMessageListener; import static org.apache.camel.util.ObjectHelper.wrapRuntimeCamelException; /** * A JMS {@link MessageListener} which can be used to delegate processing to a * Camel endpoint. 
* * Note that instance of this object has to be thread safe (reentrant) * * @version */ public class EndpointMessageListener implements SessionAwareMessageListener { private static final Logger LOG = LoggerFactory.getLogger(EndpointMessageListener.class); private final JmsEndpoint endpoint; private final AsyncProcessor processor; private JmsBinding binding; private boolean eagerLoadingOfProperties; private Object replyToDestination; private JmsOperations template; private boolean disableReplyTo; private boolean async; public EndpointMessageListener(JmsEndpoint endpoint, Processor processor) { this.endpoint = endpoint; this.processor = AsyncProcessorConverterHelper.convert(processor); } @Override public void onMessage(Message message, Session session) throws JMSException { LOG.trace("onMessage START"); LOG.debug("{} consumer received JMS message: {}", endpoint, message); boolean sendReply; RuntimeCamelException rce; try { Object replyDestination = getReplyToDestination(message); // we can only send back a reply if there was a reply destination configured // and disableReplyTo hasn't been explicit enabled sendReply = replyDestination != null && !disableReplyTo; // we should also not send back reply to ourself if this destination and replyDestination is the same Destination destination = JmsMessageHelper.getJMSDestination(message); if (destination != null && sendReply && !endpoint.isReplyToSameDestinationAllowed() && destination.equals(replyDestination)) { LOG.debug("JMSDestination and JMSReplyTo is the same, will skip sending a reply message to itself: {}", destination); sendReply = false; } final Exchange exchange = createExchange(message, session, replyDestination); if (eagerLoadingOfProperties) { exchange.getIn().getBody(); exchange.getIn().getHeaders(); } String correlationId = message.getJMSCorrelationID(); if (correlationId != null) { LOG.debug("Received Message has JMSCorrelationID [{}]", correlationId); } // process the exchange either asynchronously or 
synchronous LOG.trace("onMessage.process START"); AsyncCallback callback = new EndpointMessageListenerAsyncCallback(message, exchange, endpoint, sendReply, replyDestination); // async is by default false, which mean we by default will process the exchange synchronously // to keep backwards compatible, as well ensure this consumer will pickup messages in order // (eg to not consume the next message before the previous has been fully processed) // but if end user explicit configure consumerAsync=true, then we can process the message // asynchronously (unless endpoint has been configured synchronous, or we use transaction) boolean forceSync = endpoint.isSynchronous() || endpoint.isTransacted(); if (forceSync || !isAsync()) { // must process synchronous if transacted or configured to do so if (LOG.isTraceEnabled()) { LOG.trace("Processing exchange {} synchronously", exchange.getExchangeId()); } try { processor.process(exchange); } catch (Exception e) { exchange.setException(e); } finally { callback.done(true); } } else { // process asynchronous using the async routing engine if (LOG.isTraceEnabled()) { LOG.trace("Processing exchange {} asynchronously", exchange.getExchangeId()); } boolean sync = processor.process(exchange, callback); if (!sync) { // will be done async so return now return; } } // if we failed processed the exchange from the async callback task, then grab the exception rce = exchange.getException(RuntimeCamelException.class); } catch (Exception e) { rce = wrapRuntimeCamelException(e); } // an exception occurred so rethrow to trigger rollback on JMS listener // the JMS listener will use the error handler to handle the uncaught exception if (rce != null) { LOG.trace("onMessage END throwing exception: {}", rce.getMessage()); // Spring message listener container will handle uncaught exceptions // being thrown from this onMessage, and will us the ErrorHandler configured // on the JmsEndpoint to handle the exception throw rce; } LOG.trace("onMessage END"); } 
/** * Callback task that is performed when the exchange has been processed */ private final class EndpointMessageListenerAsyncCallback implements AsyncCallback { private final Message message; private final Exchange exchange; private final JmsEndpoint endpoint; private final boolean sendReply; private final Object replyDestination; private EndpointMessageListenerAsyncCallback(Message message, Exchange exchange, JmsEndpoint endpoint, boolean sendReply, Object replyDestination) { this.message = message; this.exchange = exchange; this.endpoint = endpoint; this.sendReply = sendReply; this.replyDestination = replyDestination; } @Override public void done(boolean doneSync) { LOG.trace("onMessage.process END"); // now we evaluate the processing of the exchange and determine if it was a success or failure // we also grab information from the exchange to be used for sending back a reply (if we are to do so) // so the following logic seems a bit complicated at first glance // if we send back a reply it can either be the message body or transferring a caused exception org.apache.camel.Message body = null; Exception cause = null; RuntimeCamelException rce = null; if (exchange.isFailed() || exchange.isRollbackOnly()) { if (exchange.isRollbackOnly()) { // rollback only so wrap an exception so we can rethrow the exception to cause rollback rce = wrapRuntimeCamelException(new RollbackExchangeException(exchange)); } else if (exchange.getException() != null) { // an exception occurred while processing if (endpoint.isTransferException()) { // send the exception as reply, so null body and set the exception as the cause body = null; cause = exchange.getException(); } else { // only throw exception if endpoint is not configured to transfer exceptions back to caller // do not send a reply but wrap and rethrow the exception rce = wrapRuntimeCamelException(exchange.getException()); } } else { org.apache.camel.Message msg = exchange.hasOut() ? 
exchange.getOut() : exchange.getIn(); if (msg.isFault()) { // a fault occurred while processing body = msg; cause = null; } } } else { // process OK so get the reply body if we are InOut and has a body // If the ppl don't want to send the message back, he should use the InOnly if (sendReply && exchange.getPattern().isOutCapable()) { if (exchange.hasOut()) { body = exchange.getOut(); } else { body = exchange.getIn(); } cause = null; } } // send back reply if there was no error and we are supposed to send back a reply if (rce == null && sendReply && (body != null || cause != null)) { LOG.trace("onMessage.sendReply START"); if (replyDestination instanceof Destination) { sendReply((Destination)replyDestination, message, exchange, body, cause); } else { sendReply((String)replyDestination, message, exchange, body, cause); } LOG.trace("onMessage.sendReply END"); } // if an exception occurred if (rce != null) { if (doneSync) { // we were done sync, so put exception on exchange, so we can grab it in the onMessage // method and rethrow it exchange.setException(rce); } else { // we were done async, so use the endpoint error handler if (endpoint.getErrorHandler() != null) { endpoint.getErrorHandler().handleError(rce); } } } } } public Exchange createExchange(Message message, Session session, Object replyDestination) { Exchange exchange = endpoint.createExchange(); JmsBinding binding = getBinding(); exchange.setProperty(Exchange.BINDING, binding); exchange.setIn(new JmsMessage(message, session, binding)); // lets set to an InOut if we have some kind of reply-to destination if (replyDestination != null && !disableReplyTo) { // only change pattern if not already out capable if (!exchange.getPattern().isOutCapable()) { exchange.setPattern(ExchangePattern.InOut); } } return exchange; } // Properties // ------------------------------------------------------------------------- public JmsBinding getBinding() { if (binding == null) { binding = endpoint.getBinding(); } return binding; } 
/** * Sets the binding used to convert from a Camel message to and from a JMS * message * * @param binding the binding to use */ public void setBinding(JmsBinding binding) { this.binding = binding; } public boolean isEagerLoadingOfProperties() { return eagerLoadingOfProperties; } public void setEagerLoadingOfProperties(boolean eagerLoadingOfProperties) { this.eagerLoadingOfProperties = eagerLoadingOfProperties; } public synchronized JmsOperations getTemplate() { if (template == null) { template = endpoint.createInOnlyTemplate(); } return template; } public void setTemplate(JmsOperations template) { this.template = template; } public boolean isDisableReplyTo() { return disableReplyTo; } /** * Allows the reply-to behaviour to be disabled */ public void setDisableReplyTo(boolean disableReplyTo) { this.disableReplyTo = disableReplyTo; } public Object getReplyToDestination() { return replyToDestination; } /** * Provides an explicit reply to destination which overrides * any incoming value of {@link Message#getJMSReplyTo()} * * @param replyToDestination the destination that should be used to send replies to * as either a String or {@link javax.jms.Destination} type. */ public void setReplyToDestination(Object replyToDestination) { this.replyToDestination = replyToDestination; } public boolean isAsync() { return async; } /** * Sets whether asynchronous routing is enabled. * <p/> * By default this is <tt>false</tt>. If configured as <tt>true</tt> then * this listener will process the {@link org.apache.camel.Exchange} asynchronous. */ public void setAsync(boolean async) { this.async = async; } // Implementation methods //------------------------------------------------------------------------- /** * Strategy to determine which correlation id to use among <tt>JMSMessageID</tt> and <tt>JMSCorrelationID</tt>. 
* * @param message the JMS message * @return the correlation id to use * @throws JMSException can be thrown */ protected String determineCorrelationId(final Message message) throws JMSException { final String messageId = message.getJMSMessageID(); final String correlationId = message.getJMSCorrelationID(); if (endpoint.getConfiguration().isUseMessageIDAsCorrelationID()) { return messageId; } else if (ObjectHelper.isEmpty(correlationId)) { // correlation id is empty so fallback to message id return messageId; } else { return correlationId; } } protected void sendReply(Destination replyDestination, final Message message, final Exchange exchange, final org.apache.camel.Message out, final Exception cause) { if (replyDestination == null) { LOG.debug("Cannot send reply message as there is no replyDestination for: {}", out); return; } getTemplate().send(replyDestination, new MessageCreator() { public Message createMessage(Session session) throws JMSException { Message reply = endpoint.getBinding().makeJmsMessage(exchange, out, session, cause); final String correlationID = determineCorrelationId(message); reply.setJMSCorrelationID(correlationID); if (LOG.isDebugEnabled()) { LOG.debug("{} sending reply JMS message [correlationId:{}]: {}", endpoint, correlationID, reply); } return reply; } }); } protected void sendReply(String replyDestination, final Message message, final Exchange exchange, final org.apache.camel.Message out, final Exception cause) { if (replyDestination == null) { LOG.debug("Cannot send reply message as there is no replyDestination for: {}", out); return; } getTemplate().send(replyDestination, new MessageCreator() { public Message createMessage(Session session) throws JMSException { Message reply = endpoint.getBinding().makeJmsMessage(exchange, out, session, cause); final String correlationID = determineCorrelationId(message); reply.setJMSCorrelationID(correlationID); if (LOG.isDebugEnabled()) { LOG.debug("{} sending reply JMS message [correlationId:{}]: 
{}", endpoint, correlationID, reply); } return reply; } }); } protected Object getReplyToDestination(Message message) throws JMSException { // lets send a response back if we can Object destination = getReplyToDestination(); if (destination == null) { destination = JmsMessageHelper.getJMSReplyTo(message); } return destination; } @Override public String toString() { return "EndpointMessageListener[" + endpoint + "]"; } }
onders86/camel
components/camel-jms/src/main/java/org/apache/camel/component/jms/EndpointMessageListener.java
Java
apache-2.0
17,602
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.core; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.transforms.Combine.CombineFn; import org.apache.beam.sdk.transforms.Combine.KeyedCombineFn; import org.apache.beam.sdk.transforms.CombineWithContext.KeyedCombineFnWithContext; import org.apache.beam.sdk.transforms.GroupByKey; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.util.AppliedCombineFn; import org.apache.beam.sdk.util.state.AccumulatorCombiningState; import org.apache.beam.sdk.util.state.BagState; import org.apache.beam.sdk.util.state.CombiningState; import org.apache.beam.sdk.util.state.MergingStateAccessor; import org.apache.beam.sdk.util.state.ReadableState; import org.apache.beam.sdk.util.state.StateAccessor; import org.apache.beam.sdk.util.state.StateMerging; import org.apache.beam.sdk.util.state.StateTag; import org.apache.beam.sdk.util.state.StateTags; /** * {@link ReduceFn} implementing the default reduction behaviors of {@link GroupByKey}. * * @param <K> The type of key being processed. * @param <InputT> The type of values associated with the key. * @param <OutputT> The output type that will be produced for each key. 
* @param <W> The type of windows this operates on. */ public abstract class SystemReduceFn<K, InputT, AccumT, OutputT, W extends BoundedWindow> extends ReduceFn<K, InputT, OutputT, W> { private static final String BUFFER_NAME = "buf"; /** * Create a factory that produces {@link SystemReduceFn} instances that that buffer all of the * input values in persistent state and produces an {@code Iterable<T>}. */ public static <K, T, W extends BoundedWindow> SystemReduceFn<K, T, Iterable<T>, Iterable<T>, W> buffering(final Coder<T> inputCoder) { final StateTag<Object, BagState<T>> bufferTag = StateTags.makeSystemTagInternal(StateTags.bag(BUFFER_NAME, inputCoder)); return new SystemReduceFn<K, T, Iterable<T>, Iterable<T>, W>(bufferTag) { @Override public void prefetchOnMerge(MergingStateAccessor<K, W> state) throws Exception { StateMerging.prefetchBags(state, bufferTag); } @Override public void onMerge(OnMergeContext c) throws Exception { StateMerging.mergeBags(c.state(), bufferTag); } }; } /** * Create a factory that produces {@link SystemReduceFn} instances that combine all of the input * values using a {@link CombineFn}. 
*/ public static <K, InputT, AccumT, OutputT, W extends BoundedWindow> SystemReduceFn<K, InputT, AccumT, OutputT, W> combining( final Coder<K> keyCoder, final AppliedCombineFn<K, InputT, AccumT, OutputT> combineFn) { final StateTag<K, AccumulatorCombiningState<InputT, AccumT, OutputT>> bufferTag; if (combineFn.getFn() instanceof KeyedCombineFnWithContext) { bufferTag = StateTags.makeSystemTagInternal( StateTags.<K, InputT, AccumT, OutputT>keyedCombiningValueWithContext( BUFFER_NAME, combineFn.getAccumulatorCoder(), (KeyedCombineFnWithContext<K, InputT, AccumT, OutputT>) combineFn.getFn())); } else { bufferTag = StateTags.makeSystemTagInternal( StateTags.<K, InputT, AccumT, OutputT>keyedCombiningValue( BUFFER_NAME, combineFn.getAccumulatorCoder(), (KeyedCombineFn<K, InputT, AccumT, OutputT>) combineFn.getFn())); } return new SystemReduceFn<K, InputT, AccumT, OutputT, W>(bufferTag) { @Override public void prefetchOnMerge(MergingStateAccessor<K, W> state) throws Exception { StateMerging.prefetchCombiningValues(state, bufferTag); } @Override public void onMerge(OnMergeContext c) throws Exception { StateMerging.mergeCombiningValues(c.state(), bufferTag); } }; } private StateTag<? super K, ? extends CombiningState<InputT, OutputT>> bufferTag; public SystemReduceFn( StateTag<? super K, ? extends CombiningState<InputT, OutputT>> bufferTag) { this.bufferTag = bufferTag; } @Override public void processValue(ProcessValueContext c) throws Exception { c.state().access(bufferTag).add(c.value()); } @Override public void prefetchOnTrigger(StateAccessor<K> state) { state.access(bufferTag).readLater(); } @Override public void onTrigger(OnTriggerContext c) throws Exception { c.output(c.state().access(bufferTag).read()); } @Override public void clearState(Context c) throws Exception { c.state().access(bufferTag).clear(); } @Override public ReadableState<Boolean> isEmpty(StateAccessor<K> state) { return state.access(bufferTag).isEmpty(); } }
yafengguo/Apache-beam
runners/core-java/src/main/java/org/apache/beam/runners/core/SystemReduceFn.java
Java
apache-2.0
5,486
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ // // This source code implements specifications defined by the Java // Community Process. In order to remain compliant with the specification // DO NOT add / change / or delete method signatures! // package javax.jms; /** * @version $Rev$ $Date$ */ public interface TopicConnection extends Connection { TopicSession createTopicSession(boolean transacted, int acknowledgeMode) throws JMSException; ConnectionConsumer createConnectionConsumer( Topic topic, String messageSelector, ServerSessionPool sessionPool, int maxMessages) throws JMSException; ConnectionConsumer createDurableConnectionConsumer( Topic topic, String subscriptionName, String messageSelector, ServerSessionPool sessionPool, int maxMessages) throws JMSException; }
salyh/geronimo-specs
geronimo-jms_1.1_spec/src/main/java/javax/jms/TopicConnection.java
Java
apache-2.0
1,656
//
// $Id: TruncatedLorentzian.cpp 2051 2010-06-15 18:39:13Z chambm $
//
//
// Original author: Darren Kessner <darren@proteowizard.org>
//
// Copyright 2006 Louis Warschaw Prostate Cancer Center
//   Cedars Sinai Medical Center, Los Angeles, California 90048
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//


#define PWIZ_SOURCE

#include "TruncatedLorentzian.hpp"
#include "pwiz/utility/misc/Std.hpp"

// Local shorthands: the imaginary unit and pi.
#define i_ (complex<double>(0,1))
#define pi_ M_PI


namespace pwiz {
namespace frequency {


// Private implementation (pimpl) for TruncatedLorentzian.
//
// Caches the function value and its first/second derivatives for a single
// (frequency, parameter-vector) pair; cacheLevel_ records how much of the
// cache is valid so repeated calls at the same point avoid recomputation.
// The parameter vector p is indexed by the enum constants AlphaR, AlphaI,
// Tau, F0 (declared in the header -- presumably real/imaginary amplitude,
// decay constant, and center frequency; verify against TruncatedLorentzian.hpp).
struct TruncatedLorentzian::Impl
{
    public:

    // T is the truncation time of the lineshape.
    Impl(double T)
    :   T_(T), cacheLevel_(-1),
        f_(0), alpha_(0,0), tau_(0), f0_(0), x_(0), L_(0),
        dLdx_(0), dxdt_(0), dxdf_(0), dLdt_(0), dLdf_(0),
        d2Ldx2_(0), d2xdt2_(0), d2Ldt2_(0), d2Ldf2_(0), d2Ldtdf_(0)
    {}

    // Function value alpha * L(f; p).
    complex<double> value(double f, const ublas::vector<double>& p);

    // Gradient with respect to the four parameters (AlphaR, AlphaI, Tau, F0).
    void d1(double f, const ublas::vector<double>& p, ublas::vector< complex<double> >& result);

    // Hessian with respect to the four parameters.
    void d2(double f, const ublas::vector<double>& p, ublas::matrix< complex<double> >& result);

    // Full width at half maximum for the given parameters.
    double fwhm(const ublas::vector<double>& p) const;

    private:

    double T_;
    int cacheLevel_; // (-1 == invalid)

    // function values (valid if cacheLevel >= 0)
    double f_;
    complex<double> alpha_;
    double tau_;
    double f0_;
    complex<double> x_;   // x = 1/tau + 2*pi*i*(f - f0)
    complex<double> L_;   // L = (1 - exp(-x*T)) / x

    // first derivatives (valid if cacheLevel >= 1)
    complex<double> dLdx_;
    complex<double> dxdt_;
    complex<double> dxdf_;
    complex<double> dLdt_;
    complex<double> dLdf_;

    // second derivatives (valid if cacheLevel >= 2)
    complex<double> d2Ldx2_;
    complex<double> d2xdt2_;
    complex<double> d2Ldt2_;
    complex<double> d2Ldf2_;
    complex<double> d2Ldtdf_;

    // Fills the cache up to the requested level for the point <f, p>.
    void calculate(double f, const ublas::vector<double>& p, int cacheLevel);
};


complex<double> TruncatedLorentzian::Impl::value(double f, const ublas::vector<double>& p)
{
    calculate(f, p, 0);
    return alpha_ * L_;
}


void TruncatedLorentzian::Impl::d1(double f, const ublas::vector<double>& p,
                                   ublas::vector< complex<double> >& result)
{
    calculate(f, p, 1);

    result.resize(4);
    result.clear();

    // d/dAlphaR = L, d/dAlphaI = i*L; tau and f0 derivatives pick up the
    // amplitude factor via the chain rule.
    result(0) = L_;
    result(1) = i_ * L_;
    result(2) = alpha_ * dLdt_;
    result(3) = alpha_ * dLdf_;
}


void TruncatedLorentzian::Impl::d2(double f, const ublas::vector<double>& p,
                                   ublas::matrix< complex<double> >& result)
{
    calculate(f, p, 2);

    result.resize(4,4);
    result.clear();

    // Symmetric 4x4 Hessian; the alpha block is zero because the function
    // is linear in alpha.
    result(0,0) = result(0,1) = result(1,0) = result(1,1) = 0;
    result(0,2) = result(2,0) = dLdt_;
    result(0,3) = result(3,0) = dLdf_;
    result(1,2) = result(2,1) = i_ * dLdt_;
    result(1,3) = result(3,1) = i_ * dLdf_;
    result(2,2) = alpha_ * d2Ldt2_;
    result(2,3) = result(3,2) = alpha_ * d2Ldtdf_;
    result(3,3) = alpha_ * d2Ldf2_;
}


double TruncatedLorentzian::Impl::fwhm(const ublas::vector<double>& p) const
{
    return sqrt(T_*T_+p(Tau)*p(Tau))/(T_*p(Tau));
}


void TruncatedLorentzian::Impl::calculate(double f, const ublas::vector<double>& p, int cacheLevel)
{
    // cache with key <f,p>
    if (f != f_ ||
        p(AlphaR) != alpha_.real() ||
        p(AlphaI) != alpha_.imag() ||
        p(Tau) != tau_ ||
        p(F0) != f0_)
    {
        // recache: reset everything (cacheLevel_ becomes -1), then store the
        // new evaluation point.
        *this = Impl(T_); // zero out everything except T_
        f_ = f;
        alpha_ = complex<double>(p(AlphaR), p(AlphaI));
        tau_ = p(Tau);
        f0_ = p(F0);
    }
    else
    {
        // cache hit
        //cout << "cache hit!\n";
    }

    // Each level builds on the previous one; skip any level already cached.

    if (cacheLevel>=0 && cacheLevel_<0)
    {
        x_ = 1/tau_ + 2*pi_*i_*(f_-f0_);
        L_ = (1.-exp(-x_*T_))/x_;
        cacheLevel_ = 0;
    }

    if (cacheLevel>=1 && cacheLevel_<1)
    {
        dLdx_ = ((T_*x_+1.)*exp(-x_*T_) - 1.) / (x_*x_);
        dxdt_ = -1/(tau_*tau_);
        dxdf_ = -2*pi_*i_;
        dLdt_ = dLdx_ * dxdt_;  // chain rule
        dLdf_ = dLdx_ * dxdf_;
        cacheLevel_ = 1;
    }

    if (cacheLevel>=2 && cacheLevel_<2)
    {
        d2Ldx2_ = (2. - (pow(T_*x_+1.,2)+1.)*exp(-x_*T_)) / pow(x_,3);
        d2xdt2_ = 2/pow(tau_,3);
        d2Ldt2_ = d2Ldx2_*pow(dxdt_,2) + dLdx_*d2xdt2_;
        d2Ldf2_ = d2Ldx2_*pow(dxdf_,2); // d2xdf2 == 0, so no second term
        d2Ldtdf_ = d2Ldx2_ * dxdt_ * dxdf_;
        cacheLevel_ = 2;
    }
}


PWIZ_API_DECL TruncatedLorentzian::TruncatedLorentzian(double T)
:   impl_(new Impl(T))
{}


PWIZ_API_DECL TruncatedLorentzian::~TruncatedLorentzian()
{} // this must be here to delete Impl properly


PWIZ_API_DECL
complex<double> TruncatedLorentzian::operator()(double f, const ublas::vector<double>& p) const
{
    return impl_->value(f, p);
}


PWIZ_API_DECL
ublas::vector< complex<double> > TruncatedLorentzian::dp(double f,
                                                         const ublas::vector<double>& p) const
{
    ublas::vector< complex<double> > result;
    impl_->d1(f, p, result);
    return result;
}


PWIZ_API_DECL
ublas::matrix< complex<double> > TruncatedLorentzian::dp2(double f,
                                                          const ublas::vector<double>& p) const
{
    ublas::matrix< complex<double> > result;
    impl_->d2(f, p, result);
    return result;
}


// Writes sampled values of the lineshape (over +/- 5 FWHM around F0, in steps
// of FWHM/100) to a text file, applying the given frequency shift and
// amplitude scale.  Columns: frequency, 0, real, imaginary, magnitude.
PWIZ_API_DECL
void TruncatedLorentzian::outputSamples(const string& filename,
                                        const ublas::vector<double>& p,
                                        double shift, double scale) const
{
    cout << "[TruncatedLorentzian] Writing file " << filename << endl;
    ofstream os(filename.c_str());
    if (!os)
    {
        cout << "[TruncatedLorentzian::outputSamples()] Unable to write to file " << filename << endl;
        return;
    }

    os.precision(8);

    double fwhm = impl_->fwhm(p);
    for (double f=p(F0)-5*fwhm; f<p(F0)+5*fwhm; f+=.01*fwhm)
    {
        complex<double> value = impl_->value(f, p);
        os << f+shift << " 0 " << value.real()*scale << ' ' << value.imag()*scale << ' ' << sqrt(norm(value))*scale << endl;
    }
}


} // namespace frequency
} // namespace pwiz
biospi/mzmlb
pwiz/pwiz/analysis/frequency/TruncatedLorentzian.cpp
C++
apache-2.0
6,475
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.indexing.common.task.batch.parallel;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.indexer.partitions.HashedPartitionsSpec;
import org.apache.druid.segment.TestHelper;
import org.junit.Before;
import org.junit.Test;

import java.util.Collections;

/**
 * Round-trip (Jackson) serialization test for {@code PartialHashSegmentMergeIngestionSpec}.
 * All fixture values come from {@code ParallelIndexTestingFactory}.
 */
public class PartialHashSegmentMergeIngestionSpecTest
{
  private static final ObjectMapper OBJECT_MAPPER = ParallelIndexTestingFactory.createObjectMapper();

  // A single partition location built from the testing-factory constants.
  private static final HashPartitionLocation HASH_PARTITION_LOCATION = new HashPartitionLocation(
      ParallelIndexTestingFactory.HOST,
      ParallelIndexTestingFactory.PORT,
      ParallelIndexTestingFactory.USE_HTTPS,
      ParallelIndexTestingFactory.SUBTASK_ID,
      ParallelIndexTestingFactory.INTERVAL,
      ParallelIndexTestingFactory.PARTITION_ID
  );

  private static final PartialHashSegmentMergeIOConfig IO_CONFIG =
      new PartialHashSegmentMergeIOConfig(Collections.singletonList(HASH_PARTITION_LOCATION));

  // Hash partitioning with a single shard and no partition dimensions.
  private static final HashedPartitionsSpec PARTITIONS_SPEC = new HashedPartitionsSpec(
      null,
      1,
      Collections.emptyList()
  );

  // The spec under test; rebuilt before every test method.
  private PartialHashSegmentMergeIngestionSpec target;

  @Before
  public void setup()
  {
    target = new PartialHashSegmentMergeIngestionSpec(
        ParallelIndexTestingFactory.createDataSchema(ParallelIndexTestingFactory.INPUT_INTERVALS),
        IO_CONFIG,
        new ParallelIndexTestingFactory.TuningConfigBuilder()
            .partitionsSpec(PARTITIONS_SPEC)
            .build()
    );
  }

  /** Serializing then deserializing the spec must yield an equal object. */
  @Test
  public void serializesDeserializes()
  {
    TestHelper.testSerializesDeserializes(OBJECT_MAPPER, target);
  }
}
Fokko/druid
indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/PartialHashSegmentMergeIngestionSpecTest.java
Java
apache-2.0
2,471
/*
 * Copyright 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.room.integration.testapp.vo;

import androidx.room.DatabaseView;

// View of join table with playlists with more than 1 song
@DatabaseView("SELECT * FROM PlaylistSongXRef WHERE mPlaylistId IN (SELECT mPlaylistId FROM"
        + " PlaylistSongXRef GROUP BY mPlaylistId HAVING COUNT(mSongId) > 1)")
public class PlaylistMultiSongXRefView {

    // Id of a playlist that contains more than one song.
    public final int mPlaylistId;

    // Id of one of that playlist's songs.
    public final int mSongId;

    public PlaylistMultiSongXRefView(int playlistId, int songId) {
        mPlaylistId = playlistId;
        mSongId = songId;
    }
}
AndroidX/androidx
room/integration-tests/testapp/src/androidTest/java/androidx/room/integration/testapp/vo/PlaylistMultiSongXRefView.java
Java
apache-2.0
1,172
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package demo.hw.server; import java.util.List; import javax.jws.WebMethod; import javax.jws.WebParam; import javax.jws.WebService; import javax.jws.soap.SOAPBinding; import demo.hw.server.data.Beverage; import demo.hw.server.data.Category; import demo.hw.server.data.Ingredient; @WebService(targetNamespace = "urn:cxf.apache.org:jsjaxws") @SOAPBinding(parameterStyle = SOAPBinding.ParameterStyle.BARE) public interface JavascriptExample { @WebMethod Beverage[] getBeveragesWithIngredient(@WebParam(name = "ingredient") Ingredient i); @WebMethod Beverage[] getBeverageWithIngredientCategory(@WebParam(name = "category") Category c); @WebMethod Beverage[] getBeverageWithIngredientCategories(@WebParam(name = "categories") List<Category> c); }
mulesoft-consulting/sumtotal-connector
lib/apache-cxf-2.7.5/samples/js_browser_client_java_first/src/main/java/demo/hw/server/JavascriptExample.java
Java
apache-2.0
1,576
/** * \file metric_space_search.hpp * * This library provides a class that implements a Dynamic Vantage-Point Tree (DVP-Tree) that * allows for O(logN) time nearest-neighbor queries in a metric-space. A DVP-tree is essentially * a generalization of a search tree which only requires the space to have a metric which * respects the triangular inequality. * * \author Sven Mikael Persson <mikael.s.persson@gmail.com> * \date April 2011 */ /* * Copyright 2011 Sven Mikael Persson * * THIS SOFTWARE IS DISTRIBUTED UNDER THE TERMS OF THE GNU GENERAL PUBLIC LICENSE v3 (GPLv3). * * This file is part of ReaK. * * ReaK is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * ReaK is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with ReaK (as LICENSE in the root folder). * If not, see <http://www.gnu.org/licenses/>. */ #ifndef REAK_METRIC_SPACE_SEARCH_HPP #define REAK_METRIC_SPACE_SEARCH_HPP #include <boost/bind.hpp> #include <boost/lambda/lambda.hpp> #include <boost/graph/graph_concepts.hpp> #include <boost/property_map/property_map.hpp> #include <boost/graph/adjacency_list.hpp> #include <boost/graph/topology.hpp> #include <boost/graph/properties.hpp> #include <map> #include <vector> #include "metric_space_concept.hpp" namespace boost { enum vertex_mu_distance_t { vertex_mu_distance }; BOOST_INSTALL_PROPERTY(vertex, mu_distance); }; /** * This class template is a callable class that can be used to choose the best * vantage-point to use out of a set of points. 
In theory, the best vantage-point * is the one which deviates the most from the other points in the set, however, * this functor will approximately select that point by searching for it only * in a random subset of the given range of points. * \tparam RandomNumberGenerator The random number generator type to be use to obtain the randomness needed in the search. */ template <typename RandomNumberGenerator = boost::minstd_rand> class random_best_vp_chooser { private: unsigned int m_divider; RandomNumberGenerator m_rand; public: /** * Default construction. * \param aDivider The divider of the set (determines the fraction of the points to search), default is 10. */ random_best_vp_chooser(unsigned int aDivider = 10) : m_divider(aDivider), m_rand(std::time(0)) { }; /** * Parametrized construction. * \param aRand The random number generator to use. * \param aDivider The divider of the set (determines the fraction of the points to search), default is 10. */ random_best_vp_chooser(const RandomNumberGenerator& aRand, unsigned int aDivider = 10) : m_divider(aDivider), m_rand(aRand) { }; /** * This call-operator will choose a vantage-point from within the given range. * \tparam RandomAccessIter A random-access iterator type that can describe the point-range. * \tparam Topology The topology type on which the points can reside, should model the MetricSpaceConcept. * \tparam PositionMap The property-map type that can map the vertex descriptors (which should be the value-type of the iterators) to a point (position). * \param aBegin The start of the range of vertices. * \param aEnd The end of the range of vertices (one element past the end). * \param aSpace The topology on which the points reside. * \param aPosition The property-map used to obtain the positions from the vertices. * \return A random-access iterator to the chosen vantage-point. 
*/ template <typename RandomAccessIter, typename Topology, typename PositionMap> RandomAccessIter operator() (RandomAccessIter aBegin, RandomAccessIter aEnd, const Topology& aSpace, PositionMap aPosition) { // BOOST_CONCEPT_ASSERT((MetricSpaceConcept<Topology>)); // not necessary - just need distance() function typedef typename metric_topology_traits<Topology>::point_type Point; RandomAccessIter best_pt = aEnd; double best_dev = -1; //std::cout << "Number of loops: " << (aEnd - aBegin) / m_divider + 1 << std::endl; int numLoops = (aEnd - aBegin) / m_divider + 1; for(unsigned int i=0; i < (aEnd - aBegin) / m_divider + 1;++i) { std::cout << i << " out of " << numLoops << std::endl; RandomAccessIter current_pt = aBegin + (m_rand() % (aEnd - aBegin)); double current_mean = 0.0; double current_dev = 0.0; Point current_vp = get(aPosition, *current_pt); for(unsigned int j=0; aBegin + j != aEnd; ++j) { double dist = aSpace.distance(current_vp, get(aPosition, *(aBegin + j))); current_mean = (current_mean * j + dist) / (j + 1); current_dev = (current_dev * j + dist * dist) / (j + 1); }; double current_var = current_dev - current_mean * current_mean; if(current_var < 0) current_var = 0.0; current_dev = std::sqrt(current_var); if(current_dev > best_dev) { best_pt = current_pt; best_dev = current_dev; }; }; return best_pt; }; }; /** * This class implements a Dynamic Vantage-Point Tree (DVP-Tree) that * allows for O(logN) time nearest-neighbor queries in a metric-space. A DVP-tree is essentially * a generalization of a search tree which only requires the space to have a metric which * respects the triangular inequality. * \tparam Key The key type for the tree, essentially the key value is the vertex descriptor type. * \tparam Topology The topology type on which the points can reside, should model the MetricSpaceConcept. * \tparam PositionMap The property-map type that can map the vertex descriptors (which should be the value-type of the iterators) to a point (position). 
* \tparam Arity The arity of the tree, e.g., 2 means a binary-tree. * \tparam VPChooser The functor type to use to choose the vantage-point out of a set of vertices. */ template <typename Key, typename Topology, typename PositionMap, unsigned int Arity = 2, typename VPChooser = random_best_vp_chooser<> > class dvp_tree { public: //BOOST_CONCEPT_ASSERT((MetricSpaceConcept<Topology>)); // This actually is not necessary, all that is required is a distance() function. typedef typename metric_topology_traits<Topology>::point_type point_type; typedef typename metric_topology_traits<Topology>::point_difference_type point_difference_type; typedef double distance_type; private: typedef boost::property< boost::vertex_index_t, Key, boost::property< boost::vertex_mu_distance_t, distance_type, boost::no_property > > vertex_properties; typedef boost::no_property edge_properties; typedef boost::adjacency_list< boost::listS, boost::listS, boost::bidirectionalS, vertex_properties, edge_properties, boost::listS> tree_indexer; typedef boost::adjacency_list_traits<boost::listS,boost::listS,boost::bidirectionalS,boost::listS>::vertex_descriptor vertex_type; typedef boost::adjacency_list_traits<boost::listS,boost::listS,boost::bidirectionalS,boost::listS>::edge_descriptor edge_type; typedef typename boost::graph_traits<tree_indexer>::out_edge_iterator out_edge_iter; typedef typename boost::graph_traits<tree_indexer>::in_edge_iterator in_edge_iter; tree_indexer m_tree; vertex_type m_root; typename boost::property_map< tree_indexer, boost::vertex_index_t >::type m_key; typename boost::property_map< tree_indexer, boost::vertex_mu_distance_t >::type m_mu; const Topology& m_space; PositionMap m_position; VPChooser m_vp_chooser; //non-copyable. 
dvp_tree(const dvp_tree<Key,Topology,PositionMap,Arity,VPChooser>&); dvp_tree<Key,Topology,PositionMap,Arity,VPChooser>& operator=(const dvp_tree<Key,Topology,PositionMap,Arity,VPChooser>&); static bool closer(std::map<Key,distance_type>& m, const Key& k1, const Key& k2) { return m[k1] < m[k2]; }; void construct_node(vertex_type aNode, typename std::vector<Key>::iterator aBegin, typename std::vector<Key>::iterator aEnd, std::map<Key,distance_type>& aDistMap) { typedef typename std::vector<Key>::iterator KeyIter; KeyIter vp_ind = m_vp_chooser(aBegin, aEnd, m_space, m_position); point_type vp_pt = get(m_position, *vp_ind); for(KeyIter it = aBegin; it != aEnd; ++it) { aDistMap[*it] = m_space.distance(vp_pt, get(m_position, *it)); }; std::sort(aBegin,aEnd,boost::bind(closer,aDistMap,_1,_2)); put(m_key, aNode, *aBegin); aBegin++; if((aEnd - aBegin) < static_cast<int>(Arity)) { for(KeyIter it = aBegin; it != aEnd; ++it) { vertex_type k = boost::add_vertex(m_tree); put(m_key, k, *it); put(m_mu, k, aDistMap[*it]); boost::add_edge(aNode,k,m_tree); }; } else { for(unsigned int i=Arity;i>=1;--i) { vertex_type k = boost::add_vertex(m_tree); boost::add_edge(aNode,k,m_tree); unsigned int num_children = (aEnd - aBegin) / i; put(m_mu, k, aDistMap[*(aBegin + (num_children-1))]); construct_node(k,aBegin,aBegin + num_children,aDistMap); aBegin = aBegin + num_children; }; }; }; void find_nearest_impl(const point_type& aPoint, distance_type& aSigma, vertex_type aNode, std::multimap<distance_type, Key>& aList, std::size_t K) const { using namespace boost; typedef typename std::multimap<distance_type, Key>::value_type ListType; Key current_key = get(m_key, aNode); distance_type current_dist = m_space.distance(aPoint, get(m_position, current_key)); if(current_dist < aSigma) { //is the vantage point within current search bound? Yes... aList.insert(ListType(current_dist, current_key)); //then add the vantage point to the NN list. 
if(aList.size() > K) { //are there too many nearest neighbors? Yes... aList.erase((++aList.rbegin()).base()); //delete last element to keep aList with K elements aSigma = aList.rbegin()->first; //distance of the last element is now the search bound aSigma. }; }; out_edge_iter ei,ei_end; //first, locate the partition in which aPoint is: if(out_degree(aNode,m_tree) == 0) return; for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) { if(current_dist < get(m_mu, target(*ei,m_tree))) break; }; if(ei == ei_end) --ei; //back-track if the end was reached. find_nearest_impl(aPoint,aSigma,target(*ei,m_tree),aList,K); //search in the most likely node. out_edge_iter ei_left = ei; out_edge_iter ei_right = ei; ++ei_right; tie(ei,ei_end) = out_edges(aNode,m_tree); //find the bounds again (start and end). while(true) { if(ei_left == ei) { out_edge_iter ei_rightleft = ei_right; --ei_rightleft; while((ei_right != ei_end) && (get(m_mu,target(*ei_rightleft,m_tree)) < current_dist + aSigma)) { find_nearest_impl(aPoint,aSigma,target(*ei_right,m_tree),aList,K); ++ei_rightleft; ++ei_right; }; break; } else if(ei_right == ei_end) { out_edge_iter ei_leftleft = ei_left; while((ei_left != ei) && (get(m_mu,target(*(--ei_leftleft),m_tree)) > current_dist - aSigma)) { find_nearest_impl(aPoint,aSigma,target(*ei_leftleft,m_tree),aList,K); --ei_left; }; break; } else { out_edge_iter ei_leftleft = ei_left; --ei_leftleft; distance_type d1 = get(m_mu,target(*ei_leftleft,m_tree)) + aSigma - current_dist; //greater than 0 if ei_leftleft should be searched. out_edge_iter ei_rightleft = ei_right; --ei_rightleft; distance_type d2 = get(m_mu,target(*ei_rightleft,m_tree)) - aSigma - current_dist; //less than 0 if ei_right should be searched. if(d1 + d2 > 0) { //this means that ei_leftleft's boundary is closer to aPoint. 
find_nearest_impl(aPoint,aSigma,target(*ei_leftleft,m_tree),aList,K); ei_left = ei_leftleft; } else { find_nearest_impl(aPoint,aSigma,target(*ei_right,m_tree),aList,K); ++ei_right; }; }; }; }; vertex_type get_leaf(const point_type& aPoint, vertex_type aNode) { using namespace boost; Key current_key = get(m_key, aNode); distance_type current_dist = m_space.distance(aPoint, get(m_position, current_key)); out_edge_iter ei,ei_end; //first, locate the partition in which aPoint is: if(out_degree(aNode,m_tree) == 0) return aNode; vertex_type result = aNode; for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) { result = target(*ei,m_tree); if(current_dist < get(m_mu, result)) break; }; return get_leaf(aPoint,result); }; vertex_type get_key(Key aVertex, const point_type& aPoint, vertex_type aNode) { using namespace boost; Key current_key = get(m_key, aNode); if(current_key == aVertex) return aNode; distance_type current_dist = m_space.distance(aPoint, get(m_position, current_key)); out_edge_iter ei,ei_end; //first, locate the partition in which aPoint is: if(out_degree(aNode,m_tree) == 0) return vertex_type(); vertex_type result = aNode; for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) { result = target(*ei,m_tree); if(current_dist < get(m_mu, result)) break; }; return get_key(aVertex,aPoint,result); }; void update_mu_upwards(const point_type& aPoint, vertex_type aNode) { using namespace boost; if(aNode == m_root) return; vertex_type parent = source(*(in_edges(aNode,m_tree).first),m_tree); distance_type dist = m_space.distance(aPoint,get(m_position,get(m_key,parent))); if(dist > get(m_mu,aNode)) put(m_mu,aNode,dist); update_mu_upwards(aPoint,parent); }; bool is_leaf_node(vertex_type aNode) { using namespace boost; if(out_degree(aNode,m_tree) == 0) return true; out_edge_iter ei,ei_end; for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) { if(out_degree(target(*ei,m_tree),m_tree) != 0) return false; }; return true; }; bool 
is_node_full(vertex_type aNode, int& depth_limit) { using namespace boost; if(depth_limit < 0) return false; if((out_degree(aNode,m_tree) == 0) && (depth_limit == 0)) return true; --depth_limit; if((out_degree(aNode,m_tree) == 0) || (out_degree(aNode,m_tree) < Arity)) return false; out_edge_iter ei,ei_end; if(is_leaf_node(aNode)) { if(depth_limit == 0) return true; else return false; }; for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) { int new_depth_limit = depth_limit; if(!is_node_full(target(*ei,m_tree),new_depth_limit)) { depth_limit = new_depth_limit; return false; }; }; return true; }; void collect_keys(std::vector<Key>& aList, vertex_type aNode) { using namespace boost; aList.push_back(get(m_key, aNode)); out_edge_iter ei,ei_end; for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) collect_keys(aList,target(*ei,m_tree)); }; template <typename Predicate> void collect_keys(std::vector<Key>& aList, vertex_type aNode, Predicate aShouldAdd) { using namespace boost; Key k = get(m_key, aNode); if(aShouldAdd(k)) aList.push_back(k); out_edge_iter ei,ei_end; for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) collect_keys(aList,target(*ei,m_tree)); }; void clear_node(vertex_type aNode) { using namespace boost; if(out_degree(aNode,m_tree) == 0) return; out_edge_iter ei,ei_end; std::vector<vertex_type> children; children.reserve(out_degree(aNode,m_tree)); for(tie(ei,ei_end) = out_edges(aNode,m_tree); ei != ei_end; ++ei) { vertex_type v = target(*ei,m_tree); clear_node(v); children.push_back(v); }; for(std::vector<vertex_type>::iterator it = children.begin(); it != children.end(); ++it) { remove_edge(aNode, *it,m_tree); remove_vertex(*it,m_tree); }; }; public: /** * Construct the DVP-tree from a graph, topology and property-map. * \tparam Graph The graph type on which the vertices are taken from, should model the boost::VertexListGraphConcept. * \param g The graph from which to take the vertices. 
 * \param aSpace The topology on which the positions of the vertices reside.
 * \param aPosition The property-map that can be used to obtain the positions of the vertices.
 * \param aVPChooser The vantage-point chooser functor (policy class).
 */
template <typename Graph>
dvp_tree(const Graph& g,
         const Topology& aSpace,
         PositionMap aPosition,
         VPChooser aVPChooser = VPChooser()) :
         m_tree(), m_root(),
         m_key(boost::get(boost::vertex_index,m_tree)),
         m_mu(boost::get(boost::vertex_mu_distance,m_tree)),
         m_space(aSpace),
         m_position(aPosition),
         m_vp_chooser(aVPChooser) {
  using namespace boost;
  // An empty input graph yields an empty tree (m_root stays default-constructed).
  if(num_vertices(g) == 0) return;
  m_root = add_vertex(m_tree);
  typename graph_traits<Graph>::vertex_iterator vi,vi_end;
  tie(vi,vi_end) = vertices(g);
  std::vector<Key> v(vi,vi_end); //Copy the list of vertices to random access memory.
  std::map<Key,distance_type> dist_map;
  // construct_node recursively partitions [begin,end) into the vantage-point tree.
  construct_node(m_root, v.begin(), v.end(), dist_map);
};

/**
 * Construct the DVP-tree from a range, topology and property-map.
 * \tparam ForwardIterator The forward-iterator type from which the vertices can be obtained.
 * \param aBegin The start of the range from which to take the vertices.
 * \param aEnd The end of the range from which to take the vertices (one-past-last).
 * \param aSpace The topology on which the positions of the vertices reside.
 * \param aPosition The property-map that can be used to obtain the positions of the vertices.
 * \param aVPChooser The vantage-point chooser functor (policy class).
 */
template <typename ForwardIterator>
dvp_tree(ForwardIterator aBegin,
         ForwardIterator aEnd,
         const Topology& aSpace,
         PositionMap aPosition,
         VPChooser aVPChooser = VPChooser()) :
         m_tree(), m_root(),
         m_key(boost::get(boost::vertex_index,m_tree)),
         m_mu(boost::get(boost::vertex_mu_distance,m_tree)),
         m_space(aSpace),
         m_position(aPosition),
         m_vp_chooser(aVPChooser) {
  using namespace boost;
  // An empty input range yields an empty tree.
  if(aBegin == aEnd) return;
  m_root = add_vertex(m_tree);
  std::vector<Key> v(aBegin,aEnd); //Copy the list of vertices to random access memory.
  std::map<Key,distance_type> dist_map;
  construct_node(m_root, v.begin(), v.end(), dist_map);
};

/**
 * Checks if the DVP-tree is empty.
 * \return True if the DVP-tree is empty.
 */
bool empty() const { return (boost::num_vertices(m_tree) == 0); };

/**
 * Returns the size of the DVP-tree (the number of vertices it contains).
 * \return The size of the DVP-tree (the number of vertices it contains).
 */
std::size_t size() const { return boost::num_vertices(m_tree); };

/**
 * Inserts a key-value (vertex).
 *
 * Strategy: find the leaf that would contain u; if the leaf (or its parent
 * sub-tree) has spare capacity, rebuild that sub-tree with u added; otherwise
 * walk up looking for a non-full ancestor, and as a last resort grow the tree
 * by hanging u directly off the found leaf.
 * \param u The vertex to be added to the DVP-tree.
 */
void insert(Key u) {
  using namespace boost;
  // First insertion into an empty tree: u becomes the root.
  if(num_vertices(m_tree) == 0) {
    m_root = add_vertex(m_tree);
    put(m_mu,m_root,0.0);
    put(m_key,m_root,u);
    return;
  };
  point_type u_pt = get(m_position, u);
  vertex_type u_realleaf = get_leaf(u_pt,m_root);
  if(u_realleaf == m_root) { //if the root is the leaf, it requires special attention since no parent exists.
    vertex_type u_node = add_vertex(m_tree);
    add_edge(u_realleaf,u_node,m_tree);
    put(m_key, u_node, u);
    // m_mu of a child records its distance to the parent's vantage point.
    put(m_mu, u_node, m_space.distance(u_pt,get(m_position,get(m_key,u_realleaf))));
    update_mu_upwards(u_pt,u_realleaf);
    return;
  };
  // Parent of the found leaf; this is the sub-tree we will try to rebuild.
  vertex_type u_leaf = source(*(in_edges(u_realleaf,m_tree).first),m_tree);
  if((out_degree(u_leaf,m_tree) < Arity) || (!is_leaf_node(u_leaf))) {
    // leaf node is not full of children, an additional child can be added
    //  (must be reconstructed to keep ordering, but this is a trivial operation O(Arity)).
    //OR
    // if leaf is not really a leaf, then it means that this sub-tree is definitely
    //  not balanced and not full either, then all the Keys ought to be collected
    //  and u_leaf ought to be reconstructed.
    std::vector<Key> key_list;
    collect_keys(key_list,u_leaf);
    key_list.push_back(u);
    clear_node(u_leaf);
    std::map<Key,distance_type> dist_map;
    construct_node(u_leaf, key_list.begin(), key_list.end(), dist_map);
    update_mu_upwards(u_pt,u_leaf);
  } else {
    //if it is a full-leaf, then this is a leaf node, and it is balanced but full,
    // we should then find a non-full parent.
    vertex_type p = u_leaf;
    int actual_depth_limit = 1;
    int last_depth_limit = actual_depth_limit;
    // Climb toward the root until a non-full ancestor is found (or the root is reached).
    while((p != m_root) && (is_node_full(p,last_depth_limit))) {
      p = source(*(in_edges(p,m_tree).first),m_tree);
      last_depth_limit = ++actual_depth_limit;
    };
    bool is_p_full = false;
    if(p == m_root)
      is_p_full = is_node_full(p,last_depth_limit);
    if((!is_p_full) && (last_depth_limit >= 0)) {
      //this means that we can add our key to the sub-tree of p and reconstruct from there.
      std::vector<Key> key_list;
      collect_keys(key_list,p);
      key_list.push_back(u);
      clear_node(p);
      std::map<Key,distance_type> dist_map;
      construct_node(p, key_list.begin(), key_list.end(), dist_map);
      update_mu_upwards(u_pt,p);
    } else {
      //this means that either the root node is full or there are branches of the tree
      // that are deeper than u_realleaf, and thus, in either case, u_realleaf should
      // be expanded.
      p = add_vertex(m_tree);
      add_edge(u_realleaf,p,m_tree);
      put(m_key, p, u);
      put(m_mu, p, m_space.distance(u_pt,get(m_position,get(m_key,u_realleaf))));
      update_mu_upwards(u_pt,u_realleaf);
    };
  };
};

/**
 * Inserts a range of key-values (vertices).
 * \tparam ForwardIterator A forward-iterator type that can be used to obtain the vertices.
 * \param aBegin The start of the range from which to take the vertices.
 * \param aEnd The end of the range from which to take the vertices (one-past-last).
 */
template <typename ForwardIterator>
void insert(ForwardIterator aBegin, ForwardIterator aEnd) {
  // NOTE(review): this early-returns when the tree is EMPTY, so inserting a range
  // into an empty tree silently does nothing — the single-element insert() handles
  // the empty case. Looks inverted; confirm intent against the original project.
  if(boost::num_vertices(m_tree) == 0) return;
  std::for_each(aBegin,aEnd,boost::bind(&dvp_tree<Key,Topology,PositionMap,Arity,VPChooser>::insert_and_find_nearest,this,_1));
  //TODO: There's got to be a better way to insert many elements (most likely a similar strategy to the erase multiple function).
};

/**
 * Erases the given vertex from the DVP-tree.
 *
 * Removal works by rebuilding the erased node's sub-tree from the keys of its
 * children (the erased key itself is not collected).
 * \param u The vertex to be removed from the DVP-tree.
 */
void erase(Key u) {
  using namespace boost;
  if(num_vertices(m_tree) == 0) return;
  point_type u_pt = get(m_position, u);
  // Locate the tree node whose stored key is u; get_key returns a
  // default-constructed vertex when u is not present.
  vertex_type u_node = get_key(u, u_pt, m_root);
  if(u_node == vertex_type()) return;
  out_edge_iter ei,ei_end;
  std::vector<Key> key_list;
  for(tie(ei,ei_end) = out_edges(u_node,m_tree); ei != ei_end; ++ei)
    collect_keys(key_list,target(*ei,m_tree));
  clear_node(u_node);
  std::map<Key,distance_type> dist_map;
  construct_node(u_node, key_list.begin(), key_list.end(), dist_map);
};

/**
 * Erases the given vertex-range from the DVP-tree.
 *
 * Collects, per affected node, the keys that survive the erasure; merges nodes
 * that are ancestors/descendants of each other; then reconstructs each affected
 * sub-tree once at the end.
 * \tparam ForwardIterator A forward-iterator type that can be used to obtain the vertices.
 * \param aBegin The start of the range from which to take the vertices to be erased.
 * \param aEnd The end of the range from which to take the vertices to be erased (one-past-last).
 */
template <typename ForwardIterator>
void erase(ForwardIterator aBegin, ForwardIterator aEnd) {
  using namespace boost;
  if(num_vertices(m_tree) == 0) return;
  typedef std::list< std::pair< vertex_type,std::vector<Key> > > key_listing;
  key_listing key_lists; //will hold a list of unique nodes and all their non-erased
  for(;aBegin != aEnd;++aBegin) {
    point_type u_pt = get(m_position, *aBegin);
    vertex_type u_node = get_key(*aBegin, u_pt, m_root);
    if(u_node == vertex_type()) continue;
    key_lists.push_back( std::make_pair(u_node, std::vector<Key>()) );
    // Fold any previously-recorded node that lies below u_node into u_node's entry.
    for(typename key_listing::iterator it = key_lists.begin(); ((it != key_lists.end()) && (it->first != u_node)); ++it) {
      Key v = get(m_key,it->first);
      if(get_key(v,get(m_position,v),u_node) == it->first) {
        //this means that v is a child of u_node, thus, it's key_list should be merged with u_nodes list.
        key_lists.back().second.insert(key_lists.back().second.end(),it->second.begin(),it->second.end());
        // NOTE(review): `v` is a Key here but is passed where a vertex descriptor is
        // expected (in_edges/remove_edge/remove_vertex). This only compiles/behaves
        // if Key and vertex_type are interchangeable — likely `it->first` was
        // intended; confirm against the original project.
        remove_edge(source(*(in_edges(v,m_tree).first),m_tree),v,m_tree);
        remove_vertex(v,m_tree);
        key_lists.erase(it--);
      };
    };
    out_edge_iter ei,ei_end;
    // Collect surviving keys from u_node's children, filtering out keys that are
    // themselves in the erased range [aBegin,aEnd).
    for(tie(ei,ei_end) = out_edges(u_node,m_tree); ei != ei_end; ++ei)
      collect_keys(key_lists.back().second,target(*ei,m_tree),bind<bool>(lambda::unlambda(_1 == std::find(aBegin,aEnd,_2)),aEnd,_1));
    clear_node(u_node);
  };
  //at this point, there should be a list of nodes to be reconstructed along with a list of keys that they should contain.
  for(typename key_listing::iterator it = key_lists.begin(); it != key_lists.end(); ++it) {
    std::map<Key,distance_type> dist_map;
    construct_node(it->first,it->second.begin(),it->second.end(),dist_map);
  };
};

/**
 * Clears the DVP-tree.
 */
void clear() {
  m_tree.clear();
  m_root = vertex_type();
};

/**
 * Finds the nearest neighbor to a given position.
 * \param aPoint The position from which to find the nearest-neighbor of.
 * \return The vertex in the DVP-tree that is closest to the given point.
*/ Key find_nearest(const point_type& aPoint) const { using namespace boost; if(num_vertices(m_tree) == 0) return Key(); std::multimap<distance_type,Key> m; distance_type sig = std::numeric_limits<distance_type>::infinity(); find_nearest_impl(aPoint,sig,m_root,m,1); return m.begin()->second; }; /** * Finds the K nearest-neighbors to a given position. * \param aPoint The position from which to find the nearest-neighbors. * \param aList Stores, as output, a map of all the K nearest-neighbors to aPoint, the map gives the distance and vertex pairs. * \param K The number of nearest-neighbors. */ void find_nearest(const point_type& aPoint, std::multimap<distance_type, Key>& aList, std::size_t K) const { using namespace boost; if(num_vertices(m_tree) == 0) return; aList.clear(); distance_type sig = std::numeric_limits<distance_type>::infinity(); find_nearest_impl(aPoint,sig,m_root,aList,K); }; /** * Finds the nearest-neighbors to a given position within a given range (radius). * \param aPoint The position from which to find the nearest-neighbors. * \param aList Stores, as output, a map of all the nearest-neighbors to aPoint, the map gives the distance and vertex pairs. * \param R The maximum distance value for the nearest-neighbors. */ void find_in_range(const point_type& aPoint, std::multimap<distance_type, Key>& aList, distance_type R) const { using namespace boost; if(num_vertices(m_tree) == 0) return; find_nearest_impl(aPoint,R,m_root,aList,num_vertices(m_tree)); }; /** * Takes a vertex, finds its nearest-neighbor and then it adds it to the DVP-tree. * \param aVertex The vertex to be added to the DVP-tree. * \return The nearest-neighbor of the given vertex. */ Key insert_and_find_nearest(Key aVertex) { Key result = find_nearest(get(m_position,aVertex)); insert(aVertex); }; /** * Takes a vertex, finds its K nearest-neighbors and then it adds it to the DVP-tree. * \param aVertex The vertex to be added to the DVP-tree. 
* \param aList Stores, as output, a map of all the K nearest-neighbors to aVertex, the map gives the distance and vertex pairs. * \param K The number of nearest-neighbors. */ void insert_and_find_nearest(Key aVertex, std::list<Key>& aList, std::size_t K) { find_nearest(get(m_position,aVertex),aList,K); insert(aVertex); }; /** * Takes a vertex, finds its nearest-neighbors within a range and then it adds it to the DVP-tree. * \param aVertex The vertex to be added to the DVP-tree. * \param aList Stores, as output, a map of all the nearest-neighbors to aVertex, the map gives the distance and vertex pairs. * \param R The maximum distance value for the nearest-neighbors. */ void insert_and_find_in_range(Key aVertex, std::list<Key>& aList, distance_type R) { find_in_range(get(m_position,aVertex),aList,R); insert(aVertex); }; }; template <typename Graph, typename DVPTree> struct multi_dvp_tree_search { typename std::map<Graph*, DVPTree*> graph_tree_map; multi_dvp_tree_search() : graph_tree_map() { }; template <typename Topology, typename PositionMap> typename boost::graph_traits<Graph>::vertex_descriptor operator()(const typename boost::property_traits<PositionMap>::value_type& p, Graph& g, const Topology& space, PositionMap position) { typename std::map<Graph*,DVPTree*>::iterator it = graph_tree_map.find(&g); if((it != graph_tree_map.end()) && (it->second)) return it->second->find_nearest(p); else return typename boost::graph_traits<Graph>::vertex_descriptor(); }; }; #endif
daviddoria/TempRepo
NearestNeighbor/metric_space_search.hpp
C++
apache-2.0
30,247
/*
 *
 * Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.wso2.carbon.apimgt.rest.api.publisher.v1.impl;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.ThrottlingPoliciesApiService;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ThrottlingPolicyDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ThrottlingPolicyListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.mappings.ThrottlingPolicyMappingUtil;
import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants;
import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import javax.ws.rs.core.Response;

/**
 * This is the service implementation class for Publisher throttling policies related operations.
 */
public class ThrottlingPoliciesApiServiceImpl implements ThrottlingPoliciesApiService {

    private static final Log log = LogFactory.getLog(ThrottlingPoliciesApiServiceImpl.class);

    /**
     * Retrieves all the Tiers.
     *
     * @param policyLevel tier level (api/application or resource)
     * @param limit       max number of objects returns
     * @param offset      starting index
     * @param ifNoneMatch If-None-Match header value
     * @return Response object containing resulted tiers
     */
    @Override
    public Response getAllThrottlingPolicies(String policyLevel, Integer limit, Integer offset, String ifNoneMatch,
            MessageContext messageContext) {
        //pre-processing
        //setting default limit and offset if they are null
        limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
        offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
        List<Tier> tierList = getThrottlingPolicyList(policyLevel);
        ThrottlingPolicyListDTO policyListDTO = ThrottlingPolicyMappingUtil
                .fromTierListToDTO(tierList, policyLevel, limit, offset);
        //todo: set total counts properly
        ThrottlingPolicyMappingUtil.setPaginationParams(policyListDTO, policyLevel, limit, offset, tierList.size());
        return Response.ok().entity(policyListDTO).build();
    }

    /**
     * Returns the matched throttling policy to the given policy name.
     *
     * @param policyName  name of the throttling policy
     * @param policyLevel throttling policy level (subscription or api)
     * @param ifNoneMatch If-None-Match header value
     * @return ThrottlingPolicyDTO matched to the given throttling policy name
     */
    @Override
    public Response getThrottlingPolicyByName(String policyName, String policyLevel, String ifNoneMatch,
            MessageContext messageContext) {
        try {
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            ThrottlingPolicyDTO.PolicyLevelEnum policyLevelEnum;
            Tier foundTier = null;

            // NOTE(review): handleBadRequest presumably throws, otherwise execution
            // would continue with a blank policyLevel — confirm its contract.
            if (StringUtils.isBlank(policyLevel)) {
                RestApiUtil.handleBadRequest("policyLevel cannot be empty", log);
            }

            //retrieves the tier based on the given tier-level
            if (ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION.toString().equals(policyLevel)) {
                foundTier = APIUtil.getTierFromCache(policyName, tenantDomain);
                policyLevelEnum = ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION;
            } else if (ThrottlingPolicyDTO.PolicyLevelEnum.API.toString().equals(policyLevel)) {
                Map<String, Tier> resourceTiersMap =
                        APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
                policyLevelEnum = ThrottlingPolicyDTO.PolicyLevelEnum.API;
                if (resourceTiersMap != null) {
                    foundTier = RestApiUtil.findTier(resourceTiersMap.values(), policyName);
                }
            } else {
                // Unknown policy level: report 404 with the set of accepted values.
                RestApiUtil.handleResourceNotFoundError(
                        "policyLevel should be one of " +
                                Arrays.toString(ThrottlingPolicyDTO.PolicyLevelEnum.values()), log);
                return null;
            }

            //returns if the tier is found, otherwise send 404
            if (foundTier != null) {
                return Response.ok()
                        .entity(ThrottlingPolicyMappingUtil.fromTierToDTO(foundTier, policyLevelEnum.toString()))
                        .build();
            } else {
                RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_THROTTLING_POLICY, policyName,
                        log);
            }
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving throttling policies";
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        return null;
    }

    /**
     * Returns the throttling policies which belongs to the given policy level.
     *
     * @param policyLevel policy level (one of ThrottlingPolicyDTO.PolicyLevelEnum)
     * @return list of throttling policies
     */
    public List<Tier> getThrottlingPolicyList(String policyLevel) {
        try {
            List<Tier> tierList = new ArrayList<>();
            String tenantDomain = RestApiUtil.getLoggedInUserTenantDomain();
            if (StringUtils.isBlank(policyLevel)) {
                RestApiUtil.handleBadRequest("policyLevel cannot be empty", log);
            }
            //retrieves the tier based on the given tier-level
            // NOTE(review): SUBSCRIPTION level maps to TIER_API_TYPE and API level maps
            // to TIER_RESOURCE_TYPE — the naming looks crossed but matches the lookup
            // in getThrottlingPolicyByName above; confirm against APIConstants semantics.
            if (ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION.toString().equals(policyLevel)) {
                Map<String, Tier> apiTiersMap = APIUtil.getTiers(APIConstants.TIER_API_TYPE, tenantDomain);
                if (apiTiersMap != null) {
                    tierList.addAll(apiTiersMap.values());
                }
            } else if (ThrottlingPolicyDTO.PolicyLevelEnum.API.toString().equals(policyLevel)) {
                Map<String, Tier> resourceTiersMap =
                        APIUtil.getTiers(APIConstants.TIER_RESOURCE_TYPE, tenantDomain);
                if (resourceTiersMap != null) {
                    tierList.addAll(resourceTiersMap.values());
                }
            } else {
                RestApiUtil.handleResourceNotFoundError(
                        "policyLevel should be one of " +
                                Arrays.toString(ThrottlingPolicyDTO.PolicyLevelEnum.values()), log);
            }
            return tierList;
        } catch (APIManagementException e) {
            String errorMessage = "Error while retrieving tiers";
            RestApiUtil.handleInternalServerError(errorMessage, e, log);
        }
        // Reached only if handleInternalServerError does not throw.
        return null;
    }
}
pubudu538/carbon-apimgt
components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/impl/ThrottlingPoliciesApiServiceImpl.java
Java
apache-2.0
7,570
//////////////////////////////////////////////////////////////////////////////// /// DISCLAIMER /// /// Copyright 2021 ArangoDB GmbH, Cologne, Germany /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and /// limitations under the License. /// /// Copyright holder is ArangoDB GmbH, Cologne, Germany /// /// @author Andrey Abramov //////////////////////////////////////////////////////////////////////////////// #ifndef IRESEARCH_HASH_CONTAINER_UTILS #define IRESEARCH_HASH_CONTAINER_UTILS #include <absl/container/flat_hash_set.h> #include "hash_utils.hpp" namespace iresearch { //////////////////////////////////////////////////////////////////////////////// /// @brief first - hash value, second - reference //////////////////////////////////////////////////////////////////////////////// template<typename T> using value_ref_t = std::pair<size_t, T>; //////////////////////////////////////////////////////////////////////////////// /// @struct transparent hash for value_ref_t //////////////////////////////////////////////////////////////////////////////// class value_ref_hash { public: using is_transparent = void; template<typename T> size_t operator()(const value_ref_t<T>& value) const noexcept { return value.first; } template<typename Char> size_t operator()(const hashed_basic_string_ref<Char>& value) const noexcept { return value.hash(); } }; //////////////////////////////////////////////////////////////////////////////// /// @struct transparent equality comparator for value_ref_t 
//////////////////////////////////////////////////////////////////////////////// template<typename T> struct value_ref_eq { using is_transparent = void; using self_t = value_ref_eq<T>; using ref_t = value_ref_t<T>; using value_t = typename ref_t::second_type; bool operator()(const ref_t& lhs, const ref_t& rhs) const noexcept { return lhs.second == rhs.second; } }; //////////////////////////////////////////////////////////////////////////////// /// @brief Abseil hash containers behave in a way that in presence of removals /// rehash may still happen even if enough space was allocated //////////////////////////////////////////////////////////////////////////////// template<typename Eq> using flat_hash_set = absl::flat_hash_set<typename Eq::ref_t, value_ref_hash, Eq>; } #endif// IRESEARCH_HASH_CONTAINER_UTILS
wiltonlazary/arangodb
3rdParty/iresearch/core/utils/hash_set_utils.hpp
C++
apache-2.0
2,858
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.bookkeeper.proto;

import org.apache.bookkeeper.proto.BookkeeperInternalCallbacks.GenericCallback;

/**
 * An interface to manage channel pooling for bookie client.
 */
interface PerChannelBookieClientPool {

    /**
     * Initialize the pool. The implementation should not block.
     * (NOTE: the method name retains its historical misspelling; renaming it
     * would break every implementor of this interface.)
     */
    void intialize();

    /**
     * Obtain a channel from channel pool to execute operations.
     *
     * @param callback
     *          callback to return channel from channel pool.
     */
    void obtain(GenericCallback<PerChannelBookieClient> callback);

    /**
     * Disconnect the connections in the pool.
     *
     * @param wait
     *          whether need to wait until pool disconnected.
     */
    void disconnect(boolean wait);

    /**
     * Close the pool.
     *
     * @param wait
     *          whether need to wait until pool closed.
     */
    void close(boolean wait);
}
twitter/bookkeeper
bookkeeper-server/src/main/java/org/apache/bookkeeper/proto/PerChannelBookieClientPool.java
Java
apache-2.0
1,732
#!/usr/bin/env python # Copyright 2012 Google Inc. All Rights Reserved. """AFF4 object representing client stats.""" from grr.lib import aff4 from grr.lib import rdfvalue from grr.lib.aff4_objects import standard class ClientStats(standard.VFSDirectory): """A container for all client statistics.""" class SchemaCls(standard.VFSDirectory.SchemaCls): STATS = aff4.Attribute("aff4:stats", rdfvalue.ClientStats, "Client Stats.", "Client stats")
MiniSEC/GRR_clone
lib/aff4_objects/client_stats.py
Python
apache-2.0
482
package org.apache.commons.ognl;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.Enumeration;
import java.util.Map;

/**
 * Implementation of PropertyAccessor that provides "property" reference to "nextElement" (aliases to "next" also) and
 * "hasMoreElements" (also aliased to "hasNext").
 */
public class EnumerationPropertyAccessor
    extends ObjectPropertyAccessor
    implements PropertyAccessor // This is here to make javadoc show this class as an implementor
{
    /**
     * Resolves the pseudo-properties "next"/"nextElement" and "hasNext"/"hasMoreElements"
     * on an Enumeration target; any other name is delegated to the superclass.
     *
     * @param context the OGNL evaluation context
     * @param target  the Enumeration being accessed (type checked by the invoker)
     * @param name    the property name being read
     * @return the next element, the has-more flag, or the superclass result
     * @throws OgnlException if the superclass lookup fails
     */
    @Override
    public Object getProperty( Map<String, Object> context, Object target, Object name )
        throws OgnlException
    {
        Object result;
        Enumeration<?> e = (Enumeration<?>) target; // check performed by the invoker

        // Flattened the original nested else { if } into an else-if chain; behavior unchanged.
        if ( "next".equals( name ) || "nextElement".equals( name ) )
        {
            // NOTE: advances the enumeration — reading this "property" is not idempotent.
            result = e.nextElement();
        }
        else if ( "hasNext".equals( name ) || "hasMoreElements".equals( name ) )
        {
            // Boolean.valueOf replaces the original TRUE/FALSE ternary (same cached instances).
            result = Boolean.valueOf( e.hasMoreElements() );
        }
        else
        {
            result = super.getProperty( context, target, name );
        }
        return result;
    }

    /**
     * Enumerations are read-only; setting any property is rejected.
     *
     * @throws IllegalArgumentException always
     */
    @Override
    public void setProperty( Map<String, Object> context, Object target, Object name, Object value )
        throws OgnlException
    {
        throw new IllegalArgumentException( "can't set property " + name + " on Enumeration" );
    }
}
apache/commons-ognl
src/main/java/org/apache/commons/ognl/EnumerationPropertyAccessor.java
Java
apache-2.0
2,456
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

// NOTE: generated AWS SDK enum mapper — edits here are normally regenerated away.

#include <aws/kms/model/AlgorithmSpec.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>

using namespace Aws::Utils;


namespace Aws
{
  namespace KMS
  {
    namespace Model
    {
      namespace AlgorithmSpecMapper
      {

        // Hashes of the wire-format names, precomputed once at static-init time.
        static const int RSAES_PKCS1_V1_5_HASH = HashingUtils::HashString("RSAES_PKCS1_V1_5");
        static const int RSAES_OAEP_SHA_1_HASH = HashingUtils::HashString("RSAES_OAEP_SHA_1");
        static const int RSAES_OAEP_SHA_256_HASH = HashingUtils::HashString("RSAES_OAEP_SHA_256");


        // Parses a wire-format name into the enum; unknown names are stashed in the
        // overflow container so the raw string can be recovered later.
        AlgorithmSpec GetAlgorithmSpecForName(const Aws::String& name)
        {
          int hashCode = HashingUtils::HashString(name.c_str());
          if (hashCode == RSAES_PKCS1_V1_5_HASH)
          {
            return AlgorithmSpec::RSAES_PKCS1_V1_5;
          }
          else if (hashCode == RSAES_OAEP_SHA_1_HASH)
          {
            return AlgorithmSpec::RSAES_OAEP_SHA_1;
          }
          else if (hashCode == RSAES_OAEP_SHA_256_HASH)
          {
            return AlgorithmSpec::RSAES_OAEP_SHA_256;
          }
          EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
          if(overflowContainer)
          {
            overflowContainer->StoreOverflow(hashCode, name);
            // Unknown value: the hash itself doubles as an out-of-range enum value.
            return static_cast<AlgorithmSpec>(hashCode);
          }

          return AlgorithmSpec::NOT_SET;
        }

        // Inverse mapping; out-of-range values are looked up in the overflow container.
        Aws::String GetNameForAlgorithmSpec(AlgorithmSpec enumValue)
        {
          switch(enumValue)
          {
          case AlgorithmSpec::RSAES_PKCS1_V1_5:
            return "RSAES_PKCS1_V1_5";
          case AlgorithmSpec::RSAES_OAEP_SHA_1:
            return "RSAES_OAEP_SHA_1";
          case AlgorithmSpec::RSAES_OAEP_SHA_256:
            return "RSAES_OAEP_SHA_256";
          default:
            EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
            if(overflowContainer)
            {
              return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
            }

            return {};
          }
        }

      } // namespace AlgorithmSpecMapper
    } // namespace Model
  } // namespace KMS
} // namespace Aws
awslabs/aws-sdk-cpp
aws-cpp-sdk-kms/source/model/AlgorithmSpec.cpp
C++
apache-2.0
2,370
/******************************************************************************
 * $Id: gdaldither.cpp 33715 2016-03-13 08:52:06Z goatbar $
 *
 * Project:  CIETMap Phase 2
 * Purpose:  Convert RGB (24bit) to a pseudo-colored approximation using
 *           Floyd-Steinberg dithering (error diffusion).
 * Author:   Frank Warmerdam, warmerdam@pobox.com
 *
 ******************************************************************************
 * Copyright (c) 2001, Frank Warmerdam
 * Copyright (c) 2007, Even Rouault <even dot rouault at mines-paris dot org>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 ******************************************************************************
 *
 * Notes:
 *
 * [1] Floyd-Steinberg dither:
 *  I should point out that the actual fractions we used were, assuming
 *  you are at X, moving left to right:
 *
 *          X    7/16
 *   3/16  5/16  1/16
 *
 *  Note that the error goes to four neighbors, not three.  I think this
 *  will probably do better (at least for black and white) than the
 *  3/8-3/8-1/4 distribution, at the cost of greater processing.  I have
 *  seen the 3/8-3/8-1/4 distribution described as "our" algorithm before,
 *  but I have no idea who the credit really belongs to.
 *  --
 *  Lou Steinberg
 *
 */

#include "gdal_priv.h"
#include "gdal_alg.h"
#include "gdal_alg_priv.h"

// SSE2 is assumed available on all x86-64 targets.
#if defined(__x86_64) || defined(_M_X64)
#define USE_SSE2
#endif

#ifdef USE_SSE2
#include <emmintrin.h>
#define CAST_PCT(x) ((GByte*)x)
// Bump a pointer forward to the next 16-byte boundary (required for aligned SSE loads).
#define ALIGN_INT_ARRAY_ON_16_BYTE(x) ( (((GPtrDiff_t)(x) % 16) != 0 ) ? (int*)((GByte*)(x) + 16 - ((GPtrDiff_t)(x) % 16)) : (x) )
#else
#define CAST_PCT(x) x
#endif

// Pack 8-bit R, G, B into a single 24-bit color code (R in the low byte).
#define MAKE_COLOR_CODE(r,g,b) ((r)|((g)<<8)|((b)<<16))

CPL_CVSID("$Id: gdaldither.cpp 33715 2016-03-13 08:52:06Z goatbar $");

static void FindNearestColor( int nColors, int *panPCT,
                              GByte *pabyColorMap, int nCLevels );
static int FindNearestColor( int nColors, int *panPCT,
                             int nRedValue, int nGreenValue, int nBlueValue );

/* Structure for a hashmap from a color code to a color index of the color table */
/* NOTE: if changing the size of this structure, edit */
/* MEDIAN_CUT_AND_DITHER_BUFFER_SIZE_65536 in gdal_alg_priv.h and take into account HashHistogram in gdalmediancut.cpp */
typedef struct
{
    // Up to three (color code, palette index) entries share one hash bucket.
    GUInt32 nColorCode;
    GUInt32 nColorCode2;
    GUInt32 nColorCode3;
    GByte   nIndex;
    GByte   nIndex2;
    GByte   nIndex3;
    GByte   nPadding;
} ColorIndex;

/************************************************************************/
/*                         GDALDitherRGB2PCT()                          */
/************************************************************************/

/**
 * 24bit to 8bit conversion with dithering.
 *
 * This functions utilizes Floyd-Steinberg dithering in the process of
 * converting a 24bit RGB image into a pseudocolored 8bit image using a
 * provided color table.
 *
 * The red, green and blue input bands do not necessarily need to come
 * from the same file, but they must be the same width and height.  They will
They will * be clipped to 8bit during reading, so non-eight bit bands are generally * inappropriate. Likewise the hTarget band will be written with 8bit values * and must match the width and height of the source bands. * * The color table cannot have more than 256 entries. * * @param hRed Red input band. * @param hGreen Green input band. * @param hBlue Blue input band. * @param hTarget Output band. * @param hColorTable the color table to use with the output band. * @param pfnProgress callback for reporting algorithm progress matching the * GDALProgressFunc() semantics. May be NULL. * @param pProgressArg callback argument passed to pfnProgress. * * @return CE_None on success or CE_Failure if an error occurs. */ int CPL_STDCALL GDALDitherRGB2PCT( GDALRasterBandH hRed, GDALRasterBandH hGreen, GDALRasterBandH hBlue, GDALRasterBandH hTarget, GDALColorTableH hColorTable, GDALProgressFunc pfnProgress, void * pProgressArg ) { return GDALDitherRGB2PCTInternal( hRed, hGreen, hBlue, hTarget, hColorTable, 5, NULL, TRUE, pfnProgress, pProgressArg ); } int GDALDitherRGB2PCTInternal( GDALRasterBandH hRed, GDALRasterBandH hGreen, GDALRasterBandH hBlue, GDALRasterBandH hTarget, GDALColorTableH hColorTable, int nBits, GInt16* pasDynamicColorMap, /* NULL or at least 256 * 256 * 256 * sizeof(GInt16) bytes */ int bDither, GDALProgressFunc pfnProgress, void * pProgressArg ) { VALIDATE_POINTER1( hRed, "GDALDitherRGB2PCT", CE_Failure ); VALIDATE_POINTER1( hGreen, "GDALDitherRGB2PCT", CE_Failure ); VALIDATE_POINTER1( hBlue, "GDALDitherRGB2PCT", CE_Failure ); VALIDATE_POINTER1( hTarget, "GDALDitherRGB2PCT", CE_Failure ); VALIDATE_POINTER1( hColorTable, "GDALDitherRGB2PCT", CE_Failure ); int nXSize, nYSize; CPLErr err = CE_None; /* -------------------------------------------------------------------- */ /* Validate parameters. 
*/ /* -------------------------------------------------------------------- */ nXSize = GDALGetRasterBandXSize( hRed ); nYSize = GDALGetRasterBandYSize( hRed ); if( GDALGetRasterBandXSize( hGreen ) != nXSize || GDALGetRasterBandYSize( hGreen ) != nYSize || GDALGetRasterBandXSize( hBlue ) != nXSize || GDALGetRasterBandYSize( hBlue ) != nYSize ) { CPLError( CE_Failure, CPLE_IllegalArg, "Green or blue band doesn't match size of red band.\n" ); return CE_Failure; } if( GDALGetRasterBandXSize( hTarget ) != nXSize || GDALGetRasterBandYSize( hTarget ) != nYSize ) { CPLError( CE_Failure, CPLE_IllegalArg, "GDALDitherRGB2PCT(): " "Target band doesn't match size of source bands.\n" ); return CE_Failure; } if( pfnProgress == NULL ) pfnProgress = GDALDummyProgress; /* -------------------------------------------------------------------- */ /* Setup more direct colormap. */ /* -------------------------------------------------------------------- */ int nColors, iColor; #ifdef USE_SSE2 int anPCTUnaligned[256+4]; /* 4 for alignment on 16-byte boundary */ int* anPCT = ALIGN_INT_ARRAY_ON_16_BYTE(anPCTUnaligned); #else int anPCT[256*4]; #endif nColors = GDALGetColorEntryCount( hColorTable ); if (nColors == 0 ) { CPLError( CE_Failure, CPLE_IllegalArg, "GDALDitherRGB2PCT(): " "Color table must not be empty.\n" ); return CE_Failure; } else if (nColors > 256) { CPLError( CE_Failure, CPLE_IllegalArg, "GDALDitherRGB2PCT(): " "Color table cannot have more than 256 entries.\n" ); return CE_Failure; } iColor = 0; do { GDALColorEntry sEntry; GDALGetColorEntryAsRGB( hColorTable, iColor, &sEntry ); CAST_PCT(anPCT)[4*iColor+0] = static_cast<GByte>(sEntry.c1); CAST_PCT(anPCT)[4*iColor+1] = static_cast<GByte>(sEntry.c2); CAST_PCT(anPCT)[4*iColor+2] = static_cast<GByte>(sEntry.c3); CAST_PCT(anPCT)[4*iColor+3] = 0; iColor ++; } while( iColor < nColors ); #ifdef USE_SSE2 /* Pad to multiple of 8 colors */ int nColorsMod8 = nColors % 8; if( nColorsMod8 ) { for( iColor = 0; iColor < 8 - nColorsMod8; iColor 
++) { anPCT[nColors+iColor] = anPCT[nColors-1]; } } #endif /* -------------------------------------------------------------------- */ /* Setup various variables. */ /* -------------------------------------------------------------------- */ GByte *pabyRed, *pabyGreen, *pabyBlue, *pabyIndex; GByte *pabyColorMap = NULL; int *panError; int nCLevels = 1 << nBits; ColorIndex* psColorIndexMap = NULL; pabyRed = (GByte *) VSI_MALLOC_VERBOSE(nXSize); pabyGreen = (GByte *) VSI_MALLOC_VERBOSE(nXSize); pabyBlue = (GByte *) VSI_MALLOC_VERBOSE(nXSize); pabyIndex = (GByte *) VSI_MALLOC_VERBOSE(nXSize); panError = (int *) VSI_CALLOC_VERBOSE(sizeof(int),(nXSize+2) * 3); if (pabyRed == NULL || pabyGreen == NULL || pabyBlue == NULL || pabyIndex == NULL || panError == NULL) { err = CE_Failure; goto end_and_cleanup; } if( pasDynamicColorMap == NULL ) { /* -------------------------------------------------------------------- */ /* Build a 24bit to 8 bit color mapping. */ /* -------------------------------------------------------------------- */ pabyColorMap = (GByte *) VSI_MALLOC_VERBOSE(nCLevels * nCLevels * nCLevels * sizeof(GByte)); if( pabyColorMap == NULL ) { err = CE_Failure; goto end_and_cleanup; } FindNearestColor( nColors, anPCT, pabyColorMap, nCLevels); } else { pabyColorMap = NULL; if( nBits == 8 && (GIntBig)nXSize * nYSize <= 65536 ) { /* If the image is small enough, then the number of colors */ /* will be limited and using a hashmap, rather than a full table */ /* will be more efficient */ psColorIndexMap = (ColorIndex*)pasDynamicColorMap; memset(psColorIndexMap, 0xFF, sizeof(ColorIndex) * PRIME_FOR_65536); } else { memset(pasDynamicColorMap, 0xFF, 256 * 256 * 256 * sizeof(GInt16)); } } /* ==================================================================== */ /* Loop over all scanlines of data to process. 
*/ /* ==================================================================== */ int iScanline; for( iScanline = 0; iScanline < nYSize; iScanline++ ) { int nLastRedError, nLastGreenError, nLastBlueError, i; /* -------------------------------------------------------------------- */ /* Report progress */ /* -------------------------------------------------------------------- */ if( !pfnProgress( iScanline / (double) nYSize, NULL, pProgressArg ) ) { CPLError( CE_Failure, CPLE_UserInterrupt, "User Terminated" ); err = CE_Failure; goto end_and_cleanup; } /* -------------------------------------------------------------------- */ /* Read source data. */ /* -------------------------------------------------------------------- */ err = GDALRasterIO( hRed, GF_Read, 0, iScanline, nXSize, 1, pabyRed, nXSize, 1, GDT_Byte, 0, 0 ); if( err == CE_None ) err = GDALRasterIO( hGreen, GF_Read, 0, iScanline, nXSize, 1, pabyGreen, nXSize, 1, GDT_Byte, 0, 0 ); if( err == CE_None ) err = GDALRasterIO( hBlue, GF_Read, 0, iScanline, nXSize, 1, pabyBlue, nXSize, 1, GDT_Byte, 0, 0 ); if( err != CE_None ) goto end_and_cleanup; /* -------------------------------------------------------------------- */ /* Apply the error from the previous line to this one. */ /* -------------------------------------------------------------------- */ if( bDither ) { for( i = 0; i < nXSize; i++ ) { pabyRed[i] = (GByte) MAX(0,MIN(255,(pabyRed[i] + panError[i*3+0+3]))); pabyGreen[i] = (GByte) MAX(0,MIN(255,(pabyGreen[i] + panError[i*3+1+3]))); pabyBlue[i] = (GByte) MAX(0,MIN(255,(pabyBlue[i] + panError[i*3+2+3]))); } memset( panError, 0, sizeof(int) * (nXSize+2) * 3 ); } /* -------------------------------------------------------------------- */ /* Figure out the nearest color to the RGB value. 
*/ /* -------------------------------------------------------------------- */ nLastRedError = 0; nLastGreenError = 0; nLastBlueError = 0; for( i = 0; i < nXSize; i++ ) { int iIndex, nError, nSixth; int nRedValue, nGreenValue, nBlueValue; nRedValue = MAX(0,MIN(255, pabyRed[i] + nLastRedError)); nGreenValue = MAX(0,MIN(255, pabyGreen[i] + nLastGreenError)); nBlueValue = MAX(0,MIN(255, pabyBlue[i] + nLastBlueError)); if( psColorIndexMap ) { GUInt32 nColorCode = MAKE_COLOR_CODE(nRedValue, nGreenValue, nBlueValue); GUInt32 nIdx = nColorCode % PRIME_FOR_65536; //int nCollisions = 0; //static int nMaxCollisions = 0; while( true ) { if( psColorIndexMap[nIdx].nColorCode == nColorCode ) { iIndex = psColorIndexMap[nIdx].nIndex; break; } if( (int)psColorIndexMap[nIdx].nColorCode < 0 ) { psColorIndexMap[nIdx].nColorCode = nColorCode; iIndex = FindNearestColor( nColors, anPCT, nRedValue, nGreenValue, nBlueValue ); psColorIndexMap[nIdx].nIndex = (GByte) iIndex; break; } if( psColorIndexMap[nIdx].nColorCode2 == nColorCode ) { iIndex = psColorIndexMap[nIdx].nIndex2; break; } if( (int)psColorIndexMap[nIdx].nColorCode2 < 0 ) { psColorIndexMap[nIdx].nColorCode2 = nColorCode; iIndex = FindNearestColor( nColors, anPCT, nRedValue, nGreenValue, nBlueValue ); psColorIndexMap[nIdx].nIndex2 = (GByte) iIndex; break; } if( psColorIndexMap[nIdx].nColorCode3 == nColorCode ) { iIndex = psColorIndexMap[nIdx].nIndex3; break; } if( (int)psColorIndexMap[nIdx].nColorCode3 < 0 ) { psColorIndexMap[nIdx].nColorCode3 = nColorCode; iIndex = FindNearestColor( nColors, anPCT, nRedValue, nGreenValue, nBlueValue ); psColorIndexMap[nIdx].nIndex3 = (GByte) iIndex; break; } do { //nCollisions ++; nIdx+=257; if( nIdx >= PRIME_FOR_65536 ) nIdx -= PRIME_FOR_65536; } while( (int)psColorIndexMap[nIdx].nColorCode >= 0 && psColorIndexMap[nIdx].nColorCode != nColorCode && (int)psColorIndexMap[nIdx].nColorCode2 >= 0 && psColorIndexMap[nIdx].nColorCode2 != nColorCode&& (int)psColorIndexMap[nIdx].nColorCode3 >= 0 && 
psColorIndexMap[nIdx].nColorCode3 != nColorCode ); /*if( nCollisions > nMaxCollisions ) { nMaxCollisions = nCollisions; printf("nCollisions = %d for R=%d,G=%d,B=%d\n", nCollisions, nRedValue, nGreenValue, nBlueValue); }*/ } } else if( pasDynamicColorMap == NULL ) { int iRed = nRedValue * nCLevels / 256; int iGreen = nGreenValue * nCLevels / 256; int iBlue = nBlueValue * nCLevels / 256; iIndex = pabyColorMap[iRed + iGreen * nCLevels + iBlue * nCLevels * nCLevels]; } else { GUInt32 nColorCode = MAKE_COLOR_CODE(nRedValue, nGreenValue, nBlueValue); GInt16* psIndex = &pasDynamicColorMap[nColorCode]; if( *psIndex < 0 ) iIndex = *psIndex = static_cast<GInt16>(FindNearestColor( nColors, anPCT, nRedValue, nGreenValue, nBlueValue )); else iIndex = *psIndex; } pabyIndex[i] = (GByte) iIndex; if( !bDither ) continue; /* -------------------------------------------------------------------- */ /* Compute Red error, and carry it on to the next error line. */ /* -------------------------------------------------------------------- */ nError = nRedValue - CAST_PCT(anPCT)[4*iIndex+0]; nSixth = nError / 6; panError[i*3 ] += nSixth; panError[i*3+6 ] = nSixth; panError[i*3+3 ] += nError - 5 * nSixth; nLastRedError = 2 * nSixth; /* -------------------------------------------------------------------- */ /* Compute Green error, and carry it on to the next error line. */ /* -------------------------------------------------------------------- */ nError = nGreenValue - CAST_PCT(anPCT)[4*iIndex+1]; nSixth = nError / 6; panError[i*3 +1] += nSixth; panError[i*3+6+1] = nSixth; panError[i*3+3+1] += nError - 5 * nSixth; nLastGreenError = 2 * nSixth; /* -------------------------------------------------------------------- */ /* Compute Blue error, and carry it on to the next error line. 
*/ /* -------------------------------------------------------------------- */ nError = nBlueValue - CAST_PCT(anPCT)[4*iIndex+2]; nSixth = nError / 6; panError[i*3 +2] += nSixth; panError[i*3+6+2] = nSixth; panError[i*3+3+2] += nError - 5 * nSixth; nLastBlueError = 2 * nSixth; } /* -------------------------------------------------------------------- */ /* Write results. */ /* -------------------------------------------------------------------- */ err = GDALRasterIO( hTarget, GF_Write, 0, iScanline, nXSize, 1, pabyIndex, nXSize, 1, GDT_Byte, 0, 0 ); if( err != CE_None ) break; } pfnProgress( 1.0, NULL, pProgressArg ); /* -------------------------------------------------------------------- */ /* Cleanup */ /* -------------------------------------------------------------------- */ end_and_cleanup: CPLFree( pabyRed ); CPLFree( pabyGreen ); CPLFree( pabyBlue ); CPLFree( pabyIndex ); CPLFree( panError ); CPLFree( pabyColorMap ); return err; } static int FindNearestColor( int nColors, int *panPCT, int nRedValue, int nGreenValue, int nBlueValue ) { #ifdef USE_SSE2 int iColor; int nBestDist = 768, nBestIndex = 0; int anDistanceUnaligned[16+4]; /* 4 for alignment on 16-byte boundary */ int* anDistance = ALIGN_INT_ARRAY_ON_16_BYTE(anDistanceUnaligned); const __m128i ff = _mm_set1_epi32(0xFFFFFFFF); const __m128i mask_low = _mm_srli_epi64(ff, 32); const __m128i mask_high = _mm_slli_epi64(ff, 32); unsigned int nColorVal = MAKE_COLOR_CODE(nRedValue, nGreenValue, nBlueValue); const __m128i thisColor = _mm_set1_epi32(nColorVal); const __m128i thisColor_low = _mm_srli_epi64(thisColor, 32); const __m128i thisColor_high = _mm_slli_epi64(thisColor, 32); for( iColor = 0; iColor < nColors; iColor+=8 ) { __m128i pctColor = _mm_load_si128((__m128i*)&panPCT[iColor]); __m128i pctColor2 = _mm_load_si128((__m128i*)&panPCT[iColor+4]); _mm_store_si128((__m128i*)anDistance, _mm_sad_epu8(_mm_and_si128(pctColor,mask_low),thisColor_low)); _mm_store_si128((__m128i*)(anDistance+4), 
_mm_sad_epu8(_mm_and_si128(pctColor,mask_high),thisColor_high)); _mm_store_si128((__m128i*)(anDistance+8), _mm_sad_epu8(_mm_and_si128(pctColor2,mask_low),thisColor_low)); _mm_store_si128((__m128i*)(anDistance+12), _mm_sad_epu8(_mm_and_si128(pctColor2,mask_high),thisColor_high)); if( anDistance[0] < nBestDist ) { nBestIndex = iColor; nBestDist = anDistance[0]; } if( anDistance[4] < nBestDist ) { nBestIndex = iColor+1; nBestDist = anDistance[4]; } if( anDistance[2] < nBestDist ) { nBestIndex = iColor+2; nBestDist = anDistance[2]; } if( anDistance[6] < nBestDist ) { nBestIndex = iColor+3; nBestDist = anDistance[6]; } if( anDistance[8+0] < nBestDist ) { nBestIndex = iColor+4; nBestDist = anDistance[8+0]; } if( anDistance[8+4] < nBestDist ) { nBestIndex = iColor+4+1; nBestDist = anDistance[8+4]; } if( anDistance[8+2] < nBestDist ) { nBestIndex = iColor+4+2; nBestDist = anDistance[8+2]; } if( anDistance[8+6] < nBestDist ) { nBestIndex = iColor+4+3; nBestDist = anDistance[8+6]; } } return nBestIndex; #else int iColor; int nBestDist = 768, nBestIndex = 0; for( iColor = 0; iColor < nColors; iColor++ ) { int nThisDist; nThisDist = ABS(nRedValue - panPCT[4*iColor+0]) + ABS(nGreenValue - panPCT[4*iColor+1]) + ABS(nBlueValue - panPCT[4*iColor+2]); if( nThisDist < nBestDist ) { nBestIndex = iColor; nBestDist = nThisDist; } } return nBestIndex; #endif } /************************************************************************/ /* FindNearestColor() */ /* */ /* Finear near PCT color for any RGB color. */ /************************************************************************/ static void FindNearestColor( int nColors, int *panPCT, GByte *pabyColorMap, int nCLevels ) { int iBlue, iGreen, iRed; /* -------------------------------------------------------------------- */ /* Loop over all the cells in the high density cube. 
*/ /* -------------------------------------------------------------------- */ for( iBlue = 0; iBlue < nCLevels; iBlue++ ) { for( iGreen = 0; iGreen < nCLevels; iGreen++ ) { for( iRed = 0; iRed < nCLevels; iRed++ ) { int nRedValue, nGreenValue, nBlueValue; nRedValue = (iRed * 255) / (nCLevels-1); nGreenValue = (iGreen * 255) / (nCLevels-1); nBlueValue = (iBlue * 255) / (nCLevels-1); int nBestIndex = FindNearestColor( nColors, panPCT, nRedValue, nGreenValue, nBlueValue ); pabyColorMap[iRed + iGreen*nCLevels + iBlue*nCLevels*nCLevels] = (GByte)nBestIndex; } } } }
nbuchanan/node-gdal
deps/libgdal/gdal/alg/gdaldither.cpp
C++
apache-2.0
25,530
/* * Copyright 2015 Samsung Electronics All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package oic.simulator.clientcontroller.view; import org.eclipse.jface.dialogs.TitleAreaDialog; import org.eclipse.jface.viewers.CellEditor; import org.eclipse.jface.viewers.CheckboxCellEditor; import org.eclipse.jface.viewers.ComboBoxCellEditor; import org.eclipse.jface.viewers.EditingSupport; import org.eclipse.jface.viewers.TextCellEditor; import org.eclipse.jface.viewers.TreeViewer; import org.eclipse.jface.window.Window; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CCombo; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.MessageBox; import org.eclipse.swt.widgets.Text; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeItem; import java.util.Date; import java.util.List; import org.oic.simulator.ArrayProperty; import org.oic.simulator.AttributeProperty; import org.oic.simulator.AttributeValue; import org.oic.simulator.AttributeValue.TypeInfo; import org.oic.simulator.AttributeValue.ValueType; import org.oic.simulator.ILogger.Level; import org.oic.simulator.InvalidArgsException; import org.oic.simulator.SimulatorResourceAttribute; import oic.simulator.clientcontroller.Activator; import oic.simulator.clientcontroller.manager.ResourceManager; import oic.simulator.clientcontroller.remoteresource.AttributeElement; import 
oic.simulator.clientcontroller.remoteresource.RemoteResource; import oic.simulator.clientcontroller.remoteresource.ResourceRepresentation; import oic.simulator.clientcontroller.utils.AttributeValueBuilder; import oic.simulator.clientcontroller.utils.Utility; import oic.simulator.clientcontroller.view.dialogs.PostRequestDialog; import oic.simulator.clientcontroller.view.dialogs.UpdatePrimitiveArrayAttributeDialog; /** * This class provides editing support to the resources attributes table in the * attributes view. */ public class AttributeEditingSupport { private AttributeValueEditor attValueEditor; private PostSelectionEditor postSelectionEditor; public AttributeValueEditor createAttributeValueEditor(TreeViewer viewer, TitleAreaDialog dialog) { attValueEditor = new AttributeValueEditor(viewer, dialog); return attValueEditor; } public PostSelectionEditor createPostSelectionEditor(TreeViewer viewer) { postSelectionEditor = new PostSelectionEditor(viewer); return postSelectionEditor; } class AttributeValueEditor extends EditingSupport { private final TreeViewer viewer; private CCombo comboBox; private TitleAreaDialog dialog; public AttributeValueEditor(TreeViewer viewer, TitleAreaDialog dialog) { super(viewer); this.viewer = viewer; this.dialog = dialog; } @Override protected boolean canEdit(Object arg0) { return true; } @Override protected CellEditor getCellEditor(final Object element) { ResourceManager resourceManager = Activator.getDefault() .getResourceManager(); RemoteResource res = resourceManager .getCurrentResourceInSelection(); if (null == res) { return null; } final SimulatorResourceAttribute attribute; if (!(element instanceof AttributeElement)) { return null; } final AttributeElement attributeElement = ((AttributeElement) element); attribute = attributeElement.getSimulatorResourceAttribute(); if (null == attribute) { return null; } final AttributeValue val = attribute.value(); if (null == val) { return null; } final TypeInfo type = val.typeInfo(); if 
(type.mBaseType == ValueType.RESOURCEMODEL) { return null; } CellEditor editor; if (type.mType == ValueType.ARRAY && res.isConfigUploaded() && isArrayAttributeValid(attribute)) { editor = new TextCellEditor(viewer.getTree()); editor.setStyle(SWT.READ_ONLY); final Text txt = (Text) editor.getControl(); txt.addModifyListener(new ModifyListener() { @Override public void modifyText(ModifyEvent e) { UpdatePrimitiveArrayAttributeDialog dialog = new UpdatePrimitiveArrayAttributeDialog( Display.getDefault().getActiveShell(), attribute); if (dialog.open() == Window.OK) { updateAttributeValue(attributeElement, attribute, dialog.getNewValueObj()); } // Update the viewer in a separate UI thread. Display.getDefault().asyncExec(new Runnable() { @Override public void run() { // Set the post state of the top-level // attribute. AttributeElement rootElement = getRootElement(attributeElement); rootElement.setPostState(true); viewer.refresh(rootElement, true); } }); } }); } else { String values[] = null; List<String> valueSet = resourceManager .getAllValuesOfAttribute(attribute); values = convertListToStringArray(valueSet); editor = new ComboBoxCellEditor(viewer.getTree(), values); comboBox = (CCombo) editor.getControl(); comboBox.addModifyListener(new ModifyListener() { @Override public void modifyText(ModifyEvent event) { // Set the post state of the top-level attribute. 
AttributeElement rootElement = getRootElement(attributeElement); rootElement.setPostState(true); if (AttributeValueEditor.this.dialog instanceof PostRequestDialog) { viewer.update(rootElement, null); } } }); } return editor; } @Override protected Object getValue(Object element) { int indexOfItem = 0; SimulatorResourceAttribute att = null; if (element instanceof AttributeElement) { att = ((AttributeElement) element) .getSimulatorResourceAttribute(); } if (att == null) { return 0; } final AttributeValue val = att.value(); if (null == val) { return null; } final TypeInfo type = val.typeInfo(); if (type.mBaseType == ValueType.RESOURCEMODEL) { return null; } String valueString = Utility.getAttributeValueAsString(att.value()); if (null == valueString) { valueString = ""; } if (type.mType == ValueType.ARRAY) { ResourceManager resourceManager = Activator.getDefault() .getResourceManager(); RemoteResource res = resourceManager .getCurrentResourceInSelection(); if (null != res && res.isConfigUploaded() && isArrayAttributeValid(att)) { return valueString; } } List<String> valueSet = Activator.getDefault().getResourceManager() .getAllValuesOfAttribute(att); if (null != valueSet) { indexOfItem = valueSet.indexOf(valueString); } if (indexOfItem == -1) { indexOfItem = 0; } return indexOfItem; } @Override protected void setValue(Object element, Object value) { SimulatorResourceAttribute att = null; if (element instanceof AttributeElement) { att = ((AttributeElement) element) .getSimulatorResourceAttribute(); } if (att == null) { return; } AttributeValue val = att.value(); if (null == val) { return; } TypeInfo type = val.typeInfo(); if (type.mBaseType == ValueType.RESOURCEMODEL) { return; } if (type.mType == ValueType.ARRAY) { ResourceManager resourceManager = Activator.getDefault() .getResourceManager(); RemoteResource res = resourceManager .getCurrentResourceInSelection(); if (null != res && res.isConfigUploaded() && isArrayAttributeValid(att)) { return; } } String oldValue = 
String.valueOf(Utility .getAttributeValueAsString(val)); if (null == oldValue) { oldValue = ""; } String newValue = comboBox.getText(); if (type.mType == ValueType.ARRAY && type.mBaseType != ValueType.RESOURCEMODEL) { newValue = Utility.removeWhiteSpacesInArrayValues(newValue); } if (!oldValue.equals(newValue)) { boolean invalid = false; // Get the AttriuteValue from the string AttributeValue attValue = null; try { attValue = AttributeValueBuilder.build(newValue, type.mBaseType); } catch (Exception e) { Activator .getDefault() .getLogManager() .log(Level.ERROR.ordinal(), new Date(), "There is an error while creating the new attribute value.\n" + Utility.getSimulatorErrorString( e, null)); } if (null == attValue) { invalid = true; } else { TypeInfo resTypeInfo = attValue.typeInfo(); if (type.mDepth != resTypeInfo.mDepth || type.mType != resTypeInfo.mType || type.mBaseType != resTypeInfo.mBaseType) { invalid = true; } } if (invalid) { MessageBox dialog = new MessageBox(viewer.getTree() .getShell(), SWT.ICON_ERROR | SWT.OK); dialog.setText("Invalid Value"); dialog.setMessage("Given value is invalid"); dialog.open(); } else { updateAttributeValue((AttributeElement) element, att, attValue); } } viewer.update(element, null); } private boolean isArrayAttributeValid( SimulatorResourceAttribute attribute) { if (null == attribute) return false; AttributeValue val = attribute.value(); if (null == val) return false; AttributeProperty prop = attribute.property(); if (null == prop || !prop.isArray()) return false; ArrayProperty arrProp = prop.asArray(); if (null == arrProp) return false; AttributeProperty elementProp = arrProp.getElementProperty(); if (null == elementProp) return false; TypeInfo info = val.typeInfo(); if (info.mBaseType == ValueType.RESOURCEMODEL) return false; return true; } public String[] convertListToStringArray(List<String> values) { String[] strArr; if (null != values && values.size() > 0) { strArr = values.toArray(new String[1]); } else { strArr = new 
String[1]; } return strArr; } public void updateAttributeValue(AttributeElement attributeElement, SimulatorResourceAttribute att, AttributeValue value) { // Update the post status. Object parent = attributeElement.getParent(); AttributeElement rootElement = attributeElement; while (parent != null && parent instanceof AttributeElement) { rootElement = (AttributeElement) parent; parent = ((AttributeElement) parent).getParent(); } rootElement.setPostState(true); // Set the attribute value. attributeElement.getSimulatorResourceAttribute().setValue(value); // Update the hierarchy. parent = attributeElement.getParent(); if (null != parent && parent instanceof AttributeElement) { try { ((AttributeElement) parent).deepSetChildValue(att); } catch (InvalidArgsException e) { Activator .getDefault() .getLogManager() .log(Level.ERROR.ordinal(), new Date(), Utility.getSimulatorErrorString(e, null)); e.printStackTrace(); } } } } private static class PostSelectionEditor extends EditingSupport { private final TreeViewer viewer; public PostSelectionEditor(TreeViewer viewer) { super(viewer); this.viewer = viewer; } @Override protected boolean canEdit(Object arg0) { return true; } @Override protected CellEditor getCellEditor(Object element) { if (element instanceof AttributeElement && ((AttributeElement) element).getParent() instanceof ResourceRepresentation) { return new CheckboxCellEditor(null, SWT.CHECK | SWT.READ_ONLY); } return null; } @Override protected Object getValue(Object element) { if (element instanceof AttributeElement) { return ((AttributeElement) element).getPostState(); } return false; } @Override protected void setValue(Object element, Object value) { if (!(element instanceof AttributeElement)) { return; } boolean status = (Boolean) value; ((AttributeElement) element).setPostState(status); viewer.update(element, null); Tree t = viewer.getTree(); TreeItem item = t.getSelection()[0]; if (null == item) { return; } // Update the post state of the top-most parent of this 
attribute. TreeItem parent = item.getParentItem(); if (null != parent) { while (parent.getParentItem() != null) { parent = parent.getParentItem(); } Object data = parent.getData(); ((AttributeElement) data).setPostState(status); } } } private AttributeElement getRootElement(AttributeElement element) { AttributeElement root = null; Object parent = element.getParent(); if (parent instanceof ResourceRepresentation) { return element; } while (!(parent instanceof ResourceRepresentation)) { root = (AttributeElement) parent; parent = ((AttributeElement) parent).getParent(); } return root; } }
iotivity/iotivity
service/simulator/java/eclipse-plugin/ClientControllerPlugin/src/oic/simulator/clientcontroller/view/AttributeEditingSupport.java
Java
apache-2.0
17,171
require 'fog/aws/core' require 'fog/cdn' module Fog module CDN class AWS < Fog::Service extend Fog::AWS::CredentialFetcher::ServiceMethods requires :aws_access_key_id, :aws_secret_access_key recognizes :host, :path, :port, :scheme, :version, :persistent, :use_iam_profile, :aws_session_token, :aws_credentials_expire_at model_path 'fog/aws/models/cdn' model :distribution collection :distributions model :streaming_distribution collection :streaming_distributions request_path 'fog/aws/requests/cdn' request 'delete_distribution' request 'delete_streaming_distribution' request 'get_distribution' request 'get_distribution_list' request 'get_invalidation_list' request 'get_invalidation' request 'get_streaming_distribution' request 'get_streaming_distribution_list' request 'post_distribution' request 'post_streaming_distribution' request 'post_invalidation' request 'put_distribution_config' request 'put_streaming_distribution_config' class Mock def self.data @data ||= Hash.new do |hash, key| hash[key] = { :distributions => {}, :streaming_distributions => {}, :invalidations => {} } end end def self.reset @data = nil end def initialize(options={}) @use_iam_profile = options[:use_iam_profile] setup_credentials(options) end def data self.class.data[@aws_access_key_id] end def reset_data self.class.data.delete(@aws_access_key_id) end def signature(params) "foo" end def setup_credentials(options={}) @aws_access_key_id = options[:aws_access_key_id] end def self.distribution_id random_id(14) end def self.generic_id random_id(14) end def self.domain_name "#{random_id(12).downcase}.cloudfront.net" end def self.random_id(length) Fog::Mock.random_selection("abcdefghijklmnopqrstuvwxyz0123456789", length).upcase end CDN_ERRORS = { :access_denies => {:code => 'AccessDenied',:msg => 'Access denied.',:status => 403}, :inappropriate_xml => {:code => 'InappropriateXML',:msg => 'The XML document you provided was well-formed and valid, but not appropriate for this operation.',:status => 400}, 
:internal_error => {:code => 'InternalError',:msg => 'We encountered an internal error. Please try again.',:status => 500}, :invalid_action => {:code => 'InvalidAction',:msg => 'The action specified is not valid.',:status => 400}, :invalid_argument => {:code => 'InvalidArgument',:msg => '%s', :status => 400}, :not_implemented => {:code => 'NotImplemented', :msg => 'Not implemented.',:status => 501}, :no_such_distribution => { :code => 'NoSuchDistribution', :msg => 'The specified distribution does not exist', :status => 404 }, :no_such_streaming_distribution => { :code => 'NoSuchStreamingDistribution', :msg => 'The specified streaming distribution does not exist', :status => 404 }, :no_such_invalidation => { :code => 'NoSuchInvalidation', :msg => 'The specified invalidation does not exist', :status => 404 }, :cname_exists => { :code => 'CNAMEAlreadyExists', :msg => 'One or more of the CNAMEs you provided are already associated with a different distribution', :status => 409 }, :illegal_update => { :code => 'IllegalUpdate', :msg => 'Origin and CallerReference cannot be updated.', :status => 400 }, :invalid_if_match_version => { :code => 'InvalidIfMatchVersion', :msg => 'The If-Match version is missing or not valid for the distribution.', :status => 400}, :distribution_not_disabled => { :code => 'DistributionNotDisabled', :msg => 'The distribution you are trying to delete has not been disabled.', :status => 409 }, } def self.error(code, argument = '') if error = CDN_ERRORS[code] raise_error(error[:status], error[:code], error[:msg] % argument) end end def self.raise_error(status, code, message='') response = Excon::Response.new response.status = status response.body = <<EOF <ErrorResponse xmlns="http://cloudfront.amazonaws.com/doc/2010-11-01/"> <Error> <Type>Sender</Type> <Code>#{code}</Code> <Message>#{message}.</Message> </Error> <RequestId>#{Fog::AWS::Mock.request_id}</RequestId> </ErrorResponse> EOF raise(Excon::Errors.status_error({:expects => 201}, response)) end 
end class Real include Fog::AWS::CredentialFetcher::ConnectionMethods # Initialize connection to Cloudfront # # ==== Notes # options parameter must include values for :aws_access_key_id and # :aws_secret_access_key in order to create a connection # # ==== Examples # cdn = Fog::AWS::CDN.new( # :aws_access_key_id => your_aws_access_key_id, # :aws_secret_access_key => your_aws_secret_access_key # ) # # ==== Parameters # * options<~Hash> - config arguments for connection. Defaults to {}. # # ==== Returns # * cdn object with connection to aws. def initialize(options={}) require 'fog/core/parser' @use_iam_profile = options[:use_iam_profile] setup_credentials(options) @connection_options = options[:connection_options] || {} @host = options[:host] || 'cloudfront.amazonaws.com' @path = options[:path] || '/' @persistent = options.fetch(:persistent, true) @port = options[:port] || 443 @scheme = options[:scheme] || 'https' @version = options[:version] || '2010-11-01' @connection = Fog::Connection.new("#{@scheme}://#{@host}:#{@port}#{@path}", @persistent, @connection_options) end def reload @connection.reset end private def setup_credentials(options) @aws_access_key_id = options[:aws_access_key_id] @aws_secret_access_key = options[:aws_secret_access_key] @aws_session_token = options[:aws_session_token] @aws_credentials_expire_at = options[:aws_credentials_expire_at] @hmac = Fog::HMAC.new('sha1', @aws_secret_access_key) end def request(params, &block) refresh_credentials_if_expired params[:headers] ||= {} params[:headers]['Date'] = Fog::Time.now.to_date_header params[:headers]['x-amz-security-token'] = @aws_session_token if @aws_session_token params[:headers]['Authorization'] = "AWS #{@aws_access_key_id}:#{signature(params)}" params[:path] = "/#{@version}/#{params[:path]}" @connection.request(params, &block) end def signature(params) string_to_sign = params[:headers]['Date'] signed_string = @hmac.sign(string_to_sign) Base64.encode64(signed_string).chomp! end end end end end
jreichhold/chef-repo
vendor/ruby/2.0.0/gems/fog-1.20.0/lib/fog/aws/cdn.rb
Ruby
apache-2.0
7,443
#include "sp/sp.h" #include "Widget.h" namespace sp { namespace graphics { namespace ui { Widget::Widget(const maths::Rectangle& bounds) : m_Bounds(bounds), m_Active(true), m_Focused(false) { } bool Widget::OnMousePressed(events::MousePressedEvent& e) { return false; } bool Widget::OnMouseReleased(events::MouseReleasedEvent& e) { return false; } bool Widget::OnMouseMoved(events::MouseMovedEvent& e) { return false; } void Widget::OnUpdate() { } void Widget::OnRender(Renderer2D& renderer) { } } } }
Jacob-Mango/Sparky
Sparky-core/src/sp/graphics/ui/Widget.cpp
C++
apache-2.0
535
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ module VZ.Sorting { /** * Compares tag names asciinumerically broken into components. * * <p>This is the comparison function used for sorting most string values in * TensorBoard. Unlike the standard asciibetical comparator, this function * knows that 'a10b' > 'a2b'. Fixed point and engineering notation are * supported. This function also splits the input by slash and underscore to * perform array comparison. Therefore it knows that 'a/a' < 'a+/a' even * though '+' < '/' in the ASCII table. */ export function compareTagNames(a, b: string): number { let ai = 0; let bi = 0; while (true) { if (ai === a.length) return bi === b.length ? 
0 : -1; if (bi === b.length) return 1; if (isDigit(a[ai]) && isDigit(b[bi])) { const ais = ai; const bis = bi; ai = consumeNumber(a, ai + 1); bi = consumeNumber(b, bi + 1); const an = parseFloat(a.slice(ais, ai)); const bn = parseFloat(b.slice(bis, bi)); if (an < bn) return -1; if (an > bn) return 1; continue; } if (isBreak(a[ai])) { if (!isBreak(b[bi])) return -1; } else if (isBreak(b[bi])) { return 1; } else if (a[ai] < b[bi]) { return -1; } else if (a[ai] > b[bi]) { return 1; } ai++; bi++; } } function consumeNumber(s: string, i: number): number { enum State { NATURAL, REAL, EXPONENT_SIGN, EXPONENT } let state = State.NATURAL; for (; i < s.length; i++) { if (state === State.NATURAL) { if (s[i] === '.') { state = State.REAL; } else if (s[i] === 'e' || s[i] === 'E') { state = State.EXPONENT_SIGN; } else if (!isDigit(s[i])) { break; } } else if (state === State.REAL) { if (s[i] === 'e' || s[i] === 'E') { state = State.EXPONENT_SIGN; } else if (!isDigit(s[i])) { break; } } else if (state === State.EXPONENT_SIGN) { if (isDigit(s[i]) || s[i] === '+' || s[i] === '-') { state = State.EXPONENT; } else { break; } } else if (state === State.EXPONENT) { if (!isDigit(s[i])) break; } } return i; } function isDigit(c: string): boolean { return '0' <= c && c <= '9'; } function isBreak(c: string): boolean { // TODO(jart): Remove underscore when people stop using it like a slash. return c === '/' || c === '_' || isDigit(c); } }
natanielruiz/android-yolo
jni-build/jni/include/tensorflow/tensorboard/components/vz-sorting/sorting.ts
TypeScript
apache-2.0
3,190
//
// Licensed to Green Energy Corp (www.greenenergycorp.com) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Green Energy Corp licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
#include "IntegrationTest.h"

#include <sstream>

#include <opendnp3/APL/PhysicalLayerFactory.h>
#include <opendnp3/APL/IPhysicalLayerAsync.h>
#include <opendnp3/APL/test/util/AsyncTestObjectASIO.h>
#include <opendnp3/DNP3/MasterStackConfig.h>
#include <opendnp3/DNP3/SlaveStackConfig.h>
#include <opendnp3/DNP3/MasterStack.h>
#include <opendnp3/DNP3/SlaveStack.h>

#include <boost/asio.hpp>
#include <boost/foreach.hpp>
#include <boost/random/variate_generator.hpp>
#include <boost/random/uniform_int.hpp>
#include <boost/bind.hpp>

using namespace std;
using namespace apl;
using namespace apl::dnp;

// Constructs the integration fixture: seeds a local reference data set,
// then creates aNumPairs master/slave TCP stack pairs on consecutive ports
// starting at aStartPort. The local observer is registered on the fanout
// last, so it receives the same updates the slaves do.
IntegrationTest::IntegrationTest(Logger* apLogger, FilterLevel aLevel, boost::uint16_t aStartPort, size_t aNumPairs, size_t aNumPoints) :
    Loggable(apLogger),
    M_START_PORT(aStartPort),
    mManager(apLogger),
    NUM_POINTS(aNumPoints)
{
    this->InitLocalObserver();
    for (size_t i = 0; i < aNumPairs; ++i) {
        AddStackPair(aLevel, aNumPoints);
    }
    mFanout.AddObserver(&mLocalFDO);
}

// Seeds the local data observer with one random binary, analog, and counter
// value per point index. These become the reference values that the
// master-side ComparingDataObservers are checked against.
void IntegrationTest::InitLocalObserver()
{
    Transaction tr(&mLocalFDO);
    for (size_t i = 0; i < NUM_POINTS; ++i) {
        mLocalFDO.Update(this->RandomBinary(), i);
        mLocalFDO.Update(this->RandomAnalog(), i);
        mLocalFDO.Update(this->RandomCounter(), i);
    }
}

// Clears the comparison state on every master-side observer so a fresh
// round of data propagation can be awaited.
void IntegrationTest::ResetObservers()
{
    for (size_t i = 0; i < this->mMasterObservers.size(); ++i) {
        mMasterObservers[i]->Reset();
    }
}

// Blocks until every master observer reports data matching the local
// reference set, or returns false as soon as any observer times out.
// If aDescribeAnyMissingData is set, the failing observer logs what it
// is still missing before this returns.
bool IntegrationTest::WaitForSameData(millis_t aTimeout, bool aDescribeAnyMissingData)
{
    LOG_BLOCK(LEV_EVENT, "Wait for same data");
    for (size_t i = 0; i < this->mMasterObservers.size(); ++i) {
        ComparingDataObserver* pObs = mMasterObservers[i].get();
        if(!pObs->WaitForSameData(aTimeout)) {
            if(aDescribeAnyMissingData) pObs->DescribeMissingData();
            return false;
        }
    }
    return true;
}

// Advances every point in the fanout to its "next" value (bit-flip for
// binaries, +1 for analogs/counters) inside a single transaction.
// Returns the total number of point updates issued (3 per point index).
size_t IntegrationTest::IncrementData()
{
    LOG_BLOCK(LEV_EVENT, "Incrementing data");

    size_t num = 0;
    this->ResetObservers();
    /*
     * Resource Acquisition Is Initialization (RAII) Pattern.
     * When the Transaction instance is created, it acquires the resource.
     * When it is destroyed, it releases the resource. The scoping using
     * the {} block forces destruction of the Transaction at the right time.
     */
    Transaction tr(&mFanout);
    for (size_t i = 0; i < NUM_POINTS; ++i) {
        mFanout.Update(this->Next(mLocalFDO.mBinaryMap[i]), i);
        mFanout.Update(this->Next(mLocalFDO.mAnalogMap[i]), i);
        mFanout.Update(this->Next(mLocalFDO.mCounterMap[i]), i);
        num += 3;
    }
    return num;
}

// Produces a random Binary point flagged online.
Binary IntegrationTest::RandomBinary()
{
    Binary v(mRandomBool.NextBool(), BQ_ONLINE);
    return v;
}

// Produces a random Analog point flagged online.
Analog IntegrationTest::RandomAnalog()
{
    Analog v(mRandomInt32.Next(), AQ_ONLINE);
    return v;
}

// Produces a random Counter point flagged online.
Counter IntegrationTest::RandomCounter()
{
    Counter v(mRandomUInt32.Next(), CQ_ONLINE);
    return v;
}

// Deterministic successor of a Binary: flips the value, keeps the quality.
Binary IntegrationTest::Next(const Binary& arPoint)
{
    Binary point(!arPoint.GetValue(), arPoint.GetQuality());
    return point;
}

// Deterministic successor of an Analog: value + 1, same quality.
Analog IntegrationTest::Next(const Analog& arPoint)
{
    Analog point(arPoint.GetValue() + 1, arPoint.GetQuality());
    return point;
}

// Deterministic successor of a Counter: value + 1, same quality.
Counter IntegrationTest::Next(const Counter& arPoint)
{
    Counter point(arPoint.GetValue() + 1, arPoint.GetQuality());
    return point;
}

// Creates one master/slave pair connected over loopback TCP. The port is
// derived from the number of pairs created so far, so each pair gets a
// unique port. The slave's data observer is added to the fanout so that
// IncrementData() drives every slave at once.
void IntegrationTest::AddStackPair(FilterLevel aLevel, size_t aNumPoints)
{
    boost::uint16_t port = M_START_PORT + static_cast<boost::uint16_t>(this->mMasterObservers.size());

    ostringstream oss;
    oss << "Port: " << port;
    std::string client = oss.str() + " Client ";
    std::string server = oss.str() + " Server ";

    // Master-side observer that compares received data to the local set.
    boost::shared_ptr<ComparingDataObserver> pMasterFDO(new ComparingDataObserver(mpLogger->GetSubLogger(client), &mLocalFDO));
    mMasterObservers.push_back(pMasterFDO);

    PhysLayerSettings s(aLevel, 1000);
    this->mManager.AddTCPClient(client, s, "127.0.0.1", port);
    this->mManager.AddTCPServer(server, s, "127.0.0.1", port);

    /*
     * Add a Master instance. The code is wrapped in braces so that we can
     * re-use the 'cfg' variable name.
     */
    {
        MasterStackConfig cfg;
        cfg.app.RspTimeout = 20000;
        cfg.master.IntegrityRate = 60000;   // set this to retry, if the task
                                            // timer doesn't close properly,
                                            // this will seal the deal
        cfg.master.EnableUnsol = true;
        cfg.master.DoUnsolOnStartup = true;
        cfg.master.UnsolClassMask = PC_ALL_EVENTS;
        this->mManager.AddMaster(client, client, aLevel, pMasterFDO.get(), cfg);
    }

    /*
     * Add a Slave instance. The code is wrapped in braces so that we can
     * re-use the 'cfg' variable name.
     */
    {
        SlaveStackConfig cfg;
        cfg.app.RspTimeout = 20000;
        cfg.slave.mDisableUnsol = false;
        cfg.slave.mUnsolPackDelay = 0;
        cfg.device = DeviceTemplate(aNumPoints, aNumPoints, aNumPoints);
        IDataObserver* pObs = this->mManager.AddSlave(server, server, aLevel, &mCmdAcceptor, cfg);
        this->mFanout.AddObserver(pObs);
    }
}

/* vim: set ts=4 sw=4: */
meaw/dnp3
src/opendnp3/DNP3/test/IntegrationTest.cpp
C++
apache-2.0
5,611
/*
 * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
 * under one or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information regarding copyright
 * ownership. Camunda licenses this file to you under the Apache License,
 * Version 2.0; you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.impl;

import org.camunda.bpm.engine.query.QueryProperty;

/**
 * Query properties usable for sorting historic incident queries. Each
 * constant wraps the database column name it orders by.
 *
 * <p>Note: interface fields are implicitly {@code public static final};
 * the previously redundant modifiers have been removed
 * (Checkstyle {@code RedundantModifier}).
 *
 * @author Roman Smirnov
 */
public interface HistoricIncidentQueryProperty {

  QueryProperty INCIDENT_ID = new QueryPropertyImpl("ID_");
  QueryProperty INCIDENT_MESSAGE = new QueryPropertyImpl("INCIDENT_MSG_");
  QueryProperty INCIDENT_CREATE_TIME = new QueryPropertyImpl("CREATE_TIME_");
  QueryProperty INCIDENT_END_TIME = new QueryPropertyImpl("END_TIME_");
  QueryProperty INCIDENT_TYPE = new QueryPropertyImpl("INCIDENT_TYPE_");
  QueryProperty EXECUTION_ID = new QueryPropertyImpl("EXECUTION_ID_");
  QueryProperty ACTIVITY_ID = new QueryPropertyImpl("ACTIVITY_ID_");
  QueryProperty PROCESS_INSTANCE_ID = new QueryPropertyImpl("PROC_INST_ID_");
  QueryProperty PROCESS_DEFINITION_ID = new QueryPropertyImpl("PROC_DEF_ID_");
  QueryProperty PROCESS_DEFINITION_KEY = new QueryPropertyImpl("PROC_DEF_KEY_");
  QueryProperty CAUSE_INCIDENT_ID = new QueryPropertyImpl("CAUSE_INCIDENT_ID_");
  QueryProperty ROOT_CAUSE_INCIDENT_ID = new QueryPropertyImpl("ROOT_CAUSE_INCIDENT_ID_");
  QueryProperty HISTORY_CONFIGURATION = new QueryPropertyImpl("HISTORY_CONFIGURATION_");
  QueryProperty CONFIGURATION = new QueryPropertyImpl("CONFIGURATION_");
  QueryProperty TENANT_ID = new QueryPropertyImpl("TENANT_ID_");
  QueryProperty INCIDENT_STATE = new QueryPropertyImpl("INCIDENT_STATE_");

}
langfr/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/engine/impl/HistoricIncidentQueryProperty.java
Java
apache-2.0
2,517
// Copyright David Abrahams 2002. Permission to copy, use, // modify, sell and distribute this software is granted provided this // copyright notice appears in all copies. This software is provided // "as is" without express or implied warranty, and with no claim as // to its suitability for any purpose. #ifndef DECORATED_TYPE_ID_DWA2002517_HPP # define DECORATED_TYPE_ID_DWA2002517_HPP # include <boost/python/type_id.hpp> # include <boost/python/detail/indirect_traits.hpp> # include <boost/type_traits/cv_traits.hpp> namespace boost { namespace python { namespace detail { struct decorated_type_info : totally_ordered<decorated_type_info> { enum decoration { const_ = 0x1, volatile_ = 0x2, reference = 0x4 }; decorated_type_info(type_info, decoration = decoration()); inline bool operator<(decorated_type_info const& rhs) const; inline bool operator==(decorated_type_info const& rhs) const; friend BOOST_PYTHON_DECL std::ostream& operator<<(std::ostream&, decorated_type_info const&); operator type_info const&() const; private: // type typedef type_info base_id_t; private: // data members decoration m_decoration; base_id_t m_base_type; }; template <class T> inline decorated_type_info decorated_type_id(boost::type<T>* = 0) { return decorated_type_info( type_id<T>() , decorated_type_info::decoration( (is_const<T>::value || python::detail::is_reference_to_const<T>::value ? decorated_type_info::const_ : 0) | (is_volatile<T>::value || python::detail::is_reference_to_volatile<T>::value ? decorated_type_info::volatile_ : 0) | (is_reference<T>::value ? 
decorated_type_info::reference : 0) ) ); } inline decorated_type_info::decorated_type_info(type_info base_t, decoration decoration) : m_decoration(decoration) , m_base_type(base_t) { } inline bool decorated_type_info::operator<(decorated_type_info const& rhs) const { return m_decoration < rhs.m_decoration || m_decoration == rhs.m_decoration && m_base_type < rhs.m_base_type; } inline bool decorated_type_info::operator==(decorated_type_info const& rhs) const { return m_decoration == rhs.m_decoration && m_base_type == rhs.m_base_type; } inline decorated_type_info::operator type_info const&() const { return m_base_type; } BOOST_PYTHON_DECL std::ostream& operator<<(std::ostream&, decorated_type_info const&); }}} // namespace boost::python::detail #endif // DECORATED_TYPE_ID_DWA2002517_HPP
OLR-xray/OLR-3.0
src/3rd party/boost/boost/python/detail/decorated_type_id.hpp
C++
apache-2.0
2,562
package vm

// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
//
// NOTE(review): hand edits below are limited to comments for exactly that
// reason — the generator owns this file's structure.

import (
	"time"

	"golang.org/x/net/context"

	"github.com/go-openapi/errors"
	"github.com/go-openapi/runtime"
	cr "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/swag"

	strfmt "github.com/go-openapi/strfmt"
)

// NewFindVmsByDeploymentParams creates a new FindVmsByDeploymentParams object
// with the default values initialized.
func NewFindVmsByDeploymentParams() *FindVmsByDeploymentParams {
	var ()
	return &FindVmsByDeploymentParams{

		timeout: cr.DefaultTimeout,
	}
}

// NewFindVmsByDeploymentParamsWithTimeout creates a new FindVmsByDeploymentParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewFindVmsByDeploymentParamsWithTimeout(timeout time.Duration) *FindVmsByDeploymentParams {
	var ()
	return &FindVmsByDeploymentParams{

		timeout: timeout,
	}
}

// NewFindVmsByDeploymentParamsWithContext creates a new FindVmsByDeploymentParams object
// with the default values initialized, and the ability to set a context for a request
func NewFindVmsByDeploymentParamsWithContext(ctx context.Context) *FindVmsByDeploymentParams {
	var ()
	return &FindVmsByDeploymentParams{

		Context: ctx,
	}
}

/*FindVmsByDeploymentParams contains all the parameters to send to the API endpoint
for the find vms by deployment operation typically these are written to a http.Request
*/
type FindVmsByDeploymentParams struct {

	/*Deployment
	  deployment values that need to be considered for filter

	*/
	Deployment []string

	timeout time.Duration
	Context context.Context
}

// WithTimeout adds the timeout to the find vms by deployment params
func (o *FindVmsByDeploymentParams) WithTimeout(timeout time.Duration) *FindVmsByDeploymentParams {
	o.SetTimeout(timeout)
	return o
}

// SetTimeout adds the timeout to the find vms by deployment params
func (o *FindVmsByDeploymentParams) SetTimeout(timeout time.Duration) {
	o.timeout = timeout
}

// WithContext adds the context to the find vms by deployment params
func (o *FindVmsByDeploymentParams) WithContext(ctx context.Context) *FindVmsByDeploymentParams {
	o.SetContext(ctx)
	return o
}

// SetContext adds the context to the find vms by deployment params
func (o *FindVmsByDeploymentParams) SetContext(ctx context.Context) {
	o.Context = ctx
}

// WithDeployment adds the deployment to the find vms by deployment params
func (o *FindVmsByDeploymentParams) WithDeployment(deployment []string) *FindVmsByDeploymentParams {
	o.SetDeployment(deployment)
	return o
}

// SetDeployment adds the deployment to the find vms by deployment params
func (o *FindVmsByDeploymentParams) SetDeployment(deployment []string) {
	o.Deployment = deployment
}

// WriteToRequest writes these params to a swagger request
func (o *FindVmsByDeploymentParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {

	r.SetTimeout(o.timeout)
	var res []error

	valuesDeployment := o.Deployment

	// "multi" collection format: each deployment value is sent as its own
	// repeated query parameter rather than one delimited string.
	joinedDeployment := swag.JoinByFormat(valuesDeployment, "multi")
	// query array param deployment
	if err := r.SetQueryParam("deployment", joinedDeployment...); err != nil {
		return err
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
mattcui/bosh-softlayer-cpi-release
src/bosh-softlayer-cpi/softlayer/vps_service/client/vm/find_vms_by_deployment_parameters.go
GO
apache-2.0
3,329
/*
 * Licensed to GraphHopper and Peter Karich under one or more contributor
 * license agreements. See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 *
 * GraphHopper licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.graphhopper.routing.util;

import com.graphhopper.routing.VirtualEdgeIteratorState;
import com.graphhopper.util.EdgeIterator;
import com.graphhopper.util.GHUtility;
import com.graphhopper.util.Helper;
import com.graphhopper.util.PMap;
import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Unit tests for the FastestWeighting: minimum-weight consistency, the
 * heading penalty on virtual edges, and zero-speed edge handling.
 *
 * @author Peter Karich
 */
public class FastestWeightingTest
{
    private final FlagEncoder encoder = new EncodingManager("CAR").getEncoder("CAR");

    @Test
    public void testMinWeightHasSameUnitAs_getWeight()
    {
        FastestWeighting instance = new FastestWeighting(encoder);
        // At max speed the minimum weight for a distance must equal the
        // actual calculated weight for an edge of that distance.
        long flags = encoder.setProperties(encoder.getMaxSpeed(), true, true);
        assertEquals(instance.getMinWeight(10), instance.calcWeight(createEdge(10, flags), false, EdgeIterator.NO_EDGE), 1e-8);
    }

    @Test
    public void testWeightWrongHeading()
    {
        // Configure an explicit heading penalty of 100.
        FastestWeighting instance = new FastestWeighting(encoder, new PMap().put("heading_penalty", "100"));

        VirtualEdgeIteratorState virtEdge = new VirtualEdgeIteratorState(0, 1, 1, 2, 10,
                encoder.setProperties(10, true, true), "test", Helper.createPointList(51, 0, 51, 1));
        double time = instance.calcWeight(virtEdge, false, 0);

        // NOTE: the assertions below depend on the exact order of the
        // setVirtualEdgePreference mutations between calls.
        virtEdge.setVirtualEdgePreference(true, false);
        // heading penalty on edge
        assertEquals(time + 100, instance.calcWeight(virtEdge, false, 0), 1e-8);
        // but not in reverse heading
        assertEquals(time, instance.calcWeight(virtEdge, true, 0), 1e-8);
        // only after setting it
        virtEdge.setVirtualEdgePreference(true, true);
        assertEquals(time + 100, instance.calcWeight(virtEdge, true, 0), 1e-8);

        // but not after releasing it
        virtEdge.setVirtualEdgePreference(false, true);
        assertEquals(time, instance.calcWeight(virtEdge, true, 0), 1e-8);

        // test default penalty
        instance = new FastestWeighting(encoder);
        assertEquals(time + FastestWeighting.DEFAULT_HEADING_PENALTY, instance.calcWeight(virtEdge, false, 0), 1e-8);
    }

    @Test
    public void testSpeed0()
    {
        FastestWeighting instance = new FastestWeighting(encoder);
        // A zero-speed edge must yield infinite weight, not NaN.
        assertEquals(1.0 / 0, instance.calcWeight(createEdge(10, encoder.setProperties(0, true, true)), false, EdgeIterator.NO_EDGE), 1e-8);

        // 0 / 0 returns NaN but calcWeight should not return NaN!
        assertEquals(1.0 / 0, instance.calcWeight(createEdge(0, encoder.setProperties(0, true, true)), false, EdgeIterator.NO_EDGE), 1e-8);
    }

    // Builds a minimal stub edge exposing only distance and flags; all other
    // EdgeIterator methods are disabled by the GHUtility base class.
    EdgeIterator createEdge( final double distance, final long flags )
    {
        return new GHUtility.DisabledEdgeIterator()
        {
            @Override
            public double getDistance()
            {
                return distance;
            }

            @Override
            public long getFlags()
            {
                return flags;
            }

            @Override
            public boolean getBoolean( int key, boolean reverse, boolean _default )
            {
                return _default;
            }
        };
    }
}
kod3r/graphhopper
core/src/test/java/com/graphhopper/routing/util/FastestWeightingTest.java
Java
apache-2.0
3,923
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow.worker.fn.control; import static org.apache.beam.runners.dataflow.worker.fn.control.RegisterAndProcessBundleOperation.encodeAndConcat; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertSame; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import org.apache.beam.model.fnexecution.v1.BeamFnApi; import 
org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest; import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest.RequestCase; import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionResponse; import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleDescriptor; import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleProgressResponse; import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateAppendRequest; import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateClearRequest; import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateGetRequest; import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateKey; import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest; import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateResponse; import org.apache.beam.runners.core.InMemoryMultimapSideInputView; import org.apache.beam.runners.core.InMemoryStateInternals; import org.apache.beam.runners.core.SideInputReader; import org.apache.beam.runners.dataflow.worker.DataflowExecutionContext.DataflowStepContext; import org.apache.beam.runners.dataflow.worker.DataflowPortabilityPCollectionView; import org.apache.beam.runners.dataflow.worker.util.common.worker.OperationContext; import org.apache.beam.runners.fnexecution.control.InstructionRequestHandler; import org.apache.beam.runners.fnexecution.state.StateDelegator; import org.apache.beam.runners.fnexecution.state.StateRequestHandler; import org.apache.beam.sdk.coders.ByteArrayCoder; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.fn.IdGenerator; import org.apache.beam.sdk.fn.IdGenerators; import org.apache.beam.sdk.transforms.windowing.BoundedWindow; import org.apache.beam.sdk.transforms.windowing.GlobalWindow; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.util.MoreFutures; import org.apache.beam.sdk.util.ThrowingRunnable; import 
org.apache.beam.sdk.util.WindowedValue.FullWindowedValueCoder; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollectionView; import org.apache.beam.sdk.values.TupleTag; import org.apache.beam.sdk.values.ValueInSingleWindow.Coder; import org.apache.beam.vendor.grpc.v1p36p0.com.google.protobuf.ByteString; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableTable; import org.checkerframework.checker.nullness.qual.Nullable; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; /** Tests for {@link RegisterAndProcessBundleOperation}. 
*/ @RunWith(JUnit4.class) @SuppressWarnings({ "rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556) "FutureReturnValueIgnored", }) public class RegisterAndProcessBundleOperationTest { private static final BeamFnApi.RegisterRequest REGISTER_REQUEST = BeamFnApi.RegisterRequest.newBuilder() .addProcessBundleDescriptor(BeamFnApi.ProcessBundleDescriptor.newBuilder().setId("555")) .build(); @Mock private OperationContext mockContext; @Mock private StateDelegator mockBeamFnStateDelegator; @Captor private ArgumentCaptor<StateRequestHandler> stateHandlerCaptor; private AtomicInteger stateServiceRegisterCounter; private AtomicInteger stateServiceDeregisterCounter; private AtomicInteger stateServiceAbortCounter; @Before public void setUp() { MockitoAnnotations.initMocks(this); stateServiceRegisterCounter = new AtomicInteger(); stateServiceDeregisterCounter = new AtomicInteger(); stateServiceAbortCounter = new AtomicInteger(); when(mockBeamFnStateDelegator.registerForProcessBundleInstructionId( any(String.class), any(StateRequestHandler.class))) .thenAnswer( new Answer<StateDelegator.Registration>() { @Override public StateDelegator.Registration answer(InvocationOnMock invocationOnMock) throws Throwable { stateServiceRegisterCounter.incrementAndGet(); return new StateDelegator.Registration() { @Override public void deregister() { stateServiceDeregisterCounter.incrementAndGet(); } @Override public void abort() { stateServiceAbortCounter.incrementAndGet(); } }; } }); } private IdGenerator makeIdGeneratorStartingFrom(long initialValue) { return new IdGenerator() { AtomicLong longs = new AtomicLong(initialValue); @Override public String getId() { return Long.toString(longs.getAndIncrement()); } }; } @Test public void testSupportsRestart() { new RegisterAndProcessBundleOperation( IdGenerators.decrementingLongs(), new TestInstructionRequestHandler() { @Override public CompletionStage<InstructionResponse> handle(InstructionRequest request) { 
CompletableFuture<InstructionResponse> responseFuture = new CompletableFuture<>(); completeFuture(request, responseFuture); return responseFuture; } }, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of(), ImmutableTable.of(), ImmutableMap.of(), mockContext) .supportsRestart(); } @Test public void testRegisterOnlyOnFirstBundle() throws Exception { List<BeamFnApi.InstructionRequest> requests = new ArrayList<>(); IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L); RegisterAndProcessBundleOperation operation = new RegisterAndProcessBundleOperation( idGenerator, new TestInstructionRequestHandler() { @Override public CompletionStage<InstructionResponse> handle(InstructionRequest request) { requests.add(request); switch (request.getRequestCase()) { case REGISTER: case PROCESS_BUNDLE: return CompletableFuture.completedFuture(responseFor(request).build()); default: // block forever on other requests return new CompletableFuture<>(); } } }, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of(), ImmutableTable.of(), ImmutableMap.of(), mockContext); // Ensure that the first time we start we send the register and process bundle requests assertThat(requests, empty()); operation.start(); assertEquals( requests.get(0), BeamFnApi.InstructionRequest.newBuilder() .setInstructionId("777") .setRegister(REGISTER_REQUEST) .build()); assertEquals( requests.get(1), BeamFnApi.InstructionRequest.newBuilder() .setInstructionId("778") .setProcessBundle( BeamFnApi.ProcessBundleRequest.newBuilder().setProcessBundleDescriptorId("555")) .build()); operation.finish(); // Ensure on restart that we only send the process bundle request operation.start(); assertEquals( requests.get(2), BeamFnApi.InstructionRequest.newBuilder() .setInstructionId("779") .setProcessBundle( BeamFnApi.ProcessBundleRequest.newBuilder().setProcessBundleDescriptorId("555")) .build()); operation.finish(); } @Test public void 
testProcessingBundleBlocksOnFinish() throws Exception { List<BeamFnApi.InstructionRequest> requests = new ArrayList<>(); IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L); ExecutorService executorService = Executors.newCachedThreadPool(); RegisterAndProcessBundleOperation operation = new RegisterAndProcessBundleOperation( idGenerator, new TestInstructionRequestHandler() { @Override public CompletionStage<InstructionResponse> handle(InstructionRequest request) { requests.add(request); switch (request.getRequestCase()) { case REGISTER: return CompletableFuture.completedFuture(responseFor(request).build()); case PROCESS_BUNDLE: CompletableFuture<InstructionResponse> responseFuture = new CompletableFuture<>(); executorService.submit( () -> { // Purposefully sleep simulating SDK harness doing work Thread.sleep(100); responseFuture.complete(responseFor(request).build()); completeFuture(request, responseFuture); return null; }); return responseFuture; default: // Anything else hangs; nothing else should be blocking return new CompletableFuture<>(); } } }, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of(), ImmutableTable.of(), ImmutableMap.of(), mockContext); operation.start(); // This method blocks till the requests are completed operation.finish(); // Ensure that the messages were received assertEquals( requests.get(0), BeamFnApi.InstructionRequest.newBuilder() .setInstructionId("777") .setRegister(REGISTER_REQUEST) .build()); assertEquals( requests.get(1), BeamFnApi.InstructionRequest.newBuilder() .setInstructionId("778") .setProcessBundle( BeamFnApi.ProcessBundleRequest.newBuilder().setProcessBundleDescriptorId("555")) .build()); } @Test public void testProcessingBundleHandlesUserStateRequests() throws Exception { IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L); ExecutorService executorService = Executors.newCachedThreadPool(); InMemoryStateInternals<ByteString> stateInternals = 
InMemoryStateInternals.forKey(ByteString.EMPTY); DataflowStepContext mockStepContext = mock(DataflowStepContext.class); DataflowStepContext mockUserStepContext = mock(DataflowStepContext.class); when(mockStepContext.namespacedToUser()).thenReturn(mockUserStepContext); when(mockUserStepContext.stateInternals()).thenReturn(stateInternals); InstructionRequestHandler instructionRequestHandler = new TestInstructionRequestHandler() { @Override public CompletionStage<InstructionResponse> handle(InstructionRequest request) { switch (request.getRequestCase()) { case REGISTER: return CompletableFuture.completedFuture(responseFor(request).build()); case PROCESS_BUNDLE: return MoreFutures.supplyAsync( () -> { StateRequest partialRequest = StateRequest.newBuilder() .setStateKey( StateKey.newBuilder() .setBagUserState( StateKey.BagUserState.newBuilder() .setTransformId("testPTransformId") .setWindow(ByteString.EMPTY) .setUserStateId("testUserStateId"))) .buildPartial(); StateRequest get = partialRequest .toBuilder() .setGet(StateGetRequest.getDefaultInstance()) .build(); StateRequest clear = partialRequest .toBuilder() .setClear(StateClearRequest.getDefaultInstance()) .build(); StateRequest append = partialRequest .toBuilder() .setAppend( StateAppendRequest.newBuilder() .setData(ByteString.copyFromUtf8("ABC"))) .build(); StateRequestHandler stateHandler = stateHandlerCaptor.getValue(); StateResponse.Builder getWhenEmptyResponse = MoreFutures.get(stateHandler.handle(get)); assertEquals(ByteString.EMPTY, getWhenEmptyResponse.getGet().getData()); StateResponse.Builder appendWhenEmptyResponse = MoreFutures.get(stateHandler.handle(append)); assertNotNull(appendWhenEmptyResponse); StateResponse.Builder appendWhenEmptyResponse2 = MoreFutures.get(stateHandler.handle(append)); assertNotNull(appendWhenEmptyResponse2); StateResponse.Builder getWhenHasValueResponse = MoreFutures.get(stateHandler.handle(get)); assertEquals( 
ByteString.copyFromUtf8("ABC").concat(ByteString.copyFromUtf8("ABC")), getWhenHasValueResponse.getGet().getData()); StateResponse.Builder clearResponse = MoreFutures.get(stateHandler.handle(clear)); assertNotNull(clearResponse); return responseFor(request).build(); }); default: // block forever return new CompletableFuture<>(); } } }; RegisterAndProcessBundleOperation operation = new RegisterAndProcessBundleOperation( idGenerator, instructionRequestHandler, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of("testPTransformId", mockStepContext), ImmutableMap.of(), ImmutableTable.of(), ImmutableMap.of(), mockContext); operation.start(); verify(mockBeamFnStateDelegator) .registerForProcessBundleInstructionId(eq("778"), stateHandlerCaptor.capture()); // This method blocks till the requests are completed operation.finish(); // Ensure that the number of reigstrations matches the number of deregistrations assertEquals(stateServiceRegisterCounter.get(), stateServiceDeregisterCounter.get()); assertEquals(0, stateServiceAbortCounter.get()); } @Test public void testProcessingBundleHandlesMultimapSideInputRequests() throws Exception { IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L); ExecutorService executorService = Executors.newCachedThreadPool(); DataflowStepContext mockStepContext = mock(DataflowStepContext.class); DataflowStepContext mockUserStepContext = mock(DataflowStepContext.class); when(mockStepContext.namespacedToUser()).thenReturn(mockUserStepContext); CountDownLatch waitForStateHandler = new CountDownLatch(1); // Issues state calls to the Runner after a process bundle request is sent. 
InstructionRequestHandler fakeClient = new TestInstructionRequestHandler() { @Override public CompletionStage<InstructionResponse> handle(InstructionRequest request) { switch (request.getRequestCase()) { case REGISTER: return CompletableFuture.completedFuture(responseFor(request).build()); case PROCESS_BUNDLE: return MoreFutures.supplyAsync( () -> { StateKey getKey = StateKey.newBuilder() .setMultimapSideInput( StateKey.MultimapSideInput.newBuilder() .setTransformId("testPTransformId") .setSideInputId("testSideInputId") .setWindow( ByteString.copyFrom( CoderUtils.encodeToByteArray( GlobalWindow.Coder.INSTANCE, GlobalWindow.INSTANCE))) .setKey( ByteString.copyFrom( CoderUtils.encodeToByteArray( ByteArrayCoder.of(), "ABC".getBytes(StandardCharsets.UTF_8), Coder.Context.NESTED)))) .build(); StateRequest getRequest = StateRequest.newBuilder() .setStateKey(getKey) .setGet(StateGetRequest.getDefaultInstance()) .build(); waitForStateHandler.await(); StateRequestHandler stateHandler = stateHandlerCaptor.getValue(); StateResponse.Builder getResponse = MoreFutures.get(stateHandler.handle(getRequest)); assertEquals( encodeAndConcat(Arrays.asList("X", "Y", "Z"), StringUtf8Coder.of()), getResponse.getGet().getData()); return responseFor(request).build(); }); default: // block forever on other request types return new CompletableFuture<>(); } } }; SideInputReader fakeSideInputReader = new SideInputReader() { @Override public <T> @Nullable T get(PCollectionView<T> view, BoundedWindow window) { assertEquals(GlobalWindow.INSTANCE, window); assertEquals("testSideInputId", view.getTagInternal().getId()); return (T) InMemoryMultimapSideInputView.fromIterable( ByteArrayCoder.of(), ImmutableList.of( KV.of("ABC".getBytes(StandardCharsets.UTF_8), "X"), KV.of("ABC".getBytes(StandardCharsets.UTF_8), "Y"), KV.of("ABC".getBytes(StandardCharsets.UTF_8), "Z"))); } @Override public <T> boolean contains(PCollectionView<T> view) { return "testSideInputId".equals(view.getTagInternal().getId()); } 
@Override public boolean isEmpty() { return false; } }; RegisterAndProcessBundleOperation operation = new RegisterAndProcessBundleOperation( idGenerator, fakeClient, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of("testPTransformId", mockStepContext), ImmutableMap.of("testPTransformId", fakeSideInputReader), ImmutableTable.of( "testPTransformId", "testSideInputId", DataflowPortabilityPCollectionView.with( new TupleTag<>("testSideInputId"), FullWindowedValueCoder.of( KvCoder.of(ByteArrayCoder.of(), StringUtf8Coder.of()), GlobalWindow.Coder.INSTANCE))), ImmutableMap.of(), mockContext); operation.start(); verify(mockBeamFnStateDelegator) .registerForProcessBundleInstructionId(eq("778"), stateHandlerCaptor.capture()); waitForStateHandler.countDown(); // This method blocks till the requests are completed operation.finish(); // Ensure that the number of reigstrations matches the number of deregistrations assertEquals(stateServiceRegisterCounter.get(), stateServiceDeregisterCounter.get()); assertEquals(0, stateServiceAbortCounter.get()); } @Test public void testAbortCancelsAndCleansUpDuringRegister() throws Exception { IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L); ExecutorService executorService = Executors.newCachedThreadPool(); CountDownLatch waitForAbortToComplete = new CountDownLatch(1); AtomicReference<ThrowingRunnable> abortReference = new AtomicReference<>(); RegisterAndProcessBundleOperation operation = new RegisterAndProcessBundleOperation( idGenerator, new TestInstructionRequestHandler() { @Override public CompletionStage<InstructionResponse> handle(InstructionRequest request) { CompletableFuture<InstructionResponse> responseFuture = new CompletableFuture<>(); if (request.getRequestCase() == RequestCase.PROCESS_BUNDLE) { executorService.submit( (Callable<Void>) () -> { abortReference.get().run(); waitForAbortToComplete.countDown(); return null; }); } else { completeFuture(request, responseFuture); } return 
responseFuture; } }, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of(), ImmutableTable.of(), ImmutableMap.of(), mockContext); abortReference.set(operation::abort); operation.start(); waitForAbortToComplete.await(); // Ensure that the number of registrations matches the number of aborts assertEquals(stateServiceRegisterCounter.get(), stateServiceAbortCounter.get()); assertEquals(0, stateServiceDeregisterCounter.get()); } @Test public void testAbortCancelsAndCleansUpDuringProcessBundle() throws Exception { IdGenerator idGenerator = makeIdGeneratorStartingFrom(777L); ExecutorService executorService = Executors.newCachedThreadPool(); CountDownLatch waitForAbortToComplete = new CountDownLatch(1); AtomicReference<ThrowingRunnable> abortReference = new AtomicReference<>(); RegisterAndProcessBundleOperation operation = new RegisterAndProcessBundleOperation( idGenerator, new TestInstructionRequestHandler() { @Override public CompletionStage<InstructionResponse> handle(InstructionRequest request) { CompletableFuture<InstructionResponse> responseFuture = new CompletableFuture<>(); if (request.getRequestCase() == RequestCase.PROCESS_BUNDLE) { executorService.submit( (Callable<Void>) () -> { abortReference.get().run(); waitForAbortToComplete.countDown(); return null; }); } else { completeFuture(request, responseFuture); } return responseFuture; } }, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of(), ImmutableTable.of(), ImmutableMap.of(), mockContext); abortReference.set(operation::abort); operation.start(); waitForAbortToComplete.await(); // Ensure that the number of registrations matches the number of aborts assertEquals(stateServiceRegisterCounter.get(), stateServiceAbortCounter.get()); assertEquals(0, stateServiceDeregisterCounter.get()); } private InstructionResponse.Builder responseFor(BeamFnApi.InstructionRequest request) { BeamFnApi.InstructionResponse.Builder response = 
BeamFnApi.InstructionResponse.newBuilder().setInstructionId(request.getInstructionId()); if (request.hasRegister()) { response.setRegister(BeamFnApi.RegisterResponse.getDefaultInstance()); } else if (request.hasProcessBundle()) { response.setProcessBundle(BeamFnApi.ProcessBundleResponse.getDefaultInstance()); } else if (request.hasFinalizeBundle()) { response.setFinalizeBundle(BeamFnApi.FinalizeBundleResponse.getDefaultInstance()); } else if (request.hasProcessBundleProgress()) { response.setProcessBundleProgress( BeamFnApi.ProcessBundleProgressResponse.getDefaultInstance()); } else if (request.hasProcessBundleSplit()) { response.setProcessBundleSplit(BeamFnApi.ProcessBundleSplitResponse.getDefaultInstance()); } return response; } private void completeFuture( BeamFnApi.InstructionRequest request, CompletableFuture<InstructionResponse> response) { response.complete(responseFor(request).build()); } @Test public void testGetProcessBundleProgressReturnsDefaultInstanceIfNoBundleIdCached() throws Exception { InstructionRequestHandler mockInstructionRequestHandler = mock(InstructionRequestHandler.class); RegisterAndProcessBundleOperation operation = new RegisterAndProcessBundleOperation( IdGenerators.decrementingLongs(), mockInstructionRequestHandler, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of(), ImmutableTable.of(), ImmutableMap.of(), mockContext); assertEquals( ProcessBundleProgressResponse.getDefaultInstance(), MoreFutures.get(operation.getProcessBundleProgress())); } @Test public void testGetProcessBundleProgressFetchesProgressResponseWhenBundleIdCached() throws Exception { InstructionRequestHandler mockInstructionRequestHandler = mock(InstructionRequestHandler.class); RegisterAndProcessBundleOperation operation = new RegisterAndProcessBundleOperation( IdGenerators.decrementingLongs(), mockInstructionRequestHandler, mockBeamFnStateDelegator, REGISTER_REQUEST, ImmutableMap.of(), ImmutableMap.of(), 
ImmutableMap.of(), ImmutableTable.of(), ImmutableMap.of(), mockContext); operation.getProcessBundleInstructionId(); // this generates and caches bundleId ProcessBundleProgressResponse expectedResult = ProcessBundleProgressResponse.newBuilder().build(); InstructionResponse instructionResponse = InstructionResponse.newBuilder().setProcessBundleProgress(expectedResult).build(); CompletableFuture resultFuture = CompletableFuture.completedFuture(instructionResponse); when(mockInstructionRequestHandler.handle(any())).thenReturn(resultFuture); final ProcessBundleProgressResponse result = MoreFutures.get(operation.getProcessBundleProgress()); assertSame("Return value from mockInstructionRequestHandler", expectedResult, result); } private abstract static class TestInstructionRequestHandler implements InstructionRequestHandler { @Override public void registerProcessBundleDescriptor(ProcessBundleDescriptor descriptor) {} @Override public void close() {} } }
lukecwik/incubator-beam
runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperationTest.java
Java
apache-2.0
30,668
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Renders the vis.js timelines on the Spark UI jobs/stages pages.
// NOTE(review): relies on page-level globals `vis`, `$` (jQuery), `uiRoot`
// and `appBasePath` being defined before these functions run — confirm
// against the templates that include this script.

// Draws the per-job timeline on the "All Jobs" page and wires up
// click/hover handlers plus the expand/collapse toggle.
// groupArray/eventObjArray: vis.js group/item descriptors built server-side;
// startTime: initial left edge of the window; offset: UTC offset (minutes)
// used to render timestamps in the application's timezone.
function drawApplicationTimeline(groupArray, eventObjArray, startTime, offset) {
  var groups = new vis.DataSet(groupArray);
  var items = new vis.DataSet(eventObjArray);
  var container = $("#application-timeline")[0];
  var options = {
    // Order lanes by the numeric `value` each group carries.
    groupOrder: function(a, b) {
      return a.value - b.value
    },
    editable: false,
    align: 'left',
    showCurrentTime: false,
    start: startTime,
    zoomable: false,
    locale: "en",
    // Render all dates with the application's UTC offset, not the browser's.
    moment: function (date) {
      return vis.moment(date).utcOffset(offset);
    }
  };

  var applicationTimeline = new vis.Timeline(container);
  applicationTimeline.setOptions(options);
  applicationTimeline.setGroups(groups);
  applicationTimeline.setItems(items);

  setupZoomable("#application-timeline-zoom-lock", applicationTimeline);
  setupExecutorEventAction();

  // Extracts the job id from an entry's "(Job N)" label suffix.
  function getIdForJobEntry(baseElem) {
    var jobIdText = $($(baseElem).find(".application-timeline-content")[0]).text();
    var jobId = jobIdText.match("\\(Job (\\d+)\\)$")[1];
    return jobId;
  }

  // CSS selector for the job's row in the summary table below the timeline.
  function getSelectorForJobEntry(jobId) {
    return "#job-" + jobId;
  }

  // Click navigates to the job detail page; hover highlights the matching
  // table row and shows/hides the entry's tooltip.
  function setupJobEventAction() {
    $(".vis-item.vis-range.job.application-timeline-object").each(function() {
      $(this).click(function() {
        var jobId = getIdForJobEntry(this);
        var jobPagePath = uiRoot + appBasePath + "/jobs/job/?id=" + jobId;
        window.location.href = jobPagePath;
      });

      $(this).hover(
        function() {
          $(getSelectorForJobEntry(getIdForJobEntry(this))).addClass("corresponding-item-hover");
          $($(this).find("div.application-timeline-content")[0]).tooltip("show");
        },
        function() {
          $(getSelectorForJobEntry(getIdForJobEntry(this))).removeClass("corresponding-item-hover");
          $($(this).find("div.application-timeline-content")[0]).tooltip("hide");
        }
      );
    });
  }

  setupJobEventAction();

  // Expand/collapse toggle; the open/closed state is persisted in
  // localStorage so it survives page reloads.
  $("span.expand-application-timeline").click(function() {
    var status = window.localStorage.getItem("expand-application-timeline") == "true";
    status = !status;

    $("#application-timeline").toggleClass('collapsed');

    var visibilityState = status ? "" : "none";
    $("#application-timeline").css("display", visibilityState);

    // Switch the class of the arrow from open to closed.
    $(this).find('.expand-application-timeline-arrow').toggleClass('arrow-open');
    $(this).find('.expand-application-timeline-arrow').toggleClass('arrow-closed');

    window.localStorage.setItem("expand-application-timeline", "" + status);
  });
}

// On page load, restore the persisted expanded/collapsed state of the
// application timeline.
$(function () {
  if ($("span.expand-application-timeline").length &&
      window.localStorage.getItem("expand-application-timeline") == "true") {
    // Set it to false so that the click function can revert it
    window.localStorage.setItem("expand-application-timeline", "false");
    $("span.expand-application-timeline").trigger('click');
  } else {
    $("#application-timeline").css("display", "none");
  }
});

// Draws the per-stage timeline on a single job's page. Mirrors
// drawApplicationTimeline but entries are stages ("Stage N.M" labels,
// id + attempt number) instead of jobs.
function drawJobTimeline(groupArray, eventObjArray, startTime, offset) {
  var groups = new vis.DataSet(groupArray);
  var items = new vis.DataSet(eventObjArray);
  var container = $('#job-timeline')[0];
  var options = {
    groupOrder: function(a, b) {
      return a.value - b.value;
    },
    editable: false,
    align: 'left',
    showCurrentTime: false,
    start: startTime,
    zoomable: false,
    locale: "en",
    moment: function (date) {
      return vis.moment(date).utcOffset(offset);
    }
  };

  var jobTimeline = new vis.Timeline(container);
  jobTimeline.setOptions(options);
  jobTimeline.setGroups(groups);
  jobTimeline.setItems(items);

  setupZoomable("#job-timeline-zoom-lock", jobTimeline);
  setupExecutorEventAction();

  // Returns [stageId, attemptId] parsed from the "(Stage N.M)" label suffix.
  function getStageIdAndAttemptForStageEntry(baseElem) {
    var stageIdText = $($(baseElem).find(".job-timeline-content")[0]).text();
    var stageIdAndAttempt = stageIdText.match("\\(Stage (\\d+\\.\\d+)\\)$")[1].split(".");
    return stageIdAndAttempt;
  }

  // CSS selector for the stage's row in the stage summary table.
  function getSelectorForStageEntry(stageIdAndAttempt) {
    return "#stage-" + stageIdAndAttempt[0] + "-" + stageIdAndAttempt[1];
  }

  // Click navigates to the stage detail page; hover highlights the matching
  // table row and toggles the entry's tooltip.
  function setupStageEventAction() {
    $(".vis-item.vis-range.stage.job-timeline-object").each(function() {
      $(this).click(function() {
        var stageIdAndAttempt = getStageIdAndAttemptForStageEntry(this);
        var stagePagePath = uiRoot + appBasePath +
          "/stages/stage/?id=" + stageIdAndAttempt[0] + "&attempt=" + stageIdAndAttempt[1];
        window.location.href = stagePagePath;
      });

      $(this).hover(
        function() {
          $(getSelectorForStageEntry(getStageIdAndAttemptForStageEntry(this)))
            .addClass("corresponding-item-hover");
          $($(this).find("div.job-timeline-content")[0]).tooltip("show");
        },
        function() {
          $(getSelectorForStageEntry(getStageIdAndAttemptForStageEntry(this)))
            .removeClass("corresponding-item-hover");
          $($(this).find("div.job-timeline-content")[0]).tooltip("hide");
        }
      );
    });
  }

  setupStageEventAction();

  // Expand/collapse toggle, persisted in localStorage (same pattern as the
  // application timeline above).
  $("span.expand-job-timeline").click(function() {
    var status = window.localStorage.getItem("expand-job-timeline") == "true";
    status = !status;

    $("#job-timeline").toggleClass('collapsed');

    var visibilityState = status ? "" : "none";
    $("#job-timeline").css("display", visibilityState);

    // Switch the class of the arrow from open to closed.
    $(this).find('.expand-job-timeline-arrow').toggleClass('arrow-open');
    $(this).find('.expand-job-timeline-arrow').toggleClass('arrow-closed');

    window.localStorage.setItem("expand-job-timeline", "" + status);
  });
}

// On page load, restore the persisted expanded/collapsed state of the
// job timeline.
$(function () {
  if ($("span.expand-job-timeline").length &&
      window.localStorage.getItem("expand-job-timeline") == "true") {
    // Set it to false so that the click function can revert it
    window.localStorage.setItem("expand-job-timeline", "false");
    $("span.expand-job-timeline").trigger('click');
  } else {
    $("#job-timeline").css("display", "none");
  }
});

// Draws the task-assignment timeline on a stage page. Unlike the other two
// timelines this one also takes an end time (maxFinishTime) and disables
// item selection.
function drawTaskAssignmentTimeline(groupArray, eventObjArray, minLaunchTime, maxFinishTime, offset) {
  var groups = new vis.DataSet(groupArray);
  var items = new vis.DataSet(eventObjArray);
  var container = $("#task-assignment-timeline")[0];
  var options = {
    groupOrder: function(a, b) {
      return a.value - b.value
    },
    editable: false,
    align: 'left',
    selectable: false,
    showCurrentTime: false,
    start: minLaunchTime,
    end: maxFinishTime,
    zoomable: false,
    locale: "en",
    moment: function (date) {
      return vis.moment(date).utcOffset(offset);
    }
  };

  var taskTimeline = new vis.Timeline(container);
  taskTimeline.setOptions(options);
  taskTimeline.setGroups(groups);
  taskTimeline.setItems(items);

  // If a user zooms while a tooltip is displayed, the user may zoom such that the cursor is no
  // longer over the task that the tooltip corresponds to. So, when a user zooms, we should hide
  // any currently displayed tooltips.
  var currentDisplayedTooltip = null;
  $("#task-assignment-timeline").on({
    "mouseenter": function() {
      currentDisplayedTooltip = this;
    },
    "mouseleave": function() {
      currentDisplayedTooltip = null;
    }
  }, ".task-assignment-timeline-content");
  taskTimeline.on("rangechange", function(prop) {
    if (currentDisplayedTooltip !== null) {
      $(currentDisplayedTooltip).tooltip("hide");
    }
  });

  setupZoomable("#task-assignment-timeline-zoom-lock", taskTimeline);

  // Expand/collapse toggle, persisted in localStorage.
  $("span.expand-task-assignment-timeline").click(function() {
    var status = window.localStorage.getItem("expand-task-assignment-timeline") == "true";
    status = !status;

    $("#task-assignment-timeline").toggleClass("collapsed");

    var visibilityState = status ? "" : "none";
    $("#task-assignment-timeline").css("display", visibilityState);

    // Switch the class of the arrow from open to closed.
    $(this).find(".expand-task-assignment-timeline-arrow").toggleClass("arrow-open");
    $(this).find(".expand-task-assignment-timeline-arrow").toggleClass("arrow-closed");

    window.localStorage.setItem("expand-task-assignment-timeline", "" + status);
  });
}

// On page load, restore the persisted expanded/collapsed state of the
// task-assignment timeline.
$(function () {
  if ($("span.expand-task-assignment-timeline").length &&
      window.localStorage.getItem("expand-task-assignment-timeline") == "true") {
    // Set it to false so that the click function can revert it
    window.localStorage.setItem("expand-task-assignment-timeline", "false");
    $("span.expand-task-assignment-timeline").trigger('click');
  } else {
    $("#task-assignment-timeline").css("display", "none");
  }
});

// Shows/hides the tooltip of executor added/removed markers on hover.
function setupExecutorEventAction() {
  $(".vis-item.vis-box.executor").each(function () {
    $(this).hover(
      function() {
        $($(this).find(".executor-event-content")[0]).tooltip("show");
      },
      function() {
        $($(this).find(".executor-event-content")[0]).tooltip("hide");
      }
    );
  });
}

// Binds the "zoom lock" checkbox (and its label span) for a timeline:
// checking it enables vis.js zooming, unchecking disables it.
function setupZoomable(id, timeline) {
  $(id + ' > input[type="checkbox"]').click(function() {
    if (this.checked) {
      timeline.setOptions({zoomable: true});
    } else {
      timeline.setOptions({zoomable: false});
    }
  });

  // Clicking the label text toggles the checkbox itself.
  $(id + " > span").click(function() {
    $(this).parent().find('input:checkbox').trigger('click');
  });
}
witgo/spark
core/src/main/resources/org/apache/spark/ui/static/timeline-view.js
JavaScript
apache-2.0
10,145
/* * Copyright (c) 2015. Rick Hightower, Geoff Chandler * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web! */ package io.advantageous.qbit.servlet.servletproto; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.AbstractHandler; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; /** * @author rhightower on 2/12/15. */ public class PrototypeJetty { public static void main(final String... args) throws Exception { Server server = new Server(8080); server.setHandler(new HelloWorld()); server.start(); server.join(); } public static class HelloWorld extends AbstractHandler { public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { response.setContentType("text/html;charset=utf-8"); response.setStatus(HttpServletResponse.SC_OK); baseRequest.setHandled(true); response.getWriter().println("<h1>Hello World</h1>"); } } }
bsa01/qbit
qbit/servlet/src/test/java/io/advantageous/qbit/servlet/servletproto/PrototypeJetty.java
Java
apache-2.0
1,905
#
# Cookbook Name:: omnibus
# Recipe:: _packaging
#
# Copyright 2014, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

#
# This recipe is used to install the "packaging" components.
#

case node['platform_family']
when 'debian'
  package 'dpkg-dev'
  package 'ncurses-dev'
  package 'zlib1g-dev'
when 'freebsd'
  package 'ncurses'
when 'mac_os_x'
  # No extra packaging dependencies are required on macOS.
when 'rhel'
  package 'rpm-build'
  package 'ncurses-devel'
  package 'zlib-devel'

  # This script makes unattended rpm signing possible!
  # Fix: the resource previously declared `mode '0755'` twice; the duplicate
  # property has been removed.
  cookbook_file ::File.join(build_user_home, 'sign-rpm') do
    source 'sign-rpm'
    mode '0755'
    owner node['omnibus']['build_user']
    group node['omnibus']['build_user_group']
  end
when 'windows'
  include_recipe '7-zip::default'
  include_recipe 'wix::default'

  # Make the WiX and 7-Zip tools resolvable on the build PATH.
  omnibus_env['PATH'] << node['wix']['home']
  omnibus_env['PATH'] << node['7-zip']['home']
end
sun16/omnibus
recipes/_packaging.rb
Ruby
apache-2.0
1,390
/*
 * Copyright 2007 Yusuke Yamamoto
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package twitter4j;

/**
 * A data class representing geo location.
 *
 * @author Yusuke Yamamoto - yusuke at mac.com
 */
public class GeoLocation implements java.io.Serializable {

    protected double latitude;
    protected double longitude;
    private static final long serialVersionUID = -4847567157651889935L;

    /**
     * Creates a GeoLocation instance
     *
     * @param latitude  the latitude
     * @param longitude the longitude
     */
    public GeoLocation(double latitude, double longitude) {
        this.latitude = latitude;
        this.longitude = longitude;
    }

    /* For serialization purposes only. */
    /* package */ GeoLocation() {
    }

    /**
     * returns the latitude of the geo location
     *
     * @return the latitude
     */
    public double getLatitude() {
        return latitude;
    }

    /**
     * returns the longitude of the geo location
     *
     * @return the longitude
     */
    public double getLongitude() {
        return longitude;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof GeoLocation)) {
            return false;
        }
        GeoLocation other = (GeoLocation) obj;
        // Bit-wise comparison matches Double.compare semantics: NaN == NaN,
        // and -0.0 is distinct from 0.0.
        return Double.doubleToLongBits(latitude) == Double.doubleToLongBits(other.latitude)
                && Double.doubleToLongBits(longitude) == Double.doubleToLongBits(other.longitude);
    }

    @Override
    public int hashCode() {
        long latBits = Double.doubleToLongBits(latitude);
        long lonBits = Double.doubleToLongBits(longitude);
        int hash = (int) (latBits ^ (latBits >>> 32));
        return 31 * hash + (int) (lonBits ^ (lonBits >>> 32));
    }

    @Override
    public String toString() {
        return new StringBuilder("GeoLocation{latitude=")
                .append(latitude)
                .append(", longitude=")
                .append(longitude)
                .append('}')
                .toString();
    }
}
vaglucas/cafeUnoesc
twitter4j-core/src/main/java/twitter4j/GeoLocation.java
Java
apache-2.0
2,496
var _Array$isArray = require("../core-js/array/is-array");

var arrayLikeToArray = require("./arrayLikeToArray");

// Converts `arr` eagerly when it is array-like but not a real array
// (truthy, not an Array, and with a numeric `length`); otherwise defers
// to the `next` fallback. `i`, when defined and below the length, caps
// how many elements are copied.
function _maybeArrayLike(next, arr, i) {
  var isArrayLike = arr && !_Array$isArray(arr) && typeof arr.length === "number";

  if (!isArrayLike) {
    return next(arr, i);
  }

  var len = arr.length;
  var count = i !== void 0 && i < len ? i : len;
  return arrayLikeToArray(arr, count);
}

module.exports = _maybeArrayLike;
BigBoss424/portfolio
v8/development/node_modules/@babel/runtime-corejs3/helpers/maybeArrayLike.js
JavaScript
apache-2.0
386
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.samza.autoscaling.deployer;

import joptsimple.OptionSet;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.samza.autoscaling.utils.YarnUtil;
import org.apache.samza.config.Config;
import org.apache.samza.config.JobConfig;
import org.apache.samza.container.SamzaContainer;
import org.apache.samza.coordinator.stream.messages.CoordinatorStreamMessage;
import org.apache.samza.coordinator.stream.CoordinatorStreamSystemConsumer;
import org.apache.samza.coordinator.stream.messages.SetConfig;
import org.apache.samza.job.JobRunner;
import org.apache.samza.job.model.ContainerModel;
import org.apache.samza.metrics.MetricsRegistryMap;
import org.apache.samza.system.SystemStreamPartitionIterator;
import org.apache.samza.util.CommandLine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

/**
 * This class is a separate module that runs alongside a job, and handles all config changes
 * submitted to the job after its bootstrap. All config changes are written to the coordinator
 * stream using the {@code CoordinatorStreamWriter}.
 * It works by reading all messages with type "set-config" written to the coordinator stream after
 * the bootstrap of the job, and handling the messages accordingly.
 * The current configuration changes it handles are:
 * 1. changing the number of containers of a job
 * 2. setting the server url for the first time (in order to get the JobModel).
 * In order to use this class the run() method should be called to react to changes,
 * or call the start(), processConfigMessages(), and stop() functions instead.
 * Additionally, you have to add the following configurations to the config file:
 * yarn.rm.address=localhost //the ip of the resource manager in yarn
 * yarn.rm.port=8088 //the port of the resource manager http server
 * The config manager will periodically poll the coordinator stream to see if there are any new
 * messages. This period is set to 100 ms by default. However, it can be configured by adding the
 * following property to the input config file.
 * configManager.polling.interval=&lt; polling interval &gt;
 */
public class ConfigManager {
  // Consumer of the job's coordinator stream; iterator tracks the read position.
  private final CoordinatorStreamSystemConsumer coordinatorStreamConsumer;
  private SystemStreamPartitionIterator coordinatorStreamIterator;
  private static final Logger log = LoggerFactory.getLogger(ConfigManager.class);
  private final long defaultPollingInterval = 100;
  private final int defaultReadJobModelDelayMs = 100;
  // Poll period (ms) actually in effect; resolved in the constructor.
  private final long interval;
  // URL of the JobCoordinator's model server; null until the set-config
  // message carrying it is read (see bootstrap()).
  private String coordinatorServerURL = null;
  private final String jobName;
  private final int jobID;
  private Config config;
  private YarnUtil yarnUtil;

  // Config keys this manager reads or reacts to.
  private final String rmAddressOpt = "yarn.rm.address";
  private final String rmPortOpt = "yarn.rm.port";
  private final String pollingIntervalOpt = "configManager.polling.interval";
  private static final String SERVER_URL_OPT = "samza.autoscaling.server.url";
  private static final String YARN_CONTAINER_COUNT_OPT = "yarn.container.count";

  /**
   * Builds a ConfigManager from the job config. Requires the YARN RM address/port
   * and the job name to be present; job id defaults to 1 and the polling
   * interval to 100 ms when not configured.
   *
   * @param config the job configuration
   * @throws IllegalArgumentException if a required key is missing or the
   *         polling interval is not positive
   */
  public ConfigManager(Config config) {

    //get rm address and port
    if (!config.containsKey(rmAddressOpt) || !config.containsKey(rmPortOpt)) {
      throw new IllegalArgumentException("Missing config: the config file does not contain the rm host or port.");
    }
    String rmAddress = config.get(rmAddressOpt);
    int rmPort = config.getInt(rmPortOpt);

    //get job name and id;
    if (!config.containsKey(JobConfig.JOB_NAME())) {
      throw new IllegalArgumentException("Missing config: the config does not contain the job name");
    }
    jobName = config.get(JobConfig.JOB_NAME());
    jobID = config.getInt(JobConfig.JOB_ID(), 1);

    //set polling interval
    if (config.containsKey(pollingIntervalOpt)) {
      long pollingInterval = config.getLong(pollingIntervalOpt);
      if (pollingInterval <= 0) {
        throw new IllegalArgumentException("polling interval cannot be a negative value");
      }
      this.interval = pollingInterval;
    } else {
      this.interval = defaultPollingInterval;
    }

    this.config = config;
    this.coordinatorStreamConsumer = new CoordinatorStreamSystemConsumer(config, new MetricsRegistryMap());
    this.yarnUtil = new YarnUtil(rmAddress, rmPort);
  }

  /**
   * This method is an infinite loop that periodically checks if there are any new messages in the
   * job coordinator stream, and reads them if they exist. Then it reacts accordingly based on the
   * configuration that is being set.
   * It calls start() to initialize the system, runs in an infinite loop, and calls stop() at the
   * end to stop the consumer and the system.
   */
  public void run() {
    start();
    try {
      while (true) {
        Thread.sleep(interval);
        processConfigMessages();
      }
    } catch (InterruptedException e) {
      e.printStackTrace();
      log.warn("Got interrupt in config manager thread, so shutting down");
      // Restore the interrupt flag so callers can observe the interruption.
      Thread.currentThread().interrupt();
    } finally {
      log.info("Stopping the config manager");
      stop();
    }
  }

  /**
   * Starts the system by registering and starting the consumer, then bootstraps
   * to discover the coordinator server URL.
   */
  public void start() {
    register();
    coordinatorStreamConsumer.start();
    coordinatorStreamIterator = coordinatorStreamConsumer.getStartIterator();
    bootstrap();
  }

  /**
   * stops the consumer making the system ready to stop
   */
  public void stop() {
    coordinatorStreamConsumer.stop();
    coordinatorServerURL = null;
    yarnUtil.stop();
  }

  /**
   * registers the consumer
   */
  private void register() {
    coordinatorStreamConsumer.register();
  }

  /**
   * This function will bootstrap by reading all the unread messages until the moment of calling
   * the function, and therefore find the server url.
   */
  private void bootstrap() {
    List<String> keysToProcess = new LinkedList<>();
    keysToProcess.add(SERVER_URL_OPT);
    processConfigMessages(keysToProcess);
    if (coordinatorServerURL == null) {
      throw new IllegalStateException("coordinator server url is null, while the bootstrap has finished ");
    }
    log.info("Config manager bootstrapped");
  }

  /**
   * Reads (and discards) all the unread messages up to the time this function is called.
   * This method just reads the messages, and it does not react to them or change any
   * configuration of the system.
   */
  private void skipUnreadMessages() {
    // An empty key list means every message is read but none is handled.
    processConfigMessages(new LinkedList<String>());
    log.info("Config manager skipped messages");
  }

  /**
   * This function reads all the messages with "set-config" type added to the coordinator stream
   * since the last time the method was invoked
   */
  public void processConfigMessages() {
    List<String> keysToProcess = new LinkedList<>();
    keysToProcess.add(YARN_CONTAINER_COUNT_OPT);
    keysToProcess.add(SERVER_URL_OPT);
    processConfigMessages(keysToProcess);
  }

  /**
   * This function reads all the messages with "set-config" type added to the coordinator stream
   * since the last time the method was invoked
   *
   * @param keysToProcess a list of keys to process. Only messages with these keys will call their
   *                      handler function, and other messages will be skipped. If the list is
   *                      empty all messages will be skipped.
   */
  @SuppressWarnings("unchecked")
  private void processConfigMessages(List<String> keysToProcess) {
    if (!coordinatorStreamConsumer.hasNewMessages(coordinatorStreamIterator)) {
      return;
    }
    if (keysToProcess == null) {
      throw new IllegalArgumentException("The keys to process list is null");
    }

    for (CoordinatorStreamMessage message : coordinatorStreamConsumer.getUnreadMessages(coordinatorStreamIterator, SetConfig.TYPE)) {
      String key = null;
      try {
        SetConfig setConfigMessage = new SetConfig(message);
        key = setConfigMessage.getKey();
        // The payload is a nested map; the actual config value lives under "values" -> "value".
        Map<String, String> valuesMap = (Map<String, String>) setConfigMessage.getMessageMap().get("values");
        String value = null;
        if (valuesMap != null) {
          value = valuesMap.get("value");
        }

        log.debug("Received set-config message with key: " + key + " and value: " + value);

        if (keysToProcess.contains(key)) {
          if (key.equals(YARN_CONTAINER_COUNT_OPT)) {
            handleYarnContainerChange(value);
          } else if (key.equals(SERVER_URL_OPT)) {
            handleServerURLChange(value);
          } else {
            log.info("Setting the " + key + " configuration is currently not supported, skipping the message");
          }
        }

        //TODO: change the handlers to implement a common interface, to make them pluggable
      } catch (Exception e) {
        // Best-effort processing: a malformed or failing message is skipped
        // so one bad message cannot stall the manager.
        log.debug("Error in reading a message, skipping message with key " + key);
      }
    }

  }

  /**
   * This method handle setConfig messages that want to change the url of the server the
   * JobCoordinator has brought up.
   *
   * @param newServerURL the new value of the server URL
   */
  private void handleServerURLChange(String newServerURL) {
    this.coordinatorServerURL = newServerURL;
    log.info("Server URL being set to " + newServerURL);
  }

  /**
   * This method handles setConfig messages that want to change the number of containers of a job.
   * After validation it kills the running YARN application, waits (up to ~10s) for it to reach
   * the KILLED state, and resubmits the job via JobRunner.
   *
   * @param containerCountAsString the new number of containers in a String format
   * @throws IOException   if the YARN REST calls fail
   * @throws YarnException if YARN rejects an operation
   */
  private void handleYarnContainerChange(String containerCountAsString) throws IOException, YarnException {
    String applicationId = yarnUtil.getRunningAppId(jobName, jobID);

    int containerCount = Integer.valueOf(containerCountAsString);

    //checking the input is valid
    int currentNumTask = getCurrentNumTasks();
    int currentNumContainers = getCurrentNumContainers();
    if (containerCount == currentNumContainers) {
      log.error("The new number of containers is equal to the current number of containers, skipping this message");
      return;
    }
    if (containerCount <= 0) {
      log.error("The number of containers cannot be zero or less, skipping this message");
      return;
    }
    if (containerCount > currentNumTask) {
      // A container with no tasks would be idle, so cap at the task count.
      log.error("The number of containers cannot be more than the number of task, skipping this message");
      return;
    }

    //killing the current job
    log.info("Killing the current job");
    yarnUtil.killApplication(applicationId);
    //reset the global variables
    coordinatorServerURL = null;

    try {
      //waiting for the job to be killed
      String state = yarnUtil.getApplicationState(applicationId);
      Thread.sleep(1000);
      int countSleep = 1;

      while (!state.equals("KILLED")) {
        state = yarnUtil.getApplicationState(applicationId);
        log.info("Job kill signal sent, but job not killed yet for " + applicationId + ". Sleeping for another 1000ms");
        Thread.sleep(1000);
        countSleep++;
        if (countSleep > 10) {
          throw new IllegalStateException("Job has not been killed after 10 attempts.");
        }
      }
    } catch (InterruptedException e) {
      e.printStackTrace();
      Thread.currentThread().interrupt();
    }

    log.info("Killed the current job successfully");

    //start the job again
    log.info("Staring the job again");
    // Drop messages that arrived while the job was down so the restarted job
    // does not re-apply them.
    skipUnreadMessages();
    JobRunner jobRunner = new JobRunner(config);
    jobRunner.run(false);

  }

  /**
   * This method returns the number of tasks in the job. It works by querying the server, and
   * getting the job model. Then it extracts the number of tasks from the job model.
   *
   * @return current number of tasks in the job
   */
  public int getCurrentNumTasks() {
    int currentNumTasks = 0;
    for (ContainerModel containerModel : SamzaContainer.readJobModel(coordinatorServerURL, defaultReadJobModelDelayMs).getContainers().values()) {
      currentNumTasks += containerModel.getTasks().size();
    }
    return currentNumTasks;
  }

  /**
   * This method returns the number of containers in the job. It works by querying the server, and
   * getting the job model. Then it extracts the number of containers from the job model.
   *
   * @return current number of containers in the job
   */
  public int getCurrentNumContainers() {
    return SamzaContainer.readJobModel(coordinatorServerURL, defaultReadJobModelDelayMs).getContainers().values().size();
  }

  /**
   * Gets the current value of the server URL that the job coordinator is serving the job model on.
   *
   * @return the current server URL. If null, it means the job has not set the server yet.
   */
  public String getCoordinatorServerURL() {
    return coordinatorServerURL;
  }

  /**
   * Main function for using the Config Manager. The main function starts a Config Manager, and
   * reacts to all messages thereafter.
   * In order for this module to run, you have to add the following configurations to the config file:
   * yarn.rm.address=localhost //the ip of the resource manager in yarn
   * yarn.rm.port=8088 //the port of the resource manager http server
   * Additionally, the config manager will periodically poll the coordinator stream to see if there
   * are any new messages. This period is set to 100 ms by default. However, it can be configured
   * by adding the following property to the input config file.
   * configManager.polling.interval= &lt; polling interval &gt;
   * To run the code use the following command:
   * {path to samza deployment}/samza/bin/run-config-manager.sh  --config-factory={config-factory} --config-path={path to config file of a job}
   *
   * @param args input arguments for running ConfigManager.
   */
  public static void main(String[] args) {
    CommandLine cmdline = new CommandLine();
    OptionSet options = cmdline.parser().parse(args);
    Config config = cmdline.loadConfig(options);
    ConfigManager configManager = new ConfigManager(config);
    configManager.run();
  }

}
bharathkk/samza
samza-autoscaling/src/main/java/org/apache/samza/autoscaling/deployer/ConfigManager.java
Java
apache-2.0
14,812
// Copyright (C) 2015 the V8 project authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- description: Return value after successful match es6id: 21.2.5.6 info: > [...] 5. Let global be ToBoolean(Get(rx, "global")). 6. ReturnIfAbrupt(global). 7. If global is false, then a. Return RegExpExec(rx, S). 21.2.5.2.1 Runtime Semantics: RegExpExec ( R, S ) [...] 7. Return RegExpBuiltinExec(R, S). 21.2.5.2.2 Runtime Semantics: RegExpBuiltinExec ( R, S ) [...] 20. Let A be ArrayCreate(n + 1). [...] 24. Perform CreateDataProperty(A, "index", matchIndex). 25. Perform CreateDataProperty(A, "input", S). 26. Let matchedSubstr be the matched substring (i.e. the portion of S between offset lastIndex inclusive and offset e exclusive). 27. Perform CreateDataProperty(A, "0", matchedSubstr). [...] 29. Return A. features: [Symbol.match] ---*/ var result = /b./[Symbol.match]('abcd'); assert(Array.isArray(result)); assert.sameValue(result.index, 1); assert.sameValue(result.input, 'abcd'); assert.sameValue(result.length, 1); assert.sameValue(result[0], 'bc');
baslr/ArangoDB
3rdParty/V8/V8-5.0.71.39/test/test262/data/test/built-ins/RegExp/prototype/Symbol.match/builtin-success-return-val.js
JavaScript
apache-2.0
1,194
package hrv.band.app.ui.view.adapter;

import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;

import java.text.DecimalFormat;
import java.util.List;

import hrv.band.app.R;
import hrv.band.app.ui.view.fragment.MeasuredParameterFragment;
import hrv.calc.parameter.HRVParameter;

/**
 * Copyright (c) 2017
 * Created by Thomas Czogalik on 19.01.2017
 * <p>
 * This adapter holds the hrv parameters to show in the {@link MeasuredParameterFragment}.
 */
public class ValueAdapter extends BaseAdapter {

    /** The context of activity holding the adapter. */
    private final Context context;

    /** The hrv parameter to display; may be {@code null}. */
    private final List<HRVParameter> parameters;

    /**
     * Row value formatter, created once per adapter instead of on every
     * {@link #getView} call (the adapter is used from the UI thread only,
     * so sharing a DecimalFormat per instance is safe).
     */
    private final DecimalFormat valueFormat = new DecimalFormat("#.##");

    public ValueAdapter(Context context, List<HRVParameter> parameters) {
        this.context = context;
        this.parameters = parameters;
    }

    /**
     * Inflates (or recycles via the ViewHolder pattern) a row view and binds the
     * name, formatted value and unit of the parameter at {@code position}.
     */
    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        ViewHolder holder;
        if (convertView == null) {
            LayoutInflater inflater = (LayoutInflater) context
                    .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            convertView = inflater.inflate(R.layout.measure_list_item, parent, false);

            holder = new ViewHolder();
            holder.descText = (TextView) convertView.findViewById(R.id.measure_value_desc);
            holder.valueText = (TextView) convertView.findViewById(R.id.hrv_value);
            holder.unitText = (TextView) convertView.findViewById(R.id.measure_value_unit);

            convertView.setTag(holder);
        } else {
            holder = (ViewHolder) convertView.getTag();
        }

        if (parameters != null) {
            HRVParameter param = (HRVParameter) getItem(position);
            holder.descText.setText(param.getName());
            holder.valueText.setText(valueFormat.format(param.getValue()));
            holder.unitText.setText(param.getUnit());
        }

        return convertView;
    }

    /** Returns the parameter at {@code i}; only reachable when {@link #getCount()} > 0. */
    @Override
    public Object getItem(int i) {
        return parameters.get(i);
    }

    /**
     * Number of rows to display.
     * <p>
     * Fix: the original dereferenced {@code parameters} unconditionally and threw a
     * NullPointerException for a null list, even though {@link #getView} explicitly
     * tolerates {@code parameters == null}. Returning 0 keeps the two methods
     * consistent and prevents the ListView from ever asking for a row.
     */
    @Override
    public int getCount() {
        return parameters == null ? 0 : parameters.size();
    }

    /** Positions are stable, so the position doubles as the row id. */
    @Override
    public long getItemId(int i) {
        return i;
    }

    /**
     * The ViewHolder of this adapter.
     */
    private static class ViewHolder {
        private TextView descText;
        private TextView valueText;
        private TextView unitText;
    }
}
thomcz/hrv-band
app/src/main/java/hrv/band/app/ui/view/adapter/ValueAdapter.java
Java
apache-2.0
2,601
package gov.hhs.onc.sdcct.utils;

import java.util.TimeZone;

/**
 * Time-unit conversion constants (calendar ratios plus milli/micro/nanosecond
 * multiples) and common {@link TimeZone} handles. Constants-only holder; it is
 * never instantiated.
 */
public final class SdcctDateUtils {
    // Calendar-unit ratios.
    public static final long HOURS_IN_DAY = 24L;
    public static final long MIN_IN_HOUR = 60L;
    public static final long MIN_IN_DAY = HOURS_IN_DAY * MIN_IN_HOUR;
    public static final long SEC_IN_MIN = 60L;
    public static final long SEC_IN_HOUR = SEC_IN_MIN * MIN_IN_HOUR;
    public static final long SEC_IN_DAY = SEC_IN_HOUR * HOURS_IN_DAY;
    // Average Gregorian year (365.2425 days), in seconds.
    public static final long SEC_IN_YEAR = 31556952L;

    // Millisecond multiples.
    public static final long MS_IN_SEC = 1000L;
    public static final long MS_IN_MIN = MS_IN_SEC * SEC_IN_MIN;
    public static final long MS_IN_HOUR = MS_IN_SEC * SEC_IN_HOUR;
    public static final long MS_IN_DAY = MS_IN_SEC * SEC_IN_DAY;
    public static final long MS_IN_YEAR = MS_IN_SEC * SEC_IN_YEAR;

    // Microsecond multiples.
    public static final long US_IN_MS = 1000L;
    public static final long US_IN_SEC = US_IN_MS * MS_IN_SEC;
    public static final long US_IN_MIN = US_IN_MS * MS_IN_MIN;
    public static final long US_IN_HOUR = US_IN_MS * MS_IN_HOUR;
    public static final long US_IN_DAY = US_IN_MS * MS_IN_DAY;
    public static final long US_IN_YEAR = US_IN_MS * MS_IN_YEAR;

    // Nanosecond multiples.
    public static final long NS_IN_US = 1000L;
    public static final long NS_IN_MS = NS_IN_US * US_IN_MS;
    public static final long NS_IN_SEC = NS_IN_US * US_IN_SEC;
    public static final long NS_IN_MIN = NS_IN_US * US_IN_MIN;
    public static final long NS_IN_HOUR = NS_IN_US * US_IN_HOUR;
    public static final long NS_IN_DAY = NS_IN_US * US_IN_DAY;
    public static final long NS_IN_YEAR = NS_IN_US * US_IN_YEAR;

    // Time zone handles.
    public static final TimeZone DEFAULT_TZ = TimeZone.getDefault();

    public static final String UTC_TZ_ID = "UTC";
    public static final String UTC_ZULU_TZ_ID = "Z";
    public static final TimeZone UTC_TZ = TimeZone.getTimeZone(UTC_TZ_ID);

    private SdcctDateUtils() {
    }
}
mkotelba/sdcct
sdcct-core/src/main/java/gov/hhs/onc/sdcct/utils/SdcctDateUtils.java
Java
apache-2.0
1,901
/*
 * Copyright 2012-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.security;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.core.Ordered;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.util.StringUtils;

/**
 * Properties for the security aspects of an application.
 *
 * @author Dave Syer
 */
@ConfigurationProperties(prefix = "security", ignoreUnknownFields = false)
public class SecurityProperties implements SecurityPrerequisite {

	/**
	 * Order before the basic authentication access control provided by Boot. This is a
	 * useful place to put user-defined access rules if you want to override the default
	 * access rules.
	 */
	public static final int ACCESS_OVERRIDE_ORDER = SecurityProperties.BASIC_AUTH_ORDER - 2;

	/**
	 * Order applied to the WebSecurityConfigurerAdapter that is used to configure basic
	 * authentication for application endpoints. If you want to add your own
	 * authentication for all or some of those endpoints the best thing to do is add your
	 * own WebSecurityConfigurerAdapter with lower order.
	 */
	public static final int BASIC_AUTH_ORDER = Ordered.LOWEST_PRECEDENCE - 5;

	/**
	 * Order applied to the WebSecurityConfigurer that ignores standard static resource
	 * paths.
	 */
	public static final int IGNORED_ORDER = Ordered.HIGHEST_PRECEDENCE;

	/**
	 * The default order of Spring Security's Filter
	 */
	public static final int DEFAULT_FILTER_ORDER = 0;

	/**
	 * Enable secure channel for all requests.
	 */
	private boolean requireSsl;

	/**
	 * Enable Cross Site Request Forgery support.
	 */
	// Flip this when session creation is disabled by default
	private boolean enableCsrf = false;

	private Basic basic = new Basic();

	private final Headers headers = new Headers();

	/**
	 * Session creation policy (always, never, if_required, stateless).
	 */
	private SessionCreationPolicy sessions = SessionCreationPolicy.STATELESS;

	/**
	 * Comma-separated list of paths to exclude from the default secured paths.
	 */
	private List<String> ignored = new ArrayList<String>();

	private final User user = new User();

	/**
	 * Security filter chain order.
	 */
	private int filterOrder = DEFAULT_FILTER_ORDER;

	public Headers getHeaders() {
		return this.headers;
	}

	public User getUser() {
		return this.user;
	}

	public SessionCreationPolicy getSessions() {
		return this.sessions;
	}

	public void setSessions(SessionCreationPolicy sessions) {
		this.sessions = sessions;
	}

	public Basic getBasic() {
		return this.basic;
	}

	public void setBasic(Basic basic) {
		this.basic = basic;
	}

	public boolean isRequireSsl() {
		return this.requireSsl;
	}

	public void setRequireSsl(boolean requireSsl) {
		this.requireSsl = requireSsl;
	}

	public boolean isEnableCsrf() {
		return this.enableCsrf;
	}

	public void setEnableCsrf(boolean enableCsrf) {
		this.enableCsrf = enableCsrf;
	}

	public void setIgnored(List<String> ignored) {
		// Defensive copy so later mutation of the caller's list has no effect.
		this.ignored = new ArrayList<String>(ignored);
	}

	public List<String> getIgnored() {
		return this.ignored;
	}

	public int getFilterOrder() {
		return this.filterOrder;
	}

	public void setFilterOrder(int filterOrder) {
		this.filterOrder = filterOrder;
	}

	public static class Headers {

		public static enum HSTS {
			NONE, DOMAIN, ALL
		}

		/**
		 * Enable cross site scripting (XSS) protection.
		 */
		private boolean xss;

		/**
		 * Enable cache control HTTP headers.
		 */
		private boolean cache;

		/**
		 * Enable "X-Frame-Options" header.
		 */
		private boolean frame;

		/**
		 * Enable "X-Content-Type-Options" header.
		 */
		private boolean contentType;

		/**
		 * HTTP Strict Transport Security (HSTS) mode (none, domain, all).
		 */
		private HSTS hsts = HSTS.ALL;

		public boolean isXss() {
			return this.xss;
		}

		public void setXss(boolean xss) {
			this.xss = xss;
		}

		public boolean isCache() {
			return this.cache;
		}

		public void setCache(boolean cache) {
			this.cache = cache;
		}

		public boolean isFrame() {
			return this.frame;
		}

		public void setFrame(boolean frame) {
			this.frame = frame;
		}

		public boolean isContentType() {
			return this.contentType;
		}

		public void setContentType(boolean contentType) {
			this.contentType = contentType;
		}

		public HSTS getHsts() {
			return this.hsts;
		}

		public void setHsts(HSTS hsts) {
			this.hsts = hsts;
		}

	}

	public static class Basic {

		/**
		 * Enable basic authentication.
		 */
		private boolean enabled = true;

		/**
		 * HTTP basic realm name.
		 */
		private String realm = "Spring";

		/**
		 * Comma-separated list of paths to secure.
		 */
		private String[] path = new String[] { "/**" };

		public boolean isEnabled() {
			return this.enabled;
		}

		public void setEnabled(boolean enabled) {
			this.enabled = enabled;
		}

		public String getRealm() {
			return this.realm;
		}

		public void setRealm(String realm) {
			this.realm = realm;
		}

		public String[] getPath() {
			return this.path;
		}

		public void setPath(String... paths) {
			this.path = paths;
		}

	}

	public static class User {

		/**
		 * Default user name.
		 */
		private String name = "user";

		/**
		 * Password for the default user name.
		 */
		private String password = UUID.randomUUID().toString();

		/**
		 * Granted roles for the default user name.
		 */
		private List<String> role = new ArrayList<String>(Arrays.asList("USER"));

		private boolean defaultPassword = true;

		public String getName() {
			return this.name;
		}

		public void setName(String name) {
			this.name = name;
		}

		public String getPassword() {
			return this.password;
		}

		public void setPassword(String password) {
			// Fix: check for a null/empty value FIRST. The previous ordering called
			// password.startsWith("${") before the hasLength(..) guard, so binding a
			// null password threw a NullPointerException instead of being ignored.
			// Unresolved "${...}" placeholders and empty values keep the generated
			// random default password.
			if (!StringUtils.hasLength(password)
					|| (password.startsWith("${") && password.endsWith("}"))) {
				return;
			}
			this.defaultPassword = false;
			this.password = password;
		}

		public List<String> getRole() {
			return this.role;
		}

		public void setRole(List<String> role) {
			this.role = new ArrayList<String>(role);
		}

		public boolean isDefaultPassword() {
			return this.defaultPassword;
		}

	}

}
10045125/spring-boot
spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/security/SecurityProperties.java
Java
apache-2.0
6,846
/*
 * Copyright (C) 2012 - present by Yann Le Tallec.
 * Please see distribution for license.
 */
package com.assylias.jbloomberg;

import com.bloomberglp.blpapi.Event;
import com.bloomberglp.blpapi.EventHandler;
import com.bloomberglp.blpapi.Message;
import com.bloomberglp.blpapi.Session;
import com.bloomberglp.blpapi.SessionOptions;
import java.io.IOException;
import java.util.List;
import mockit.Mock;
import mockit.MockUp;

/**
 * JMockit fake for the Bloomberg {@link Session}: captures the registered
 * {@link EventHandler} and lets tests script the outcome of
 * {@code startAsync()} and {@code openService(String)}.
 */
public class MockSession extends MockUp<Session> {

    private EventHandler handler;
    private Event startAsyncEvent;
    private boolean openServiceOk;

    /** Makes {@code startAsync()} deliver a "SessionStarted" SESSION_STATUS event. */
    public MockSession simulateStartAsyncOk() {
        return withSessionStatusEvent("SessionStarted");
    }

    /** Makes {@code startAsync()} deliver a "SessionStartupFailure" SESSION_STATUS event. */
    public MockSession simulateSessionStartupFailure() {
        return withSessionStatusEvent("SessionStartupFailure");
    }

    /** Makes {@code openService(String)} succeed instead of throwing. */
    public MockSession setOpenServiceOk() {
        this.openServiceOk = true;
        return this;
    }

    /** Builds a SESSION_STATUS event carrying one message of the given type. */
    private MockSession withSessionStatusEvent(String messageType) {
        List<Message> messages = new MockMessageList(messageType).getList();
        this.startAsyncEvent = new MockEvent(Event.EventType.SESSION_STATUS, messages);
        return this;
    }

    @Mock
    public void $init(SessionOptions ignore, EventHandler handler) {
        // Capture the handler so the scripted event can be pushed through it later.
        this.handler = handler;
    }

    @Mock
    public void startAsync() throws IOException {
        // No scripted outcome means the session fails to start.
        if (startAsyncEvent == null) {
            throw new IOException();
        }
        handler.processEvent(startAsyncEvent, this.getMockInstance());
    }

    @Mock
    public boolean openService(String serviceUri) throws IOException {
        if (openServiceOk) {
            return true;
        }
        throw new IOException();
    }
}
nkabir/jBloomberg
src/test/java/com/assylias/jbloomberg/MockSession.java
Java
apache-2.0
1,787
/**
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

// NOTE(review): this file matches the AWS C++ SDK's generated enum-mapper
// layout; behavioral changes are normally made in the code generator, since
// regeneration would overwrite local edits.

#include <aws/codedeploy/model/DeploymentReadyAction.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>

using namespace Aws::Utils;

namespace Aws
{
  namespace CodeDeploy
  {
    namespace Model
    {
      namespace DeploymentReadyActionMapper
      {
        // Name hashes computed once at static-init time; the incoming string's
        // hash is compared against these below.
        static const int CONTINUE_DEPLOYMENT_HASH = HashingUtils::HashString("CONTINUE_DEPLOYMENT");
        static const int STOP_DEPLOYMENT_HASH = HashingUtils::HashString("STOP_DEPLOYMENT");

        // Maps a wire-format name to its DeploymentReadyAction value. Unknown
        // names are recorded in the global overflow container (so the original
        // string can be recovered later) and returned as a hash-valued enum;
        // without a container the result is NOT_SET.
        DeploymentReadyAction GetDeploymentReadyActionForName(const Aws::String& name)
        {
          int hashCode = HashingUtils::HashString(name.c_str());
          if (hashCode == CONTINUE_DEPLOYMENT_HASH)
          {
            return DeploymentReadyAction::CONTINUE_DEPLOYMENT;
          }
          else if (hashCode == STOP_DEPLOYMENT_HASH)
          {
            return DeploymentReadyAction::STOP_DEPLOYMENT;
          }
          EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
          if(overflowContainer)
          {
            overflowContainer->StoreOverflow(hashCode, name);
            return static_cast<DeploymentReadyAction>(hashCode);
          }

          return DeploymentReadyAction::NOT_SET;
        }

        // Inverse mapping: enum value back to its wire-format name. Values that
        // came in through the overflow path are looked up in the overflow
        // container; otherwise an empty string is returned.
        Aws::String GetNameForDeploymentReadyAction(DeploymentReadyAction enumValue)
        {
          switch(enumValue)
          {
          case DeploymentReadyAction::CONTINUE_DEPLOYMENT:
            return "CONTINUE_DEPLOYMENT";
          case DeploymentReadyAction::STOP_DEPLOYMENT:
            return "STOP_DEPLOYMENT";
          default:
            EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
            if(overflowContainer)
            {
              return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
            }
            return {};
          }
        }

      } // namespace DeploymentReadyActionMapper
    } // namespace Model
  } // namespace CodeDeploy
} // namespace Aws
awslabs/aws-sdk-cpp
aws-cpp-sdk-codedeploy/source/model/DeploymentReadyAction.cpp
C++
apache-2.0
2,183
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
/**
 * @author Sergey I. Salishev
 */

package javax.imageio.event;

import java.awt.image.BufferedImage;
import java.util.EventListener;
import javax.imageio.ImageReader;

/**
 * A listener interface for receiving fine-grained notifications about pixel
 * updates while an {@link ImageReader} decodes an image, e.g. to repaint
 * progressively as data arrives.
 * <p>
 * NOTE(review): this is Apache Harmony's declaration of the standard
 * {@code javax.imageio.event.IIOReadUpdateListener} API; semantics follow the
 * Java SE specification for that interface.
 *
 * @author Sergey I. Salishev
 */
public interface IIOReadUpdateListener extends EventListener {

    /**
     * Reports that a region of the image has been updated with new samples.
     *
     * @param source the reader performing the read
     * @param theImage the image being updated
     * @param minX the leftmost updated column
     * @param minY the uppermost updated row
     * @param width the width of the updated region
     * @param height the height of the updated region
     * @param periodX the horizontal spacing between updated pixels
     * @param periodY the vertical spacing between updated pixels
     * @param bands the indices of the bands that were updated
     */
    void imageUpdate(ImageReader source, BufferedImage theImage, int minX,
            int minY, int width, int height, int periodX, int periodY,
            int[] bands);

    /**
     * Reports that a progressive pass over the image has completed.
     *
     * @param source the reader performing the read
     * @param theImage the image being updated
     */
    void passComplete(ImageReader source, BufferedImage theImage);

    /**
     * Reports that a progressive pass over the image has begun.
     *
     * @param source the reader performing the read
     * @param theImage the image being updated
     * @param pass the index of the current pass
     * @param minPass the index of the first pass to be decoded
     * @param maxPass the index of the last pass to be decoded
     * @param minX the leftmost column affected by this pass
     * @param minY the uppermost row affected by this pass
     * @param periodX the horizontal spacing between affected pixels
     * @param periodY the vertical spacing between affected pixels
     * @param bands the indices of the bands affected by this pass
     */
    void passStarted(ImageReader source, BufferedImage theImage, int pass,
            int minPass, int maxPass, int minX, int minY, int periodX,
            int periodY, int[] bands);

    /**
     * Reports that a progressive pass over a thumbnail has completed.
     *
     * @param source the reader performing the read
     * @param theImage the thumbnail being updated
     */
    void thumbnailPassComplete(ImageReader source, BufferedImage theImage);

    /**
     * Reports that a progressive pass over a thumbnail has begun.
     *
     * @param source the reader performing the read
     * @param theThumbnail the thumbnail being updated
     * @param pass the index of the current pass
     * @param minPass the index of the first pass to be decoded
     * @param maxPass the index of the last pass to be decoded
     * @param minX the leftmost column affected by this pass
     * @param minY the uppermost row affected by this pass
     * @param periodX the horizontal spacing between affected pixels
     * @param periodY the vertical spacing between affected pixels
     * @param bands the indices of the bands affected by this pass
     */
    void thumbnailPassStarted(ImageReader source, BufferedImage theThumbnail,
            int pass, int minPass, int maxPass, int minX, int minY,
            int periodX, int periodY, int[] bands);

    /**
     * Reports that a region of a thumbnail has been updated with new samples.
     *
     * @param source the reader performing the read
     * @param theThumbnail the thumbnail being updated
     * @param minX the leftmost updated column
     * @param minY the uppermost updated row
     * @param width the width of the updated region
     * @param height the height of the updated region
     * @param periodX the horizontal spacing between updated pixels
     * @param periodY the vertical spacing between updated pixels
     * @param bands the indices of the bands that were updated
     */
    void thumbnailUpdate(ImageReader source, BufferedImage theThumbnail,
            int minX, int minY, int width, int height, int periodX,
            int periodY, int[] bands);
}
freeVM/freeVM
enhanced/java/classlib/modules/imageio/src/main/java/javax/imageio/event/IIOReadUpdateListener.java
Java
apache-2.0
1,989
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.binary;

import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.ignite.IgniteException;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.binary.BinaryObjectBuilder;
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.binary.BinaryType;
import org.apache.ignite.internal.binary.builder.BinaryObjectBuilderImpl;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.SB;
import org.apache.ignite.lang.IgniteUuid;
import org.jetbrains.annotations.Nullable;

/**
 * Internal binary object interface.
 * <p>
 * Base class for binary object implementations backed either by an on-heap
 * byte array or by off-heap memory; also supplies the shared equals/toString
 * machinery used by both.
 */
public abstract class BinaryObjectExImpl implements BinaryObjectEx {
    /**
     * @return Length.
     */
    public abstract int length();

    /**
     * @return Object start.
     */
    public abstract int start();

    /**
     * @return {@code True} if object is array based.
     */
    public abstract boolean hasArray();

    /**
     * @return Object array if object is array based, otherwise {@code null}.
     */
    public abstract byte[] array();

    /**
     * @return Object offheap address is object is offheap based, otherwise 0.
     */
    public abstract long offheapAddress();

    /**
     * Gets field value.
     *
     * @param fieldId Field ID.
     * @return Field value.
     * @throws org.apache.ignite.binary.BinaryObjectException In case of any other error.
     */
    @Nullable public abstract <F> F field(int fieldId) throws BinaryObjectException;

    /** {@inheritDoc} */
    @Override public int enumOrdinal() throws BinaryObjectException {
        // Subclasses representing enums are expected to override this.
        throw new BinaryObjectException("Object is not enum.");
    }

    /**
     * Get offset of data begin.
     *
     * @return Field value.
     */
    public abstract int dataStartOffset();

    /**
     * Get offset of the footer begin.
     *
     * @return Field value.
     */
    public abstract int footerStartOffset();

    /**
     * Get field by offset.
     *
     * @param order Field offset.
     * @return Field value.
     */
    @Nullable public abstract <F> F fieldByOrder(int order);

    /**
     * Create field comparer.
     *
     * @return Comparer.
     */
    public abstract BinarySerializedFieldComparator createFieldComparator();

    /**
     * Writes field value defined by the given field offset to the given byte buffer.
     *
     * @param fieldOffset Field offset.
     * @return Boolean flag indicating whether the field was successfully written to the buffer, {@code false}
     *      if there is no enough space for the field in the buffer.
     */
    protected abstract boolean writeFieldByOrder(int fieldOffset, ByteBuffer buf);

    /**
     * @param ctx Reader context.
     * @param fieldName Field name.
     * @return Field value.
     */
    @Nullable protected abstract <F> F field(BinaryReaderHandles ctx, String fieldName);

    /**
     * @return {@code True} if object has schema.
     */
    public abstract boolean hasSchema();

    /**
     * Get schema ID.
     *
     * @return Schema ID.
     */
    public abstract int schemaId();

    /**
     * Create schema for object.
     *
     * @return Schema.
     */
    public abstract BinarySchema createSchema();

    /**
     * Get binary context.
     *
     * @return Binary context.
     */
    public abstract BinaryContext context();

    /** {@inheritDoc} */
    @Override public BinaryObjectBuilder toBuilder() throws BinaryObjectException {
        return BinaryObjectBuilderImpl.wrap(this);
    }

    /** {@inheritDoc} */
    @Override public BinaryObject clone() throws CloneNotSupportedException {
        return (BinaryObject)super.clone();
    }

    /**
     * {@inheritDoc}
     * <p>
     * Delegates equality to the identity resolver registered for this type id.
     * NOTE(review): no matching {@code hashCode()} override is visible in this
     * class — presumably subclasses provide one consistent with the resolver;
     * confirm against the concrete implementations.
     */
    public boolean equals(Object other) {
        if (other == this)
            return true;

        if (!(other instanceof BinaryObject))
            return false;

        BinaryIdentityResolver identity = context().identity(typeId());

        return identity.equals(this, (BinaryObject)other);
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        try {
            BinaryReaderHandles ctx = new BinaryReaderHandles();

            // Register self so nested references back to this object resolve as handles.
            ctx.put(start(), this);

            return toString(ctx, new IdentityHashMap<BinaryObject, Integer>());
        }
        catch (BinaryObjectException e) {
            throw new IgniteException("Failed to create string representation of binary object.", e);
        }
    }

    /**
     * @param ctx Reader context.
     * @param handles Handles for already traversed objects.
     * @return String representation.
     */
    private String toString(BinaryReaderHandles ctx, IdentityHashMap<BinaryObject, Integer> handles) {
        int idHash = System.identityHashCode(this);
        int hash = hashCode();

        BinaryType meta;

        try {
            meta = rawType();
        }
        catch (BinaryObjectException ignore) {
            // Metadata may be unavailable; fall back to the short form below.
            meta = null;
        }

        // Without metadata (or with sensitive output disabled) print only ids, no field values.
        if (meta == null || !S.INCLUDE_SENSITIVE)
            return S.toString(S.INCLUDE_SENSITIVE ? BinaryObject.class.getSimpleName() : "BinaryObject",
                "idHash", idHash, false,
                "hash", hash, false,
                "typeId", typeId(), true);

        // Remember this object (by identity) to break cycles during traversal.
        handles.put(this, idHash);

        SB buf = new SB(meta.typeName());

        if (meta.fieldNames() != null) {
            buf.a(" [idHash=").a(idHash).a(", hash=").a(hash);

            for (String name : meta.fieldNames()) {
                Object val = field(ctx, name);

                buf.a(", ").a(name).a('=');

                appendValue(val, buf, ctx, handles);
            }

            buf.a(']');
        }

        return buf.toString();
    }

    /**
     * Appends a human-readable rendering of {@code val} to {@code buf}, recursing
     * into arrays, iterables, maps and nested binary objects.
     *
     * @param val Value to append.
     * @param buf Buffer to append to.
     * @param ctx Reader context.
     * @param handles Handles for already traversed objects.
     */
    @SuppressWarnings("unchecked")
    private void appendValue(Object val, SB buf, BinaryReaderHandles ctx,
        IdentityHashMap<BinaryObject, Integer> handles) {
        if (val instanceof byte[])
            buf.a(Arrays.toString((byte[]) val));
        else if (val instanceof short[])
            buf.a(Arrays.toString((short[])val));
        else if (val instanceof int[])
            buf.a(Arrays.toString((int[])val));
        else if (val instanceof long[])
            buf.a(Arrays.toString((long[])val));
        else if (val instanceof float[])
            buf.a(Arrays.toString((float[])val));
        else if (val instanceof double[])
            buf.a(Arrays.toString((double[])val));
        else if (val instanceof char[])
            buf.a(Arrays.toString((char[])val));
        else if (val instanceof boolean[])
            buf.a(Arrays.toString((boolean[]) val));
        else if (val instanceof BigDecimal[])
            buf.a(Arrays.toString((BigDecimal[])val));
        else if (val instanceof IgniteUuid)
            buf.a(val);
        else if (val instanceof BinaryObjectExImpl) {
            BinaryObjectExImpl po = (BinaryObjectExImpl)val;

            Integer idHash0 = handles.get(val);

            if (idHash0 != null) {
                // Circular reference.
                BinaryType meta0 = po.rawType();

                assert meta0 != null;

                buf.a(meta0.typeName()).a(" [hash=").a(idHash0).a(", ...]");
            }
            else
                buf.a(po.toString(ctx, handles));
        }
        else if (val instanceof Object[]) {
            Object[] arr = (Object[])val;

            buf.a('[');

            for (int i = 0; i < arr.length; i++) {
                Object o = arr[i];

                appendValue(o, buf, ctx, handles);

                if (i < arr.length - 1)
                    buf.a(", ");
            }
        }
        else if (val instanceof Iterable) {
            Iterable<Object> col = (Iterable<Object>)val;

            buf.a(col.getClass().getSimpleName()).a(" {");

            Iterator it = col.iterator();

            while (it.hasNext()) {
                Object o = it.next();

                appendValue(o, buf, ctx, handles);

                if (it.hasNext())
                    buf.a(", ");
            }

            buf.a('}');
        }
        else if (val instanceof Map) {
            Map<Object, Object> map = (Map<Object, Object>)val;

            buf.a(map.getClass().getSimpleName()).a(" {");

            Iterator<Map.Entry<Object, Object>> it = map.entrySet().iterator();

            while (it.hasNext()) {
                Map.Entry<Object, Object> e = it.next();

                appendValue(e.getKey(), buf, ctx, handles);

                buf.a('=');

                appendValue(e.getValue(), buf, ctx, handles);

                if (it.hasNext())
                    buf.a(", ");
            }

            buf.a('}');
        }
        else
            buf.a(val);
    }
}
pperalta/ignite
modules/core/src/main/java/org/apache/ignite/internal/binary/BinaryObjectExImpl.java
Java
apache-2.0
9,818
package mpnginx import ( "bufio" "flag" "fmt" "io" "errors" "net/http" "regexp" "strconv" "strings" mp "github.com/mackerelio/go-mackerel-plugin-helper" ) var graphdef = map[string]mp.Graphs{ "nginx.connections": { Label: "Nginx Connections", Unit: "integer", Metrics: []mp.Metrics{ {Name: "connections", Label: "Active connections", Diff: false}, }, }, "nginx.requests": { Label: "Nginx requests", Unit: "float", Metrics: []mp.Metrics{ {Name: "accepts", Label: "Accepted connections", Diff: true, Type: "uint64"}, {Name: "handled", Label: "Handled connections", Diff: true, Type: "uint64"}, {Name: "requests", Label: "Handled requests", Diff: true, Type: "uint64"}, }, }, "nginx.queue": { Label: "Nginx connection status", Unit: "integer", Metrics: []mp.Metrics{ {Name: "reading", Label: "Reading", Diff: false}, {Name: "writing", Label: "Writing", Diff: false}, {Name: "waiting", Label: "Waiting", Diff: false}, }, }, } type stringSlice []string func (s *stringSlice) Set(v string) error { *s = append(*s, v) return nil } func (s *stringSlice) String() string { return fmt.Sprintf("%v", *s) } // NginxPlugin mackerel plugin for Nginx type NginxPlugin struct { URI string Header stringSlice } // % wget -qO- http://localhost:8080/nginx_status // Active connections: 123 // server accepts handled requests // 1693613501 1693613501 7996986318 // Reading: 66 Writing: 16 Waiting: 41 // FetchMetrics interface for mackerelplugin func (n NginxPlugin) FetchMetrics() (map[string]interface{}, error) { req, err := http.NewRequest("GET", n.URI, nil) if err != nil { return nil, err } for _, h := range n.Header { kv := strings.SplitN(h, ":", 2) var k, v string k = strings.TrimSpace(kv[0]) if len(kv) == 2 { v = strings.TrimSpace(kv[1]) } if http.CanonicalHeaderKey(k) == "Host" { req.Host = v } else { req.Header.Set(k, v) } } // set default User-Agent unless specified by n.Header if _, ok := req.Header["User-Agent"]; !ok { req.Header.Set("User-Agent", "mackerel-plugin-nginx") } resp, err := 
http.DefaultClient.Do(req) if err != nil { return nil, err } defer resp.Body.Close() return n.parseStats(resp.Body) } func (n NginxPlugin) parseStats(body io.Reader) (map[string]interface{}, error) { stat := make(map[string]interface{}) r := bufio.NewReader(body) line, _, err := r.ReadLine() if err != nil { return nil, errors.New("cannot get values") } re := regexp.MustCompile("Active connections: ([0-9]+)") res := re.FindStringSubmatch(string(line)) if res == nil || len(res) != 2 { return nil, errors.New("cannot get values") } stat["connections"], err = strconv.ParseFloat(res[1], 64) if err != nil { return nil, errors.New("cannot get values") } line, _, err = r.ReadLine() if err != nil { return nil, errors.New("cannot get values") } line, _, err = r.ReadLine() if err != nil { return nil, errors.New("cannot get values") } re = regexp.MustCompile("([0-9]+) ([0-9]+) ([0-9]+)") res = re.FindStringSubmatch(string(line)) if res == nil || len(res) != 4 { return nil, errors.New("cannot get values") } stat["accepts"], err = strconv.ParseFloat(res[1], 64) if err != nil { return nil, errors.New("cannot get values") } stat["handled"], err = strconv.ParseFloat(res[2], 64) if err != nil { return nil, errors.New("cannot get values") } stat["requests"], err = strconv.ParseFloat(res[3], 64) if err != nil { return nil, errors.New("cannot get values") } line, _, err = r.ReadLine() if err != nil { return nil, errors.New("cannot get values") } re = regexp.MustCompile("Reading: ([0-9]+) Writing: ([0-9]+) Waiting: ([0-9]+)") res = re.FindStringSubmatch(string(line)) if res == nil || len(res) != 4 { return nil, errors.New("cannot get values") } stat["reading"], err = strconv.ParseFloat(res[1], 64) if err != nil { return nil, errors.New("cannot get values") } stat["writing"], err = strconv.ParseFloat(res[2], 64) if err != nil { return nil, errors.New("cannot get values") } stat["waiting"], err = strconv.ParseFloat(res[3], 64) if err != nil { return nil, errors.New("cannot get values") } 
return stat, nil } // GraphDefinition interface for mackerelplugin func (n NginxPlugin) GraphDefinition() map[string]mp.Graphs { return graphdef } // Do the plugin func Do() { optURI := flag.String("uri", "", "URI") optScheme := flag.String("scheme", "http", "Scheme") optHost := flag.String("host", "localhost", "Hostname") optPort := flag.String("port", "8080", "Port") optPath := flag.String("path", "/nginx_status", "Path") optTempfile := flag.String("tempfile", "", "Temp file name") optHeader := &stringSlice{} flag.Var(optHeader, "header", "Set http header (e.g. \"Host: servername\")") flag.Parse() var nginx NginxPlugin if *optURI != "" { nginx.URI = *optURI } else { nginx.URI = fmt.Sprintf("%s://%s:%s%s", *optScheme, *optHost, *optPort, *optPath) } nginx.Header = *optHeader helper := mp.NewMackerelPlugin(nginx) helper.Tempfile = *optTempfile helper.Run() }
mackerelio/mackerel-agent-plugins
mackerel-plugin-nginx/lib/nginx.go
GO
apache-2.0
5,095
/* * Copyright (c) 2005-2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.ppaas.rest.endpoint.bean.autoscaler.policy.autoscale; import javax.xml.bind.annotation.XmlRootElement; @XmlRootElement public class AutoscalePolicy { private String id; private String displayName; private String description; private LoadThresholds loadThresholds; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getDisplayName() { return displayName; } public void setDisplayName(String displayName) { this.displayName = displayName; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public LoadThresholds getLoadThresholds() { return loadThresholds; } public void setLoadThresholds(LoadThresholds loadThresholds) { this.loadThresholds = loadThresholds; } }
liurl3/product-private-paas
tools/ppaas-migration/4.0.0/src/main/java/org/wso2/ppaas/rest/endpoint/bean/autoscaler/policy/autoscale/AutoscalePolicy.java
Java
apache-2.0
1,652
package nl.tno.stormcv.model; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import backtype.storm.tuple.Tuple; /** * This {@link CVParticle} implementation represents a single feature calculated for {@link Frame} and has the following fields: * <ul> * <li>name: the name of the feature like 'SIFT', 'SURF', 'HOG' etc</li> * <li>duration: the duration of the feature in case it describes a temporal aspect of multiple frames</li> * <li>sparseDescriptors: a list with {@link Descriptor} objects used to described sparse features like SIFT</li> * <li>denseDescriptors: a three dimensional float array much like the OpenCV Mat object which can be used to represent * dense features like dense Optical Flow</li> * </ul> * It is not always clear how a specific descriptor should be stored and it is typically up to the characteristics of the * topology and context what is the best way to go. * * @author Corne Versloot * */ public class Feature extends CVParticle{ private String name; private long duration; private List<Descriptor> sparseDescriptors = new ArrayList<Descriptor>(); private float[][][] denseDescriptors = new float[0][0][0]; public Feature(String streamId, long sequenceNr, String name, long duration, List<Descriptor> sparseDescriptors, float[][][] denseDescriptors) { super(streamId, sequenceNr); this.name = name; this.duration = duration; if(sparseDescriptors != null){ this.sparseDescriptors = sparseDescriptors; } if(denseDescriptors != null){ this.denseDescriptors = denseDescriptors; } } public Feature(Tuple tuple, String name, long duration, List<Descriptor> sparseDescriptors, float[][][] denseDescriptors) { super(tuple); this.name = name; this.duration = duration; if(sparseDescriptors != null){ this.sparseDescriptors = sparseDescriptors; } if(denseDescriptors != null){ this.denseDescriptors = denseDescriptors; } } public String getName() { return name; } public List<Descriptor> getSparseDescriptors() { return sparseDescriptors; } public 
float[][][] getDenseDescriptors(){ return denseDescriptors; } public long getDuration(){ return this.duration; } public Feature deepCopy(){ float[][][] denseCopy = new float[denseDescriptors.length][][]; for(int x=0; x<denseDescriptors.length; x++){ denseCopy[x] = new float[denseDescriptors[x].length][]; for(int y=0; y<denseDescriptors[x].length; y++){ denseCopy[x][y] = Arrays.copyOf(denseDescriptors[x][y], denseDescriptors[x][y].length); } } List<Descriptor> sparseCopy = new ArrayList<Descriptor>(this.sparseDescriptors.size()); for(Descriptor d : sparseDescriptors){ sparseCopy.add(d.deepCopy()); } Feature copyFeature = new Feature(new String(this.getStreamId()), this.getSequenceNr(), new String(this.getName()), this.getDuration(), sparseCopy, denseCopy); copyFeature.setRequestId(getRequestId()); copyFeature.setMetadata(this.getMetadata()); return copyFeature; } public String toString(){ return "Feature {stream:"+getStreamId()+", nr:"+getSequenceNr()+", name: "+name+", descriptors: "+sparseDescriptors+"}"; } }
todotobe1/StormCV
stormcv/src/main/java/nl/tno/stormcv/model/Feature.java
Java
apache-2.0
3,179
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // TODO(madhusdancs): // 1. Make printSuccess prepend protocol/scheme to the IPs/hostnames. // 1. Add a dry-run support. // 2. Make all the API object names customizable. // Ex: federation-apiserver, federation-controller-manager, etc. // 3. Make image name and tag customizable. // 4. Separate etcd container from API server pod as a first step towards enabling HA. // 5. Generate credentials of the following types for the API server: // i. "known_tokens.csv" // ii. "basic_auth.csv" // 6. Add the ability to customize DNS domain suffix. It should probably be derived // from cluster config. // 7. Make etcd PVC size configurable. // 8. Make API server and controller manager replicas customizable via the HA work. 
package init import ( "fmt" "io" "strings" "time" "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/util/intstr" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/client-go/tools/clientcmd" clientcmdapi "k8s.io/client-go/tools/clientcmd/api" certutil "k8s.io/client-go/util/cert" triple "k8s.io/client-go/util/cert/triple" kubeadmkubeconfigphase "k8s.io/kubernetes/cmd/kubeadm/app/phases/kubeconfig" "k8s.io/kubernetes/federation/pkg/kubefed/util" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/apis/extensions" "k8s.io/kubernetes/pkg/apis/rbac" client "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset" "k8s.io/kubernetes/pkg/kubectl/cmd/templates" cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" "k8s.io/kubernetes/pkg/version" "github.com/spf13/cobra" ) const ( APIServerCN = "federation-apiserver" ControllerManagerCN = "federation-controller-manager" AdminCN = "admin" HostClusterLocalDNSZoneName = "cluster.local." // User name used by federation controller manager to make // calls to federation API server. ControllerManagerUser = "federation-controller-manager" // Name of the ServiceAccount used by the federation controller manager // to access the secrets in the host cluster. ControllerManagerSA = "federation-controller-manager" // Group name of the legacy/core API group legacyAPIGroup = "" lbAddrRetryInterval = 5 * time.Second podWaitInterval = 2 * time.Second ) var ( init_long = templates.LongDesc(` Initialize a federation control plane. Federation control plane is hosted inside a Kubernetes cluster. The host cluster must be specified using the --host-cluster-context flag.`) init_example = templates.Examples(` # Initialize federation control plane for a federation # named foo in the host cluster whose local kubeconfig # context is bar. 
kubectl init foo --host-cluster-context=bar`) componentLabel = map[string]string{ "app": "federated-cluster", } apiserverSvcSelector = map[string]string{ "app": "federated-cluster", "module": "federation-apiserver", } apiserverPodLabels = map[string]string{ "app": "federated-cluster", "module": "federation-apiserver", } controllerManagerPodLabels = map[string]string{ "app": "federated-cluster", "module": "federation-controller-manager", } hyperkubeImageName = "gcr.io/google_containers/hyperkube-amd64" ) // NewCmdInit defines the `init` command that bootstraps a federation // control plane inside a set of host clusters. func NewCmdInit(cmdOut io.Writer, config util.AdminConfig) *cobra.Command { cmd := &cobra.Command{ Use: "init FEDERATION_NAME --host-cluster-context=HOST_CONTEXT", Short: "init initializes a federation control plane", Long: init_long, Example: init_example, Run: func(cmd *cobra.Command, args []string) { err := initFederation(cmdOut, config, cmd, args) cmdutil.CheckErr(err) }, } defaultImage := fmt.Sprintf("%s:%s", hyperkubeImageName, version.Get()) util.AddSubcommandFlags(cmd) cmd.Flags().String("dns-zone-name", "", "DNS suffix for this federation. Federated Service DNS names are published with this suffix.") cmd.Flags().String("image", defaultImage, "Image to use for federation API server and controller manager binaries.") cmd.Flags().String("dns-provider", "google-clouddns", "Dns provider to be used for this deployment.") cmd.Flags().String("etcd-pv-capacity", "10Gi", "Size of persistent volume claim to be used for etcd.") cmd.Flags().Bool("dry-run", false, "dry run without sending commands to server.") cmd.Flags().String("storage-backend", "etcd2", "The storage backend for persistence. Options: 'etcd2' (default), 'etcd3'.") return cmd } type entityKeyPairs struct { ca *triple.KeyPair server *triple.KeyPair controllerManager *triple.KeyPair admin *triple.KeyPair } // initFederation initializes a federation control plane. 
// See the design doc in https://github.com/kubernetes/kubernetes/pull/34484 // for details. func initFederation(cmdOut io.Writer, config util.AdminConfig, cmd *cobra.Command, args []string) error { initFlags, err := util.GetSubcommandFlags(cmd, args) if err != nil { return err } dnsZoneName := cmdutil.GetFlagString(cmd, "dns-zone-name") image := cmdutil.GetFlagString(cmd, "image") dnsProvider := cmdutil.GetFlagString(cmd, "dns-provider") etcdPVCapacity := cmdutil.GetFlagString(cmd, "etcd-pv-capacity") dryRun := cmdutil.GetDryRunFlag(cmd) storageBackend := cmdutil.GetFlagString(cmd, "storage-backend") hostFactory := config.HostFactory(initFlags.Host, initFlags.Kubeconfig) hostClientset, err := hostFactory.ClientSet() if err != nil { return err } serverName := fmt.Sprintf("%s-apiserver", initFlags.Name) serverCredName := fmt.Sprintf("%s-credentials", serverName) cmName := fmt.Sprintf("%s-controller-manager", initFlags.Name) cmKubeconfigName := fmt.Sprintf("%s-kubeconfig", cmName) // 1. Create a namespace for federation system components _, err = createNamespace(hostClientset, initFlags.FederationSystemNamespace, dryRun) if err != nil { return err } // 2. Expose a network endpoint for the federation API server svc, err := createService(hostClientset, initFlags.FederationSystemNamespace, serverName, dryRun) if err != nil { return err } ips, hostnames, err := waitForLoadBalancerAddress(hostClientset, svc, dryRun) if err != nil { return err } // 3. Generate TLS certificates and credentials entKeyPairs, err := genCerts(initFlags.FederationSystemNamespace, initFlags.Name, svc.Name, HostClusterLocalDNSZoneName, ips, hostnames) if err != nil { return err } _, err = createAPIServerCredentialsSecret(hostClientset, initFlags.FederationSystemNamespace, serverCredName, entKeyPairs, dryRun) if err != nil { return err } // 4. 
Create a kubeconfig secret _, err = createControllerManagerKubeconfigSecret(hostClientset, initFlags.FederationSystemNamespace, initFlags.Name, svc.Name, cmKubeconfigName, entKeyPairs, dryRun) if err != nil { return err } // 5. Create a persistent volume and a claim to store the federation // API server's state. This is where federation API server's etcd // stores its data. pvc, err := createPVC(hostClientset, initFlags.FederationSystemNamespace, svc.Name, etcdPVCapacity, dryRun) if err != nil { return err } // Since only one IP address can be specified as advertise address, // we arbitrarily pick the first available IP address advertiseAddress := "" if len(ips) > 0 { advertiseAddress = ips[0] } endpoint := advertiseAddress if advertiseAddress == "" && len(hostnames) > 0 { endpoint = hostnames[0] } // 6. Create federation API server _, err = createAPIServer(hostClientset, initFlags.FederationSystemNamespace, serverName, image, serverCredName, pvc.Name, advertiseAddress, storageBackend, dryRun) if err != nil { return err } // 7. Create federation controller manager // 7a. Create a service account in the host cluster for federation // controller manager. sa, err := createControllerManagerSA(hostClientset, initFlags.FederationSystemNamespace, dryRun) if err != nil { return err } // 7b. Create RBAC role and role binding for federation controller // manager service account. _, _, err = createRoleBindings(hostClientset, initFlags.FederationSystemNamespace, sa.Name, dryRun) if err != nil { return err } // 7c. Create federation controller manager deployment. _, err = createControllerManager(hostClientset, initFlags.FederationSystemNamespace, initFlags.Name, svc.Name, cmName, image, cmKubeconfigName, dnsZoneName, dnsProvider, sa.Name, dryRun) if err != nil { return err } // 8. 
Write the federation API server endpoint info, credentials // and context to kubeconfig err = updateKubeconfig(config, initFlags.Name, endpoint, entKeyPairs, dryRun) if err != nil { return err } if !dryRun { fedPods := []string{serverName, cmName} err = waitForPods(hostClientset, fedPods, initFlags.FederationSystemNamespace) if err != nil { return err } err = waitSrvHealthy(config, initFlags.Name, initFlags.Kubeconfig) if err != nil { return err } return printSuccess(cmdOut, ips, hostnames) } _, err = fmt.Fprintf(cmdOut, "Federation control plane runs (dry run)\n") return err } func createNamespace(clientset *client.Clientset, namespace string, dryRun bool) (*api.Namespace, error) { ns := &api.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: namespace, }, } if dryRun { return ns, nil } return clientset.Core().Namespaces().Create(ns) } func createService(clientset *client.Clientset, namespace, svcName string, dryRun bool) (*api.Service, error) { svc := &api.Service{ ObjectMeta: metav1.ObjectMeta{ Name: svcName, Namespace: namespace, Labels: componentLabel, }, Spec: api.ServiceSpec{ Type: api.ServiceTypeLoadBalancer, Selector: apiserverSvcSelector, Ports: []api.ServicePort{ { Name: "https", Protocol: "TCP", Port: 443, TargetPort: intstr.FromInt(443), }, }, }, } if dryRun { return svc, nil } return clientset.Core().Services(namespace).Create(svc) } func waitForLoadBalancerAddress(clientset *client.Clientset, svc *api.Service, dryRun bool) ([]string, []string, error) { ips := []string{} hostnames := []string{} if dryRun { return ips, hostnames, nil } err := wait.PollImmediateInfinite(lbAddrRetryInterval, func() (bool, error) { pollSvc, err := clientset.Core().Services(svc.Namespace).Get(svc.Name, metav1.GetOptions{}) if err != nil { return false, nil } if ings := pollSvc.Status.LoadBalancer.Ingress; len(ings) > 0 { for _, ing := range ings { if len(ing.IP) > 0 { ips = append(ips, ing.IP) } if len(ing.Hostname) > 0 { hostnames = append(hostnames, ing.Hostname) } } if 
len(ips) > 0 || len(hostnames) > 0 { return true, nil } } return false, nil }) if err != nil { return nil, nil, err } return ips, hostnames, nil } func genCerts(svcNamespace, name, svcName, localDNSZoneName string, ips, hostnames []string) (*entityKeyPairs, error) { ca, err := triple.NewCA(name) if err != nil { return nil, fmt.Errorf("failed to create CA key and certificate: %v", err) } server, err := triple.NewServerKeyPair(ca, APIServerCN, svcName, svcNamespace, localDNSZoneName, ips, hostnames) if err != nil { return nil, fmt.Errorf("failed to create federation API server key and certificate: %v", err) } cm, err := triple.NewClientKeyPair(ca, ControllerManagerCN, nil) if err != nil { return nil, fmt.Errorf("failed to create federation controller manager client key and certificate: %v", err) } admin, err := triple.NewClientKeyPair(ca, AdminCN, nil) if err != nil { return nil, fmt.Errorf("failed to create client key and certificate for an admin: %v", err) } return &entityKeyPairs{ ca: ca, server: server, controllerManager: cm, admin: admin, }, nil } func createAPIServerCredentialsSecret(clientset *client.Clientset, namespace, credentialsName string, entKeyPairs *entityKeyPairs, dryRun bool) (*api.Secret, error) { // Build the secret object with API server credentials. secret := &api.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: credentialsName, Namespace: namespace, }, Data: map[string][]byte{ "ca.crt": certutil.EncodeCertPEM(entKeyPairs.ca.Cert), "server.crt": certutil.EncodeCertPEM(entKeyPairs.server.Cert), "server.key": certutil.EncodePrivateKeyPEM(entKeyPairs.server.Key), }, } if dryRun { return secret, nil } // Boilerplate to create the secret in the host cluster. 
return clientset.Core().Secrets(namespace).Create(secret) } func createControllerManagerKubeconfigSecret(clientset *client.Clientset, namespace, name, svcName, kubeconfigName string, entKeyPairs *entityKeyPairs, dryRun bool) (*api.Secret, error) { config := kubeadmkubeconfigphase.MakeClientConfigWithCerts( fmt.Sprintf("https://%s", svcName), name, ControllerManagerUser, certutil.EncodeCertPEM(entKeyPairs.ca.Cert), certutil.EncodePrivateKeyPEM(entKeyPairs.controllerManager.Key), certutil.EncodeCertPEM(entKeyPairs.controllerManager.Cert), ) return util.CreateKubeconfigSecret(clientset, config, namespace, kubeconfigName, dryRun) } func createPVC(clientset *client.Clientset, namespace, svcName, etcdPVCapacity string, dryRun bool) (*api.PersistentVolumeClaim, error) { capacity, err := resource.ParseQuantity(etcdPVCapacity) if err != nil { return nil, err } pvc := &api.PersistentVolumeClaim{ ObjectMeta: metav1.ObjectMeta{ Name: fmt.Sprintf("%s-etcd-claim", svcName), Namespace: namespace, Labels: componentLabel, Annotations: map[string]string{ "volume.alpha.kubernetes.io/storage-class": "yes", }, }, Spec: api.PersistentVolumeClaimSpec{ AccessModes: []api.PersistentVolumeAccessMode{ api.ReadWriteOnce, }, Resources: api.ResourceRequirements{ Requests: api.ResourceList{ api.ResourceStorage: capacity, }, }, }, } if dryRun { return pvc, nil } return clientset.Core().PersistentVolumeClaims(namespace).Create(pvc) } func createAPIServer(clientset *client.Clientset, namespace, name, image, credentialsName, pvcName, advertiseAddress, storageBackend string, dryRun bool) (*extensions.Deployment, error) { command := []string{ "/hyperkube", "federation-apiserver", "--bind-address=0.0.0.0", "--etcd-servers=http://localhost:2379", "--secure-port=443", "--client-ca-file=/etc/federation/apiserver/ca.crt", "--tls-cert-file=/etc/federation/apiserver/server.crt", "--tls-private-key-file=/etc/federation/apiserver/server.key", fmt.Sprintf("--storage-backend=%s", storageBackend), } if 
advertiseAddress != "" { command = append(command, fmt.Sprintf("--advertise-address=%s", advertiseAddress)) } dataVolumeName := "etcddata" dep := &extensions.Deployment{ ObjectMeta: metav1.ObjectMeta{ Name: name, Namespace: namespace, Labels: componentLabel, }, Spec: extensions.DeploymentSpec{ Replicas: 1, Template: api.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Name: name, Labels: apiserverPodLabels, }, Spec: api.PodSpec{ Containers: []api.Container{ { Name: "apiserver", Image: image, Command: command, Ports: []api.ContainerPort{ { Name: "https", ContainerPort: 443, }, { Name: "local", ContainerPort: 8080, }, }, VolumeMounts: []api.VolumeMount{ { Name: credentialsName, MountPath: "/etc/federation/apiserver", ReadOnly: true, }, }, }, { Name: "etcd", Image: "gcr.io/google_containers/etcd:3.0.14-alpha.1", Command: []string{ "/usr/local/bin/etcd", "--data-dir", "/var/etcd/data", }, VolumeMounts: []api.VolumeMount{ { Name: dataVolumeName, MountPath: "/var/etcd", }, }, }, }, Volumes: []api.Volume{ { Name: credentialsName, VolumeSource: api.VolumeSource{ Secret: &api.SecretVolumeSource{ SecretName: credentialsName, }, }, }, { Name: dataVolumeName, VolumeSource: api.VolumeSource{ PersistentVolumeClaim: &api.PersistentVolumeClaimVolumeSource{ ClaimName: pvcName, }, }, }, }, }, }, }, } if dryRun { return dep, nil } return clientset.Extensions().Deployments(namespace).Create(dep) } func createControllerManagerSA(clientset *client.Clientset, namespace string, dryRun bool) (*api.ServiceAccount, error) { sa := &api.ServiceAccount{ ObjectMeta: metav1.ObjectMeta{ Name: ControllerManagerSA, Namespace: namespace, Labels: componentLabel, }, } if dryRun { return sa, nil } return clientset.Core().ServiceAccounts(namespace).Create(sa) } func createRoleBindings(clientset *client.Clientset, namespace, saName string, dryRun bool) (*rbac.Role, *rbac.RoleBinding, error) { roleName := "federation-system:federation-controller-manager" role := &rbac.Role{ // a role to use for 
bootstrapping the federation-controller-manager so it can access // secrets in the host cluster to access other clusters. ObjectMeta: metav1.ObjectMeta{ Name: roleName, Namespace: namespace, Labels: componentLabel, }, Rules: []rbac.PolicyRule{ rbac.NewRule("get", "list", "watch").Groups(legacyAPIGroup).Resources("secrets").RuleOrDie(), }, } rolebinding, err := rbac.NewRoleBinding(roleName, namespace).SAs(namespace, saName).Binding() if err != nil { return nil, nil, err } rolebinding.Labels = componentLabel if dryRun { return role, &rolebinding, nil } newRole, err := clientset.Rbac().Roles(namespace).Create(role) if err != nil { return nil, nil, err } newRolebinding, err := clientset.Rbac().RoleBindings(namespace).Create(&rolebinding) return newRole, newRolebinding, err } func createControllerManager(clientset *client.Clientset, namespace, name, svcName, cmName, image, kubeconfigName, dnsZoneName, dnsProvider, saName string, dryRun bool) (*extensions.Deployment, error) { dep := &extensions.Deployment{ ObjectMeta: metav1.ObjectMeta{ Name: cmName, Namespace: namespace, Labels: componentLabel, }, Spec: extensions.DeploymentSpec{ Replicas: 1, Template: api.PodTemplateSpec{ ObjectMeta: metav1.ObjectMeta{ Name: cmName, Labels: controllerManagerPodLabels, }, Spec: api.PodSpec{ Containers: []api.Container{ { Name: "controller-manager", Image: image, Command: []string{ "/hyperkube", "federation-controller-manager", fmt.Sprintf("--master=https://%s", svcName), "--kubeconfig=/etc/federation/controller-manager/kubeconfig", fmt.Sprintf("--dns-provider=%s", dnsProvider), "--dns-provider-config=", fmt.Sprintf("--federation-name=%s", name), fmt.Sprintf("--zone-name=%s", dnsZoneName), }, VolumeMounts: []api.VolumeMount{ { Name: kubeconfigName, MountPath: "/etc/federation/controller-manager", ReadOnly: true, }, }, Env: []api.EnvVar{ { Name: "POD_NAMESPACE", ValueFrom: &api.EnvVarSource{ FieldRef: &api.ObjectFieldSelector{ FieldPath: "metadata.namespace", }, }, }, }, }, }, Volumes: 
[]api.Volume{ { Name: kubeconfigName, VolumeSource: api.VolumeSource{ Secret: &api.SecretVolumeSource{ SecretName: kubeconfigName, }, }, }, }, ServiceAccountName: saName, }, }, }, } if dryRun { return dep, nil } return clientset.Extensions().Deployments(namespace).Create(dep) } func waitForPods(clientset *client.Clientset, fedPods []string, namespace string) error { err := wait.PollInfinite(podWaitInterval, func() (bool, error) { podCheck := len(fedPods) podList, err := clientset.Core().Pods(namespace).List(metav1.ListOptions{}) if err != nil { return false, nil } for _, pod := range podList.Items { for _, fedPod := range fedPods { if strings.HasPrefix(pod.Name, fedPod) && pod.Status.Phase == "Running" { podCheck -= 1 } } //ensure that all pods are in running state or keep waiting if podCheck == 0 { return true, nil } } return false, nil }) return err } func waitSrvHealthy(config util.AdminConfig, context, kubeconfig string) error { fedClientSet, err := config.FederationClientset(context, kubeconfig) if err != nil { return err } fedDiscoveryClient := fedClientSet.Discovery() err = wait.PollInfinite(podWaitInterval, func() (bool, error) { body, err := fedDiscoveryClient.RESTClient().Get().AbsPath("/healthz").Do().Raw() if err != nil { return false, nil } if strings.EqualFold(string(body), "ok") { return true, nil } return false, nil }) return err } func printSuccess(cmdOut io.Writer, ips, hostnames []string) error { svcEndpoints := append(ips, hostnames...) _, err := fmt.Fprintf(cmdOut, "Federation API server is running at: %s\n", strings.Join(svcEndpoints, ", ")) return err } func updateKubeconfig(config util.AdminConfig, name, endpoint string, entKeyPairs *entityKeyPairs, dryRun bool) error { po := config.PathOptions() kubeconfig, err := po.GetStartingConfig() if err != nil { return err } // Populate API server endpoint info. cluster := clientcmdapi.NewCluster() // Prefix "https" as the URL scheme to endpoint. 
if !strings.HasPrefix(endpoint, "https://") { endpoint = fmt.Sprintf("https://%s", endpoint) } cluster.Server = endpoint cluster.CertificateAuthorityData = certutil.EncodeCertPEM(entKeyPairs.ca.Cert) // Populate credentials. authInfo := clientcmdapi.NewAuthInfo() authInfo.ClientCertificateData = certutil.EncodeCertPEM(entKeyPairs.admin.Cert) authInfo.ClientKeyData = certutil.EncodePrivateKeyPEM(entKeyPairs.admin.Key) authInfo.Username = AdminCN // Populate context. context := clientcmdapi.NewContext() context.Cluster = name context.AuthInfo = name // Update the config struct with API server endpoint info, // credentials and context. kubeconfig.Clusters[name] = cluster kubeconfig.AuthInfos[name] = authInfo kubeconfig.Contexts[name] = context if !dryRun { // Write the update kubeconfig. if err := clientcmd.ModifyConfig(po, *kubeconfig, true); err != nil { return err } } return nil }
rkouj/kubernetes
federation/pkg/kubefed/init/init.go
GO
apache-2.0
23,239
//// [internalAliasClassInsideLocalModuleWithExport.js] (function (x) { var c = (function () { function c() { } c.prototype.foo = function (a) { return a; }; return c; })(); x.c = c; })(exports.x || (exports.x = {})); var x = exports.x; (function (m2) { (function (m3) { var c = x.c; m3.c = c; m3.cProp = new c(); var cReturnVal = m3.cProp.foo(10); })(m2.m3 || (m2.m3 = {})); var m3 = m2.m3; })(exports.m2 || (exports.m2 = {})); var m2 = exports.m2; exports.d = new m2.m3.c(); ////[internalAliasClassInsideLocalModuleWithExport.d.ts] export declare module x { class c { public foo(a: number): number; } } export declare module m2 { module m3 { export import c = x.c; var cProp: c; } } export declare var d: x.c;
hippich/typescript
tests/baselines/reference/internalAliasClassInsideLocalModuleWithExport.js
JavaScript
apache-2.0
905
<?php /** * THE CODE IN THIS FILE WAS GENERATED FROM THE EBAY WSDL USING THE PROJECT: * * https://github.com/davidtsadler/ebay-api-sdk-php * * Copyright 2014 David T. Sadler * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace DTS\eBaySDK\Trading\Types; /** * * @property boolean $DaylightSavingsInEffect * @property string $DaylightSavingsLabel * @property string $DaylightSavingsOffset * @property string $DetailVersion * @property string $StandardLabel * @property string $StandardOffset * @property string $TimeZoneID * @property \DateTime $UpdateTime */ class TimeZoneDetailsType extends \DTS\eBaySDK\Types\BaseType { /** * @var array Properties belonging to objects of this class. 
*/ private static $propertyTypes = array( 'DaylightSavingsInEffect' => array( 'type' => 'boolean', 'unbound' => false, 'attribute' => false, 'elementName' => 'DaylightSavingsInEffect' ), 'DaylightSavingsLabel' => array( 'type' => 'string', 'unbound' => false, 'attribute' => false, 'elementName' => 'DaylightSavingsLabel' ), 'DaylightSavingsOffset' => array( 'type' => 'string', 'unbound' => false, 'attribute' => false, 'elementName' => 'DaylightSavingsOffset' ), 'DetailVersion' => array( 'type' => 'string', 'unbound' => false, 'attribute' => false, 'elementName' => 'DetailVersion' ), 'StandardLabel' => array( 'type' => 'string', 'unbound' => false, 'attribute' => false, 'elementName' => 'StandardLabel' ), 'StandardOffset' => array( 'type' => 'string', 'unbound' => false, 'attribute' => false, 'elementName' => 'StandardOffset' ), 'TimeZoneID' => array( 'type' => 'string', 'unbound' => false, 'attribute' => false, 'elementName' => 'TimeZoneID' ), 'UpdateTime' => array( 'type' => 'DateTime', 'unbound' => false, 'attribute' => false, 'elementName' => 'UpdateTime' ) ); /** * @param array $values Optional properties and values to assign to the object. */ public function __construct(array $values = array()) { list($parentValues, $childValues) = self::getParentValues(self::$propertyTypes, $values); parent::__construct($parentValues); if (!array_key_exists(__CLASS__, self::$properties)) { self::$properties[__CLASS__] = array_merge(self::$properties[get_parent_class()], self::$propertyTypes); } if (!array_key_exists(__CLASS__, self::$xmlNamespaces)) { self::$xmlNamespaces[__CLASS__] = 'urn:ebay:apis:eBLBaseComponents'; } $this->setValues(__CLASS__, $childValues); } }
davidtsadler/ebay-sdk-trading
src/DTS/eBaySDK/Trading/Types/TimeZoneDetailsType.php
PHP
apache-2.0
3,548
/** * <copyright> * </copyright> * */ package org.xtuml.bp.xtext.oal.xoal; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>unrelate statement</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA1 <em>A1</em>}</li> * <li>{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA2 <em>A2</em>}</li> * <li>{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA3 <em>A3</em>}</li> * </ul> * </p> * * @see org.xtuml.bp.xtext.oal.xoal.XoalPackage#getunrelate_statement() * @model * @generated */ public interface unrelate_statement extends statement { /** * Returns the value of the '<em><b>A1</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>A1</em>' containment reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>A1</em>' containment reference. * @see #setA1(inst_ref_var) * @see org.xtuml.bp.xtext.oal.xoal.XoalPackage#getunrelate_statement_A1() * @model containment="true" * @generated */ inst_ref_var getA1(); /** * Sets the value of the '{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA1 <em>A1</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>A1</em>' containment reference. * @see #getA1() * @generated */ void setA1(inst_ref_var value); /** * Returns the value of the '<em><b>A2</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>A2</em>' containment reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>A2</em>' containment reference. 
* @see #setA2(inst_ref_var) * @see org.xtuml.bp.xtext.oal.xoal.XoalPackage#getunrelate_statement_A2() * @model containment="true" * @generated */ inst_ref_var getA2(); /** * Sets the value of the '{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA2 <em>A2</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>A2</em>' containment reference. * @see #getA2() * @generated */ void setA2(inst_ref_var value); /** * Returns the value of the '<em><b>A3</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>A3</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>A3</em>' attribute. * @see #setA3(String) * @see org.xtuml.bp.xtext.oal.xoal.XoalPackage#getunrelate_statement_A3() * @model * @generated */ String getA3(); /** * Sets the value of the '{@link org.xtuml.bp.xtext.oal.xoal.unrelate_statement#getA3 <em>A3</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>A3</em>' attribute. * @see #getA3() * @generated */ void setA3(String value); } // unrelate_statement
john-tornblom/bridgepoint
org.xtuml.bp.xtext.oal/src-gen/org/xtuml/bp/xtext/oal/xoal/unrelate_statement.java
Java
apache-2.0
3,389
CKEDITOR.plugins.setLang('oembed', 'nl', { title : "Integratie van media-inhoud (foto's, video, content)", button : "Media-inhoud van externe websites", pasteUrl : "Geef een URL van een pagina in dat ondersteund wordt (Bijv.: YouTube, Flickr, Qik, Vimeo, Hulu, Viddler, MyOpera, etc.) ...", invalidUrl : "Please provide an valid URL!", noEmbedCode : "No embed code found, or site is not supported!", url : "URL:", width: "Breedte:", height: "Hoogte:", widthTitle: "Width for the embeded Content", heightTitle: "Height for the embeded Content", maxWidth: "Maximale breedte:", maxHeight: "Maximale hoogte:", maxWidthTitle: "Maximum Width for the embeded Content", maxHeightTitle: "Maximum Height for the embeded Content", resizeType: "Resize Type (Only Video's):", noresize: "No Resize (use default)", responsive: "Responsive Resize", custom: "Specific Resize", autoClose: "Automatically Close Dialog after Code is Embeded", noVimeo: "The owner of this video has set domain restrictions and you will not be able to embed it on your website.", Error: "Media Content could not been retrieved, please try a different URL." });
ONLYOFFICE/CommunityServer
web/studio/ASC.Web.Studio/UserControls/Common/ckeditor/plugins/oembed/lang/nl.js
JavaScript
apache-2.0
1,170
############################################################################### ## ## Copyright (C) 2014 Tavendo GmbH ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. ## ############################################################################### from twisted.internet import reactor from twisted.internet.defer import inlineCallbacks from autobahn.wamp.types import CallResult from autobahn.twisted.wamp import ApplicationSession class Component(ApplicationSession): """ Application component that provides procedures which return complex results. """ def onConnect(self): self.join("realm1") def onJoin(self, details): def add_complex(a, ai, b, bi): return CallResult(c = a + b, ci = ai + bi) self.register(add_complex, 'com.myapp.add_complex') def split_name(fullname): forename, surname = fullname.split() return CallResult(forename, surname) self.register(split_name, 'com.myapp.split_name')
robtandy/AutobahnPython
examples/twisted/wamp/basic/rpc/complex/backend.py
Python
apache-2.0
1,506
package org.wso2.carbon.identity.mgt.endpoint.serviceclient.beans; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; @XmlAccessorType(XmlAccessType.FIELD) @XmlType(propOrder = { "user", "code" }) @XmlRootElement(name = "confirmSelfRegistrationRequest") public class ConfirmSelfRegistrationRequest { @XmlElement(required = true) private User user; @XmlElement(required = true) private String code; public User getUser() { return user; } public void setUser(User user) { this.user = user; } public String getCode() { return code; } public void setCode(String code) { this.code = code; } }
PasinduTennage/carbon-identity-framework
components/identity-mgt/org.wso2.carbon.identity.mgt.endpoint/src/main/java/org/wso2/carbon/identity/mgt/endpoint/serviceclient/beans/ConfirmSelfRegistrationRequest.java
Java
apache-2.0
872
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.application.options; import com.intellij.CommonBundle; import com.intellij.openapi.application.ApplicationBundle; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.util.text.UniqueNameGenerator; import com.intellij.util.ui.JBUI; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.awt.*; import java.util.Collection; public class SaveSchemeDialog extends DialogWrapper { private final JTextField mySchemeName = new JTextField(); private final Collection<String> myExistingNames; public SaveSchemeDialog(@NotNull Component parent, String title, @NotNull Collection<String> existingNames, @NotNull String selectedName) { super(parent, false); myExistingNames = existingNames; setTitle(title); mySchemeName.setText(UniqueNameGenerator.generateUniqueName(selectedName + " copy", existingNames)); init(); } public String getSchemeName() { return mySchemeName.getText(); } @Override protected JComponent createNorthPanel() { JPanel panel = new JPanel(new GridBagLayout()); GridBagConstraints gc = new GridBagConstraints(); gc.gridx = 0; gc.gridy = 0; gc.weightx = 0; gc.insets = new Insets(5, 0, 5, 5); panel.add(new JLabel(ApplicationBundle.message("label.name")), gc); gc = new GridBagConstraints(); gc.gridx = 1; gc.gridy = 0; gc.weightx = 1; gc.fill = GridBagConstraints.HORIZONTAL; gc.gridwidth = 2; gc.insets = 
new Insets(0, 0, 5, 0); panel.add(mySchemeName, gc); panel.setPreferredSize(JBUI.size(220, 40)); return panel; } @Override protected void doOKAction() { if (getSchemeName().trim().isEmpty()) { Messages.showMessageDialog(getContentPane(), ApplicationBundle.message("error.scheme.must.have.a.name"), CommonBundle.getErrorTitle(), Messages.getErrorIcon()); return; } else if ("default".equals(getSchemeName())) { Messages.showMessageDialog(getContentPane(), ApplicationBundle.message("error.illegal.scheme.name"), CommonBundle.getErrorTitle(), Messages.getErrorIcon()); return; } else if (myExistingNames.contains(getSchemeName())) { Messages.showMessageDialog( getContentPane(), ApplicationBundle.message("error.a.scheme.with.this.name.already.exists.or.was.deleted.without.applying.the.changes"), CommonBundle.getErrorTitle(), Messages.getErrorIcon() ); return; } super.doOKAction(); } @Override protected JComponent createCenterPanel() { return null; } @Override public JComponent getPreferredFocusedComponent() { return mySchemeName; } }
hurricup/intellij-community
platform/platform-impl/src/com/intellij/application/options/SaveSchemeDialog.java
Java
apache-2.0
3,369
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.script; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.test.ESTestCase; import org.junit.After; import org.junit.Before; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import static java.util.Collections.unmodifiableMap; import static java.util.Collections.unmodifiableSet; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.containsString; // TODO: this needs to be a base test class, and all scripting engines extend it public class ScriptModesTests extends ESTestCase { ScriptSettings scriptSettings; ScriptContextRegistry scriptContextRegistry; private ScriptContext[] scriptContexts; private Map<String, ScriptEngineService> scriptEngines; private ScriptModes scriptModes; private Set<String> checkedSettings; private boolean 
assertAllSettingsWereChecked; private boolean assertScriptModesNonNull; @Before public void setupScriptEngines() { //randomly register custom script contexts int randomInt = randomIntBetween(0, 3); //prevent duplicates using map Map<String, ScriptContext.Plugin> contexts = new HashMap<>(); for (int i = 0; i < randomInt; i++) { String plugin = randomAsciiOfLength(randomIntBetween(1, 10)); String operation = randomAsciiOfLength(randomIntBetween(1, 30)); String context = plugin + "-" + operation; contexts.put(context, new ScriptContext.Plugin(plugin, operation)); } scriptContextRegistry = new ScriptContextRegistry(contexts.values()); scriptContexts = scriptContextRegistry.scriptContexts().toArray(new ScriptContext[scriptContextRegistry.scriptContexts().size()]); scriptEngines = buildScriptEnginesByLangMap(newHashSet( //add the native engine just to make sure it gets filtered out new NativeScriptEngineService(Settings.EMPTY, Collections.<String, NativeScriptFactory>emptyMap()), new CustomScriptEngineService())); ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList( new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.NAME), new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME))); scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); checkedSettings = new HashSet<>(); assertAllSettingsWereChecked = true; assertScriptModesNonNull = true; } @After public void assertNativeScriptsAreAlwaysAllowed() { if (assertScriptModesNonNull) { assertThat(scriptModes.getScriptEnabled(NativeScriptEngineService.NAME, randomFrom(ScriptType.values()), randomFrom(scriptContexts)), equalTo(true)); } } @After public void assertAllSettingsWereChecked() { if (assertScriptModesNonNull) { assertThat(scriptModes, notNullValue()); int numberOfSettings = ScriptType.values().length * scriptContextRegistry.scriptContexts().size(); 
numberOfSettings += 3; // for top-level inline/store/file settings assertThat(scriptModes.scriptEnabled.size(), equalTo(numberOfSettings)); if (assertAllSettingsWereChecked) { assertThat(checkedSettings.size(), equalTo(numberOfSettings)); } } } public void testDefaultSettings() { this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY); assertScriptModesAllOps(true, ScriptType.FILE); assertScriptModesAllOps(false, ScriptType.STORED, ScriptType.INLINE); } public void testMissingSetting() { assertAllSettingsWereChecked = false; this.scriptModes = new ScriptModes(scriptSettings, Settings.EMPTY); try { scriptModes.getScriptEnabled("non_existing", randomFrom(ScriptType.values()), randomFrom(scriptContexts)); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("not found for lang [non_existing]")); } } public void testScriptTypeGenericSettings() { int randomInt = randomIntBetween(1, ScriptType.values().length - 1); Set<ScriptType> randomScriptTypesSet = new HashSet<>(); boolean[] randomScriptModes = new boolean[randomInt]; for (int i = 0; i < randomInt; i++) { boolean added = false; while (added == false) { added = randomScriptTypesSet.add(randomFrom(ScriptType.values())); } randomScriptModes[i] = randomBoolean(); } ScriptType[] randomScriptTypes = randomScriptTypesSet.toArray(new ScriptType[randomScriptTypesSet.size()]); Settings.Builder builder = Settings.builder(); for (int i = 0; i < randomInt; i++) { builder.put("script" + "." 
+ randomScriptTypes[i].getScriptType(), randomScriptModes[i]); } this.scriptModes = new ScriptModes(scriptSettings, builder.build()); for (int i = 0; i < randomInt; i++) { assertScriptModesAllOps(randomScriptModes[i], randomScriptTypes[i]); } if (randomScriptTypesSet.contains(ScriptType.FILE) == false) { assertScriptModesAllOps(true, ScriptType.FILE); } if (randomScriptTypesSet.contains(ScriptType.STORED) == false) { assertScriptModesAllOps(false, ScriptType.STORED); } if (randomScriptTypesSet.contains(ScriptType.INLINE) == false) { assertScriptModesAllOps(false, ScriptType.INLINE); } } public void testScriptContextGenericSettings() { int randomInt = randomIntBetween(1, scriptContexts.length - 1); Set<ScriptContext> randomScriptContextsSet = new HashSet<>(); boolean[] randomScriptModes = new boolean[randomInt]; for (int i = 0; i < randomInt; i++) { boolean added = false; while (added == false) { added = randomScriptContextsSet.add(randomFrom(scriptContexts)); } randomScriptModes[i] = randomBoolean(); } ScriptContext[] randomScriptContexts = randomScriptContextsSet.toArray(new ScriptContext[randomScriptContextsSet.size()]); Settings.Builder builder = Settings.builder(); for (int i = 0; i < randomInt; i++) { builder.put("script" + "." + randomScriptContexts[i].getKey(), randomScriptModes[i]); } this.scriptModes = new ScriptModes(scriptSettings, builder.build()); for (int i = 0; i < randomInt; i++) { assertScriptModesAllTypes(randomScriptModes[i], randomScriptContexts[i]); } ScriptContext[] complementOf = complementOf(randomScriptContexts); assertScriptModes(true, new ScriptType[]{ScriptType.FILE}, complementOf); assertScriptModes(false, new ScriptType[]{ScriptType.STORED, ScriptType.INLINE}, complementOf); } public void testConflictingScriptTypeAndOpGenericSettings() { ScriptContext scriptContext = randomFrom(scriptContexts); Settings.Builder builder = Settings.builder() .put("script." 
+ scriptContext.getKey(), "false") .put("script.stored", "true") .put("script.inline", "true"); //operations generic settings have precedence over script type generic settings this.scriptModes = new ScriptModes(scriptSettings, builder.build()); assertScriptModesAllTypes(false, scriptContext); ScriptContext[] complementOf = complementOf(scriptContext); assertScriptModes(true, new ScriptType[]{ScriptType.FILE, ScriptType.STORED}, complementOf); assertScriptModes(true, new ScriptType[]{ScriptType.INLINE}, complementOf); } private void assertScriptModesAllOps(boolean expectedScriptEnabled, ScriptType... scriptTypes) { assertScriptModes(expectedScriptEnabled, scriptTypes, scriptContexts); } private void assertScriptModesAllTypes(boolean expectedScriptEnabled, ScriptContext... scriptContexts) { assertScriptModes(expectedScriptEnabled, ScriptType.values(), scriptContexts); } private void assertScriptModes(boolean expectedScriptEnabled, ScriptType[] scriptTypes, ScriptContext... scriptContexts) { assert scriptTypes.length > 0; assert scriptContexts.length > 0; for (ScriptType scriptType : scriptTypes) { checkedSettings.add("script.engine.custom." + scriptType); for (ScriptContext scriptContext : scriptContexts) { assertThat("custom." + scriptType + "." + scriptContext.getKey() + " doesn't have the expected value", scriptModes.getScriptEnabled("custom", scriptType, scriptContext), equalTo(expectedScriptEnabled)); checkedSettings.add("custom." + scriptType + "." + scriptContext); } } } private ScriptContext[] complementOf(ScriptContext... 
scriptContexts) { Map<String, ScriptContext> copy = new HashMap<>(); for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) { copy.put(scriptContext.getKey(), scriptContext); } for (ScriptContext scriptContext : scriptContexts) { copy.remove(scriptContext.getKey()); } return copy.values().toArray(new ScriptContext[copy.size()]); } static Map<String, ScriptEngineService> buildScriptEnginesByLangMap(Set<ScriptEngineService> scriptEngines) { Map<String, ScriptEngineService> builder = new HashMap<>(); for (ScriptEngineService scriptEngine : scriptEngines) { String type = scriptEngine.getType(); builder.put(type, scriptEngine); } return unmodifiableMap(builder); } private static class CustomScriptEngineService implements ScriptEngineService { public static final String NAME = "custom"; @Override public String getType() { return NAME; } @Override public String getExtension() { return NAME; } @Override public Object compile(String scriptName, String scriptSource, Map<String, String> params) { return null; } @Override public ExecutableScript executable(CompiledScript compiledScript, @Nullable Map<String, Object> vars) { return null; } @Override public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, @Nullable Map<String, Object> vars) { return null; } @Override public void close() { } @Override public void scriptRemoved(@Nullable CompiledScript script) { } } }
camilojd/elasticsearch
core/src/test/java/org/elasticsearch/script/ScriptModesTests.java
Java
apache-2.0
12,174
/** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/guardduty/model/AdminAccount.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace GuardDuty { namespace Model { AdminAccount::AdminAccount() : m_adminAccountIdHasBeenSet(false), m_adminStatus(AdminStatus::NOT_SET), m_adminStatusHasBeenSet(false) { } AdminAccount::AdminAccount(JsonView jsonValue) : m_adminAccountIdHasBeenSet(false), m_adminStatus(AdminStatus::NOT_SET), m_adminStatusHasBeenSet(false) { *this = jsonValue; } AdminAccount& AdminAccount::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("adminAccountId")) { m_adminAccountId = jsonValue.GetString("adminAccountId"); m_adminAccountIdHasBeenSet = true; } if(jsonValue.ValueExists("adminStatus")) { m_adminStatus = AdminStatusMapper::GetAdminStatusForName(jsonValue.GetString("adminStatus")); m_adminStatusHasBeenSet = true; } return *this; } JsonValue AdminAccount::Jsonize() const { JsonValue payload; if(m_adminAccountIdHasBeenSet) { payload.WithString("adminAccountId", m_adminAccountId); } if(m_adminStatusHasBeenSet) { payload.WithString("adminStatus", AdminStatusMapper::GetNameForAdminStatus(m_adminStatus)); } return payload; } } // namespace Model } // namespace GuardDuty } // namespace Aws
aws/aws-sdk-cpp
aws-cpp-sdk-guardduty/source/model/AdminAccount.cpp
C++
apache-2.0
1,510
$(function() { //搜索框交互 var placeholder = window.INPUT_PLACEHOLDER || '请输入要搜索的关键词', baiduUrl = 'http://www.baidu.com/s?wd=', googleUrl = 'http://www.google.com.hk/search?q=', searchEl = $('#search'); $('.button', searchEl).on('click', function(e) { var keyword = $('.keyword', searchEl).val(), url = e.target.name == 'baidu' ? baiduUrl : googleUrl; window.open(url + encodeURIComponent(keyword)); e.preventDefault(); }); $('.keyword', searchEl) .val(placeholder) .on('focus', function(e) { var keyword = $(e.target); if(keyword.val() == placeholder) { keyword.removeClass('default-word').val(''); } }) .on('blur', function(e) { var keyword = $(e.target); if(keyword.val() == '') { keyword.addClass('default-word').val(placeholder); } }); //收藏 $('#header .icon-favor').on('click', function(e) { var title = document.title || '设计师网址导航', url = window.location.href; try { if(window.sidebar && window.sidebar.addPanel) { window.sidebar.addPanel(title, url, ''); }else if(window.external) { window.external.AddFavorite(url, title); }else { throw 'NOT_SUPPORTED'; } }catch(err) { alert('您的浏览器不支持自动收藏,请使用Ctrl+D进行收藏'); } e.preventDefault(); }); //加入首页 $('#header .icon-homepage').on('click', function(e) { try { if(window.netscape) { netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect"); Components.classes['@mozilla.org/preferences-service;1'] .getService(Components. 
interfaces.nsIPrefBranch) .setCharPref('browser.startup.homepage',window.location.href); alert('成功设为首页'); }else if(window.external) { document.body.style.behavior='url(#default#homepage)'; document.body.setHomePage(location.href); }else { throw 'NOT_SUPPORTED'; } }catch(err) { alert('您的浏览器不支持或不允许自动设置首页, 请通过浏览器菜单设置'); } e.preventDefault(); }); //导航区域 $('#catalog,#website-map').on('click', '.website-list>li, .more-item', function(e) { var tarEl = e.target; if(tarEl.tagName == 'A' && $(tarEl).parents('section.active')[0]) { e.stopPropagation(); }else { if(tarEl.tagName != 'LI') { tarEl = $(tarEl).parents('li')[0]; } if(tarEl) { var aEl = $('a', tarEl); if(aEl.length) { var src = aEl.attr('href'); if(aEl.attr('target') == '_blank') { window.open(src); }else { location.href = src; } } } e.preventDefault(); } }); //快捷导航 catalogAnimationRunning = false; function highlightCatalog(target) { // *效果1* // var listItem = $('li', target); // for(var i=0; i<6; i++) { // $([listItem[i], listItem[i+6]]).delay(50*i).animate({opacity:0.1},200, function(){ // $(this).animate({opacity:1}, 200); // }); // } /*效果2*/ target.addClass('highlight'); setTimeout(function() { target.removeClass('highlight'); }, 800); /*效果3*/ // target.addClass('shake'); //setTimeout(function() { // target.removeClass('shake'); // }, 2000); } $('#shortcut nav').on('click', function(e) { if(e.target.tagName != 'A') { return; } var keyword = $(e.target).attr('href').slice(1); var target = $('section[data-catalog="'+keyword+'"]'); if(target[0] && !catalogAnimationRunning) { catalogAnimationRunning = true; var top = target.offset().top; $('html, body').animate({ scrollTop: top-20 }, 200, function() { highlightCatalog(target); catalogAnimationRunning = false; }); } e.preventDefault(); }); //热门关键字 (function() { var hotWordCtn = $('#content .tips .hot-words'); var titleStr = '<b>'+KeywordConfig.title+'</b>'; var curIndex = 0; function showHotWord() { var html = titleStr; for(var i=curIndex; 
i<KeywordConfig.num+curIndex; i++) { if(KeywordConfig.data[i]) { html += '<a href="'+KeywordConfig.data[i].url+'" class="website" target="_blank"><strong>'+KeywordConfig.data[i].kw+'</strong></a>'; } } hotWordCtn.empty().append(html); curIndex += KeywordConfig.num; if(curIndex >= KeywordConfig.data.length) { curIndex = 0; } var children = hotWordCtn.children(); for(var i=0; i<children.length; i++) { $(children[i]).delay(100*i).animate({opacity:0.1},200, function(){ $(this).animate({opacity:1}, 200); }); } showHotWord.timeout = setTimeout(showHotWord, KeywordConfig.delay*1000); } hotWordCtn.on('mouseenter', function() { if(showHotWord.timeout) { clearTimeout(showHotWord.timeout); } }).on('mouseleave', function() { showHotWord.timeout = setTimeout(showHotWord, KeywordConfig.delay*1000); }); if(hotWordCtn[0] && KeywordConfig) { showHotWord(); } })(); //文章序号 $('#aside .classics-article-list li').each(function(i, item) { $(item).css('backgroundPosition', '0 '+(6+i*-50)+'px'); }); //回顶部 var goToTopEl = $('#go-to-top'); $(window).scroll(function() { if($(window).scrollTop() >0) { goToTopEl.removeClass('hide'); }else { goToTopEl.addClass('hide'); } }); });
27979831/27979831.github.io
navigation/js/main.js
JavaScript
apache-2.0
7,078
/** * Copyright 2014 @ z3q.net. * name : * author : jarryliu * date : 2013-12-16 19:03 * description : * history : */ package app import ( "fmt" "github.com/jsix/gof" "go2o/src/core" "go2o/src/core/infrastructure" "go2o/src/core/service" "os" "strconv" ) func RunSocket(ctx gof.App, port int, debug, trace bool) { if gcx, ok := ctx.(*core.MainApp); ok { if !gcx.Loaded { gcx.Init(debug, trace) } } else { fmt.Println("app context err") os.Exit(1) return } if debug { fmt.Println("[Started]:Socket server (with debug) running on port [" + strconv.Itoa(port) + "]:") infrastructure.DebugMode = true } else { fmt.Println("[Started]:Socket server running on port [" + strconv.Itoa(port) + "]:") } service.ServerListen("tcp", ":"+strconv.Itoa(port), ctx) }
lance2088/go2o
src/app/socket_server.go
GO
apache-2.0
803
#include <math.h> #include "monster.h" #include "../../engine/graphics/graphics.h" #include "../../engine/input/input.h" #include "../../engine/engine.h" #include "../../engine/common/log.h" CMonster::CMonster() { mTexture = NULL; //LoadTexture(); } CMonster::~CMonster() { if ( mTexture ) { // If it isn't a managed texture, unload it the normal way if ( !CManagedTexture::Unload( &gResourceManager, mTexture ) ) mTexture->Unload(); } } int CMonster::AttackStrength() { return 1; } void CMonster::Update( float dt ) { SVector3 force; force.x = cos( mRot ) * 4.0f; force.y = 0.0f; force.z = sin( mRot ) * 4.0f; mObject->SetVelocity(force); mRot -= 1.0f * dt; } void CMonster::Render() { float r = mObject->GetSphere().radius; //float r = ((CPCircle*)mObject)->GetRadius() * 0.9f; SVector3 pos = mObject->GetPosition(); glTranslatef( pos.x, pos.y, pos.z ); glRotatef( (GLfloat)(mRot * DEGREES), 0.0f, -1.0f, 0.0f ); mTexture->Bind(); /* Temporary Code, Draw A Cube as a Placeholder */ glBegin(GL_QUADS); // Draw The Cube Using quads glColor3f(1.0f,1.0f,1.0f); glNormal3f( 0.0f, 1.0f, 0.0f ); glTexCoord2f( 0.0f, 0.0f ); glVertex3f( r, r,-r); // Top Right Of The Quad (Top) glTexCoord2f( 0.0f, 0.0f ); glVertex3f(-r, r,-r); // Top Left Of The Quad (Top) glTexCoord2f( 0.0f, 0.0f ); glVertex3f(-r, r, r); // Bottom Left Of The Quad (Top) glTexCoord2f( 0.0f, 0.0f ); glVertex3f( r, r, r); // Bottom Right Of The Quad (Top) glNormal3f( 0.0f, -1.0f, 0.0f ); glTexCoord2f( 0.0f, 0.0f ); glVertex3f( r,-r, r); // Top Right Of The Quad (Bottom) glTexCoord2f( 0.0f, 0.0f ); glVertex3f(-r,-r, r); // Top Left Of The Quad (Bottom) glTexCoord2f( 0.0f, 0.0f ); glVertex3f(-r,-r,-r); // Bottom Left Of The Quad (Bottom) glTexCoord2f( 0.0f, 0.0f ); glVertex3f( r,-r,-r); // Bottom Right Of The Quad (Bottom) glNormal3f( 0.0f, 0.0f, 1.0f ); glTexCoord2f( 0.0f, 0.0f ); glVertex3f( r, r, r); // Top Right Of The Quad (Front) glTexCoord2f( 1.0f, 0.0f ); glVertex3f(-r, r, r); // Top Left Of The Quad (Front) 
glTexCoord2f( 1.0f, 1.0f ); glVertex3f(-r,-r, r); // Bottom Left Of The Quad (Front) glTexCoord2f( 0.0f, 1.0f ); glVertex3f( r,-r, r); // Bottom Right Of The Quad (Front) glNormal3f( 0.0f, 0.0f, -1.0f ); glTexCoord2f( 0.0f, 1.0f ); glVertex3f( r,-r,-r); // Top Right Of The Quad (Back) glTexCoord2f( 1.0f, 1.0f ); glVertex3f(-r,-r,-r); // Top Left Of The Quad (Back) glTexCoord2f( 1.0f, 0.0f ); glVertex3f(-r, r,-r); // Bottom Left Of The Quad (Back) glTexCoord2f( 0.0f, 0.0f ); glVertex3f( r, r,-r); // Bottom Right Of The Quad (Back) glNormal3f( -1.0f, 0.0f, 0.0f ); glTexCoord2f( 0.0f, 0.0f ); glVertex3f(-r, r, r); // Top Right Of The Quad (Left) glTexCoord2f( 1.0f, 0.0f ); glVertex3f(-r, r,-r); // Top Left Of The Quad (Left) glTexCoord2f( 1.0f, 1.0f ); glVertex3f(-r,-r,-r); // Bottom Left Of The Quad (Left) glTexCoord2f( 0.0f, 1.0f ); glVertex3f(-r,-r, r); // Bottom Right Of The Quad (Left) glNormal3f( 1.0f, 0.0f, 0.0f ); glTexCoord2f( 0.0f, 0.0f ); glVertex3f( r, r,-r); // Top Right Of The Quad (Right) glTexCoord2f( 1.0f, 0.0f ); glVertex3f( r, r, r); // Top Left Of The Quad (Right) glTexCoord2f( 1.0f, 1.0f ); glVertex3f( r,-r, r); // Bottom Left Of The Quad (Right) glTexCoord2f( 0.0f, 1.0f ); glVertex3f( r,-r,-r); // Bottom Right Of The Quad (Right) glEnd(); // End Drawing The Cube mTexture->Unbind(); } void CMonster::LoadTexture() { }
gregtour/tedge-cpp
src/kart/entity/monster.cpp
C++
apache-2.0
3,675
package org.apache.maven.lifecycle.internal; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.ArrayList; import java.util.Collection; import java.util.StringTokenizer; import javax.inject.Inject; import javax.inject.Named; import javax.inject.Singleton; import org.apache.maven.execution.MavenSession; import org.apache.maven.model.Plugin; import org.apache.maven.plugin.BuildPluginManager; import org.apache.maven.plugin.InvalidPluginDescriptorException; import org.apache.maven.plugin.MojoNotFoundException; import org.apache.maven.plugin.PluginDescriptorParsingException; import org.apache.maven.plugin.PluginNotFoundException; import org.apache.maven.plugin.PluginResolutionException; import org.apache.maven.plugin.descriptor.MojoDescriptor; import org.apache.maven.plugin.prefix.DefaultPluginPrefixRequest; import org.apache.maven.plugin.prefix.NoPluginFoundForPrefixException; import org.apache.maven.plugin.prefix.PluginPrefixRequest; import org.apache.maven.plugin.prefix.PluginPrefixResolver; import org.apache.maven.plugin.prefix.PluginPrefixResult; import org.apache.maven.plugin.version.DefaultPluginVersionRequest; import org.apache.maven.plugin.version.PluginVersionRequest; import 
org.apache.maven.plugin.version.PluginVersionResolutionException; import org.apache.maven.plugin.version.PluginVersionResolver; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.configuration.PlexusConfiguration; import org.codehaus.plexus.util.xml.Xpp3Dom; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <p> * Resolves dependencies for the artifacts in context of the lifecycle build * </p> * <strong>NOTE:</strong> This class is not part of any public api and can be changed or deleted without prior notice. * * @since 3.0 * @author Benjamin Bentmann * @author Jason van Zyl * @author jdcasey * @author Kristian Rosenvold (extracted class only) */ @Named @Singleton public class MojoDescriptorCreator { private final Logger logger = LoggerFactory.getLogger( getClass() ); private final PluginVersionResolver pluginVersionResolver; private final BuildPluginManager pluginManager; private final PluginPrefixResolver pluginPrefixResolver; private final LifecyclePluginResolver lifecyclePluginResolver; @Inject public MojoDescriptorCreator( PluginVersionResolver pluginVersionResolver, BuildPluginManager pluginManager, PluginPrefixResolver pluginPrefixResolver, LifecyclePluginResolver lifecyclePluginResolver ) { this.pluginVersionResolver = pluginVersionResolver; this.pluginManager = pluginManager; this.pluginPrefixResolver = pluginPrefixResolver; this.lifecyclePluginResolver = lifecyclePluginResolver; } private Plugin findPlugin( String groupId, String artifactId, Collection<Plugin> plugins ) { for ( Plugin plugin : plugins ) { if ( artifactId.equals( plugin.getArtifactId() ) && groupId.equals( plugin.getGroupId() ) ) { return plugin; } } return null; } public static Xpp3Dom convert( MojoDescriptor mojoDescriptor ) { Xpp3Dom dom = new Xpp3Dom( "configuration" ); PlexusConfiguration c = mojoDescriptor.getMojoConfiguration(); PlexusConfiguration[] ces = c.getChildren(); if ( ces != null ) { for ( PlexusConfiguration ce : ces ) { String value = 
ce.getValue( null ); String defaultValue = ce.getAttribute( "default-value", null ); if ( value != null || defaultValue != null ) { Xpp3Dom e = new Xpp3Dom( ce.getName() ); e.setValue( value ); if ( defaultValue != null ) { e.setAttribute( "default-value", defaultValue ); } dom.addChild( e ); } } } return dom; } // org.apache.maven.plugins:maven-remote-resources-plugin:1.0:process@executionId public MojoDescriptor getMojoDescriptor( String task, MavenSession session, MavenProject project ) throws PluginNotFoundException, PluginResolutionException, PluginDescriptorParsingException, MojoNotFoundException, NoPluginFoundForPrefixException, InvalidPluginDescriptorException, PluginVersionResolutionException { String goal = null; Plugin plugin = null; StringTokenizer tok = new StringTokenizer( task, ":" ); int numTokens = tok.countTokens(); if ( numTokens >= 4 ) { // We have everything that we need // // org.apache.maven.plugins:maven-remote-resources-plugin:1.0:process // // groupId // artifactId // version // goal // plugin = new Plugin(); plugin.setGroupId( tok.nextToken() ); plugin.setArtifactId( tok.nextToken() ); plugin.setVersion( tok.nextToken() ); goal = tok.nextToken(); // This won't be valid, but it constructs something easy to read in the error message while ( tok.hasMoreTokens() ) { goal += ":" + tok.nextToken(); } } else if ( numTokens == 3 ) { // We have everything that we need except the version // // org.apache.maven.plugins:maven-remote-resources-plugin:???:process // // groupId // artifactId // ??? 
// goal // plugin = new Plugin(); plugin.setGroupId( tok.nextToken() ); plugin.setArtifactId( tok.nextToken() ); goal = tok.nextToken(); } else { // We have a prefix and goal // // idea:idea // String prefix = tok.nextToken(); if ( numTokens == 2 ) { goal = tok.nextToken(); } else { // goal was missing - pass through to MojoNotFoundException goal = ""; } // This is the case where someone has executed a single goal from the command line // of the form: // // mvn remote-resources:process // // From the metadata stored on the server which has been created as part of a standard // Maven plugin deployment we will find the right PluginDescriptor from the remote // repository. plugin = findPluginForPrefix( prefix, session ); } int executionIdx = goal.indexOf( '@' ); if ( executionIdx > 0 ) { goal = goal.substring( 0, executionIdx ); } injectPluginDeclarationFromProject( plugin, project ); // If there is no version to be found then we need to look in the repository metadata for // this plugin and see what's specified as the latest release. 
// if ( plugin.getVersion() == null ) { resolvePluginVersion( plugin, session, project ); } return pluginManager.getMojoDescriptor( plugin, goal.toString(), project.getRemotePluginRepositories(), session.getRepositorySession() ); } // TODO take repo mans into account as one may be aggregating prefixes of many // TODO collect at the root of the repository, read the one at the root, and fetch remote if something is missing // or the user forces the issue public Plugin findPluginForPrefix( String prefix, MavenSession session ) throws NoPluginFoundForPrefixException { // [prefix]:[goal] if ( session.getCurrentProject() != null ) { try { lifecyclePluginResolver.resolveMissingPluginVersions( session.getCurrentProject(), session ); } catch ( PluginVersionResolutionException e ) { // not critical here logger.debug( e.getMessage(), e ); } } PluginPrefixRequest prefixRequest = new DefaultPluginPrefixRequest( prefix, session ); PluginPrefixResult prefixResult = pluginPrefixResolver.resolve( prefixRequest ); Plugin plugin = new Plugin(); plugin.setGroupId( prefixResult.getGroupId() ); plugin.setArtifactId( prefixResult.getArtifactId() ); return plugin; } private void resolvePluginVersion( Plugin plugin, MavenSession session, MavenProject project ) throws PluginVersionResolutionException { PluginVersionRequest versionRequest = new DefaultPluginVersionRequest( plugin, session.getRepositorySession(), project.getRemotePluginRepositories() ); plugin.setVersion( pluginVersionResolver.resolve( versionRequest ).getVersion() ); } private void injectPluginDeclarationFromProject( Plugin plugin, MavenProject project ) { Plugin pluginInPom = findPlugin( plugin, project.getBuildPlugins() ); if ( pluginInPom == null && project.getPluginManagement() != null ) { pluginInPom = findPlugin( plugin, project.getPluginManagement().getPlugins() ); } if ( pluginInPom != null ) { if ( plugin.getVersion() == null ) { plugin.setVersion( pluginInPom.getVersion() ); } plugin.setDependencies( new 
ArrayList<>( pluginInPom.getDependencies() ) ); } } private Plugin findPlugin( Plugin plugin, Collection<Plugin> plugins ) { return findPlugin( plugin.getGroupId(), plugin.getArtifactId(), plugins ); } }
mcculls/maven
maven-core/src/main/java/org/apache/maven/lifecycle/internal/MojoDescriptorCreator.java
Java
apache-2.0
10,814
package org.stagemonitor.requestmonitor.ejb; import static net.bytebuddy.matcher.ElementMatchers.named; import static net.bytebuddy.matcher.ElementMatchers.returns; import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import net.bytebuddy.description.method.MethodDescription; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.matcher.ElementMatcher; import net.bytebuddy.matcher.ElementMatchers; class IsDeclaredInInterfaceHierarchyElementMatcher implements ElementMatcher<TypeDescription> { private final MethodDescription.InDefinedShape targetMethod; static ElementMatcher<TypeDescription> isDeclaredInInterfaceHierarchy(MethodDescription.InDefinedShape method) { return new IsDeclaredInInterfaceHierarchyElementMatcher(method); } public IsDeclaredInInterfaceHierarchyElementMatcher(MethodDescription.InDefinedShape targetMethod) { this.targetMethod = targetMethod; } @Override public boolean matches(TypeDescription targetInterface) { if (ElementMatchers.declaresMethod(named(targetMethod.getName()) .and(returns(targetMethod.getReturnType().asErasure())) .and(takesArguments(targetMethod.getParameters().asTypeList().asErasures()))) .matches(targetInterface)) { return true; } else { for (TypeDescription typeDescription : targetInterface.getInterfaces().asErasures()) { if (matches(typeDescription)) { return true; } } } return false; } }
trampi/stagemonitor
stagemonitor-requestmonitor/src/main/java/org/stagemonitor/requestmonitor/ejb/IsDeclaredInInterfaceHierarchyElementMatcher.java
Java
apache-2.0
1,446
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder.endpoint.dsl; import java.util.*; import java.util.Map; import java.util.concurrent.*; import java.util.function.*; import java.util.stream.*; import javax.annotation.Generated; import org.apache.camel.builder.EndpointConsumerBuilder; import org.apache.camel.builder.EndpointProducerBuilder; import org.apache.camel.builder.endpoint.AbstractEndpointBuilder; /** * Invoke methods of Java beans specified by class name. * * Generated by camel build tools - do NOT edit this file! */ @Generated("org.apache.camel.maven.packaging.EndpointDslMojo") public interface ClassEndpointBuilderFactory { /** * Builder for endpoint for the Class component. */ public interface ClassEndpointBuilder extends EndpointProducerBuilder { default AdvancedClassEndpointBuilder advanced() { return (AdvancedClassEndpointBuilder) this; } /** * Use scope option instead. * * The option is a: &lt;code&gt;java.lang.Boolean&lt;/code&gt; type. * * Group: common * * @param cache the value to set * @return the dsl builder */ @Deprecated default ClassEndpointBuilder cache(Boolean cache) { doSetProperty("cache", cache); return this; } /** * Use scope option instead. 
* * The option will be converted to a * &lt;code&gt;java.lang.Boolean&lt;/code&gt; type. * * Group: common * * @param cache the value to set * @return the dsl builder */ @Deprecated default ClassEndpointBuilder cache(String cache) { doSetProperty("cache", cache); return this; } /** * Sets the name of the method to invoke on the bean. * * The option is a: &lt;code&gt;java.lang.String&lt;/code&gt; type. * * Group: common * * @param method the value to set * @return the dsl builder */ default ClassEndpointBuilder method(String method) { doSetProperty("method", method); return this; } /** * Scope of bean. When using singleton scope (default) the bean is * created or looked up only once and reused for the lifetime of the * endpoint. The bean should be thread-safe in case concurrent threads * is calling the bean at the same time. When using request scope the * bean is created or looked up once per request (exchange). This can be * used if you want to store state on a bean while processing a request * and you want to call the same bean instance multiple times while * processing the request. The bean does not have to be thread-safe as * the instance is only called from the same request. When using * prototype scope, then the bean will be looked up or created per call. * However in case of lookup then this is delegated to the bean registry * such as Spring or CDI (if in use), which depends on their * configuration can act as either singleton or prototype scope. so when * using prototype then this depends on the delegated registry. * * The option is a: &lt;code&gt;org.apache.camel.BeanScope&lt;/code&gt; * type. * * Default: Singleton * Group: common * * @param scope the value to set * @return the dsl builder */ default ClassEndpointBuilder scope(org.apache.camel.BeanScope scope) { doSetProperty("scope", scope); return this; } /** * Scope of bean. When using singleton scope (default) the bean is * created or looked up only once and reused for the lifetime of the * endpoint. 
The bean should be thread-safe in case concurrent threads * is calling the bean at the same time. When using request scope the * bean is created or looked up once per request (exchange). This can be * used if you want to store state on a bean while processing a request * and you want to call the same bean instance multiple times while * processing the request. The bean does not have to be thread-safe as * the instance is only called from the same request. When using * prototype scope, then the bean will be looked up or created per call. * However in case of lookup then this is delegated to the bean registry * such as Spring or CDI (if in use), which depends on their * configuration can act as either singleton or prototype scope. so when * using prototype then this depends on the delegated registry. * * The option will be converted to a * &lt;code&gt;org.apache.camel.BeanScope&lt;/code&gt; type. * * Default: Singleton * Group: common * * @param scope the value to set * @return the dsl builder */ default ClassEndpointBuilder scope(String scope) { doSetProperty("scope", scope); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option is a: &lt;code&gt;boolean&lt;/code&gt; type. 
* * Default: false * Group: producer * * @param lazyStartProducer the value to set * @return the dsl builder */ default ClassEndpointBuilder lazyStartProducer(boolean lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } /** * Whether the producer should be started lazy (on the first message). * By starting lazy you can use this to allow CamelContext and routes to * startup in situations where a producer may otherwise fail during * starting and cause the route to fail being started. By deferring this * startup to be lazy then the startup failure can be handled during * routing messages via Camel's routing error handlers. Beware that when * the first message is processed then creating and starting the * producer may take a little time and prolong the total processing time * of the processing. * * The option will be converted to a &lt;code&gt;boolean&lt;/code&gt; * type. * * Default: false * Group: producer * * @param lazyStartProducer the value to set * @return the dsl builder */ default ClassEndpointBuilder lazyStartProducer(String lazyStartProducer) { doSetProperty("lazyStartProducer", lazyStartProducer); return this; } } /** * Advanced builder for endpoint for the Class component. */ public interface AdvancedClassEndpointBuilder extends EndpointProducerBuilder { default ClassEndpointBuilder basic() { return (ClassEndpointBuilder) this; } /** * Used for configuring additional properties on the bean. * * The option is a: &lt;code&gt;java.util.Map&amp;lt;java.lang.String, * java.lang.Object&amp;gt;&lt;/code&gt; type. * The option is multivalued, and you can use the parameters(String, * Object) method to add a value (call the method multiple times to set * more values). * * Group: advanced * * @param key the option key * @param value the option value * @return the dsl builder */ default AdvancedClassEndpointBuilder parameters(String key, Object value) { doSetMultiValueProperty("parameters", "bean." 
+ key, value); return this; } /** * Used for configuring additional properties on the bean. * * The option is a: &lt;code&gt;java.util.Map&amp;lt;java.lang.String, * java.lang.Object&amp;gt;&lt;/code&gt; type. * The option is multivalued, and you can use the parameters(String, * Object) method to add a value (call the method multiple times to set * more values). * * Group: advanced * * @param values the values * @return the dsl builder */ default AdvancedClassEndpointBuilder parameters(Map values) { doSetMultiValueProperties("parameters", "bean.", values); return this; } } public interface ClassBuilders { /** * Class (camel-bean) * Invoke methods of Java beans specified by class name. * * Category: core,java * Since: 2.4 * Maven coordinates: org.apache.camel:camel-bean * * Syntax: <code>class:beanName</code> * * Path parameter: beanName (required) * Sets the name of the bean to invoke * * @param path beanName * @return the dsl builder */ default ClassEndpointBuilder clas(String path) { return ClassEndpointBuilderFactory.endpointBuilder("class", path); } /** * Class (camel-bean) * Invoke methods of Java beans specified by class name. * * Category: core,java * Since: 2.4 * Maven coordinates: org.apache.camel:camel-bean * * Syntax: <code>class:beanName</code> * * Path parameter: beanName (required) * Sets the name of the bean to invoke * * @param componentName to use a custom component name for the endpoint * instead of the default name * @param path beanName * @return the dsl builder */ default ClassEndpointBuilder clas(String componentName, String path) { return ClassEndpointBuilderFactory.endpointBuilder(componentName, path); } } static ClassEndpointBuilder endpointBuilder( String componentName, String path) { class ClassEndpointBuilderImpl extends AbstractEndpointBuilder implements ClassEndpointBuilder, AdvancedClassEndpointBuilder { public ClassEndpointBuilderImpl(String path) { super(componentName, path); } } return new ClassEndpointBuilderImpl(path); } }
christophd/camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ClassEndpointBuilderFactory.java
Java
apache-2.0
12,027
#! /usr/bin/python ''' Saves relevant data fed back from TwitterStream etc next to its PID and timestamp ready for analysis Needs to do limited analysis to work out which keywords in the tweet stream correspond to which programme ''' from datetime import datetime import os import string import time as time2 from time import time from Axon.Ipc import producerFinished from Axon.Ipc import shutdownMicroprocess from Axon.ThreadedComponent import threadedcomponent import MySQLdb import _mysql_exceptions import cjson from dateutil.parser import parse class DataCollector(threadedcomponent): Inboxes = { "inbox" : "Receives data in the format [tweetjson,[pid,pid]]", "control" : "" } Outboxes = { "outbox" : "", "signal" : "" } def __init__(self,dbuser,dbpass): super(DataCollector, self).__init__() self.dbuser = dbuser self.dbpass = dbpass def finished(self): while self.dataReady("control"): msg = self.recv("control") if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess): self.send(msg, "signal") return True return False def dbConnect(self): db = MySQLdb.connect(user=self.dbuser,passwd=self.dbpass,db="twitter_bookmarks",use_unicode=True,charset="utf8") cursor = db.cursor() return cursor def main(self): cursor = self.dbConnect() while not self.finished(): twitdata = list() # Collect all current received tweet JSON and their related PIDs into a twitdata list while self.dataReady("inbox"): pids = list() data = self.recv("inbox") for pid in data[1]: pids.append(pid) twitdata.append([data[0],pids]) if len(twitdata) > 0: # Process the received twitdata for tweet in twitdata: tweet[0] = tweet[0].replace("\\/","/") # Fix slashes in links: This may need moving further down the line - ideally it would be handled by cjson if tweet[0] != "\r\n": # If \r\n is received, this is just a keep alive signal from Twitter every 30 secs # At this point, each 'tweet' contains tweetdata, and a list of possible pids newdata = cjson.decode(tweet[0]) if newdata.has_key('delete') 
or newdata.has_key('scrub_geo') or newdata.has_key('limit'): # Keep a record of all requests from Twitter for deletions, location removal etc # As yet none of these have been received, but this code will store them if they are received to enable debugging filepath = "contentDebug.txt" if os.path.exists(filepath): file = open(filepath, 'r') filecontents = file.read() else: filecontents = "" file = open(filepath, 'w') file.write(filecontents + "\n" + str(datetime.utcnow()) + " " + cjson.encode(newdata)) file.close() else: # This is a real tweet tweetid = newdata['id'] print "New tweet! @" + newdata['user']['screen_name'] + ": " + newdata['text'] for pid in tweet[1]: # Cycle through possible pids, grabbing that pid's keywords from the DB # Then, check this tweet against the keywords and save to DB where appropriate (there may be more than one location) cursor.execute("""SELECT keyword,type FROM keywords WHERE pid = %s""",(pid)) data = cursor.fetchall() for row in data: # Some keywords are stored with a ^. 
These must be split, and the tweet checked to see if it has both keywords, but not necessarily next to each other keywords = row[0].split("^") if len(keywords) == 2: if string.lower(keywords[0]) in string.lower(newdata['text']) and string.lower(keywords[1]) in string.lower(newdata['text']): cursor.execute("""SELECT timestamp,timediff FROM programmes WHERE pid = %s ORDER BY timestamp DESC""",(pid)) progdata = cursor.fetchone() if progdata != None: # Ensure the user hasn't already tweeted the same text # Also ensure they haven't tweeted in the past 10 seconds timestamp = time2.mktime(parse(newdata['created_at']).timetuple()) cursor.execute("""SELECT * FROM rawdata WHERE (pid = %s AND text = %s AND user = %s) OR (pid = %s AND user = %s AND timestamp >= %s AND timestamp < %s)""",(pid,newdata['text'],newdata['user']['screen_name'],pid,newdata['user']['screen_name'],timestamp-10,timestamp+10)) if cursor.fetchone() == None: print ("Storing tweet for pid " + pid) # Work out where this tweet really occurred in the programme using timestamps and DVB bridge data progposition = timestamp - (progdata[0] - progdata[1]) cursor.execute("""INSERT INTO rawdata (tweet_id,pid,timestamp,text,user,programme_position) VALUES (%s,%s,%s,%s,%s,%s)""", (tweetid,pid,timestamp,newdata['text'],newdata['user']['screen_name'],progposition)) break # Break out of this loop and back to check the same tweet against the next programme else: print ("Duplicate tweet from user - ignoring") if string.lower(row[0]) in string.lower(newdata['text']): cursor.execute("""SELECT timestamp,timediff FROM programmes WHERE pid = %s ORDER BY timestamp DESC""",(pid)) progdata = cursor.fetchone() if progdata != None: # Ensure the user hasn't already tweeted the same text for this programme # Also ensure they haven't tweeted in the past 10 seconds timestamp = time2.mktime(parse(newdata['created_at']).timetuple()) cursor.execute("""SELECT * FROM rawdata WHERE (pid = %s AND text = %s AND user = %s) OR (pid = %s AND user = 
%s AND timestamp >= %s AND timestamp < %s)""",(pid,newdata['text'],newdata['user']['screen_name'],pid,newdata['user']['screen_name'],timestamp-10,timestamp+10)) if cursor.fetchone() == None: print ("Storing tweet for pid " + pid) # Work out where this tweet really occurred in the programme using timestamps and DVB bridge data progposition = timestamp - (progdata[0] - progdata[1]) cursor.execute("""INSERT INTO rawdata (tweet_id,pid,timestamp,text,user,programme_position) VALUES (%s,%s,%s,%s,%s,%s)""", (tweetid,pid,timestamp,newdata['text'],newdata['user']['screen_name'],progposition)) break # Break out of this loop and back to check the same tweet against the next programme else: print ("Duplicate tweet from user - ignoring") else: print "Blank line received from Twitter - no new data" print ("Done!") # new line to break up display else: time2.sleep(0.1) ''' The raw data collector differs from the plain data collector in that it stores the raw JSON containers for tweets next to their unique IDs, but with no relation to PIDs This is run concurrent to the other data collector, so the two won't necessarily run at the same rate and could be out of sync This possible lack of sync must be handled later ''' class RawDataCollector(threadedcomponent): Inboxes = { "inbox" : "Receives data in the format [tweetjson,[pid,pid]]", "control" : "" } Outboxes = { "outbox" : "", "signal" : "" } def __init__(self,dbuser,dbpass): super(RawDataCollector, self).__init__() self.dbuser = dbuser self.dbpass = dbpass def finished(self): while self.dataReady("control"): msg = self.recv("control") if isinstance(msg, producerFinished) or isinstance(msg, shutdownMicroprocess): self.send(msg, "signal") return True return False def dbConnect(self): db = MySQLdb.connect(user=self.dbuser,passwd=self.dbpass,db="twitter_bookmarks",use_unicode=True,charset="utf8") cursor = db.cursor() return cursor def main(self): cursor = self.dbConnect() while not self.finished(): twitdata = list() # As in the data 
collector, create a list of all tweets currently received while self.dataReady("inbox"): data = self.recv("inbox") twitdata.append(data[0]) if len(twitdata) > 0: # Cycle through the tweets, fixing their URLs as before, and storing them if they aren't a status message for tweet in twitdata: tweet = tweet.replace("\\/","/") # This may need moving further down the line - ideally it would be handled by cjson if tweet != "\r\n": newdata = cjson.decode(tweet) if newdata.has_key('delete') or newdata.has_key('scrub_geo') or newdata.has_key('limit'): # It is assumed here that the original data collector has handled the Twitter status message print "Discarding tweet instruction - captured by other component" else: tweetid = newdata['id'] # Capture exactly when this tweet was stored tweetstamp = time() tweetsecs = int(tweetstamp) # Include the fractions of seconds portion of the timestamp in a separate field tweetfrac = tweetstamp - tweetsecs # We only have a 16000 VARCHAR field to use in MySQL (through choice) - this should be enough, but if not, the tweet will be written out to file if len(tweet) < 16000: try: cursor.execute("""INSERT INTO rawtweets (tweet_id,tweet_json,tweet_stored_seconds,tweet_stored_fraction) VALUES (%s,%s,%s,%s)""", (tweetid,tweet,tweetsecs,tweetfrac)) except _mysql_exceptions.IntegrityError, e: # Handle the possibility for Twitter having sent us a duplicate print "Duplicate tweet ID:", str(e) else: print "Discarding tweet - length limit exceeded" tweetcontents = "" homedir = os.path.expanduser("~") if os.path.exists(homedir + "/oversizedtweets.conf"): try: file = open(homedir + "/oversizedtweets.conf",'r') tweetcontents = file.read() file.close() except IOError, e: print ("Failed to load oversized tweet cache - it will be overwritten") try: file = open(homedir + "/oversizedtweets.conf",'w') tweetcontents = tweetcontents + tweet file.write(tweetcontents) file.close() except IOError, e: print ("Failed to save oversized tweet cache") else: 
time2.sleep(0.1)
sparkslabs/kamaelia_
Sketches/AB/backup/Bookmarks/DataCollector.py
Python
apache-2.0
13,360
/**
 * Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved.
 *
 * You may not modify, use, reproduce, or distribute this software except in
 * compliance with the terms of the License at:
 * http://java.net/projects/javaeetutorial/pages/BerkeleyLicense
 */
package javaeetutorial.hello1rlc;

import javax.enterprise.inject.Model;

/**
 * Request-scoped CDI model bean backing the hello1-rlc JSF page.
 * Holds the name entered by the user between the request and response pages.
 */
@Model
public class Hello {

    // Name supplied by the user; null until setName is called.
    private String name;

    public Hello() {
    }

    /**
     * @return the stored name, or {@code null} if none has been set yet
     */
    public String getName() {
        return name;
    }

    /**
     * Stores the user-supplied name.
     *
     * @param userName the name to store (renamed from snake_case
     *                 {@code user_name} to follow Java naming conventions)
     */
    public void setName(String userName) {
        this.name = userName;
    }
}
paulnguyen/cmpe279
modules/module10/web/jsf/hello1-rlc/src/main/java/javaeetutorial/hello1rlc/Hello.java
Java
apache-2.0
574
/*
 *
 * Copyright 2015 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include <grpc/support/port_platform.h>

#include "src/core/lib/iomgr/port.h"

/* This whole translation unit is only compiled for WinSock-based builds. */
#ifdef GRPC_WINSOCK_SOCKET

#include "src/core/lib/iomgr/sockaddr_windows.h"

#include <grpc/support/log.h>

#include "src/core/lib/iomgr/iocp_windows.h"
#include "src/core/lib/iomgr/iomgr.h"
#include "src/core/lib/iomgr/pollset_windows.h"
#include "src/core/lib/iomgr/resolve_address.h"
#include "src/core/lib/iomgr/socket_windows.h"
#include "src/core/lib/iomgr/tcp_client.h"
#include "src/core/lib/iomgr/tcp_server.h"
#include "src/core/lib/iomgr/timer.h"

/* Windows-specific vtable implementations defined in sibling files;
   installed below in grpc_set_default_iomgr_platform(). */
extern grpc_tcp_server_vtable grpc_windows_tcp_server_vtable;
extern grpc_tcp_client_vtable grpc_windows_tcp_client_vtable;
extern grpc_timer_vtable grpc_generic_timer_vtable;
extern grpc_pollset_vtable grpc_windows_pollset_vtable;
extern grpc_pollset_set_vtable grpc_windows_pollset_set_vtable;
extern grpc_address_resolver_vtable grpc_windows_resolver_vtable;

/* Windows' io manager is going to be fully designed using IO completion
   ports. All of what we're doing here is basically make sure that Windows
   sockets are initialized in and out. */

/* Initialize WinSock 2.0; aborts (GPR_ASSERT) if startup fails. */
static void winsock_init(void) {
  WSADATA wsaData;
  int status = WSAStartup(MAKEWORD(2, 0), &wsaData);
  GPR_ASSERT(status == 0);
}

/* Tear down WinSock; aborts if cleanup fails. */
static void winsock_shutdown(void) {
  int status = WSACleanup();
  GPR_ASSERT(status == 0);
}

/* Platform init: WinSock first, then IOCP, then pollsets. */
static void iomgr_platform_init(void) {
  winsock_init();
  grpc_iocp_init();
  grpc_pollset_global_init();
}

static void iomgr_platform_flush(void) { grpc_iocp_flush(); }

/* Shutdown happens in reverse order of init. */
static void iomgr_platform_shutdown(void) {
  grpc_pollset_global_shutdown();
  grpc_iocp_shutdown();
  winsock_shutdown();
}

/* No background-closure machinery on the Windows iomgr. */
static void iomgr_platform_shutdown_background_closure(void) {}

static bool iomgr_platform_is_any_background_poller_thread(void) {
  return false;
}

static grpc_iomgr_platform_vtable vtable = {
    iomgr_platform_init, iomgr_platform_flush, iomgr_platform_shutdown,
    iomgr_platform_shutdown_background_closure,
    iomgr_platform_is_any_background_poller_thread};

/* Install all Windows implementations as the process-wide iomgr platform. */
void grpc_set_default_iomgr_platform() {
  grpc_set_tcp_client_impl(&grpc_windows_tcp_client_vtable);
  grpc_set_tcp_server_impl(&grpc_windows_tcp_server_vtable);
  grpc_set_timer_impl(&grpc_generic_timer_vtable);
  grpc_set_pollset_vtable(&grpc_windows_pollset_vtable);
  grpc_set_pollset_set_vtable(&grpc_windows_pollset_set_vtable);
  grpc_set_resolver_impl(&grpc_windows_resolver_vtable);
  grpc_set_iomgr_platform_vtable(&vtable);
}

bool grpc_iomgr_run_in_background() { return false; }

#endif /* GRPC_WINSOCK_SOCKET */
carl-mastrangelo/grpc
src/core/lib/iomgr/iomgr_windows.cc
C++
apache-2.0
3,126
package eu.hansolo.enzo.onoffswitch; import javafx.event.Event; import javafx.event.EventTarget; import javafx.event.EventType; /** * User: hansolo * Date: 10.10.13 * Time: 09:48 */ public class SelectionEvent extends Event { public static final EventType<SelectionEvent> SELECT = new EventType(ANY, "select"); public static final EventType<SelectionEvent> DESELECT = new EventType(ANY, "deselect"); // ******************** Constructors ********************************** public SelectionEvent(final Object SOURCE, final EventTarget TARGET, final EventType<SelectionEvent> EVENT_TYPE) { super(SOURCE, TARGET, EVENT_TYPE); } }
HanSolo/Enzo
src/main/java/eu.hansolo.enzo/onoffswitch/SelectionEvent.java
Java
apache-2.0
665
/** * * Copyright © 2016-2019 Florian Schmaus * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.smackx.iot.data.provider; import java.io.IOException; import org.jivesoftware.smack.packet.XmlEnvironment; import org.jivesoftware.smack.provider.IQProvider; import org.jivesoftware.smack.util.ParserUtils; import org.jivesoftware.smack.xml.XmlPullParser; import org.jivesoftware.smackx.iot.data.element.IoTDataRequest; public class IoTDataRequestProvider extends IQProvider<IoTDataRequest> { @Override public IoTDataRequest parse(XmlPullParser parser, int initialDepth, XmlEnvironment xmlEnvironment) throws IOException { int seqNr = ParserUtils.getIntegerAttributeOrThrow(parser, "seqnr", "IoT data request without sequence number"); boolean momentary = ParserUtils.getBooleanAttribute(parser, "momentary", false); return new IoTDataRequest(seqNr, momentary); } }
igniterealtime/Smack
smack-experimental/src/main/java/org/jivesoftware/smackx/iot/data/provider/IoTDataRequestProvider.java
Java
apache-2.0
1,443
// Copyright 2007 The Closure Library Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview DOM pattern base class. * * @author robbyw@google.com (Robby Walker) */ goog.provide('goog.dom.pattern.AbstractPattern'); goog.require('goog.dom.TagWalkType'); goog.require('goog.dom.pattern.MatchType'); /** * Base pattern class for DOM matching. * * @constructor */ goog.dom.pattern.AbstractPattern = function() { /** * The first node matched by this pattern. * @type {Node} */ this.matchedNode = null; }; /** * Reset any internal state this pattern keeps. */ goog.dom.pattern.AbstractPattern.prototype.reset = function() { // The base implementation does nothing. }; /** * Test whether this pattern matches the given token. * * @param {Node} token Token to match against. * @param {goog.dom.TagWalkType} type The type of token. * @return {goog.dom.pattern.MatchType} `MATCH` if the pattern matches. */ goog.dom.pattern.AbstractPattern.prototype.matchToken = function(token, type) { return goog.dom.pattern.MatchType.NO_MATCH; };
teppeis/closure-library
closure/goog/dom/pattern/abstractpattern.js
JavaScript
apache-2.0
1,621
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.locks;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import org.apache.geode.distributed.internal.locks.DLockBatch;
import org.apache.geode.distributed.internal.locks.DLockBatchId;
import org.apache.geode.distributed.internal.locks.LockGrantorId;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.cache.IdentityArrayList;
import org.apache.geode.internal.cache.TXRegionLockRequestImpl;
import org.apache.geode.internal.serialization.DataSerializableFixedID;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.KnownVersion;
import org.apache.geode.internal.serialization.SerializationContext;

/**
 * Adapts multiple TXRegionLockRequests to one DLockBatch for DLock to use.
 *
 */
public class TXLockBatch implements DLockBatch, DataSerializableFixedID {

  /** Identifies the batch as a single entity */
  private TXLockIdImpl txLockId;

  /**
   * List of <code>TXRegionLockRequests</code>
   */
  // NOTE(review): raw List; elements are assumed to be TXRegionLockRequestImpl
  // (see toData/fromData below) — confirm before parameterizing.
  private List reqs;

  /** Identifies the members participating in the transaction */
  private Set participants;

  /**
   * Constructs a <code>TXLockBatch</code> for the list of <code>TXRegionLockRequests</code>
   */
  public TXLockBatch(TXLockId txLockId, List reqs, Set participants) {
    this.txLockId = (TXLockIdImpl) txLockId;
    this.reqs = reqs;
    this.participants = participants;
  }

  @Override
  public InternalDistributedMember getOwner() {
    return this.txLockId.getMemberId();
  }

  public TXLockId getTXLockId() {
    return this.txLockId;
  }

  @Override
  public DLockBatchId getBatchId() {
    return this.txLockId;
  }

  public void setParticipants(Set participants) {
    this.participants = participants;
  }

  @Override
  public void grantedBy(LockGrantorId lockGrantorId) {
    this.txLockId.setLockGrantorId(lockGrantorId);
  }

  @Override
  public List getReqs() {
    // Lazily converts the request list to an IdentityArrayList so lookups use
    // identity rather than equals(); the converted list replaces the original.
    if (this.reqs != null && !(this.reqs instanceof IdentityArrayList)) {
      this.reqs = new IdentityArrayList(this.reqs);
    }
    return this.reqs;
  }

  @Override
  public String toString() {
    return "[TXLockBatch: txLockId=" + txLockId + "; reqs=" + reqs + "; participants="
        + participants + "]";
  }

  /**
   * Each lock batch contains a set of distributed system member ids that are participating in the
   * transaction. Public access for testing purposes.
   *
   * @return participants in the transaction
   */
  public Set getParticipants() {
    return this.participants;
  }

  // -------------------------------------------------------------------------
  // DataSerializable support
  // -------------------------------------------------------------------------

  // No-arg constructor required for deserialization; fields are populated by fromData.
  public TXLockBatch() {}

  @Override
  public int getDSFID() {
    return TX_LOCK_BATCH;
  }

  // Wire format (must mirror toData exactly): txLockId, participants set,
  // then an int count (-1 for null) followed by that many requests.
  @Override
  public void fromData(DataInput in,
      DeserializationContext context) throws IOException, ClassNotFoundException {
    this.txLockId = TXLockIdImpl.createFromData(in);
    this.participants = InternalDataSerializer.readSet(in);
    {
      int reqsSize = in.readInt();
      if (reqsSize >= 0) {
        this.reqs = new IdentityArrayList(reqsSize);
        for (int i = 0; i < reqsSize; i++) {
          this.reqs.add(TXRegionLockRequestImpl.createFromData(in));
        }
      }
    }
  }

  @Override
  public void toData(DataOutput out,
      SerializationContext context) throws IOException {
    InternalDataSerializer.invokeToData(this.txLockId, out);
    InternalDataSerializer.writeSet(this.participants, out);
    // A null request list is encoded as -1 so fromData can distinguish
    // "absent" from "empty".
    if (this.reqs == null) {
      out.writeInt(-1);
    } else {
      out.writeInt(this.reqs.size());
      for (Iterator iter = this.reqs.iterator(); iter.hasNext();) {
        TXRegionLockRequestImpl elem = (TXRegionLockRequestImpl) iter.next();
        InternalDataSerializer.invokeToData(elem, out);
      }
    }
  }

  @Override
  public KnownVersion[] getSerializationVersions() {
    // TODO Auto-generated method stub
    // NOTE(review): null means "no version-specific serializers" to the
    // framework — confirm this is intentional rather than an unfinished stub.
    return null;
  }
}
smgoller/geode
geode-core/src/main/java/org/apache/geode/internal/cache/locks/TXLockBatch.java
Java
apache-2.0
5,005
package com.intellij.openapi.externalSystem.service.project;

import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.ProjectKeys;
import com.intellij.openapi.externalSystem.model.project.ContentRootData;
import com.intellij.openapi.externalSystem.model.project.LibraryData;
import com.intellij.openapi.externalSystem.model.project.ModuleData;
import com.intellij.openapi.externalSystem.model.project.ModuleDependencyData;
import com.intellij.openapi.externalSystem.util.ArtifactInfo;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.ModuleLibraryOrderEntryImpl;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Thread-safe.
 *
 * Collection of lookup helpers that map external-system (e.g. Gradle) project
 * model entities onto their IDE-side counterparts: modules, content roots,
 * libraries, and library/module order entries. All lookups are linear scans
 * over the facade's views and return null when no counterpart exists.
 *
 * @author Denis Zhdanov
 * @since 2/6/12 3:28 PM
 */
public class ProjectStructureHelper {

  @NotNull private final PlatformFacade myFacade;
  // NOTE(review): not referenced anywhere in this class — confirm whether it
  // is used via reflection/DI elsewhere or is a leftover.
  @NotNull private final ExternalLibraryPathTypeMapper myLibraryPathTypeMapper;

  public ProjectStructureHelper(@NotNull PlatformFacade facade, @NotNull ExternalLibraryPathTypeMapper mapper) {
    myFacade = facade;
    myLibraryPathTypeMapper = mapper;
  }

  // Looks up the IDE module whose name matches the external module's name.
  @Nullable
  public Module findIdeModule(@NotNull ModuleData module, @NotNull Project ideProject) {
    return findIdeModule(module.getName(), ideProject);
  }

  @Nullable
  public Module findIdeModule(@NotNull String ideModuleName, @NotNull Project ideProject) {
    for (Module module : myFacade.getModules(ideProject)) {
      if (ideModuleName.equals(module.getName())) {
        return module;
      }
    }
    return null;
  }

  // Finds the IDE content root of the owning module whose path equals the
  // external content root's path; null if the module or root is missing.
  @Nullable
  public ModuleAwareContentRoot findIdeContentRoot(@NotNull DataNode<ContentRootData> node, @NotNull Project ideProject) {
    ModuleData moduleData = node.getData(ProjectKeys.MODULE);
    if (moduleData == null) {
      return null;
    }
    final Module module = findIdeModule(moduleData.getName(), ideProject);
    if (module == null) {
      return null;
    }
    for (ModuleAwareContentRoot contentRoot : myFacade.getContentRoots(module)) {
      final VirtualFile file = contentRoot.getFile();
      if (node.getData().getRootPath().equals(file.getPath())) {
        return contentRoot;
      }
    }
    return null;
  }

  @Nullable
  public Library findIdeLibrary(@NotNull final LibraryData library, @NotNull Project ideProject) {
    return findIdeLibrary(library.getName(), ideProject);
  }

  /**
   * Gradle library names follow the following pattern: {@code '[base library name]-[library-version]'}.
   * <p/>
   * This methods serves as an utility which tries to find a library by it's given base name.
   *
   * @param baseName base name of the target library
   * @param ideProject target ide project
   * @return target library for the given base name if there is one and only one library for it;
   *         <code>null</code> otherwise (if there are no libraries or more than one library for the given base name)
   */
  @Nullable
  public Library findIdeLibraryByBaseName(@NotNull String baseName, @NotNull Project ideProject) {
    final LibraryTable libraryTable = myFacade.getProjectLibraryTable(ideProject);
    Library result = null;
    for (Library library : libraryTable.getLibraries()) {
      ArtifactInfo info = ExternalSystemApiUtil.parseArtifactInfo(ExternalSystemApiUtil.getLibraryName(library));
      if (info == null || !baseName.equals(info.getName())) {
        continue;
      }
      // Ambiguous match: more than one library shares the base name.
      if (result != null) {
        return null;
      }
      result = library;
    }
    return result;
  }

  // Exact-name lookup in the project library table.
  @Nullable
  public Library findIdeLibrary(@NotNull String libraryName, @NotNull Project ideProject) {
    final LibraryTable libraryTable = myFacade.getProjectLibraryTable(ideProject);
    for (Library ideLibrary : libraryTable.getLibraries()) {
      if (libraryName.equals(ExternalSystemApiUtil.getLibraryName(ideLibrary))) {
        return ideLibrary;
      }
    }
    return null;
  }

  // Like findIdeLibrary(name, project) but additionally requires the library
  // to contain the given jar at the given root type.
  @Nullable
  public Library findIdeLibrary(@NotNull String libraryName,
                                @NotNull OrderRootType jarType,
                                @NotNull String jarPath,
                                @NotNull Project ideProject) {
    Library library = findIdeLibrary(libraryName, ideProject);
    if (library == null) {
      return null;
    }
    for (VirtualFile file : library.getFiles(jarType)) {
      if (jarPath.equals(ExternalSystemApiUtil.getLocalFileSystemPath(file))) {
        return library;
      }
    }
    return null;
  }

  // Finds the library order entry with the given library name among the
  // module's order entries; uses a RootPolicy visitor to filter entry kinds.
  @Nullable
  public LibraryOrderEntry findIdeLibraryDependency(@NotNull final String moduleName,
                                                    @NotNull final String libraryName,
                                                    @NotNull Project ideProject) {
    final Module ideModule = findIdeModule(moduleName, ideProject);
    if (ideModule == null) {
      return null;
    }
    RootPolicy<LibraryOrderEntry> visitor = new RootPolicy<LibraryOrderEntry>() {
      @Override
      public LibraryOrderEntry visitLibraryOrderEntry(LibraryOrderEntry ideDependency, LibraryOrderEntry value) {
        if (libraryName.equals(ideDependency.getLibraryName())) {
          return ideDependency;
        }
        return value;
      }
    };
    for (OrderEntry entry : myFacade.getOrderEntries(ideModule)) {
      final LibraryOrderEntry result = entry.accept(visitor, null);
      if (result != null) {
        return result;
      }
    }
    return null;
  }

  // Same as above but only accepts module-local library entries
  // (ModuleLibraryOrderEntryImpl), matching by the resolved library's name.
  @Nullable
  public ModuleLibraryOrderEntryImpl findIdeModuleLocalLibraryDependency(@NotNull final String moduleName,
                                                                         @NotNull final String libraryName,
                                                                         @NotNull Project ideProject) {
    final Module ideModule = findIdeModule(moduleName, ideProject);
    if (ideModule == null) {
      return null;
    }
    RootPolicy<ModuleLibraryOrderEntryImpl> visitor = new RootPolicy<ModuleLibraryOrderEntryImpl>() {
      @Override
      public ModuleLibraryOrderEntryImpl visitLibraryOrderEntry(LibraryOrderEntry ideDependency, ModuleLibraryOrderEntryImpl value) {
        Library library = ideDependency.getLibrary();
        if (library == null) {
          return value;
        }
        if (ideDependency instanceof ModuleLibraryOrderEntryImpl
            && libraryName.equals(ExternalSystemApiUtil.getLibraryName(library))) {
          return (ModuleLibraryOrderEntryImpl)ideDependency;
        }
        return value;
      }
    };
    for (OrderEntry entry : myFacade.getOrderEntries(ideModule)) {
      final ModuleLibraryOrderEntryImpl result = entry.accept(visitor, null);
      if (result != null) {
        return result;
      }
    }
    return null;
  }

  // Variant that scans a (possibly uncommitted) modifiable model instead of
  // the committed project structure.
  @SuppressWarnings("MethodMayBeStatic")
  @Nullable
  public LibraryOrderEntry findIdeLibraryDependency(@NotNull final String libraryName,
                                                    @NotNull ModifiableRootModel model) {
    for (OrderEntry entry : model.getOrderEntries()) {
      if (entry instanceof LibraryOrderEntry) {
        LibraryOrderEntry candidate = (LibraryOrderEntry)entry;
        if (libraryName.equals(candidate.getLibraryName())) {
          return candidate;
        }
      }
    }
    return null;
  }

  @Nullable
  public ModuleOrderEntry findIdeModuleDependency(@NotNull final ModuleDependencyData gradleDependency, @NotNull Project ideProject) {
    return findIdeModuleDependency(gradleDependency.getOwnerModule().getName(),
                                   gradleDependency.getTarget().getName(),
                                   ideProject);
  }

  // Finds the module-to-module order entry of ownerModuleName that targets
  // dependencyModuleName.
  @Nullable
  public ModuleOrderEntry findIdeModuleDependency(@NotNull final String ownerModuleName,
                                                  @NotNull final String dependencyModuleName,
                                                  @NotNull Project ideProject) {
    final Module ideOwnerModule = findIdeModule(ownerModuleName, ideProject);
    if (ideOwnerModule == null) {
      return null;
    }
    RootPolicy<ModuleOrderEntry> visitor = new RootPolicy<ModuleOrderEntry>() {
      @Override
      public ModuleOrderEntry visitModuleOrderEntry(ModuleOrderEntry ideDependency, ModuleOrderEntry value) {
        if (dependencyModuleName.equals(ideDependency.getModuleName())) {
          return ideDependency;
        }
        return value;
      }
    };
    for (OrderEntry orderEntry : myFacade.getOrderEntries(ideOwnerModule)) {
      final ModuleOrderEntry result = orderEntry.accept(visitor, null);
      if (result != null) {
        return result;
      }
    }
    return null;
  }

  // Modifiable-model variant of the module-dependency lookup; matches on the
  // dependency's own name.
  @SuppressWarnings("MethodMayBeStatic")
  @Nullable
  public ModuleOrderEntry findIdeModuleDependency(@NotNull ModuleDependencyData dependency, @NotNull ModifiableRootModel model) {
    for (OrderEntry entry : model.getOrderEntries()) {
      if (entry instanceof ModuleOrderEntry) {
        ModuleOrderEntry candidate = (ModuleOrderEntry)entry;
        if (dependency.getName().equals(candidate.getModuleName())) {
          return candidate;
        }
      }
    }
    return null;
  }
}
android-ia/platform_tools_idea
platform/external-system-impl/src/com/intellij/openapi/externalSystem/service/project/ProjectStructureHelper.java
Java
apache-2.0
9,454
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * @author max
 */
package com.intellij.psi.impl.search;

import com.intellij.ide.highlighter.JavaClassFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.JdkOrderEntry;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.SdkResolveScopeProvider;
import com.intellij.psi.search.DelegatingGlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScope;
import org.jetbrains.annotations.NotNull;

// Narrows a delegate scope to files relevant for Java resolution:
// compiled classes from libraries, files under source roots, and SDK files
// admitted by an SdkResolveScopeProvider extension.
public class JavaSourceFilterScope extends DelegatingGlobalSearchScope {
  private final ProjectFileIndex myIndex;

  public JavaSourceFilterScope(@NotNull final GlobalSearchScope delegate) {
    super(delegate);
    // NOTE(review): assumes getProject() is non-null here, while contains()
    // below guards against a null project — confirm that every delegate
    // passed in carries a project.
    myIndex = ProjectRootManager.getInstance(getProject()).getFileIndex();
  }

  @Override
  public boolean contains(final VirtualFile file) {
    // The file must at least be in the delegate scope.
    if (!super.contains(file)) {
      return false;
    }

    // Compiled .class files count only when they live in library classes.
    if (JavaClassFileType.INSTANCE == file.getFileType()) {
      return myIndex.isInLibraryClasses(file);
    }

    // Anything under a source root is accepted.
    if (myIndex.isInSourceContent(file)) {
      return true;
    }

    // Otherwise, ask SDK resolve-scope providers whether any JDK order entry
    // of this file defines a scope that contains it.
    final Project project = getProject();
    if (project != null) {
      for (OrderEntry entry : myIndex.getOrderEntriesForFile(file)) {
        if (entry instanceof JdkOrderEntry) {
          final JdkOrderEntry jdkOrderEntry = (JdkOrderEntry)entry;
          for (SdkResolveScopeProvider provider : SdkResolveScopeProvider.EP_NAME.getExtensions()) {
            final GlobalSearchScope scope = provider.getScope(project, jdkOrderEntry);

            if (scope != null && scope.contains(file)) {
              return true;
            }
          }
        }
      }
    }
    return false;
  }
}
android-ia/platform_tools_idea
java/java-indexing-impl/src/com/intellij/psi/impl/search/JavaSourceFilterScope.java
Java
apache-2.0
2,412
package com.adsdk.sdk.nativeformats; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.lang.Thread.UncaughtExceptionHandler; import java.nio.charset.Charset; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.StatusLine; import org.apache.http.client.methods.HttpGet; import android.content.Context; import android.net.http.AndroidHttpClient; import android.os.Build; import android.os.Handler; import android.view.ViewGroup; import com.adsdk.sdk.Log; import com.adsdk.sdk.RequestException; import com.adsdk.sdk.Util; import com.adsdk.sdk.nativeformats.creative.Creative; import com.adsdk.sdk.nativeformats.creative.CreativesManager; /** * Created by itamar on 16/03/15. */ public class NativeFormat { private static final String BASE_URL = "http://my.mobfox.com/request.php"; private Handler handler; String publicationId; CreativesManager creative_manager; int width; int height; Context ctx; final static String TYPE_BLOCK = "block"; final static String TYPE_STRIPE = "stripe"; // public void WriteTemp(String data) { // // FileOutputStream fop = null; // // try { // // File temp = File.createTempFile("creative", ".html"); // fop = new FileOutputStream(temp); // // fop.write(data.getBytes(Charset.forName("UTF-8"))); // // android.util.Log.d("FilePath", temp.getAbsolutePath()); // android.util.Log.d("FileData", data); // // } catch(IOException e) { // // e.printStackTrace(); // // } // } public interface Listener { public void onSuccess(String template, String data); public void onError(Exception e); } NativeFormat(Context ctx, int width, int height, String publicationId) { this.ctx = ctx; this.width = width; this.height = height; this.publicationId = publicationId; this.creative_manager = CreativesManager.getInstance(this.ctx,publicationId); } // 
--------------------------------------------------------- public void loadAd(String webviewUserAgent, final Listener listener) { float ratio = height / width; String type = NativeFormat.TYPE_BLOCK; if ( ratio < 0.5 ) { type = NativeFormat.TYPE_STRIPE; } if(Build.FINGERPRINT.startsWith("generic")){ webviewUserAgent = ""; } final Creative creative = creative_manager.getCreative(type,webviewUserAgent); final NativeFormatRequest request = new NativeFormatRequest(); request.setRequestUrl(BASE_URL); request.setPublisherId(this.publicationId); // TODO: check if correctly set String ipAddress = Utils.getIPAddress(); //TODO: can we remove it? Other requests don't send IP if (ipAddress.indexOf("10.") == 0 || ipAddress.length() == 0) { ipAddress = "2.122.29.194"; } request.ip = ipAddress; // request.add("o_androidid", Settings.Secure.getString(getContext().getContentResolver(), Settings.Secure.ANDROID_ID)); //TODO: we cannot use this ID anymore (only Google Advertising ID) // params.add("o_andadvid", "c86f7529-33e2-4346-be0d-777ac53be320");//AdvertisingIdClient.getAdvertisingIdInfo(this.getContext()).getId()); request.setAndroidAdId(Util.getAndroidAdId()); request.setAdDoNotTrack(Util.hasAdDoNotTrack()); request.setUserAgent(Util.getDefaultUserAgentString(ctx)); request.setUserAgent2(Util.buildUserAgent()); request.setTemplateName(creative.getName()); Log.d("starting build"); Log.d("native req: "+request.toUri()); handler = new Handler(); Thread requestThread = new Thread(new Runnable() { @Override public void run() { AndroidHttpClient client = null; try { client = AndroidHttpClient.newInstance(System.getProperty("http.agent")); final String url = request.toString(); HttpGet request = new HttpGet(url); request.setHeader("User-Agent", System.getProperty("http.agent")); HttpResponse response = client.execute(request); Log.v("sent request"); StatusLine statusLine = response.getStatusLine(); int statusCode = statusLine.getStatusCode(); if (statusCode == 200) { Log.v("start build 
response"); StringBuilder builder = new StringBuilder(); HttpEntity entity = response.getEntity(); InputStream content = entity.getContent(); BufferedReader reader = new BufferedReader(new InputStreamReader(content)); String line; while ((line = reader.readLine()) != null) { builder.append(line + "\n"); } final String data = builder.toString(); android.util.Log.d("builder.toString()", builder.toString()); Log.v("build got data"); if (data.length() == 0) { handler.post(new Runnable() { @Override public void run() { listener.onError(new RequestException("empty response from: " + url)); } }); return; } Log.v("builder: "+data); handler.post(new Runnable() { @Override public void run() { listener.onSuccess(creative.getTemplate(), data); } }); } else { handler.post(new Runnable() { @Override public void run() { listener.onError(new RequestException("request failed: " + url)); } }); return; } } catch (final Exception e) { handler.post(new Runnable() { @Override public void run() { listener.onError(e); } }); } finally { if (client != null) { client.close(); } } } }); requestThread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() { @Override public void uncaughtException(Thread thread, Throwable ex) { listener.onError(new Exception(ex)); } }); requestThread.start(); }; // --------------------------------------------------------- }
palaniyappanBala/MobFox-Android-SDK
src/main/java/com/adsdk/sdk/nativeformats/NativeFormat.java
Java
apache-2.0
6,782
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.management.internal.cli.functions; import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.io.Serializable; import java.util.List; import java.util.Objects; import com.fasterxml.jackson.databind.ObjectMapper; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.apache.geode.cache.Cache; import org.apache.geode.cache.CacheFactory; import org.apache.geode.cache.Region; import org.apache.geode.cache.RegionFactory; import org.apache.geode.cache.RegionShortcut; import org.apache.geode.internal.cache.InternalCache; import org.apache.geode.management.internal.cli.domain.DataCommandResult; public class Geode3544JUnitTest { private static Cache cache; private static final String PARTITIONED_REGION = "emp_region"; private static String emp_key; static class EmpProfile implements Serializable { private static final long serialVersionUID = 1L; private long data; public EmpProfile() { } public EmpProfile(long in_data) { this.data = in_data; } public long getData() { return data; } public void setData(long data) { this.data = 
data; } } public static class EmpData extends EmpProfile { private short empId; private Integer empNumber; private long empAccount; public EmpData() { super(); } public EmpData(long in_data, short in_empId, Integer in_empNumber, long in_empAccount) { super(in_data); this.empId = in_empId; this.empNumber = in_empNumber; this.empAccount = in_empAccount; } public boolean equals(Object other) { if (this == other) { return true; } if (other instanceof EmpData) { return this.getEmpId() == (((EmpData) other).getEmpId()); } return true; } @Override public String toString() { return "data:" + getData() + "," + "empId" + getEmpId(); } public short getEmpId() { return empId; } public void setEmpId(short empId) { this.empId = empId; } public Integer getEmpNumber() { return empNumber; } public void setEmpNumber(Integer empNumber) { this.empNumber = empNumber; } public long getEmpAccount() { return empAccount; } public void setEmpAccount(long empAccount) { this.empAccount = empAccount; } @Override public int hashCode() { return Objects.hash(empAccount, empNumber, empId); } } @BeforeClass public static void setUp() throws Exception { cache = new CacheFactory().set(MCAST_PORT, "0").create(); RegionFactory<EmpData, String> factory = cache.createRegionFactory(RegionShortcut.PARTITION); Region<EmpData, String> region1 = factory.create(PARTITIONED_REGION); EmpData emp_data_key = new EmpData(1, (short) 1, 1, 1); region1.put(emp_data_key, "value_1"); ObjectMapper mapper = new ObjectMapper(); emp_key = mapper.writeValueAsString(emp_data_key); } @AfterClass public static void tearDown() { cache.close(); cache = null; } /* * This test addresses GEODE-3544 */ @Test public void testLocateKeyIsObject() { DataCommandFunction dataCmdFn = new DataCommandFunction(); DataCommandResult result = dataCmdFn.locateEntry(emp_key, EmpData.class.getName(), String.class.getName(), PARTITIONED_REGION, false, (InternalCache) cache); assertNotNull(result); result.aggregate(null); 
List<DataCommandResult.KeyInfo> keyInfos = result.getLocateEntryLocations(); assertEquals(1, keyInfos.size()); } }
smgoller/geode
geode-gfsh/src/integrationTest/java/org/apache/geode/management/internal/cli/functions/Geode3544JUnitTest.java
Java
apache-2.0
4,524
namespace eidss.winclient.FlexForms.Controls { partial class DesignerHost { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Component Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { components = new System.ComponentModel.Container(); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; } #endregion } }
EIDSS/EIDSS-Legacy
EIDSS v6/eidss.winclient/FlexForms/Controls/DesignerHost.Designer.cs
C#
bsd-2-clause
1,165
#include "stdhdr.h" #include "camplib.h" #include "mfd.h" #include "Graphics/Include/render2d.h" #include "dispcfg.h" #include "simdrive.h" #include "camp2sim.h" #include "hud.h" #include "aircrft.h" #include "fack.h" #include "otwdrive.h" //MI #include "cpmanager.h" //MI #include "icp.h" //MI #include "aircrft.h" //MI #include "fcc.h" //MI #include "radardoppler.h" //MI //MI void DrawBullseyeCircle(VirtualDisplay* display, float cursorX, float cursorY); struct MfdTestButtons { char *label1, *label2; enum { ModeNoop = 0, // do nothing ModeParent, // hand off to parent ModeTest1, ModeTest2, // two test sub modes ModeRaltTest, ModeRunTest, ModeClear, }; int nextMode; }; #define NOENTRY { NULL, NULL, MfdTestButtons::ModeNoop} #define PARENT { NULL, NULL, MfdTestButtons::ModeParent} static const MfdTestButtons testpage1[20] = { // test page menu {"BIT1", NULL, MfdTestButtons::ModeTest2}, // 1 NOENTRY, {"CLR", NULL, MfdTestButtons::ModeClear}, NOENTRY, NOENTRY, // 5 {"MFDS", NULL, MfdTestButtons::ModeRunTest}, {"RALT", "500", MfdTestButtons::ModeRaltTest}, {"TGP", NULL, MfdTestButtons::ModeRunTest}, {"FINS", NULL, MfdTestButtons::ModeRunTest}, {"TFR", NULL, MfdTestButtons::ModeRunTest}, // 10 PARENT, PARENT, PARENT, PARENT, // current mode PARENT, // 15 {"RSU", NULL, MfdTestButtons::ModeRunTest}, {"INS", NULL, MfdTestButtons::ModeRunTest}, {"SMS", NULL, MfdTestButtons::ModeNoop}, {"FCR", NULL, MfdTestButtons::ModeRunTest}, {"DTE", NULL, MfdTestButtons::ModeRunTest}, // 20 }; static const MfdTestButtons testpage2[20] = { // test page menu {"BIT2", NULL, MfdTestButtons::ModeTest1}, // 1 NOENTRY, {"CLR", NULL, MfdTestButtons::ModeClear}, NOENTRY, NOENTRY, // 5 {"IFF1", NULL, MfdTestButtons::ModeRunTest}, {"IFF2", NULL, MfdTestButtons::ModeRunTest}, {"IFF3", NULL, MfdTestButtons::ModeRunTest}, {"IFFC", NULL, MfdTestButtons::ModeRunTest}, {"TCN", NULL, MfdTestButtons::ModeRunTest}, // 10 PARENT, PARENT, PARENT, PARENT, PARENT, // 15 {NULL, NULL, MfdTestButtons::ModeNoop}, 
{NULL, NULL, MfdTestButtons::ModeNoop}, {NULL, NULL, MfdTestButtons::ModeNoop}, {"TISL", NULL, MfdTestButtons::ModeRunTest}, {"UFC", NULL, MfdTestButtons::ModeRunTest}, // 20 }; struct MfdTestPage { const MfdTestButtons *buttons; }; static const MfdTestPage mfdpages[] = { {testpage1}, {testpage2}, }; static const int NMFDPAGES = sizeof(mfdpages) / sizeof(mfdpages[0]); TestMfdDrawable::TestMfdDrawable() { bitpage = 0; bittest = -1; timer = 0; } void TestMfdDrawable::Display(VirtualDisplay* newDisplay) { AircraftClass *playerAC = SimDriver.GetPlayerAircraft(); //MI float cX, cY = 0; if (g_bRealisticAvionics) { RadarDopplerClass* theRadar = (RadarDopplerClass*)FindSensor(playerAC, SensorClass::Radar); if ( not theRadar) { ShiWarning("Oh Oh shouldn't be here without a radar"); return; } else { theRadar->GetCursorPosition(&cX, &cY); } } display = newDisplay; ShiAssert(bitpage >= 0 and bitpage < sizeof(mfdpages) / sizeof(mfdpages[0])); ShiAssert(display not_eq NULL); const MfdTestButtons *mb = mfdpages[bitpage].buttons; AircraftClass *self = MfdDisplay[OnMFD()]->GetOwnShip(); ShiAssert(self not_eq NULL); //MI changed if (g_bRealisticAvionics) { if (OTWDriver.pCockpitManager and OTWDriver.pCockpitManager->mpIcp and OTWDriver.pCockpitManager->mpIcp->ShowBullseyeInfo) { DrawBullseyeCircle(display, cX, cY); } else DrawReference(self); } else DrawReference(self); display->SetColor(GetMfdColor(MFD_LABELS)); char buf[100]; for (int i = 0; i < 20; i++) { int hilite = 0; if (i == bittest and timer > SimLibElapsedTime) hilite = 1; switch (mb[i].nextMode) { case MfdTestButtons::ModeRaltTest: sprintf(buf, "%.0f", hilite ? 
300.0f : TheHud->lowAltWarning); LabelButton(i, mb[i].label1, buf, hilite); break; default: if (mb[i].label1) LabelButton(i, mb[i].label1, mb[i].label2, hilite); else if (mb[i].nextMode == MfdTestButtons::ModeParent) MfdDrawable::DefaultLabel(i); } } if (playerAC and playerAC->mFaults) { FackClass *fack = playerAC->mFaults; float yinc = display->TextHeight(); const static float namex = -0.6f; const static float starty = 0.6f; float y = starty; float x = namex; float xinc = 0.3F; for (int i = 0; i < fack->GetMflListCount(); i++) { const char *fname; int subsys; int count; char timestr[100]; if (fack->GetMflEntry(i, &fname, &subsys, &count, timestr) == false) continue; char outstr[100]; for (int i = 0; i < 5; i++) { switch (i) { case 1: sprintf(outstr, "%-4s", fname); display->TextLeft(x, y, outstr); x += xinc; break; case 2: sprintf(outstr, "%03d", subsys); display->TextLeft(x, y, outstr); x += xinc; break; case 3: x -= 0.1F; sprintf(outstr, "%2d", count); display->TextLeft(x, y, outstr); x += xinc; break; case 4: x -= 0.1F; sprintf(outstr, "%s", timestr); display->TextLeft(x, y, outstr); x += xinc; break; default: break; } } //sprintf (outstr, "%-4s %03d %2d %s", fname, subsys, count, timestr); //ShiAssert(strlen(outstr) < sizeof outstr); //display->TextLeft(namex, y, outstr); y -= yinc; x = namex; } } } void TestMfdDrawable::PushButton(int whichButton, int whichMFD) { ShiAssert(bitpage >= 0 and bitpage < sizeof(mfdpages) / sizeof(mfdpages[0])); ShiAssert(whichButton >= 0 and whichButton < 20); AircraftClass *playerAC = SimDriver.GetPlayerAircraft(); switch (mfdpages[bitpage].buttons[whichButton].nextMode) { case MfdTestButtons::ModeNoop: break; case MfdTestButtons::ModeRaltTest: case MfdTestButtons::ModeRunTest: bittest = whichButton; timer = SimLibElapsedTime + 5 * CampaignSeconds; break; case MfdTestButtons::ModeTest2: bitpage = 1; break; case MfdTestButtons::ModeTest1: bitpage = 0; break; case MfdTestButtons::ModeParent: MfdDrawable::PushButton(whichButton, 
whichMFD); break; case MfdTestButtons::ModeClear: // clear MFL if (playerAC and playerAC->mFaults) playerAC->mFaults->ClearMfl(); break; } }
GPUWorks/freefalcon-central
src/sim/displays/testmfd.cpp
C++
bsd-2-clause
7,687
/* * L.Handler.TouchZoom is used internally by L.Map to add touch-zooming on Webkit-powered mobile browsers. */ L.Handler.TouchZoom = L.Handler.extend({ enable: function() { if (!L.Browser.touch || this._enabled) { return; } L.DomEvent.addListener(this._map._container, 'touchstart', this._onTouchStart, this); this._enabled = true; }, disable: function() { if (!this._enabled) { return; } L.DomEvent.removeListener(this._map._container, 'touchstart', this._onTouchStart, this); this._enabled = false; }, _onTouchStart: function(e) { if (!e.touches || e.touches.length != 2 || this._map._animatingZoom) { return; } var p1 = this._map.mouseEventToLayerPoint(e.touches[0]), p2 = this._map.mouseEventToLayerPoint(e.touches[1]), viewCenter = this._map.containerPointToLayerPoint(this._map.getSize().divideBy(2)); this._startCenter = p1.add(p2).divideBy(2, true); this._startDist = p1.distanceTo(p2); //this._startTransform = this._map._mapPane.style.webkitTransform; this._moved = false; this._zooming = true; this._centerOffset = viewCenter.subtract(this._startCenter); L.DomEvent.addListener(document, 'touchmove', this._onTouchMove, this); L.DomEvent.addListener(document, 'touchend', this._onTouchEnd, this); L.DomEvent.preventDefault(e); }, _onTouchMove: function(e) { if (!e.touches || e.touches.length != 2) { return; } if (!this._moved) { this._map._mapPane.className += ' leaflet-zoom-anim'; this._map._prepareTileBg(); this._moved = true; } var p1 = this._map.mouseEventToLayerPoint(e.touches[0]), p2 = this._map.mouseEventToLayerPoint(e.touches[1]); this._scale = p1.distanceTo(p2) / this._startDist; this._delta = p1.add(p2).divideBy(2, true).subtract(this._startCenter); /* * Used 2 translates instead of transform-origin because of a very strange bug - * it didn't count the origin on the first touch-zoom but worked correctly afterwards */ this._map._tileBg.style.webkitTransform = [ L.DomUtil.getTranslateString(this._delta), L.DomUtil.getScaleString(this._scale, this._startCenter) 
].join(" "); L.DomEvent.preventDefault(e); }, _onTouchEnd: function(e) { if (!this._moved || !this._zooming) { return; } this._zooming = false; var oldZoom = this._map.getZoom(), floatZoomDelta = Math.log(this._scale)/Math.LN2, roundZoomDelta = (floatZoomDelta > 0 ? Math.ceil(floatZoomDelta) : Math.floor(floatZoomDelta)), zoom = this._map._limitZoom(oldZoom + roundZoomDelta), zoomDelta = zoom - oldZoom, centerOffset = this._centerOffset.subtract(this._delta).divideBy(this._scale), centerPoint = this._map.getPixelOrigin().add(this._startCenter).add(centerOffset), center = this._map.unproject(centerPoint); L.DomEvent.removeListener(document, 'touchmove', this._onTouchMove); L.DomEvent.removeListener(document, 'touchend', this._onTouchEnd); var finalScale = Math.pow(2, zoomDelta); this._map._runAnimation(center, zoom, finalScale / this._scale, this._startCenter.add(centerOffset)); } });
coomsie/Leaflet
src/handler/TouchZoom.js
JavaScript
bsd-2-clause
3,183
require 'formula' class Qt < Formula homepage 'http://qt-project.org/' url "http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz" sha1 "ddf9c20ca8309a116e0466c42984238009525da6" head 'git://gitorious.org/qt/qt.git', :branch => '4.8' bottle do revision 5 sha1 "34d66e17aaed4d2067297d4a64482d56f2382339" => :mavericks sha1 "9ab96caa65e8b707deeb27caaff9ad8b1e906b2c" => :mountain_lion sha1 "18b1d1a4aa89f92c4b9a9f202a95cc0896e03a9d" => :lion end option :universal option 'with-qt3support', 'Build with deprecated Qt3Support module support' option 'with-docs', 'Build documentation' option 'developer', 'Build and link with developer options' depends_on "d-bus" => :optional depends_on "mysql" => :optional odie 'qt: --with-qtdbus has been renamed to --with-d-bus' if build.with? "qtdbus" odie 'qt: --with-demos-examples is no longer supported' if build.with? "demos-examples" odie 'qt: --with-debug-and-release is no longer supported' if build.with? "debug-and-release" def install ENV.universal_binary if build.universal? args = ["-prefix", prefix, "-system-zlib", "-qt-libtiff", "-qt-libpng", "-qt-libjpeg", "-confirm-license", "-opensource", "-nomake", "demos", "-nomake", "examples", "-cocoa", "-fast", "-release"] if ENV.compiler == :clang args << "-platform" if MacOS.version >= :mavericks args << "unsupported/macx-clang-libc++" else args << "unsupported/macx-clang" end end args << "-plugin-sql-mysql" if build.with? 'mysql' if build.with? 'd-bus' dbus_opt = Formula["d-bus"].opt_prefix args << "-I#{dbus_opt}/lib/dbus-1.0/include" args << "-I#{dbus_opt}/include/dbus-1.0" args << "-L#{dbus_opt}/lib" args << "-ldbus-1" args << "-dbus-linked" end if build.with? 'qt3support' args << "-qt3support" else args << "-no-qt3support" end args << "-nomake" << "docs" if build.without? 'docs' if MacOS.prefer_64_bit? or build.universal? args << '-arch' << 'x86_64' end if !MacOS.prefer_64_bit? or build.universal? 
args << '-arch' << 'x86' end args << '-developer-build' if build.include? 'developer' system "./configure", *args system "make" ENV.j1 system "make install" # what are these anyway? (bin+'pixeltool.app').rmtree (bin+'qhelpconverter.app').rmtree # remove porting file for non-humans (prefix+'q3porting.xml').unlink if build.without? 'qt3support' # Some config scripts will only find Qt in a "Frameworks" folder frameworks.install_symlink Dir["#{lib}/*.framework"] # The pkg-config files installed suggest that headers can be found in the # `include` directory. Make this so by creating symlinks from `include` to # the Frameworks' Headers folders. Pathname.glob("#{lib}/*.framework/Headers") do |path| include.install_symlink path => path.parent.basename(".framework") end Pathname.glob("#{bin}/*.app") { |app| mv app, prefix } end test do system "#{bin}/qmake", '-project' end def caveats; <<-EOS.undent We agreed to the Qt opensource license for you. If this is unacceptable you should uninstall. EOS end end
ktheory/homebrew
Library/Formula/qt.rb
Ruby
bsd-2-clause
3,379
/* * Copyright 2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.internal.artifacts.ivyservice.dynamicversions; import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.ModuleVersionListing; class ModuleVersionsCacheEntry { public ModuleVersionListing moduleVersionListing; public long createTimestamp; ModuleVersionsCacheEntry(ModuleVersionListing moduleVersionListing, long createTimestamp) { this.moduleVersionListing = moduleVersionListing; this.createTimestamp = createTimestamp; } }
Pushjet/Pushjet-Android
gradle/wrapper/dists/gradle-1.12-all/4ff8jj5a73a7zgj5nnzv1ubq0/gradle-1.12/src/core-impl/org/gradle/api/internal/artifacts/ivyservice/dynamicversions/ModuleVersionsCacheEntry.java
Java
bsd-2-clause
1,104
cask "craftmanager" do version "1.0.105,226" sha256 :no_check url "https://craft-assets.invisionapp.com/CraftManager/production/CraftManager.zip" name "CraftManager" homepage "https://www.invisionapp.com/craft" livecheck do url "https://craft-assets.invisionapp.com/CraftManager/production/appcast.xml" strategy :sparkle end app "CraftManager.app" end
m3nu/homebrew-cask
Casks/craftmanager.rb
Ruby
bsd-2-clause
379
/* * World Calendars * https://github.com/alexcjohnson/world-calendars * * Batch-converted from kbwood/calendars * Many thanks to Keith Wood and all of the contributors to the original project! * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ /* http://keith-wood.name/calendars.html Hindi INDIA localisation for Gregorian/Julian calendars for jQuery. Written by Pawan Kumar Singh. */ var main = require('../main'); var _gregorian = main.calendars.gregorian; var _julian = main.calendars.julian; _gregorian.prototype.regionalOptions['hi-IN'] = { name: 'Gregorian', epochs: ['BCE', 'CE'], monthNames: ['जनवरी',' फरवरी', 'मार्च', 'अप्रैल', 'मई', 'जून','जुलाई', 'अगस्त', 'सितम्बर', 'अक्टूबर', 'नवम्बर', 'दिसम्बर'], monthNamesShort: ['जन', 'फर', 'मार्च','अप्रै', 'मई', 'जून','जुलाई', 'अग', 'सित', 'अक्टू', 'नव', 'दिस'], dayNames: ['रविवार', 'सोमवार', 'मंगलवार', 'बुधवार', 'गुरुवार', 'शुक्रवार', 'शनिवार'], dayNamesShort: ['रवि', 'सोम', 'मंगल', 'बुध', 'गुरु', 'शुक्र', 'शनि'], dayNamesMin: ['र','सो','मं','बु','गु','शु','श'], digits: null, dateFormat: 'dd/mm/yyyy', firstDay: 1, isRTL: false }; if (_julian) { _julian.prototype.regionalOptions['hi-IN'] = _gregorian.prototype.regionalOptions['hi-IN']; }
useabode/redash
node_modules/world-calendars/dist/regional/hi-IN.js
JavaScript
bsd-2-clause
1,725
cask "simplenote" do version "2.15.0" sha256 "74c5ac784e03c9c804ee8ca27de15227ad0c65b3d9bea585a85bc426405e2f7d" url "https://github.com/Automattic/simplenote-electron/releases/download/v#{version}/Simplenote-macOS-#{version}.dmg" name "Simplenote" desc "React client for Simplenote" homepage "https://github.com/Automattic/simplenote-electron" livecheck do url :url strategy :github_latest end app "Simplenote.app" zap trash: [ "~/Library/Application Support/Simplenote", "~/Library/Caches/com.automattic.simplenote", "~/Library/Caches/com.automattic.simplenote.ShipIt", "~/Library/Saved Application State/com.automattic.simplenote.savedState", ] end
kronicd/homebrew-cask
Casks/simplenote.rb
Ruby
bsd-2-clause
701
#ifndef OSRM_TEST_SERVER_PARAMETERS_IO #define OSRM_TEST_SERVER_PARAMETERS_IO #include "engine/api/route_parameters.hpp" #include "engine/approach.hpp" #include "engine/bearing.hpp" #include <ostream> namespace osrm { namespace engine { namespace api { inline std::ostream &operator<<(std::ostream &out, api::RouteParameters::GeometriesType geometries) { switch (geometries) { case api::RouteParameters::GeometriesType::GeoJSON: out << "GeoJSON"; break; case api::RouteParameters::GeometriesType::Polyline: out << "Polyline"; break; default: BOOST_ASSERT_MSG(false, "GeometriesType not fully captured"); } return out; } inline std::ostream &operator<<(std::ostream &out, api::RouteParameters::OverviewType overview) { switch (overview) { case api::RouteParameters::OverviewType::False: out << "False"; break; case api::RouteParameters::OverviewType::Full: out << "Full"; break; case api::RouteParameters::OverviewType::Simplified: out << "Simplified"; break; default: BOOST_ASSERT_MSG(false, "OverviewType not fully captured"); } return out; } } inline std::ostream &operator<<(std::ostream &out, Bearing bearing) { out << bearing.bearing << "," << bearing.range; return out; } inline std::ostream &operator<<(std::ostream &out, Approach approach) { out << static_cast<int>(approach); return out; } } } #endif
oxidase/osrm-backend
unit_tests/server/parameters_io.hpp
C++
bsd-2-clause
1,489
cask 'resolume-arena' do version '7.1.1,68467' sha256 '80bac5f59aa7e9ea7b220b160ba81ee5bf0e2b105c63e35622ffcad387609328' url "https://resolume.com/download/Resolume_Arena_#{version.major_minor_patch.dots_to_underscores}_rev_#{version.after_comma}_Installer.dmg" appcast 'https://resolume.com/download/' name 'Resolume Arena' homepage 'https://resolume.com/' pkg 'Resolume Arena Installer.pkg' uninstall pkgutil: [ 'com.resolume.pkg.ResolumeArena.*', 'com.resolume.pkg.ResolumeDXV', 'com.resolume.pkg.ResolumeQuickLook', ], delete: "/Applications/Resolume Arena #{version.major}", signal: ['TERM', 'com.resolume.arena'], launchctl: 'com.resolume.arena' zap pkgutil: [ 'com.resolume.pkg.ResolumeDXV', 'com.resolume.pkg.ResolumeQuickLook', ] end
shoichiaizawa/homebrew-cask
Casks/resolume-arena.rb
Ruby
bsd-2-clause
956
/* * Copyright 2014 Attila Szegedi, Daniel Dekany, Jonathan Revusky * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package freemarker.ext.jsp.taglibmembers; public class EnclosingClass { public static class NestedClass { public static double hypotenuse(double a, double b) { return Math.sqrt(a * a + b * b); } } }
ekollof/DarkUniverse
lib/Freemarker/source/src/test/java/freemarker/ext/jsp/taglibmembers/EnclosingClass.java
Java
bsd-2-clause
892
class Znc < Formula desc "Advanced IRC bouncer" homepage "https://wiki.znc.in/ZNC" url "https://znc.in/releases/archive/znc-1.7.5.tar.gz" sha256 "a8941e1385c8654287a4428018d93459482e9d5eeedf86bef7b020ddc5f24721" bottle do sha256 "4bc43bf605d281484dbc34a779da628960df63ece897aa4d216ab6a7fc728b10" => :catalina sha256 "a0f33bcd73035f1c117ce51bbc9f1fd528b615a48a6f4783b64a26f3a02738e5" => :mojave sha256 "c708bb54d28e9780bfea6babc05f861b66fdbf1ac18e03ce9dfc19d9cc45052d" => :high_sierra end head do url "https://github.com/znc/znc.git" depends_on "autoconf" => :build depends_on "automake" => :build depends_on "libtool" => :build end depends_on "pkg-config" => :build depends_on "icu4c" depends_on "openssl@1.1" depends_on "python" def install ENV.cxx11 # These need to be set in CXXFLAGS, because ZNC will embed them in its # znc-buildmod script; ZNC's configure script won't add the appropriate # flags itself if they're set in superenv and not in the environment. ENV.append "CXXFLAGS", "-std=c++11" ENV.append "CXXFLAGS", "-stdlib=libc++" if ENV.compiler == :clang system "./autogen.sh" if build.head? system "./configure", "--prefix=#{prefix}", "--enable-python" system "make", "install" end plist_options :manual => "znc --foreground" def plist; <<~EOS <?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>Label</key> <string>#{plist_name}</string> <key>ProgramArguments</key> <array> <string>#{opt_bin}/znc</string> <string>--foreground</string> </array> <key>StandardErrorPath</key> <string>#{var}/log/znc.log</string> <key>StandardOutPath</key> <string>#{var}/log/znc.log</string> <key>RunAtLoad</key> <true/> <key>StartInterval</key> <integer>300</integer> </dict> </plist> EOS end test do mkdir ".znc" system bin/"znc", "--makepem" assert_predicate testpath/".znc/znc.pem", :exist? end end
zmwangx/homebrew-core
Formula/znc.rb
Ruby
bsd-2-clause
2,199
<?php date_default_timezone_set('Europe/Oslo'); $intervals = array( "2008-05-11T15:30:00Z/2007-03-01T13:00:00Z", "2007-05-11T15:30:00Z/2008-03-01T13:00:00Z", "2007-05-11T15:30:00Z 2008-03-01T13:00:00Z", "2007-05-11T15:30:00Z/", "2007-05-11T15:30:00Z", "2007-05-11T15:30:00Z/:00Z", ); foreach($intervals as $iv) { try { $di = new DateInterval($iv); var_dump($di); } catch ( Exception $e ) { echo $e->getMessage(), "\n"; } } echo "==DONE==\n"; ?>
JSchwehn/php
testdata/fuzzdir/corpus/ext_date_tests_bug52808.php
PHP
bsd-3-clause
490
""" ========================================================================== Illustration of prior and posterior Gaussian process for different kernels ========================================================================== This example illustrates the prior and posterior of a :class:`~sklearn.gaussian_process.GaussianProcessRegressor` with different kernels. Mean, standard deviation, and 5 samples are shown for both prior and posterior distributions. Here, we only give some illustration. To know more about kernels' formulation, refer to the :ref:`User Guide <gp_kernels>`. """ print(__doc__) # Authors: Jan Hendrik Metzen <jhm@informatik.uni-bremen.de> # Guillaume Lemaitre <g.lemaitre58@gmail.com> # License: BSD 3 clause # %% # Helper function # --------------- # # Before presenting each individual kernel available for Gaussian processes, # we will define an helper function allowing us plotting samples drawn from # the Gaussian process. # # This function will take a # :class:`~sklearn.gaussian_process.GaussianProcessRegressor` model and will # drawn sample from the Gaussian process. If the model was not fit, the samples # are drawn from the prior distribution while after model fitting, the samples are # drawn from the posterior distribution. import matplotlib.pyplot as plt import numpy as np def plot_gpr_samples(gpr_model, n_samples, ax): """Plot samples drawn from the Gaussian process model. If the Gaussian process model is not trained then the drawn samples are drawn from the prior distribution. Otherwise, the samples are drawn from the posterior distribution. Be aware that a sample here corresponds to a function. Parameters ---------- gpr_model : `GaussianProcessRegressor` A :class:`~sklearn.gaussian_process.GaussianProcessRegressor` model. n_samples : int The number of samples to draw from the Gaussian process distribution. ax : matplotlib axis The matplotlib axis where to plot the samples. 
""" x = np.linspace(0, 5, 100) X = x.reshape(-1, 1) y_mean, y_std = gpr_model.predict(X, return_std=True) y_samples = gpr_model.sample_y(X, n_samples) y_mean, y_std = gpr_model.predict(X, return_std=True) y_samples = gpr_model.sample_y(X, n_samples) for idx, single_prior in enumerate(y_samples.T): ax.plot( x, single_prior, linestyle="--", alpha=0.7, label=f"Sampled function #{idx + 1}", ) ax.plot(x, y_mean, color="black", label="Mean") ax.fill_between( x, y_mean - y_std, y_mean + y_std, alpha=0.1, color="black", label=r"$\pm$ 1 std. dev.", ) ax.set_xlabel("x") ax.set_ylabel("y") ax.set_ylim([-3, 3]) # %% # Dataset and Gaussian process generation # --------------------------------------- # We will create a training dataset that we will use in the different sections. rng = np.random.RandomState(4) X_train = rng.uniform(0, 5, 10).reshape(-1, 1) y_train = np.sin((X_train[:, 0] - 2.5) ** 2) n_samples = 5 # %% # Kernel cookbook # --------------- # # In this section, we illustrate some samples drawn from the prior and posterior # distributions of the Gaussian process with different kernels. # # Radial Basis Function kernel # ............................ 
from sklearn.gaussian_process import GaussianProcessRegressor from sklearn.gaussian_process.kernels import RBF kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) # plot prior plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") # plot posterior gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Radial Basis Function kernel", fontsize=18) plt.tight_layout() # %% print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" ) # %% # Rational Quadradtic kernel # .......................... 
from sklearn.gaussian_process.kernels import RationalQuadratic kernel = 1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1, alpha_bounds=(1e-5, 1e15)) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) # plot prior plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") # plot posterior gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Rational Quadratic kernel", fontsize=18) plt.tight_layout() # %% print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" ) # %% # Periodic kernel # ............... 
from sklearn.gaussian_process.kernels import ExpSineSquared kernel = 1.0 * ExpSineSquared( length_scale=1.0, periodicity=3.0, length_scale_bounds=(0.1, 10.0), periodicity_bounds=(1.0, 10.0), ) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) # plot prior plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") # plot posterior gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Periodic kernel", fontsize=18) plt.tight_layout() # %% print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" ) # %% # Dot product kernel # .................. 
from sklearn.gaussian_process.kernels import ConstantKernel, DotProduct kernel = ConstantKernel(0.1, (0.01, 10.0)) * ( DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2 ) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) # plot prior plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") # plot posterior gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Dot product kernel", fontsize=18) plt.tight_layout() # %% print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" ) # %% # Mattern kernel # .............. from sklearn.gaussian_process.kernels import Matern kernel = 1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0), nu=1.5) gpr = GaussianProcessRegressor(kernel=kernel, random_state=0) fig, axs = plt.subplots(nrows=2, sharex=True, sharey=True, figsize=(10, 8)) # plot prior plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[0]) axs[0].set_title("Samples from prior distribution") # plot posterior gpr.fit(X_train, y_train) plot_gpr_samples(gpr, n_samples=n_samples, ax=axs[1]) axs[1].scatter(X_train[:, 0], y_train, color="red", zorder=10, label="Observations") axs[1].legend(bbox_to_anchor=(1.05, 1.5), loc="upper left") axs[1].set_title("Samples from posterior distribution") fig.suptitle("Mattern kernel", fontsize=18) plt.tight_layout() # %% print(f"Kernel parameters before fit:\n{kernel})") print( f"Kernel parameters after fit: \n{gpr.kernel_} \n" f"Log-likelihood: {gpr.log_marginal_likelihood(gpr.kernel_.theta):.3f}" )
huzq/scikit-learn
examples/gaussian_process/plot_gpr_prior_posterior.py
Python
bsd-3-clause
8,547
// Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "components/update_client/crx_downloader_factory.h" #include "build/build_config.h" #if defined(OS_WIN) #include "components/update_client/background_downloader_win.h" #endif #include "components/update_client/crx_downloader.h" #include "components/update_client/network.h" #include "components/update_client/url_fetcher_downloader.h" namespace update_client { namespace { class CrxDownloaderFactoryChromium : public CrxDownloaderFactory { public: explicit CrxDownloaderFactoryChromium( scoped_refptr<NetworkFetcherFactory> network_fetcher_factory) : network_fetcher_factory_(network_fetcher_factory) {} // Overrides for CrxDownloaderFactory. scoped_refptr<CrxDownloader> MakeCrxDownloader( bool background_download_enabled) const override; private: ~CrxDownloaderFactoryChromium() override = default; scoped_refptr<NetworkFetcherFactory> network_fetcher_factory_; }; scoped_refptr<CrxDownloader> CrxDownloaderFactoryChromium::MakeCrxDownloader( bool background_download_enabled) const { scoped_refptr<CrxDownloader> url_fetcher_downloader = base::MakeRefCounted<UrlFetcherDownloader>(nullptr, network_fetcher_factory_); #if defined(OS_WIN) // If background downloads are allowed, then apply the BITS service // background downloader first. if (background_download_enabled) { return base::MakeRefCounted<BackgroundDownloader>(url_fetcher_downloader); } #endif return url_fetcher_downloader; } } // namespace scoped_refptr<CrxDownloaderFactory> MakeCrxDownloaderFactory( scoped_refptr<NetworkFetcherFactory> network_fetcher_factory) { return base::MakeRefCounted<CrxDownloaderFactoryChromium>( network_fetcher_factory); } } // namespace update_client
nwjs/chromium.src
components/update_client/crx_downloader_factory.cc
C++
bsd-3-clause
1,955
/* * File: NormalInverseGammaDistribution.java * Authors: Kevin R. Dixon * Company: Sandia National Laboratories * Project: Cognitive Foundry * * Copyright Mar 16, 2010, Sandia Corporation. * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive * license for use of this work by or on behalf of the U.S. Government. * Export of this program may require a license from the United States * Government. See CopyrightHistory.txt for complete details. * */ package gov.sandia.cognition.statistics.distribution; import gov.sandia.cognition.annotation.PublicationReference; import gov.sandia.cognition.annotation.PublicationReferences; import gov.sandia.cognition.annotation.PublicationType; import gov.sandia.cognition.math.matrix.Vector; import gov.sandia.cognition.math.matrix.VectorFactory; import gov.sandia.cognition.math.matrix.VectorInputEvaluator; import gov.sandia.cognition.statistics.AbstractDistribution; import gov.sandia.cognition.statistics.ClosedFormComputableDistribution; import gov.sandia.cognition.statistics.ProbabilityDensityFunction; import java.util.ArrayList; import java.util.Collection; import java.util.Random; /** * The normal inverse-gamma distribution is the product of a univariate * Gaussian distribution with an inverse-gamma distribution. It is the * conjugate prior of a univariate Gaussian with unknown mean and unknown * variance. (As far as I know, it has no other purpose.) * @author Kevin R. Dixon * @since 3.0 */ @PublicationReferences( references={ @PublicationReference( author="Christopher M. 
Bishop", title="Pattern Recognition and Machine Learning", type=PublicationType.Book, year=2006, pages={101} ) , @PublicationReference( author="Wikipedia", title="Normal-scaled inverse gamma distribution", type=PublicationType.WebPage, year=2010, url="http://en.wikipedia.org/wiki/Normal-scaled_inverse_gamma_distribution" ) } ) public class NormalInverseGammaDistribution extends AbstractDistribution<Vector> implements ClosedFormComputableDistribution<Vector> { /** * Default location, {@value}. */ public static final double DEFAULT_LOCATION = 0.0; /** * Default precision, {@value}. */ public static final double DEFAULT_PRECISION = 1.0; /** * Default shape, {@value}. */ public static final double DEFAULT_SHAPE = InverseGammaDistribution.DEFAULT_SHAPE; /** * Default scale, {@value}. */ public static final double DEFAULT_SCALE = InverseGammaDistribution.DEFAULT_SCALE; /** * Location of the Gaussian kernel. */ private double location; /** * Precision of the Gaussian kernel, must be greater than zero. */ private double precision; /** * Shape parameter of the Inverse Gamma kernel, must be greater than zero. */ private double shape; /** * Scale parameter of the Inverse Gamma kernel, must be greater than zero. */ private double scale; /** * Creates a new instance of NormalInverseGammaDistribution */ public NormalInverseGammaDistribution() { this( DEFAULT_LOCATION, DEFAULT_PRECISION, DEFAULT_SHAPE, DEFAULT_SCALE ); } /** * Creates a new instance of NormalInverseGammaDistribution * @param location * Location of the Gaussian kernel. * @param precision * Precision of the Gaussian kernel, must be greater than zero. * @param shape * Shape parameter of the Inverse Gamma kernel, must be greater than zero. * @param scale * Scale parameter of the Inverse Gamma kernel, must be greater than zero. 
*/ public NormalInverseGammaDistribution( double location, double precision, double shape, double scale) { this.setLocation(location); this.setPrecision(precision); this.setShape(shape); this.setScale(scale); } /** * Copy constructor * @param other * NormalInverseGammaDistribution to copy */ public NormalInverseGammaDistribution( NormalInverseGammaDistribution other ) { this( other.getLocation(), other.getPrecision(), other.getShape(), other.getScale() ); } @Override public NormalInverseGammaDistribution clone() { return (NormalInverseGammaDistribution) super.clone(); } public Vector getMean() { if( this.shape > 1.0 ) { return VectorFactory.getDefault().copyValues( this.location, this.scale/(this.shape-1.0) ); } else { throw new IllegalArgumentException( "Shape must be > 1.0 for a mean" ); } } @Override public void sampleInto( final Random random, final int sampleCount, final Collection<? super Vector> output) { InverseGammaDistribution.CDF inverseGamma = new InverseGammaDistribution.CDF(this.shape, this.scale); UnivariateGaussian.CDF gaussian = new UnivariateGaussian.CDF(this.location, 1.0 / this.precision); final double[] variances = inverseGamma.sampleAsDoubles(random, sampleCount); for (double variance : variances) { gaussian.setVariance(variance / this.precision); double mean = gaussian.sample(random); output.add(VectorFactory.getDefault().copyValues(mean, variance)); } } public Vector convertToVector() { return VectorFactory.getDefault().copyValues( this.getLocation(), this.getPrecision(), this.getShape(), this.getScale() ); } public void convertFromVector( Vector parameters) { parameters.assertDimensionalityEquals(4); this.setLocation( parameters.getElement(0) ); this.setPrecision( parameters.getElement(1) ); this.setShape( parameters.getElement(2) ); this.setScale( parameters.getElement(3) ); } public NormalInverseGammaDistribution.PDF getProbabilityFunction() { return new NormalInverseGammaDistribution.PDF( this ); } /** * Getter for location. 
* @return * Location of the Gaussian kernel. */ public double getLocation() { return this.location; } /** * Setter for location. * @param location * Location of the Gaussian kernel. */ public void setLocation( double location) { this.location = location; } /** * Getter for precision * @return * Precision of the Gaussian kernel, must be greater than zero. */ public double getPrecision() { return this.precision; } /** * Setter for precision. * @param precision * Precision of the Gaussian kernel, must be greater than zero. */ public void setPrecision( double precision) { if( precision <= 0.0 ) { throw new IllegalArgumentException( "Precision must be > 0.0" ); } this.precision = precision; } /** * Getter for shape * @return * Shape parameter of the Inverse Gamma kernel, must be greater than zero. */ public double getShape() { return this.shape; } /** * Setter for shape * @param shape * Shape parameter of the Inverse Gamma kernel, must be greater than zero. */ public void setShape( double shape) { if( shape <= 0.0 ) { throw new IllegalArgumentException( "Shape must be > 0.0" ); } this.shape = shape; } /** * Getter for scale * @return * Scale parameter of the Inverse Gamma kernel, must be greater than zero. */ public double getScale() { return this.scale; } /** * Setter for scale * @param scale * Scale parameter of the Inverse Gamma kernel, must be greater than zero. 
*/ public void setScale( double scale) { if( scale <= 0.0 ) { throw new IllegalArgumentException( "Scale must be > 0.0" ); } this.scale = scale; } @Override public String toString() { return "Location: " + this.getLocation() + ", Precision: " + this.getPrecision() + ", Shape: " + this.getShape() + ", Scale: " + this.getScale(); } /** * PDF of the NormalInverseGammaDistribution */ public static class PDF extends NormalInverseGammaDistribution implements ProbabilityDensityFunction<Vector>, VectorInputEvaluator<Vector,Double> { /** * Creates a new instance of NormalInverseGammaDistribution */ public PDF() { super(); } /** * Creates a new instance of NormalInverseGammaDistribution * @param location * Location of the Gaussian kernel. * @param precision * Precision of the Gaussian kernel, must be greater than zero. * @param shape * Shape parameter of the Inverse Gamma kernel, must be greater than zero. * @param scale * Scale parameter of the Inverse Gamma kernel, must be greater than zero. */ public PDF( double location, double precision, double shape, double scale) { super( location, precision, shape, scale); } /** * Copy constructor * @param other * NormalInverseGammaDistribution to copy */ public PDF( NormalInverseGammaDistribution other ) { super( other ); } @Override public NormalInverseGammaDistribution.PDF getProbabilityFunction() { return this; } public double logEvaluate( Vector input) { input.assertDimensionalityEquals(2); double mean = input.getElement(0); double variance = input.getElement(1); InverseGammaDistribution.PDF inverseGamma = new InverseGammaDistribution.PDF( this.getShape(), this.getScale() ); UnivariateGaussian.PDF gaussian = new UnivariateGaussian.PDF( this.getLocation(), variance / this.getPrecision() ); double logInverseGamma = inverseGamma.logEvaluate(variance); double logGaussian = gaussian.logEvaluate(mean); return logGaussian + logInverseGamma; } public Double evaluate( Vector input) { return Math.exp( this.logEvaluate(input) ); } public 
int getInputDimensionality() { return 2; } } }
codeaudit/Foundry
Components/LearningCore/Source/gov/sandia/cognition/statistics/distribution/NormalInverseGammaDistribution.java
Java
bsd-3-clause
11,067
// ============================================================================= // PROJECT CHRONO - http://projectchrono.org // // Copyright (c) 2014 projectchrono.org // All rights reserved. // // Use of this source code is governed by a BSD-style license that can be found // in the LICENSE file at the top level of the distribution and at // http://projectchrono.org/license-chrono.txt. // // ============================================================================= // Authors: Alessandro Tasora, Radu Serban // ============================================================================= #include "chrono/motion_functions/ChFunction_Poly345.h" namespace chrono { // Register into the object factory, to enable run-time dynamic creation and persistence CH_FACTORY_REGISTER(ChFunction_Poly345) ChFunction_Poly345::ChFunction_Poly345(double m_h, double m_end) : h(m_h) { Set_end(m_end); } ChFunction_Poly345::ChFunction_Poly345(const ChFunction_Poly345& other) { h = other.h; end = other.end; } double ChFunction_Poly345::Get_y(double x) const { double ret = 0; if (x <= 0) return 0; if (x >= end) return h; double a = x / end; ret = h * (10 * pow(a, 3) - 15 * pow(a, 4) + 6 * pow(a, 5)); return ret; } double ChFunction_Poly345::Get_y_dx(double x) const { double ret = 0; if (x <= 0) return 0; if (x >= end) return 0; double a = x / end; ret = h * (1 / end) * (30 * pow(a, 2) - 60 * pow(a, 3) + 30 * pow(a, 4)); return ret; } double ChFunction_Poly345::Get_y_dxdx(double x) const { double ret = 0; if (x <= 0) return 0; if (x >= end) return 0; double a = x / end; ret = h * (1 / (end * end)) * (60 * a - 180 * pow(a, 2) + 120 * pow(a, 3)); return ret; } void ChFunction_Poly345::ArchiveOUT(ChArchiveOut& marchive) { // version number marchive.VersionWrite<ChFunction_Poly345>(); // serialize parent class ChFunction::ArchiveOUT(marchive); // serialize all member data: marchive << CHNVP(h); marchive << CHNVP(end); } void ChFunction_Poly345::ArchiveIN(ChArchiveIn& marchive) { // version 
number /*int version =*/ marchive.VersionRead<ChFunction_Poly345>(); // deserialize parent class ChFunction::ArchiveIN(marchive); // stream in all member data: marchive >> CHNVP(h); marchive >> CHNVP(end); } } // end namespace chrono
projectchrono/chrono
src/chrono/motion_functions/ChFunction_Poly345.cpp
C++
bsd-3-clause
2,431
using System; using System.Text; using System.Data; using System.Data.SqlClient; using System.Data.Common; using System.Collections; using System.Collections.Generic; using System.ComponentModel; using System.Configuration; using System.Xml; using System.Xml.Serialization; using SubSonic; using SubSonic.Utilities; // <auto-generated /> namespace Southwind{ /// <summary> /// Strongly-typed collection for the OrderSubtotal class. /// </summary> [Serializable] public partial class OrderSubtotalCollection : ReadOnlyList<OrderSubtotal, OrderSubtotalCollection> { public OrderSubtotalCollection() {} } /// <summary> /// This is Read-only wrapper class for the order subtotals view. /// </summary> [Serializable] public partial class OrderSubtotal : ReadOnlyRecord<OrderSubtotal>, IReadOnlyRecord { #region Default Settings protected static void SetSQLProps() { GetTableSchema(); } #endregion #region Schema Accessor public static TableSchema.Table Schema { get { if (BaseSchema == null) { SetSQLProps(); } return BaseSchema; } } private static void GetTableSchema() { if(!IsSchemaInitialized) { //Schema declaration TableSchema.Table schema = new TableSchema.Table("order subtotals", TableType.View, DataService.GetInstance("Southwind")); schema.Columns = new TableSchema.TableColumnCollection(); schema.SchemaName = @""; //columns TableSchema.TableColumn colvarOrderID = new TableSchema.TableColumn(schema); colvarOrderID.ColumnName = "OrderID"; colvarOrderID.DataType = DbType.Int32; colvarOrderID.MaxLength = 10; colvarOrderID.AutoIncrement = false; colvarOrderID.IsNullable = false; colvarOrderID.IsPrimaryKey = false; colvarOrderID.IsForeignKey = false; colvarOrderID.IsReadOnly = false; schema.Columns.Add(colvarOrderID); TableSchema.TableColumn colvarSubtotal = new TableSchema.TableColumn(schema); colvarSubtotal.ColumnName = "Subtotal"; colvarSubtotal.DataType = DbType.Decimal; colvarSubtotal.MaxLength = 0; colvarSubtotal.AutoIncrement = false; colvarSubtotal.IsNullable = true; 
colvarSubtotal.IsPrimaryKey = false; colvarSubtotal.IsForeignKey = false; colvarSubtotal.IsReadOnly = false; schema.Columns.Add(colvarSubtotal); BaseSchema = schema; //add this schema to the provider //so we can query it later DataService.Providers["Southwind"].AddSchema("order subtotals",schema); } } #endregion #region Query Accessor public static Query CreateQuery() { return new Query(Schema); } #endregion #region .ctors public OrderSubtotal() { SetSQLProps(); SetDefaults(); MarkNew(); } public OrderSubtotal(bool useDatabaseDefaults) { SetSQLProps(); if(useDatabaseDefaults) { ForceDefaults(); } MarkNew(); } public OrderSubtotal(object keyID) { SetSQLProps(); LoadByKey(keyID); } public OrderSubtotal(string columnName, object columnValue) { SetSQLProps(); LoadByParam(columnName,columnValue); } #endregion #region Props [XmlAttribute("OrderID")] [Bindable(true)] public int OrderID { get { return GetColumnValue<int>("OrderID"); } set { SetColumnValue("OrderID", value); } } [XmlAttribute("Subtotal")] [Bindable(true)] public decimal? Subtotal { get { return GetColumnValue<decimal?>("Subtotal"); } set { SetColumnValue("Subtotal", value); } } #endregion #region Columns Struct public struct Columns { public static string OrderID = @"OrderID"; public static string Subtotal = @"Subtotal"; } #endregion #region IAbstractRecord Members public new CT GetColumnValue<CT>(string columnName) { return base.GetColumnValue<CT>(columnName); } public object GetColumnValue(string columnName) { return base.GetColumnValue<object>(columnName); } #endregion } }
w8in/SubSonic-2.0
SubSonic.Tests/Generated/Southwind/OrderSubtotal.cs
C#
bsd-3-clause
5,138
# coding=utf-8 from __future__ import unicode_literals from random import randint from .. import Provider as AddressProvider class Provider(AddressProvider): address_formats = ['{{street_address}}, {{city}}, {{postcode}}'] building_number_formats = ['#', '##', '###'] city_formats = ['{{city_prefix}} {{first_name}}'] street_address_formats = ['{{street_name}}, {{building_number}}'] street_name_formats = ['{{street_prefix}} {{last_name}}', '{{last_name}} {{street_suffix}}'] city_prefixes = ['місто', 'село', 'селище', 'хутір'] countries = [ 'Австралія', 'Австрія', 'Азербайджан', 'Албанія', 'Алжир', 'Ангола', 'Андорра', 'Антигуа і Барбуда', 'Аргентина', 'Афганістан', 'Багамські Острови', 'Бангладеш', 'Барбадос', 'Бахрейн', 'Беліз', 'Бельгія', 'Бенін', 'Білорусь', 'Болгарія', 'Болівія', 'Боснія і Герцеговина', 'Ботсвана', 'Бразилія', 'Бруней', 'Буркіна-Фасо', 'Бурунді', 'Бутан', 'Вануату', 'Ватикан', 'Велика Британія', 'Венесуела', 'В\'єтнам', 'Вірменія', 'Габон', 'Гаїті', 'Гаяна', 'Гамбія', 'Гана', 'Гватемала', 'Гвінея', 'Гвінея-Бісау', 'Гондурас', 'Гренада', 'Греція', 'Грузія', 'Данія', 'Джибуті', 'Домініка', 'Домініканська Республіка', 'Еквадор', 'Екваторіальна Гвінея', 'Еритрея', 'Естонія', 'Ефіопія', 'Єгипет', 'Ємен', 'Замбія', 'Західна Сахара', 'Зімбабве', 'Ізраїль', 'Індія', 'Індонезія', 'Ірак', 'Іран', 'Ірландія', 'Ісландія', 'Іспанія', 'Італія', 'Йорданія', 'Кабо-Верде', 'Казахстан', 'Камбоджа', 'Камерун', 'Канада', 'Катар', 'Кенія', 'Киргизстан', 'КНР', 'Кіпр', 'Кірибаті', 'Колумбія', 'Коморські Острови', 'Конго', 'ДР Конго', 'Південна Корея', 'Північна Корея', 'Косово', 'Коста-Рика', 'Кот-д\'Івуар', 'Куба', 'Кувейт', 'Лаос', 'Латвія', 'Лесото', 'Литва', 'Ліберія', 'Ліван', 'Лівія', 'Ліхтенштейн', 'Люксембург', 'Маврикій', 'Мавританія', 'Мадагаскар', 'Республіка Македонія', 'Малаві', 'Малайзія', 'Малі', 'Мальдіви', 'Мальта', 'Марокко', 'Маршаллові Острови', 'Мексика', 'Федеративні Штати Мікронезії', 'Мозамбік', 'Молдова', 'Монако', 'Монголія', 'М\'янма', 
'Намібія', 'Науру', 'Непал', 'Нігер', 'Нігерія', 'Нідерланди', 'Нікарагуа', 'Німеччина', 'Нова Зеландія', 'Норвегія', 'ОАЕ', 'Оман', 'Пакистан', 'Палау', 'Палестинська держава', 'Панама', 'Папуа Нова Гвінея', 'ПАР', 'Парагвай', 'Перу', 'Південний Судан', 'Польща', 'Португалія', 'Росія', 'Руанда', 'Румунія', 'Сальвадор', 'Самоа', 'Сан-Марино', 'Сан-Томе і Принсіпі', 'Саудівська Аравія', 'Свазіленд', 'Сейшельські Острови', 'Сенегал', 'Сент-Вінсент і Гренадини', 'Сент-Кіттс і Невіс', 'Сент-Люсія', 'Сербія', 'Сінгапур', 'Сирія', 'Словаччина', 'Словенія', 'Соломонові Острови', 'Сомалі', 'Судан', 'Суринам', 'Східний Тимор', 'США', 'Сьєрра-Леоне', 'Таджикистан', 'Таїланд', 'Тайвань', 'Танзанія', 'Того', 'Тонга', 'Тринідад і Тобаго', 'Тувалу', 'Туніс', 'Туреччина', 'Туркменістан', 'Уганда', 'Угорщина', 'Узбекистан', 'Україна', 'Уругвай', 'Фіджі', 'Філіппіни', 'Фінляндія', 'Франція', 'Хорватія', 'Центральноафриканська Республіка', 'Чад', 'Чехія', 'Чилі', 'Чорногорія', 'Швейцарія', 'Швеція', 'Шрі-Ланка', 'Ямайка', 'Японія' ] street_prefixes = [ 'вулиця', 'проспект', 'майдан', 'набережна', 'бульвар', 'провулок' ] street_suffixes = ['узвіз'] @classmethod def city_prefix(cls): return cls.random_element(cls.city_prefixes) @classmethod def postcode(cls): """The code consists of five digits (01000-99999)""" return '{}{}'.format(randint(0, 10), randint(1000, 10000)) @classmethod def street_prefix(cls): return cls.random_element(cls.street_prefixes)
vicky2135/lucious
oscar/lib/python2.7/site-packages/faker/providers/address/uk_UA/__init__.py
Python
bsd-3-clause
5,601
// (C) Copyright Nick Thompson and Matt Borland 2020. // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #include <random> #include <boost/math/statistics/univariate_statistics.hpp> #include <benchmark/benchmark.h> template <class Z> void test_mode(benchmark::State& state) { using boost::math::statistics::sorted_mode; std::random_device rd; std::mt19937_64 mt(rd()); std::uniform_int_distribution<> dist {1, 10}; auto gen = [&dist, &mt](){return dist(mt);}; std::vector<Z> v(state.range(0)); std::generate(v.begin(), v.end(), gen); for (auto _ : state) { std::vector<Z> modes; benchmark::DoNotOptimize(sorted_mode(v.begin(), v.end(), std::back_inserter(modes))); } state.SetComplexityN(state.range(0)); } template <class Z> void sequential_test_mode(benchmark::State& state) { using boost::math::statistics::sorted_mode; std::vector<Z> v(state.range(0)); size_t current_num {1}; // produces {1, 2, 3, 4, 5...} for(size_t i {}; i < v.size(); ++i) { v[i] = current_num; ++current_num; } for (auto _ : state) { std::vector<Z> modes; benchmark::DoNotOptimize(sorted_mode(v, std::back_inserter(modes))); } state.SetComplexityN(state.range(0)); } template <class Z> void sequential_pairs_test_mode(benchmark::State& state) { using boost::math::statistics::sorted_mode; std::vector<Z> v(state.range(0)); size_t current_num {1}; size_t current_num_counter {}; // produces {1, 1, 2, 2, 3, 3, ...} for(size_t i {}; i < v.size(); ++i) { v[i] = current_num; ++current_num_counter; if(current_num_counter > 2) { ++current_num; current_num_counter = 0; } } for (auto _ : state) { std::vector<Z> modes; benchmark::DoNotOptimize(sorted_mode(v, std::back_inserter(modes))); } state.SetComplexityN(state.range(0)); } template <class Z> void sequential_multiple_test_mode(benchmark::State& state) { using boost::math::statistics::sorted_mode; std::vector<Z> v(state.range(0)); 
size_t current_num {1}; size_t current_num_counter {}; // produces {1, 2, 2, 3, 3, 3, 4, 4, 4, 4, ...} for(size_t i {}; i < v.size(); ++i) { v[i] = current_num; ++current_num_counter; if(current_num_counter > current_num) { ++current_num; current_num_counter = 0; } } for (auto _ : state) { std::vector<Z> modes; benchmark::DoNotOptimize(sorted_mode(v, std::back_inserter(modes))); } state.SetComplexityN(state.range(0)); } BENCHMARK_TEMPLATE(test_mode, int32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(test_mode, int64_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(test_mode, uint32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_test_mode, int32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_test_mode, int64_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_test_mode, uint32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_pairs_test_mode, int32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_pairs_test_mode, int64_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_pairs_test_mode, uint32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_multiple_test_mode, int32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_multiple_test_mode, int64_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_TEMPLATE(sequential_multiple_test_mode, uint32_t)->RangeMultiplier(2)->Range(1<<1, 1<<22)->Complexity(); BENCHMARK_MAIN();
stan-dev/math
lib/boost_1.75.0/libs/math/reporting/performance/test_mode.cpp
C++
bsd-3-clause
4,151
from oscar_vat_moss import fields from oscar.apps.address.abstract_models import AbstractShippingAddress from oscar.apps.address.abstract_models import AbstractBillingAddress class ShippingAddress(AbstractShippingAddress): vatin = fields.vatin() class BillingAddress(AbstractBillingAddress): vatin = fields.vatin() from oscar.apps.order.models import * # noqa
hastexo/django-oscar-vat_moss
oscar_vat_moss/order/models.py
Python
bsd-3-clause
375
// Copyright (c) 2010, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // Unit tests for NetworkSourceLineServer. 
#include <ios> #include <set> #include <string> #include "breakpad_googletest_includes.h" #include "google_breakpad/processor/code_module.h" #include "google_breakpad/processor/source_line_resolver_interface.h" #include "google_breakpad/processor/stack_frame.h" #include "google_breakpad/processor/symbol_supplier.h" #include "processor/binarystream.h" #include "processor/cfi_frame_info.h" #include "processor/network_source_line_server.h" #include "processor/network_source_line_protocol.h" #include "processor/windows_frame_info.h" namespace { using std::ios_base; using std::set; using std::string; using google_breakpad::CFIFrameInfo; using google_breakpad::CodeModule; using google_breakpad::binarystream; using google_breakpad::NetworkInterface; using google_breakpad::NetworkSourceLineServer; using google_breakpad::SourceLineResolverInterface; using google_breakpad::StackFrame; using google_breakpad::SymbolSupplier; using google_breakpad::SystemInfo; using google_breakpad::WindowsFrameInfo; using ::testing::_; using ::testing::DoAll; using ::testing::Invoke; using ::testing::Property; using ::testing::Return; using ::testing::SetArgumentPointee; // Style guide forbids "using namespace", so at least shorten it. 
namespace P = google_breakpad::source_line_protocol; class MockNetwork : public NetworkInterface { public: MockNetwork() {} MOCK_METHOD1(Init, bool(bool listen)); MOCK_METHOD2(Send, bool(const char *data, size_t length)); MOCK_METHOD1(WaitToReceive, bool(int timeout)); MOCK_METHOD3(Receive, bool(char *buffer, size_t buffer_size, ssize_t &received)); }; class MockSymbolSupplier : public SymbolSupplier { public: MockSymbolSupplier() {} MOCK_METHOD3(GetSymbolFile, SymbolResult(const CodeModule *module, const SystemInfo *system_info, string *symbol_file)); MOCK_METHOD4(GetSymbolFile, SymbolResult(const CodeModule *module, const SystemInfo *system_info, string *symbol_file, string *symbol_data)); }; class MockSourceLineResolver : public SourceLineResolverInterface { public: MockSourceLineResolver() {} virtual ~MockSourceLineResolver() {} MOCK_METHOD2(LoadModule, bool(const CodeModule *module, const string &map_file)); MOCK_METHOD2(LoadModuleUsingMapBuffer, bool(const CodeModule *module, const string &map_buffer)); MOCK_METHOD1(UnloadModule, void(const CodeModule *module)); MOCK_METHOD1(HasModule, bool(const CodeModule *module)); MOCK_METHOD1(FillSourceLineInfo, void(StackFrame *frame)); MOCK_METHOD1(FindWindowsFrameInfo, WindowsFrameInfo*(const StackFrame *frame)); MOCK_METHOD1(FindCFIFrameInfo, CFIFrameInfo*(const StackFrame *frame)); }; class TestNetworkSourceLineServer : public NetworkSourceLineServer { public: // Override visibility for testing. It's a lot easier to just // call into this method and verify the result than it would be // to mock out the calls to the NetworkInterface, even though // that would ostensibly be more correct and test the code more // thoroughly. Perhaps if someone has time and figures out a // clean way to do it this could be changed. 
using NetworkSourceLineServer::HandleRequest; TestNetworkSourceLineServer(SymbolSupplier *supplier, SourceLineResolverInterface *resolver, NetworkInterface *net, u_int64_t max_symbol_lines = 0) : NetworkSourceLineServer(supplier, resolver, net, max_symbol_lines) {} }; class NetworkSourceLineServerTest : public ::testing::Test { public: MockSymbolSupplier supplier; MockSourceLineResolver resolver; MockNetwork net; TestNetworkSourceLineServer *server; NetworkSourceLineServerTest() : server(NULL) {} void SetUp() { server = new TestNetworkSourceLineServer(&supplier, &resolver, &net); } }; TEST_F(NetworkSourceLineServerTest, TestInit) { EXPECT_CALL(net, Init(true)).WillOnce(Return(true)); EXPECT_CALL(net, WaitToReceive(0)).WillOnce(Return(false)); ASSERT_TRUE(server->Initialize()); EXPECT_FALSE(server->RunOnce(0)); } TEST_F(NetworkSourceLineServerTest, TestMalformedRequest) { binarystream request; // send a request without a full sequence number request << u_int8_t(1); binarystream response; EXPECT_FALSE(server->HandleRequest(request, response)); request.rewind(); // send a request without a command request << u_int16_t(1); EXPECT_FALSE(server->HandleRequest(request, response)); } TEST_F(NetworkSourceLineServerTest, TestUnknownCommand) { binarystream request; // send a request with an unknown command request << u_int16_t(1) << u_int8_t(100); binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status; response >> response_sequence >> response_status; ASSERT_FALSE(response.eof()); EXPECT_EQ(u_int16_t(1), response_sequence); EXPECT_EQ(P::ERROR, int(response_status)); } TEST_F(NetworkSourceLineServerTest, TestHasBasic) { EXPECT_CALL(resolver, HasModule(_)) .WillOnce(Return(false)) .WillOnce(Return(true)); binarystream request; const u_int16_t sequence = 0xA0A0; // first request should come back as not loaded request << sequence << P::HAS << string("test.dll") << string("test.pdb") << 
string("ABCD1234"); binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status, response_data; response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(P::MODULE_NOT_LOADED, int(response_data)); // second request should come back as loaded binarystream request2; request2 << sequence << P::HAS << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234"); ASSERT_TRUE(server->HandleRequest(request2, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(P::MODULE_LOADED, int(response_data)); } TEST_F(NetworkSourceLineServerTest, TestMalformedHasRequest) { binarystream request; // send request with just command, missing all data const u_int16_t sequence = 0xA0A0; request << sequence << P::HAS; binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status; response >> response_sequence >> response_status; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::ERROR, int(response_status)); // send request with just module name binarystream request2; request2 << sequence << P::HAS << string("test.dll"); ASSERT_TRUE(server->HandleRequest(request2, response)); response >> response_sequence >> response_status; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::ERROR, int(response_status)); // send request with module name, debug file, missing debug id binarystream request3; request3 << sequence << P::HAS << string("test.dll") << string("test.pdb"); ASSERT_TRUE(server->HandleRequest(request3, response)); response >> response_sequence >> response_status; ASSERT_FALSE(response.eof()); 
EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::ERROR, int(response_status)); } TEST_F(NetworkSourceLineServerTest, TestHasLoad) { EXPECT_CALL(resolver, HasModule(_)) .WillOnce(Return(false)) .WillOnce(Return(false)) .WillOnce(Return(true)); EXPECT_CALL(resolver, LoadModuleUsingMapBuffer(_,_)) .WillOnce(Return(true)); EXPECT_CALL(supplier, GetSymbolFile(_,_,_,_)) .WillOnce(Return(SymbolSupplier::FOUND)); // verify that the module is not loaded, with a HAS request binarystream request; const u_int16_t sequence = 0xA0A0; request << sequence << P::HAS << string("found.dll") << string("found.pdb") << string("ABCD1234"); binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status, response_data; response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(P::MODULE_NOT_LOADED, int(response_data)); // now send a load request for this module binarystream request2; const u_int16_t sequence2 = 0xB0B0; request2 << sequence2 << P::LOAD << string("found.dll") << string("found.pdb") << string("ABCD1234"); ASSERT_TRUE(server->HandleRequest(request2, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); // sending another HAS message should now show it as loaded binarystream request3; const u_int16_t sequence3 = 0xC0C0; request3 << sequence3 << P::HAS << string("found.dll") << string("found.pdb") << string("ABCD1234"); ASSERT_TRUE(server->HandleRequest(request3, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence3, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(P::MODULE_LOADED, int(response_data)); } 
TEST_F(NetworkSourceLineServerTest, TestLoad) { EXPECT_CALL(resolver, HasModule(_)) .Times(3) .WillRepeatedly(Return(false)); EXPECT_CALL(resolver, LoadModuleUsingMapBuffer(_,_)) .WillOnce(Return(false)); EXPECT_CALL(supplier, GetSymbolFile(_,_,_,_)) .WillOnce(Return(SymbolSupplier::NOT_FOUND)) .WillOnce(Return(SymbolSupplier::INTERRUPT)) .WillOnce(Return(SymbolSupplier::FOUND)); // notfound.dll should return LOAD_NOT_FOUND binarystream request; const u_int16_t sequence = 0xA0A0; request << sequence << P::LOAD << string("notfound.dll") << string("notfound.pdb") << string("ABCD1234"); binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status, response_data; response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(int(P::LOAD_NOT_FOUND), int(response_data)); // interrupt.dll should return LOAD_INTERRUPT binarystream request2; const u_int16_t sequence2 = 0xB0B0; request2 << sequence2 << P::LOAD << string("interrupt.dll") << string("interrupt.pdb") << string("0000"); ASSERT_TRUE(server->HandleRequest(request2, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(int(P::LOAD_INTERRUPT), int(response_data)); // fail.dll should return LOAD_FAIL binarystream request3; const u_int16_t sequence3 = 0xC0C0; request3 << sequence3 << P::LOAD << string("fail.dll") << string("fail.pdb") << string("FFFFFFFF"); ASSERT_TRUE(server->HandleRequest(request3, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence3, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(int(P::LOAD_FAIL), int(response_data)); } TEST_F(NetworkSourceLineServerTest, 
TestMalformedLoadRequest) { binarystream request; // send request with just command, missing all data const u_int16_t sequence = 0xA0A0; request << sequence << P::LOAD; binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status; response >> response_sequence >> response_status; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::ERROR, int(response_status)); // send request with just module name binarystream request2; request2 << sequence << P::LOAD << string("test.dll"); ASSERT_TRUE(server->HandleRequest(request2, response)); response >> response_sequence >> response_status; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::ERROR, int(response_status)); // send request with module name, debug file, missing debug id binarystream request3; request3 << sequence << P::LOAD << string("test.dll") << string("test.pdb"); ASSERT_TRUE(server->HandleRequest(request3, response)); response >> response_sequence >> response_status; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::ERROR, int(response_status)); } void FillFullSourceLineInfo(StackFrame *frame) { frame->function_name = "function1"; frame->function_base = 0x1200; frame->source_file_name = "function1.cc"; frame->source_line = 1; frame->source_line_base = 0x1230; } void FillPartialSourceLineInfo(StackFrame *frame) { frame->function_name = "function2"; frame->function_base = 0xFFF0; } TEST_F(NetworkSourceLineServerTest, TestGet) { EXPECT_CALL(resolver, FillSourceLineInfo(_)) .WillOnce(Invoke(FillFullSourceLineInfo)) .WillOnce(Invoke(FillPartialSourceLineInfo)); binarystream request; const u_int16_t sequence = 0xA0A0; request << sequence << P::GET << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0x1234); binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t 
response_sequence; u_int8_t response_status; string function, source_file; u_int32_t source_line; u_int64_t function_base, source_line_base; response >> response_sequence >> response_status >> function >> function_base >> source_file >> source_line >> source_line_base; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ("function1", function); EXPECT_EQ(0x1200, function_base); EXPECT_EQ("function1.cc", source_file); EXPECT_EQ(1, source_line); EXPECT_EQ(0x1230, source_line_base); binarystream request2; const u_int16_t sequence2 = 0xC0C0; request2 << sequence2 << P::GET << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0xFFFF); ASSERT_TRUE(server->HandleRequest(request2, response)); response >> response_sequence >> response_status >> function >> function_base >> source_file >> source_line >> source_line_base; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ("function2", function); EXPECT_EQ(0xFFF0, function_base); EXPECT_EQ("", source_file); EXPECT_EQ(0, source_line); EXPECT_EQ(0, source_line_base); } WindowsFrameInfo* GetFullWindowsFrameInfo(const StackFrame *frame) { // return frame info with program string return new WindowsFrameInfo(1, 2, 3, 0xA, 0xFF, 0xF00, true, "x y ="); } WindowsFrameInfo* GetPartialWindowsFrameInfo(const StackFrame *frame) { // return frame info, no program string return new WindowsFrameInfo(1, 2, 3, 4, 5, 6, true, ""); } TEST_F(NetworkSourceLineServerTest, TestGetStackWin) { EXPECT_CALL(resolver, FindWindowsFrameInfo(_)) .WillOnce(Invoke(GetFullWindowsFrameInfo)) .WillOnce(Invoke(GetPartialWindowsFrameInfo)) .WillOnce(Return((WindowsFrameInfo*)NULL)); binarystream request; const u_int16_t sequence = 0xA0A0; request << sequence << P::GETSTACKWIN << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << 
u_int64_t(0x1234); binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status; string stack_info; response >> response_sequence >> response_status >> stack_info; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ("0 0 0 1 2 3 a ff f00 1 x y =", stack_info); binarystream request2; const u_int16_t sequence2 = 0xB0B0; request2 << sequence2 << P::GETSTACKWIN << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0xABCD); ASSERT_TRUE(server->HandleRequest(request2, response)); response >> response_sequence >> response_status >> stack_info; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ("0 0 0 1 2 3 4 5 6 0 1", stack_info); binarystream request3; const u_int16_t sequence3 = 0xC0C0; request3 << sequence3 << P::GETSTACKWIN << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0xFFFF); ASSERT_TRUE(server->HandleRequest(request3, response)); response >> response_sequence >> response_status >> stack_info; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence3, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ("", stack_info); } CFIFrameInfo* GetCFIFrameInfoJustCFA(const StackFrame *frame) { CFIFrameInfo* cfi = new CFIFrameInfo(); cfi->SetCFARule("12345678"); return cfi; } CFIFrameInfo* GetCFIFrameInfoCFARA(const StackFrame *frame) { CFIFrameInfo* cfi = new CFIFrameInfo(); cfi->SetCFARule("12345678"); cfi->SetRARule("abcdefgh"); return cfi; } CFIFrameInfo* GetCFIFrameInfoLots(const StackFrame *frame) { CFIFrameInfo* cfi = new CFIFrameInfo(); cfi->SetCFARule("12345678"); cfi->SetRARule("abcdefgh"); cfi->SetRegisterRule("r0", "foo bar"); cfi->SetRegisterRule("b0", "123 abc +"); return cfi; } TEST_F(NetworkSourceLineServerTest, TestGetStackCFI) { 
EXPECT_CALL(resolver, FindCFIFrameInfo(_)) .WillOnce(Return((CFIFrameInfo*)NULL)) .WillOnce(Invoke(GetCFIFrameInfoJustCFA)) .WillOnce(Invoke(GetCFIFrameInfoCFARA)) .WillOnce(Invoke(GetCFIFrameInfoLots)); binarystream request; const u_int16_t sequence = 0xA0A0; request << sequence << P::GETSTACKCFI << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0x1234); binarystream response; ASSERT_TRUE(server->HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status; string stack_info; response >> response_sequence >> response_status >> stack_info; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ("", stack_info); binarystream request2; const u_int16_t sequence2 = 0xB0B0; request2 << sequence2 << P::GETSTACKCFI << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0xABCD); ASSERT_TRUE(server->HandleRequest(request2, response)); response >> response_sequence >> response_status >> stack_info; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(".cfa: 12345678", stack_info); binarystream request3; const u_int16_t sequence3 = 0xC0C0; request3 << sequence3 << P::GETSTACKCFI << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0xFFFF); ASSERT_TRUE(server->HandleRequest(request3, response)); response >> response_sequence >> response_status >> stack_info; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence3, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(".cfa: 12345678 .ra: abcdefgh", stack_info); binarystream request4; const u_int16_t sequence4 = 0xD0D0; request4 << sequence4 << P::GETSTACKCFI << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0xFFFF); 
ASSERT_TRUE(server->HandleRequest(request4, response)); response >> response_sequence >> response_status >> stack_info; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence4, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ(".cfa: 12345678 .ra: abcdefgh b0: 123 abc + r0: foo bar", stack_info); } TEST_F(NetworkSourceLineServerTest, TestMalformedGetRequest) { //TODO } TEST(TestMissingMembers, TestServerWithoutSymbolSupplier) { // Should provide reasonable responses without a SymbolSupplier MockSourceLineResolver resolver; MockNetwork net; TestNetworkSourceLineServer server(NULL, &resolver, &net); // All LOAD requests should return LOAD_NOT_FOUND binarystream request; binarystream response; const u_int16_t sequence = 0xB0B0; u_int16_t response_sequence; u_int8_t response_status, response_data; request << sequence << P::LOAD << string("found.dll") << string("found.pdb") << string("ABCD1234"); ASSERT_TRUE(server.HandleRequest(request, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_NOT_FOUND), int(response_data)); } TEST(TestMissingMembers, TestServerWithoutResolver) { // Should provide reasonable responses without a SourceLineResolver MockSymbolSupplier supplier; MockNetwork net; TestNetworkSourceLineServer server(&supplier, NULL, &net); // GET requests should return empty info binarystream request; binarystream response; const u_int16_t sequence = 0xA0A0; u_int16_t response_sequence; u_int8_t response_status; request << sequence << P::GET << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0x1234); ASSERT_TRUE(server.HandleRequest(request, response)); string function, source_file; u_int32_t source_line; u_int64_t function_base, source_line_base; response >> response_sequence >> response_status >> function >> function_base >> source_file >> 
source_line >> source_line_base; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ("", function); EXPECT_EQ(0x0, function_base); EXPECT_EQ("", source_file); EXPECT_EQ(0, source_line); EXPECT_EQ(0x0, source_line_base); // GETSTACKWIN requests should return an empty string binarystream request2; const u_int16_t sequence2 = 0xB0B0; request << sequence2 << P::GETSTACKWIN << string("loaded.dll") << string("loaded.pdb") << string("ABCD1234") << u_int64_t(0x1000) << u_int64_t(0x1234); ASSERT_TRUE(server.HandleRequest(request, response)); string response_string; response >> response_sequence >> response_status >> response_string; EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); EXPECT_EQ("", response_string); } class TestModuleManagement : public ::testing::Test { public: MockSymbolSupplier supplier; MockSourceLineResolver resolver; MockNetwork net; TestNetworkSourceLineServer server; // Init server with symbol line limit of 25 TestModuleManagement() : server(&supplier, &resolver, &net, 25) {} }; TEST_F(TestModuleManagement, TestModuleUnloading) { EXPECT_CALL(supplier, GetSymbolFile(_,_,_,_)) .Times(3) .WillRepeatedly(DoAll(SetArgumentPointee<3>(string("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")), Return(SymbolSupplier::FOUND))); EXPECT_CALL(resolver, HasModule(_)) .Times(3) .WillRepeatedly(Return(false)); EXPECT_CALL(resolver, LoadModuleUsingMapBuffer(_,_)) .Times(3) .WillRepeatedly(Return(true)); EXPECT_CALL(resolver, UnloadModule(Property(&CodeModule::code_file, string("one.dll|one.pdb|1111")))) .Times(1); // load three modules, each with 10 lines of symbols. // the third module will overflow the server's symbol line limit, // and should cause the first module to be unloaded. 
binarystream request; const u_int16_t sequence = 0x1010; request << sequence << P::LOAD << string("one.dll") << string("one.pdb") << string("1111"); binarystream response; ASSERT_TRUE(server.HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status, response_data; response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request2; const u_int16_t sequence2 = 0x2020; request2 << sequence2 << P::LOAD << string("two.dll") << string("two.pdb") << string("2222"); ASSERT_TRUE(server.HandleRequest(request2, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request3; const u_int16_t sequence3 = 0x3030; request3 << sequence3 << P::LOAD << string("three.dll") << string("three.pdb") << string("3333"); ASSERT_TRUE(server.HandleRequest(request3, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence3, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); } TEST_F(TestModuleManagement, TestSymbolLimitTooLow) { // load module with symbol count > limit, // ensure that it doesn't get unloaded even though it's the only module EXPECT_CALL(supplier, GetSymbolFile(_,_,_,_)) .WillOnce(DoAll(SetArgumentPointee<3>(string("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n21\n22\n23\n24\n25\n26\n")), Return(SymbolSupplier::FOUND))); EXPECT_CALL(resolver, HasModule(_)) .WillOnce(Return(false)); EXPECT_CALL(resolver, LoadModuleUsingMapBuffer(_,_)) .WillOnce(Return(true)); EXPECT_CALL(resolver, UnloadModule(_)) .Times(0); binarystream request; 
const u_int16_t sequence = 0x1010; request << sequence << P::LOAD << string("one.dll") << string("one.pdb") << string("1111"); binarystream response; ASSERT_TRUE(server.HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status, response_data; response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); } TEST_F(TestModuleManagement, TestModuleLoadLRU) { // load 2 modules, then re-load the first one, // then load a third one, causing the second one to be unloaded EXPECT_CALL(supplier, GetSymbolFile(_,_,_,_)) .Times(3) .WillRepeatedly(DoAll(SetArgumentPointee<3>(string("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")), Return(SymbolSupplier::FOUND))); EXPECT_CALL(resolver, HasModule(_)) .WillOnce(Return(false)) // load module 1 .WillOnce(Return(false)) // load module 2 .WillOnce(Return(true)) // module 1 already loaded .WillOnce(Return(false)); // load module 3 EXPECT_CALL(resolver, LoadModuleUsingMapBuffer(_,_)) .Times(3) .WillRepeatedly(Return(true)); EXPECT_CALL(resolver, UnloadModule(Property(&CodeModule::code_file, string("two.dll|two.pdb|2222")))) .Times(1); binarystream request; const u_int16_t sequence = 0x1010; request << sequence << P::LOAD << string("one.dll") << string("one.pdb") << string("1111"); binarystream response; ASSERT_TRUE(server.HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status, response_data; response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request2; const u_int16_t sequence2 = 0x2020; request2 << sequence2 << P::LOAD << string("two.dll") << string("two.pdb") << string("2222"); ASSERT_TRUE(server.HandleRequest(request2, response)); response 
>> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request3; const u_int16_t sequence3 = 0x3030; request3 << sequence3 << P::LOAD << string("one.dll") << string("one.pdb") << string("1111"); ASSERT_TRUE(server.HandleRequest(request3, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence3, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request4; const u_int16_t sequence4 = 0x4040; request4 << sequence4 << P::LOAD << string("three.dll") << string("three.pdb") << string("3333"); ASSERT_TRUE(server.HandleRequest(request4, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence4, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); } TEST_F(TestModuleManagement, TestModuleGetLRU) { // load 2 modules, then issue a GET for the first one, // then load a third one, causing the second one to be unloaded EXPECT_CALL(supplier, GetSymbolFile(_,_,_,_)) .Times(3) .WillRepeatedly(DoAll(SetArgumentPointee<3>(string("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")), Return(SymbolSupplier::FOUND))); EXPECT_CALL(resolver, HasModule(_)) .Times(3) .WillRepeatedly(Return(false)); EXPECT_CALL(resolver, LoadModuleUsingMapBuffer(_,_)) .Times(3) .WillRepeatedly(Return(true)); EXPECT_CALL(resolver, FillSourceLineInfo(_)) .Times(1); EXPECT_CALL(resolver, UnloadModule(Property(&CodeModule::code_file, string("two.dll|two.pdb|2222")))) .Times(1); binarystream request; const u_int16_t sequence = 0x1010; request << sequence << P::LOAD << string("one.dll") << string("one.pdb") << string("1111"); binarystream response; ASSERT_TRUE(server.HandleRequest(request, 
response)); u_int16_t response_sequence; u_int8_t response_status, response_data; response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request2; const u_int16_t sequence2 = 0x2020; request2 << sequence2 << P::LOAD << string("two.dll") << string("two.pdb") << string("2222"); ASSERT_TRUE(server.HandleRequest(request2, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request3; const u_int16_t sequence3 = 0x3030; request3 << sequence3 << P::GET << string("one.dll") << string("one.pdb") << string("1111") << u_int64_t(0x1000) << u_int64_t(0x1234); ASSERT_TRUE(server.HandleRequest(request3, response)); string function, source_file; u_int32_t source_line; u_int64_t function_base, source_line_base; response >> response_sequence >> response_status >> function >> function_base >> source_file >> source_line >> source_line_base; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence3, response_sequence); EXPECT_EQ(P::OK, int(response_status)); // Don't care about the rest of the response, really. 
binarystream request4; const u_int16_t sequence4 = 0x4040; request4 << sequence4 << P::LOAD << string("three.dll") << string("three.pdb") << string("3333"); ASSERT_TRUE(server.HandleRequest(request4, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence4, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); } TEST_F(TestModuleManagement, TestModuleGetStackWinLRU) { // load 2 modules, then issue a GETSTACKWIN for the first one, // then load a third one, causing the second one to be unloaded EXPECT_CALL(supplier, GetSymbolFile(_,_,_,_)) .Times(3) .WillRepeatedly(DoAll(SetArgumentPointee<3>(string("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n")), Return(SymbolSupplier::FOUND))); EXPECT_CALL(resolver, HasModule(_)) .Times(3) .WillRepeatedly(Return(false)); EXPECT_CALL(resolver, LoadModuleUsingMapBuffer(_,_)) .Times(3) .WillRepeatedly(Return(true)); EXPECT_CALL(resolver, FindWindowsFrameInfo(_)) .WillOnce(Return((WindowsFrameInfo*)NULL)); EXPECT_CALL(resolver, UnloadModule(Property(&CodeModule::code_file, string("two.dll|two.pdb|2222")))) .Times(1); binarystream request; const u_int16_t sequence = 0x1010; request << sequence << P::LOAD << string("one.dll") << string("one.pdb") << string("1111"); binarystream response; ASSERT_TRUE(server.HandleRequest(request, response)); u_int16_t response_sequence; u_int8_t response_status, response_data; response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request2; const u_int16_t sequence2 = 0x2020; request2 << sequence2 << P::LOAD << string("two.dll") << string("two.pdb") << string("2222"); ASSERT_TRUE(server.HandleRequest(request2, response)); response >> response_sequence >> response_status >> response_data; 
ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence2, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); binarystream request3; const u_int16_t sequence3 = 0x3030; request3 << sequence3 << P::GETSTACKWIN << string("one.dll") << string("one.pdb") << string("1111") << u_int64_t(0x1000) << u_int64_t(0x1234); ASSERT_TRUE(server.HandleRequest(request3, response)); string stack_info; response >> response_sequence >> response_status >> stack_info; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence3, response_sequence); EXPECT_EQ(P::OK, int(response_status)); // Don't care about the rest of the response, really. binarystream request4; const u_int16_t sequence4 = 0x4040; request4 << sequence4 << P::LOAD << string("three.dll") << string("three.pdb") << string("3333"); ASSERT_TRUE(server.HandleRequest(request4, response)); response >> response_sequence >> response_status >> response_data; ASSERT_FALSE(response.eof()); EXPECT_EQ(sequence4, response_sequence); EXPECT_EQ(P::OK, int(response_status)); ASSERT_EQ(int(P::LOAD_OK), int(response_data)); } } // namespace int main(int argc, char *argv[]) { ::testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); }
AlericInglewood/3p-google-breakpad
src/processor/network_source_line_server_unittest.cc
C++
bsd-3-clause
37,850
# Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved. from __future__ import absolute_import from digits.utils import subclass, override, constants from digits.extensions.data.interface import DataIngestionInterface from .forms import DatasetForm, InferenceForm import numpy as np import os TEMPLATE = "templates/template.html" INFERENCE_TEMPLATE = "templates/inference_template.html" @subclass class DataIngestion(DataIngestionInterface): """ A data ingestion extension for an image gradient dataset """ def __init__(self, is_inference_db=False, **kwargs): super(DataIngestion, self).__init__(**kwargs) self.userdata['is_inference_db'] = is_inference_db # Used to calculate the gradients later self.yy, self.xx = np.mgrid[:self.image_height, :self.image_width].astype('float') @override def encode_entry(self, entry): xslope, yslope = entry label = np.array([xslope, yslope]) a = xslope * 255 / self.image_width b = yslope * 255 / self.image_height image = a * (self.xx - self.image_width/2) + b * (self.yy - self.image_height/2) + 127.5 image = image.astype('uint8') # convert to 3D tensors image = image[np.newaxis, ...] label = label[np.newaxis, np.newaxis, ...] return image, label @staticmethod @override def get_category(): return "Images" @staticmethod @override def get_id(): return "image-gradients" @staticmethod @override def get_dataset_form(): return DatasetForm() @staticmethod @override def get_dataset_template(form): """ parameters: - form: form returned by get_dataset_form(). 
This may be populated with values if the job was cloned return: - (template, context) tuple - template is a Jinja template to use for rendering dataset creation options - context is a dictionary of context variables to use for rendering the form """ extension_dir = os.path.dirname(os.path.abspath(__file__)) template = open(os.path.join(extension_dir, TEMPLATE), "r").read() context = {'form': form} return (template, context) @override def get_inference_form(self): return InferenceForm() @staticmethod @override def get_inference_template(form): extension_dir = os.path.dirname(os.path.abspath(__file__)) template = open(os.path.join(extension_dir, INFERENCE_TEMPLATE), "r").read() context = {'form': form} return (template, context) @staticmethod @override def get_title(): return "Gradients" @override def itemize_entries(self, stage): count = 0 if self.userdata['is_inference_db']: if stage == constants.TEST_DB: if self.test_image_count: count = self.test_image_count else: return [(self.gradient_x, self.gradient_y)] else: if stage == constants.TRAIN_DB: count = self.train_image_count elif stage == constants.VAL_DB: count = self.val_image_count elif stage == constants.TEST_DB: count = self.test_image_count return [np.random.random_sample(2) - 0.5 for i in xrange(count)] if count > 0 else []
bygreencn/DIGITS
plugins/data/imageGradients/digitsDataPluginImageGradients/data.py
Python
bsd-3-clause
3,492
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @flow * @format */ import React, {Component} from 'react'; import Link from 'gatsby-link'; import {Icon} from 'antd'; import './Footer.css'; import FacebookOSSLogo from './FacebookOSSLogo'; export default class Footer extends Component<{}> { render() { return ( <div className="Footer"> <a href="https://code.facebook.com/projects/" className="logoOSS"> <FacebookOSSLogo /> Facebook Open Source </a> <div className="SocialNetwork"> <a href="https://github.com/facebook/yoga">GitHub</a> <a href="https://twitter.com/yogalayout">Twitter</a> </div> </div> ); } }
facebook/css-layout
website/src/components/Footer.js
JavaScript
bsd-3-clause
851
#!/usr/bin/env python3 # # Copyright (c) 2019, The OpenThread Authors. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# import datetime import sys import time from typing import Any, Union from pyshark.packet.fields import LayerFieldsContainer, LayerField from pyshark.packet.packet import Packet as RawPacket from pktverify.addrs import EthAddr, ExtAddr, Ipv6Addr from pktverify.bytes import Bytes from pktverify.consts import VALID_LAYER_NAMES from pktverify.null_field import nullField def _auto(v: Union[LayerFieldsContainer, LayerField]): """parse the layer field automatically according to its format""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 or v.get_default_value() is not None, v.fields dv = v.get_default_value() rv = v.raw_value if dv.startswith('0x'): return int(dv, 16) try: if dv == rv: return int(dv) elif int(dv) == int(rv, 16): return int(dv) except (ValueError, TypeError): pass if rv is None: try: return int(dv) except (ValueError, TypeError): pass if ':' in dv and '::' not in dv and dv.replace(':', '') == rv: # '88:00', '8800' return int(rv, 16) # timestamp: 'Jan 1, 1970 08:00:00.000000000 CST', '0000000000000000' # convert to seconds from 1970, ignore the nanosecond for now since # there are integer seconds applied in the test cases try: time_str = datetime.datetime.strptime(dv, "%b %d, %Y %H:%M:%S.%f000 %Z") time_in_sec = time.mktime(time_str.utctimetuple()) return int(time_in_sec) except (ValueError, TypeError): pass try: int(rv, 16) return int(dv) except Exception: pass raise ValueError((v, v.get_default_value(), v.raw_value)) def _payload(v: Union[LayerFieldsContainer, LayerField]) -> bytearray: """parse the layer field as a bytearray""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 hex_value = v.raw_value assert len(hex_value) % 2 == 0 s = bytearray() for i in range(0, len(hex_value), 2): s.append(int(hex_value[i:i + 2], 16)) return s def _hex(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a hex string""" # split v into octets and reverse the order assert not isinstance(v, 
LayerFieldsContainer) or len(v.fields) == 1 return int(v.get_default_value(), 16) def _raw_hex(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a raw hex string""" # split v into octets and reverse the order assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 iv = v.hex_value try: int(v.get_default_value()) assert int(v.get_default_value()) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass try: int(v.get_default_value(), 16) assert int(v.get_default_value(), 16) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass return iv def _raw_hex_rev(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a reversed raw hex string""" # split v into octets and reverse the order assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 rv = v.raw_value octets = [rv[i:i + 2] for i in range(0, len(rv), 2)] iv = int(''.join(reversed(octets)), 16) try: int(v.get_default_value()) assert int(v.get_default_value()) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass try: int(v.get_default_value(), 16) assert int(v.get_default_value(), 16) == iv, (v.get_default_value(), v.raw_value) except ValueError: pass return iv def _dec(v: Union[LayerFieldsContainer, LayerField]) -> int: """parse the layer field as a decimal""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return int(v.get_default_value()) def _float(v: Union[LayerFieldsContainer, LayerField]) -> float: """parse the layer field as a float""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return float(v.get_default_value()) def _str(v: Union[LayerFieldsContainer, LayerField]) -> str: """parse the layer field as a string""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return str(v.get_default_value()) def _bytes(v: Union[LayerFieldsContainer, LayerField]) -> Bytes: """parse the layer field as raw bytes""" assert not isinstance(v, 
LayerFieldsContainer) or len(v.fields) == 1 return Bytes(v.raw_value) def _ext_addr(v: Union[LayerFieldsContainer, LayerField]) -> ExtAddr: """parse the layer field as an extended address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return ExtAddr(v.get_default_value()) def _ipv6_addr(v: Union[LayerFieldsContainer, LayerField]) -> Ipv6Addr: """parse the layer field as an IPv6 address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 return Ipv6Addr(v.get_default_value()) def _eth_addr(v: Union[LayerFieldsContainer, LayerField]) -> EthAddr: """parse the layer field as an Ethernet MAC address""" assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1, v.fields return EthAddr(v.get_default_value()) def _routerid_set(v: Union[LayerFieldsContainer, LayerField]) -> set: """parse the layer field as a set of router ids Notes: the router ID mask in wireshark is a hexadecimal string separated by ':' """ assert not isinstance(v, LayerFieldsContainer) or len(v.fields) == 1 try: ridmask = str(v.get_default_value()) assert isinstance(ridmask, str), ridmask ridmask_int = int(ridmask.replace(':', ''), base=16) rid_set = set() count = 0 while ridmask_int: count += 1 if ridmask_int & 1: rid_set.add(64 - count) ridmask_int = ridmask_int >> 1 except ValueError: pass return rid_set class _first(object): """parse the first layer field""" def __init__(self, sub_parse): self._sub_parse = sub_parse def __call__(self, v: Union[LayerFieldsContainer, LayerField]): return self._sub_parse(v.fields[0]) class _list(object): """parse all layer fields into a list""" def __init__(self, sub_parse): self._sub_parse = sub_parse def __call__(self, v: Union[LayerFieldsContainer, LayerField]): return [self._sub_parse(f) for f in v.fields] _LAYER_FIELDS = { # WPAN 'wpan.fcf': _raw_hex_rev, 'wpan.cmd': _auto, 'wpan.security': _auto, 'wpan.frame_type': _auto, 'wpan.pending': _auto, 'wpan.ack_request': _auto, 'wpan.pan_id_compression': _auto, 
'wpan.seqno_suppression': _auto, 'wpan.ie_present': _auto, 'wpan.dst_addr_mode': _auto, 'wpan.version': _auto, 'wpan.src_addr_mode': _auto, 'wpan.dst_pan': _auto, 'wpan.seq_no': _auto, 'wpan.src16': _auto, 'wpan.dst16': _auto, 'wpan.src64': _ext_addr, 'wpan.dst64': _ext_addr, 'wpan.fcs': _raw_hex_rev, 'wpan.fcs_ok': _auto, 'wpan.frame_length': _dec, 'wpan.key_number': _auto, 'wpan.aux_sec.sec_suite': _auto, 'wpan.aux_sec.security_control_field': _auto, 'wpan.aux_sec.sec_level': _auto, 'wpan.aux_sec.key_id_mode': _auto, 'wpan.aux_sec.frame_counter_suppression': _auto, 'wpan.aux_sec.asn_in_nonce': _auto, 'wpan.aux_sec.reserved': _auto, 'wpan.aux_sec.frame_counter': _auto, 'wpan.aux_sec.key_source': _auto, 'wpan.aux_sec.key_index': _auto, 'wpan.aux_sec.hdr': _str, 'wpan.mic': _auto, 'wpan.channel': _auto, 'wpan.header_ie.id': _list(_auto), 'wpan.header_ie.csl.period': _auto, 'wpan.payload_ie.vendor.oui': _auto, # MLE 'mle.cmd': _auto, 'mle.sec_suite': _hex, 'mle.tlv.type': _list(_dec), 'mle.tlv.len': _list(_dec), 'mle.tlv.mode.receiver_on_idle': _auto, 'mle.tlv.mode.reserved1': _auto, 'mle.tlv.mode.reserved2': _auto, 'mle.tlv.mode.device_type_bit': _auto, 'mle.tlv.mode.network_data': _auto, 'mle.tlv.challenge': _bytes, 'mle.tlv.scan_mask.r': _auto, 'mle.tlv.scan_mask.e': _auto, 'mle.tlv.version': _auto, 'mle.tlv.source_addr': _auto, 'mle.tlv.active_tstamp': _auto, 'mle.tlv.pending_tstamp': _auto, 'mle.tlv.leader_data.partition_id': _auto, 'mle.tlv.leader_data.weighting': _auto, 'mle.tlv.leader_data.data_version': _auto, 'mle.tlv.leader_data.stable_data_version': _auto, 'mle.tlv.leader_data.router_id': _auto, 'mle.tlv.route64.nbr_out': _list(_auto), 'mle.tlv.route64.nbr_in': _list(_auto), 'mle.tlv.route64.id_seq': _auto, 'mle.tlv.route64.id_mask': _routerid_set, 'mle.tlv.route64.cost': _list(_auto), 'mle.tlv.response': _bytes, 'mle.tlv.mle_frm_cntr': _auto, 'mle.tlv.ll_frm_cntr': _auto, 'mle.tlv.link_margin': _auto, 'mle.tlv.conn.sed_dgram_cnt': _auto, 
'mle.tlv.conn.sed_buf_size': _auto, 'mle.tlv.conn.lq3': _auto, 'mle.tlv.conn.lq2': _auto, 'mle.tlv.conn.lq1': _auto, 'mle.tlv.conn.leader_cost': _auto, 'mle.tlv.conn.id_seq': _auto, 'mle.tlv.conn.flags.pp': _auto, 'mle.tlv.conn.active_rtrs': _auto, 'mle.tlv.timeout': _auto, 'mle.tlv.addr16': _auto, 'mle.tlv.channel': _auto, 'mle.tlv.addr_reg_iid': _list(_auto), 'mle.tlv.link_enh_ack_flags': _auto, 'mle.tlv.link_forward_series': _list(_auto), 'mle.tlv.link_requested_type_id_flags': _list(_hex), 'mle.tlv.link_sub_tlv': _auto, 'mle.tlv.link_status_sub_tlv': _auto, 'mle.tlv.query_id': _auto, 'mle.tlv.metric_type_id_flags.type': _list(_hex), 'mle.tlv.metric_type_id_flags.metric': _list(_hex), 'mle.tlv.metric_type_id_flags.l': _list(_hex), 'mle.tlv.link_requested_type_id_flags': _bytes, # IP 'ip.version': _auto, 'ip.src': _str, 'ip.src_host': _str, 'ip.dst': _str, 'ip.dst_host': _str, 'ip.ttl': _auto, 'ip.proto': _auto, 'ip.len': _auto, 'ip.id': _auto, 'ip.host': _list(_str), 'ip.hdr_len': _dec, 'ip.frag_offset': _auto, 'ip.flags.rb': _auto, 'ip.flags.mf': _auto, 'ip.flags.df': _auto, 'ip.dsfield.ecn': _auto, 'ip.dsfield.dscp': _auto, 'ip.checksum.status': _auto, 'ip.addr': _list(_str), 'ip.options.routeralert': _bytes, 'ip.opt.type.number': _auto, 'ip.opt.type.copy': _auto, 'ip.opt.type.class': _auto, 'ip.opt.ra': _auto, 'ip.opt.len': _auto, # UDP 'udp.stream': _auto, 'udp.srcport': _auto, 'udp.dstport': _auto, 'udp.length': _auto, 'udp.port': _list(_dec), 'udp.checksum.status': _auto, # IPv6 'ipv6.version': _auto, 'ipv6.src': _ipv6_addr, 'ipv6.src_host': _ipv6_addr, 'ipv6.dst': _ipv6_addr, 'ipv6.dst_host': _ipv6_addr, 'ipv6.addr': _list(_ipv6_addr), 'ipv6.tclass.dscp': _auto, 'ipv6.tclass.ecn': _auto, 'ipv6.flow': _auto, 'ipv6.hlim': _auto, 'ipv6.nxt': _auto, 'ipv6.hopopts.len': _auto, 'ipv6.hopopts.nxt': _auto, 'ipv6.hopopts.len_oct': _dec, 'ipv6.host': _list(_ipv6_addr), 'ipv6.plen': _auto, 'ipv6.opt.type.rest': _list(_auto), 'ipv6.opt.type.change': _list(_auto), 
'ipv6.opt.type.action': _list(_auto), 'ipv6.opt.router_alert': _auto, 'ipv6.opt.padn': _str, 'ipv6.opt.length': _list(_auto), 'ipv6.opt.mpl.seed_id': _bytes, 'ipv6.opt.mpl.sequence': _auto, 'ipv6.opt.mpl.flag.v': _auto, 'ipv6.opt.mpl.flag.s': _auto, 'ipv6.opt.mpl.flag.rsv': _auto, 'ipv6.opt.mpl.flag.m': _auto, # Eth 'eth.src': _eth_addr, 'eth.src_resolved': _eth_addr, 'eth.dst': _eth_addr, 'eth.dst_resolved': _eth_addr, 'eth.type': _auto, 'eth.addr': _list(_eth_addr), 'eth.addr_resolved': _list(_eth_addr), 'eth.ig': _list(_auto), 'eth.lg': _list(_auto), # 6LOWPAN '6lowpan.src': _ipv6_addr, '6lowpan.dst': _ipv6_addr, '6lowpan.udp.src': _auto, '6lowpan.udp.dst': _auto, '6lowpan.udp.checksum': _auto, '6lowpan.frag.offset': _auto, '6lowpan.frag.tag': _auto, '6lowpan.frag.size': _auto, '6lowpan.pattern': _list(_auto), '6lowpan.hops': _auto, '6lowpan.padding': _auto, '6lowpan.next': _auto, '6lowpan.flow': _auto, '6lowpan.ecn': _auto, '6lowpan.iphc.tf': _auto, '6lowpan.iphc.m': _auto, '6lowpan.iphc.nh': _auto, '6lowpan.iphc.hlim': _auto, '6lowpan.iphc.cid': _auto, '6lowpan.iphc.sac': _auto, '6lowpan.iphc.sam': _auto, '6lowpan.iphc.dac': _auto, '6lowpan.iphc.dam': _auto, '6lowpan.iphc.sci': _auto, '6lowpan.iphc.dci': _auto, '6lowpan.iphc.sctx.prefix': _ipv6_addr, '6lowpan.iphc.dctx.prefix': _ipv6_addr, '6lowpan.mesh.v': _auto, '6lowpan.nhc.pattern': _list(_auto), '6lowpan.nhc.udp.checksum': _auto, '6lowpan.nhc.udp.ports': _auto, '6lowpan.nhc.ext.nh': _auto, '6lowpan.nhc.ext.length': _auto, '6lowpan.nhc.ext.eid': _auto, '6lowpan.reassembled.length': _auto, '6lowpan.fragments': _str, '6lowpan.fragment.count': _auto, '6lowpan.mesh.orig16': _auto, '6lowpan.mesh.hops8': _auto, '6lowpan.mesh.hops': _auto, '6lowpan.mesh.f': _auto, '6lowpan.mesh.dest16': _auto, # ICMPv6 'icmpv6.type': _first(_auto), 'icmpv6.code': _first(_auto), 'icmpv6.checksum': _first(_auto), 'icmpv6.reserved': _raw_hex, 'icmpv6.resptime': _float, 'icmpv6.resp_to': _auto, 'icmpv6.mldr.nb_mcast_records': _auto, 
'icmpv6.nd.ra.cur_hop_limit': _auto, 'icmpv6.nd.ns.target_address': _ipv6_addr, 'icmpv6.nd.na.target_address': _ipv6_addr, 'icmpv6.nd.na.flag.s': _auto, 'icmpv6.nd.na.flag.o': _auto, 'icmpv6.nd.na.flag.r': _auto, 'icmpv6.nd.na.flag.rsv': _auto, 'icmpv6.mldr.mar.record_type': _list(_auto), 'icmpv6.mldr.mar.aux_data_len': _list(_auto), 'icmpv6.mldr.mar.nb_sources': _list(_auto), 'icmpv6.mldr.mar.multicast_address': _list(_ipv6_addr), 'icmpv6.opt.type': _list(_auto), 'icmpv6.opt.nonce': _bytes, 'icmpv6.opt.linkaddr': _eth_addr, 'icmpv6.opt.src_linkaddr': _eth_addr, 'icmpv6.opt.target_linkaddr': _eth_addr, 'icmpv6.opt.route_lifetime': _auto, 'icmpv6.opt.route_info.flag.route_preference': _auto, 'icmpv6.opt.route_info.flag.reserved': _auto, 'icmpv6.opt.prefix.valid_lifetime': _auto, 'icmpv6.opt.prefix.preferred_lifetime': _auto, 'icmpv6.opt.prefix.length': _list(_auto), 'icmpv6.opt.prefix.flag.reserved': _auto, 'icmpv6.opt.prefix.flag.r': _auto, 'icmpv6.opt.prefix.flag.l': _auto, 'icmpv6.opt.prefix.flag.a': _auto, 'icmpv6.opt.length': _list(_auto), 'icmpv6.opt.reserved': _str, 'icmpv6.nd.ra.router_lifetime': _auto, 'icmpv6.nd.ra.retrans_timer': _auto, 'icmpv6.nd.ra.reachable_time': _auto, 'icmpv6.nd.ra.flag.rsv': _auto, 'icmpv6.nd.ra.flag.prf': _auto, 'icmpv6.nd.ra.flag.p': _auto, 'icmpv6.nd.ra.flag.o': _auto, 'icmpv6.nd.ra.flag.m': _auto, 'icmpv6.nd.ra.flag.h': _auto, 'icmpv6.echo.sequence_number': _auto, 'icmpv6.echo.identifier': _auto, 'icmpv6.data.len': _auto, # COAP 'coap.code': _auto, 'coap.version': _auto, 'coap.type': _auto, 'coap.mid': _auto, 'coap.token_len': _auto, 'coap.token': _auto, 'coap.opt.uri_path': _list(_str), 'coap.opt.name': _list(_str), 'coap.opt.length': _list(_auto), 'coap.opt.uri_path_recon': _str, 'coap.payload': _payload, 'coap.payload_length': _auto, 'coap.payload_desc': _str, 'coap.opt.end_marker': _auto, 'coap.opt.desc': _list(_str), 'coap.opt.delta': _list(_auto), 'coap.response_to': _auto, 'coap.response_time': _float, # COAP TLVS 
'coap.tlv.type': _list(_auto), 'coap.tlv.status': _auto, 'coap.tlv.target_eid': _ipv6_addr, 'coap.tlv.ml_eid': _ext_addr, 'coap.tlv.last_transaction_time': _auto, 'coap.tlv.rloc16': _auto, 'coap.tlv.net_name': _str, 'coap.tlv.ext_mac_addr': _ext_addr, 'coap.tlv.router_mask_assigned': _auto, 'coap.tlv.router_mask_id_seq': _auto, # dtls 'dtls.handshake.type': _list(_auto), 'dtls.handshake.cookie': _auto, 'dtls.record.content_type': _list(_auto), 'dtls.alert_message.desc': _auto, # thread beacon 'thread_bcn.protocol': _auto, 'thread_bcn.version': _auto, 'thread_bcn.network_name': _str, 'thread_bcn.epid': _ext_addr, # thread_address 'thread_address.tlv.len': _list(_auto), 'thread_address.tlv.type': _list(_auto), 'thread_address.tlv.status': _auto, 'thread_address.tlv.target_eid': _ipv6_addr, 'thread_address.tlv.ext_mac_addr': _ext_addr, 'thread_address.tlv.router_mask_id_seq': _auto, 'thread_address.tlv.router_mask_assigned': _bytes, 'thread_address.tlv.rloc16': _hex, 'thread_address.tlv.target_eid': _ipv6_addr, 'thread_address.tlv.ml_eid': _ext_addr, # thread bl 'thread_bl.tlv.type': _list(_auto), 'thread_bl.tlv.len': _list(_auto), 'thread_bl.tlv.target_eid': _ipv6_addr, 'thread_bl.tlv.ml_eid': _ext_addr, 'thread_bl.tlv.last_transaction_time': _auto, 'thread_bl.tlv.timeout': _auto, # THEAD NM 'thread_nm.tlv.type': _list(_auto), 'thread_nm.tlv.ml_eid': _ext_addr, 'thread_nm.tlv.target_eid': _ipv6_addr, 'thread_nm.tlv.status': _auto, 'thread_nm.tlv.timeout': _auto, # thread_meshcop is not a real layer 'thread_meshcop.len_size_mismatch': _str, 'thread_meshcop.tlv.type': _list(_auto), 'thread_meshcop.tlv.len8': _list(_auto), 'thread_meshcop.tlv.net_name': _list(_str), # from thread_bl 'thread_meshcop.tlv.commissioner_id': _str, 'thread_meshcop.tlv.commissioner_sess_id': _auto, # from mle "thread_meshcop.tlv.channel_page": _auto, # from ble "thread_meshcop.tlv.channel": _list(_auto), # from ble "thread_meshcop.tlv.chan_mask": _str, # from ble 
'thread_meshcop.tlv.chan_mask_page': _auto, 'thread_meshcop.tlv.chan_mask_len': _auto, 'thread_meshcop.tlv.chan_mask_mask': _bytes, 'thread_meshcop.tlv.discovery_req_ver': _auto, 'thread_meshcop.tlv.discovery_rsp_ver': _auto, 'thread_meshcop.tlv.discovery_rsp_n': _auto, 'thread_meshcop.tlv.energy_list': _list(_auto), 'thread_meshcop.tlv.pan_id': _list(_auto), 'thread_meshcop.tlv.xpan_id': _bytes, 'thread_meshcop.tlv.ml_prefix': _bytes, 'thread_meshcop.tlv.master_key': _bytes, 'thread_meshcop.tlv.pskc': _bytes, 'thread_meshcop.tlv.sec_policy_rot': _auto, 'thread_meshcop.tlv.sec_policy_o': _auto, 'thread_meshcop.tlv.sec_policy_n': _auto, 'thread_meshcop.tlv.sec_policy_r': _auto, 'thread_meshcop.tlv.sec_policy_c': _auto, 'thread_meshcop.tlv.sec_policy_b': _auto, 'thread_meshcop.tlv.state': _auto, 'thread_meshcop.tlv.steering_data': _bytes, 'thread_meshcop.tlv.unknown': _bytes, 'thread_meshcop.tlv.udp_port': _list(_auto), 'thread_meshcop.tlv.ba_locator': _auto, 'thread_meshcop.tlv.jr_locator': _auto, 'thread_meshcop.tlv.active_tstamp': _auto, 'thread_meshcop.tlv.pending_tstamp': _auto, 'thread_meshcop.tlv.delay_timer': _auto, 'thread_meshcop.tlv.ipv6_addr': _list(_ipv6_addr), # THREAD NWD 'thread_nwd.tlv.type': _list(_auto), 'thread_nwd.tlv.len': _list(_auto), 'thread_nwd.tlv.stable': _list(_auto), 'thread_nwd.tlv.service.t': _auto, 'thread_nwd.tlv.service.s_id': _auto, 'thread_nwd.tlv.service.s_data_len': _auto, 'thread_nwd.tlv.service.s_data.seqno': _auto, 'thread_nwd.tlv.service.s_data.rrdelay': _auto, 'thread_nwd.tlv.service.s_data.mlrtimeout': _auto, 'thread_nwd.tlv.server_16': _list(_auto), 'thread_nwd.tlv.border_router_16': _list(_auto), 'thread_nwd.tlv.sub_tlvs': _list(_str), # TODO: support thread_nwd.tlv.prefix.length and thread_nwd.tlv.prefix.domain_id 'thread_nwd.tlv.prefix': _list(_ipv6_addr), 'thread_nwd.tlv.border_router.pref': _auto, 'thread_nwd.tlv.border_router.flag.s': _list(_auto), 'thread_nwd.tlv.border_router.flag.r': _list(_auto), 
'thread_nwd.tlv.border_router.flag.p': _list(_auto), 'thread_nwd.tlv.border_router.flag.o': _list(_auto), 'thread_nwd.tlv.border_router.flag.n': _list(_auto), 'thread_nwd.tlv.border_router.flag.dp': _list(_auto), 'thread_nwd.tlv.border_router.flag.d': _list(_auto), 'thread_nwd.tlv.border_router.flag.c': _list(_auto), 'thread_nwd.tlv.6co.flag.reserved': _auto, 'thread_nwd.tlv.6co.flag.cid': _auto, 'thread_nwd.tlv.6co.flag.c': _list(_auto), 'thread_nwd.tlv.6co.context_length': _auto, # Thread Diagnostic 'thread_diagnostic.tlv.type': _list(_auto), 'thread_diagnostic.tlv.len8': _list(_auto), 'thread_diagnostic.tlv.general': _list(_str), # DNS 'dns.resp.ttl': _auto, 'dns.flags.response': _auto, } _layer_containers = set() for key in _LAYER_FIELDS: assert key.strip() == key and ' ' not in key, key secs = key.split('.') assert len(secs) >= 2 assert secs[0] in VALID_LAYER_NAMES, secs[0] for i in range(len(secs) - 2): path = secs[0] + '.' + '.'.join(secs[1:i + 2]) assert path not in _LAYER_FIELDS, '%s can not be both field and path' % path _layer_containers.add(path) def is_layer_field(uri: str) -> bool: """ Returns if the URI is a valid layer field. :param uri: The layer field URI. """ return uri in _LAYER_FIELDS def is_layer_field_container(uri: str) -> bool: """ Returns if the URI is a valid layer field container. :param uri: The layer field container URI. """ return uri in _layer_containers def get_layer_field(packet: RawPacket, field_uri: str) -> Any: """ Get a given layer field from the packet. :param packet: The packet. :param field_uri: The layer field URI. :return: The specified layer field. 
""" assert isinstance(packet, RawPacket) secs = field_uri.split('.') layer_depth = 0 layer_name = secs[0] if layer_name.endswith('inner'): layer_name = layer_name[:-len('inner')] field_uri = '.'.join([layer_name] + secs[1:]) layer_depth = 1 if is_layer_field(field_uri): candidate_layers = _get_candidate_layers(packet, layer_name) for layers in candidate_layers: if layer_depth >= len(layers): continue layer = layers[layer_depth] v = layer.get_field(field_uri) if v is not None: try: v = _LAYER_FIELDS[field_uri](v) print("[%s = %r] " % (field_uri, v), file=sys.stderr) return v except Exception as ex: raise ValueError('can not parse field %s = %r' % (field_uri, (v.get_default_value(), v.raw_value))) from ex print("[%s = %s] " % (field_uri, "null"), file=sys.stderr) return nullField elif is_layer_field_container(field_uri): from pktverify.layer_fields_container import LayerFieldsContainer return LayerFieldsContainer(packet, field_uri) else: raise NotImplementedError('Field %s is not valid, please add it to `_LAYER_FIELDS`' % field_uri) def check_layer_field_exists(packet, field_uri): """ Check if a given layer field URI exists in the packet. :param packet: The packet to check. :param field_uri: The layer field URI. :return: Whether the layer field URI exists in the packet. 
""" assert isinstance(packet, RawPacket) secs = field_uri.split('.') layer_name = secs[0] if not is_layer_field(field_uri) and not is_layer_field_container(field_uri): raise NotImplementedError('%s is neither a field or field container' % field_uri) candidate_layers = _get_candidate_layers(packet, layer_name) for layers in candidate_layers: for layer in layers: for k, v in layer._all_fields.items(): if k == field_uri or k.startswith(field_uri + '.'): return True return False def _get_candidate_layers(packet, layer_name): if layer_name == 'thread_meshcop': candidate_layer_names = ['thread_meshcop', 'mle', 'coap', 'thread_bl', 'thread_nm'] elif layer_name == 'thread_nwd': candidate_layer_names = ['mle', 'thread_address', 'thread_diagnostic'] elif layer_name == 'wpan': candidate_layer_names = ['wpan', 'mle'] elif layer_name == 'ip': candidate_layer_names = ['ip', 'ipv6'] elif layer_name == 'thread_bcn': candidate_layer_names = ['thread_bcn'] else: candidate_layer_names = [layer_name] layers = [] for ln in candidate_layer_names: if hasattr(packet, ln): layers.append(packet.get_multiple_layers(ln)) return layers
jwhui/openthread
tests/scripts/thread-cert/pktverify/layer_fields.py
Python
bsd-3-clause
26,646
"aaaaaaaaaa,aaaaaaaaaaaaaaa".replace(/^(a+)\1*,\1+$/,"$1")
daejunpark/jsaf
tests/interpreter_tests/regexp-17.js
JavaScript
bsd-3-clause
59
/* * Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the United States Government nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package gov.hhs.fha.nhinc.patientdiscovery.nhin.deferred.response.proxy; import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType; import gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetSystemType; import gov.hhs.fha.nhinc.aspect.NwhinInvocationEvent; import gov.hhs.fha.nhinc.patientdiscovery.aspect.PRPAIN201306UV02EventDescriptionBuilder; import gov.hhs.fha.nhinc.patientdiscovery.aspect.MCCIIN000002UV01EventDescriptionBuilder; import org.hl7.v3.MCCIIN000002UV01; import org.hl7.v3.PRPAIN201306UV02; /** * * @author JHOPPESC */ public class NhinPatientDiscoveryDeferredRespProxyNoOpImpl implements NhinPatientDiscoveryDeferredRespProxy { @NwhinInvocationEvent(beforeBuilder = PRPAIN201306UV02EventDescriptionBuilder.class, afterReturningBuilder = MCCIIN000002UV01EventDescriptionBuilder.class, serviceType = "Patient Discovery Deferred Response", version = "1.0") public MCCIIN000002UV01 respondingGatewayPRPAIN201306UV02(PRPAIN201306UV02 body, AssertionType assertion, NhinTargetSystemType target) { return new MCCIIN000002UV01(); } }
sailajaa/CONNECT
Product/Production/Services/PatientDiscoveryCore/src/main/java/gov/hhs/fha/nhinc/patientdiscovery/nhin/deferred/response/proxy/NhinPatientDiscoveryDeferredRespProxyNoOpImpl.java
Java
bsd-3-clause
2,810
/* * Copyright (C) 2012 Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include "config.h" #include "MemoryUsageSupport.h" #include <SkGraphics.h> #include <public/Platform.h> namespace WebCore { int MemoryUsageSupport::memoryUsageMB() { return WebKit::Platform::current()->memoryUsageMB(); } int MemoryUsageSupport::actualMemoryUsageMB() { return WebKit::Platform::current()->actualMemoryUsageMB(); } int MemoryUsageSupport::lowMemoryUsageMB() { return WebKit::Platform::current()->lowMemoryUsageMB(); } int MemoryUsageSupport::highMemoryUsageMB() { return WebKit::Platform::current()->highMemoryUsageMB(); } int MemoryUsageSupport::highUsageDeltaMB() { return WebKit::Platform::current()->highUsageDeltaMB(); } bool MemoryUsageSupport::processMemorySizesInBytes(size_t* privateBytes, size_t* sharedBytes) { return WebKit::Platform::current()->processMemorySizesInBytes(privateBytes, sharedBytes); } void MemoryUsageSupport::memoryUsageByComponents(Vector<ComponentInfo>& components) { size_t size = SkGraphics::GetFontCacheUsed(); components.append(ComponentInfo("GlyphCache", size)); } } // namespace WebCore
leighpauls/k2cro4
third_party/WebKit/Source/WebCore/platform/chromium/MemoryUsageSupportChromium.cpp
C++
bsd-3-clause
2,647
//==============================================================================
//         Copyright 2003 - 2011 LASMEA UMR 6602 CNRS/Univ. Clermont II
//         Copyright 2009 - 2014 LRI    UMR 8623 CNRS/Univ Paris Sud XI
//         Copyright 2012 - 2014 MetaScale SAS
//
//          Distributed under the Boost Software License, Version 1.0.
//                 See accompanying file LICENSE.txt or copy at
//                     http://www.boost.org/LICENSE_1_0.txt
//==============================================================================
#ifndef BOOST_SIMD_CONSTANT_CONSTANTS_SIMD_VMX_ALTIVEC_EIGHT_HPP_INCLUDED
#define BOOST_SIMD_CONSTANT_CONSTANTS_SIMD_VMX_ALTIVEC_EIGHT_HPP_INCLUDED

// These specialisations are only meaningful when the toolchain exposes
// AltiVec/VMX intrinsics.
#ifdef BOOST_SIMD_HAS_VMX_SUPPORT

#include <boost/simd/constant/constants/eight.hpp>
#include <boost/dispatch/attributes.hpp>

namespace boost { namespace simd { namespace ext
{
  // VMX overloads of the Eight constant: one functor per integer lane type.
  // Each one broadcasts the immediate value 8 into every lane of the vector
  // via the matching vec_splat_* AltiVec intrinsic.

  // 8-bit signed lanes.
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < int8_<A0>
                                                        , boost::simd::tag::vmx_
                                                        > > ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_s8(8);
    }
  };

  // 8-bit unsigned lanes.
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < uint8_<A0>
                                                        , boost::simd::tag::vmx_
                                                        > > ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_u8(8);
    }
  };

  // 16-bit signed lanes.
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < int16_<A0>
                                                        , boost::simd::tag::vmx_
                                                        > > ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_s16(8);
    }
  };

  // 16-bit unsigned lanes.
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < uint16_<A0>
                                                        , boost::simd::tag::vmx_
                                                        > > ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_u16(8);
    }
  };

  // 32-bit signed lanes.
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < int32_<A0>
                                                        , boost::simd::tag::vmx_
                                                        > > ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_s32(8);
    }
  };

  // 32-bit unsigned lanes.
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < uint32_<A0>
                                                        , boost::simd::tag::vmx_
                                                        > > ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_u32(8);
    }
  };
} } }

#endif
#endif
hainm/pythran
third_party/boost/simd/constant/constants/simd/vmx/altivec/eight.hpp
C++
bsd-3-clause
4,832
<?php
/**
 * Zend Framework (http://framework.zend.com/)
 *
 * @link      http://github.com/zendframework/zf2 for the canonical source repository
 * @copyright Copyright (c) 2005-2012 Zend Technologies USA Inc. (http://www.zend.com)
 * @license   http://framework.zend.com/license/new-bsd New BSD License
 * @package   Zend_Mail
 */

namespace Zend\Mail;

use Countable;
use Iterator;

/**
 * Iterable collection of mail addresses.
 *
 * Addresses are stored keyed by their lower-cased email, so each email
 * appears at most once regardless of case.
 *
 * @category Zend
 * @package  Zend_Mail
 */
class AddressList implements Countable, Iterator
{
    /**
     * Managed Address objects, indexed by lower-cased email.
     *
     * @var array
     */
    protected $addresses = array();

    /**
     * Add an address to the list.
     *
     * An address already present (matched case-insensitively by email) is
     * silently skipped.
     *
     * @param  string|Address\AddressInterface $emailOrAddress
     * @param  null|string $name
     * @throws Exception\InvalidArgumentException
     * @return AddressList
     */
    public function add($emailOrAddress, $name = null)
    {
        if (is_string($emailOrAddress)) {
            $emailOrAddress = $this->createAddress($emailOrAddress, $name);
        } elseif (!$emailOrAddress instanceof Address\AddressInterface) {
            $received = is_object($emailOrAddress) ? get_class($emailOrAddress) : gettype($emailOrAddress);
            throw new Exception\InvalidArgumentException(sprintf(
                '%s expects an email address or %s\Address object as its first argument; received "%s"',
                __METHOD__,
                __NAMESPACE__,
                $received
            ));
        }

        $key = strtolower($emailOrAddress->getEmail());
        if (!$this->has($key)) {
            $this->addresses[$key] = $emailOrAddress;
        }

        return $this;
    }

    /**
     * Add many addresses at once.
     *
     * String keys are treated as emails with the value as the display name;
     * integer/numeric keys mean the value itself is an email string or an
     * Address\AddressInterface object.
     *
     * @param  array $addresses
     * @throws Exception\RuntimeException
     * @return AddressList
     */
    public function addMany(array $addresses)
    {
        foreach ($addresses as $key => $value) {
            if (is_int($key) || is_numeric($key)) {
                $this->add($value);
                continue;
            }
            if (is_string($key)) {
                $this->add($key, $value);
                continue;
            }
            // Defensive: PHP array keys are normally int or string only.
            throw new Exception\RuntimeException(sprintf(
                'Invalid key type in provided addresses array ("%s")',
                (is_object($key) ? get_class($key) : var_export($key, 1))
            ));
        }

        return $this;
    }

    /**
     * Merge every address of another list into this one.
     *
     * @param  AddressList $addressList
     * @return AddressList
     */
    public function merge(AddressList $addressList)
    {
        foreach ($addressList as $entry) {
            $this->add($entry);
        }

        return $this;
    }

    /**
     * Does the given email exist in this list (case-insensitive)?
     *
     * @param  string $email
     * @return bool
     */
    public function has($email)
    {
        return isset($this->addresses[strtolower($email)]);
    }

    /**
     * Retrieve an address by email, or false when absent.
     *
     * @param  string $email
     * @return boolean|Address\AddressInterface
     */
    public function get($email)
    {
        $key = strtolower($email);

        return isset($this->addresses[$key]) ? $this->addresses[$key] : false;
    }

    /**
     * Remove an address from the list.
     *
     * @param  string $email
     * @return bool true when an entry was removed, false when none matched
     */
    public function delete($email)
    {
        $key = strtolower($email);
        if (!isset($this->addresses[$key])) {
            return false;
        }

        unset($this->addresses[$key]);

        return true;
    }

    /**
     * Number of addresses currently held.
     *
     * @return int
     */
    public function count()
    {
        return count($this->addresses);
    }

    /**
     * Reset the internal array pointer.
     *
     * @return mixed the first address, or false when the list is empty
     * @see addresses
     */
    public function rewind()
    {
        return reset($this->addresses);
    }

    /**
     * Address at the current iteration position.
     *
     * @return Address
     */
    public function current()
    {
        return current($this->addresses);
    }

    /**
     * Lower-cased email key at the current iteration position.
     *
     * @return string
     */
    public function key()
    {
        return key($this->addresses);
    }

    /**
     * Advance the internal array pointer.
     *
     * @return mixed the next address, or false past the end
     * @see addresses
     */
    public function next()
    {
        return next($this->addresses);
    }

    /**
     * Whether iteration may continue from the current position.
     *
     * @return bool
     */
    public function valid()
    {
        $position = key($this->addresses);

        return $position !== null && $position !== false;
    }

    /**
     * Factory for Address objects created from bare email strings.
     *
     * @param  string $email
     * @param  string|null $name
     * @return Address
     */
    protected function createAddress($email, $name)
    {
        return new Address($email, $name);
    }
}
rvdpol/gh18
vendor/zendframework/zendframework/library/Zend/Mail/AddressList.php
PHP
bsd-3-clause
5,645