code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
// Entry point for the stand-strata formset page: once the DOM is ready, wire up
// each species <select> so that its row's size-class <select> options are
// rebuilt whenever the species changes.
$(document).ready(function() {
// function loadStratumEntryTable() {
// var createStratumBtn = document.querySelector('button[data-bind="click: createStratum"]');
// console.log(createStratumBtn);
// if (createStratumBtn) {
// createStratumBtn.click();
// window.clearTimeout();
// } else {
// window.setTimeout(function() {
// loadStratumEntryTable();
// }, 1000);
// }
// }
// not sure why jquery says dom is ready but click() won't work without settimeout below
// best guess is the knockout observables are not ready yet
// TODO: Find a better way than settimeout
// window.setTimeout(function() {
// loadStratumEntryTable();
// }, 1000);
// Rebuild the dependent size-class dropdown whenever a species is picked.
$('.field-species select').on('change', function(e) {
var species = e.currentTarget.value;
// Formset element ids look like "id_form-<row>-<field>"; extract the row index.
var row_id = e.currentTarget.id.split('-')[1];
var size_class_selector = $('#id_form-'+ row_id + '-size_class');
var size_select_values = {};
if (species.length > 0) {
// choice_json is a page-level global set by the template. NOTE(review):
// assumes an entry exists for every selectable species -- [0] would throw
// if the filter result is empty; confirm against the template data.
var size_classes = choice_json.filter(function(o){return o.species == species;} )[0].size_classes;
for (var i=0; i < size_classes.length; i++) {
// Option value is the "(min, max)" tuple string; label renders inches, e.g. 4" to 6".
size_select_values["("+ size_classes[i].min + ", " + size_classes[i].max + ")"] = size_classes[i].min + '" to ' + size_classes[i].max + '"';
}
}
// Legacy compatibility branch: jQuery objects always expose .prop in modern
// jQuery, so the .attr fallback is effectively dead code -- kept as-is.
if(size_class_selector.prop) {
var options = size_class_selector.prop('options');
} else {
var options = size_class_selector.attr('options');
}
// Drop the old size-class options, then repopulate from the map built above.
$('option', size_class_selector).remove();
$.each(size_select_values, function(val, text) {
options[options.length] = new Option(text, val);
});
});
});
// Formset bookkeeping, read from Django's management-form hidden inputs.
// NOTE(review): these are implicit globals (no `var`) -- presumably intentional
// so that submitForm/addRow and inline handlers can share them; confirm.
total_forms = parseInt($('#id_form-TOTAL_FORMS').val());
initial_forms = parseInt($('#id_form-INITIAL_FORMS').val());
// Show at least 3 rows; with more than 2 initial forms, show them all plus one blank.
init_show = initial_forms > 2 ? initial_forms + 1 : 3;
form_show_index = init_show;
// Reveal the initially-visible formset rows.
for (var i = 0; i < init_show; i++) {
$('#formset-row-' + i).show();
}
// POSTs the strata form via AJAX and, on success, redirects to the stand's
// forest-profile page. `discovery_stand_uid` is assumed to be a page-level
// global supplied by the template -- TODO confirm.
submitForm = function() {
$.ajax({
type: "POST",
url: $('#stand_strata_form').attr('action'),
data: $('#stand_strata_form').serialize(),
success: function(data){
window.location = "/discovery/forest_profile/" + discovery_stand_uid + "/"
}
});
}
// Reveals the next hidden formset row, up to the number of forms the server
// rendered (total_forms); alerts the user when no hidden rows remain.
addRow = function() {
if (form_show_index < total_forms) {
$('#formset-row-' + form_show_index).show();
form_show_index++;
} else {
alert('Cannot add more rows at this time. Please save and return to this form to add more.');
}
}
| Ecotrust/forestplanner | lot/discovery/static/discovery/js/strata.js | JavaScript | bsd-3-clause | 2,461 |
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using NServiceKit.Common.Tests.Models;
namespace NServiceKit.OrmLite.Tests
{
/// <summary>
/// Integration tests for the basic CRUD surface of <c>OrmLitePersistenceProvider</c>
/// (GetById, GetByIds, Store, Delete) against a live database connection.
/// </summary>
[TestFixture]
public class OrmLiteBasicPersistenceProviderTests
: OrmLiteTestBase
{
/// <summary>Can get by identifier from basic persistence provider.</summary>
[Test]
public void Can_GetById_from_basic_persistence_provider()
{
using (var db = OpenDbConnection())
{
// Recreate the table (true => overwrite existing) so the test starts clean.
db.CreateTable<ModelWithFieldsOfDifferentTypes>(true);
var basicProvider = new OrmLitePersistenceProvider(db);
var row = ModelWithFieldsOfDifferentTypes.Create(1);
db.Insert(row);
// Read back through the provider and compare with the inserted row.
var providerRow = basicProvider.GetById<ModelWithFieldsOfDifferentTypes>(1);
ModelWithFieldsOfDifferentTypes.AssertIsEqual(providerRow, row);
}
}
/// <summary>Can get by identifiers from basic persistence provider.</summary>
[Test]
public void Can_GetByIds_from_basic_persistence_provider()
{
using (var db = OpenDbConnection())
{
db.CreateTable<ModelWithFieldsOfDifferentTypes>(true);
var basicProvider = new OrmLitePersistenceProvider(db);
var rowIds = new List<int> { 1, 2, 3, 4, 5 };
var rows = rowIds.ConvertAll(x => ModelWithFieldsOfDifferentTypes.Create(x));
rows.ForEach(x => db.Insert(x));
// Request a subset of the inserted ids and verify exactly that subset returns.
var getRowIds = new[] { 2, 4 };
var providerRows = basicProvider.GetByIds<ModelWithFieldsOfDifferentTypes>(getRowIds).ToList();
var providerRowIds = providerRows.ConvertAll(x => x.Id);
Assert.That(providerRowIds, Is.EquivalentTo(getRowIds));
}
}
/// <summary>Can store from basic persistence provider.</summary>
[Test]
public void Can_Store_from_basic_persistence_provider()
{
using (var db = OpenDbConnection())
{
db.CreateTable<ModelWithFieldsOfDifferentTypes>(true);
var basicProvider = new OrmLitePersistenceProvider(db);
var rowIds = new List<int> { 1, 2, 3, 4, 5 };
var rows = rowIds.ConvertAll(x => ModelWithFieldsOfDifferentTypes.Create(x));
// Write through the provider, then read back through OrmLite directly.
rows.ForEach(x => basicProvider.Store(x));
var getRowIds = new[] { 2, 4 };
var providerRows = db.GetByIds<ModelWithFieldsOfDifferentTypes>(getRowIds).ToList();
var providerRowIds = providerRows.ConvertAll(x => x.Id);
Assert.That(providerRowIds, Is.EquivalentTo(getRowIds));
}
}
/// <summary>Can delete from basic persistence provider.</summary>
[Test]
public void Can_Delete_from_basic_persistence_provider()
{
using (var db = OpenDbConnection())
{
db.CreateTable<ModelWithFieldsOfDifferentTypes>(true);
var basicProvider = new OrmLitePersistenceProvider(db);
var rowIds = new List<int> { 1, 2, 3, 4, 5 };
var rows = rowIds.ConvertAll(x => ModelWithFieldsOfDifferentTypes.Create(x));
rows.ForEach(x => db.Insert(x));
// Delete two rows through the provider...
var deleteRowIds = new List<int> { 2, 4 };
foreach (var row in rows)
{
if (deleteRowIds.Contains(row.Id))
{
basicProvider.Delete(row);
}
}
// ...then verify only the non-deleted ids remain.
var providerRows = basicProvider.GetByIds<ModelWithFieldsOfDifferentTypes>(rowIds).ToList();
var providerRowIds = providerRows.ConvertAll(x => x.Id);
var remainingIds = new List<int>(rowIds);
deleteRowIds.ForEach(x => remainingIds.Remove(x));
Assert.That(providerRowIds, Is.EquivalentTo(remainingIds));
}
}
}
} | NServiceKit/NServiceKit.OrmLite | tests/NServiceKit.OrmLite.Tests/OrmLiteBasicPersistenceProviderTests.cs | C# | bsd-3-clause | 3,399 |
package com.sunand;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Unit test for simple App.
*/
/**
 * Smoke-test suite for the application, written against the JUnit 3.x API.
 */
public class AppTest
    extends TestCase
{
    /**
     * Constructs a single test case with the given name.
     *
     * @param name name of the test case
     */
    public AppTest( String name )
    {
        super( name );
    }

    /**
     * @return a suite containing every test declared in this class
     */
    public static Test suite()
    {
        return new TestSuite( AppTest.class );
    }

    /**
     * Trivial sanity check that always passes.
     */
    public void testApp()
    {
        assertTrue( true );
    }
}
| u-learn/LearnRx | src/test/java/com/sunand/AppTest.java | Java | bsd-3-clause | 638 |
// AMD module for the login view: registers the fetched login.html template with
// Avalon and defines its view-model. The `domReady!` plugin defers execution
// until the DOM is available; `text!./login.html` loads the template source.
define(["avalon","domReady!","mmRequest","text!./login.html"], function(avalon, domReady,mmRequest,login) {
// Cache the template HTML so Avalon can resolve it by the "login" key.
avalon.templateCache.login = login
var loginVm = avalon.define({
$id: "login",
// NOTE(review): no credential check is performed here -- the handler simply
// redirects to admin.html; presumably a placeholder. Confirm before relying on it.
loginCheck: function(){
window.location="/admin.html"
}
})
// Switch the root view-model's body to render the login template.
avalon.vmodels.root.body = "login"
})
| michaelwang/study | avalon/practice/avalon-test-ui/modules/login/login.js | JavaScript | bsd-3-clause | 339 |
/*******************************************************************************
* Caleydo - Visualization for Molecular Biology - http://caleydo.org
* Copyright (c) The Caleydo Team. All rights reserved.
* Licensed under the new BSD license, available at http://caleydo.org/license
******************************************************************************/
package org.caleydo.view.bookmark;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.media.opengl.GL2;
import javax.media.opengl.GLAutoDrawable;
import org.caleydo.core.data.datadomain.ATableBasedDataDomain;
import org.caleydo.core.data.perspective.table.TablePerspective;
import org.caleydo.core.data.selection.SelectionCommand;
import org.caleydo.core.data.selection.delta.SelectionDelta;
import org.caleydo.core.data.selection.events.SelectionCommandListener;
import org.caleydo.core.data.selection.events.SelectionUpdateListener;
import org.caleydo.core.event.data.BookmarkEvent;
import org.caleydo.core.event.data.RemoveBookmarkEvent;
import org.caleydo.core.event.data.SelectionCommandEvent;
import org.caleydo.core.event.data.SelectionUpdateEvent;
import org.caleydo.core.id.IDCategory;
import org.caleydo.core.serialize.ASerializedView;
import org.caleydo.core.util.collection.Pair;
import org.caleydo.core.view.opengl.camera.ViewFrustum;
import org.caleydo.core.view.opengl.canvas.AGLView;
import org.caleydo.core.view.opengl.canvas.ATableBasedView;
import org.caleydo.core.view.opengl.canvas.IGLCanvas;
import org.caleydo.core.view.opengl.layout.Column;
import org.caleydo.core.view.opengl.layout.LayoutManager;
import org.caleydo.core.view.opengl.mouse.GLMouseListener;
import org.caleydo.core.view.opengl.picking.Pick;
import org.caleydo.core.view.opengl.picking.PickingMode;
import org.caleydo.core.view.opengl.picking.PickingType;
import org.caleydo.core.view.opengl.util.text.CaleydoTextRenderer;
/**
* The list heat map that shows elements on the right of a view that have been selected. It is registered to special
* listeners that are triggered in such a event. Other than that it is equivalent to the {@link GLHeatMap}
*
* @author Alexander Lex
*/
public class GLBookmarkView extends ATableBasedView {
public static String VIEW_TYPE = "org.caleydo.view.bookmark";
public static String VIEW_NAME = "Bookmarks";
// private ColorMapping colorMapper;
/** A hash map that associates the Category with the container */
private HashMap<IDCategory, ABookmarkContainer> hashCategoryToBookmarkContainer;
/** A list of bookmark containers, to preserve the ordering */
private ArrayList<ABookmarkContainer> bookmarkContainers;
private BookmarkListener bookmarkListener;
private PickingIDManager pickingIDManager;
private RemoveBookmarkListener removeBookmarkListener;
/** The class responsible for rendering the template */
private LayoutManager layoutManager;
/**
 * Maps externally visible picking ids to (category, container-private id) pairs so
 * that picking events can be routed back to the owning bookmark container.
 */
class PickingIDManager {
/**
 * A hash map that hashes the picking ID of an element to the BookmarkContainer and the id internal to the
 * bookmark container
 */
private HashMap<Integer, Pair<IDCategory, Integer>> pickingIDToBookmarkContainer;
private int idCount = 0;
private PickingIDManager() {
pickingIDToBookmarkContainer = new HashMap<Integer, Pair<IDCategory, Integer>>();
}
// Issues a fresh picking id and remembers which container/private id it maps to.
public int getPickingID(ABookmarkContainer container, int privateID) {
int pickingID = pickingManager.getPickingID(uniqueID, PickingType.BOOKMARK_ELEMENT, idCount);
pickingIDToBookmarkContainer.put(idCount++, new Pair<IDCategory, Integer>(container.getCategory(),
privateID));
return pickingID;
}
private Pair<IDCategory, Integer> getPrivateID(int externalID) {
return pickingIDToBookmarkContainer.get(externalID);
}
// Clears all issued ids; NOTE(review): never called from within this class as visible here.
private void reset() {
idCount = 0;
pickingIDToBookmarkContainer = new HashMap<Integer, Pair<IDCategory, Integer>>();
}
}
/**
 * Constructor.
 */
public GLBookmarkView(IGLCanvas glCanvas, ViewFrustum viewFrustum) {
super(glCanvas, viewFrustum, VIEW_TYPE, VIEW_NAME);
bookmarkContainers = new ArrayList<ABookmarkContainer>();
hashCategoryToBookmarkContainer = new HashMap<IDCategory, ABookmarkContainer>();
pickingIDManager = new PickingIDManager();
layoutManager = new LayoutManager(viewFrustum, pixelGLConverter);
}
@Override
public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
super.reshape(drawable, x, y, width, height);
// Recompute the layout so bookmark containers track the new viewport size.
layoutManager.updateLayout();
}
@Override
public void registerEventListeners() {
super.registerEventListeners();
// Subscribes to bookmark add/remove plus selection update/command events.
bookmarkListener = new BookmarkListener();
bookmarkListener.setHandler(this);
eventPublisher.addListener(BookmarkEvent.class, bookmarkListener);
removeBookmarkListener = new RemoveBookmarkListener();
removeBookmarkListener.setHandler(this);
eventPublisher.addListener(RemoveBookmarkEvent.class, removeBookmarkListener);
selectionUpdateListener = new SelectionUpdateListener();
selectionUpdateListener.setHandler(this);
eventPublisher.addListener(SelectionUpdateEvent.class, selectionUpdateListener);
selectionCommandListener = new SelectionCommandListener();
selectionCommandListener.setHandler(this);
eventPublisher.addListener(SelectionCommandEvent.class, selectionCommandListener);
}
@Override
public void unregisterEventListeners() {
super.unregisterEventListeners();
// Mirror image of registerEventListeners(): detach and null each listener.
if (bookmarkListener != null) {
eventPublisher.removeListener(bookmarkListener);
bookmarkListener = null;
}
if (removeBookmarkListener != null) {
eventPublisher.removeListener(removeBookmarkListener);
removeBookmarkListener = null;
}
if (selectionUpdateListener != null) {
eventPublisher.removeListener(selectionUpdateListener);
selectionUpdateListener = null;
}
if (selectionCommandListener != null) {
eventPublisher.removeListener(selectionCommandListener);
selectionCommandListener = null;
}
}
@Override
public void display(GL2 gl) {
// Rebuild the cached display list only when something marked it dirty.
if (isDisplayListDirty) {
isDisplayListDirty = false;
buildDisplayList(gl, displayListIndex);
}
gl.glCallList(displayListIndex);
checkForHits(gl);
}
@Override
protected void displayLocal(GL2 gl) {
pickingManager.handlePicking(this, gl);
display(gl);
}
@Override
public void displayRemote(GL2 gl) {
display(gl);
}
/**
 * Builds a display list of graphical elements that do not have to be updated in every frame.
 *
 * @param gl
 * GL2 context.
 * @param iGLDisplayListIndex
 * Index of display list.
 */
private void buildDisplayList(final GL2 gl, int iGLDisplayListIndex) {
gl.glNewList(iGLDisplayListIndex, GL2.GL_COMPILE);
layoutManager.render(gl);
gl.glEndList();
}
@Override
protected void handlePickingEvents(PickingType pickingType, PickingMode pickingMode, int externalID, Pick pick) {
switch (pickingType) {
case BOOKMARK_ELEMENT:
// Translate the external picking id back to (category, private id) and
// forward the event to the container that owns the picked element.
Pair<IDCategory, Integer> pair = pickingIDManager.getPrivateID(externalID);
hashCategoryToBookmarkContainer.get(pair.getFirst()).handleEvents(pickingType, pickingMode,
pair.getSecond(), pick);
break;
default:
break;
}
}
/**
 * Routes a new bookmark to the container matching the event's ID category.
 *
 * @param <IDDataType>
 * @param event
 */
public <IDDataType> void handleNewBookmarkEvent(BookmarkEvent<IDDataType> event) {
// Ignore events addressed to a different data domain.
if (dataDomain.getDataDomainID() != event.getEventSpace())
return;
ABookmarkContainer container = hashCategoryToBookmarkContainer.get(event.getIDType().getIDCategory());
if (container == null)
throw new IllegalStateException("Can not handle bookmarks of type " + event.getIDType().getIDCategory());
container.handleNewBookmarkEvent(event);
layoutManager.updateLayout();
setDisplayListDirty();
}
public <IDDataType> void handleRemoveBookmarkEvent(RemoveBookmarkEvent<IDDataType> event) {
ABookmarkContainer container = hashCategoryToBookmarkContainer.get(event.getIDType().getIDCategory());
if (container == null)
throw new IllegalStateException("Can not handle bookmarks of type " + event.getIDType().getIDCategory());
container.handleRemoveBookmarkEvent(event);
layoutManager.updateLayout();
setDisplayListDirty();
}
@Override
public void init(GL2 gl) {
displayListIndex = gl.glGenLists(1);
textRenderer = new CaleydoTextRenderer(24);
}
@Override
protected void initLocal(GL2 gl) {
init(gl);
}
@Override
public void initRemote(GL2 gl, AGLView glParentView, GLMouseListener glMouseListener) {
init(gl);
}
@Override
public ASerializedView getSerializableRepresentation() {
SerializedBookmarkView serializedForm = new SerializedBookmarkView(this);
return serializedForm;
}
@Override
public void handleSelectionUpdate(SelectionDelta selectionDelta) {
// EIDCategory category = ;
// Silently ignores deltas for categories this view has no container for.
ABookmarkContainer container = hashCategoryToBookmarkContainer.get(selectionDelta.getIDType()
.getIDCategory());
if (container != null)
container.handleSelectionUpdate(selectionDelta);
setDisplayListDirty();
}
@Override
public void handleSelectionCommand(IDCategory category, SelectionCommand selectionCommand) {
ABookmarkContainer container = hashCategoryToBookmarkContainer.get(category);
if (container != null)
container.handleSelectionCommand(selectionCommand);
setDisplayListDirty();
}
CaleydoTextRenderer getMinSizeTextRenderer() {
return textRenderer;
}
PickingIDManager getBookmarkPickingIDManager() {
return pickingIDManager;
}
@Override
public void setDataDomain(ATableBasedDataDomain dataDomain) {
this.dataDomain = dataDomain;
// Build the layout column and one bookmark container per ID category
// (records first, then dimensions -- bookmarkContainers preserves this order).
Column mainColumn = new Column("baseBookmarkColumn");
mainColumn.setFrameColor(1, 0, 0, 1);
mainColumn.setYDynamic(true);
mainColumn.setBottomUp(false);
// mainColumn.setPixelGLConverter(pixelGLConverter);
layoutManager.setBaseElementLayout(mainColumn);
RecordBookmarkContainer geneContainer = new RecordBookmarkContainer(this, dataDomain.getRecordIDCategory(),
dataDomain.getRecordIDCategory().getPrimaryMappingType());
mainColumn.append(geneContainer.getLayout());
hashCategoryToBookmarkContainer.put(dataDomain.getRecordIDCategory(), geneContainer);
bookmarkContainers.add(geneContainer);
DimensionBookmarkContainer experimentContainer = new DimensionBookmarkContainer(this);
mainColumn.append(experimentContainer.getLayout());
hashCategoryToBookmarkContainer.put(dataDomain.getDimensionIDCategory(), experimentContainer);
bookmarkContainers.add(experimentContainer);
}
@Override
public List<TablePerspective> getTablePerspectives() {
// TODO Auto-generated method stub
return null;
}
@Override
protected void destroyViewSpecificContent(GL2 gl) {
// TODO Auto-generated method stub
}
}
| Caleydo/caleydo | org.caleydo.view.bookmark/src/org/caleydo/view/bookmark/GLBookmarkView.java | Java | bsd-3-clause | 10,629 |
<?php
use common\models\Device;
use yii\helpers\Html;
use kartik\grid\GridView;
use yii\widgets\Pjax;
/**
 * Pjax-wrapped Kartik GridView listing devices (name, display name, description).
 *
 * @var yii\web\View $this
 * @var yii\data\ActiveDataProvider $dataProvider
 * @var common\models\search\Device $searchModel
 *
 * NOTE(review): $title is referenced in the panel heading below but is not
 * declared in this @var list -- presumably supplied by the calling controller
 * or parent view; confirm.
 */
?>
<div class="device-index">
<?php Pjax::begin(); echo GridView::widget([
'dataProvider' => $dataProvider,
'columns' => [
'name',
'display_name',
'description',
],
'responsive'=>true,
'hover'=>true,
'condensed'=>true,
'floatHeader'=>true,
'panel' => [
'heading'=>'<h3 class="panel-title"><i class="glyphicon glyphicon-th-list"></i> '.Html::encode($title).' </h3>',
'type'=>'info',
'showFooter'=>false
],
]); Pjax::end(); ?>
</div>
| devleaks/gip | backend/modules/coreengine/views/group/_list.php | PHP | bsd-3-clause | 822 |
/**
* Copyright (c) 2008-2016, Massachusetts Institute of Technology (MIT)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.mit.ll.nics.common.rabbitmq;
import java.io.IOException;
public class RabbitFactory {
private static final String CNAME = RabbitFactory.class.getName();
// Lazily created, process-wide producer shared by all callers.
private static RabbitPubSubProducer producer = null;
/**
 * Returns the shared {@link RabbitPubSubProducer}, creating it on first use.
 *
 * NOTE(review): after the first successful call the arguments are ignored --
 * a later call with a different host/exchange silently returns the producer
 * built from the first call's settings. The lazy initialization is also not
 * synchronized, so concurrent first calls could race; confirm callers only
 * invoke this from a single thread during startup.
 *
 * @param rabbitHost      broker host name (required, see validation below)
 * @param rabbitExchange  exchange name; defaults to RabbitClient.AMQ_TOPIC when empty
 * @param rabbitUsername  user name; defaults to "guest" when empty
 * @param rabbitPassword  password; defaults to "guest" when empty
 * @throws IOException if the connection to host/exchange cannot be established
 */
public static RabbitPubSubProducer makeRabbitPubSubProducer(String rabbitHost,
String rabbitExchange, String rabbitUsername, String rabbitPassword) throws IOException {
if(producer == null){
String host = validateRabbitHostName(rabbitHost);
String exchange = validateRabbitExchange(rabbitExchange);
String username = validateRabbitUsername(rabbitUsername);
String userpwd = validateRabbitUserpwd(rabbitPassword);
try {
producer = new RabbitPubSubProducer(host, exchange, username, userpwd);
} catch (IOException e) {
// Re-wrap with connection details for easier diagnosis.
throw new IOException("Failure trying to connect to " + host + "/" +
exchange + ". " + e.getMessage());
}
}
return producer;
}
/** Rejects a null/empty host; unlike the other parameters there is no default. */
private static String validateRabbitHostName(String host) {
if (host == null || host.isEmpty()) {
throw new IllegalArgumentException("Host is not defined");
}
return host;
}
/** Falls back to the default topic exchange when none is given. */
private static String validateRabbitExchange(String exchange) {
if (exchange == null || exchange.isEmpty()) {
exchange = RabbitClient.AMQ_TOPIC;
}
return exchange;
}
/** Falls back to RabbitMQ's default "guest" user when none is given. */
private static String validateRabbitUsername(String username) {
if (username == null || username.isEmpty()) {
username = "guest";
}
return username;
}
/** Falls back to RabbitMQ's default "guest" password when none is given. */
private static String validateRabbitUserpwd(String userpwd) {
if (userpwd == null || userpwd.isEmpty()) {
userpwd = "guest";
}
return userpwd;
}
}
| hadrsystems/nics-common | rabbitmq-client/src/main/java/edu/mit/ll/nics/common/rabbitmq/RabbitFactory.java | Java | bsd-3-clause | 3,225 |
/*
* This file is a part of the open source stm32plus library.
* Copyright (c) 2011,2012,2013,2014 Andy Brown <www.andybrown.me.uk>
* Please see website for licensing terms.
*/
#include "config/stm32plus.h"
#if defined(STM32PLUS_F4)
#include "config/exti.h"
using namespace stm32plus;
// static initialiser for the hack that forces the IRQ handlers to be linked
// Explicit specialisations for EXTI line 4: per the note above, the null
// _forceLinkage pointer is a hack that forces the IRQ handler below to be
// linked; _extiInstance is the singleton the handler dispatches through.
template<> ExtiInterruptEnabler<4>::FPTR ExtiInterruptEnabler<4>::_forceLinkage=nullptr;
template<> ExtiPeripheral<EXTI_Line4> *ExtiPeripheral<EXTI_Line4>::_extiInstance=nullptr;
#if defined(USE_EXTI4_INTERRUPT)
extern "C" {
// IRQ entry point for EXTI line 4: raises the C++ event and clears the pending bit.
void __attribute__ ((interrupt("IRQ"))) EXTI4_IRQHandler(void) {
// Only dispatch when line 4 is actually pending.
if(EXTI_GetITStatus(EXTI_Line4)!=RESET) {
Exti4::_extiInstance->ExtiInterruptEventSender.raiseEvent(4);
EXTI_ClearITPendingBit(EXTI_Line4);
}
__DSB(); // prevent erroneous recall of this handler due to delayed memory write
}
}
#endif
#endif
| 0x00f/stm32plus | lib/src/exti/interrupts/f4/Exti4InterruptHandler.cpp | C++ | bsd-3-clause | 948 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 5, transform = "BoxCox", sigma = 0.0, exog_count = 0, ar_order = 12); | antoinecarme/pyaf | tests/artificial/transf_BoxCox/trend_PolyTrend/cycle_5/ar_12/test_artificial_128_BoxCox_PolyTrend_5_12_0.py | Python | bsd-3-clause | 261 |
<?php
class AnsController extends Controller
{
/**
 * Default action: renders this controller's index view.
 */
public function actionIndex()
{
$this->render('index');
}
} | Gerasin/https-github.com-ninetor-lsga | protected/controllers/AnsController.php | PHP | bsd-3-clause | 129 |
//
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using System.Globalization;
using System.Linq;
using NLog.Layouts;
namespace NLog.Config
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using JetBrains.Annotations;
using NLog.Common;
using NLog.Internal;
using NLog.Targets;
/// <summary>
/// Keeps logging configuration and provides simple API
/// to modify it.
/// </summary>
///<remarks>This class is thread-safe.<c>.ToList()</c> is used for that purpose.</remarks>
public class LoggingConfiguration
{
// Named targets, keyed case-insensitively by name.
private readonly IDictionary<string, Target> targets =
new Dictionary<string, Target>(StringComparer.OrdinalIgnoreCase);
private List<object> configItems = new List<object>();
/// <summary>
/// Variables defined in xml or in API. Names are case insensitive.
/// </summary>
private readonly Dictionary<string, SimpleLayout> variables = new Dictionary<string, SimpleLayout>(StringComparer.OrdinalIgnoreCase);
/// <summary>
/// Initializes a new instance of the <see cref="LoggingConfiguration" /> class.
/// </summary>
public LoggingConfiguration()
{
this.LoggingRules = new List<LoggingRule>();
}
/// <summary>
/// Use the old exception log handling of NLog 3.0?
/// </summary>
[Obsolete("This option will be removed in NLog 5")]
public bool ExceptionLoggingOldStyle { get; set; }
/// <summary>
/// Gets the variables defined in the configuration.
/// </summary>
public IDictionary<string, SimpleLayout> Variables
{
get
{
return variables;
}
}
/// <summary>
/// Gets a collection of named targets specified in the configuration.
/// </summary>
/// <returns>
/// A list of named targets.
/// </returns>
/// <remarks>
/// Unnamed targets (such as those wrapped by other targets) are not returned.
/// </remarks>
public ReadOnlyCollection<Target> ConfiguredNamedTargets
{
get { return new List<Target>(this.targets.Values).AsReadOnly(); }
}
/// <summary>
/// Gets the collection of file names which should be watched for changes by NLog.
/// </summary>
public virtual IEnumerable<string> FileNamesToWatch
{
get { return ArrayHelper.Empty<string>(); }
}
/// <summary>
/// Gets the collection of logging rules.
/// </summary>
public IList<LoggingRule> LoggingRules { get; private set; }
/// <summary>
/// Gets or sets the default culture info to use as <see cref="LogEventInfo.FormatProvider"/>.
/// </summary>
/// <value>
/// Specific culture info or null to use <see cref="CultureInfo.CurrentCulture"/>
/// </value>
[CanBeNull]
public CultureInfo DefaultCultureInfo { get; set; }
/// <summary>
/// Gets all targets: configuration items that are targets plus the named targets,
/// de-duplicated by name.
/// </summary>
public ReadOnlyCollection<Target> AllTargets
{
get
{
var configTargets = this.configItems.OfType<Target>();
return configTargets.Concat(targets.Values).Distinct(TargetNameComparer).ToList().AsReadOnly();
}
}
/// <summary>
/// Compare on name
/// </summary>
private static IEqualityComparer<Target> TargetNameComparer = new TargetNameEq();
/// <summary>
/// Compares targets by name.
/// NOTE(review): this comparison is case-sensitive while the targets dictionary
/// above is OrdinalIgnoreCase, so AllTargets may keep two targets whose names
/// differ only by case -- confirm whether that asymmetry is intended.
/// Also assumes non-null arguments in Equals (x/y are dereferenced directly).
/// </summary>
private class TargetNameEq : IEqualityComparer<Target>
{
public bool Equals(Target x, Target y)
{
return string.Equals(x.Name, y.Name);
}
public int GetHashCode(Target obj)
{
return (obj.Name != null ? obj.Name.GetHashCode() : 0);
}
}
/// <summary>
/// Registers the specified target object. The name of the target is read from <see cref="Target.Name"/>.
/// Delegates to the two-argument <c>AddTarget(string, Target)</c> overload.
/// </summary>
/// <param name="target">
/// The target object with a non <see langword="null"/> <see cref="Target.Name"/>
/// </param>
/// <exception cref="ArgumentNullException">when <paramref name="target"/> is <see langword="null"/></exception>
public void AddTarget([NotNull] Target target)
{
if (target == null) throw new ArgumentNullException("target");
AddTarget(target.Name, target);
}
/// <summary>
/// Registers the specified target object under a given name. A target already
/// registered under the same name (case-insensitive) is replaced.
/// </summary>
/// <param name="name">
/// Name of the target.
/// </param>
/// <param name="target">
/// The target object.
/// </param>
/// <exception cref="ArgumentException">when <paramref name="name"/> is <see langword="null"/></exception>
/// <exception cref="ArgumentNullException">when <paramref name="target"/> is <see langword="null"/></exception>
public void AddTarget(string name, Target target)
{
if (name == null)
{
throw new ArgumentException("Target name cannot be null", "name");
}
if (target == null)
{
// Previously a null target fell through to a NullReferenceException on the
// logging line below; fail fast with the argument name instead, matching
// the single-argument AddTarget overload.
throw new ArgumentNullException("target");
}
InternalLogger.Debug("Registering target {0}: {1}", name, target.GetType().FullName);
this.targets[name] = target;
}
/// <summary>
/// Looks up a target registered under <paramref name="name"/>.
/// </summary>
/// <param name="name">
/// The name of the target to be found.
/// </param>
/// <returns>
/// The matching target, or <see langword="null"/> when no target with that name is registered.
/// </returns>
public Target FindTargetByName(string name)
{
Target target;
return this.targets.TryGetValue(name, out target) ? target : null;
}
/// <summary>
/// Finds the target with the specified name and specified type.
/// </summary>
/// <param name="name">
/// The name of the target to be found.
/// </param>
/// <typeparam name="TTarget">Type of the target</typeparam>
/// <returns>
/// Found target or <see langword="null"/> when the target is not found of not of type <typeparamref name="TTarget"/>
/// </returns>
public TTarget FindTargetByName<TTarget>(string name)
where TTarget : Target
{
// `as` yields null both when the name is unknown and when the registered
// target is not a TTarget.
return FindTargetByName(name) as TTarget;
}
/// <summary>
/// Add a rule with min- and maxLevel.
/// </summary>
/// <param name="minLevel">Minimum log level needed to trigger this rule.</param>
/// <param name="maxLevel">Maximum log level needed to trigger this rule.</param>
/// <param name="targetName">Name of the target to be written when the rule matches.</param>
/// <param name="loggerNamePattern">Logger name pattern. It may include the '*' wildcard at the beginning, at the end or at both ends.</param>
/// <exception cref="NLogRuntimeException">when no target named <paramref name="targetName"/> is registered.
/// NOTE(review): the AddRuleForOneLevel(string, ...) sibling throws NLogConfigurationException
/// for the same failure -- confirm which exception type is intended.</exception>
public void AddRule(LogLevel minLevel, LogLevel maxLevel, string targetName, string loggerNamePattern = "*")
{
var target = FindTargetByName(targetName);
if (target == null)
{
throw new NLogRuntimeException("Target '{0}' not found", targetName);
}
AddRule(minLevel, maxLevel, target, loggerNamePattern);
}
/// <summary>
/// Adds a rule enabling all levels between <paramref name="minLevel"/> and
/// <paramref name="maxLevel"/> for the given target.
/// </summary>
/// <param name="minLevel">Minimum log level needed to trigger this rule.</param>
/// <param name="maxLevel">Maximum log level needed to trigger this rule.</param>
/// <param name="target">Target to be written to when the rule matches.</param>
/// <param name="loggerNamePattern">Logger name pattern. It may include the '*' wildcard at the beginning, at the end or at both ends.</param>
public void AddRule(LogLevel minLevel, LogLevel maxLevel, Target target, string loggerNamePattern = "*")
{
var rule = new LoggingRule(loggerNamePattern, minLevel, maxLevel, target);
this.LoggingRules.Add(rule);
}
/// <summary>
/// Add a rule for one loglevel.
/// </summary>
/// <param name="level">log level needed to trigger this rule. </param>
/// <param name="targetName">Name of the target to be written when the rule matches.</param>
/// <param name="loggerNamePattern">Logger name pattern. It may include the '*' wildcard at the beginning, at the end or at both ends.</param>
/// <exception cref="NLogConfigurationException">when no target named <paramref name="targetName"/> is registered.
/// NOTE(review): the AddRule/AddRuleForAllLevels siblings throw NLogRuntimeException
/// for the same failure -- confirm which exception type is intended.</exception>
public void AddRuleForOneLevel(LogLevel level, string targetName, string loggerNamePattern = "*")
{
var target = FindTargetByName(targetName);
if (target == null)
{
throw new NLogConfigurationException("Target '{0}' not found", targetName);
}
AddRuleForOneLevel(level, target, loggerNamePattern);
}
/// <summary>
/// Adds a rule that is enabled for exactly one log level.
/// </summary>
/// <param name="level">log level needed to trigger this rule. </param>
/// <param name="target">Target to be written to when the rule matches.</param>
/// <param name="loggerNamePattern">Logger name pattern. It may include the '*' wildcard at the beginning, at the end or at both ends.</param>
public void AddRuleForOneLevel(LogLevel level, Target target, string loggerNamePattern = "*")
{
var rule = new LoggingRule(loggerNamePattern, target);
rule.EnableLoggingForLevel(level);
this.LoggingRules.Add(rule);
}
/// <summary>
/// Add a rule for all log levels.
/// </summary>
/// <param name="targetName">Name of the target to be written when the rule matches.</param>
/// <param name="loggerNamePattern">Logger name pattern. It may include the '*' wildcard at the beginning, at the end or at both ends.</param>
/// <exception cref="NLogRuntimeException">when no target named <paramref name="targetName"/> is registered.</exception>
public void AddRuleForAllLevels(string targetName, string loggerNamePattern = "*")
{
var target = FindTargetByName(targetName);
if (target == null)
{
throw new NLogRuntimeException("Target '{0}' not found", targetName);
}
AddRuleForAllLevels(target, loggerNamePattern);
}
/// <summary>
/// Add a rule for alle loglevels.
/// </summary>
/// <param name="target">Target to be written to when the rule matches.</param>
/// <param name="loggerNamePattern">Logger name pattern. It may include the '*' wildcard at the beginning, at the end or at both ends.</param>
public void AddRuleForAllLevels(Target target, string loggerNamePattern = "*")
{
var loggingRule = new LoggingRule(loggerNamePattern, target);
loggingRule.EnableLoggingForLevels(LogLevel.MinLevel, LogLevel.MaxLevel);
LoggingRules.Add(loggingRule);
}
/// <summary>
/// Called by LogManager when one of the log configuration files changes.
/// </summary>
/// <returns>
/// A new instance of <see cref="LoggingConfiguration"/> that represents the updated configuration.
/// </returns>
/// <remarks>
/// The base implementation simply returns the current instance; file-based
/// subclasses are expected to override this and re-read their source.
/// </remarks>
public virtual LoggingConfiguration Reload()
{
    return this;
}
/// <summary>
/// Removes the specified named target.
/// </summary>
/// <param name="name">
/// Name of the target.
/// </param>
/// <remarks>
/// Only the name registration is removed; logging rules that already
/// reference the target are left untouched.
/// </remarks>
public void RemoveTarget(string name)
{
    this.targets.Remove(name);
}
/// <summary>
/// Installs target-specific objects on current system.
/// </summary>
/// <param name="installationContext">The installation context.</param>
/// <exception cref="ArgumentNullException"><paramref name="installationContext"/> is null.</exception>
/// <remarks>
/// Installation typically runs with administrative permissions.
/// A failing item is logged and reported to the context; remaining items are
/// still installed unless the exception must be rethrown immediately.
/// </remarks>
public void Install(InstallationContext installationContext)
{
    if (installationContext == null)
    {
        throw new ArgumentNullException("installationContext");
    }
    // All configuration items must be initialized before installation.
    this.InitializeAll();
    var configItemsList = GetInstallableItems();
    foreach (IInstallable installable in configItemsList)
    {
        installationContext.Info("Installing '{0}'", installable);
        try
        {
            installable.Install(installationContext);
            installationContext.Info("Finished installing '{0}'.", installable);
        }
        catch (Exception exception)
        {
            InternalLogger.Error(exception, "Install of '{0}' failed.", installable);
            // Fatal exceptions abort the whole install; anything else is
            // reported to the installation context and the loop continues.
            if (exception.MustBeRethrownImmediately())
            {
                throw;
            }
            installationContext.Error("Install of '{0}' failed: {1}.", installable, exception);
        }
    }
}
/// <summary>
/// Uninstalls target-specific objects from current system.
/// </summary>
/// <param name="installationContext">The installation context.</param>
/// <exception cref="ArgumentNullException"><paramref name="installationContext"/> is null.</exception>
/// <remarks>
/// Uninstallation typically runs with administrative permissions.
/// A failing item is logged and reported to the context; remaining items are
/// still uninstalled unless the exception must be rethrown immediately.
/// </remarks>
public void Uninstall(InstallationContext installationContext)
{
    if (installationContext == null)
    {
        throw new ArgumentNullException("installationContext");
    }
    // All configuration items must be initialized before uninstallation.
    this.InitializeAll();
    var configItemsList = GetInstallableItems();
    foreach (IInstallable installable in configItemsList)
    {
        installationContext.Info("Uninstalling '{0}'", installable);
        try
        {
            installable.Uninstall(installationContext);
            installationContext.Info("Finished uninstalling '{0}'.", installable);
        }
        catch (Exception exception)
        {
            InternalLogger.Error(exception, "Uninstall of '{0}' failed.", installable);
            // Fatal exceptions abort the whole uninstall; anything else is
            // reported to the installation context and the loop continues.
            if (exception.MustBeRethrownImmediately())
            {
                throw;
            }
            installationContext.Error("Uninstall of '{0}' failed: {1}.", installable, exception);
        }
    }
}
/// <summary>
/// Closes all targets and releases any unmanaged resources.
/// </summary>
/// <remarks>
/// Exceptions raised while closing an individual item are logged as warnings
/// and swallowed unless <c>MustBeRethrown</c> marks them as fatal, so one
/// broken target does not prevent the rest from closing.
/// </remarks>
internal void Close()
{
    InternalLogger.Debug("Closing logging configuration...");
    var supportsInitializesList = GetSupportsInitializes();
    foreach (ISupportsInitialize initialize in supportsInitializesList)
    {
        InternalLogger.Trace("Closing {0}", initialize);
        try
        {
            initialize.Close();
        }
        catch (Exception exception)
        {
            InternalLogger.Warn(exception, "Exception while closing.");
            if (exception.MustBeRethrown())
            {
                throw;
            }
        }
    }
    InternalLogger.Debug("Finished closing logging configuration.");
}
/// <summary>
/// Log to the internal (NLog) logger the information about the <see cref="Target"/> and <see
/// cref="LoggingRule"/> associated with this <see cref="LoggingConfiguration"/> instance.
/// </summary>
/// <remarks>
/// The information are only recorded in the internal logger if Debug level is enabled, otherwise nothing is
/// recorded.
/// </remarks>
internal void Dump()
{
    if (!InternalLogger.IsDebugEnabled)
    {
        return;
    }
    InternalLogger.Debug("--- NLog configuration dump ---");
    InternalLogger.Debug("Targets:");
    foreach (Target dumpTarget in this.targets.Values.ToList())
    {
        InternalLogger.Debug("{0}", dumpTarget);
    }
    InternalLogger.Debug("Rules:");
    foreach (LoggingRule dumpRule in this.LoggingRules.ToList())
    {
        InternalLogger.Debug("{0}", dumpRule);
    }
    InternalLogger.Debug("--- End of NLog configuration dump ---");
}
/// <summary>
/// Flushes any pending log messages on all appenders.
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation invoked when all flushes have completed.</param>
internal void FlushAllTargets(AsyncContinuation asyncContinuation)
{
    // Collect each target exactly once, preserving first-seen order.
    // A HashSet performs the membership test so de-duplication is O(n)
    // instead of the previous O(n^2) List.Contains scan; both use
    // EqualityComparer<Target>.Default, so the semantics are unchanged.
    var seenTargets = new HashSet<Target>();
    var uniqueTargets = new List<Target>();
    var loggingRules = this.LoggingRules.ToList();
    foreach (var rule in loggingRules)
    {
        var targetList = rule.Targets.ToList();
        foreach (var target in targetList)
        {
            if (seenTargets.Add(target))
            {
                uniqueTargets.Add(target);
            }
        }
    }
    AsyncHelpers.ForEachItemInParallel(uniqueTargets, asyncContinuation, (target, cont) => target.Flush(cont));
}
/// <summary>
/// Validates the configuration.
/// </summary>
/// <remarks>
/// Collects every object reachable from the logging rules and targets into
/// <c>configItems</c> and verifies that all required parameters are set.
/// </remarks>
internal void ValidateConfig()
{
    var roots = new List<object>();
    roots.AddRange(this.LoggingRules.ToList());
    roots.AddRange(this.targets.Values.ToList());
    this.configItems = ObjectGraphScanner.FindReachableObjects<object>(roots.ToArray());
    // initialize all config items starting from most nested first
    // so that whenever the container is initialized its children have already been
    InternalLogger.Info("Found {0} configuration items", this.configItems.Count);
    foreach (object configItem in this.configItems)
    {
        PropertyHelper.CheckRequiredParameters(configItem);
    }
}
/// <summary>
/// Validates the configuration and then initializes every reachable
/// configuration item that implements <c>ISupportsInitialize</c>.
/// </summary>
/// <remarks>
/// Items are initialized in reverse scan order (most nested first, per the
/// note in <c>ValidateConfig</c>). Initialization failures are swallowed
/// unless the exception is fatal or <c>LogManager.ThrowExceptions</c> is set.
/// </remarks>
internal void InitializeAll()
{
    this.ValidateConfig();
    var supportsInitializes = GetSupportsInitializes(true);
    foreach (ISupportsInitialize initialize in supportsInitializes)
    {
        InternalLogger.Trace("Initializing {0}", initialize);
        try
        {
            initialize.Initialize(this);
        }
        catch (Exception exception)
        {
            if (exception.MustBeRethrown())
            {
                throw;
            }
            if (LogManager.ThrowExceptions)
            {
                throw new NLogConfigurationException("Error during initialization of " + initialize, exception);
            }
        }
    }
}
/// <summary>
/// Ensures the configuration has been validated and all items initialized.
/// </summary>
internal void EnsureInitialized()
{
    this.InitializeAll();
}
/// <summary>
/// Returns the reachable configuration items that implement <c>IInstallable</c>.
/// </summary>
private List<IInstallable> GetInstallableItems()
{
    return this.configItems.OfType<IInstallable>().ToList();
}
/// <summary>
/// Returns the reachable configuration items that implement <c>ISupportsInitialize</c>.
/// </summary>
/// <param name="reverse">When true, the items are returned in reverse scan order.</param>
private List<ISupportsInitialize> GetSupportsInitializes(bool reverse = false)
{
    var items = this.configItems.OfType<ISupportsInitialize>();
    if (reverse)
    {
        items = items.Reverse();
    }
    return items.ToList();
}
/// <summary>
/// Copies all variables from provided dictionary into current configuration variables.
/// </summary>
/// <param name="masterVariables">Master variables dictionary</param>
internal void CopyVariables(IDictionary<string, SimpleLayout> masterVariables)
{
    foreach (var pair in masterVariables)
    {
        this.Variables[pair.Key] = pair.Value;
    }
}
}
} | littlesmilelove/NLog | src/NLog/Config/LoggingConfiguration.cs | C# | bsd-3-clause | 22,236 |
<?php
namespace common\models;
use Yii;
/**
* This is the model class for table "{{%hanzi_hyyt_add}}".
*
* @property string $id
* @property integer $page
* @property integer $num
* @property integer $type2
* @property string $tong_word2
* @property string $zhushi2
*/
class HanziHyytAdd extends \yii\db\ActiveRecord
{
    /**
     * Returns the database table backing this model.
     *
     * @return string table name using the Yii `{{%...}}` prefix placeholder
     */
    public static function tableName()
    {
        return '{{%hanzi_hyyt_add}}';
    }
    /**
     * Returns the validation rules for the model attributes:
     * page/num/type2 must be integers, tong_word2 is limited to 32
     * characters and zhushi2 to 64 characters.
     *
     * @return array validation rules in Yii rule format
     */
    public function rules()
    {
        return [
            [['page', 'num', 'type2'], 'integer'],
            [['tong_word2'], 'string', 'max' => 32],
            [['zhushi2'], 'string', 'max' => 64],
        ];
    }
    /**
     * Returns the translated display label for each attribute.
     *
     * @return array map of attribute name => translated label
     */
    public function attributeLabels()
    {
        return [
            'id' => Yii::t('common', 'ID'),
            'page' => Yii::t('common', 'Page'),
            'num' => Yii::t('common', 'Num'),
            'type2' => Yii::t('common', 'Type2'),
            'tong_word2' => Yii::t('common', 'Tong Word2'),
            'zhushi2' => Yii::t('common', 'Zhushi2'),
        ];
    }
}
| gwisdomroof/hanzi | common/models/HanziHyytAdd.php | PHP | bsd-3-clause | 1,130 |
<?php
########################################
# #
# (C) 2007 by CTXtra #
# http://www.ctxtra.de #
# #
# Version: V1.5 #
# #
########################################
// Credits page strings (German).
// Index map: 1 = page title, 2 = owner, 3 = coders, 4 = beta testers,
// 5 = "thanks to" heading, 6 = thanks body (HTML), 7 = translators.
$txt_credits[1] = 'Team und Danksagungen';
$txt_credits[2] = 'Der Chef:';
$txt_credits[3] = 'Die Coder:';
$txt_credits[4] = 'Die Betatester:';
$txt_credits[5] = 'Danke an:';
// Fixed garbled "fü SMF" -> "für SMF" in the second bullet below.
$txt_credits[6] = '- <a href="http://www.wupmedia.de/smf/index.php?action=profile;u=8" target="_blank">blutarm</a> (feedback for wbb)<br />
				- Christian Land alias <a href="http://www.simplemachines.org/community/index.php?action=profile;u=1389" target="_blank">SnowCrash</a> für SMF (german) für Erweiterung des Wormprotector<br />
				- Und natürlich auch ein riesiges Dankeschön an alle Benutzer, die uns Bugs und Fehler gemeldet haben!!';
$txt_credits[7] = 'Die Übersetzer:';
?> | exhibia/exhibia | include/addons/ctracker/languages/german/credits.php | PHP | bsd-3-clause | 933 |
/**************************************************************************************
* Copyright (c) 2013-2015, Finnish Social Science Data Archive/University of Tampere *
* *
* All rights reserved. *
* *
* Redistribution and use in source and binary forms, with or without modification, *
* are permitted provided that the following conditions are met: *
* 1. Redistributions of source code must retain the above copyright notice, this *
* list of conditions and the following disclaimer. *
* 2. Redistributions in binary form must reproduce the above copyright notice, *
* this list of conditions and the following disclaimer in the documentation *
* and/or other materials provided with the distribution. *
* 3. Neither the name of the copyright holder nor the names of its contributors *
* may be used to endorse or promote products derived from this software *
* without specific prior written permission. *
* *
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND *
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED *
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE *
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR *
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES *
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; *
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON *
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT *
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS *
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *
**************************************************************************************/
package fi.uta.fsd.metka.storage.entity;
import fi.uta.fsd.metka.storage.entity.key.RevisionKey;
import fi.uta.fsd.metka.enums.RevisionState;
import org.hibernate.annotations.*;
import org.hibernate.annotations.Cache;
import org.joda.time.LocalDateTime;
import javax.persistence.*;
import javax.persistence.Entity;
import javax.persistence.Index;
import javax.persistence.Table;
/**
 * JPA entity for one row of the REVISION table: a single revision of an
 * object, identified by the embedded {@link RevisionKey}, carrying its
 * workflow {@link RevisionState}, the revision payload as a CLOB (presumably
 * serialized document data — confirm against the writers of this column) and
 * bookkeeping columns used by the indexer.
 */
@Entity
@Table(name = "REVISION",
        indexes = {
                @Index(name = "revision_index_status", columnList = "INDEX_STATUS"),
                @Index(name = "revision_index_handled", columnList = "INDEXING_HANDLED"),
                @Index(name = "revision_index_requested", columnList = "INDEXING_REQUESTED")
        })
/*@Cache(usage = CacheConcurrencyStrategy.READ_WRITE)*/
public class RevisionEntity {
    // Composite primary key identifying this revision.
    @EmbeddedId
    private RevisionKey key;
    @Enumerated(EnumType.STRING)
    @Column(name = "STATE")
    private RevisionState state;
    // Revision payload, stored as a character large object.
    @Lob
    @Column(name = "DATA")
    @Type(type="org.hibernate.type.StringClobType")
    private String data;
    // When indexing of this revision was requested / completed.
    @Column(name = "INDEXING_REQUESTED")
    @Type(type="org.jadira.usertype.dateandtime.joda.PersistentLocalDateTime")
    private LocalDateTime indexingRequested;
    @Column(name = "INDEXING_HANDLED")
    @Type(type="org.jadira.usertype.dateandtime.joda.PersistentLocalDateTime")
    private LocalDateTime indexingHandled;
    @Column(name = "INDEX_STATUS")
    private String indexStatus;
    @Column(name = "LATEST")
    private String latest;
    public RevisionEntity() {
    }
    public RevisionEntity(RevisionKey key) {
        this.key = key;
    }
    public RevisionKey getKey() {
        return key;
    }
    public void setKey(RevisionKey key) {
        this.key = key;
    }
    public RevisionState getState() {
        return state;
    }
    public void setState(RevisionState state) {
        this.state = state;
    }
    public String getData() {
        return data;
    }
    public void setData(String data) {
        this.data = data;
    }
    public LocalDateTime getIndexingRequested() {
        return indexingRequested;
    }
    public void setIndexingRequested(LocalDateTime indexingRequested) {
        this.indexingRequested = indexingRequested;
    }
    public LocalDateTime getIndexingHandled() {
        return indexingHandled;
    }
    public void setIndexingHandled(LocalDateTime indexingHandled) {
        this.indexingHandled = indexingHandled;
    }
    public String getIndexStatus() {
        return indexStatus;
    }
    public void setIndexStatus(String indexStatus) {
        this.indexStatus = indexStatus;
    }
    public String getLatest() {
        return latest;
    }
    public void setLatest(String latest) {
        this.latest = latest;
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        RevisionEntity that = (RevisionEntity) o;
        // Identity is determined solely by the embedded key, matching hashCode().
        return key.equals(that.key);
    }
    @Override
    public int hashCode() {
        return key.hashCode();
    }
    @Override
    public String toString() {
        return "Entity[name="+this.getClass().getSimpleName()+", key="+key+"]";
    }
    public enum IndexStatus {
        INDEXED // Revision has been indexed
    }
}
| Tietoarkisto/metka | metka/src/main/java/fi/uta/fsd/metka/storage/entity/RevisionEntity.java | Java | bsd-3-clause | 5,743 |
from __future__ import absolute_import
import mock
import os
from django.conf import settings
# Absolute/normalized path of the repository's top-level "tests" directory,
# located four directory levels above this file.
TEST_ROOT = os.path.normpath(
    os.path.join(os.path.dirname(__file__), *([os.pardir] * 4 + ['tests'])))
def pytest_configure(config):
    """
    Session-wide pytest hook that bootstraps a self-contained Sentry/Django
    test environment: picks the database backend from the ``DB`` environment
    variable, overrides settings with test-friendly values (local-memory
    caches, in-memory TSDB, dummy newsletter, fixed integration credentials),
    registers test-only plugins/integrations and flushes redis before the run.
    """
    # HACK: Only needed for testing!
    os.environ.setdefault('_SENTRY_SKIP_CONFIGURATION', '1')
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')
    # override docs which are typically synchronized from an upstream server
    # to ensure tests are consistent
    os.environ.setdefault(
        'INTEGRATION_DOC_FOLDER',
        os.path.join(
            TEST_ROOT,
            'fixtures',
            'integration-docs'))
    from sentry.utils import integrationdocs
    integrationdocs.DOC_FOLDER = os.environ['INTEGRATION_DOC_FOLDER']
    if not settings.configured:
        # only configure the db if its not already done
        test_db = os.environ.get('DB', 'postgres')
        if test_db == 'mysql':
            settings.DATABASES['default'].update(
                {
                    'ENGINE': 'django.db.backends.mysql',
                    'NAME': 'sentry',
                    'USER': 'root',
                    'HOST': '127.0.0.1',
                }
            )
            # mysql requires running full migration all the time
        elif test_db == 'postgres':
            settings.DATABASES['default'].update(
                {
                    'ENGINE': 'sentry.db.postgres',
                    'USER': 'postgres',
                    'NAME': 'sentry',
                    'HOST': '127.0.0.1',
                }
            )
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
        elif test_db == 'sqlite':
            settings.DATABASES['default'].update(
                {
                    'ENGINE': 'django.db.backends.sqlite3',
                    'NAME': ':memory:',
                }
            )
        else:
            raise RuntimeError('oops, wrong database: %r' % test_db)
    settings.TEMPLATE_DEBUG = True
    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}
    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ('tests', )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False
    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
        settings.SENTRY_CACHE_OPTIONS = {}
    # This speeds up the tests considerably, pbkdf2 is by design, slow.
    settings.PASSWORD_HASHERS = [
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ]
    settings.AUTH_PASSWORD_VALIDATORS = []
    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
    middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
    settings.MIDDLEWARE_CLASSES = tuple(middleware)
    settings.SENTRY_OPTIONS['cloudflare.secret-key'] = 'cloudflare-secret-key'
    # enable draft features
    settings.SENTRY_OPTIONS['mail.enable-replies'] = True
    settings.SENTRY_ALLOW_ORIGIN = '*'
    settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
    settings.SENTRY_TSDB_OPTIONS = {}
    if settings.SENTRY_NEWSLETTER == 'sentry.newsletter.base.Newsletter':
        settings.SENTRY_NEWSLETTER = 'sentry.newsletter.dummy.DummyNewsletter'
        settings.SENTRY_NEWSLETTER_OPTIONS = {}
    settings.BROKER_BACKEND = 'memory'
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
    settings.DEBUG_VIEWS = True
    settings.SENTRY_ENCRYPTION_SCHEMES = ()
    settings.DISABLE_RAVEN = True
    settings.CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        }
    }
    if not hasattr(settings, 'SENTRY_OPTIONS'):
        settings.SENTRY_OPTIONS = {}
    # Fixed, fake credentials so integration tests never hit real services.
    settings.SENTRY_OPTIONS.update(
        {
            'redis.clusters': {
                'default': {
                    'hosts': {
                        0: {
                            'db': 9,
                        },
                    },
                },
            },
            'mail.backend': 'django.core.mail.backends.locmem.EmailBackend',
            'system.url-prefix': 'http://testserver',
            'slack.client-id': 'slack-client-id',
            'slack.client-secret': 'slack-client-secret',
            'slack.verification-token': 'slack-verification-token',
            'github-app.name': 'sentry-test-app',
            'github-app.client-id': 'github-client-id',
            'github-app.client-secret': 'github-client-secret',
            'vsts.client-id': 'vsts-client-id',
            'vsts.client-secret': 'vsts-client-secret',
        }
    )
    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch('socket.getfqdn', return_value='localhost')
    patcher.start()
    if not settings.SOUTH_TESTS_MIGRATE:
        settings.INSTALLED_APPS = tuple(i for i in settings.INSTALLED_APPS if i != 'south')
    from sentry.runner.initializer import (
        bootstrap_options, configure_structlog, initialize_receivers, fix_south,
        bind_cache_to_option_store, setup_services
    )
    bootstrap_options(settings)
    configure_structlog()
    fix_south(settings)
    bind_cache_to_option_store()
    initialize_receivers()
    setup_services()
    register_extensions()
    from sentry.utils.redis import clusters
    with clusters.get('default').all() as client:
        client.flushdb()
    # force celery registration
    from sentry.celery import app  # NOQA
    # disable DISALLOWED_IPS
    from sentry import http
    http.DISALLOWED_IPS = set()
def register_extensions():
    """
    Registers the test issue plugin, every built-in integration provider and
    a dummy repository provider so the test-suite can exercise them.
    Imports happen inside the function because Django settings must already
    be configured when these modules load.
    """
    from sentry.plugins import plugins
    from sentry.plugins.utils import TestIssuePlugin2
    plugins.register(TestIssuePlugin2)
    from sentry import integrations
    from sentry.integrations.bitbucket import BitbucketIntegrationProvider
    from sentry.integrations.example import ExampleIntegrationProvider, AliasedIntegrationProvider
    from sentry.integrations.github import GitHubIntegrationProvider
    from sentry.integrations.github_enterprise import GitHubEnterpriseIntegrationProvider
    from sentry.integrations.jira import JiraIntegrationProvider
    from sentry.integrations.slack import SlackIntegrationProvider
    from sentry.integrations.vsts import VstsIntegrationProvider
    from sentry.integrations.vsts_extension import VstsExtensionIntegrationProvider
    integrations.register(BitbucketIntegrationProvider)
    integrations.register(ExampleIntegrationProvider)
    integrations.register(AliasedIntegrationProvider)
    integrations.register(GitHubIntegrationProvider)
    integrations.register(GitHubEnterpriseIntegrationProvider)
    integrations.register(JiraIntegrationProvider)
    integrations.register(SlackIntegrationProvider)
    integrations.register(VstsIntegrationProvider)
    integrations.register(VstsExtensionIntegrationProvider)
    from sentry.plugins import bindings
    from sentry.plugins.providers.dummy import DummyRepositoryProvider
    bindings.add('repository.provider', DummyRepositoryProvider, id='dummy')
def pytest_runtest_teardown(item):
    """
    Per-test teardown hook: flushes the TSDB and redis, clears the dummy
    newsletter and any queued celery tasks, and resets per-model option
    caches so state cannot leak between tests.
    """
    from sentry import tsdb
    # TODO(dcramer): this only works if this is the correct tsdb backend
    tsdb.flush()
    # XXX(dcramer): only works with DummyNewsletter
    from sentry import newsletter
    if hasattr(newsletter.backend, 'clear'):
        newsletter.backend.clear()
    from sentry.utils.redis import clusters
    with clusters.get('default').all() as client:
        client.flushdb()
    from celery.task.control import discard_all
    discard_all()
    from sentry.models import OrganizationOption, ProjectOption, UserOption
    for model in (OrganizationOption, ProjectOption, UserOption):
        model.objects.clear_local_cache()
| ifduyue/sentry | src/sentry/utils/pytest/sentry.py | Python | bsd-3-clause | 8,317 |
from __future__ import unicode_literals
import os
import json
from functools import wraps
from datetime import datetime, date
from contextlib import contextmanager
from threading import RLock, Condition, current_thread
from collections import Sized, Iterable, Mapping, defaultdict
def is_listy(x):
    """
    Tell whether ``x`` is "listy": a sized iterable that is neither a
    mapping nor a (byte)string.
    """
    if isinstance(x, (Mapping, type(b''), type(''))):
        return False
    return isinstance(x, Sized) and isinstance(x, Iterable)


def listify(x):
    """
    Wrap ``x`` in a list: listy values are converted with ``list()``,
    anything else becomes the sole element of a new list.
    """
    if is_listy(x):
        return list(x)
    return [x]
class serializer(json.JSONEncoder):
    """
    JSONEncoder subclass for plugins to register serializers for types.
    Plugins should not need to instantiate this class directly, but
    they are expected to call serializer.register() for new data types.
    """
    _registry = {}
    _datetime_format = '%Y-%m-%d %H:%M:%S.%f'

    def default(self, o):
        """
        Serializes ``o`` using the preprocessor registered for its exact
        type, falling back to an isinstance() scan of the registry, and
        finally to the base class (which raises TypeError for
        unserializable objects).
        """
        if type(o) in self._registry:
            preprocessor = self._registry[type(o)]
        else:
            for klass, preprocessor in self._registry.items():
                if isinstance(o, klass):
                    break
            else:
                # Fix: per the JSONEncoder protocol, default() must *return*
                # the base implementation's result (the base itself raises
                # TypeError); the previous ``raise`` only worked by accident
                # because the call raised while being evaluated.
                return json.JSONEncoder.default(self, o)
        return preprocessor(o)

    @classmethod
    def register(cls, type, preprocessor):
        """
        Associates a type with a preprocessor so that RPC handlers may
        pass non-builtin JSON types. For example, Sideboard already
        does the equivalent of
        >>> serializer.register(datetime, lambda dt: dt.strftime('%Y-%m-%d %H:%M:%S.%f'))
        This method raises an exception if you try to register a
        preprocessor for a type which already has one.
        :param type: the type you are registering
        :param preprocessor: function which takes one argument which is
                             the value to serialize and returns a json-
                             serializable value
        """
        assert type not in cls._registry, '{} already has a preprocessor defined'.format(type)
        cls._registry[type] = preprocessor
# Default preprocessors: dates/datetimes serialize to fixed-format strings
# (datetime uses serializer._datetime_format) and sets serialize to sorted lists.
serializer.register(date, lambda d: d.strftime('%Y-%m-%d'))
serializer.register(datetime, lambda dt: dt.strftime(serializer._datetime_format))
serializer.register(set, lambda s: sorted(list(s)))
def cached_property(func):
    """
    Decorator for making readonly, memoized properties: the wrapped method
    runs at most once per instance and its result is stored on the instance
    under a leading-underscore attribute.
    """
    attr_name = "_" + func.__name__

    @property
    @wraps(func)
    def memoized(self, *args, **kwargs):
        if hasattr(self, attr_name):
            return getattr(self, attr_name)
        value = func(self, *args, **kwargs)
        setattr(self, attr_name, value)
        return value
    return memoized
def request_cached_property(func):
    """
    Like a property cached for the duration of a single request, with
    concurrent requests each getting their own cached value.  The value is
    stored in the sideboard threadlocal store, so each HTTP request CherryPy
    serves and each RPC request served via websocket or JSON-RPC computes
    the value once and re-uses it until the request ends.
    """
    from sideboard.lib import threadlocal
    cache_key = '{}.{}'.format(func.__module__, func.__name__)

    @property
    @wraps(func)
    def cached(self):
        result = threadlocal.get(cache_key)
        if result is None:
            result = func(self)
            threadlocal.set(cache_key, result)
        return result
    return cached
class _class_property(property):
    # property subclass whose getter fires on *class* attribute access:
    # __get__ binds the wrapped classmethod (self.fget) to ``owner`` (the
    # class itself) and calls it immediately.
    def __get__(self, cls, owner):
        return self.fget.__get__(None, owner)()


def class_property(cls):
    """
    For whatever reason, the @property decorator isn't smart enough to recognize
    classmethods and behave differently on them than on instance methods. This
    property may be used to create a class-level property, useful for singletons
    and other one-per-class properties. Class properties are read-only.
    """
    return _class_property(classmethod(cls))
def entry_point(func):
    """
    Decorator which registers a function as a command-line entry point.
    Sideboard's "sep" (Sideboard Entry Point) script deletes sys.argv[0] and
    then invokes the entry point whose name matches the new first argument,
    so for example a plugin defining

        @entry_point
        def some_action():
            print(sys.argv)

    invoked as ``sep some_action foo bar`` prints
    ``['some_action', 'foo', 'bar']``.

    :param func: a function which takes no arguments; its name becomes the
                 command name, and an exception is raised if a function with
                 the same name has already been registered as an entry point
    """
    name = func.__name__
    assert name not in _entry_points, 'An entry point named {} has already been implemented'.format(name)
    _entry_points[name] = func
    return func


_entry_points = {}
class RWGuard(object):
    """
    This utility class provides the ability to perform read/write locking, such
    that we can have any number of readers OR a single writer. We give priority
    to writers, who will get the lock before any readers.
    These locks are reentrant, meaning that the same thread can acquire a read
    or write lock multiple times, and will then need to release the lock the
    same number of times it was acquired. A thread with an acquired read lock
    cannot acquire a write lock, or vice versa. Locks can only be released by
    the threads which acquired them.
    This class is named RWGuard rather than RWLock because it is not itself a
    lock, e.g. it doesn't have an acquire method, it cannot be directly used as
    a context manager, etc.
    """
    def __init__(self):
        # Single lock guarding all bookkeeping state below; both conditions
        # share it so predicates are always checked under the same lock.
        self.lock = RLock()
        # Number of threads currently blocked waiting for the write lock.
        self.waiting_writer_count = 0
        # thread ident -> reentrant acquisition count (writer / readers).
        self.acquired_writer = defaultdict(int)
        self.acquired_readers = defaultdict(int)
        self.ready_for_reads = Condition(self.lock)
        self.ready_for_writes = Condition(self.lock)
    @property
    @contextmanager
    def read_locked(self):
        """
        Context manager which acquires a read lock on entrance and releases it
        on exit. Any number of threads may acquire a read lock.
        """
        self.acquire_for_read()
        try:
            yield
        finally:
            self.release()
    @property
    @contextmanager
    def write_locked(self):
        """
        Context manager which acquires a write lock on entrance and releases it
        on exit. Only one thread may acquire a write lock at a time.
        """
        self.acquire_for_write()
        try:
            yield
        finally:
            self.release()
    def acquire_for_read(self):
        """
        NOTE: consumers are encouraged to use the "read_locked" context manager
        instead of this method where possible.
        This method acquires the read lock for the current thread, blocking if
        necessary until there are no other threads with the write lock acquired
        or waiting for the write lock to be available.
        """
        tid = current_thread().ident
        assert tid not in self.acquired_writer, 'Threads which have already acquired a write lock may not lock for reading'
        with self.lock:
            # Block while a writer holds the lock, or while writers are
            # queued (writer priority) — unless this thread already holds a
            # read lock, in which case reentrant acquisition may proceed.
            while self.acquired_writer or (self.waiting_writer_count and tid not in self.acquired_readers):
                self.ready_for_reads.wait()
            self.acquired_readers[tid] += 1
    def acquire_for_write(self):
        """
        NOTE: consumers are encouraged to use the "write_locked" context manager
        instead of this method where possible.
        This method acquires the write lock for the current thread, blocking if
        necessary until no other threads have the write lock acquired and no
        thread has the read lock acquired.
        """
        tid = current_thread().ident
        assert tid not in self.acquired_readers, 'Threads which have already acquired a read lock may not lock for writing'
        with self.lock:
            # Block while any reader holds the lock, or while a *different*
            # thread holds the write lock (same thread may re-enter).
            while self.acquired_readers or (self.acquired_writer and tid not in self.acquired_writer):
                self.waiting_writer_count += 1
                self.ready_for_writes.wait()
                self.waiting_writer_count -= 1
            self.acquired_writer[tid] += 1
    def release(self):
        """
        Release the read or write lock held by the current thread. Since these
        locks are reentrant, this method must be called once for each time the
        lock was acquired. This method raises an exception if called by a
        thread with no read or write lock acquired.
        """
        tid = current_thread().ident
        assert tid in self.acquired_readers or tid in self.acquired_writer, 'this thread does not hold a read or write lock'
        with self.lock:
            # Both dicts are defaultdicts: this decrements whichever lock
            # type the thread actually holds, and the spurious 0 -> -1 entry
            # created in the other dict is deleted again immediately.
            for counts in [self.acquired_readers, self.acquired_writer]:
                counts[tid] -= 1
                if counts[tid] <= 0:
                    del counts[tid]
            # Writers get priority: wake one waiting writer once all readers
            # are gone; otherwise wake all readers when no writers are queued.
            wake_readers = not self.waiting_writer_count
            wake_writers = self.waiting_writer_count and not self.acquired_readers
            if wake_writers:
                with self.ready_for_writes:
                    self.ready_for_writes.notify()
            elif wake_readers:
                with self.ready_for_reads:
                    self.ready_for_reads.notify_all()
| RobRuana/sideboard | sideboard/lib/_utils.py | Python | bsd-3-clause | 9,665 |
# -*- coding: utf-8 -*-
__author__ = 'Michael Ingrisch'
__email__ = 'michael.ingrisch@gmail.com'
__version__ = '0.1.0' | michimichi/compartmentmodels | compartmentmodels/__init__.py | Python | bsd-3-clause | 119 |
package io.burt.jmespath.function;
import java.util.List;
import io.burt.jmespath.Adapter;
import io.burt.jmespath.JmesPathType;
public class EndsWithFunction extends BaseFunction {
public EndsWithFunction() {
super(
ArgumentConstraints.typeOf(JmesPathType.STRING),
ArgumentConstraints.typeOf(JmesPathType.STRING)
);
}
@Override
protected <T> T callFunction(Adapter<T> runtime, List<FunctionArgument<T>> arguments) {
T subject = arguments.get(0).value();
T suffix = arguments.get(1).value();
return runtime.createBoolean(runtime.toString(subject).endsWith(runtime.toString(suffix)));
}
}
| burtcorp/jmespath-java | jmespath-core/src/main/java/io/burt/jmespath/function/EndsWithFunction.java | Java | bsd-3-clause | 634 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 5, transform = "Anscombe", sigma = 0.0, exog_count = 20, ar_order = 12); | antoinecarme/pyaf | tests/artificial/transf_Anscombe/trend_PolyTrend/cycle_5/ar_12/test_artificial_1024_Anscombe_PolyTrend_5_12_20.py | Python | bsd-3-clause | 265 |
/*
* Copyright (c) 2010, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "platform/graphics/gpu/DrawingBuffer.h"
#include "gpu/command_buffer/client/gles2_interface.h"
#include "platform/RuntimeEnabledFeatures.h"
#include "platform/TraceEvent.h"
#include "platform/graphics/GraphicsLayer.h"
#include "platform/graphics/ImageBuffer.h"
#include "platform/graphics/gpu/Extensions3DUtil.h"
#include "public/platform/Platform.h"
#include "public/platform/WebCompositorSupport.h"
#include "public/platform/WebExternalBitmap.h"
#include "public/platform/WebExternalTextureLayer.h"
#include "public/platform/WebGraphicsContext3D.h"
#include "public/platform/WebGraphicsContext3DProvider.h"
#include "wtf/ArrayBufferContents.h"
#include "wtf/CheckedNumeric.h"
#include <algorithm>
namespace blink {
namespace {
const float s_resourceAdjustedRatio = 0.5;
// RAII helper: switches the active texture unit to GL_TEXTURE0 for the
// lifetime of the scope, then restores both the texture binding of unit 0
// and the previously active texture unit on destruction.
class ScopedTextureUnit0BindingRestorer {
    STACK_ALLOCATED();
    WTF_MAKE_NONCOPYABLE(ScopedTextureUnit0BindingRestorer);
public:
    // |activeTextureUnit| and |textureUnitZeroId| are the caller's saved GL
    // state to be restored when this object goes out of scope.
    ScopedTextureUnit0BindingRestorer(gpu::gles2::GLES2Interface* gl, GLenum activeTextureUnit, GLuint textureUnitZeroId)
        : m_gl(gl)
        , m_oldActiveTextureUnit(activeTextureUnit)
        , m_oldTextureUnitZeroId(textureUnitZeroId)
    {
        m_gl->ActiveTexture(GL_TEXTURE0);
    }
    ~ScopedTextureUnit0BindingRestorer()
    {
        // Restore the 2D texture binding of unit 0 first, then the
        // previously active unit.
        m_gl->BindTexture(GL_TEXTURE_2D, m_oldTextureUnitZeroId);
        m_gl->ActiveTexture(m_oldActiveTextureUnit);
    }
private:
    gpu::gles2::GLES2Interface* m_gl; // not owned
    GLenum m_oldActiveTextureUnit;
    GLuint m_oldTextureUnitZeroId;
};
static bool shouldFailDrawingBufferCreationForTesting = false;
} // namespace
// Factory: queries extension support, constructs a DrawingBuffer and
// initializes its GL resources. Returns nullptr on any failure (testing
// override, lost context, or resource allocation failure).
PassRefPtr<DrawingBuffer> DrawingBuffer::create(PassOwnPtr<WebGraphicsContext3DProvider> contextProvider, const IntSize& size, bool premultipliedAlpha, bool wantAlphaChannel, bool wantDepthBuffer, bool wantStencilBuffer, bool wantAntialiasing, PreserveDrawingBuffer preserve)
{
    ASSERT(contextProvider);
    if (shouldFailDrawingBufferCreationForTesting) {
        // One-shot failure injection used by tests.
        shouldFailDrawingBufferCreationForTesting = false;
        return nullptr;
    }
    OwnPtr<Extensions3DUtil> extensionsUtil = Extensions3DUtil::create(contextProvider->contextGL());
    if (!extensionsUtil->isValid()) {
        // This might be the first time we notice that the GL context is lost.
        return nullptr;
    }
    ASSERT(extensionsUtil->supportsExtension("GL_OES_packed_depth_stencil"));
    extensionsUtil->ensureExtensionEnabled("GL_OES_packed_depth_stencil");
    // Antialiasing needs one of the multisample extensions plus rgb8_rgba8
    // renderbuffer formats.
    bool multisampleSupported = wantAntialiasing
        && (extensionsUtil->supportsExtension("GL_CHROMIUM_framebuffer_multisample")
            || extensionsUtil->supportsExtension("GL_EXT_multisampled_render_to_texture"))
        && extensionsUtil->supportsExtension("GL_OES_rgb8_rgba8");
    if (multisampleSupported) {
        extensionsUtil->ensureExtensionEnabled("GL_OES_rgb8_rgba8");
        if (extensionsUtil->supportsExtension("GL_CHROMIUM_framebuffer_multisample"))
            extensionsUtil->ensureExtensionEnabled("GL_CHROMIUM_framebuffer_multisample");
        else
            extensionsUtil->ensureExtensionEnabled("GL_EXT_multisampled_render_to_texture");
    }
    bool discardFramebufferSupported = extensionsUtil->supportsExtension("GL_EXT_discard_framebuffer");
    if (discardFramebufferSupported)
        extensionsUtil->ensureExtensionEnabled("GL_EXT_discard_framebuffer");
    RefPtr<DrawingBuffer> drawingBuffer = adoptRef(new DrawingBuffer(std::move(contextProvider), extensionsUtil.release(), discardFramebufferSupported, wantAlphaChannel, premultipliedAlpha, preserve));
    if (!drawingBuffer->initialize(size, wantDepthBuffer, wantStencilBuffer, multisampleSupported)) {
        drawingBuffer->beginDestruction();
        // Use nullptr for consistency with the other failure paths above
        // (PassRefPtr<DrawingBuffer>() and nullptr are equivalent here).
        return nullptr;
    }
    return drawingBuffer.release();
}
// Testing hook: makes the next call to DrawingBuffer::create() fail once.
void DrawingBuffer::forceNextDrawingBufferCreationToFail()
{
    shouldFailDrawingBufferCreationForTesting = true;
}
// Constructor only records configuration and takes ownership of the context
// provider; GL resource allocation happens later in initialize().
DrawingBuffer::DrawingBuffer(
    PassOwnPtr<WebGraphicsContext3DProvider> contextProvider,
    PassOwnPtr<Extensions3DUtil> extensionsUtil,
    bool discardFramebufferSupported,
    bool wantAlphaChannel,
    bool premultipliedAlpha,
    PreserveDrawingBuffer preserve)
    : m_preserveDrawingBuffer(preserve)
    , m_scissorEnabled(false)
    , m_texture2DBinding(0)
    , m_drawFramebufferBinding(0)
    , m_readFramebufferBinding(0)
    , m_activeTextureUnit(GL_TEXTURE0)
    , m_contextProvider(std::move(contextProvider))
    , m_context(m_contextProvider->context3d())
    , m_gl(m_contextProvider->contextGL())
    , m_extensionsUtil(std::move(extensionsUtil))
    , m_size(-1, -1) // sentinel: no buffer allocated yet
    , m_discardFramebufferSupported(discardFramebufferSupported)
    , m_wantAlphaChannel(wantAlphaChannel)
    , m_premultipliedAlpha(premultipliedAlpha)
    , m_hasImplicitStencilBuffer(false)
    , m_fbo(0)
    , m_depthStencilBuffer(0)
    , m_multisampleFBO(0)
    , m_multisampleColorBuffer(0)
    , m_contentsChanged(true)
    , m_contentsChangeCommitted(false)
    , m_bufferClearNeeded(false)
    , m_antiAliasingMode(None)
    , m_maxTextureSize(0)
    , m_sampleCount(0)
    , m_packAlignment(4)
    , m_destructionInProgress(false)
    , m_isHidden(false)
    , m_filterQuality(kLow_SkFilterQuality)
{
    // Used by browser tests to detect the use of a DrawingBuffer.
    TRACE_EVENT_INSTANT0("test_gpu", "DrawingBufferCreation", TRACE_EVENT_SCOPE_GLOBAL);
}
// Destructor requires beginDestruction() to have run first: all mailboxes
// must already be deleted and destruction must be flagged.
DrawingBuffer::~DrawingBuffer()
{
    ASSERT(m_destructionInProgress);
    ASSERT(m_textureMailboxes.isEmpty());
    m_layer.clear();
    m_contextProvider.clear();
}
// Marks the back buffer dirty so the next prepareMailbox()/commit() will
// pick up the new contents.
void DrawingBuffer::markContentsChanged()
{
    m_contentsChanged = true;
    m_contentsChangeCommitted = false;
}
// Returns whether the buffer must be cleared before the next draw.
bool DrawingBuffer::bufferClearNeeded() const
{
    return m_bufferClearNeeded;
}
// Records the clear-needed flag. Only meaningful in Discard mode; in
// preserve mode the flag must never be set.
void DrawingBuffer::setBufferClearNeeded(bool flag)
{
    if (m_preserveDrawingBuffer == Discard) {
        m_bufferClearNeeded = flag;
    } else {
        ASSERT(!m_bufferClearNeeded);
    }
}
// Accessor for the (non-owned) WebGraphicsContext3D.
WebGraphicsContext3D* DrawingBuffer::context()
{
    return m_context;
}
// Accessor for the (non-owned) GLES2 command interface.
gpu::gles2::GLES2Interface* DrawingBuffer::contextGL()
{
    return m_gl;
}
// Accessor for the owned context provider (caller must not take ownership).
WebGraphicsContext3DProvider* DrawingBuffer::contextProvider()
{
    return m_contextProvider.get();
}
// Updates visibility; when becoming hidden, recycled mailboxes are freed to
// release GPU memory.
void DrawingBuffer::setIsHidden(bool hidden)
{
    if (m_isHidden != hidden) {
        m_isHidden = hidden;
        if (hidden)
            freeRecycledMailboxes();
    }
}
// Stores the new filter quality and mirrors it on the compositor layer
// (nearest-neighbor sampling when quality is kNone_SkFilterQuality).
void DrawingBuffer::setFilterQuality(SkFilterQuality filterQuality)
{
    if (m_filterQuality == filterQuality)
        return;
    m_filterQuality = filterQuality;
    if (m_layer)
        m_layer->setNearestNeighbor(filterQuality == kNone_SkFilterQuality);
}
// Deletes every mailbox waiting in the recycle queue.
void DrawingBuffer::freeRecycledMailboxes()
{
    // Note: no separate isEmpty() pre-check is needed — the loop condition
    // already handles the empty-queue case.
    while (!m_recycledMailboxQueue.isEmpty())
        deleteMailbox(m_recycledMailboxQueue.takeLast());
}
// Produces a mailbox for the compositor containing the current contents.
// Returns false when there is nothing new to show (no change, or destruction
// in progress). On success fills |outMailbox| and, if |bitmap| is non-null,
// also reads the pixels back into it (software path).
bool DrawingBuffer::prepareMailbox(WebExternalTextureMailbox* outMailbox, WebExternalBitmap* bitmap)
{
    if (m_destructionInProgress) {
        // It can be hit in the following sequence.
        // 1. WebGL draws something.
        // 2. The compositor begins the frame.
        // 3. Javascript makes a context lost using WEBGL_lose_context extension.
        // 4. Here.
        return false;
    }
    ASSERT(!m_isHidden);
    if (!m_contentsChanged)
        return false;
    if (m_newMailboxCallback)
        (*m_newMailboxCallback)();
    // Resolve the multisampled buffer into m_colorBuffer texture.
    if (m_antiAliasingMode != None)
        commit();
    if (bitmap) {
        // Software compositing path: copy the framebuffer into the supplied
        // bitmap, premultiplying alpha if the context is non-premultiplied.
        bitmap->setSize(size());
        unsigned char* pixels = bitmap->pixels();
        bool needPremultiply = m_wantAlphaChannel && !m_premultipliedAlpha;
        WebGLImageConversion::AlphaOp op = needPremultiply ? WebGLImageConversion::AlphaDoPremultiply : WebGLImageConversion::AlphaDoNothing;
        if (pixels)
            readBackFramebuffer(pixels, size().width(), size().height(), ReadbackSkia, op);
    }
    // We must restore the texture binding since creating new textures,
    // consuming and producing mailboxes changes it.
    ScopedTextureUnit0BindingRestorer restorer(m_gl, m_activeTextureUnit, m_texture2DBinding);
    // First try to recycle an old buffer.
    RefPtr<MailboxInfo> frontColorBufferMailbox = recycledMailbox();
    // No buffer available to recycle, create a new one.
    if (!frontColorBufferMailbox)
        frontColorBufferMailbox = createNewMailbox(createTextureAndAllocateMemory(m_size));
    if (m_preserveDrawingBuffer == Discard) {
        // Discard mode: hand the back buffer to the compositor and make the
        // recycled texture the new back buffer (no copy).
        std::swap(frontColorBufferMailbox->textureInfo, m_colorBuffer);
        // It appears safe to overwrite the context's framebuffer binding in the Discard case since there will always be a
        // WebGLRenderingContext::clearIfComposited() call made before the next draw call which restores the framebuffer binding.
        // If this stops being true at some point, we should track the current framebuffer binding in the DrawingBuffer and restore
        // it after attaching the new back buffer here.
        m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_fbo);
        attachColorBufferToCurrentFBO();
        if (m_discardFramebufferSupported) {
            // Explicitly discard framebuffer to save GPU memory bandwidth for tile-based GPU arch.
            const GLenum attachments[3] = { GL_COLOR_ATTACHMENT0, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT};
            m_gl->DiscardFramebufferEXT(GL_FRAMEBUFFER, 3, attachments);
        }
    } else {
        // Preserve mode: copy the back buffer into the mailbox texture.
        m_gl->CopyTextureCHROMIUM(m_colorBuffer.textureId, frontColorBufferMailbox->textureInfo.textureId, frontColorBufferMailbox->textureInfo.parameters.internalColorFormat, GL_UNSIGNED_BYTE, GL_FALSE, GL_FALSE, GL_FALSE);
    }
    restoreFramebufferBindings();
    m_contentsChanged = false;
    // Publish the texture into the mailbox and fence it so the consumer can
    // wait for the producing commands to complete.
    m_gl->ProduceTextureDirectCHROMIUM(frontColorBufferMailbox->textureInfo.textureId, frontColorBufferMailbox->textureInfo.parameters.target, frontColorBufferMailbox->mailbox.name);
    const GLuint64 fenceSync = m_gl->InsertFenceSyncCHROMIUM();
    m_gl->Flush();
    m_gl->GenSyncTokenCHROMIUM(fenceSync, frontColorBufferMailbox->mailbox.syncToken);
    frontColorBufferMailbox->mailbox.validSyncToken = true;
    frontColorBufferMailbox->mailbox.allowOverlay = frontColorBufferMailbox->textureInfo.imageId != 0;
    frontColorBufferMailbox->mailbox.textureTarget = frontColorBufferMailbox->textureInfo.parameters.target;
    frontColorBufferMailbox->mailbox.textureSize = WebSize(m_size.width(), m_size.height());
    setBufferClearNeeded(true);
    // set m_parentDrawingBuffer to make sure 'this' stays alive as long as it has live mailboxes
    ASSERT(!frontColorBufferMailbox->m_parentDrawingBuffer);
    frontColorBufferMailbox->m_parentDrawingBuffer = this;
    *outMailbox = frontColorBufferMailbox->mailbox;
    m_frontColorBuffer = { frontColorBufferMailbox->textureInfo, frontColorBufferMailbox->mailbox };
    return true;
}
// Called when the compositor returns a mailbox. Recycles it for reuse unless
// the buffer is being destroyed, the context is lost, the resource was lost,
// or we are hidden — in those cases the mailbox is deleted immediately.
void DrawingBuffer::mailboxReleased(const WebExternalTextureMailbox& mailbox, bool lostResource)
{
    if (m_destructionInProgress || m_gl->GetGraphicsResetStatusKHR() != GL_NO_ERROR || lostResource || m_isHidden) {
        mailboxReleasedWithoutRecycling(mailbox);
        return;
    }
    for (size_t i = 0; i < m_textureMailboxes.size(); i++) {
        RefPtr<MailboxInfo> mailboxInfo = m_textureMailboxes[i];
        if (nameEquals(mailboxInfo->mailbox, mailbox)) {
            // Capture the consumer's sync token so we can wait on it before
            // reusing the texture.
            memcpy(mailboxInfo->mailbox.syncToken, mailbox.syncToken,
                sizeof(mailboxInfo->mailbox.syncToken));
            mailboxInfo->mailbox.validSyncToken = mailbox.validSyncToken;
            ASSERT(mailboxInfo->m_parentDrawingBuffer.get() == this);
            mailboxInfo->m_parentDrawingBuffer.clear();
            m_recycledMailboxQueue.prepend(mailboxInfo->mailbox);
            return;
        }
    }
    // The returned mailbox must always be one we handed out.
    ASSERT_NOT_REACHED();
}
// Texture parameters used for CHROMIUM_image-backed textures. Only OSX needs
// a special (rectangle-target) configuration; elsewhere the defaults apply.
DrawingBuffer::TextureParameters DrawingBuffer::chromiumImageTextureParameters()
{
#if OS(MACOSX)
    // A CHROMIUM_image backed texture requires a specialized set of parameters
    // on OSX.
    TextureParameters parameters;
    parameters.target = GC3D_TEXTURE_RECTANGLE_ARB;
    parameters.internalColorFormat = GL_RGBA;
    parameters.internalRenderbufferFormat = GL_RGBA8_OES;
    parameters.colorFormat = GL_RGBA;
    return parameters;
#else
    return defaultTextureParameters();
#endif
}
// Default GL_TEXTURE_2D parameters; RGBA formats when an alpha channel was
// requested, RGB otherwise.
DrawingBuffer::TextureParameters DrawingBuffer::defaultTextureParameters()
{
    TextureParameters parameters;
    parameters.target = GL_TEXTURE_2D;
    if (m_wantAlphaChannel) {
        parameters.internalColorFormat = GL_RGBA;
        parameters.colorFormat = GL_RGBA;
        parameters.internalRenderbufferFormat = GL_RGBA8_OES;
    } else {
        parameters.internalColorFormat = GL_RGB;
        parameters.colorFormat = GL_RGB;
        parameters.internalRenderbufferFormat = GL_RGB8_OES;
    }
    return parameters;
}
// Deletes a returned mailbox immediately instead of recycling it.
void DrawingBuffer::mailboxReleasedWithoutRecycling(const WebExternalTextureMailbox& mailbox)
{
    ASSERT(m_textureMailboxes.size());
    // Ensure not to call the destructor until deleteMailbox() is completed.
    RefPtr<DrawingBuffer> self = this;
    deleteMailbox(mailbox);
}
// Pops the most recently recycled mailbox (deleting any older ones so at
// most one stays recycled), waits on its sync token, resizes its texture to
// the current size if needed, and returns its MailboxInfo. Returns null when
// nothing is available.
PassRefPtr<DrawingBuffer::MailboxInfo> DrawingBuffer::recycledMailbox()
{
    if (m_recycledMailboxQueue.isEmpty())
        return PassRefPtr<MailboxInfo>();
    WebExternalTextureMailbox mailbox;
    while (!m_recycledMailboxQueue.isEmpty()) {
        mailbox = m_recycledMailboxQueue.takeLast();
        // Never have more than one mailbox in the released state.
        if (!m_recycledMailboxQueue.isEmpty())
            deleteMailbox(mailbox);
    }
    // Look up the MailboxInfo corresponding to the surviving mailbox.
    RefPtr<MailboxInfo> mailboxInfo;
    for (size_t i = 0; i < m_textureMailboxes.size(); i++) {
        if (nameEquals(m_textureMailboxes[i]->mailbox, mailbox)) {
            mailboxInfo = m_textureMailboxes[i];
            break;
        }
    }
    ASSERT(mailboxInfo);
    // Wait for the compositor to be done with the texture before reuse.
    if (mailboxInfo->mailbox.validSyncToken) {
        m_gl->WaitSyncTokenCHROMIUM(mailboxInfo->mailbox.syncToken);
        mailboxInfo->mailbox.validSyncToken = false;
    }
    if (mailboxInfo->size != m_size) {
        resizeTextureMemory(&mailboxInfo->textureInfo, m_size);
        mailboxInfo->size = m_size;
    }
    return mailboxInfo.release();
}
// Wraps |info| in a fresh MailboxInfo with a newly generated mailbox name
// and registers it in m_textureMailboxes.
PassRefPtr<DrawingBuffer::MailboxInfo> DrawingBuffer::createNewMailbox(const TextureInfo& info)
{
    RefPtr<MailboxInfo> returnMailbox = adoptRef(new MailboxInfo());
    m_gl->GenMailboxCHROMIUM(returnMailbox->mailbox.name);
    returnMailbox->textureInfo = info;
    returnMailbox->size = m_size;
    m_textureMailboxes.append(returnMailbox);
    return returnMailbox.release();
}
// Destroys the texture (and any backing CHROMIUM image) associated with
// |mailbox| and removes its record. The mailbox must be one we created.
void DrawingBuffer::deleteMailbox(const WebExternalTextureMailbox& mailbox)
{
    for (size_t i = 0; i < m_textureMailboxes.size(); i++) {
        if (nameEquals(m_textureMailboxes[i]->mailbox, mailbox)) {
            // Wait for the consumer before deleting the texture it may still
            // be reading.
            if (mailbox.validSyncToken)
                m_gl->WaitSyncTokenCHROMIUM(mailbox.syncToken);
            deleteChromiumImageForTexture(&m_textureMailboxes[i]->textureInfo);
            m_gl->DeleteTextures(1, &m_textureMailboxes[i]->textureInfo.textureId);
            m_textureMailboxes.remove(i);
            return;
        }
    }
    ASSERT_NOT_REACHED();
}
// Allocates GL resources for the buffer: chooses the antialiasing mode,
// creates the framebuffer(s) and performs the initial reset() to |size|.
// Returns false if the context is lost or allocation fails.
bool DrawingBuffer::initialize(const IntSize& size, bool wantDepthBuffer, bool wantStencilBuffer, bool useMultisampling)
{
    if (m_gl->GetGraphicsResetStatusKHR() != GL_NO_ERROR) {
        // Need to try to restore the context again later.
        return false;
    }
    m_gl->GetIntegerv(GL_MAX_TEXTURE_SIZE, &m_maxTextureSize);
    int maxSampleCount = 0;
    m_antiAliasingMode = None;
    if (useMultisampling) {
        // Prefer implicit-resolve or screen-space AA when the corresponding
        // extensions exist; fall back to explicit MSAA resolve otherwise.
        m_gl->GetIntegerv(GL_MAX_SAMPLES_ANGLE, &maxSampleCount);
        m_antiAliasingMode = MSAAExplicitResolve;
        if (m_extensionsUtil->supportsExtension("GL_EXT_multisampled_render_to_texture")) {
            m_antiAliasingMode = MSAAImplicitResolve;
        } else if (m_extensionsUtil->supportsExtension("GL_CHROMIUM_screen_space_antialiasing")) {
            m_antiAliasingMode = ScreenSpaceAntialiasing;
        }
    }
    // Cap at 4x MSAA.
    m_sampleCount = std::min(4, maxSampleCount);
    m_gl->GenFramebuffers(1, &m_fbo);
    m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_fbo);
    createSecondaryBuffers();
    if (!reset(size, wantDepthBuffer || wantStencilBuffer))
        return false;
    if (m_depthStencilBuffer) {
        DCHECK(wantDepthBuffer || wantStencilBuffer);
        // A packed depth-stencil buffer was allocated even though only depth
        // was requested: the stencil part is "implicit".
        m_hasImplicitStencilBuffer = !wantStencilBuffer;
    }
    if (m_gl->GetGraphicsResetStatusKHR() != GL_NO_ERROR) {
        // It's possible that the drawing buffer allocation provokes a context loss, so check again just in case. http://crbug.com/512302
        return false;
    }
    return true;
}
// Copies the front or back buffer into |texture| owned by another context
// |gl|, transferring the source texture through a mailbox because the two
// contexts may be in different share groups. Handles (un)premultiplication
// and vertical flip via CopyTextureCHROMIUM. Returns false if the copy
// cannot be performed.
bool DrawingBuffer::copyToPlatformTexture(WebGraphicsContext3D* context, gpu::gles2::GLES2Interface* gl, GLuint texture, GLenum internalFormat,
    GLenum destType, GLint level, bool premultiplyAlpha, bool flipY, SourceDrawingBuffer sourceBuffer)
{
    if (m_contentsChanged) {
        // Make sure pending rendering is resolved and flushed before copying.
        if (m_antiAliasingMode != None) {
            commit();
            restoreFramebufferBindings();
        }
        m_gl->Flush();
    }
    // Assume that the destination target is GL_TEXTURE_2D.
    if (!Extensions3DUtil::canUseCopyTextureCHROMIUM(GL_TEXTURE_2D, internalFormat, destType, level))
        return false;
    // Contexts may be in a different share group. We must transfer the texture through a mailbox first
    WebExternalTextureMailbox mailbox;
    GLint textureId = 0;
    GLenum target = 0;
    if (sourceBuffer == FrontBuffer && m_frontColorBuffer.texInfo.textureId) {
        // The front buffer already has a mailbox from prepareMailbox().
        textureId = m_frontColorBuffer.texInfo.textureId;
        mailbox = m_frontColorBuffer.mailbox;
        target = m_frontColorBuffer.texInfo.parameters.target;
    } else {
        // Publish the back buffer into a fresh mailbox with a sync token.
        textureId = m_colorBuffer.textureId;
        target = m_colorBuffer.parameters.target;
        m_gl->GenMailboxCHROMIUM(mailbox.name);
        m_gl->ProduceTextureDirectCHROMIUM(textureId, target, mailbox.name);
        const GLuint64 fenceSync = m_gl->InsertFenceSyncCHROMIUM();
        m_gl->Flush();
        m_gl->GenSyncTokenCHROMIUM(fenceSync, mailbox.syncToken);
        mailbox.validSyncToken = true;
    }
    if (mailbox.validSyncToken)
        gl->WaitSyncTokenCHROMIUM(mailbox.syncToken);
    GLuint sourceTexture = gl->CreateAndConsumeTextureCHROMIUM(target, mailbox.name);
    // Decide whether alpha must be (un)premultiplied during the copy, based
    // on the source's alpha mode vs. the requested destination mode.
    GLboolean unpackPremultiplyAlphaNeeded = GL_FALSE;
    GLboolean unpackUnpremultiplyAlphaNeeded = GL_FALSE;
    if (m_wantAlphaChannel && m_premultipliedAlpha && !premultiplyAlpha)
        unpackUnpremultiplyAlphaNeeded = GL_TRUE;
    else if (m_wantAlphaChannel && !m_premultipliedAlpha && premultiplyAlpha)
        unpackPremultiplyAlphaNeeded = GL_TRUE;
    gl->CopyTextureCHROMIUM(sourceTexture, texture, internalFormat, destType, flipY, unpackPremultiplyAlphaNeeded, unpackUnpremultiplyAlphaNeeded);
    gl->DeleteTextures(1, &sourceTexture);
    // Fence the destination context's work so our context doesn't reuse the
    // source texture prematurely.
    const GLuint64 fenceSync = gl->InsertFenceSyncCHROMIUM();
    gl->Flush();
    GLbyte syncToken[24];
    gl->GenSyncTokenCHROMIUM(fenceSync, syncToken);
    m_gl->WaitSyncTokenCHROMIUM(syncToken);
    return true;
}
// Returns the non-multisampled framebuffer object id.
GLuint DrawingBuffer::framebuffer() const
{
    return m_fbo;
}
// Lazily creates and returns the compositor layer that displays this
// buffer's contents.
WebLayer* DrawingBuffer::platformLayer()
{
    if (!m_layer) {
        m_layer = adoptPtr(Platform::current()->compositorSupport()->createExternalTextureLayer(this));
        // Mirror the buffer's alpha configuration on the layer.
        m_layer->setOpaque(!m_wantAlphaChannel);
        m_layer->setBlendBackgroundColor(m_wantAlphaChannel);
        m_layer->setPremultipliedAlpha(m_premultipliedAlpha);
        m_layer->setNearestNeighbor(m_filterQuality == kNone_SkFilterQuality);
        GraphicsLayer::registerContentsLayer(m_layer->layer());
    }
    return m_layer->layer();
}
// Detaches the texture from the compositor layer and flushes the context.
void DrawingBuffer::clearPlatformLayer()
{
    if (m_layer)
        m_layer->clearTexture();
    m_gl->Flush();
}
// Releases all GL resources (framebuffers, renderbuffers, textures, queued
// mailboxes) ahead of destruction. Must be called exactly once before the
// destructor runs; mailboxes still held by the compositor are deleted later
// as they are returned.
void DrawingBuffer::beginDestruction()
{
    ASSERT(!m_destructionInProgress);
    m_destructionInProgress = true;
    clearPlatformLayer();
    while (!m_recycledMailboxQueue.isEmpty())
        deleteMailbox(m_recycledMailboxQueue.takeLast());
    if (m_multisampleFBO)
        m_gl->DeleteFramebuffers(1, &m_multisampleFBO);
    if (m_fbo)
        m_gl->DeleteFramebuffers(1, &m_fbo);
    if (m_multisampleColorBuffer)
        m_gl->DeleteRenderbuffers(1, &m_multisampleColorBuffer);
    if (m_depthStencilBuffer)
        m_gl->DeleteRenderbuffers(1, &m_depthStencilBuffer);
    if (m_colorBuffer.textureId) {
        deleteChromiumImageForTexture(&m_colorBuffer);
        m_gl->DeleteTextures(1, &m_colorBuffer.textureId);
    }
    // Reset bookkeeping so stale ids are never reused.
    setSize(IntSize());
    m_colorBuffer = TextureInfo();
    m_frontColorBuffer = FrontBufferInfo();
    m_multisampleColorBuffer = 0;
    m_depthStencilBuffer = 0;
    m_multisampleFBO = 0;
    m_fbo = 0;
    if (m_layer)
        GraphicsLayer::unregisterContentsLayer(m_layer->layer());
}
// Creates a color texture for |parameters.target| with linear filtering and
// clamp-to-edge wrapping; storage is allocated separately by the caller.
// Leaves the new texture bound to |parameters.target|.
GLuint DrawingBuffer::createColorTexture(const TextureParameters& parameters)
{
    GLuint offscreenColorTexture;
    m_gl->GenTextures(1, &offscreenColorTexture);
    m_gl->BindTexture(parameters.target, offscreenColorTexture);
    m_gl->TexParameteri(parameters.target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    m_gl->TexParameteri(parameters.target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    m_gl->TexParameteri(parameters.target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    m_gl->TexParameteri(parameters.target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    return offscreenColorTexture;
}
// Creates the multisample FBO and its color renderbuffer, needed only for
// the explicit-resolve MSAA mode. Leaves the multisample FBO bound.
void DrawingBuffer::createSecondaryBuffers()
{
    // create a multisample FBO
    if (m_antiAliasingMode == MSAAExplicitResolve) {
        m_gl->GenFramebuffers(1, &m_multisampleFBO);
        m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_multisampleFBO);
        m_gl->GenRenderbuffers(1, &m_multisampleColorBuffer);
    }
}
// (Re)attaches storage to the non-multisampled framebuffer at |size| and
// reports whether the resulting FBO is complete.
bool DrawingBuffer::resizeFramebuffer(const IntSize& size, bool wantDepthOrStencilBuffer)
{
    m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_fbo);
    // In explicit-resolve MSAA mode the depth/stencil buffer lives on the
    // multisample FBO instead.
    if (m_antiAliasingMode != MSAAExplicitResolve && wantDepthOrStencilBuffer)
        resizeDepthStencil(size);
    return m_gl->CheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE;
}
// Resizes the multisample framebuffer's renderbuffers (explicit-resolve mode
// only) and verifies completeness. Returns false on allocation failure or an
// incomplete framebuffer; trivially true in other antialiasing modes.
bool DrawingBuffer::resizeMultisampleFramebuffer(const IntSize& size, bool wantDepthOrStencilBuffer)
{
    if (m_antiAliasingMode == MSAAExplicitResolve) {
        m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_multisampleFBO);
        m_gl->BindRenderbuffer(GL_RENDERBUFFER, m_multisampleColorBuffer);
        m_gl->RenderbufferStorageMultisampleCHROMIUM(GL_RENDERBUFFER, m_sampleCount, m_colorBuffer.parameters.internalRenderbufferFormat, size.width(), size.height());
        // Multisampled storage can legitimately fail with OOM.
        if (m_gl->GetError() == GL_OUT_OF_MEMORY)
            return false;
        m_gl->FramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, m_multisampleColorBuffer);
        if (wantDepthOrStencilBuffer)
            resizeDepthStencil(size);
        if (m_gl->CheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
            return false;
    }
    return true;
}
// Allocates (or reallocates) the packed depth-stencil renderbuffer at |size|
// using the storage call matching the current antialiasing mode, and attaches
// it to the currently bound framebuffer.
void DrawingBuffer::resizeDepthStencil(const IntSize& size)
{
    if (!m_depthStencilBuffer)
        m_gl->GenRenderbuffers(1, &m_depthStencilBuffer);
    m_gl->BindRenderbuffer(GL_RENDERBUFFER, m_depthStencilBuffer);
    if (m_antiAliasingMode == MSAAImplicitResolve)
        m_gl->RenderbufferStorageMultisampleEXT(GL_RENDERBUFFER, m_sampleCount, GL_DEPTH24_STENCIL8_OES, size.width(), size.height());
    else if (m_antiAliasingMode == MSAAExplicitResolve)
        m_gl->RenderbufferStorageMultisampleCHROMIUM(GL_RENDERBUFFER, m_sampleCount, GL_DEPTH24_STENCIL8_OES, size.width(), size.height());
    else
        m_gl->RenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8_OES, size.width(), size.height());
    // For ES 2.0 contexts DEPTH_STENCIL is not available natively, so we emulate it
    // at the command buffer level for WebGL contexts.
    m_gl->FramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, m_depthStencilBuffer);
    m_gl->BindRenderbuffer(GL_RENDERBUFFER, 0);
}
// Clears the rendering buffers with |clearMask|; when multisampling is in
// use, the resolved (non-multisampled) color buffer is cleared as well.
void DrawingBuffer::clearFramebuffers(GLbitfield clearMask)
{
    // We will clear the multisample FBO, but we also need to clear the non-multisampled buffer.
    if (m_multisampleFBO) {
        m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_fbo);
        m_gl->Clear(GL_COLOR_BUFFER_BIT);
    }
    m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_multisampleFBO ? m_multisampleFBO : m_fbo);
    m_gl->Clear(clearMask);
}
// Records the current logical buffer size (no GL work is done here).
void DrawingBuffer::setSize(const IntSize& size)
{
    if (m_size != size)
        m_size = size;
}
// Returns |desiredSize| with each dimension clamped to |maxTextureSize|.
// |curSize| is accepted for interface compatibility but not consulted.
IntSize DrawingBuffer::adjustSize(const IntSize& desiredSize, const IntSize& curSize, int maxTextureSize)
{
    IntSize clampedSize = desiredSize;
    // Clamp if the desired size is greater than the maximum texture size for the device.
    clampedSize.setHeight(std::min(clampedSize.height(), maxTextureSize));
    clampedSize.setWidth(std::min(clampedSize.width(), maxTextureSize));
    return clampedSize;
}
// Resizes the drawing buffer to |newSize| (clamped to the device limit),
// progressively shrinking on allocation failure, then clears all buffers to
// their initial values. Returns false if no usable size could be allocated.
bool DrawingBuffer::reset(const IntSize& newSize, bool wantDepthOrStencilBuffer)
{
    ASSERT(!newSize.isEmpty());
    IntSize adjustedSize = adjustSize(newSize, m_size, m_maxTextureSize);
    if (adjustedSize.isEmpty())
        return false;
    if (adjustedSize != m_size) {
        do {
            if (m_colorBuffer.textureId) {
                resizeTextureMemory(&m_colorBuffer, adjustedSize);
            } else {
                m_colorBuffer = createTextureAndAllocateMemory(adjustedSize);
            }
            m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_fbo);
            attachColorBufferToCurrentFBO();
            // resize multisample FBO
            if (!resizeMultisampleFramebuffer(adjustedSize, wantDepthOrStencilBuffer)
                || !resizeFramebuffer(adjustedSize, wantDepthOrStencilBuffer)) {
                // Allocation failed — retry at a reduced size.
                adjustedSize.scale(s_resourceAdjustedRatio);
                continue;
            }
            break;
        } while (!adjustedSize.isEmpty());
        setSize(adjustedSize);
        if (adjustedSize.isEmpty())
            return false;
    }
    // Clear the buffers to their defaults, temporarily forcing full writes.
    m_gl->Disable(GL_SCISSOR_TEST);
    m_gl->ClearColor(0, 0, 0, 0);
    m_gl->ColorMask(true, true, true, true);
    GLbitfield clearMask = GL_COLOR_BUFFER_BIT;
    // The packed depth-stencil buffer backs both aspects, so one condition
    // covers depth and stencil (previously two identical if-blocks).
    if (m_depthStencilBuffer) {
        m_gl->ClearDepthf(1.0f);
        clearMask |= GL_DEPTH_BUFFER_BIT;
        m_gl->DepthMask(true);
        m_gl->ClearStencil(0);
        clearMask |= GL_STENCIL_BUFFER_BIT;
        m_gl->StencilMaskSeparate(GL_FRONT, 0xFFFFFFFF);
    }
    clearFramebuffers(clearMask);
    return true;
}
// Resolves antialiased rendering into the non-multisampled FBO: blits the
// multisample FBO (explicit-resolve mode) or applies screen-space AA, at
// most once per contents change. Leaves m_fbo bound as GL_FRAMEBUFFER.
void DrawingBuffer::commit()
{
    if (m_multisampleFBO && !m_contentsChangeCommitted) {
        m_gl->BindFramebuffer(GL_READ_FRAMEBUFFER_ANGLE, m_multisampleFBO);
        m_gl->BindFramebuffer(GL_DRAW_FRAMEBUFFER_ANGLE, m_fbo);
        // The scissor test would clip the blit; disable it around the copy.
        if (m_scissorEnabled)
            m_gl->Disable(GL_SCISSOR_TEST);
        int width = m_size.width();
        int height = m_size.height();
        // Use NEAREST, because there is no scale performed during the blit.
        m_gl->BlitFramebufferCHROMIUM(0, 0, width, height, 0, 0, width, height, GL_COLOR_BUFFER_BIT, GL_NEAREST);
        if (m_scissorEnabled)
            m_gl->Enable(GL_SCISSOR_TEST);
    }
    m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_fbo);
    if (m_antiAliasingMode == ScreenSpaceAntialiasing) {
        m_gl->ApplyScreenSpaceAntialiasingCHROMIUM();
    }
    m_contentsChangeCommitted = true;
}
// Restores the user's draw/read framebuffer bindings after internal work.
// A binding of 0 means "the drawing buffer itself", which is rebound via
// bind(); the four cases cover each combination of user/default bindings.
void DrawingBuffer::restoreFramebufferBindings()
{
    if (m_drawFramebufferBinding && m_readFramebufferBinding) {
        // Both are user framebuffers; use the combined target when equal.
        if (m_drawFramebufferBinding == m_readFramebufferBinding) {
            m_gl->BindFramebuffer(GL_FRAMEBUFFER, m_readFramebufferBinding);
        } else {
            m_gl->BindFramebuffer(GL_READ_FRAMEBUFFER, m_readFramebufferBinding);
            m_gl->BindFramebuffer(GL_DRAW_FRAMEBUFFER, m_drawFramebufferBinding);
        }
        return;
    }
    if (!m_drawFramebufferBinding && !m_readFramebufferBinding) {
        // Both default: rebind the drawing buffer for both targets.
        bind(GL_FRAMEBUFFER);
        return;
    }
    if (!m_drawFramebufferBinding) {
        bind(GL_DRAW_FRAMEBUFFER);
        m_gl->BindFramebuffer(GL_READ_FRAMEBUFFER, m_readFramebufferBinding);
    } else {
        bind(GL_READ_FRAMEBUFFER);
        m_gl->BindFramebuffer(GL_DRAW_FRAMEBUFFER, m_drawFramebufferBinding);
    }
}
// True when any antialiasing mode is active.
bool DrawingBuffer::multisample() const
{
    return m_antiAliasingMode != None;
}
// Binds this drawing buffer to |target|. Reads always use the resolved
// (non-multisampled) FBO; draws use the multisample FBO when one exists.
void DrawingBuffer::bind(GLenum target)
{
    const bool useResolvedFBO = (target == GL_READ_FRAMEBUFFER) || !m_multisampleFBO;
    m_gl->BindFramebuffer(target, useResolvedFBO ? m_fbo : m_multisampleFBO);
}
// Tracks the user's GL_PACK_ALIGNMENT so readbacks can temporarily override
// and then restore it (see readBackFramebuffer).
void DrawingBuffer::setPackAlignment(GLint param)
{
    m_packAlignment = param;
}
// Reads back the front or back buffer as un-premultiplied RGBA into
// |contents|, flipped to top-down row order. Outputs the dimensions through
// |width|/|height|. Returns false if the pixel buffer size would overflow.
bool DrawingBuffer::paintRenderingResultsToImageData(int& width, int& height, SourceDrawingBuffer sourceBuffer, WTF::ArrayBufferContents& contents)
{
    // ImageData is defined as un-premultiplied, so this path is only valid
    // for non-premultiplied buffers.
    ASSERT(!m_premultipliedAlpha);
    width = size().width();
    height = size().height();
    // Guard 4 * width * height against int overflow.
    CheckedNumeric<int> dataSize = 4;
    dataSize *= width;
    dataSize *= height;
    if (!dataSize.IsValid())
        return false;
    WTF::ArrayBufferContents pixels(width * height, 4, WTF::ArrayBufferContents::NotShared, WTF::ArrayBufferContents::DontInitialize);
    GLuint fbo = 0;
    if (sourceBuffer == FrontBuffer && m_frontColorBuffer.texInfo.textureId) {
        // Wrap the front-buffer texture in a temporary FBO for reading.
        m_gl->GenFramebuffers(1, &fbo);
        m_gl->BindFramebuffer(GL_FRAMEBUFFER, fbo);
        m_gl->FramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, m_frontColorBuffer.texInfo.parameters.target, m_frontColorBuffer.texInfo.textureId, 0);
    } else {
        m_gl->BindFramebuffer(GL_FRAMEBUFFER, framebuffer());
    }
    readBackFramebuffer(static_cast<unsigned char*>(pixels.data()), width, height, ReadbackRGBA, WebGLImageConversion::AlphaDoNothing);
    // GL rows are bottom-up; ImageData expects top-down.
    flipVertically(static_cast<uint8_t*>(pixels.data()), width, height);
    if (fbo) {
        // Detach before deleting the temporary FBO.
        m_gl->FramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, m_frontColorBuffer.texInfo.parameters.target, 0, 0);
        m_gl->DeleteFramebuffers(1, &fbo);
    }
    restoreFramebufferBindings();
    pixels.transfer(contents);
    return true;
}
// Reads the currently bound framebuffer into |pixels| as RGBA8, optionally
// swizzling to Skia byte order and/or premultiplying alpha in place.
// Temporarily forces GL_PACK_ALIGNMENT to 1 when the user's alignment would
// add row padding.
void DrawingBuffer::readBackFramebuffer(unsigned char* pixels, int width, int height, ReadbackOrder readbackOrder, WebGLImageConversion::AlphaOp op)
{
    if (m_packAlignment > 4)
        m_gl->PixelStorei(GL_PACK_ALIGNMENT, 1);
    m_gl->ReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
    if (m_packAlignment > 4)
        m_gl->PixelStorei(GL_PACK_ALIGNMENT, m_packAlignment);
    size_t bufferSize = 4 * width * height;
    if (readbackOrder == ReadbackSkia) {
#if (SK_R32_SHIFT == 16) && !SK_B32_SHIFT
        // Swizzle red and blue channels to match SkBitmap's byte ordering.
        // TODO(kbr): expose GL_BGRA as extension.
        for (size_t i = 0; i < bufferSize; i += 4) {
            std::swap(pixels[i], pixels[i + 2]);
        }
#endif
    }
    if (op == WebGLImageConversion::AlphaDoPremultiply) {
        // In-place premultiply: channel * alpha / 255 per pixel.
        for (size_t i = 0; i < bufferSize; i += 4) {
            pixels[i + 0] = std::min(255, pixels[i + 0] * pixels[i + 3] / 255);
            pixels[i + 1] = std::min(255, pixels[i + 1] * pixels[i + 3] / 255);
            pixels[i + 2] = std::min(255, pixels[i + 2] * pixels[i + 3] / 255);
        }
    } else if (op != WebGLImageConversion::AlphaDoNothing) {
        ASSERT_NOT_REACHED();
    }
}
// Reverses the row order of a tightly packed RGBA8 pixel buffer in place,
// using the member scratch scanline to swap row pairs.
void DrawingBuffer::flipVertically(uint8_t* framebuffer, int width, int height)
{
    m_scanline.resize(width * 4);
    uint8_t* scanline = &m_scanline[0];
    unsigned rowBytes = width * 4;
    unsigned count = height / 2; // middle row (odd heights) stays in place
    for (unsigned i = 0; i < count; i++) {
        uint8_t* rowA = framebuffer + i * rowBytes;
        uint8_t* rowB = framebuffer + (height - i - 1) * rowBytes;
        // Three-way swap of rowA and rowB via the scratch scanline.
        memcpy(scanline, rowB, rowBytes);
        memcpy(rowB, rowA, rowBytes);
        memcpy(rowA, scanline, rowBytes);
    }
}
// Allocates texture storage with no initial data (null pixel pointer).
// |unpackAlignment| is only validated; with null data it has no effect on
// the upload itself.
void DrawingBuffer::texImage2DResourceSafe(GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, GLint unpackAlignment)
{
    ASSERT(unpackAlignment == 1 || unpackAlignment == 2 || unpackAlignment == 4 || unpackAlignment == 8);
    m_gl->TexImage2D(target, level, internalformat, width, height, border, format, type, 0);
}
// Releases and destroys the CHROMIUM image backing |info|'s texture, if any,
// and clears the recorded image id. The texture object itself is kept.
void DrawingBuffer::deleteChromiumImageForTexture(TextureInfo* info)
{
    if (info->imageId) {
        m_gl->BindTexture(info->parameters.target, info->textureId);
        m_gl->ReleaseTexImage2DCHROMIUM(info->parameters.target, info->imageId);
        m_gl->DestroyImageCHROMIUM(info->imageId);
        info->imageId = 0;
    }
}
// Creates a color texture of |size|, preferring a CHROMIUM_image (GPU memory
// buffer) backing when the feature is enabled and an alpha channel is
// wanted; falls back to an ordinary texture on any failure.
DrawingBuffer::TextureInfo DrawingBuffer::createTextureAndAllocateMemory(const IntSize& size)
{
    // TODO(erikchen): Add support for a CHROMIUM_image back buffer whose
    // behavior mimics a texture with internal format GL_RGB.
    // https://crbug.com/581777.
    if (!m_wantAlphaChannel)
        return createDefaultTextureAndAllocateMemory(size);
    if (!RuntimeEnabledFeatures::webGLImageChromiumEnabled())
        return createDefaultTextureAndAllocateMemory(size);
    // First, try to allocate a CHROMIUM_image. This always has the potential to
    // fail.
    TextureParameters parameters = chromiumImageTextureParameters();
    GLuint imageId = m_gl->CreateGpuMemoryBufferImageCHROMIUM(size.width(), size.height(), parameters.internalColorFormat, GC3D_SCANOUT_CHROMIUM);
    if (!imageId)
        return createDefaultTextureAndAllocateMemory(size);
    GLuint textureId = createColorTexture(parameters);
    // Bind the image as the texture's storage (texture is still bound from
    // createColorTexture()).
    m_gl->BindTexImage2DCHROMIUM(parameters.target, imageId);
    TextureInfo info;
    info.textureId = textureId;
    info.imageId = imageId;
    info.parameters = parameters;
    return info;
}
// Creates a plain GL texture of |size| with uninitialized storage and
// returns its bookkeeping info (imageId stays 0).
DrawingBuffer::TextureInfo DrawingBuffer::createDefaultTextureAndAllocateMemory(const IntSize& size)
{
    TextureParameters parameters = defaultTextureParameters();
    GLuint textureId = createColorTexture(parameters);
    texImage2DResourceSafe(parameters.target, 0, parameters.internalColorFormat, size.width(), size.height(), 0, parameters.colorFormat, GL_UNSIGNED_BYTE);
    DrawingBuffer::TextureInfo info;
    info.textureId = textureId;
    info.parameters = parameters;
    return info;
}
// Reallocates the storage behind an existing texture at |size|. For
// CHROMIUM_image-backed textures it recreates the image; if that fails and
// the texture target allows it, it falls back to plain texture storage.
void DrawingBuffer::resizeTextureMemory(TextureInfo* info, const IntSize& size)
{
    ASSERT(info->textureId);
    if (info->imageId) {
        deleteChromiumImageForTexture(info);
        info->imageId = m_gl->CreateGpuMemoryBufferImageCHROMIUM(size.width(), size.height(), info->parameters.internalColorFormat, GC3D_SCANOUT_CHROMIUM);
        if (info->imageId) {
            m_gl->BindTexture(info->parameters.target, info->textureId);
            m_gl->BindTexImage2DCHROMIUM(info->parameters.target, info->imageId);
            return;
        }
        // If the desired texture target is different, there's no way to fall back
        // to a non CHROMIUM_image texture.
        if (chromiumImageTextureParameters().target != defaultTextureParameters().target)
            return;
    }
    m_gl->BindTexture(info->parameters.target, info->textureId);
    texImage2DResourceSafe(info->parameters.target, 0, info->parameters.internalColorFormat, size.width(), size.height(), 0, info->parameters.colorFormat, GL_UNSIGNED_BYTE);
}
// Attaches the back-buffer color texture to the currently bound framebuffer,
// using the multisampled attachment call in implicit-resolve mode. Restores
// the user's 2D texture binding afterwards.
void DrawingBuffer::attachColorBufferToCurrentFBO()
{
    GLenum target = m_colorBuffer.parameters.target;
    m_gl->BindTexture(target, m_colorBuffer.textureId);
    if (m_antiAliasingMode == MSAAImplicitResolve)
        m_gl->FramebufferTexture2DMultisampleEXT(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, target, m_colorBuffer.textureId, 0, m_sampleCount);
    else
        m_gl->FramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, target, m_colorBuffer.textureId, 0);
    m_gl->BindTexture(GL_TEXTURE_2D, m_texture2DBinding);
}
} // namespace blink
| was4444/chromium.src | third_party/WebKit/Source/platform/graphics/gpu/DrawingBuffer.cpp | C++ | bsd-3-clause | 37,512 |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/gcm_driver/instance_id/instance_id.h"
namespace instance_id {
// Constructs an InstanceID scoped to |app_id|. |handler| performs the
// underlying operations and must be non-null.
InstanceID::InstanceID(const std::string& app_id,
                       gcm::InstanceIDHandler* handler)
    : handler_(handler), app_id_(app_id) {
  DCHECK(handler_);
}
// Nothing to release explicitly; members clean themselves up.
InstanceID::~InstanceID() {
}
// Stores the callback that NotifyTokenRefresh() will run.
void InstanceID::SetTokenRefreshCallback(const TokenRefreshCallback& callback) {
  token_refresh_callback_ = callback;
}
// Runs the registered token-refresh callback, if any, passing this
// instance's app id and whether the ID itself should be updated.
void InstanceID::NotifyTokenRefresh(bool update_id) {
  if (token_refresh_callback_.is_null())
    return;
  token_refresh_callback_.Run(app_id_, update_id);
}
} // namespace instance_id
| was4444/chromium.src | components/gcm_driver/instance_id/instance_id.cc | C++ | bsd-3-clause | 756 |
/**
* Copyright (c) 2004-2005, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the University of California, Los Angeles nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package avrora.syntax.objdump;
import avrora.Avrora;
import avrora.util.StringUtil;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
/**
 * The <code>ObjDumpReformatter</code> class cleans up the textual output of
 * the <code>avr-objdump</code> utility so that it can be parsed by Avrora's
 * objdump program reader. Sections requested by the caller are converted;
 * all other sections are copied to the output commented out.
 *
 * @author Ben L. Titzer
 */
public class ObjDumpReformatter {

    // set of section names (e.g. ".text") that should be converted
    HashSet sections;

    // the same section names in caller-supplied order; used when scanning
    // the file header for per-section size/address information
    List sectlist;

    /**
     * Creates a new reformatter that converts only the given sections.
     *
     * @param slist a list of section name strings to convert
     */
    public ObjDumpReformatter(List slist) {
        sections = new HashSet();
        Iterator i = slist.iterator();
        while ( i.hasNext() ) {
            sections.add(i.next());
        }
        sectlist = slist;
    }

    /**
     * Reads the named objdump output file and returns a cleaned-up
     * version of its contents.
     *
     * @param inFile the name of the objdump output file to read
     * @return a buffer containing the reformatted program text
     * @throws IOException if the file cannot be opened or read
     */
    public StringBuffer cleanCode(String inFile) throws IOException {
        try {
            //Status.begin("Preprocessing");
            StringBuffer out = new StringBuffer(200000);
            BufferedReader in = new BufferedReader(new FileReader(inFile));
            cleanFile(in, out);
            //Status.success();
            return out;
        } catch (IOException e) {
            // rethrow IO exceptions (e.g. file not found)
            //Status.error(e);
            throw e;
        } catch (Throwable e) {
            //Status.error(e);
            throw Avrora.unexpected(e);
        }
    }

    // Processes the entire file: first the header that precedes any
    // disassembly, then each "Disassembly of section" region in turn.
    private void cleanFile(BufferedReader in, StringBuffer out) throws IOException {
        line_count = 0;
        String line = nextLine(in);

        //clean up first section
        line = readHeader(in, out, line);

        while (line != null) {
            String section = getSectionName(line);
            if (section != null) {
                // read the whole section
                line = readSection(in, out, section);
            } else {
                // ignore this line if it is between sections
                line = nextLine(in);
            }
        }
    }

    // Returns the section name (e.g. ".text") if this line marks the start
    // of a disassembly section ("Disassembly of section <name>:"),
    // or null otherwise.
    private String getSectionName(String line) {
        int offset = line.indexOf("Disassembly of section");
        if (offset != -1) {
            return line.substring(line.indexOf('.'), line.indexOf(':'));
        }
        return null;
    }

    // Scans the header (everything before the first disassembly section),
    // emitting a "program" declaration for the main.exe line and a "section"
    // declaration for every requested section found in the section table.
    // Returns the first line of the first disassembly section (or null).
    private String readHeader(BufferedReader in, StringBuffer out, String line) throws IOException {
        while (line != null) {
            if (line.indexOf("Disassembly of section") != -1) {
                break;
            }
            if (line.indexOf("main.exe") != -1)
                out.append("program \"main.exe\":\n\n");
            Iterator i = sectlist.iterator();
            while ( i.hasNext() ) {
                String s = (String)i.next();
                if (line.indexOf(s) != -1)
                    printSectionHeader(s, out, line);
            }
            line = nextLine(in);
        }
        return line;
    }

    // Emits a "section" declaration with the size, vma, lma and file offset
    // fields parsed from one row of the objdump section table.
    private void printSectionHeader(String section, StringBuffer out, String line) {
        out.append(" section "+section+" ");
        StringTokenizer st = new StringTokenizer(line);
        st.nextToken(); // 0
        st.nextToken(); //.text
        out.append(" size=0x" + st.nextToken());
        out.append(" vma=0x" + st.nextToken());
        out.append(" lma=0x" + st.nextToken());
        out.append(" offset=0x" + st.nextToken());
        out.append(" ;" + st.nextToken());
        out.append(" \n");
    }

    // Converts the section if it was requested, otherwise comments it out.
    private String readSection(BufferedReader in, StringBuffer out, String section) throws IOException {
        if ( sections.contains(section) )
            return convertSection(in, out, section);
        else
            return ignoreSection(in, out, section);
    }

    // Copies an unwanted section to the output with every line prefixed by
    // the comment character ';'. Returns the first line of the next section
    // (or null at end of file).
    private String ignoreSection(BufferedReader in, StringBuffer out, String section) throws IOException {
        out.append("; section "+section+" removed");
        String line = nextLine(in);
        while ( line != null) {
            out.append("; "+line+"\n");
            if ( getSectionName(line) != null )
                return line;
            line = nextLine(in);
        }
        return line;
    }

    // Converts one disassembly section into parser syntax: label lines become
    // "label 0x<addr> \"name\"" declarations, and instruction lines get their
    // address and raw instruction bytes prefixed with "0x". Returns the first
    // line of the next section (or null at end of file).
    private String convertSection(BufferedReader in, StringBuffer out, String section) throws IOException {
        // add the start of the section name
        out.append("\nstart " + section + ":\n");

        // read the next line
        String line = nextLine(in);
        while (line != null) {
            // beginning of new section
            if (getSectionName(line) != null)
                return line;

            // ignore ... in output
            if (line.indexOf("...") != -1) {
                line = nextLine(in);
                out.append("; ...");
            }
            // a line containing "Address " is merged with the following line,
            // presumably undoing a line wrap in objdump's output -- verify
            // against sample objdump output.
            if (line.indexOf("Address ") != -1) {
                line = line.substring(0, line.indexOf("Address "));
                line += nextLine(in);
            }
            if (isLabel(line)) {
                // "<name>" markers become quoted label names
                out.append("\nlabel 0x");
                StringTokenizer st = new StringTokenizer(line);
                out.append(st.nextToken());
                String name = st.nextToken();
                out.append(" " + name.replaceAll("[<,>]", "\"") + '\n');
            } else {
                String tok;
                StringTokenizer st = new StringTokenizer(line);
                if (st.hasMoreTokens()) {
                    // first token is the instruction address
                    tok = st.nextToken();
                    out.append(StringUtil.rightJustify("0x" + tok, 10));
                    while (st.hasMoreTokens()) {
                        tok = st.nextToken();
                        // exactly two hex digits => a raw instruction byte
                        if (tok.matches("\\p{XDigit}\\p{XDigit}"))
                            out.append(" 0x" + tok);
                        else
                            out.append(" " + tok);
                    }
                    out.append('\n');
                }
            }
            line = nextLine(in);
        }
        return line;
    }

    // number of lines read so far from the input file
    int line_count;

    // Reads the next line from the input, keeping the line counter current.
    private String nextLine(BufferedReader in) throws IOException {
        line_count++;
        String line = in.readLine();
        return line;
    }

    /**
     * Checks whether a line declares a label.
     *
     * @param s the line to test
     * @return true if statement is of the form: <hexdig> <\<LABEL\>:>
     */
    private boolean isLabel(String s) {
        if (s.indexOf("<") == -1)
            return false;
        if (s.indexOf(">:") == -1)
            return false;
        return true;
    }
}
| minf/avrora | src/avrora/syntax/objdump/ObjDumpReformatter.java | Java | bsd-3-clause | 7,812 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
/// <summary>
/// Summary description for Config: static configuration values for the
/// UEditor upload plugin.
/// </summary>
public class Config
{
    /// <summary>
    /// Candidate folders (relative paths) into which uploaded images may be
    /// saved.
    /// </summary>
    public static string[] ImageSavePath = new string[] { "upload1", "upload2", "upload3" };
} | hehekeke/msup | Public/plugin/ueditor1_3_6/net/Config.cs | C# | bsd-3-clause | 261 |
using System.Collections.Generic;
using System.IO;
namespace Coevery.Commands {
/// <summary>
/// Bundles everything a command needs to execute: its arguments, switches,
/// and the streams to read input from and write output to.
/// </summary>
public class CommandParameters {
    /// <summary>Arguments passed to the command.</summary>
    public IEnumerable<string> Arguments { get; set; }

    /// <summary>Named switches (name/value pairs) passed to the command.</summary>
    public IDictionary<string, string> Switches { get; set; }

    /// <summary>Reader the command consumes input from.</summary>
    public TextReader Input { get; set; }

    /// <summary>Writer the command writes its output to.</summary>
    public TextWriter Output { get; set; }
}
}
| Coevery/Coevery-Framework | src/Coevery/Commands/CommandParameters.cs | C# | bsd-3-clause | 348 |
<?php
/**
 * Widget view: lets a customer pick a shop offer by choosing a value for
 * each offer property via dropdown option buttons. Every change resubmits
 * the form (via pjax, see the form's data-pjax option).
 *
 * @link https://cms.skeeks.com/
 * @copyright Copyright (c) 2010 SkeekS
 * @license https://cms.skeeks.com/license/
 * @author Semenov Alexander <semenov@skeeks.com>
 */

/* @var $this yii\web\View */
/* @var $helper \skeeks\cms\shop\helpers\ShopOfferChooseHelper */
?>
<?
// Inline styles for the property chooser controls. NOTE: heredoc content is
// emitted verbatim to the page.
$this->registerCss(<<<CSS
.sx-choose-property-group .form-group {
margin-bottom: 0px;
}
.sx-choose-property-group {
margin-bottom: 15px;
}
.sx-choose-property-group .sx-disabled-btn-option {
border-color: silver;
color: silver;
}
.btn-select-option {
margin-bottom: 5px;
}
CSS
);

// Client-side behavior: clicking an option button copies its value into the
// group's hidden input and triggers change; changing the offer select
// resubmits the form; changing a property clears the offer select first and
// then resubmits.
$this->registerJs(<<<JS
$('#sx-select-offer .btn-select-option').on("click", function() {
var value = $(this).data('value');
if ($(this).data('disabled')) {
sx.notify.error("Выберите другую опцию");
return false;
}
var jGroup = $(this).closest(".sx-choose-property-group");
$("input", jGroup).val(value).change();
return false;
});
$('#sx-select-offer .sx-offer-choose select').on("change", function() {
$("#sx-select-offer").submit();
return false;
});
$('#sx-select-offer .sx-properties-choose input').on("change", function() {
$("#sx-select-offer .sx-offer-choose").empty();
$("#sx-select-offer").submit();
return false;
});
JS
);
?>

<? $form = \yii\bootstrap\ActiveForm::begin([
    'id' => 'sx-select-offer',
    'options' => [
        'data-pjax' => 1,
    ],
]); ?>

<div class="sx-properties-choose">
    <? if ($helper->chooseFields) : ?>
        <? foreach ($helper->chooseFields as $code => $data) : ?>
            <? // options currently unavailable for this property ?>
            <? $disabled = \yii\helpers\ArrayHelper::getValue($data, 'disabledOptions'); ?>
            <? if ((array)\yii\helpers\ArrayHelper::getValue($data, 'options')) : ?>
                <div class="sx-choose-property-group">
                    <div class="dropdown">
                        <?= $form->field($helper->chooseModel, $code)->hiddenInput()
                            /*->listBox(
                                \yii\helpers\ArrayHelper::getValue($data, 'options'),
                                [
                                    'size' => 1,
                                    'options' => \yii\helpers\ArrayHelper::getValue($data, 'disabeldOptions')
                                ])*/
                            ->label(
                                \yii\helpers\ArrayHelper::getValue($data, 'label')
                            );
                        ?>
                        <div class="g-brd-bottom g-brd-gray-light-v3 py-1">
                            <div class="g-font-weight-400 g-font-size-default mb-0 ">
                                <a href="#" class="d-block g-color-black g-font-size-18 g-text-underline--none--hover" data-toggle="dropdown">
                                    Выбрать
                                    <span class="float-right g-pos-rel g-top-3 mr-1 hs-icon hs-icon-arrow-bottom"></span>
                                </a>
                                <div class="dropdown-menu" aria-labelledby="dropdownMenuButton">
                                    <? foreach ((array)\yii\helpers\ArrayHelper::getValue($data, 'options') as $key => $value): ?>
                                        <?
                                        // Button state: the currently selected option is
                                        // highlighted; disabled options are greyed out and
                                        // rejected by the click handler.
                                        $isChecked = false;
                                        $isDisabled = false;
                                        $cssClass = 'u-btn-outline-darkgray';

                                        if ($helper->chooseModel->{$code} == $key) {
                                            $isChecked = true;
                                            $cssClass = 'u-btn-primary';
                                        }

                                        if (in_array($key, $disabled)) {
                                            $isDisabled = true;
                                            $cssClass = "sx-disabled-btn-option";
                                        }
                                        ?>
                                        <button class="btn btn-select-option <?= $cssClass; ?>" data-value="<?= $key; ?>" data-disabled="<?= (int)$isDisabled; ?>">
                                            <? if ($isChecked) : ?>
                                                <i class="fas fa-check"></i>
                                            <? endif; ?>
                                            <?= $value; ?>
                                        </button>
                                    <? endforeach; ?>
                                </div>
                            </div>
                        </div>
                    </div>
                </div>
            <? endif; ?>
        <? endforeach; ?>
    <? endif; ?>
</div>

<div class="sx-offer-choose">
    <? // The explicit offer select is shown when offers have no distinguishing
       // properties, or when several offers still match the chosen options. ?>
    <? if (!$helper->is_offers_properties) : ?>
        <?= $form->field($helper->chooseModel, 'offer_id')->listBox(\yii\helpers\ArrayHelper::map(
            $helper->availableOffers,
            'id',
            'asText'
        ), ['size' => 1])->label("Предложение"); ?>
    <? elseif (count($helper->availableOffers) > 1 && \Yii::$app->request->post()) : ?>
        <?= $form->field($helper->chooseModel, 'offer_id')->listBox(\yii\helpers\ArrayHelper::map(
            $helper->availableOffers,
            'id',
            'asText'
        ), ['size' => 1])->label("Предложение"); ?>
    <? endif; ?>
</div>

<? $form::end(); ?>
| skeeks-cms/cms-shop | src/views/helpers/shop-offer-choose-dropdown.php | PHP | bsd-3-clause | 5,355 |
"""
Module parse to/from Excel
"""
# ---------------------------------------------------------------------
# ExcelFile class
import abc
from datetime import date, datetime, time, timedelta
from distutils.version import LooseVersion
from io import UnsupportedOperation
import os
from textwrap import fill
import warnings
import numpy as np
import pandas._libs.json as json
import pandas.compat as compat
from pandas.compat import (
OrderedDict, add_metaclass, lrange, map, range, string_types, u, zip)
from pandas.errors import EmptyDataError
from pandas.util._decorators import Appender, deprecate_kwarg
from pandas.core.dtypes.common import (
is_bool, is_float, is_integer, is_list_like)
from pandas.core import config
from pandas.core.frame import DataFrame
from pandas.io.common import (
_NA_VALUES, _is_url, _stringify_path, _urlopen, _validate_header_arg,
get_filepath_or_buffer)
from pandas.io.formats.printing import pprint_thing
from pandas.io.parsers import TextParser
__all__ = ["read_excel", "ExcelWriter", "ExcelFile"]
_writer_extensions = ["xlsx", "xls", "xlsm"]
_writers = {}
_read_excel_doc = """
Read an Excel file into a pandas DataFrame.
Support both `xls` and `xlsx` file extensions from a local filesystem or URL.
Support an option to read a single sheet or a list of sheets.
Parameters
----------
io : str, file descriptor, pathlib.Path, ExcelFile or xlrd.Book
The string could be a URL. Valid URL schemes include http, ftp, s3,
gcs, and file. For file URLs, a host is expected. For instance, a local
file could be /path/to/workbook.xlsx.
sheet_name : str, int, list, or None, default 0
Strings are used for sheet names. Integers are used in zero-indexed
sheet positions. Lists of strings/integers are used to request
multiple sheets. Specify None to get all sheets.
Available cases:
* Defaults to ``0``: 1st sheet as a `DataFrame`
* ``1``: 2nd sheet as a `DataFrame`
* ``"Sheet1"``: Load sheet with name "Sheet1"
* ``[0, 1, "Sheet5"]``: Load first, second and sheet named "Sheet5"
as a dict of `DataFrame`
* None: All sheets.
header : int, list of int, default 0
Row (0-indexed) to use for the column labels of the parsed
DataFrame. If a list of integers is passed those row positions will
be combined into a ``MultiIndex``. Use None if there is no header.
names : array-like, default None
List of column names to use. If file contains no header row,
then you should explicitly pass header=None.
index_col : int, list of int, default None
Column (0-indexed) to use as the row labels of the DataFrame.
Pass None if there is no such column. If a list is passed,
those columns will be combined into a ``MultiIndex``. If a
subset of data is selected with ``usecols``, index_col
is based on the subset.
parse_cols : int or list, default None
Alias of `usecols`.
.. deprecated:: 0.21.0
Use `usecols` instead.
usecols : int, str, list-like, or callable default None
Return a subset of the columns.
* If None, then parse all columns.
* If int, then indicates last column to be parsed.
.. deprecated:: 0.24.0
Pass in a list of int instead from 0 to `usecols` inclusive.
* If str, then indicates comma separated list of Excel column letters
and column ranges (e.g. "A:E" or "A,C,E:F"). Ranges are inclusive of
both sides.
* If list of int, then indicates list of column numbers to be parsed.
* If list of string, then indicates list of column names to be parsed.
.. versionadded:: 0.24.0
* If callable, then evaluate each column name against it and parse the
column if the callable returns ``True``.
.. versionadded:: 0.24.0
squeeze : bool, default False
If the parsed data only contains one column then return a Series.
dtype : Type name or dict of column -> type, default None
Data type for data or columns. E.g. {'a': np.float64, 'b': np.int32}
Use `object` to preserve data as stored in Excel and not interpret dtype.
If converters are specified, they will be applied INSTEAD
of dtype conversion.
.. versionadded:: 0.20.0
engine : str, default None
If io is not a buffer or path, this must be set to identify io.
Acceptable values are None or xlrd.
converters : dict, default None
Dict of functions for converting values in certain columns. Keys can
either be integers or column labels, values are functions that take one
input argument, the Excel cell content, and return the transformed
content.
true_values : list, default None
Values to consider as True.
.. versionadded:: 0.19.0
false_values : list, default None
Values to consider as False.
.. versionadded:: 0.19.0
skiprows : list-like
Rows to skip at the beginning (0-indexed).
nrows : int, default None
Number of rows to parse.
.. versionadded:: 0.23.0
na_values : scalar, str, list-like, or dict, default None
Additional strings to recognize as NA/NaN. If dict passed, specific
per-column NA values. By default the following values are interpreted
as NaN: '""" + fill("', '".join(sorted(_NA_VALUES)), 70, subsequent_indent=" ") + """'.
keep_default_na : bool, default True
If na_values are specified and keep_default_na is False the default NaN
values are overridden, otherwise they're appended to.
verbose : bool, default False
Indicate number of NA values placed in non-numeric columns.
parse_dates : bool, list-like, or dict, default False
The behavior is as follows:
* bool. If True -> try parsing the index.
* list of int or names. e.g. If [1, 2, 3] -> try parsing columns 1, 2, 3
each as a separate date column.
* list of lists. e.g. If [[1, 3]] -> combine columns 1 and 3 and parse as
a single date column.
* dict, e.g. {'foo' : [1, 3]} -> parse columns 1, 3 as date and call
result 'foo'
If a column or index contains an unparseable date, the entire column or
index will be returned unaltered as an object data type. For non-standard
datetime parsing, use ``pd.to_datetime`` after ``pd.read_csv``
Note: A fast-path exists for iso8601-formatted dates.
date_parser : function, optional
Function to use for converting a sequence of string columns to an array of
datetime instances. The default uses ``dateutil.parser.parser`` to do the
conversion. Pandas will try to call `date_parser` in three different ways,
advancing to the next if an exception occurs: 1) Pass one or more arrays
(as defined by `parse_dates`) as arguments; 2) concatenate (row-wise) the
string values from the columns defined by `parse_dates` into a single array
and pass that; and 3) call `date_parser` once for each row using one or
more strings (corresponding to the columns defined by `parse_dates`) as
arguments.
thousands : str, default None
Thousands separator for parsing string columns to numeric. Note that
this parameter is only necessary for columns stored as TEXT in Excel,
any numeric columns will automatically be parsed, regardless of display
format.
comment : str, default None
Comments out remainder of line. Pass a character or characters to this
argument to indicate comments in the input file. Any data between the
comment string and the end of the current line is ignored.
skip_footer : int, default 0
Alias of `skipfooter`.
.. deprecated:: 0.23.0
Use `skipfooter` instead.
skipfooter : int, default 0
Rows at the end to skip (0-indexed).
convert_float : bool, default True
Convert integral floats to int (i.e., 1.0 --> 1). If False, all numeric
data will be read in as floats: Excel stores all numbers as floats
internally.
mangle_dupe_cols : bool, default True
Duplicate columns will be specified as 'X', 'X.1', ...'X.N', rather than
'X'...'X'. Passing in False will cause data to be overwritten if there
are duplicate names in the columns.
**kwds : optional
Optional keyword arguments can be passed to ``TextFileReader``.
Returns
-------
DataFrame or dict of DataFrames
DataFrame from the passed in Excel file. See notes in sheet_name
argument for more information on when a dict of DataFrames is returned.
See Also
--------
to_excel : Write DataFrame to an Excel file.
to_csv : Write DataFrame to a comma-separated values (csv) file.
read_csv : Read a comma-separated values (csv) file into DataFrame.
read_fwf : Read a table of fixed-width formatted lines into DataFrame.
Examples
--------
The file can be read using the file name as string or an open file object:
>>> pd.read_excel('tmp.xlsx', index_col=0) # doctest: +SKIP
Name Value
0 string1 1
1 string2 2
2 #Comment 3
>>> pd.read_excel(open('tmp.xlsx', 'rb'),
... sheet_name='Sheet3') # doctest: +SKIP
Unnamed: 0 Name Value
0 0 string1 1
1 1 string2 2
2 2 #Comment 3
Index and header can be specified via the `index_col` and `header` arguments
>>> pd.read_excel('tmp.xlsx', index_col=None, header=None) # doctest: +SKIP
0 1 2
0 NaN Name Value
1 0.0 string1 1
2 1.0 string2 2
3 2.0 #Comment 3
Column types are inferred but can be explicitly specified
>>> pd.read_excel('tmp.xlsx', index_col=0,
... dtype={'Name': str, 'Value': float}) # doctest: +SKIP
Name Value
0 string1 1.0
1 string2 2.0
2 #Comment 3.0
True, False, and NA values, and thousands separators have defaults,
but can be explicitly specified, too. Supply the values you would like
as strings or lists of strings!
>>> pd.read_excel('tmp.xlsx', index_col=0,
... na_values=['string1', 'string2']) # doctest: +SKIP
Name Value
0 NaN 1
1 NaN 2
2 #Comment 3
Comment lines in the excel input file can be skipped using the `comment` kwarg
>>> pd.read_excel('tmp.xlsx', index_col=0, comment='#') # doctest: +SKIP
Name Value
0 string1 1.0
1 string2 2.0
2 None NaN
"""
def register_writer(klass):
    """Add an engine to the excel writer registry.

    You must use this method to integrate with ``to_excel``. Also adds
    config options for any new ``supported_extensions`` defined on the
    writer.
    """
    if not compat.callable(klass):
        raise ValueError("Can only register callables as engines")

    engine = klass.engine
    _writers[engine] = klass

    for extension in klass.supported_extensions:
        # Accept extensions written either as "xlsx" or ".xlsx".
        if extension.startswith('.'):
            extension = extension[1:]

        if extension in _writer_extensions:
            continue

        # The first writer registered for a new extension becomes the
        # default value of its "io.excel.<ext>.writer" option.
        config.register_option(
            "io.excel.{ext}.writer".format(ext=extension),
            engine, validator=str)
        _writer_extensions.append(extension)
def _get_default_writer(ext):
_default_writers = {'xlsx': 'openpyxl', 'xlsm': 'openpyxl', 'xls': 'xlwt'}
try:
import xlsxwriter # noqa
_default_writers['xlsx'] = 'xlsxwriter'
except ImportError:
pass
return _default_writers[ext]
def get_writer(engine_name):
    """Return the registered ExcelWriter class for ``engine_name``.

    Raises
    ------
    ValueError
        If no writer has been registered under that name.
    """
    if engine_name in _writers:
        return _writers[engine_name]
    raise ValueError("No Excel writer '{engine}'"
                     .format(engine=engine_name))
# NOTE: read_excel's public docstring is supplied by @Appender below; the
# deprecate_kwarg decorators map the legacy `parse_cols`/`skip_footer`
# keywords onto `usecols`/`skipfooter`.
@Appender(_read_excel_doc)
@deprecate_kwarg("parse_cols", "usecols")
@deprecate_kwarg("skip_footer", "skipfooter")
def read_excel(io,
               sheet_name=0,
               header=0,
               names=None,
               index_col=None,
               parse_cols=None,
               usecols=None,
               squeeze=False,
               dtype=None,
               engine=None,
               converters=None,
               true_values=None,
               false_values=None,
               skiprows=None,
               nrows=None,
               na_values=None,
               keep_default_na=True,
               verbose=False,
               parse_dates=False,
               date_parser=None,
               thousands=None,
               comment=None,
               skip_footer=0,
               skipfooter=0,
               convert_float=True,
               mangle_dupe_cols=True,
               **kwds):

    # Can't use _deprecate_kwarg since sheetname=None has a special meaning
    if is_integer(sheet_name) and sheet_name == 0 and 'sheetname' in kwds:
        warnings.warn("The `sheetname` keyword is deprecated, use "
                      "`sheet_name` instead", FutureWarning, stacklevel=2)
        sheet_name = kwds.pop("sheetname")

    # `sheet` was never a valid keyword; fail loudly instead of ignoring it.
    if 'sheet' in kwds:
        raise TypeError("read_excel() got an unexpected keyword argument "
                        "`sheet`")

    # Normalize the input into an ExcelFile so all parsing goes through a
    # single code path; `engine` only matters when we open the file here.
    if not isinstance(io, ExcelFile):
        io = ExcelFile(io, engine=engine)

    return io.parse(
        sheet_name=sheet_name,
        header=header,
        names=names,
        index_col=index_col,
        usecols=usecols,
        squeeze=squeeze,
        dtype=dtype,
        converters=converters,
        true_values=true_values,
        false_values=false_values,
        skiprows=skiprows,
        nrows=nrows,
        na_values=na_values,
        keep_default_na=keep_default_na,
        verbose=verbose,
        parse_dates=parse_dates,
        date_parser=date_parser,
        thousands=thousands,
        comment=comment,
        skipfooter=skipfooter,
        convert_float=convert_float,
        mangle_dupe_cols=mangle_dupe_cols,
        **kwds)
@add_metaclass(abc.ABCMeta)
class _BaseExcelReader(object):
    """Abstract base class for Excel reader engines.

    Subclasses provide sheet access and raw cell data; :meth:`parse` turns
    that data into DataFrames via ``TextParser``.
    """

    @property
    @abc.abstractmethod
    def sheet_names(self):
        # List of sheet names, in workbook order.
        pass

    @abc.abstractmethod
    def get_sheet_by_name(self, name):
        # Return the engine's sheet object for the given sheet name.
        pass

    @abc.abstractmethod
    def get_sheet_by_index(self, index):
        # Return the engine's sheet object for the given 0-based position.
        pass

    @abc.abstractmethod
    def get_sheet_data(self, sheet, convert_float):
        # Return the sheet contents as a list of rows of Python values.
        pass

    def parse(self,
              sheet_name=0,
              header=0,
              names=None,
              index_col=None,
              usecols=None,
              squeeze=False,
              dtype=None,
              true_values=None,
              false_values=None,
              skiprows=None,
              nrows=None,
              na_values=None,
              verbose=False,
              parse_dates=False,
              date_parser=None,
              thousands=None,
              comment=None,
              skipfooter=0,
              convert_float=True,
              mangle_dupe_cols=True,
              **kwds):
        # Parse one or more sheets into DataFrames. Returns a single
        # DataFrame for a scalar `sheet_name`, or an OrderedDict of
        # sheet name/index -> DataFrame when a list (or None = all
        # sheets) was requested.

        _validate_header_arg(header)

        ret_dict = False

        # Keep sheetname to maintain backwards compatibility.
        if isinstance(sheet_name, list):
            sheets = sheet_name
            ret_dict = True
        elif sheet_name is None:
            sheets = self.sheet_names
            ret_dict = True
        else:
            sheets = [sheet_name]

        # handle same-type duplicates.
        sheets = list(OrderedDict.fromkeys(sheets).keys())

        output = OrderedDict()

        for asheetname in sheets:
            if verbose:
                print("Reading sheet {sheet}".format(sheet=asheetname))

            if isinstance(asheetname, compat.string_types):
                sheet = self.get_sheet_by_name(asheetname)
            else:  # assume an integer if not a string
                sheet = self.get_sheet_by_index(asheetname)

            data = self.get_sheet_data(sheet, convert_float)
            usecols = _maybe_convert_usecols(usecols)

            # Completely empty sheet -> empty DataFrame.
            if sheet.nrows == 0:
                output[asheetname] = DataFrame()
                continue

            # A one-element header list behaves like a scalar header row.
            if is_list_like(header) and len(header) == 1:
                header = header[0]

            # forward fill and pull out names for MultiIndex column
            header_names = None
            if header is not None and is_list_like(header):
                header_names = []
                control_row = [True] * len(data[0])

                for row in header:
                    if is_integer(skiprows):
                        row += skiprows

                    data[row], control_row = _fill_mi_header(data[row],
                                                             control_row)

                    if index_col is not None:
                        header_name, _ = _pop_header_name(data[row], index_col)
                        header_names.append(header_name)

            if is_list_like(index_col):
                # Forward fill values for MultiIndex index.
                if not is_list_like(header):
                    offset = 1 + header
                else:
                    offset = 1 + max(header)

                # Check if we have an empty dataset
                # before trying to collect data.
                if offset < len(data):
                    for col in index_col:
                        last = data[offset][col]

                        for row in range(offset + 1, len(data)):
                            if data[row][col] == '' or data[row][col] is None:
                                data[row][col] = last
                            else:
                                last = data[row][col]

            has_index_names = is_list_like(header) and len(header) > 1

            # GH 12292 : error when read one empty column from excel file
            try:
                parser = TextParser(data,
                                    names=names,
                                    header=header,
                                    index_col=index_col,
                                    has_index_names=has_index_names,
                                    squeeze=squeeze,
                                    dtype=dtype,
                                    true_values=true_values,
                                    false_values=false_values,
                                    skiprows=skiprows,
                                    nrows=nrows,
                                    na_values=na_values,
                                    parse_dates=parse_dates,
                                    date_parser=date_parser,
                                    thousands=thousands,
                                    comment=comment,
                                    skipfooter=skipfooter,
                                    usecols=usecols,
                                    mangle_dupe_cols=mangle_dupe_cols,
                                    **kwds)

                output[asheetname] = parser.read(nrows=nrows)

                if not squeeze or isinstance(output[asheetname], DataFrame):
                    if header_names:
                        output[asheetname].columns = output[
                            asheetname].columns.set_names(header_names)
                    elif compat.PY2:
                        output[asheetname].columns = _maybe_convert_to_string(
                            output[asheetname].columns)

            except EmptyDataError:
                # No Data, return an empty DataFrame
                output[asheetname] = DataFrame()

        if ret_dict:
            return output
        else:
            return output[asheetname]
class _XlrdReader(_BaseExcelReader):
    """Excel reader engine backed by the ``xlrd`` package."""

    def __init__(self, filepath_or_buffer):
        """Reader using xlrd engine.

        Parameters
        ----------
        filepath_or_buffer : string, path object or Workbook
            Object to be parsed.
        """
        err_msg = "Install xlrd >= 1.0.0 for Excel support"

        try:
            import xlrd
        except ImportError:
            raise ImportError(err_msg)
        else:
            if xlrd.__VERSION__ < LooseVersion("1.0.0"):
                raise ImportError(err_msg +
                                  ". Current version " + xlrd.__VERSION__)

        # If filepath_or_buffer is a url, want to keep the data as bytes so
        # can't pass to get_filepath_or_buffer()
        if _is_url(filepath_or_buffer):
            filepath_or_buffer = _urlopen(filepath_or_buffer)
        elif not isinstance(filepath_or_buffer, (ExcelFile, xlrd.Book)):
            filepath_or_buffer, _, _, _ = get_filepath_or_buffer(
                filepath_or_buffer)

        if isinstance(filepath_or_buffer, xlrd.Book):
            # Caller handed us an already-open workbook.
            self.book = filepath_or_buffer
        elif hasattr(filepath_or_buffer, "read"):
            # N.B. xlrd.Book has a read attribute too
            if hasattr(filepath_or_buffer, 'seek'):
                try:
                    # GH 19779
                    filepath_or_buffer.seek(0)
                except UnsupportedOperation:
                    # HTTPResponse does not support seek()
                    # GH 20434
                    pass

            data = filepath_or_buffer.read()
            self.book = xlrd.open_workbook(file_contents=data)
        elif isinstance(filepath_or_buffer, compat.string_types):
            self.book = xlrd.open_workbook(filepath_or_buffer)
        else:
            raise ValueError('Must explicitly set engine if not passing in'
                             ' buffer or path for io.')

    @property
    def sheet_names(self):
        return self.book.sheet_names()

    def get_sheet_by_name(self, name):
        return self.book.sheet_by_name(name)

    def get_sheet_by_index(self, index):
        return self.book.sheet_by_index(index)

    def get_sheet_data(self, sheet, convert_float):
        # Convert a whole xlrd sheet into a list of rows of Python values,
        # translating xlrd's cell types (dates, errors, booleans, numbers)
        # into pandas-appropriate objects.
        from xlrd import (xldate, XL_CELL_DATE,
                          XL_CELL_ERROR, XL_CELL_BOOLEAN,
                          XL_CELL_NUMBER)

        # datemode: 0 -> 1900-based epoch, 1 -> 1904-based epoch.
        epoch1904 = self.book.datemode

        def _parse_cell(cell_contents, cell_typ):
            """converts the contents of the cell into a pandas
            appropriate object"""

            if cell_typ == XL_CELL_DATE:

                # Use the newer xlrd datetime handling.
                try:
                    cell_contents = xldate.xldate_as_datetime(
                        cell_contents, epoch1904)
                except OverflowError:
                    # Out-of-range serials stay as raw floats.
                    return cell_contents

                # Excel doesn't distinguish between dates and time,
                # so we treat dates on the epoch as times only.
                # Also, Excel supports 1900 and 1904 epochs.
                year = (cell_contents.timetuple())[0:3]
                if ((not epoch1904 and year == (1899, 12, 31)) or
                        (epoch1904 and year == (1904, 1, 1))):
                    cell_contents = time(cell_contents.hour,
                                         cell_contents.minute,
                                         cell_contents.second,
                                         cell_contents.microsecond)

            elif cell_typ == XL_CELL_ERROR:
                cell_contents = np.nan
            elif cell_typ == XL_CELL_BOOLEAN:
                cell_contents = bool(cell_contents)
            elif convert_float and cell_typ == XL_CELL_NUMBER:
                # GH5394 - Excel 'numbers' are always floats
                # it's a minimal perf hit and less surprising
                val = int(cell_contents)
                if val == cell_contents:
                    cell_contents = val
            return cell_contents

        data = []

        for i in range(sheet.nrows):
            row = [_parse_cell(value, typ)
                   for value, typ in zip(sheet.row_values(i),
                                         sheet.row_types(i))]
            data.append(row)

        return data
class ExcelFile(object):
"""
Class for parsing tabular excel sheets into DataFrame objects.
Uses xlrd. See read_excel for more documentation
Parameters
----------
io : string, path object (pathlib.Path or py._path.local.LocalPath),
file-like object or xlrd workbook
If a string or path object, expected to be a path to xls or xlsx file.
engine : string, default None
If io is not a buffer or path, this must be set to identify io.
Acceptable values are None or ``xlrd``.
"""
_engines = {
'xlrd': _XlrdReader,
}
def __init__(self, io, engine=None):
if engine is None:
engine = 'xlrd'
if engine not in self._engines:
raise ValueError("Unknown engine: {engine}".format(engine=engine))
# could be a str, ExcelFile, Book, etc.
self.io = io
# Always a string
self._io = _stringify_path(io)
self._reader = self._engines[engine](self._io)
def __fspath__(self):
return self._io
def parse(self,
sheet_name=0,
header=0,
names=None,
index_col=None,
usecols=None,
squeeze=False,
converters=None,
true_values=None,
false_values=None,
skiprows=None,
nrows=None,
na_values=None,
parse_dates=False,
date_parser=None,
thousands=None,
comment=None,
skipfooter=0,
convert_float=True,
mangle_dupe_cols=True,
**kwds):
"""
Parse specified sheet(s) into a DataFrame
Equivalent to read_excel(ExcelFile, ...) See the read_excel
docstring for more info on accepted parameters
"""
# Can't use _deprecate_kwarg since sheetname=None has a special meaning
if is_integer(sheet_name) and sheet_name == 0 and 'sheetname' in kwds:
warnings.warn("The `sheetname` keyword is deprecated, use "
"`sheet_name` instead", FutureWarning, stacklevel=2)
sheet_name = kwds.pop("sheetname")
elif 'sheetname' in kwds:
raise TypeError("Cannot specify both `sheet_name` "
"and `sheetname`. Use just `sheet_name`")
if 'chunksize' in kwds:
raise NotImplementedError("chunksize keyword of read_excel "
"is not implemented")
return self._reader.parse(sheet_name=sheet_name,
header=header,
names=names,
index_col=index_col,
usecols=usecols,
squeeze=squeeze,
converters=converters,
true_values=true_values,
false_values=false_values,
skiprows=skiprows,
nrows=nrows,
na_values=na_values,
parse_dates=parse_dates,
date_parser=date_parser,
thousands=thousands,
comment=comment,
skipfooter=skipfooter,
convert_float=convert_float,
mangle_dupe_cols=mangle_dupe_cols,
**kwds)
@property
def book(self):
return self._reader.book
@property
def sheet_names(self):
return self._reader.sheet_names
def close(self):
"""close io if necessary"""
if hasattr(self.io, 'close'):
self.io.close()
    def __enter__(self):
        # Context-manager support: `with ExcelFile(...) as xls:`.
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Always release the underlying handle on context exit.
        self.close()
def _excel2num(x):
"""
Convert Excel column name like 'AB' to 0-based column index.
Parameters
----------
x : str
The Excel column name to convert to a 0-based column index.
Returns
-------
num : int
The column index corresponding to the name.
Raises
------
ValueError
Part of the Excel column name was invalid.
"""
index = 0
for c in x.upper().strip():
cp = ord(c)
if cp < ord("A") or cp > ord("Z"):
raise ValueError("Invalid column name: {x}".format(x=x))
index = index * 26 + cp - ord("A") + 1
return index - 1
def _range2cols(areas):
    """
    Convert comma separated list of column names and ranges to indices.

    Parameters
    ----------
    areas : str
        A string containing a sequence of column ranges (or areas).

    Returns
    -------
    cols : list
        A list of 0-based column indices.

    Examples
    --------
    >>> _range2cols('A:E')
    [0, 1, 2, 3, 4]
    >>> _range2cols('A,C,Z:AB')
    [0, 2, 25, 26, 27]
    """
    indices = []
    for area in areas.split(","):
        if ":" in area:
            # Inclusive range such as 'Z:AB'.
            bounds = area.split(":")
            indices.extend(lrange(_excel2num(bounds[0]),
                                  _excel2num(bounds[1]) + 1))
        else:
            # A single column name such as 'C'.
            indices.append(_excel2num(area))
    return indices
def _maybe_convert_usecols(usecols):
    """
    Convert `usecols` into a compatible format for parsing in `parsers.py`.
    Parameters
    ----------
    usecols : object
        The use-columns object to potentially convert.
    Returns
    -------
    converted : object
        The compatible format of `usecols`.
    """
    if usecols is None:
        return usecols
    # Deprecated: a bare integer meant "columns 0..usecols inclusive".
    if is_integer(usecols):
        warnings.warn(("Passing in an integer for `usecols` has been "
                       "deprecated. Please pass in a list of int from "
                       "0 to `usecols` inclusive instead."),
                      FutureWarning, stacklevel=2)
        return lrange(usecols + 1)
    # Strings like 'A:E,G' are Excel-style column specs.
    if isinstance(usecols, compat.string_types):
        return _range2cols(usecols)
    # Anything else (list of ints, callable, ...) is passed through untouched.
    return usecols
def _validate_freeze_panes(freeze_panes):
if freeze_panes is not None:
if (
len(freeze_panes) == 2 and
all(isinstance(item, int) for item in freeze_panes)
):
return True
raise ValueError("freeze_panes must be of form (row, column)"
" where row and column are integers")
# freeze_panes wasn't specified, return False so it won't be applied
# to output sheet
return False
def _trim_excel_header(row):
# trim header row so auto-index inference works
# xlrd uses '' , openpyxl None
while len(row) > 0 and (row[0] == '' or row[0] is None):
row = row[1:]
return row
def _maybe_convert_to_string(row):
    """
    Convert elements in a row to string from Unicode.

    This is purely a Python 2.x patch and is performed ONLY when all
    elements of the row are string-like.

    Parameters
    ----------
    row : array-like
        The row of data to convert.

    Returns
    -------
    converted : array-like
    """
    if compat.PY2:
        as_bytes = []
        convertible = True
        for element in row:
            if not isinstance(element, compat.string_types):
                # A non-string element: leave the row untouched.
                convertible = False
                break
            try:
                as_bytes.append(str(element))
            except UnicodeEncodeError:
                # Not representable as a byte string: leave the row untouched.
                convertible = False
                break
        if convertible:
            row = as_bytes
    return row
def _fill_mi_header(row, control_row):
    """Forward fills blank entries in row, but only inside the same parent index

    Used for creating headers in Multiindex.

    Parameters
    ----------
    row : list
        List of items in a single row.
    control_row : list of bool
        Helps to determine if particular column is in same parent index as the
        previous value. Used to stop propagation of empty cells between
        different indexes.

    Returns
    ----------
    Returns changed row and control_row
    """
    fill_value = row[0]
    for idx in range(1, len(row)):
        if not control_row[idx]:
            # Crossed a parent-index boundary: restart the fill value.
            fill_value = row[idx]
        if row[idx] == '' or row[idx] is None:
            # Blank cell inside the same parent index: forward-fill it.
            row[idx] = fill_value
        else:
            # Non-blank cell stops propagation into subsequent indexes.
            control_row[idx] = False
            fill_value = row[idx]
    return _maybe_convert_to_string(row), control_row
# When index_col is not None, the popped header-name cell is blanked out.
def _pop_header_name(row, index_col):
    """
    Pop the header name for MultiIndex parsing.

    Parameters
    ----------
    row : list
        The data row to parse for the header name.
    index_col : int, list
        The index columns for our data. Assumed to be non-null.

    Returns
    -------
    header_name : str
        The extracted header name.
    trimmed_row : list
        The original data row with the header name removed.
    """
    # The header name lives in the right-most index column.
    if is_list_like(index_col):
        pos = max(index_col)
    else:
        pos = index_col
    header_name = row[pos]
    if header_name == "":
        header_name = None
    trimmed_row = row[:pos] + [''] + row[pos + 1:]
    return header_name, trimmed_row
@add_metaclass(abc.ABCMeta)
class ExcelWriter(object):
    """
    Class for writing DataFrame objects into excel sheets, default is to use
    xlwt for xls, openpyxl for xlsx. See DataFrame.to_excel for typical usage.

    Parameters
    ----------
    path : string
        Path to xls or xlsx file.
    engine : string (optional)
        Engine to use for writing. If None, defaults to
        ``io.excel.<extension>.writer``. NOTE: can only be passed as a keyword
        argument.
    date_format : string, default None
        Format string for dates written into Excel files (e.g. 'YYYY-MM-DD')
    datetime_format : string, default None
        Format string for datetime objects written into Excel files
        (e.g. 'YYYY-MM-DD HH:MM:SS')
    mode : {'w' or 'a'}, default 'w'
        File mode to use (write or append).

    .. versionadded:: 0.24.0

    Attributes
    ----------
    None

    Methods
    -------
    None

    Notes
    -----
    None of the methods and properties are considered public.
    For compatibility with CSV writers, ExcelWriter serializes lists
    and dicts to strings before writing.

    Examples
    --------
    Default usage:
    >>> with ExcelWriter('path_to_file.xlsx') as writer:
    ...     df.to_excel(writer)
    To write to separate sheets in a single file:
    >>> with ExcelWriter('path_to_file.xlsx') as writer:
    ...     df1.to_excel(writer, sheet_name='Sheet1')
    ...     df2.to_excel(writer, sheet_name='Sheet2')
    You can set the date format or datetime format:
    >>> with ExcelWriter('path_to_file.xlsx',
                         date_format='YYYY-MM-DD',
                         datetime_format='YYYY-MM-DD HH:MM:SS') as writer:
    ...     df.to_excel(writer)
    You can also append to an existing Excel file:
    >>> with ExcelWriter('path_to_file.xlsx', mode='a') as writer:
    ...     df.to_excel(writer, sheet_name='Sheet3')
    """
    # Defining an ExcelWriter implementation (see abstract methods for more...)
    # - Mandatory
    #   - ``write_cells(self, cells, sheet_name=None, startrow=0, startcol=0)``
    #     --> called to write additional DataFrames to disk
    #   - ``supported_extensions`` (tuple of supported extensions), used to
    #      check that engine supports the given extension.
    #   - ``engine`` - string that gives the engine name. Necessary to
    #     instantiate class directly and bypass ``ExcelWriterMeta`` engine
    #     lookup.
    #   - ``save(self)`` --> called to save file to disk
    # - Mostly mandatory (i.e. should at least exist)
    #   - book, cur_sheet, path
    # - Optional:
    #   - ``__init__(self, path, engine=None, **kwargs)`` --> always called
    #     with path as first argument.
    # You also need to register the class with ``register_writer()``.
    # Technically, ExcelWriter implementations don't need to subclass
    # ExcelWriter.
    def __new__(cls, path, engine=None, **kwargs):
        # only switch class if generic(ExcelWriter)
        if issubclass(cls, ExcelWriter):
            # Resolve the engine from the path extension and the
            # ``io.excel.<ext>.writer`` option when the caller did not pick one.
            if engine is None or (isinstance(engine, string_types) and
                                  engine == 'auto'):
                if isinstance(path, string_types):
                    ext = os.path.splitext(path)[-1][1:]
                else:
                    # Non-string paths (buffers) default to the xlsx engine.
                    ext = 'xlsx'
                try:
                    engine = config.get_option('io.excel.{ext}.writer'
                                               .format(ext=ext))
                    if engine == 'auto':
                        engine = _get_default_writer(ext)
                except KeyError:
                    error = ValueError("No engine for filetype: '{ext}'"
                                       .format(ext=ext))
                    raise error
            # Swap in the concrete writer subclass for this engine.
            cls = get_writer(engine)
        return object.__new__(cls)
    # declare external properties you can count on
    book = None
    # NOTE(review): the class attribute is spelled ``curr_sheet`` but the
    # instances and the comment above use ``cur_sheet`` — looks like a stale
    # name; confirm before relying on the class-level default.
    curr_sheet = None
    path = None
    @abc.abstractproperty
    def supported_extensions(self):
        "extensions that writer engine supports"
        pass
    @abc.abstractproperty
    def engine(self):
        "name of engine"
        pass
    @abc.abstractmethod
    def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0,
                    freeze_panes=None):
        """
        Write given formatted cells into Excel an excel sheet

        Parameters
        ----------
        cells : generator
            cell of formatted data to save to Excel sheet
        sheet_name : string, default None
            Name of Excel sheet, if None, then use self.cur_sheet
        startrow : upper left cell row to dump data frame
        startcol : upper left cell column to dump data frame
        freeze_panes: integer tuple of length 2
            contains the bottom-most row and right-most column to freeze
        """
        pass
    @abc.abstractmethod
    def save(self):
        """
        Save workbook to disk.
        """
        pass
    def __init__(self, path, engine=None,
                 date_format=None, datetime_format=None, mode='w',
                 **engine_kwargs):
        # validate that this engine can handle the extension
        if isinstance(path, string_types):
            ext = os.path.splitext(path)[-1]
        else:
            ext = 'xls' if engine == 'xlwt' else 'xlsx'
        self.check_extension(ext)
        self.path = path
        # Map of sheet name -> engine-specific worksheet object.
        self.sheets = {}
        self.cur_sheet = None
        if date_format is None:
            self.date_format = 'YYYY-MM-DD'
        else:
            self.date_format = date_format
        if datetime_format is None:
            self.datetime_format = 'YYYY-MM-DD HH:MM:SS'
        else:
            self.datetime_format = datetime_format
        self.mode = mode
    def __fspath__(self):
        # os.PathLike support: expose the target path as a string.
        return _stringify_path(self.path)
    def _get_sheet_name(self, sheet_name):
        # Fall back to cur_sheet when no explicit sheet name is given.
        if sheet_name is None:
            sheet_name = self.cur_sheet
        if sheet_name is None:  # pragma: no cover
            raise ValueError('Must pass explicit sheet_name or set '
                             'cur_sheet property')
        return sheet_name
    def _value_with_fmt(self, val):
        """Convert numpy types to Python types for the Excel writers.

        Parameters
        ----------
        val : object
            Value to be written into cells

        Returns
        -------
        Tuple with the first element being the converted value and the second
            being an optional format
        """
        fmt = None
        if is_integer(val):
            val = int(val)
        elif is_float(val):
            val = float(val)
        elif is_bool(val):
            val = bool(val)
        elif isinstance(val, datetime):
            fmt = self.datetime_format
        elif isinstance(val, date):
            fmt = self.date_format
        elif isinstance(val, timedelta):
            # Excel stores durations as fractional days; format '0' shows the
            # raw number.
            val = val.total_seconds() / float(86400)
            fmt = '0'
        else:
            # Anything else (lists, dicts, ...) is serialized to a string.
            val = compat.to_str(val)
        return val, fmt
    @classmethod
    def check_extension(cls, ext):
        """checks that path's extension against the Writer's supported
        extensions. If it isn't supported, raises UnsupportedFiletypeError."""
        if ext.startswith('.'):
            ext = ext[1:]
        if not any(ext in extension for extension in cls.supported_extensions):
            msg = (u("Invalid extension for engine '{engine}': '{ext}'")
                   .format(engine=pprint_thing(cls.engine),
                           ext=pprint_thing(ext)))
            raise ValueError(msg)
        else:
            return True
    # Allow use as a contextmanager
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Saving happens on exit via close() -> save().
        self.close()
    def close(self):
        """synonym for save, to make it more file-like"""
        return self.save()
class _OpenpyxlWriter(ExcelWriter):
    # ExcelWriter backend built on openpyxl; handles .xlsx/.xlsm and is the
    # only bundled writer engine that supports append mode ('a').
    engine = 'openpyxl'
    supported_extensions = ('.xlsx', '.xlsm')
    def __init__(self, path, engine=None, mode='w', **engine_kwargs):
        # Use the openpyxl module as the Excel writer.
        from openpyxl.workbook import Workbook
        super(_OpenpyxlWriter, self).__init__(path, mode=mode, **engine_kwargs)
        if self.mode == 'a':  # Load from existing workbook
            from openpyxl import load_workbook
            book = load_workbook(self.path)
            self.book = book
        else:
            # Create workbook object with default optimized_write=True.
            self.book = Workbook()
            # Drop the auto-created default worksheet so only explicitly
            # written sheets end up in the file.
            if self.book.worksheets:
                try:
                    self.book.remove(self.book.worksheets[0])
                except AttributeError:
                    # compat - for openpyxl <= 2.4
                    self.book.remove_sheet(self.book.worksheets[0])
    def save(self):
        """
        Save workbook to disk.
        """
        return self.book.save(self.path)
    @classmethod
    def _convert_to_style(cls, style_dict):
        """
        converts a style_dict to an openpyxl style object

        Parameters
        ----------
        style_dict : style dictionary to convert
        """
        # NOTE(review): imports ``openpyxl.style`` (the openpyxl 1.x module
        # layout) — presumably a legacy path for very old openpyxl; confirm
        # it is still reachable before relying on it.
        from openpyxl.style import Style
        xls_style = Style()
        for key, value in style_dict.items():
            for nk, nv in value.items():
                if key == "borders":
                    (xls_style.borders.__getattribute__(nk)
                     .__setattr__('border_style', nv))
                else:
                    xls_style.__getattribute__(key).__setattr__(nk, nv)
        return xls_style
    @classmethod
    def _convert_to_style_kwargs(cls, style_dict):
        """
        Convert a style_dict to a set of kwargs suitable for initializing
        or updating-on-copy an openpyxl v2 style object

        Parameters
        ----------
        style_dict : dict
            A dict with zero or more of the following keys (or their synonyms).
                'font'
                'fill'
                'border' ('borders')
                'alignment'
                'number_format'
                'protection'

        Returns
        -------
        style_kwargs : dict
            A dict with the same, normalized keys as ``style_dict`` but each
            value has been replaced with a native openpyxl style object of the
            appropriate class.
        """
        _style_key_map = {
            'borders': 'border',
        }
        style_kwargs = {}
        for k, v in style_dict.items():
            if k in _style_key_map:
                k = _style_key_map[k]
            # Dispatch to the matching _convert_to_<key> classmethod; unknown
            # keys fall through to a lambda returning None and are dropped.
            _conv_to_x = getattr(cls, '_convert_to_{k}'.format(k=k),
                                 lambda x: None)
            new_v = _conv_to_x(v)
            if new_v:
                style_kwargs[k] = new_v
        return style_kwargs
    @classmethod
    def _convert_to_color(cls, color_spec):
        """
        Convert ``color_spec`` to an openpyxl v2 Color object

        Parameters
        ----------
        color_spec : str, dict
            A 32-bit ARGB hex string, or a dict with zero or more of the
            following keys.
                'rgb'
                'indexed'
                'auto'
                'theme'
                'tint'
                'index'
                'type'

        Returns
        -------
        color : openpyxl.styles.Color
        """
        from openpyxl.styles import Color
        if isinstance(color_spec, str):
            return Color(color_spec)
        else:
            return Color(**color_spec)
    @classmethod
    def _convert_to_font(cls, font_dict):
        """
        Convert ``font_dict`` to an openpyxl v2 Font object

        Parameters
        ----------
        font_dict : dict
            A dict with zero or more of the following keys (or their synonyms).
                'name'
                'size' ('sz')
                'bold' ('b')
                'italic' ('i')
                'underline' ('u')
                'strikethrough' ('strike')
                'color'
                'vertAlign' ('vertalign')
                'charset'
                'scheme'
                'family'
                'outline'
                'shadow'
                'condense'

        Returns
        -------
        font : openpyxl.styles.Font
        """
        from openpyxl.styles import Font
        # Map short openpyxl-v1 style synonyms onto the v2 kwarg names.
        _font_key_map = {
            'sz': 'size',
            'b': 'bold',
            'i': 'italic',
            'u': 'underline',
            'strike': 'strikethrough',
            'vertalign': 'vertAlign',
        }
        font_kwargs = {}
        for k, v in font_dict.items():
            if k in _font_key_map:
                k = _font_key_map[k]
            if k == 'color':
                v = cls._convert_to_color(v)
            font_kwargs[k] = v
        return Font(**font_kwargs)
    @classmethod
    def _convert_to_stop(cls, stop_seq):
        """
        Convert ``stop_seq`` to a list of openpyxl v2 Color objects,
        suitable for initializing the ``GradientFill`` ``stop`` parameter.

        Parameters
        ----------
        stop_seq : iterable
            An iterable that yields objects suitable for consumption by
            ``_convert_to_color``.

        Returns
        -------
        stop : list of openpyxl.styles.Color
        """
        # NOTE(review): on Python 3 this returns a lazy ``map`` object rather
        # than a list — confirm GradientFill accepts an iterator.
        return map(cls._convert_to_color, stop_seq)
    @classmethod
    def _convert_to_fill(cls, fill_dict):
        """
        Convert ``fill_dict`` to an openpyxl v2 Fill object

        Parameters
        ----------
        fill_dict : dict
            A dict with one or more of the following keys (or their synonyms),
                'fill_type' ('patternType', 'patterntype')
                'start_color' ('fgColor', 'fgcolor')
                'end_color' ('bgColor', 'bgcolor')
            or one or more of the following keys (or their synonyms).
                'type' ('fill_type')
                'degree'
                'left'
                'right'
                'top'
                'bottom'
                'stop'

        Returns
        -------
        fill : openpyxl.styles.Fill
        """
        from openpyxl.styles import PatternFill, GradientFill
        _pattern_fill_key_map = {
            'patternType': 'fill_type',
            'patterntype': 'fill_type',
            'fgColor': 'start_color',
            'fgcolor': 'start_color',
            'bgColor': 'end_color',
            'bgcolor': 'end_color',
        }
        _gradient_fill_key_map = {
            'fill_type': 'type',
        }
        # Build kwargs for both fill flavors; which one applies is decided
        # at construction time below.
        pfill_kwargs = {}
        gfill_kwargs = {}
        for k, v in fill_dict.items():
            pk = gk = None
            if k in _pattern_fill_key_map:
                pk = _pattern_fill_key_map[k]
            if k in _gradient_fill_key_map:
                gk = _gradient_fill_key_map[k]
            if pk in ['start_color', 'end_color']:
                v = cls._convert_to_color(v)
            if gk == 'stop':
                v = cls._convert_to_stop(v)
            if pk:
                pfill_kwargs[pk] = v
            elif gk:
                gfill_kwargs[gk] = v
            else:
                # Unmapped keys are offered to both constructors.
                pfill_kwargs[k] = v
                gfill_kwargs[k] = v
        # Prefer a PatternFill; fall back to GradientFill when the kwargs
        # don't fit the PatternFill signature.
        try:
            return PatternFill(**pfill_kwargs)
        except TypeError:
            return GradientFill(**gfill_kwargs)
    @classmethod
    def _convert_to_side(cls, side_spec):
        """
        Convert ``side_spec`` to an openpyxl v2 Side object

        Parameters
        ----------
        side_spec : str, dict
            A string specifying the border style, or a dict with zero or more
            of the following keys (or their synonyms).
                'style' ('border_style')
                'color'

        Returns
        -------
        side : openpyxl.styles.Side
        """
        from openpyxl.styles import Side
        _side_key_map = {
            'border_style': 'style',
        }
        if isinstance(side_spec, str):
            return Side(style=side_spec)
        side_kwargs = {}
        for k, v in side_spec.items():
            if k in _side_key_map:
                k = _side_key_map[k]
            if k == 'color':
                v = cls._convert_to_color(v)
            side_kwargs[k] = v
        return Side(**side_kwargs)
    @classmethod
    def _convert_to_border(cls, border_dict):
        """
        Convert ``border_dict`` to an openpyxl v2 Border object

        Parameters
        ----------
        border_dict : dict
            A dict with zero or more of the following keys (or their synonyms).
                'left'
                'right'
                'top'
                'bottom'
                'diagonal'
                'diagonal_direction'
                'vertical'
                'horizontal'
                'diagonalUp' ('diagonalup')
                'diagonalDown' ('diagonaldown')
                'outline'

        Returns
        -------
        border : openpyxl.styles.Border
        """
        from openpyxl.styles import Border
        _border_key_map = {
            'diagonalup': 'diagonalUp',
            'diagonaldown': 'diagonalDown',
        }
        border_kwargs = {}
        for k, v in border_dict.items():
            if k in _border_key_map:
                k = _border_key_map[k]
            if k == 'color':
                v = cls._convert_to_color(v)
            # The per-edge values are themselves Side specifications.
            if k in ['left', 'right', 'top', 'bottom', 'diagonal']:
                v = cls._convert_to_side(v)
            border_kwargs[k] = v
        return Border(**border_kwargs)
    @classmethod
    def _convert_to_alignment(cls, alignment_dict):
        """
        Convert ``alignment_dict`` to an openpyxl v2 Alignment object

        Parameters
        ----------
        alignment_dict : dict
            A dict with zero or more of the following keys (or their synonyms).
                'horizontal'
                'vertical'
                'text_rotation'
                'wrap_text'
                'shrink_to_fit'
                'indent'

        Returns
        -------
        alignment : openpyxl.styles.Alignment
        """
        from openpyxl.styles import Alignment
        return Alignment(**alignment_dict)
    @classmethod
    def _convert_to_number_format(cls, number_format_dict):
        """
        Convert ``number_format_dict`` to an openpyxl v2.1.0 number format
        initializer.

        Parameters
        ----------
        number_format_dict : dict
            A dict with zero or more of the following keys.
                'format_code' : str

        Returns
        -------
        number_format : str
        """
        return number_format_dict['format_code']
    @classmethod
    def _convert_to_protection(cls, protection_dict):
        """
        Convert ``protection_dict`` to an openpyxl v2 Protection object.

        Parameters
        ----------
        protection_dict : dict
            A dict with zero or more of the following keys.
                'locked'
                'hidden'

        Returns
        -------
        """
        from openpyxl.styles import Protection
        return Protection(**protection_dict)
    def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0,
                    freeze_panes=None):
        # Write the frame cells using openpyxl.
        sheet_name = self._get_sheet_name(sheet_name)
        # Cache converted style kwargs keyed by the style dict's string repr,
        # so identical styles are converted only once per call.
        _style_cache = {}
        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.create_sheet()
            wks.title = sheet_name
            self.sheets[sheet_name] = wks
        if _validate_freeze_panes(freeze_panes):
            # openpyxl cell coordinates are 1-based, hence the +1 offsets.
            wks.freeze_panes = wks.cell(row=freeze_panes[0] + 1,
                                        column=freeze_panes[1] + 1)
        for cell in cells:
            xcell = wks.cell(
                row=startrow + cell.row + 1,
                column=startcol + cell.col + 1
            )
            xcell.value, fmt = self._value_with_fmt(cell.val)
            if fmt:
                xcell.number_format = fmt
            style_kwargs = {}
            if cell.style:
                key = str(cell.style)
                style_kwargs = _style_cache.get(key)
                if style_kwargs is None:
                    style_kwargs = self._convert_to_style_kwargs(cell.style)
                    _style_cache[key] = style_kwargs
            if style_kwargs:
                for k, v in style_kwargs.items():
                    setattr(xcell, k, v)
            if cell.mergestart is not None and cell.mergeend is not None:
                wks.merge_cells(
                    start_row=startrow + cell.row + 1,
                    start_column=startcol + cell.col + 1,
                    end_column=startcol + cell.mergeend + 1,
                    end_row=startrow + cell.mergestart + 1
                )
                # When cells are merged only the top-left cell is preserved
                # The behaviour of the other cells in a merged range is
                # undefined
                if style_kwargs:
                    first_row = startrow + cell.row + 1
                    last_row = startrow + cell.mergestart + 1
                    first_col = startcol + cell.col + 1
                    last_col = startcol + cell.mergeend + 1
                    for row in range(first_row, last_row + 1):
                        for col in range(first_col, last_col + 1):
                            if row == first_row and col == first_col:
                                # Ignore first cell. It is already handled.
                                continue
                            xcell = wks.cell(column=col, row=row)
                            for k, v in style_kwargs.items():
                                setattr(xcell, k, v)
# Make the openpyxl engine discoverable through ExcelWriter's registry.
register_writer(_OpenpyxlWriter)
class _XlwtWriter(ExcelWriter):
    # ExcelWriter backend built on xlwt for legacy .xls files (write-only).
    engine = 'xlwt'
    supported_extensions = ('.xls',)
    def __init__(self, path, engine=None, encoding=None, mode='w',
                 **engine_kwargs):
        # Use the xlwt module as the Excel writer.
        import xlwt
        # Forward the engine name to the base __init__ through the kwargs.
        engine_kwargs['engine'] = engine
        if mode == 'a':
            raise ValueError('Append mode is not supported with xlwt!')
        super(_XlwtWriter, self).__init__(path, mode=mode, **engine_kwargs)
        if encoding is None:
            encoding = 'ascii'
        self.book = xlwt.Workbook(encoding=encoding)
        # Pre-built cell styles for datetime and date values.
        self.fm_datetime = xlwt.easyxf(num_format_str=self.datetime_format)
        self.fm_date = xlwt.easyxf(num_format_str=self.date_format)
    def save(self):
        """
        Save workbook to disk.
        """
        return self.book.save(self.path)
    def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0,
                    freeze_panes=None):
        # Write the frame cells using xlwt.
        sheet_name = self._get_sheet_name(sheet_name)
        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.add_sheet(sheet_name)
            self.sheets[sheet_name] = wks
        if _validate_freeze_panes(freeze_panes):
            wks.set_panes_frozen(True)
            wks.set_horz_split_pos(freeze_panes[0])
            wks.set_vert_split_pos(freeze_panes[1])
        # Cache converted styles keyed by the JSON repr of the style dict
        # (plus number format) so identical styles are converted only once.
        style_dict = {}
        for cell in cells:
            val, fmt = self._value_with_fmt(cell.val)
            stylekey = json.dumps(cell.style)
            if fmt:
                stylekey += fmt
            if stylekey in style_dict:
                style = style_dict[stylekey]
            else:
                style = self._convert_to_style(cell.style, fmt)
                style_dict[stylekey] = style
            if cell.mergestart is not None and cell.mergeend is not None:
                wks.write_merge(startrow + cell.row,
                                startrow + cell.mergestart,
                                startcol + cell.col,
                                startcol + cell.mergeend,
                                val, style)
            else:
                wks.write(startrow + cell.row,
                          startcol + cell.col,
                          val, style)
    @classmethod
    def _style_to_xlwt(cls, item, firstlevel=True, field_sep=',',
                       line_sep=';'):
        """helper which recursively generate an xlwt easy style string
        for example:

            hstyle = {"font": {"bold": True},
                      "border": {"top": "thin",
                                 "right": "thin",
                                 "bottom": "thin",
                                 "left": "thin"},
                      "align": {"horiz": "center"}}

        will be converted to

            font: bold on; \
            border: top thin, right thin, bottom thin, left thin; \
            align: horiz center;
        """
        if hasattr(item, 'items'):
            if firstlevel:
                # Top level: "key: value" groups joined with line_sep.
                it = ["{key}: {val}"
                      .format(key=key, val=cls._style_to_xlwt(value, False))
                      for key, value in item.items()]
                out = "{sep} ".format(sep=(line_sep).join(it))
                return out
            else:
                # Nested level: "key value" pairs joined with field_sep.
                it = ["{key} {val}"
                      .format(key=key, val=cls._style_to_xlwt(value, False))
                      for key, value in item.items()]
                out = "{sep} ".format(sep=(field_sep).join(it))
                return out
        else:
            # Leaf value: stringify and translate booleans to on/off.
            item = "{item}".format(item=item)
            item = item.replace("True", "on")
            item = item.replace("False", "off")
            return item
    @classmethod
    def _convert_to_style(cls, style_dict, num_format_str=None):
        """
        converts a style_dict to an xlwt style object

        Parameters
        ----------
        style_dict : style dictionary to convert
        num_format_str : optional number format string
        """
        import xlwt
        if style_dict:
            xlwt_stylestr = cls._style_to_xlwt(style_dict)
            style = xlwt.easyxf(xlwt_stylestr, field_sep=',', line_sep=';')
        else:
            style = xlwt.XFStyle()
        if num_format_str is not None:
            style.num_format_str = num_format_str
        return style
# Make the xlwt engine discoverable through ExcelWriter's registry.
register_writer(_XlwtWriter)
class _XlsxStyler(object):
    # Map from openpyxl-oriented styles to flatter xlsxwriter representation
    # Ordering necessary for both determinism and because some are keyed by
    # prefixes of others.
    # Each entry maps a style group to pairs of (path-into-nested-dict,
    # flat xlsxwriter property name); the first matching path wins.
    STYLE_MAPPING = {
        'font': [
            (('name',), 'font_name'),
            (('sz',), 'font_size'),
            (('size',), 'font_size'),
            (('color', 'rgb',), 'font_color'),
            (('color',), 'font_color'),
            (('b',), 'bold'),
            (('bold',), 'bold'),
            (('i',), 'italic'),
            (('italic',), 'italic'),
            (('u',), 'underline'),
            (('underline',), 'underline'),
            (('strike',), 'font_strikeout'),
            (('vertAlign',), 'font_script'),
            (('vertalign',), 'font_script'),
        ],
        'number_format': [
            (('format_code',), 'num_format'),
            ((), 'num_format',),
        ],
        'protection': [
            (('locked',), 'locked'),
            (('hidden',), 'hidden'),
        ],
        'alignment': [
            (('horizontal',), 'align'),
            (('vertical',), 'valign'),
            (('text_rotation',), 'rotation'),
            (('wrap_text',), 'text_wrap'),
            (('indent',), 'indent'),
            (('shrink_to_fit',), 'shrink'),
        ],
        'fill': [
            (('patternType',), 'pattern'),
            (('patterntype',), 'pattern'),
            (('fill_type',), 'pattern'),
            (('start_color', 'rgb',), 'fg_color'),
            (('fgColor', 'rgb',), 'fg_color'),
            (('fgcolor', 'rgb',), 'fg_color'),
            (('start_color',), 'fg_color'),
            (('fgColor',), 'fg_color'),
            (('fgcolor',), 'fg_color'),
            (('end_color', 'rgb',), 'bg_color'),
            (('bgColor', 'rgb',), 'bg_color'),
            (('bgcolor', 'rgb',), 'bg_color'),
            (('end_color',), 'bg_color'),
            (('bgColor',), 'bg_color'),
            (('bgcolor',), 'bg_color'),
        ],
        'border': [
            (('color', 'rgb',), 'border_color'),
            (('color',), 'border_color'),
            (('style',), 'border'),
            (('top', 'color', 'rgb',), 'top_color'),
            (('top', 'color',), 'top_color'),
            (('top', 'style',), 'top'),
            (('top',), 'top'),
            (('right', 'color', 'rgb',), 'right_color'),
            (('right', 'color',), 'right_color'),
            (('right', 'style',), 'right'),
            (('right',), 'right'),
            (('bottom', 'color', 'rgb',), 'bottom_color'),
            (('bottom', 'color',), 'bottom_color'),
            (('bottom', 'style',), 'bottom'),
            (('bottom',), 'bottom'),
            (('left', 'color', 'rgb',), 'left_color'),
            (('left', 'color',), 'left_color'),
            (('left', 'style',), 'left'),
            (('left',), 'left'),
        ],
    }
    @classmethod
    def convert(cls, style_dict, num_format_str=None):
        """
        converts a style_dict to an xlsxwriter format dict

        Parameters
        ----------
        style_dict : style dictionary to convert
        num_format_str : optional number format string
        """
        # Create a XlsxWriter format object.
        props = {}
        if num_format_str is not None:
            props['num_format'] = num_format_str
        if style_dict is None:
            return props
        # Normalize the 'borders' synonym without mutating the caller's dict.
        if 'borders' in style_dict:
            style_dict = style_dict.copy()
            style_dict['border'] = style_dict.pop('borders')
        for style_group_key, style_group in style_dict.items():
            for src, dst in cls.STYLE_MAPPING.get(style_group_key, []):
                # src is a sequence of keys into a nested dict
                # dst is a flat key
                if dst in props:
                    continue
                v = style_group
                for k in src:
                    try:
                        v = v[k]
                    except (KeyError, TypeError):
                        break
                else:
                    # Only reached when every key in src resolved.
                    props[dst] = v
        if isinstance(props.get('pattern'), string_types):
            # TODO: support other fill patterns
            props['pattern'] = 0 if props['pattern'] == 'none' else 1
        # Translate openpyxl border-style names to xlsxwriter's numeric codes;
        # unknown names fall back to 2.
        for k in ['border', 'top', 'right', 'bottom', 'left']:
            if isinstance(props.get(k), string_types):
                try:
                    props[k] = ['none', 'thin', 'medium', 'dashed', 'dotted',
                                'thick', 'double', 'hair', 'mediumDashed',
                                'dashDot', 'mediumDashDot', 'dashDotDot',
                                'mediumDashDotDot',
                                'slantDashDot'].index(props[k])
                except ValueError:
                    props[k] = 2
        if isinstance(props.get('font_script'), string_types):
            props['font_script'] = ['baseline', 'superscript',
                                    'subscript'].index(props['font_script'])
        if isinstance(props.get('underline'), string_types):
            props['underline'] = {'none': 0, 'single': 1, 'double': 2,
                                  'singleAccounting': 33,
                                  'doubleAccounting': 34}[props['underline']]
        return props
class _XlsxWriter(ExcelWriter):
    # ExcelWriter backend built on xlsxwriter (.xlsx, write-only).
    engine = 'xlsxwriter'
    supported_extensions = ('.xlsx',)
    def __init__(self, path, engine=None,
                 date_format=None, datetime_format=None, mode='w',
                 **engine_kwargs):
        # Use the xlsxwriter module as the Excel writer.
        import xlsxwriter
        if mode == 'a':
            raise ValueError('Append mode is not supported with xlsxwriter!')
        super(_XlsxWriter, self).__init__(path, engine=engine,
                                          date_format=date_format,
                                          datetime_format=datetime_format,
                                          mode=mode,
                                          **engine_kwargs)
        self.book = xlsxwriter.Workbook(path, **engine_kwargs)
    def save(self):
        """
        Save workbook to disk.
        """
        # xlsxwriter writes the file when the workbook is closed.
        return self.book.close()
    def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0,
                    freeze_panes=None):
        # Write the frame cells using xlsxwriter.
        sheet_name = self._get_sheet_name(sheet_name)
        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.add_worksheet(sheet_name)
            self.sheets[sheet_name] = wks
        # Cache converted formats per style (plus number format).
        style_dict = {'null': None}
        if _validate_freeze_panes(freeze_panes):
            wks.freeze_panes(*(freeze_panes))
        for cell in cells:
            val, fmt = self._value_with_fmt(cell.val)
            stylekey = json.dumps(cell.style)
            if fmt:
                stylekey += fmt
            if stylekey in style_dict:
                style = style_dict[stylekey]
            else:
                style = self.book.add_format(
                    _XlsxStyler.convert(cell.style, fmt))
                style_dict[stylekey] = style
            if cell.mergestart is not None and cell.mergeend is not None:
                # NOTE(review): merge_range writes the raw ``cell.val`` while
                # the non-merged branch writes the converted ``val`` — looks
                # inconsistent; confirm whether this is intentional.
                wks.merge_range(startrow + cell.row,
                                startcol + cell.col,
                                startrow + cell.mergestart,
                                startcol + cell.mergeend,
                                cell.val, style)
            else:
                wks.write(startrow + cell.row,
                          startcol + cell.col,
                          val, style)
# Make the xlsxwriter engine discoverable through ExcelWriter's registry.
register_writer(_XlsxWriter)
| GuessWhoSamFoo/pandas | pandas/io/excel.py | Python | bsd-3-clause | 66,902 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs hello_world.py, through hello_world.isolate, locally in a temporary
directory.
"""
import hashlib
import os
import shutil
import subprocess
import sys
import tempfile
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
def run(cmd):
    """Echo `cmd`, then execute the sibling script under this interpreter.

    The command is resolved relative to the parent of this file's directory
    and, on POSIX platforms, wrapped with `time -p` for timing output.
    Raises CalledProcessError on a non-zero exit code.
    """
    print('Running: %s' % ' '.join(cmd))
    full_cmd = [sys.executable, os.path.join(ROOT_DIR, '..', cmd[0])] + cmd[1:]
    if sys.platform != 'win32':
        full_cmd = ['time', '-p'] + full_cmd
    subprocess.check_call(full_cmd)
def main():
    """Archive hello_world.isolate into a hash table, then run it from cache.

    Returns 0 on success; any failure propagates as an exception from run().
    """
    # Uncomment to make isolate.py to output logs.
    #os.environ['ISOLATE_DEBUG'] = '1'
    # All the files are put in a temporary directory. This is optional and
    # simply done so the current directory doesn't have the following files
    # created:
    # - hello_world.isolated
    # - hello_world.isolated.state
    # - cache/
    # - hashtable/
    # Create the directory *before* entering the try block: if mkdtemp()
    # itself fails, the finally clause must not hit an unbound `tempdir`.
    tempdir = tempfile.mkdtemp(prefix='hello_world')
    try:
        cachedir = os.path.join(tempdir, 'cache')
        hashtabledir = os.path.join(tempdir, 'hashtable')
        isolateddir = os.path.join(tempdir, 'isolated')
        isolated = os.path.join(isolateddir, 'hello_world.isolated')
        os.mkdir(isolateddir)
        print('Archiving')
        run(
            [
                'isolate.py',
                'hashtable',
                '--isolate', os.path.join(ROOT_DIR, 'hello_world.isolate'),
                '--isolated', isolated,
                '--outdir', hashtabledir,
            ])
        print('\nRunning')
        # Use a context manager so the file handle isn't leaked.
        with open(isolated, 'rb') as f:
            hashval = hashlib.sha1(f.read()).hexdigest()
        run(
            [
                'run_isolated.py',
                '--cache', cachedir,
                '--remote', hashtabledir,
                '--hash', hashval,
            ])
    finally:
        shutil.rmtree(tempdir)
    return 0
# Script entry point: exit with main()'s return code.
if __name__ == '__main__':
    sys.exit(main())
| leighpauls/k2cro4 | tools/swarm_client/example/run_example_local.py | Python | bsd-3-clause | 1,893 |
# Drive the shared periodicity test harness for a Business-Hour signal:
# 360 data points at 'BH' frequency with a cycle length of 25.
import tests.periodicities.period_test as per
per.buildModel((360 , 'BH' , 25));
| antoinecarme/pyaf | tests/periodicities/Business_Hour/Cycle_Business_Hour_25_BH_360.py | Python | bsd-3-clause | 83 |
<?php
namespace app\models;
use Yii;
/**
* This is the model class for table "wl_admin".
*
* @property integer $id
* @property string $name
* @property string $pwd
*/
class Admin extends \yii\db\ActiveRecord
{
    /**
     * @inheritdoc
     *
     * Name of the database table backing this ActiveRecord.
     */
    public static function tableName()
    {
        return 'wl_admin';
    }
    /**
     * @inheritdoc
     *
     * NOTE(review): neither `name` nor `pwd` carries a `required` rule, so
     * empty values validate — confirm this is intentional. The 32-char cap
     * on `pwd` suggests an MD5 hex digest is stored; TODO confirm.
     */
    public function rules()
    {
        return [
            [['name'], 'string', 'max' => 30],
            [['pwd'], 'string', 'max' => 32],
        ];
    }
    /**
     * @inheritdoc
     *
     * Human-readable labels used by forms, grids and error messages.
     */
    public function attributeLabels()
    {
        return [
            'id' => 'ID',
            'name' => 'Name',
            'pwd' => 'Pwd',
        ];
    }
}
| yanan001/weiliang | models/Admin.php | PHP | bsd-3-clause | 729 |
"""
byceps.services.board.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from . import board_access_grant, last_category_view, last_topic_view
| homeworkprod/byceps | byceps/services/board/dbmodels/__init__.py | Python | bsd-3-clause | 250 |
package apollo.gui.genomemap;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.io.*;
import javax.swing.*;
import apollo.datamodel.*;
import apollo.gui.Controller;
import apollo.gui.ControlledObjectI;
import apollo.gui.Selection;
import apollo.gui.SelectionItem;
import apollo.gui.SelectionManager;
import apollo.gui.Transformer;
import apollo.gui.TierManager;
import apollo.gui.TierManagerI;
import apollo.gui.drawable.Drawable;
import apollo.gui.drawable.DrawableUtil;
import apollo.gui.event.*;
import apollo.util.*;
import java.util.*;
/**
* This class is the base class for FeatureView and SiteView. It is
* abstract because it doesn't implement getTierData() and would be
* non functional even if it did because it doesn't create the manager
* object anywhere. The reason it has been separated from FeatureView is
* to enable the use of the Scrolling and Tier functionality in the
* SequenceView, which although different to the FeatureView is basically
* a TierView where each tier is a piece of the sequence, so we can use
* the Tier and Scroll functionality and just change the meaning of tiers
* by creating a new TierManager and changing all the draw methods.
*
* Presently scrolling is done by mapping the scroll value to a tier number.
* Tiers run in opposite directions on forward and reverse strands. The tier
* number has to be inverted on the forward strand to be consistent with how
* the reverse strand is scrolled. Also this means that ScrollableTierView
* has to know about strand, which is a little funny because its subclass SequenceView
* is unstranded. This makes the code a bit confusing. I think
* a better way would be to not scroll by tier, but just by y values. A view would set
* itself according to the y values put out by the scroll bar. Im guessing that
* one of the reasons to do scrolling by tier was to have a unit of scrolling be a tier,
* but this can still be achieved with scrolling by y value, it would just round it to
* the nearest tier. This would be a bit of a work I think so Im putting it off for now.
* I'm also curious what others think. - MG
*/
public abstract class TierView extends ManagedView implements
    AdjustmentListener,
    ControlledObjectI,
    DropTargetViewI,
    PickViewI,
    TierManagerListener,
    TierViewI {

  /** Vertical scrollbar used to scroll through the tiers. */
  protected JScrollBar vScroll;
  /** Whether this view supports vertical scrolling at all. */
  protected boolean vscrollable = true;
  /** If true the scrollbar is placed on the view's right edge, else the left. */
  protected boolean rightSide = true;
  protected Controller controller;
  /** Views registered as legal drag sources for drops onto this view. */
  protected Vector dragSources = new Vector();
  /** Height of row in coordinate space (TierManager.Y_PIXELS_PER_FEATURE) */
  private int rowCoordHeight=0;
  protected SelectionManager selectionManager;

  public TierView(JComponent ap,
                  String name,
                  SelectionManager selectionManager) {
    super(ap, name, false);
    this.selectionManager = selectionManager;
    addScrollBar ();
  }

  public void setSelectionManager(SelectionManager selectionManager) {
    this.selectionManager = selectionManager;
  }

  /**
   * Installs a new tier manager, detaching the previous manager from the
   * controller (if any) and registering this view as a listener on the new one.
   */
  public void setTierManager(FeatureTierManager ftm) {
    // Initialise the data in the TierManager
    if (this.manager != null) {
      Controller c;
      if ((c = this.manager.getController()) != null) {
        c.removeListener((EventListener)this.manager);
      }
    }
    ftm.addTierManagerListener(this);
    super.setTierManager((TierManagerI)ftm);
  }

  protected void updateManagerHeight() {
    if (manager != null) {
      super.updateManagerHeight();
      /* Set the vscroller
         This is the one of the ways that the tierview differs
         from the dragview. Sites, results, and annotation views
         all support scrolling. Not repainting until this is set */
      setScrollValues();
    }
  }

  /** Moves the scrollbar nClick rows (e.g. in response to mouse-wheel input). */
  public void moveScrollbarByWheelAmount(int nClick) {
    setScrollbarValue(getScrollbarValue() + nClick*rowCoordHeight);
  }

  /** YOrientation gets flipped with revcomping */
  public void setYOrientation(int direction) {
    /* set this before altering the direction */
    boolean flipped = (getTransform().getYOrientation() != direction &&
                       (direction == Transformer.UP ||
                        direction == Transformer.DOWN));
    super.setYOrientation(direction);
    // scrollbar needs syncing on flip,
    // have to check for null manager, initial orient setting no manager yet
    if (vscrollable && flipped && manager != null) {
      flipScrollBar();
    }
  }

  /** Creates the vertical scrollbar and adds it to the component. */
  protected void addScrollBar() {
    vScroll = new JScrollBar();
    vScroll.setOrientation (JScrollBar.VERTICAL);
    vScroll.addAdjustmentListener(this);
    vScroll.setVisible (true);
    getComponent().add(vScroll,ApolloLayoutManager.NONE);
  }

  // Overidden LinearView methods

  /** Shows/hides the view, keeping the scrollbar's visibility in sync. */
  public void setVisible(boolean state) {
    super.setVisible(state);
    setScrollVisibility(state);
  }

  /** Just repaints scrollbars */
  public void paintView() {
    super.paintView();
    if (vscrollable) {
      // Position the scrollbar flush against the chosen edge of the view.
      Rectangle vb = new Rectangle(getBounds());
      if (rightSide)
        vb.x = vb.x + vb.width - vScroll.getPreferredSize().width;
      else
        vb.x = 0;
      vb.width = vScroll.getPreferredSize().width;
      vScroll.setBounds(vb);
      vScroll.invalidate();
      vScroll.validate();
    }
  }

  /** View is flipping (revcomping/changing y orientation),
      preserve scrollbar and view setting by inverting scrollbar value
      This isn't quite right - needs tweaking - off by page
  */
  private void flipScrollBar() {
    int newVal
      = vScroll.getMaximum() - vScroll.getValue() - vScroll.getVisibleAmount();
    vScroll.setValue(newVal);
  }

  /** Chooses which edge (ViewI.LEFTSIDE or right) the scrollbar is drawn on. */
  public void setScrollSide(int side) {
    if (side == ViewI.LEFTSIDE) {
      rightSide = false;
    } else {
      rightSide = true;
    }
  }

  public void incrementTierHeight() {
    changeTierHeight(1);
  }

  public void decrementTierHeight() {
    changeTierHeight(-1);
  }

  /** Grows (change == 1) or shrinks the tier height, then resyncs and repaints. */
  protected void changeTierHeight(int change) {
    if (change == 1) {
      manager.incrementTierHeight();
    } else {
      manager.decrementTierHeight();
    }
    updateManagerHeight();
    if (isVisible())
      getComponent().repaint();
  }

  /** Scrolls the view so that the given tier is the lowest one visible. */
  public void setLowestVisibleTier(long tier) {
    // Set the lowest visible tier in the manager
    manager.setLowestVisible((int)tier);
    // Get the position in transformer coords for the base
    // Set the lowest visible in transformer
    transformer.setYVisibleMinimum(manager.getMinimumVisibleTransformCoord());
    transformer.setYRange(getYRange());
    setInvalidity(true);
    if (isVisible())
      getComponent().repaint();
  }

  public int getLowestVisibleTier() {
    return manager.getLowestVisible();
  }

  public boolean allowsTierDrags() {
    return true;
  }

  // Subclasses that support tier dragging must override these three hooks.
  public boolean beginTierDrag(MouseEvent evt) {
    System.out.println("Need to override beginTierDrag!!!");
    return true;
  }

  public void updateTierDrag(MouseEvent evt) {
    System.out.println("Need to override updateTierDrag!!!");
  }

  public void endTierDrag(MouseEvent evt) {
    System.out.println("Need to override endTierDrag!!!");
  }

  // TierManagerListener method
  /** Tier layout changed: resync the scrollbar and repaint. */
  public boolean handleTierManagerEvent(TierManagerEvent evt) {
    setScrollValues();
    setInvalidity(true);
    if (isVisible())
      getComponent().repaint();
    return true;
  }

  // DropTargetViewI methods (needed for tier rearrangements)
  public boolean interpretDrop(DragViewI dragView, MouseEvent evt) {
    return interpretDrop(dragView,evt,true,new StringBuffer());
  }

  /**
   * Accepts a drop only if the drag originated from a registered drag source.
   * On rejection, "No action" is appended to the action description.
   */
  public boolean interpretDrop(DragViewI dragView,
                               MouseEvent evt,
                               boolean doFlag,
                               StringBuffer action) {
    //Check if source is valid
    if (!isValidDragSource(dragView.getOriginView())) {
      action.append("No action");
      return false;
    }
    return true;
    // Check if this is a tier drag
    // Update tier order
  }

  /** Registers a view whose drags this view will accept (no duplicates kept). */
  public void registerDragSource(TierViewI view) {
    if (!dragSources.contains(view)) {
      dragSources.addElement(view);
    }
  }

  public boolean isValidDragSource(TierViewI view) {
    if (dragSources.contains(view)) {
      return true;
    }
    return false;
  }

  // AdjustmentListener methods - called when scrollbar adjusted
  public void adjustmentValueChanged(AdjustmentEvent evt) {
    if (vscrollable && evt.getSource() == vScroll) {
      syncScrollbars();
    }
  }

  /** Translates the scrollbar's current value into the lowest visible tier. */
  private void syncScrollbars() {
    if (vScroll == null) {
      // Should never happen; throw/catch just to get a stack trace in the log.
      try {
        throw new Exception("Failed syncScrollbar");
      } catch (Exception e) {
        e.printStackTrace();
      }
      return;
    }
    if (scrollHack) {
      setLowestVisibleTier((long) vScroll.getMaximum() -
                           vScroll.getValue()-manager.getNumVisible());
    } else
      setLowestVisibleTier(scrollValueToTierNumber(vScroll.getValue()));
  }

  /**
   * This method makes scrolling on forward and reverse strands scroll the same way.
   * On the forward strand the tier number from the scroll value has to be flipped
   * around (subtracted from total number of tiers, and the extent has to be subtracted
   * as well)
   */
  private int scrollValueToTierNumber(int scrollValue) {
    // Down Orientation
    if (isDownOrientation())
      return (int)manager.toTier(scrollValue);

    // isUpOrientation()
    int extent = (int)manager.getVisibleUserCoord();
    // dont know why but sometimes this is < 0
    if (extent < 0)
      extent = 0;
    int scrollPlusExtent = scrollValue + extent;
    int tierToFlip = (int)manager.toTier(scrollPlusExtent);
    int forwardStrandTier = manager.getNumTiers() - tierToFlip;
    if (forwardStrandTier < 0)
      forwardStrandTier = 0;
    return forwardStrandTier;
  }

  /** Down Orientation happens with reverse strand normal
      and forward strand in reverse comp (below axis)
  */
  protected boolean isDownOrientation() {
    return getTransform().getYOrientation() == Transformer.DOWN;
  }

  /** Up orientation happens with forward strand normal
      and reverse strand in rev comp, its above the axis */
  protected boolean isUpOrientation() {
    return getTransform().getYOrientation() == Transformer.UP;
  }

  /**
   * This is to set a view in its vertical "start" position, which is opposite
   * for the 2 strands. This would be true even if we were not tier based.
   */
  protected void putScrollAtStart() {
    // Set forward strand to show 1st tier which is the maximum scroll value
    if (isUpOrientation())
      setScrollbarValue(getMaxScrollbarValue());
    // ReverseStrand's 1st tier is the minimum scroll value
    if (isDownOrientation())
      setScrollbarValue(getMinScrollbarValue());
  }

  // Thin accessors around the JScrollBar, used by subclasses.
  protected void setScrollbarValue(int val) {
    vScroll.setValue(val);
  }

  protected int getMaxScrollbarValue() {
    return vScroll.getMaximum();
  }

  protected int getVisibleScrollbarValue() {
    return vScroll.getVisibleAmount();
  }

  protected int getMinScrollbarValue() {
    return vScroll.getMinimum();
  }

  protected int getScrollbarValue() {
    return vScroll.getValue();
  }

  /** Scrollbar value measured from the opposite end of its range. */
  protected int getInvertedScrollbarValue() {
    return getMaxScrollbarValue() -
      (getScrollbarValue() + vScroll.getVisibleAmount());
  }

  // Alternate scroll-value-to-tier mapping used by some subclasses;
  // see syncScrollbars() and setScrollValues().
  private boolean scrollHack = false;
  public void setScrollHack(boolean value) {
    scrollHack = value;
  }

  // Class specific methods
  public void setVScrollable(boolean state) {
    vscrollable = state;
  }

  public void fireViewEvent(int type) {
    ViewEvent evt = new ViewEvent(getComponent(),this,type);
    super.fireViewEvent(evt);
  }

  /**
   * Recomputes the scrollbar's min/max/extent/increments from the tier
   * manager's current layout, clamping the value so the view never scrolls
   * past the last tier.
   */
  public void setScrollValues() {
    if (vscrollable == true && vScroll != null) {
      int oldval = vScroll.getValue();
      int oldvisible = vScroll.getVisibleAmount();
      vScroll.setMinimum(0);
      int maxUserCoord
        = (int)manager.getMaxUserCoord() + (int)manager.getMaxTierUserHeight();
      vScroll.setMaximum(maxUserCoord);
      int visCoord = (int)manager.getVisibleUserCoord();
      if (visCoord > maxUserCoord)
        visCoord = maxUserCoord;
      if (rowCoordHeight==0) {
        rowCoordHeight = (int)manager.getMaxTierUserHeight();
        vScroll.setUnitIncrement(rowCoordHeight);
      }
      /*set paging to size of visible screen minus one row
        so one row retained on page */
      vScroll.setBlockIncrement(visCoord-rowCoordHeight);
      if (scrollHack && oldvisible != vScroll.getVisibleAmount()) {
        int bumpme = vScroll.getVisibleAmount() - oldvisible;
        vScroll.setValue(vScroll.getValue() - bumpme);
      }
      // I found that subtracting visCoord messes revcomp up but is needed
      // or otherwise the scrollbars still scroll when theres nothing to scroll
      if (oldval > maxUserCoord - visCoord) {
        // this happens if tiers dont fill screen
        if (maxUserCoord-visCoord <= vScroll.getMinimum())
          vScroll.setValue(vScroll.getMinimum());
        else
          vScroll.setValue(maxUserCoord - visCoord);
      }
      // setVisibleAmount has to come after setValue as it can get rejected
      // depending on the value -
      vScroll.setVisibleAmount(visCoord);
      //Scrollbars are now permanent again whether there is anything to scroll or not.
      //When they were not showing up and their space was reclaimed the logic was
      //erroneous and views with and without scrollbars would have different
      //basepairs/pixel and features would not line up. Until this is addressed
      //scrollbars should just be kept in.
    }
  }

  /** Debug helper: dumps the scrollbar's parameters to stdout. */
  public void printScrollValues() {
    System.out.println("vScroll params:-");
    System.out.println("  minimum " + vScroll.getMinimum());
    System.out.println("  maximum " + vScroll.getMaximum());
    System.out.println("  value   " + vScroll.getValue());
    System.out.println("  visam   " + vScroll.getVisibleAmount() +
                       " nvistier " + manager.getNumVisible());
  }

  public void setScrollVisibility(boolean state) {
    if (vscrollable && vScroll!=null) {
      vScroll.setVisible(state);
    }
  }

  // ControlledObjectI methods
  public Controller getController() {
    return controller;
  }

  public Object getControllerWindow() {
    return SwingMissingUtil.getWindowAncestor(getComponent());
  }

  public boolean needsAutoRemoval() {
    return true;
  }

  /** Attaches the controller to this view and to the tier manager (if set). */
  public void setController(Controller c) {
    controller = c;
    controller.addListener(this);
    if (getTierManager() != null) {
      getTierManager().setController(getController());
    }
  }

  /**
     returns a FeatureList of SeqFeatureI instances that
     fall beneath the point passed in
  */
  public FeatureList findFeatures(Point pnt) {
    return findFeatures(getSelectionRectangle(pnt));
  }

  /**
     returns a FeatureList of SeqFeatureI instances that
     overlap any of the areas in the Vector of rectangles
     that is passed in
  */
  public FeatureList findFeatures(Vector rects) {
    FeatureList features = new FeatureList();
    for (int i = 0; i < rects.size(); i++) {
      Rectangle rect = (Rectangle) rects.elementAt (i);
      features.addVector ((findFeatures(rect)).toVector());
    }
    return features;
  }

  /**
     returns a FeatureList of SeqFeatureI instances (model not drawables)
     overlap any of the areas in the Vector of rectangles that is passed in
  */
  public FeatureList findFeatures(Rectangle rect) {
    FeatureList features = new FeatureList();
    Vector these_features = findDrawables(rect);
    // Drain the drawable vector, collecting each drawable's model feature.
    while (these_features.size() > 0) {
      Drawable se = (Drawable) these_features.elementAt(0);
      these_features.removeElement (se);
      features.addFeature (se.getFeature());
    }
    return features;
  }

  /** Find geared to selection. The SelectionItems in the Selection
      are model(SeqFeatureI) but they have the associated drawable
      attached as a listener, so when the SelectionItem is selected it
      will tell its drawable listener to select. This achieves 2 goals:
      1) Keeps the Selection model based so someones own set of drawables
      is not being passed around
      2) With the drawables attached we dont have to refind them when
      receiving the feature
      selection event that came from us, which is rather inefficient and slow

      part of PickViewI interface. Can not do actual selection here.
      If part of previous selection it will get deselected after it gets
      selected.
  */
  public Selection findFeaturesForSelection(Point p, boolean selectParents) {
    return findFeaturesForSelection(getSelectionRectangle(p),selectParents);
  }

  public Selection findFeaturesForSelection(Rectangle rect) {
    // false - dont select parents
    return findFeaturesForSelection(rect, false);
  }

  protected abstract Selection findFeaturesForSelection(Rectangle rect,
                                                        boolean selectParents);

  /**
   * CANT do selection here as this is called for mouse over as well!
   */
  public Vector findDrawables(Point pnt) {
    return findDrawables(getSelectionRectangle(pnt));
  }

  /** Finds all the sites in rect and creates SiteCodon for them.
   * Returns vector of SiteCodons
   * The SiteCodons are not drawn.
   * Sites are notified when a SiteCodon is selected.
   */
  public Vector findDrawables(Rectangle rect) {
    return findDrawables (rect, false);
  }

  protected abstract Vector findDrawables(Rectangle rect,
                                          boolean select_filter);

  /** Tears down the scrollbar and its listener before delegating cleanup. */
  public void clear() {
    getComponent().remove(vScroll);
    vScroll.removeAdjustmentListener(this);
    vScroll = null;
    super.clear();
  }

  // Low/high genomic extents of the current drag set; computed in
  // drawablesForDrag() and consumed by createDragView().
  private int drag_low;
  private int drag_high;

  /** Returns a FeatureSet of Drawables/view, not a model FeatureSet.
      This is what drag view expects which is what this is used for.
      This could perhaps go in Selection? */
  protected Vector drawablesForDrag(Selection selection) {
    // Make a set of drawables for the selected features
    Vector bundle = new Vector();
    for (int i=0; i < selection.size(); i++) {
      SeqFeatureI selFeat = selection.getSelectedData(i);
      // Track the overall low/high extents across the whole selection.
      if (i == 0 || selFeat.getLow() < drag_low)
        drag_low = selFeat.getLow();
      if (i == 0 || selFeat.getHigh() > drag_high)
        drag_high = selFeat.getHigh();
      Drawable dsf = DrawableUtil.createDrawable(selFeat);
      dsf.setFeature(selFeat);
      dsf.setVisible(true);
      dsf.setDrawn(true);
      bundle.add(dsf);
    }
    return bundle;
  }

  /** given the entirety of what is currently selected, remove anything
      that doesn't belong to this view and return the remaining selections.
      This used to be handled in the Selection class, but it didn't quite
      work, because the 'source'=='where it was originally selected'
      which, may or may not be, the same as this view.
  */
  public abstract Selection getViewSelection(Selection selection);

  /**
   * Builds a DragView holding drawables for the current selection, or
   * returns null when nothing is selected or the press was outside the
   * selection's bounding box.
   */
  public DragViewI createDragView(MouseEvent evt,
                                  Selection view_selection) {
    // Check if anything is selected to drag
    if (view_selection.size() != 0) {
      // A drawableFeatureSet whose FeatureSet contains Drawables not model
      // What would happen if selToFS returned a FeatureList instead?
      // I dont think so it uses a lot of FeatureSet functionality
      Vector drawable_vect = drawablesForDrag(view_selection);
      Rectangle boxBounds = DrawableUtil.getBoxBounds(drawable_vect,
                                                      transformer,
                                                      manager);
      if (evt.getX() >= boxBounds.x &&
          evt.getX() < boxBounds.x + boxBounds.width + 1) {
        Rectangle bounds = new Rectangle(evt.getX(),
                                         evt.getY(),
                                         boxBounds.width+1,
                                         20000);
        DragView dv = new DragView(getComponent(), "draggy", view_selection);
        dv.setBounds(bounds);
        dv.setOrigin(this, evt.getPoint());
        dv.setDrawables(drawable_vect);
        DrawableTierManager dm = new DrawableTierManager();
        dm.setTransformer(transformer);
        dm.setAggregateSizeChange(getTierManager().getAggregateSizeChange());
        /* This is very, very sneaky. When the view has its manager
           set then the View gives the drawables to the manager which then
           sets up its tiers accordingly */
        dv.setTierManager(dm);
        int drag_right = (transformer.getXOrientation() == Transformer.LEFT ?
                          drag_high : drag_low);
        Point pixel_right = getTransform().toPixel(drag_right + 1, 0);
        /* Limits are in application coordinates. That is, nucleic acid
           base count. But the upper limit needs to be rounded to
           pixels? */
        int upper_limit = (drag_high +
                           ((boxBounds.x+boxBounds.width+1-pixel_right.x) *
                            (int)getTransform().getXCoordsPerPixel()));
        dv.setLimits((new int [] {drag_low, upper_limit}));
        dv.setYOrientation(getTransform().getYOrientation());
        dv.setXOrientation(getTransform().getXOrientation());
        dv.getTransform().setXVisibleMinimum(drag_low);
        dv.setRelativePosition(new Point(evt.getX()-boxBounds.x,0));
        return dv;
      } else {
        return null;
      }
    } else {
      return null;
    }
  }
}
| genome-vendor/apollo | src/java/apollo/gui/genomemap/TierView.java | Java | bsd-3-clause | 21,330 |
/*
Copyright (c) 2000-2022, Board of Trustees of Leland Stanford Jr. University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package org.lockss.plugin.clockss.iop;
import org.apache.commons.io.FilenameUtils;
import org.lockss.daemon.PluginException;
import org.lockss.extractor.ArticleMetadata;
import org.lockss.extractor.FileMetadataExtractor;
import org.lockss.extractor.MetadataField;
import org.lockss.extractor.MetadataTarget;
import org.lockss.plugin.CachedUrl;
import org.lockss.plugin.clockss.Onix3BooksSchemaHelper;
import org.lockss.plugin.clockss.SourceXmlMetadataExtractorFactory;
import org.lockss.plugin.clockss.SourceXmlSchemaHelper;
import org.lockss.util.Logger;
import org.lockss.util.UrlUtil;
import java.util.ArrayList;
import java.util.List;
/**
 * Metadata extractor factory for IOP book source content delivered as
 * ONIX 3 XML.
 */
public class IopBookOnix3XmlMetadataExtractorFactory extends SourceXmlMetadataExtractorFactory {
  private static final Logger log =
      Logger.getLogger(IopBookOnix3XmlMetadataExtractorFactory.class);

  // Shared, lazily-built ONIX 3 schema helper. The helper carries no
  // per-extraction state, so a single instance serves every extractor.
  private static SourceXmlSchemaHelper Onix3Helper = null;

  @Override
  public FileMetadataExtractor createFileMetadataExtractor(MetadataTarget target,
                                                           String contentType)
      throws PluginException {
    return new IopBookOnix3XmlMetadataExtractor();
  }

  public class IopBookOnix3XmlMetadataExtractor extends SourceXmlMetadataExtractor {

    /** Returns the ONIX 3 books schema helper, creating it on first use. */
    @Override
    protected SourceXmlSchemaHelper setUpSchema(CachedUrl cu) {
      log.debug3("Fei: IopBook Onix3BooksSchemaHelper");
      if (Onix3Helper == null) {
        Onix3Helper = new Onix3BooksSchemaHelper();
      }
      return Onix3Helper;
    }

    /*
     * (non-Javadoc)
     * WARC XML files are a little non-standard in that they store the actual
     * access.url location in the "self-uri" field for Jats and the
     * proprietary ID field for ONIX.
     * Sets the access_url from the proprietary ID when present, and falls
     * back to the copyright date when no publication date was cooked.
     */
    @Override
    protected void postCookProcess(SourceXmlSchemaHelper schemaHelper,
                                   CachedUrl cu, ArticleMetadata thisAM) {
      log.debug3("Fei: in IopBook postCookProcess");
      String proprietaryId = thisAM.getRaw(Onix3BooksSchemaHelper.ONIX_idtype_proprietary);
      if (proprietaryId != null) {
        thisAM.replace(MetadataField.FIELD_ACCESS_URL, proprietaryId);
      }
      if (thisAM.get(MetadataField.FIELD_DATE) == null) {
        String copyrightDate = thisAM.getRaw(Onix3BooksSchemaHelper.ONIX_copy_date);
        if (copyrightDate != null) {
          thisAM.put(MetadataField.FIELD_DATE, copyrightDate);
        }
      }
    }

    /**
     * The matching content file for a record is RecordReference + ".pdf"
     * in the same directory as the XML file.
     */
    @Override
    protected List<String> getFilenamesAssociatedWithRecord(SourceXmlSchemaHelper helper,
                                                            CachedUrl cu,
                                                            ArticleMetadata oneAM) {
      log.debug3("Fei: in IopBook getFilenamesAssociatedWithRecord");
      String recordReference = oneAM.getRaw(Onix3BooksSchemaHelper.ONIX_RR);
      String cuBase = FilenameUtils.getFullPath(cu.getUrl());
      if (recordReference != null) {
        log.debug3("Fei: PDF file path is : " + cuBase + recordReference);
      } else {
        log.debug3("Fei: PDF file path not found : " + cuBase);
      }
      // NOTE(review): when recordReference is null the URL built below ends
      // in "null.pdf"; presumably upstream guarantees ONIX_RR is present —
      // confirm against the schema helper.
      String pdfUrl = UrlUtil.minimallyEncodeUrl(cuBase + recordReference + ".pdf");
      List<String> filenames = new ArrayList<String>();
      filenames.add(pdfUrl);
      return filenames;
    }
  }
}
| lockss/lockss-daemon | plugins/src/org/lockss/plugin/clockss/iop/IopBookOnix3XmlMetadataExtractorFactory.java | Java | bsd-3-clause | 5,238 |
'use strict';

// Controller for the registration page: client-side validation plus the
// POSTs that create the account and pre-check the email for uniqueness.
myApp.controller('registrationController', ['$scope', '$timeout', '$http', '$state', 'registrationCompleteService',
    function ($scope, $timeout, $http, $state, registrationCompleteService) {

        // Form model backing the registration view.
        $scope.errorMessage = "";
        $scope.firstName = "";
        $scope.secondName = "";
        $scope.date = "";
        $scope.email = "";

        // Set by checkEmail() when the server reports the email as taken.
        var emailAlreadyTaken = false;

        // Shows an error message for three seconds, then clears it.
        function flashError(message) {
            $scope.errorMessage = message;
            $timeout(function () {
                $scope.errorMessage = "";
            }, 3000);
        }

        // Validates the form and, if everything checks out, registers the user.
        $scope.submit = function () {
            if ($scope.firstPassword !== $scope.secondPassword) {
                flashError("Пароли не совпадают. Введите пароль еще раз");
            } else if (!$scope.registrationForm.$valid) {
                flashError("Введены неверные данные. убедитесь что все поля не подсвечены красным");
            } else if (emailAlreadyTaken) {
                flashError("Такой пользователь уже существует. Введите другой email");
            } else {
                $http({
                    method: 'POST',
                    url: '/userRegistration',
                    data: {
                        firstName: $scope.firstName,
                        lastName: $scope.secondName,
                        birthDate: $scope.date,
                        email: $scope.email,
                        password: $scope.firstPassword
                    }
                }).then(function successCallback(response) {
                    // Registration succeeded: flag completion and move on to login.
                    if (response) {
                        registrationCompleteService.registrationCompleteFn();
                        $state.go("loginPageState");
                    }
                }, function errorCallback(response) {
                    flashError("Произошла ошибка на сервере, попробуйте еще раз чуть позже");
                });
            }
        };

        // Asks the server whether the entered email is already registered.
        $scope.checkEmail = function () {
            if ($scope.registrationForm.email.$valid) {
                $http({
                    method: 'POST',
                    url: '/checkEmail',
                    data: {
                        email: $scope.email
                    }
                }).then(function successCallback(response) {
                    if (response.data) {
                        flashError("Такой пользователь уже существует. Введите другой email");
                        emailAlreadyTaken = true;
                    } else {
                        emailAlreadyTaken = false;
                    }
                }, function errorCallback(response) {
                    // Transport error: leave the flag as-is; submit() still
                    // relies on the server to reject duplicates.
                });
            }
        };
    }]);
(function($) {
    // Initialize every media-library picker field on the page.
    $(".media-library-picker-field").each(function() {
        var element = $(this);
        // Server-supplied configuration, read from data-* attributes.
        var multiple = element.data("multiple");
        var removeText = element.data("remove-text");
        var removePrompt = element.data("remove-prompt");
        var removeAllPrompt = element.data("remove-all-prompt");
        var editText = element.data("edit-text");
        var dirtyText = element.data("dirty-text");
        var pipe = element.data("pipe");
        var returnUrl = element.data("return-url");
        var addUrl = element.data("add-url");
        var promptOnNavigate = element.data("prompt-on-navigate");
        var showSaveWarning = element.data("show-save-warning");

        var addButton = element.find(".btn.add");
        var saveButton = element.find('.btn.save');
        var removeAllButton = element.find(".btn.remove");

        // HTML template for one selected media item; {placeholders} are
        // substituted per item when the picker dialog returns a selection.
        var template =
            '<li><div data-id="{contentItemId}" class="media-library-picker-item"><div class="thumbnail">{thumbnail}<div class="overlay"><h3>{title}</h3></div></div></div><a href="#" data-id="{contentItemId}" class="media-library-picker-remove">' + removeText + '</a>' + pipe + '<a href="{editLink}?ReturnUrl=' + returnUrl + '">' + editText + '</a></li>';

        // Rewrites the hidden ids field from the items currently in the list
        // and toggles the add/save/remove-all buttons to match the count.
        var refreshIds = function() {
            var id = element.find('.selected-ids');
            var ids = [];

            element.find(".media-library-picker-item").each(function () {
                ids.push($(this).attr("data-id"));
            });

            id.val(ids.join());

            var itemsCount = ids.length;

            // Single-item pickers swap the add button for save once populated.
            if(!multiple && itemsCount > 0) {
                addButton.hide();
                saveButton.show();
            }
            else {
                addButton.show();
                saveButton.hide();
            }

            if(itemsCount > 1) {
                removeAllButton.show();
            }
            else {
                removeAllButton.hide();
            }
        };

        // Shows the "unsaved changes" message and marks the page dirty.
        var showSaveMsg = function () {
            if (!showSaveWarning)
                return;

            element.find('.media-library-picker-message').show();
            window.mediaLibraryDirty = true;
        };

        window.mediaLibraryDirty = false;

        // Warn before navigating away with unsaved picker changes; the
        // handler is installed once and shared across all picker fields.
        if (promptOnNavigate) {
            if (!window.mediaLibraryNavigateAway) {
                $(window).on("beforeunload", window.mediaLibraryNavigateAway = function() {
                    if (window.mediaLibraryDirty) {
                        return dirtyText;
                    }
                });

                // Submitting the form saves the changes, so clear the flag.
                element.closest("form").on("submit", function() {
                    window.mediaLibraryDirty = false;
                });
            }
        }

        refreshIds();

        // Opens the media-library dialog in a colorbox iframe; on close,
        // appends the items the user selected (if any) to the list.
        addButton.click(function () {
            var url = addUrl;

            $.colorbox({
                href: url,
                iframe: true,
                reposition: true,
                width: "100%",
                height: "100%",
                onLoad: function() { // hide the scrollbars from the main window
                    $('html, body').css('overflow', 'hidden');
                    $('#cboxClose').remove();
                    element.trigger("opened");
                },
                onClosed: function() {
                    $('html, body').css('overflow', '');

                    // The dialog publishes its selection on $.colorbox.
                    var selectedData = $.colorbox.selectedData;
                    if (selectedData == null) { // Dialog cancelled, do nothing
                        element.trigger("closed");
                        return;
                    }

                    // Single-item pickers take at most one of the selection.
                    var selectionLength = multiple ? selectedData.length : Math.min(selectedData.length, 1);
                    for (var i = 0; i < selectionLength ; i++) {
                        var tmpl = template
                            .replace(/\{contentItemId\}/g, selectedData[i].id)
                            .replace(/\{thumbnail\}/g, selectedData[i].thumbnail)
                            .replace(/\{title\}/g, selectedData[i].title)
                            .replace(/\{editLink\}/g, selectedData[i].editLink);
                        var content = $(tmpl);
                        element.find('.media-library-picker.items ul').append(content);
                    }

                    refreshIds();
                    if (selectedData.length) {
                        showSaveMsg();
                    }
                    element.trigger("closed");
                }
            });
        });

        // Removes every selected item after user confirmation.
        removeAllButton.click(function (e) {
            e.preventDefault();

            if (!confirm(removeAllPrompt)) return false;

            element.find('.media-library-picker.items ul').children('li').remove();

            refreshIds();
            showSaveMsg();
            return false;
        });

        // Removes a single item (delegated: items are added dynamically).
        element.on("click",'.media-library-picker-remove', function(e) {
            e.preventDefault();

            if (!confirm(removePrompt)) return false;

            $(this).closest('li').remove();

            refreshIds();
            showSaveMsg();
            return false;
        });

        // Items can be reordered by dragging their thumbnails.
        element.find(".media-library-picker.items ul").sortable({
            handle: '.thumbnail',
            stop: function() {
                refreshIds();
                showSaveMsg();
            }
        }).disableSelection();
    });
})(jQuery);
package no.arkivlab.innsyn.operations.iface;
import no.arkivlab.innsyn.models.n5.ClassificationSystem;
/**
 * Read-only lookup operations over Noark 5 classification systems.
 */
public interface IClassificationSystemService {

    /** Returns every classification system. */
    Iterable<ClassificationSystem> findAll();

    /** Returns one page of classification systems. */
    Iterable<ClassificationSystem> findAll(Integer pageNumber, Integer pageSize);

    /** Looks up a single classification system by its system id. */
    ClassificationSystem findBySystemId(String systemId);

    /** Returns the classification systems created on the given date. */
    Iterable<ClassificationSystem> findByCreatedDate(String createdDate);

    /** Returns the classification systems created by the given user. */
    Iterable<ClassificationSystem> findByCreatedBy(String createdBy);
}
| KDRS-SA/kdrs-toolbox-innsyn | src/main/java/no/arkivlab/innsyn/operations/iface/IClassificationSystemService.java | Java | bsd-3-clause | 510 |
from regulus.tree.tree import Node
from regulus.tree import *
class TestNode(Node):
    """Tree node whose string form is its payload, or "<none>" when unset."""

    def __init__(self, **kwargs):
        super(TestNode, self).__init__(**kwargs)

    def __str__(self):
        return "<none>" if self.data is None else self.data
def show(root):
    """Print the data of every node reachable from root, depth-first."""
    for node in depth_first(root):
        print(node.data)
def show_depth(root, depth=0):
    """Recursively print the tree, indenting each node by its depth."""
    line = '{}{} d={}'.format(' ' * depth, str(root), depth)
    print(line)
    for child in root.children:
        show_depth(child, depth + 1)
# Build a small fixed tree exercised by all the traversal demos below:
# root -> {.1 -> {.1.1, .1.2}, .2 -> {.2.1 -> {.2.1.1, .2.1.2}, .2.2}}
root = TestNode(data='root')
n1 = TestNode(data='.1',parent=root)
n2 = TestNode(data='.2',parent=root)
n11 = TestNode(data='.1.1',parent=n1)
n12 = TestNode(data='.1.2',parent=n1)
n21 = TestNode(data='.2.1',parent=n2)
n211 = TestNode(data='.2.1.1',parent=n21)
n212 = TestNode(data='.2.1.2',parent=n21)
n22 = TestNode(data='.2.2',parent=n2)

# Breadth-first traversal in its pre, post, and combined visiting modes.
# (The library spells it "breath_first".)
print('breath first. pre')
for n in breath_first(root):
    print(n.data)
print('depth = ', root.depth())

print('breath first. post')
for n in breath_first(root, post=True):
    print(n.data)

print('breath first. both')
for n in breath_first(root, both=True):
    print(n.data)

# Depth-first traversal, pre-order and post-order.
print('depth first. pre')
for n in depth_first(root):
    print(n.data)

print('depth first. post')
for n in depth_first(root, post=True):
    print(n.data)

# Per-node priorities used to drive the best-first traversal below.
values = dict([('root', 2),
               ('.1', 5),
               ('.1.1', 15),
               ('.1.2', 3),
               ('.2', 6),
               ('.2.1', 20),
               ('.2.2', 9),
               ('.2.1.1', 0),
               ('.2.1.2', 30),
               ])

# Best-first traversal: nodes are visited in priority order, where each
# node's value is looked up from the dict above.
print('best first')
for v, n in best_first(root, value=lambda n: values[n.data]):
    print(v, n.data)

# reduce() keeps only the nodes matching the predicate, rebuilding the
# tree with the given node factory; show_depth prints the reduced shape.
print('reduce .1')
x = reduce(root, lambda n: '.1' in n.data, factory=TestNode)
show_depth(x)

print('reduce .2')
x = reduce(root, lambda n: '.2' in n.data, factory=TestNode)
show_depth(x)
/*******************************************************************************
* Caleydo - Visualization for Molecular Biology - http://caleydo.org
* Copyright (c) The Caleydo Team. All rights reserved.
* Licensed under the new BSD license, available at http://caleydo.org/license
*******************************************************************************/
package org.caleydo.view.table;
import static org.caleydo.core.util.base.Runnables.withinSWTThread;
import static org.caleydo.core.view.util.UIRunnables.show;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.commons.lang.WordUtils;
import org.caleydo.core.data.collection.EDimension;
import org.caleydo.core.data.datadomain.TablePerspectiveActions.ITablePerspectiveFactory;
import org.caleydo.core.data.perspective.table.TablePerspective;
import org.caleydo.core.internal.cmd.AOpenViewHandler;
import org.caleydo.core.util.collection.Pair;
/**
* @author Samuel Gratzl
*
*/
public class TablePerspectiveAction implements ITablePerspectiveFactory {
/**
 * Builds the context-menu actions offered for the given table perspective:
 * opening it in a table view plus the various export variants.
 */
@Override
public Collection<Pair<String, Runnable>> create(TablePerspective tablePerspective, Object sender) {
	List<Pair<String, Runnable>> actions = new ArrayList<>();
	actions.add(Pair.make("Show in Table",
			show(TableView.VIEW_TYPE, AOpenViewHandler.createSecondaryID() + "_lazy", tablePerspective)));
	actions.add(export("Export Data", tablePerspective, null, null));
	final String recLabel = WordUtils.capitalizeFully(tablePerspective.getDataDomain().getRecordIDCategory()
			.getCategoryName());
	final String dimLabel = WordUtils.capitalizeFully(tablePerspective.getDataDomain().getDimensionIDCategory()
			.getCategoryName());
	// Grouping exports are only offered when the respective dimension has a grouping.
	if (ExportTablePerspectiveAction.hasGrouping(tablePerspective, EDimension.RECORD))
		actions.add(export("Export " + recLabel + " Grouping Data", tablePerspective, null, EDimension.RECORD));
	if (ExportTablePerspectiveAction.hasGrouping(tablePerspective, EDimension.DIMENSION))
		actions.add(export("Export " + dimLabel + " Grouping Data", tablePerspective, null, EDimension.DIMENSION));
	actions.add(export("Export " + recLabel + " Identifiers", tablePerspective, EDimension.RECORD, null));
	actions.add(export("Export " + dimLabel + " Identifiers", tablePerspective, EDimension.DIMENSION, null));
	return actions;
}
/**
 * Wraps an {@link ExportTablePerspectiveAction} in a labeled runnable that is
 * executed on the SWT thread.
 */
private static Pair<String, Runnable> export(String label, TablePerspective tablePerspective,
		EDimension limitToIdentifiersOf, EDimension exportGroupingsOf) {
	ExportTablePerspectiveAction action = new ExportTablePerspectiveAction(tablePerspective,
			limitToIdentifiersOf, exportGroupingsOf);
	return Pair.make(label, withinSWTThread(action));
}
}
| Caleydo/caleydo | org.caleydo.view.table/src/org/caleydo/view/table/TablePerspectiveAction.java | Java | bsd-3-clause | 2,609 |
// Licensed under the BSD license. See LICENSE.txt for more details.
#include "translator/rose_ast_attribute.h"
#include "translator/rose_util.h"
#include "translator/stencil_range.h"
namespace physis {
namespace translator {

// Attribute type name used to identify this attribute on AST nodes.
const std::string GridCallAttribute::name = "GridCall";

// Associates a grid access call with the grid variable it touches and the
// kind of access (GET / GET_PERIODIC / EMIT).
GridCallAttribute::GridCallAttribute(SgInitializedName *grid_var,
                                     KIND k):
    grid_var_(grid_var), kind_(k) {
}

GridCallAttribute::~GridCallAttribute() {}

// ROSE attributes must be copyable; a shallow copy suffices since the
// attribute does not own grid_var_.
AstAttribute *GridCallAttribute::copy() {
  return new GridCallAttribute(grid_var_, kind_);
}

bool GridCallAttribute::IsGet() {
  return kind_ == GET;
}

bool GridCallAttribute::IsGetPeriodic() {
  return kind_ == GET_PERIODIC;
}

bool GridCallAttribute::IsEmit() {
  return kind_ == EMIT;
}

// Copies every known attribute kind from src to dst.
void CopyAllAttributes(SgNode *dst, SgNode *src) {
  // ROSE does not seem to have API for locating all attached
  // attributes or copy them all. So, as an ad-hoc work around, list
  // all potentially attached attributes here to get them copied to
  // the destination node.
  if (rose_util::GetASTAttribute<StencilIndexVarAttribute>(src)) {
    rose_util::CopyASTAttribute<StencilIndexVarAttribute>(
        dst, src, false);
    LOG_DEBUG() << "StencilIndexVarAttribute found at: "
                << src->unparseToString() << "\n";
  }
}

} // namespace translator
} // namespace physis
| naoyam/physis | translator/rose_ast_attribute.cc | C++ | bsd-3-clause | 1,367 |
# Handles checkout logic. This is somewhat contrary to standard REST convention since there is not actually a
# Checkout object. There's enough distinct logic specific to checkout which has nothing to do with updating an
# order that this approach is waranted.
class CheckoutController < Spree::BaseController
ssl_required
before_filter :load_order
rescue_from Spree::GatewayError, :with => :rescue_from_spree_gateway_error
# Updates the order and advances to the next state (when possible.)
def update
if @order.update_attributes(object_params)
if @order.next
state_callback(:after)
else
flash[:error] = I18n.t(:payment_processing_failed)
redirect_to checkout_state_path(@order.state) and return
end
if @order.state == "complete" or @order.completed?
flash[:notice] = I18n.t(:order_processed_successfully)
flash[:commerce_tracking] = "nothing special"
redirect_to completion_route
else
redirect_to checkout_state_path(@order.state)
end
else
render :edit
end
end
private
# Provides a route to redirect after order completion
def completion_route
order_path(@order)
end
def object_params
# For payment step, filter order parameters to produce the expected nested attributes for a single payment and its source, discarding attributes for payment methods other than the one selected
if @order.payment?
if params[:payment_source].present? && source_params = params.delete(:payment_source)[params[:order][:payments_attributes].first[:payment_method_id].underscore]
params[:order][:payments_attributes].first[:source_attributes] = source_params
end
if (params[:order][:payments_attributes])
params[:order][:payments_attributes].first[:amount] = @order.total
end
end
params[:order]
end
def load_order
@order = current_order
redirect_to cart_path and return unless @order and @order.checkout_allowed?
redirect_to cart_path and return if @order.completed?
@order.state = params[:state] if params[:state]
state_callback(:before)
end
def state_callback(before_or_after = :before)
method_name = :"#{before_or_after}_#{@order.state}"
send(method_name) if respond_to?(method_name, true)
end
def before_address
@order.bill_address ||= Address.new(:country => default_country)
@order.ship_address ||= Address.new(:country => default_country)
end
def before_delivery
return if params[:order].present?
@order.shipping_method ||= (@order.rate_hash.first && @order.rate_hash.first[:shipping_method])
end
def before_payment
current_order.payments.destroy_all if request.put?
end
def after_complete
session[:order_id] = nil
end
def default_country
Country.find Spree::Config[:default_country_id]
end
def rescue_from_spree_gateway_error
flash[:error] = t('spree_gateway_error_flash_for_checkout')
render :edit
end
end
| collin/spree_core | app/controllers/checkout_controller.rb | Ruby | bsd-3-clause | 3,003 |
<?php
namespace app\modules\user;
/**
* main module definition class
*/
/**
 * User module definition for the application.
 */
class Module extends \yii\base\Module
{
    /**
     * @inheritdoc
     */
    public $controllerNamespace = 'app\modules\user\controllers';

    /**
     * @inheritdoc
     */
    public function init()
    {
        // Run the standard Yii module initialization first; module-specific
        // setup would follow here.
        parent::init();
    }
}
| dench/webportal | modules/user/Module.php | PHP | bsd-3-clause | 367 |
/**
* Copyright (c) 2013, impossibl.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.impossibl.stencil.engine.tests;
import org.junit.Test;
/**
 * Tests stencil function declarations: default parameter values and the
 * "rest" (*) parameter collecting named or positional arguments.
 */
public class FunctionTest extends Tests {

	// A declared default is used when the call site omits the argument.
	@Test
	public void testDefaultParameter() {
		assertMatch("$func test(a='Hello',b='World!') { return a + ' ' + b; };$test();", "Hello World!");
	}

	// A lone *a collects every named argument into a map.
	@Test
	public void testAllParameter() {
		assertMatch("$func test(*a) { return a; };$test(a=1,b=2);","{a=1, b=2}");
	}

	// *b collects the named arguments not bound to a declared parameter.
	@Test
	public void testRestOfNamedParameter() {
		assertMatch("$func test(a,*b) { return b; };$test(a=1,b=2,c=3);","{b=2, c=3}");
	}

	// Overflow positional arguments are keyed by their position index.
	@Test
	public void testRestOfPositionalParameter() {
		assertMatch("$func test(a,*b) { return b; };$test(1,2,3);","{1=2, 2=3}");
	}
}
| impossibl/stencil | engine/src/test/java/com/impossibl/stencil/engine/tests/FunctionTest.java | Java | bsd-3-clause | 2,240 |
/*{{{
Copyright © 2018 Matthias Kretz <kretz@kde.org>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the names of contributing organizations nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
}}}*/
#include <vir/test.h>
// Smoke test for the vir test framework's fuzzy-compare macros: equal
// operands must always compare fuzzily equal. NOTE(review): the trailing
// arguments appear to be extra diagnostic columns recorded on failure --
// confirm against vir/test.h.
TEST(foo)
{
  FUZZY_COMPARE_WITH_EXTRA_COLUMNS(1., 1., 2.);
  FUZZY_COMPARE(2., 2.);
  FUZZY_COMPARE_WITH_EXTRA_COLUMNS(3., 3., 1., 2., 3.);
}
| mattkretz/virtest | tests/plotdist.cpp | C++ | bsd-3-clause | 1,691 |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Copyright (c) 2009, Roboterclub Aachen e.V.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Roboterclub Aachen e.V. nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY ROBOTERCLUB AACHEN E.V. ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ROBOTERCLUB AACHEN E.V. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
import os
import builder_base
import filter.cpp as filter
# -----------------------------------------------------------------------------
def filter_subtype(value):
    """ value needs to be parser.structure.SubType """
    type_name = filter.typeName(value.subtype.name)
    var_name = filter.variableName(value.name)
    if not value.subtype.isArray:
        return "%s %s" % (type_name, var_name)
    # Array members carry their element count in the declaration.
    return "%s %s[%s]" % (type_name, var_name, value.subtype.count)
def filter_constructor(class_, default=True):
    """Generate a C++ constructor signature for *class_*.

    With default=True the parameterless "Type()" form is returned.
    Otherwise one parameter per member is generated; an empty string is
    returned when there are no members.

    Raises builder_base.BuilderException for array members, which are not
    supported yet.
    """
    if default:
        return "%s()" % filter.typeName(class_.name)
    else:
        parameter = []
        for item in class_.iter():
            if item.subtype.isArray:
                # Fixed: this previously raised through the undefined name
                # 'builder' (NameError); the exception class lives in the
                # imported builder_base module.
                raise builder_base.BuilderException("Array handling is incomplete " \
                        "right now! Could not generate code for %s" % item)
            else:
                type_name = filter.typeName(item.subtype.name)
                var_name = filter.variableName(item.name)
                parameter.append("%s %s" % (type_name, var_name))

        if len(parameter) > 0:
            return "%s(%s)" % (filter.typeName(class_.name), ", ".join(parameter))
        else:
            return ""
def filter_initialization_list(class_, default=True):
    """Generate a C++ constructor initialization list for *class_*.

    With default=True each member is initialized with its declared default
    value (empty when none is declared); otherwise each member is
    initialized from the constructor parameter of the same name.

    Raises builder_base.BuilderException for array members, which are not
    supported yet.
    """
    initList = []
    for item in class_.iter():
        if item.subtype.isArray:
            # Fixed: this previously raised through the undefined name
            # 'builder' (NameError); the exception class lives in the
            # imported builder_base module.
            raise builder_base.BuilderException("Array handling is incomplete " \
                    "right now! Could not generate code for %s" % item)
        # The unused 'type' local from the original was dropped.
        name = filter.variableName(item.name)
        if default:
            defaultValue = item.value if item.value is not None else ''
            initList.append("%s(%s)" % (name, defaultValue))
        else:
            initList.append("%s(%s)" % (name, name))
    return ", ".join(initList)
# -----------------------------------------------------------------------------
class TypeBuilder(builder_base.Builder):
    """Generates the C++ packet definitions (packets.hpp / packets.cpp)
    from the parsed communication description tree."""

    VERSION = "0.1"

    def setup(self, optparser):
        # Command-line options understood by this builder.
        optparser.add_option(
                "--namespace",
                dest = "namespace",
                default = "robot",
                help = "Namespace of the generated identifiers.")
        optparser.add_option(
                "--source_path",
                dest = "source_path",
                default = None,
                help = "Output path for the source file")
        optparser.add_option(
                "--header_path",
                dest = "header_path",
                default = None,
                help = "Output path for the header file")
        optparser.add_option(
                "--quote_include_path",
                dest = "quote_include_path",
                default = None,
                help = "Include directive for the source file")
        optparser.add_option(
                "--system_include_path",
                dest = "system_include_path",
                default = None,
                help = "Include directive for the source file")

    def generate(self):
        # check the commandline options: either a common output path or
        # separate source/header paths must be given.
        if self.options.outpath:
            source_path = self.options.outpath
            header_path = self.options.outpath
        elif self.options.source_path and self.options.header_path:
            source_path = self.options.source_path
            header_path = self.options.header_path
        else:
            raise builder_base.BuilderException("You need to provide an output path!")

        # Build the #include directive for the generated source file.
        if self.options.system_include_path:
            includeDirective = '<%s>' % os.path.join(self.options.system_include_path, 'packets.hpp')
        elif self.options.quote_include_path:
            # Fixed: this branch previously joined with system_include_path
            # (None here), which broke quoted include generation.
            includeDirective = '"%s"' % os.path.join(self.options.quote_include_path, 'packets.hpp')
        else:
            includeDirective = '"%s"' % 'packets.hpp'

        if self.options.namespace:
            namespace = self.options.namespace
        else:
            raise builder_base.BuilderException("You need to provide a namespace!")

        # Filters made available inside the Jinja-style templates.
        cppFilter = {
            'enumElement': filter.enumElement,
            'enumElementStrong': filter.typeName,
            'variableName': filter.variableName,
            'typeName': filter.typeName,
            'subtype': filter_subtype,
            'generateConstructor': filter_constructor,
            'generateInitializationList': filter_initialization_list
        }
        template_header = self.template('templates/robot_packets.hpp.tpl', filter=cppFilter)
        template_source = self.template('templates/robot_packets.cpp.tpl', filter=cppFilter)

        substitutions = {
            'components': self.tree.components,
            'actions': self.tree.components.actions,
            'events': self.tree.events,
            'packets': self.tree.types,
            'includeDirective': includeDirective,
            'namespace': namespace
        }

        file = os.path.join(header_path, 'packets.hpp')
        self.write(file, template_header.render(substitutions) + "\n")

        file = os.path.join(source_path, 'packets.cpp')
        self.write(file, template_source.render(substitutions) + "\n")
# -----------------------------------------------------------------------------
# Entry point: run the builder when this file is executed as a script.
if __name__ == '__main__':
    TypeBuilder().run()
| dergraaf/xpcc | tools/system_design/builder/cpp_packets.py | Python | bsd-3-clause | 6,142 |
#include "header.h"
#include "sanity.h"
#ifdef TARGET_LITTLE_ENDIAN
// Sanity check of the build's effective aliasing behavior: store through
// an unsigned short lvalue aliasing an unsigned long and verify the write
// is visible through the original object. This is deliberate type punning
// (formally UB under strict aliasing) and is little-endian only because
// the short must alias the LOW-order bytes for a==5 to hold.
int TestSanity_TypeBasedAliasing()
{
	unsigned long a;

	a = 4;
	*(unsigned short *)&a = 5;
	TEST_ASSERT(a == 5);

	return 0;
}
#endif
| prophile/crisscross | TestSuite/sanity.cpp | C++ | bsd-3-clause | 206 |
import unittest
from prestans.provider.cache import Base
class CacheBaseUnitTest(unittest.TestCase):
    """Exercises the debug flag on the base cache provider."""

    def test_debug(self):
        provider = Base()
        # A freshly constructed provider starts in non-debug mode.
        self.assertEqual(provider.debug, False)
        # The flag is a plain read/write attribute.
        provider.debug = True
        self.assertEqual(provider.debug, True)
| anomaly/prestans | tests/provider/test_cache.py | Python | bsd-3-clause | 267 |
from baseneuron import BaseNeuron
import numpy as np
import pycuda.gpuarray as garray
from pycuda.tools import dtype_to_ctype
import pycuda.driver as cuda
from pycuda.compiler import SourceModule
class MorrisLecarCopy(BaseNeuron):
    """Morris-Lecar neuron model integrated on the GPU.

    The membrane voltage V and recovery variable n are advanced with a
    forward-Euler scheme inside a single CUDA kernel; each simulation step
    dt is subdivided into internal sub-steps of at most 1e-5 s.
    """

    def __init__(self, n_dict, V, dt , debug=False):
        # n_dict: per-neuron parameter arrays keyed by name.
        # V: device pointer to the membrane-voltage array owned by the LPU.
        self.num_neurons = len(n_dict['id'])
        self.dt = np.double(dt)
        # Number of internal Euler sub-steps per dt (at least one).
        self.steps = max(int(round(dt / 1e-5)),1)
        self.debug = debug
        self.ddt = dt / self.steps

        self.V = V
        self.n = garray.to_gpu(np.asarray(n_dict['initn'], dtype=np.float64))

        # Per-neuron model parameters, copied to the GPU once.
        self.V_1 = garray.to_gpu(np.asarray(n_dict['V1'], dtype=np.float64))
        self.V_2 = garray.to_gpu(np.asarray(n_dict['V2'], dtype=np.float64))
        self.V_3 = garray.to_gpu(np.asarray(n_dict['V3'], dtype=np.float64))
        self.V_4 = garray.to_gpu(np.asarray(n_dict['V4'], dtype=np.float64))
        self.Tphi = garray.to_gpu(np.asarray(n_dict['phi'], dtype=np.float64))
        self.offset = garray.to_gpu(np.asarray(n_dict['offset'],
                                               dtype=np.float64))

        # Write the initial voltages into the externally owned device array.
        cuda.memcpy_htod(int(self.V), np.asarray(n_dict['initV'], dtype=np.double))

        self.update = self.get_euler_kernel()

    @property
    def neuron_class(self): return True

    def eval(self, st = None):
        # Launch one update covering self.steps Euler sub-steps; st is an
        # optional CUDA stream for asynchronous execution.
        # NOTE(review): self.I is assumed to be provided by BaseNeuron, and
        # ddt is scaled by 1000 (seconds -> milliseconds, presumably to
        # match the kernel's units) -- confirm against BaseNeuron/the LPU.
        self.update.prepared_async_call(self.update_grid, self.update_block, st, self.V, self.n.gpudata, self.num_neurons, self.I.gpudata, self.ddt*1000, self.steps, self.V_1.gpudata, self.V_2.gpudata, self.V_3.gpudata, self.V_4.gpudata, self.Tphi.gpudata, self.offset.gpudata)

    def get_euler_kernel(self):
        # Build and compile the CUDA kernel; the %(type)s / %(nneu)d
        # placeholders are substituted below before compilation.
        template = """
#define NVAR 2
#define NNEU %(nneu)d //NROW * NCOL
#define V_L (-0.05)
#define V_Ca 0.1
#define V_K (-0.07)
#define g_Ca 1.1
#define g_K 2.0
#define g_L 0.5
__device__ %(type)s compute_n(%(type)s V, %(type)s n, %(type)s V_3, %(type)s V_4, %(type)s Tphi)
{
%(type)s n_inf = 0.5 * (1 + tanh((V - V_3) / V_4));
%(type)s dn = Tphi * cosh(( V - V_3) / (V_4*2)) * (n_inf - n);
return dn;
}
__device__ %(type)s compute_V(%(type)s V, %(type)s n, %(type)s I, %(type)s V_1, %(type)s V_2, %(type)s offset)
{
%(type)s m_inf = 0.5 * (1+tanh((V - V_1)/V_2));
%(type)s dV = (I - g_L * (V - V_L) - g_K * n * (V - V_K) - g_Ca * m_inf * (V - V_Ca) + offset);
return dV;
}
__global__ void
hhn_euler_multiple(%(type)s* g_V, %(type)s* g_n, int num_neurons, %(type)s* I_pre, %(type)s dt, int nsteps, \
%(type)s* V_1, %(type)s* V_2, %(type)s* V_3, %(type)s* V_4, %(type)s* Tphi, %(type)s* offset)
{
int bid = blockIdx.x;
int cart_id = bid * NNEU + threadIdx.x;
%(type)s I, V, n;
if(cart_id < num_neurons)
{
V = g_V[cart_id];
I = I_pre[cart_id];
n = g_n[cart_id];
%(type)s dV, dn;
for(int i = 0; i < nsteps; ++i)
{
dn = compute_n(V, n, V_3[cart_id], V_4[cart_id], Tphi[cart_id]);
dV = compute_V(V, n, I, V_1[cart_id], V_2[cart_id], offset[cart_id]);
V += dV * dt;
n += dn * dt;
}
g_V[cart_id] = V;
g_n[cart_id] = n;
}
}
"""#Used 40 registers, 1024+0 bytes smem, 84 bytes cmem[0], 308 bytes cmem[2], 28 bytes cmem[16]
        dtype = np.double
        scalartype = dtype.type if dtype.__class__ is np.dtype else dtype
        # One 128-thread block per NNEU neurons.
        self.update_block = (128,1,1)
        self.update_grid = ((self.num_neurons - 1) / 128 + 1, 1)
        mod = SourceModule(template % {"type": dtype_to_ctype(dtype), "nneu": self.update_block[0]}, options=["--ptxas-options=-v"])
        func = mod.get_function("hhn_euler_multiple")
        func.prepare([np.intp, np.intp, np.int32, np.intp, scalartype, np.int32, np.intp, np.intp, np.intp, np.intp, np.intp, np.intp])
        return func
| cerrno/neurokernel | neurokernel/LPU/neurons/MorrisLecarCopy.py | Python | bsd-3-clause | 3,978 |
package edu.ucdenver.ccp.datasource.identifiers.impl.bio;
import edu.ucdenver.ccp.datasource.fileparsers.CcpExtensionOntology;
/*
* #%L
* Colorado Computational Pharmacology's common module
* %%
* Copyright (C) 2012 - 2014 Regents of the University of Colorado
* %%
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the Regents of the University of Colorado nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
import edu.ucdenver.ccp.datasource.identifiers.DataSource;
import edu.ucdenver.ccp.datasource.identifiers.Identifier;
import edu.ucdenver.ccp.datasource.identifiers.StringDataSourceIdentifier;
/**
 * Identifier for a record in the ApiDB CryptoDB resource.
 */
@Identifier(ontClass=CcpExtensionOntology.APIDBCRYPTODB_IDENTIFIER)
public class ApiDbCryptoDbID extends StringDataSourceIdentifier {

	/**
	 * @param resourceID the CryptoDB record identifier string
	 */
	public ApiDbCryptoDbID(String resourceID) {
		super(resourceID, DataSource.APIDBCRYPTODB);
	}

}
| UCDenver-ccp/datasource | datasource-fileparsers/src/main/java/edu/ucdenver/ccp/datasource/identifiers/impl/bio/ApiDbCryptoDbID.java | Java | bsd-3-clause | 2,247 |
<?php
use yii\db\Schema;
use yii\db\Migration;
/**
 * Creates the server schema: server, server_type, server_detail, the
 * server<->type and detail<->type join tables, and server_configuration.
 * down() drops everything in reverse dependency order (FKs first).
 */
class m160229_125309_adding_server_table extends Migration
{
    public function up()
    {
        // Base server table, keyed by a short code.
        $this->createTable('server', [
            'code' => Schema::TYPE_STRING . '(5) NOT NULL',
            'name' => Schema::TYPE_STRING . ' NOT NULL',
            'status' => Schema::TYPE_INTEGER . '(1) NOT NULL DEFAULT 0',
        ], "DEFAULT CHARSET=utf8");

        $this->addPrimaryKey('pk_server', 'server', 'code');

        // Lookup table of server types.
        $this->createTable('server_type', [
            'code' => Schema::TYPE_STRING . '(4) NOT NULL',
            'name' => Schema::TYPE_STRING . ' NULL',
        ], "DEFAULT CHARSET=utf8");

        $this->addPrimaryKey('pk_server_type', 'server_type', 'code');

        // Time-stamped assignment of types to servers.
        $this->createTable('server_to_server_type', [
            'create_date' => Schema::TYPE_DATETIME . ' NOT NULL DEFAULT NOW()',
            'server' => Schema::TYPE_STRING . '(5) NOT NULL',
            'server_type' => Schema::TYPE_STRING . '(4) NOT NULL',
            'status' => Schema::TYPE_INTEGER . '(1) NOT NULL DEFAULT 0',
        ], "DEFAULT CHARSET=utf8");

        $this->addPrimaryKey('pk_server_to_server_type', 'server_to_server_type',[
            'create_date',
            'server',
            'server_type',
        ]);

        $this->addForeignKey('fk_server_to_server_type_server_code', 'server_to_server_type', 'server', 'server', 'code', 'RESTRICT', 'RESTRICT');
        $this->addForeignKey('fk_server_to_server_type_server_type_code', 'server_to_server_type', 'server_type', 'server_type', 'code', 'RESTRICT', 'RESTRICT');

        // Lookup table of configurable server details.
        $this->createTable('server_detail', [
            'code' => Schema::TYPE_STRING . '(4) NOT NULL',
            'name' => Schema::TYPE_STRING . ' NULL',
        ], "DEFAULT CHARSET=utf8");

        $this->addPrimaryKey('pk_server_detail', 'server_detail', 'code');

        // Which details are applicable to which server types.
        $this->createTable('server_detail_to_server_type', [
            'server_type' => Schema::TYPE_STRING . '(4) NOT NULL',
            'server_detail' => Schema::TYPE_STRING . '(4) NOT NULL',
        ], "DEFAULT CHARSET=utf8");

        $this->addPrimaryKey('pk_server_detail_to_server_type', 'server_detail_to_server_type', [
            'server_type',
            'server_detail',
        ]);

        $this->addForeignKey('fk_server_detail_to_server_type_server_type_code', 'server_detail_to_server_type', 'server_type', 'server_type', 'code', 'RESTRICT', 'RESTRICT');
        $this->addForeignKey('fk_server_detail_to_server_type_server_detail_code', 'server_detail_to_server_type', 'server_detail', 'server_detail', 'code', 'RESTRICT', 'RESTRICT');

        // Actual configuration values per (server, type, detail).
        $this->createTable('server_configuration', [
            'create_date' => Schema::TYPE_DATETIME . ' NOT NULL DEFAULT NOW()',
            'server' => Schema::TYPE_STRING . '(5) NOT NULL',
            'server_type' => Schema::TYPE_STRING . '(4) NOT NULL',
            'server_detail' => Schema::TYPE_STRING . '(4) NOT NULL',
            'value' => Schema::TYPE_STRING . ' NULL',
            'status' => Schema::TYPE_INTEGER . '(1) NOT NULL DEFAULT 0',
        ], "DEFAULT CHARSET=utf8");

        $this->addPrimaryKey('pk_server_configuration', 'server_configuration', [
            'server',
            'server_type',
            'server_detail',
        ]);

        $this->addForeignKey('fk_server_configuration_server_code', 'server_configuration', 'server', 'server', 'code', 'RESTRICT', 'RESTRICT');
        $this->addForeignKey('fk_server_configuration_server_type_code', 'server_configuration', 'server_type', 'server_type', 'code', 'RESTRICT', 'RESTRICT');
        $this->addForeignKey('fk_server_configuration_server_detail_code', 'server_configuration', 'server_detail', 'server_detail', 'code', 'RESTRICT', 'RESTRICT');
    }

    public function down()
    {
        // Drop order mirrors up() in reverse: FKs before keys before tables.
        $this->dropForeignKey('fk_server_configuration_server_detail_code', 'server_configuration');
        $this->dropForeignKey('fk_server_configuration_server_type_code', 'server_configuration');
        $this->dropForeignKey('fk_server_configuration_server_code', 'server_configuration');
        $this->dropPrimaryKey('pk_server_configuration', 'server_configuration');
        $this->dropTable('server_configuration');

        $this->dropForeignKey('fk_server_detail_to_server_type_server_detail_code', 'server_detail_to_server_type');
        $this->dropForeignKey('fk_server_detail_to_server_type_server_type_code', 'server_detail_to_server_type');
        $this->dropPrimaryKey('pk_server_detail_to_server_type', 'server_detail_to_server_type');
        $this->dropTable('server_detail_to_server_type');

        $this->dropPrimaryKey('pk_server_detail', 'server_detail');
        $this->dropTable('server_detail');

        $this->dropForeignKey('fk_server_to_server_type_server_type_code', 'server_to_server_type');
        $this->dropForeignKey('fk_server_to_server_type_server_code', 'server_to_server_type');
        $this->dropPrimaryKey('pk_server_to_server_type', 'server_to_server_type');
        $this->dropTable('server_to_server_type');

        $this->dropPrimaryKey('pk_server_type', 'server_type');
        $this->dropTable('server_type');

        $this->dropTable('server');
    }
}
| dimichspb/tickets | console/migrations/m160229_125309_adding_server_table.php | PHP | bsd-3-clause | 5,206 |
__author__ = 'Cam Moore'

# Distribution metadata shared by both the setuptools and distutils paths.
config = {
    'description': 'regexquiz is the simple shell for answering regular expression quizzes for ICS 215',
    'author': 'Cam Moore',
    'author_email': 'cmoore@hawaii.edu',
    'version': '1.1',
    'install_requires': [],
    'packages': ['regexquiz'],
    'name': 'regexquiz'
}

try:
    # Prefer setuptools: console_scripts entry points generate the
    # platform-appropriate launchers automatically.
    from setuptools import setup

    config['entry_points'] = {
        'console_scripts' : [
            'regexquiz = regexquiz.cmdline:main'
        ],
    }
except ImportError:
    # Fall back to distutils, which only supports plain script files.
    from distutils.core import setup

    config['scripts'] = ['bin/regexquiz', 'bin/regexquiz.bat']

setup(**config)
| cammoore/RegExQuiz | setup.py | Python | bsd-3-clause | 627 |
<?php
/**
* Created by PhpStorm.
* User: admin
* Date: 2015-10-01
* Time: 16:55
*/
//use \phpqrcode\QRcode;
require(__DIR__ . '/../assets/phpqrcode/qrlib.php');
//$qrlibpath=Yii::getAlias('@web/../assets/phpqrcode/qrlib.php');
//require($qrlibpath);
/*
$value='http://tbhome.com.cn';
$file=false;
$level='L';
$size=10;
$margin=3;
*/
/*
$vcards=$_GET['vcards'];
if($vcards){
$filepath=time().'.vcf';
$file = fopen($filepath, "x+");//w+
fwrite($file, $vcards);
// header('Content-type:application/octet-stream');
// header('Accept-Ranges:bytes');
// header('Accept-Length:' . filesize($filepath));
// header('Content-Disposition:attachment;filename="' . $filename . '"');
echo fread($file, filesize($filepath));
fclose($file);
header("location:$filepath");
}
*/
// Read the QR payload and optional output file name from the request.
// Both values are untrusted user input.
$value = isset($_GET['value']) ? $_GET['value'] : '';
$filename = isset($_GET['filename']) ? $_GET['filename'] : null;
//$file=$_GET['file'];
//$level=$_GET['level'];
//$size=$_GET['size'];
//$margin=$_GET['margin'];
// QR code content
//header('Content-Type:text/html;charset=UTF-8');
// generate the QR code image
if ($filename === null) {
    // No file name given: stream the PNG directly to the client.
    QRcode::png($value, false, 'H', 8, 1);
} else {
    // Security fix: restrict the user-supplied name to a bare file name so
    // it cannot escape the current directory (path traversal), and escape
    // it before embedding in HTML (XSS).
    $filename = basename($filename);
    QRcode::png($value, $filename, 'H', 6, 1);
    echo '<img src="' . htmlspecialchars($filename, ENT_QUOTES, 'UTF-8') . '">';
}
/*
public static function png($text, $outfile=false, $level=QR_ECLEVEL_L, $size=3, $margin=4, $saveandprint=false)
{
$enc = QRencode::factory($level, $size, $margin);
return $enc->encodePNG($text, $outfile, $saveandprint=false);
}
*/
/*
$logo = 'logo.png';//准备好的logo图片
$QR = 'qrcode.png';//已经生成的原始二维码图
if ($logo !== FALSE) {
$QR = imagecreatefromstring(file_get_contents($QR));
$logo = imagecreatefromstring(file_get_contents($logo));
$QR_width = imagesx($QR);//二维码图片宽度
$QR_height = imagesy($QR);//二维码图片高度
$logo_width = imagesx($logo);//logo图片宽度
$logo_height = imagesy($logo);//logo图片高度
$logo_qr_width = $QR_width / 5;
$scale = $logo_width/$logo_qr_width;
$logo_qr_height = $logo_height/$scale;
$from_width = ($QR_width - $logo_qr_width) / 2;
//重新组合图片并调整大小
imagecopyresampled($QR, $logo, $from_width, $from_width, 0, 0, $logo_qr_width,
$logo_qr_height, $logo_width, $logo_height);
}
//输出图片
imagepng($QR, 'helloweixin.png');
echo '<img src="helloweixin.png">';
*/
//include '../phpqrcode/phpqrcode.php';
//QRcode::png('http://m.tbhome.com.cn/index.php/Home/Member/fwcx/fwuid/4', 'filename.png');
/*
$data = 'http://gz.altmi.com';
// 生成的文件名
$filename = $errorCorrectionLevel.'|'.$matrixPointSize.'.png';
// 纠错级别:L、M、Q、H
$errorCorrectionLevel = 'L';
// 点的大小:1到10
$matrixPointSize = 4;
QRcode::png($data, $filename, $errorCorrectionLevel, $matrixPointSize, 2);
*/
?>
| whq78164/Vcards | frontend/web/qrcode.php | PHP | bsd-3-clause | 2,900 |
// Decode a URL-encoded component, treating '+' as an encoded space.
function unescapeURL(s) {
    var plusFixed = s.replace(/\+/g, "%20");
    return decodeURIComponent(plusFixed);
}
// Parse the current page URL's query parameters into an object mapping
// decoded names to decoded values.
function getURLParams() {
    var params = {};
    var pairs = window.location.href.match(/[\\?&]([^=]+)=([^&#]*)/g);
    if (pairs) {
        for (var i = 0; i < pairs.length; i++) {
            var parts = pairs[i].match(/.([^=]+)=(.*)/);
            params[unescapeURL(parts[1])] = unescapeURL(parts[2]);
        }
    }
    return params;
}
// Validate the feature dropdowns and submit the HIT form. Every dropdown
// must have a non-default selection; offending rows get an inline error
// message and the submission is aborted.
function submit() {
    $(".errmsg").text("");
    // Fixed: 'errors' was an implicit global; declare it locally.
    var errors = false;
    $(".featureDropdown").each(function(index) {
        if ($(this).attr("value") == "default") {
            $(".errmsg", $(this).closest(".feature")).text("Please select a value");
            errors = true;
        }
    });
    if (!errors) {
        $("#form_turk").submit();
    }
}
// Wire up the page once the DOM is ready.
$(document).ready(function() {
    var params = getURLParams();
    // Only enable interaction when MTurk supplied a real assignment id
    // (HIT previews pass ASSIGNMENT_ID_NOT_AVAILABLE).
    if (params.assignmentId && (params.assignmentId != "ASSIGNMENT_ID_NOT_AVAILABLE")) {
        $('#assignmentId').attr('value', params.assignmentId);
        $("#submit_btn").click(function() {
            submit();
        });
    } else {
        // Preview mode: disable the form controls.
        $("#submit_btn").attr("disabled", "true");
        $(".featureDropdown").attr("disabled", "true");
    }
})
| marcua/qurk_experiments | qurkexp/media/js/celeb_features.js | JavaScript | bsd-3-clause | 1,186 |
# Background job that fetches the data for a remotely hosted content blob.
class RemoteContentFetchingJob < SeekJob
  attr_reader :content_blob_id

  # Only the id is stored so the serialized job payload stays small.
  def initialize(content_blob)
    @content_blob_id = content_blob.id
  end

  # Retrieves the remote content for a single blob.
  def perform_job(job_item)
    job_item.retrieve
  end

  # The blob may have been deleted since the job was queued, so the
  # lookup miss yields an empty work list.
  def gather_items
    blob = ContentBlob.find_by_id(content_blob_id)
    blob.nil? ? [] : [blob]
  end

  def queue_name
    QueueNames::REMOTE_CONTENT
  end
end
| HITS-SDBV/seek | app/jobs/remote_content_fetching_job.rb | Ruby | bsd-3-clause | 345 |
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text;
#pragma warning disable 1591
namespace OpenCvSharp
{
// NOTE(review): appears to mirror the native OpenCV CvHistogram struct --
// LayoutKind.Sequential means field order and types must match the native
// layout exactly; do not reorder. Confirm against the OpenCV C API headers.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
internal unsafe struct WCvHistogram
{
    public int type;
    public void* bins;
    // Flattened fixed-size buffer of CV_MAX_DIM * 2 threshold floats.
    public fixed float thresh[CvConst.CV_MAX_DIM * 2];
    public float** thresh2;
    public WCvMatND mat;
}
}
| shimat/opencvsharp_2410 | src/OpenCvSharp/Src/PInvoke/Struct/Etc/WCvHistogram.cs | C# | bsd-3-clause | 455 |
#!/usr/bin/env python
__author__ = "Adam Simpkin, and Felix Simkovic"
__contributing_authors__ = "Jens Thomas, and Ronan Keegan"
__credits__ = "Daniel Rigden, William Shepard, Charles Ballard, Villi Uski, and Andrey Lebedev"
__date__ = "05 May 2017"
__email__ = "hlasimpk@liv.ac.uk"
__version__ = "0.1"
import argparse
import os
import sys
from pyjob.stopwatch import StopWatch
import simbad.command_line
import simbad.exit
import simbad.util
import simbad.util.logging_util
import simbad.util.pyrvapi_results
logger = None
def simbad_argparse():
    """Build the argument parser for the full SIMBAD pipeline.

    Installs the shared SIMBAD option groups (core, job submission,
    contaminant, MoRDa, lattice, rotation, MR and MTZ options) and adds
    the single positional ``mtz`` argument.
    """
    parser = argparse.ArgumentParser(
        description="SIMBAD: Sequence Independent Molecular replacement Based on Available Database",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    # Each helper registers one group of options on the parser in place.
    option_installers = (
        simbad.command_line._argparse_core_options,
        simbad.command_line._argparse_job_submission_options,
        simbad.command_line._argparse_contaminant_options,
        simbad.command_line._argparse_morda_options,
        simbad.command_line._argparse_lattice_options,
        simbad.command_line._argparse_rot_options,
        simbad.command_line._argparse_mr_options,
        simbad.command_line._argparse_mtz_options,
    )
    for install in option_installers:
        install(parser)
    parser.add_argument('mtz', help="The path to the input mtz file")
    return parser
def main():
    """Main SIMBAD routine.

    Runs the three SIMBAD searches in order (lattice, contaminant, full
    MoRDa domain search), stopping at the first success unless
    ``--process_all`` is set, then reports the best result and timing.
    """
    args = simbad_argparse().parse_args()
    args.work_dir = simbad.command_line.get_work_dir(
        args.run_dir, work_dir=args.work_dir, ccp4_jobid=args.ccp4_jobid, ccp4i2_xml=args.ccp4i2_xml
    )
    log_file = os.path.join(args.work_dir, 'simbad.log')
    debug_log_file = os.path.join(args.work_dir, 'debug.log')
    # Rebind the module-level logger so other functions in this module log too.
    global logger
    logger = simbad.util.logging_util.setup_logging(args.debug_lvl, logfile=log_file, debugfile=debug_log_file)
    if not os.path.isfile(args.amore_exe):
        raise OSError("amore executable not found")
    gui = simbad.util.pyrvapi_results.SimbadOutput(
        args.rvapi_document, args.webserver_uri, args.display_gui, log_file, args.work_dir, ccp4i2_xml=args.ccp4i2_xml, tab_prefix=args.tab_prefix
    )
    simbad.command_line.print_header()
    logger.info("Running in directory: %s\n", args.work_dir)
    stopwatch = StopWatch()
    stopwatch.start()
    # The while loop runs at most once (end_of_cycle is set at the bottom);
    # `continue` is used to short-circuit out after a successful stage.
    end_of_cycle, solution_found, all_results = False, False, {}
    while not (solution_found or end_of_cycle):
        # =====================================================================================
        # Perform the lattice search
        solution_found = simbad.command_line._simbad_lattice_search(args)
        logger.info("Lattice search completed in %d days, %d hours, %d minutes, and %d seconds",
                    *stopwatch.lap.time_pretty)
        if solution_found and not args.process_all:
            logger.info(
                "Lucky you! SIMBAD worked its charm and found a lattice match for you.")
            continue
        elif solution_found and args.process_all:
            logger.info(
                "SIMBAD thinks it has found a solution however process_all is set, continuing to contaminant search")
        else:
            logger.info("No results found - lattice search was unsuccessful")
        if args.output_pdb and args.output_mtz:
            # Remember the best lattice hit (lowest final R-free) for later output.
            csv = os.path.join(args.work_dir, 'latt/lattice_mr.csv')
            all_results['latt'] = simbad.util.result_by_score_from_csv(csv, 'final_r_free', ascending=True)
        gui.display_results(False, args.results_to_display)
        # =====================================================================================
        # Perform the contaminant search
        solution_found = simbad.command_line._simbad_contaminant_search(args)
        logger.info("Contaminant search completed in %d days, %d hours, %d minutes, and %d seconds",
                    *stopwatch.lap.time_pretty)
        if solution_found and not args.process_all:
            logger.info(
                "Check you out, crystallizing contaminants! But don't worry, SIMBAD figured it out and found a solution.")
            continue
        elif solution_found and args.process_all:
            logger.info(
                "SIMBAD thinks it has found a solution however process_all is set, continuing to morda search")
        else:
            logger.info(
                "No results found - contaminant search was unsuccessful")
        if args.output_pdb and args.output_mtz:
            csv = os.path.join(args.work_dir, 'cont/cont_mr.csv')
            all_results['cont'] = simbad.util.result_by_score_from_csv(csv, 'final_r_free', ascending=True)
        gui.display_results(False, args.results_to_display)
        # =====================================================================================
        # Perform the morda search
        solution_found = simbad.command_line._simbad_morda_search(args)
        logger.info("Full MoRDa domain search completed in %d days, %d hours, %d minutes, and %d seconds",
                    *stopwatch.lap.time_pretty)
        if solution_found:
            logger.info("... and SIMBAD worked once again. Get in!")
            continue
        else:
            logger.info("No results found - full search was unsuccessful")
        if args.output_pdb and args.output_mtz:
            csv = os.path.join(args.work_dir, 'morda/morda_mr.csv')
            all_results['morda'] = simbad.util.result_by_score_from_csv(csv, 'final_r_free', ascending=True)
        gui.display_results(False, args.results_to_display)
        # =====================================================================================
        # Make sure we only run the loop once for now
        end_of_cycle = True
    if len(all_results) >= 1:
        # Pick the best-scoring result across stages.
        # NOTE(review): sorting (value, key) pairs by kv[1] relies on the
        # result objects being orderable — confirm against
        # simbad.util.result_by_score_from_csv's return type.
        if sys.version_info.major == 3:
            sorted_results = sorted(all_results.items(), key=lambda kv: (kv[1], kv))
        else:
            sorted_results = sorted(all_results.iteritems(), key=lambda kv: (kv[1], kv))
        result = sorted_results[0][1]
        simbad.util.output_files(args.work_dir, result, args.output_pdb, args.output_mtz)
    stopwatch.stop()
    logger.info("All processing completed in %d days, %d hours, %d minutes, and %d seconds",
                *stopwatch.time_pretty)
    gui.display_results(True, args.results_to_display)
    if args.rvapi_document:
        gui.save_document()
if __name__ == "__main__":
    # Capture everything (NOTSET) so the file/debug handlers set up in main()
    # can decide what to keep.
    import logging
    logging.basicConfig(level=logging.NOTSET)
    try:
        main()
    except Exception:
        # Route any uncaught error through SIMBAD's exit handler for a
        # formatted report instead of a raw traceback.
        simbad.exit.exit_error(*sys.exc_info())
| rigdenlab/SIMBAD | simbad/command_line/simbad_full.py | Python | bsd-3-clause | 6,513 |
#include "AliAnalysisTaskPOmegaPenne.h"
#include <vector>
#include "AliAnalysisManager.h"
#include "AliAnalysisTaskNanoXioton.h"
#include "AliAnalysisTaskAODXioton.h"
#include "AliFemtoDreamEventCuts.h"
#include "AliFemtoDreamCascadeCuts.h"
#include "AliFemtoDreamCollConfig.h"
// Create, configure and register the femtoscopic p-p / p-K analysis task
// (AliAnalysisTaskPOmegaPenne) with the current AliAnalysisManager.
//
//   isMC    : enable Monte-Carlo mode in the track cuts
//   CentEst : trigger selection, "kInt7" or "kHM" (high-multiplicity V0)
//
// Returns the registered task, or nullptr when no analysis manager or input
// event handler is available.
AliAnalysisTaskPOmegaPenne *AddTaskPOmegaPenne( bool isMC = false, TString CentEst = "kHM")
{
    AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
    if (!mgr)
    {
        printf("No analysis manager to connect to!\n");
        return nullptr;
    }
    if (!mgr->GetInputEventHandler())
    {
        printf("This task requires an input event handler!\n");
        return nullptr;
    }
    // Standard Run-2 event selection.
    AliFemtoDreamEventCuts *evtCuts = AliFemtoDreamEventCuts::StandardCutsRun2();
    evtCuts->CleanUpMult(false, false, false, true);
    // ########################### CUTS ##############################
    // Proton cuts
    AliFemtoDreamTrackCuts *TrackCutsProton = AliFemtoDreamTrackCuts::PrimProtonCuts(isMC, true, false, false);
    TrackCutsProton->SetCutCharge(1);
    // Anti-proton cuts
    AliFemtoDreamTrackCuts *TrackCutsAntiProton = AliFemtoDreamTrackCuts::PrimProtonCuts(isMC, true, false, false);
    TrackCutsAntiProton->SetCutCharge(-1);
    // Kaon cuts
    AliFemtoDreamTrackCuts *TrackCutsKaon = AliFemtoDreamTrackCuts::PrimKaonCuts(isMC, true, false, false);
    TrackCutsKaon->SetCutCharge(1);
    // Anti-kaon cuts
    AliFemtoDreamTrackCuts *TrackCutsAntiKaon = AliFemtoDreamTrackCuts::PrimKaonCuts(isMC, true, false, false);
    TrackCutsAntiKaon->SetCutCharge(-1);
    // (The commented-out Xi cascade cuts that used to live here were removed;
    // see the repository history if they need to be restored.)
    //
    // PDG codes of the four selected species; particle vs. anti-particle is
    // distinguished by the charge set on the track cuts above.
    std::vector<int> PDGParticles;
    PDGParticles.push_back(2212); // proton
    PDGParticles.push_back(2212); // anti-proton
    PDGParticles.push_back(321);  // K+
    PDGParticles.push_back(321);  // K-
    // Four species give 10 unique pairs -> one entry per pair.
    // Fill-constructors replace the former repeated push_back() runs.
    std::vector<int> NBins(10, 750);             // k* histogram bins per pair
    std::vector<float> kMin(10, 0.);             // k* lower edge per pair
    std::vector<float> kMax(10, 3.);             // k* upper edge per pair
    std::vector<int> pairQA(10, 0);              // extended pair-QA switches
    std::vector<bool> closeRejection(10, false); // close-pair rejection switches
    pairQA[0] = 11; // p-p
    pairQA[4] = 11; // ap-ap
    closeRejection[0] = true;
    closeRejection[4] = true;
    pairQA[2] = 11; // p-K
    pairQA[6] = 11; // ap-aK
    // z-vertex mixing bins: -10 .. +10 cm in steps of 2 cm.
    std::vector<float> ZVtxBins;
    for (int zvtx = -10; zvtx <= 10; zvtx += 2)
    {
        ZVtxBins.push_back(zvtx);
    }
    // Multiplicity mixing bins: 0 .. 80 in steps of 4.
    std::vector<int> MultBins;
    for (int mult = 0; mult <= 80; mult += 4)
    {
        MultBins.push_back(mult);
    }
    AliFemtoDreamCollConfig *config = new AliFemtoDreamCollConfig("Femto", "Femto");
    config->SetZBins(ZVtxBins);
    config->SetMultBins(MultBins);
    config->SetMultBinning(true);
    config->SetPDGCodes(PDGParticles);
    config->SetNBinsHist(NBins);
    config->SetMinKRel(kMin);
    config->SetMaxKRel(kMax);
    config->SetMixingDepth(10);
    config->SetExtendedQAPairs(pairQA);
    config->SetClosePairRejection(closeRejection);
    config->SetDeltaEtaMax(0.012);
    config->SetDeltaPhiMax(0.012);
    // Task creation and trigger selection.
    AliAnalysisTaskPOmegaPenne *task = new AliAnalysisTaskPOmegaPenne("FemtoDreamPOmegaPenne", isMC);
    if (CentEst == "kInt7")
    {
        task->SelectCollisionCandidates(AliVEvent::kINT7);
        std::cout << "Added kINT7 Trigger \n";
    }
    else if (CentEst == "kHM")
    {
        task->SelectCollisionCandidates(AliVEvent::kHighMultV0);
        std::cout << "Added kHighMultV0 Trigger \n";
    }
    else
    {
        std::cout << "=====================================================================" << std::endl;
        std::cout << "=====================================================================" << std::endl;
        std::cout << "Centrality Estimator not set, fix it else your Results will be empty!" << std::endl;
        std::cout << "=====================================================================" << std::endl;
        std::cout << "=====================================================================" << std::endl;
    }
    task->SetEventCuts(evtCuts);
    task->SetTrackCutsProton(TrackCutsProton);
    task->SetTrackCutsAntiProton(TrackCutsAntiProton);
    task->SetTrackCutsKaon(TrackCutsKaon);
    task->SetTrackCutsAntiKaon(TrackCutsAntiKaon);
    task->SetCollectionConfig(config);
    mgr->AddTask(task);
    TString file = AliAnalysisManager::GetCommonFileName();
    mgr->ConnectInput(task, 0, mgr->GetCommonInputContainer());
    // Single QA output container, written into the common results file.
    AliAnalysisDataContainer *coutputQA;
    TString QAName = Form("MyTask");
    coutputQA = mgr->CreateContainer(
        QAName.Data(), TList::Class(), AliAnalysisManager::kOutputContainer,
        Form("%s:%s", file.Data(), QAName.Data()));
    mgr->ConnectOutput(task, 1, coutputQA);
    return task;
}
| akubera/AliPhysics | PWGCF/FEMTOSCOPY/macros/AddTaskPOmegaPenne.C | C++ | bsd-3-clause | 9,679 |
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# Custom handler for HTTP 500 responses (dotted path to the view).
handler500 = 'myapp.views.this_server_error'
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'helloworld.views.home', name='home'),
    # Main application entry point.
    (r'^myapp/$', 'myapp.views.index'),
    # Uncomment the admin/doc line below to enable admin documentation:
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),
)
| sodafree/backend | helloworld/helloworld/urls.py | Python | bsd-3-clause | 585 |
/// <reference path="../../node_modules/monaco-editor/monaco.d.ts" />
// Liquid tag keywords offered as completions inside "{% ... %}" blocks.
// FIX: Liquid's conditional-else tag is spelled "elsif" (not "elseif"),
// and "render" was listed twice.
const liquidTags = [
    'if',
    'else',
    'elsif',
    'endif',
    'render',
    'assign',
    'capture',
    'endcapture',
    'case',
    'endcase',
    'comment',
    'endcomment',
    'cycle',
    'decrement',
    'for',
    'endfor',
    'include',
    'increment',
    'layout',
    'raw',
    'endraw',
    'tablerow',
    'endtablerow',
    'unless',
    'endunless'
];
// Liquid filter names offered as completions after a "|" inside
// "{{ ... }}" output blocks and "{% assign ... %}" tags.
const liquidFilters = [
    'abs',
    'append',
    'at_least',
    'at_most',
    'capitalize',
    'ceil',
    'compact',
    'date',
    'default',
    'divided_by',
    'downcase',
    'escape',
    'escape_once',
    'first',
    'floor',
    'join',
    'json',
    'last',
    'lstrip',
    'map',
    'minus',
    'modulo',
    'newline_to_br',
    'plus',
    'prepend',
    'remove',
    'remove_first',
    'replace',
    'replace_first',
    'reverse',
    'round',
    'rstrip',
    'size',
    'slice',
    'sort',
    'sort_natural',
    'split',
    'strip',
    'strip_html',
    'strip_newlines',
    'times',
    'truncate',
    'truncatewords',
    'uniq',
    'upcase',
    'url_decode',
    'url_encode',
    'where'
]
// Result of analysing the cursor's position relative to Liquid delimiters.
// NOTE(review): includeEndTags is declared but never assigned or read by
// getLiquidContextInfo / completionItemProvider — confirm whether it is
// still needed.
interface ILiquidContextInfo {
    showFilters: boolean,   // cursor is where a filter name is valid
    showTags: boolean,      // cursor is where a tag keyword is valid
    includeEndTags: boolean,
    inTag: boolean,         // cursor is inside {% ... %}
    inObject: boolean       // cursor is inside {{ ... }}
}
// Inspect the document around `position` to decide whether the cursor sits
// inside a Liquid tag ({% ... %}) or output object ({{ ... }}), and whether
// tag keywords or filter names should be suggested there.
// Flags start out undefined (falsy) and are only set when proven true.
function getLiquidContextInfo(model: monaco.editor.ITextModel, position: monaco.Position, triggerCharacter: string): ILiquidContextInfo {
    var inTag: boolean;
    var inObject: boolean;
    var showTags: boolean;
    var showFilters: boolean;
    // Find the nearest "{%" or "{{" before the cursor; group 1 tells which.
    var findStart = model.findPreviousMatch('\\{(%|\\{)', position, true, false, null, true);
    if (findStart && findStart.matches && !position.isBefore(findStart.range.getEndPosition())) {
        if (findStart.matches[1] == '%') {
            inTag = true;
        } else if (findStart.matches[1] == '{') {
            inObject = true
        }
        // Look for the matching closing delimiter after the cursor.
        var searchPattern = inTag ? '%}' : '}}';
        var findEnd = model.findNextMatch(searchPattern, position, false, false, null, true);
        var currentRange = findStart.range.plusRange(findEnd.range);
        if (currentRange.containsPosition(position)) {
            if (inTag) {
                // Extract the tag keyword right after "{%", if already typed.
                var findTagName = model.findNextMatch('\\{%\\s*([a-zA-Z-_]+)', findStart.range.getStartPosition(), true, false, null, true);
                if (findTagName && currentRange.containsRange(findTagName.range) && findTagName.matches.length > 1) {
                    // "assign" tags accept filters after the "="; any other
                    // named tag gets no further keyword suggestions.
                    if (findTagName.matches[1] == 'assign') {
                        showFilters = true;
                    } else {
                        showTags = false;
                    }
                } else {
                    // No keyword yet -> suggest tag names.
                    showTags = true;
                }
            } else {
                // Inside {{ ... }} -> filters are valid.
                showFilters = true;
            }
        }
    }
    return {
        showFilters,
        showTags,
        inTag,
        inObject
    } as ILiquidContextInfo;
}
// Monaco completion provider for Liquid: offers tag keywords inside "{% "
// and filter names after "|", based on getLiquidContextInfo.
const completionItemProvider: monaco.languages.CompletionItemProvider = {
    triggerCharacters: [' '],
    provideCompletionItems: (model: monaco.editor.ITextModel, position: monaco.Position, context: monaco.languages.CompletionContext, token: monaco.CancellationToken) => {
        var items: string[] = [];
        // When triggered by a space, only continue if the two characters
        // before it were "{%" or ended with a "|" (filter pipe).
        if (context.triggerCharacter == ' ') {
            var startTrigger = model.getValueInRange(new monaco.Range(position.lineNumber, position.column - 3, position.lineNumber, position.column - 1));
            if (startTrigger != '{%' && !startTrigger.endsWith('|')) {
                return null;
            }
        }
        var liquidContext: ILiquidContextInfo = getLiquidContextInfo(model, position, context.triggerCharacter);
        if (liquidContext.showFilters) {
            items = liquidFilters;
        } else if (liquidContext.showTags) {
            // Opening-tag position: closing tags ("end...") are not offered.
            items = liquidTags.filter((value: string) => { return !value.startsWith('end') });
        }
        const suggestions = items.map((value: string) => {
            return {
                label: value,
                kind: monaco.languages.CompletionItemKind.Keyword,
                insertText: value,
                insertTextRules: monaco.languages.CompletionItemInsertTextRule.KeepWhitespace
            } as monaco.languages.CompletionItem
        });
        return { suggestions } as monaco.languages.ProviderResult<monaco.languages.CompletionList>;
    }
};
// Wire up Liquid intellisense on the given monaco instance: optionally
// register the HTML language service for the 'liquid' language id (so HTML
// completions work inside Liquid templates), then register the Liquid
// completion provider above.
// Note: the `monaco` parameter shadows the global monaco namespace used for
// the type annotations in this file.
function ConfigureLiquidIntellisense(monaco: any, suggestHtml: boolean = true) {
    if (suggestHtml) {
        // Enable most HTML smarts but keep diagnostics off — Liquid syntax
        // inside the markup would otherwise be flagged as invalid HTML.
        var modeConfiguration: monaco.languages.html.ModeConfiguration = {
            completionItems: true,
            colors: true,
            foldingRanges: true,
            selectionRanges: true,
            diagnostics: false,
            documentFormattingEdits: true,
            documentRangeFormattingEdits: true
        };
        var options: monaco.languages.html.Options = {
            format: monaco.languages.html.htmlDefaults.options.format,
            suggest: { html5: true }
        }
        monaco.languages.html.registerHTMLLanguageService('liquid', options, modeConfiguration);
    }
    monaco.languages.registerCompletionItemProvider('liquid', completionItemProvider);
}
| xkproject/Orchard2 | src/OrchardCore.Modules/OrchardCore.Liquid/Assets/monaco/liquid-intellisense.ts | TypeScript | bsd-3-clause | 5,263 |
//===--- CompileCommands.cpp ----------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "CompileCommands.h"
#include "support/Logger.h"
#include "clang/Frontend/CompilerInvocation.h"
#include "clang/Tooling/ArgumentsAdjusters.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/FileUtilities.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/Path.h"
#include "llvm/Support/Program.h"
namespace clang {
namespace clangd {
namespace {
// Query apple's `xcrun` launcher, which is the source of truth for "how should"
// clang be invoked on this system.
// Run `xcrun` with the given argv, capture its stdout into a temp file and
// return the trimmed output, or None on any failure (xcrun missing, non-zero
// exit, unreadable/empty output).
llvm::Optional<std::string> queryXcrun(llvm::ArrayRef<llvm::StringRef> Argv) {
  auto Xcrun = llvm::sys::findProgramByName("xcrun");
  if (!Xcrun) {
    log("Couldn't find xcrun. Hopefully you have a non-apple toolchain...");
    return llvm::None;
  }
  llvm::SmallString<64> OutFile;
  // FIX: the error code from createTemporaryFile was previously dropped;
  // bail out early if the temp file cannot be created.
  if (auto EC =
          llvm::sys::fs::createTemporaryFile("clangd-xcrun", "", OutFile)) {
    log("Failed to create temporary file for xcrun output: {0}", EC.message());
    return llvm::None;
  }
  llvm::FileRemover OutRemover(OutFile);
  llvm::Optional<llvm::StringRef> Redirects[3] = {
      /*stdin=*/{""}, /*stdout=*/{OutFile}, /*stderr=*/{""}};
  vlog("Invoking {0} to find clang installation", *Xcrun);
  int Ret = llvm::sys::ExecuteAndWait(*Xcrun, Argv,
                                      /*Env=*/llvm::None, Redirects,
                                      /*SecondsToWait=*/10);
  if (Ret != 0) {
    log("xcrun exists but failed with code {0}. "
        "If you have a non-apple toolchain, this is OK. "
        "Otherwise, try xcode-select --install.",
        Ret);
    return llvm::None;
  }
  auto Buf = llvm::MemoryBuffer::getFile(OutFile);
  if (!Buf) {
    log("Can't read xcrun output: {0}", Buf.getError().message());
    return llvm::None;
  }
  StringRef Path = Buf->get()->getBuffer().trim();
  if (Path.empty()) {
    log("xcrun produced no output");
    return llvm::None;
  }
  return Path.str();
}
// Resolve symlinks if possible.
// Resolve symlinks in Path if possible; on failure, log and return the
// input path unchanged.
std::string resolve(std::string Path) {
  llvm::SmallString<128> Real;
  if (!llvm::sys::fs::real_path(Path, Real))
    return std::string(Real.str());
  log("Failed to resolve possible symlink {0}", Path);
  return Path;
}
// Get a plausible full `clang` path.
// This is used in the fallback compile command, or when the CDB returns a
// generic driver with no path.
std::string detectClangPath() {
  // The driver and/or cc1 sometimes depend on the binary name to compute
  // useful things like the standard library location.
  // We need to emulate what clang on this system is likely to see.
  // cc1 in particular looks at the "real path" of the running process, and
  // so if /usr/bin/clang is a symlink, it sees the resolved path.
  // clangd doesn't have that luxury, so we resolve symlinks ourselves.

  // On Mac, `which clang` is /usr/bin/clang. It runs `xcrun clang`, which knows
  // where the real clang is kept. We need to do the same thing,
  // because cc1 (not the driver!) will find libc++ relative to argv[0].
#ifdef __APPLE__
  if (auto MacClang = queryXcrun({"xcrun", "--find", "clang"}))
    return resolve(std::move(*MacClang));
#endif
  // On other platforms, just look for compilers on the PATH.
  // Order matters: prefer clang over gcc over a generic cc.
  for (const char *Name : {"clang", "gcc", "cc"})
    if (auto PathCC = llvm::sys::findProgramByName(Name))
      return resolve(std::move(*PathCC));
  // Fallback: a nonexistent 'clang' binary next to clangd.
  // (&Dummy is just used to locate the clangd executable itself.)
  static int Dummy;
  std::string ClangdExecutable =
      llvm::sys::fs::getMainExecutable("clangd", (void *)&Dummy);
  SmallString<128> ClangPath;
  ClangPath = llvm::sys::path::parent_path(ClangdExecutable);
  llvm::sys::path::append(ClangPath, "clang");
  return std::string(ClangPath.str());
}
// On mac, /usr/bin/clang sets SDKROOT and then invokes the real clang.
// The effect of this is to set -isysroot correctly. We do the same.
// Returns the SDK sysroot path on macOS (via `xcrun --show-sdk-path`), or
// None on other platforms / when SDKROOT is already set in the environment.
const llvm::Optional<std::string> detectSysroot() {
#ifndef __APPLE__
  return llvm::None;
#else
  // SDKROOT overridden in environment, respect it. Driver will set isysroot.
  if (::getenv("SDKROOT"))
    return llvm::None;
  return queryXcrun({"xcrun", "--show-sdk-path"});
  // FIX: removed an unreachable trailing `return llvm::None;` — on Apple the
  // queryXcrun return above always exits, and on other platforms the
  // function returns in the #ifndef branch.
#endif
}
// Compute clang's default resource directory relative to the clangd binary,
// the same way the driver would (&Dummy just anchors the executable lookup).
std::string detectStandardResourceDir() {
  static int Dummy; // Just an address in this process.
  return CompilerInvocation::GetResourcesPath("clangd", (void *)&Dummy);
}
} // namespace
// Build a mangler configured from the host system: real clang path,
// standard resource dir, and (on mac) the SDK sysroot.
CommandMangler CommandMangler::detect() {
  CommandMangler Result;
  Result.ClangPath = detectClangPath();
  Result.ResourceDir = detectStandardResourceDir();
  Result.Sysroot = detectSysroot();
  return Result;
}
// A mangler with no system-specific configuration, for unit tests.
CommandMangler CommandMangler::forTests() {
  return CommandMangler();
}
// Rewrite a compile command in place so it is safe and useful for clangd:
// strip side-effecting flags, pin the resource dir / sysroot, and qualify
// bare driver names with the detected clang directory.
// NOTE: the adjusters below run in order; keep the dependency-file strip
// before the syntax-only adjuster.
void CommandMangler::adjust(std::vector<std::string> &Cmd) const {
  // Check whether the flag exists, either as -flag or -flag=*
  auto Has = [&](llvm::StringRef Flag) {
    for (llvm::StringRef Arg : Cmd) {
      if (Arg.consume_front(Flag) && (Arg.empty() || Arg[0] == '='))
        return true;
    }
    return false;
  };
  // clangd should not write files to disk, including dependency files
  // requested on the command line.
  Cmd = tooling::getClangStripDependencyFileAdjuster()(Cmd, "");
  // Strip plugin related command line arguments. Clangd does
  // not support plugins currently. Therefore it breaks if
  // compiler tries to load plugins.
  Cmd = tooling::getStripPluginsAdjuster()(Cmd, "");
  Cmd = tooling::getClangSyntaxOnlyAdjuster()(Cmd, "");
  if (ResourceDir && !Has("-resource-dir"))
    Cmd.push_back(("-resource-dir=" + *ResourceDir));
  // Don't set `-isysroot` if it is already set or if `--sysroot` is set.
  // `--sysroot` is a superset of the `-isysroot` argument.
  if (Sysroot && !Has("-isysroot") && !Has("--sysroot")) {
    Cmd.push_back("-isysroot");
    Cmd.push_back(*Sysroot);
  }
  // If the driver is a generic name like "g++" with no path, add a clang path.
  // This makes it easier for us to find the standard libraries on mac.
  if (ClangPath && llvm::sys::path::is_absolute(*ClangPath) && !Cmd.empty()) {
    std::string &Driver = Cmd.front();
    if (Driver == "clang" || Driver == "clang++" || Driver == "gcc" ||
        Driver == "g++" || Driver == "cc" || Driver == "c++") {
      llvm::SmallString<128> QualifiedDriver =
          llvm::sys::path::parent_path(*ClangPath);
      llvm::sys::path::append(QualifiedDriver, Driver);
      Driver = std::string(QualifiedDriver.str());
    }
  }
}
// Adapt the mangler to the tooling ArgumentsAdjuster interface.
// The lambda captures a copy of *this so the adjuster stays valid after the
// CommandMangler goes out of scope.
CommandMangler::operator clang::tooling::ArgumentsAdjuster() {
  return [Mangler{*this}](const std::vector<std::string> &Args,
                          llvm::StringRef File) {
    auto Result = Args;
    Mangler.adjust(Result);
    return Result;
  };
}
} // namespace clangd
} // namespace clang
| endlessm/chromium-browser | third_party/llvm/clang-tools-extra/clangd/CompileCommands.cpp | C++ | bsd-3-clause | 6,915 |
package org.webbitserver.netty;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelHandler;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.webbitserver.EventSourceHandler;
import org.webbitserver.HttpControl;
import org.webbitserver.HttpHandler;
import org.webbitserver.HttpRequest;
import org.webbitserver.HttpResponse;
import org.webbitserver.WebSocketConnection;
import org.webbitserver.WebSocketHandler;
import org.webbitserver.WebbitException;
import java.util.Iterator;
import java.util.concurrent.Executor;
public class NettyHttpControl implements HttpControl {
private final Iterator<HttpHandler> handlerIterator;
private final Executor executor;
private final ChannelHandlerContext ctx;
private final NettyHttpRequest webbitHttpRequest;
private final org.jboss.netty.handler.codec.http.HttpRequest nettyHttpRequest;
private final org.jboss.netty.handler.codec.http.HttpResponse nettyHttpResponse;
private final Thread.UncaughtExceptionHandler exceptionHandler;
private final Thread.UncaughtExceptionHandler ioExceptionHandler;
private HttpRequest defaultRequest;
private HttpResponse webbitHttpResponse;
private HttpControl defaultControl;
private NettyWebSocketConnection webSocketConnection;
private NettyEventSourceConnection eventSourceConnection;
public NettyHttpControl(Iterator<HttpHandler> handlerIterator,
Executor executor,
ChannelHandlerContext ctx,
NettyHttpRequest webbitHttpRequest,
NettyHttpResponse webbitHttpResponse,
org.jboss.netty.handler.codec.http.HttpRequest nettyHttpRequest,
org.jboss.netty.handler.codec.http.HttpResponse nettyHttpResponse,
Thread.UncaughtExceptionHandler exceptionHandler,
Thread.UncaughtExceptionHandler ioExceptionHandler) {
this.handlerIterator = handlerIterator;
this.executor = executor;
this.ctx = ctx;
this.webbitHttpRequest = webbitHttpRequest;
this.webbitHttpResponse = webbitHttpResponse;
this.nettyHttpRequest = nettyHttpRequest;
this.nettyHttpResponse = nettyHttpResponse;
this.ioExceptionHandler = ioExceptionHandler;
this.exceptionHandler = exceptionHandler;
defaultRequest = webbitHttpRequest;
defaultControl = this;
}
@Override
public void nextHandler() {
nextHandler(defaultRequest, webbitHttpResponse, defaultControl);
}
@Override
public void nextHandler(HttpRequest request, HttpResponse response) {
nextHandler(request, response, defaultControl);
}
@Override
public void nextHandler(HttpRequest request, HttpResponse response, HttpControl control) {
this.defaultRequest = request;
this.webbitHttpResponse = response;
this.defaultControl = control;
if (handlerIterator.hasNext()) {
HttpHandler handler = handlerIterator.next();
try {
handler.handleHttpRequest(request, response, control);
} catch (Throwable e) {
response.error(e);
}
} else {
response.status(404).end();
}
}
@Override
public WebSocketConnection upgradeToWebSocketConnection(WebSocketHandler webSocketHandler) {
NettyWebSocketConnection webSocketConnection = webSocketConnection();
WebSocketConnectionHandler webSocketConnectionHandler = new WebSocketConnectionHandler(executor, exceptionHandler, ioExceptionHandler, webSocketConnection, webSocketHandler);
performWebSocketHandshake(webSocketConnection, webSocketConnectionHandler);
try {
webSocketHandler.onOpen(webSocketConnection);
} catch (Throwable e) {
exceptionHandler.uncaughtException(Thread.currentThread(), new WebbitException(e));
}
return webSocketConnection;
}
@Override
public NettyWebSocketConnection webSocketConnection() {
if (webSocketConnection == null) {
webSocketConnection = new NettyWebSocketConnection(executor, webbitHttpRequest, ctx, null);
}
return webSocketConnection;
}
@Override
public NettyEventSourceConnection upgradeToEventSourceConnection(EventSourceHandler eventSourceHandler) {
NettyEventSourceConnection eventSourceConnection = eventSourceConnection();
EventSourceConnectionHandler eventSourceConnectionHandler = new EventSourceConnectionHandler(executor, exceptionHandler, ioExceptionHandler, eventSourceConnection, eventSourceHandler);
performEventSourceHandshake(eventSourceConnectionHandler);
try {
eventSourceHandler.onOpen(eventSourceConnection);
} catch (Exception e) {
exceptionHandler.uncaughtException(Thread.currentThread(), new WebbitException(e));
}
return eventSourceConnection;
}
@Override
public NettyEventSourceConnection eventSourceConnection() {
if (eventSourceConnection == null) {
eventSourceConnection = new NettyEventSourceConnection(executor, webbitHttpRequest, ctx);
}
return eventSourceConnection;
}
@Override
public Executor handlerExecutor() {
return executor;
}
@Override
public void execute(Runnable command) {
handlerExecutor().execute(command);
}
private void performEventSourceHandshake(ChannelHandler eventSourceConnectionHandler) {
nettyHttpResponse.setStatus(HttpResponseStatus.OK);
nettyHttpResponse.headers().add("Content-Type", "text/event-stream");
nettyHttpResponse.headers().add("Transfer-Encoding", "identity");
nettyHttpResponse.headers().add("Connection", "keep-alive");
nettyHttpResponse.headers().add("Cache-Control", "no-cache");
nettyHttpResponse.setChunked(false);
ctx.getChannel().write(nettyHttpResponse);
getReadyToSendEventSourceMessages(eventSourceConnectionHandler);
}
private void getReadyToSendEventSourceMessages(ChannelHandler eventSourceConnectionHandler) {
ChannelPipeline p = ctx.getChannel().getPipeline();
StaleConnectionTrackingHandler staleConnectionTracker = (StaleConnectionTrackingHandler) p.remove("staleconnectiontracker");
staleConnectionTracker.stopTracking(ctx.getChannel());
p.remove("aggregator");
p.replace("handler", "ssehandler", eventSourceConnectionHandler);
}
private void performWebSocketHandshake(NettyWebSocketConnection webSocketConnection, ChannelHandler webSocketConnectionHandler) {
    // Candidate protocol versions, newest first; the first one whose headers
    // match the client request wins.
    WebSocketVersion[] candidates = {
            new Hybi(nettyHttpRequest, nettyHttpResponse),
            new Hixie76(nettyHttpRequest, nettyHttpResponse),
            new Hixie75(nettyHttpRequest, nettyHttpResponse)
    };
    Channel channel = ctx.getChannel();
    ChannelPipeline pipeline = channel.getPipeline();
    for (WebSocketVersion version : candidates) {
        if (!version.matches()) {
            continue;
        }
        // Install the frame decoder before writing the handshake response so
        // no incoming frame can race past the old HTTP decoder.
        ChannelHandler frameDecoder = version.createDecoder();
        getReadyToReceiveWebSocketMessages(frameDecoder, webSocketConnectionHandler, pipeline, channel);
        version.prepareHandshakeResponse(webSocketConnection);
        channel.write(nettyHttpResponse);
        getReadyToSendWebSocketMessages(version.createEncoder(), pipeline);
        return;
    }
}
private void getReadyToReceiveWebSocketMessages(ChannelHandler webSocketFrameDecoder, ChannelHandler webSocketConnectionHandler, ChannelPipeline p, Channel channel) {
    // An idle websocket is not a dead connection: stop stale tracking first.
    StaleConnectionTrackingHandler tracker =
            (StaleConnectionTrackingHandler) p.remove("staleconnectiontracker");
    tracker.stopTracking(channel);
    p.remove("aggregator");
    // Swap the HTTP decoder/handler for their websocket equivalents.
    p.replace("decoder", "wsdecoder", webSocketFrameDecoder);
    p.replace("handler", "wshandler", webSocketConnectionHandler);
}
private void getReadyToSendWebSocketMessages(ChannelHandler webSocketFrameEncoder, ChannelPipeline p) {
    // Outgoing frames are encoded by the negotiated websocket encoder.
    p.replace("encoder", "wsencoder", webSocketFrameEncoder);
}
}
| lostdj/webbit | src/main/java/org/webbitserver/netty/NettyHttpControl.java | Java | bsd-3-clause | 8,568 |
/*
* Copyright (C) 2015, British Broadcasting Corporation
* All Rights Reserved.
*
* Author: Philip de Nier
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the British Broadcasting Corporation nor the names
* of its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <libMXF++/MXF.h>
using namespace std;
using namespace mxfpp;
// Registered set key (UL) identifying the UTF16TextBasedSet in header metadata.
const mxfKey UTF16TextBasedSetBase::setKey = MXF_SET_K(UTF16TextBasedSet);
// Creates a new UTF16TextBasedSet set in the given header metadata and
// registers this wrapper with the metadata so it is tracked and cleaned up.
UTF16TextBasedSetBase::UTF16TextBasedSetBase(HeaderMetadata *headerMetadata)
: TextBasedObject(headerMetadata, headerMetadata->createCSet(&setKey))
{
    headerMetadata->add(this);
}
// Wraps an existing C-level metadata set (e.g. when reading a file); the set
// is not re-registered because it already belongs to the header metadata.
UTF16TextBasedSetBase::UTF16TextBasedSetBase(HeaderMetadata *headerMetadata, ::MXFMetadataSet *cMetadataSet)
: TextBasedObject(headerMetadata, cMetadataSet)
{}
// Nothing to release here; the base class / header metadata own the C set.
UTF16TextBasedSetBase::~UTF16TextBasedSetBase()
{}
// Returns the raw bytes of the UTF16TextData item (UTF-16 encoded text).
ByteArray UTF16TextBasedSetBase::getUTF16TextData() const
{
    return getRawBytesItem(&MXF_ITEM_K(UTF16TextBasedSet, UTF16TextData));
}
// Stores the given bytes as the UTF16TextData item; the caller is responsible
// for providing correctly UTF-16 encoded data.
void UTF16TextBasedSetBase::setUTF16TextData(ByteArray value)
{
    setRawBytesItem(&MXF_ITEM_K(UTF16TextBasedSet, UTF16TextData), value);
}
| Limecraft/ebu-libmxfpp | libMXF++/metadata/base/UTF16TextBasedSetBase.cpp | C++ | bsd-3-clause | 2,544 |
# encoding: utf8
import autoslug.fields
from django.db import migrations
from slugify import slugify
def set_initial_slug(apps, schema_editor):
    """Backfill the ``slug`` field from each character's name.

    Uses the historical model from the migration state so this keeps working
    even if the ``Character`` class changes later.
    """
    character_model = apps.get_model('characters', 'Character')
    for obj in character_model.objects.all():
        obj.slug = slugify(obj.name)
        obj.save()
class Migration(migrations.Migration):
    # Runs after the migration that added the (initially non-unique) slug column.
    dependencies = [
        ('characters', '0006_character_slug'),
    ]
    operations = [
        # Populate slugs first so the unique constraint below cannot fail
        # on existing rows.
        migrations.RunPython(set_initial_slug),
        migrations.AlterField(
            model_name='character',
            name='slug',
            field=autoslug.fields.AutoSlugField(unique=True, editable=False),
        ),
    ]
| wengole/eveonline-assistant | eveonline-assistant/characters/migrations/0007_character_slug_populate.py | Python | bsd-3-clause | 695 |
<?php
/**
*
* @package Gems
* @subpackage Tracker
* @author Matijs de Jong <mjong@magnafacta.nl>
* @copyright Copyright (c) 2011 Erasmus MC
* @license New BSD License
* @version $Id$
*/
/**
* Adds basic token editing snippet parameter processing and checking.
*
* This class supplies the model and adds some display knowledge.
*
* @package Gems
* @subpackage Tracker
* @copyright Copyright (c) 2011 Erasmus MC
* @license New BSD License
* @since Class available since version 1.4
*/
abstract class Gems_Tracker_Snippets_EditTokenSnippetAbstract extends \Gems_Snippets_ModelFormSnippetAbstract
{
    /**
     * Required: supplies the tracker used to load tokens.
     *
     * @var \Gems_Loader
     */
    protected $loader;

    /**
     * Required: used to derive the token id when none was set directly.
     *
     * @var \Zend_Controller_Request_Abstract
     */
    protected $request;

    /**
     * Optional: $request or $tokenData must be set
     *
     * The token shown
     *
     * @var \Gems_Tracker_Token
     */
    protected $token;

    /**
     * Optional: id of the selected token to show
     *
     * Can be derived from $request or $token
     *
     * @var string
     */
    protected $tokenId;

    /**
     * Should be called after answering the request to allow the Target
     * to check if all required registry values have been set correctly.
     *
     * @return boolean False if required are missing.
     */
    public function checkRegistryRequestsAnswers()
    {
        return $this->loader && $this->request && parent::checkRegistryRequestsAnswers();
    }

    /**
     * Creates the model
     *
     * @return \MUtil_Model_ModelAbstract
     */
    protected function createModel()
    {
        $model = $this->token->getModel();
        if ($model instanceof \Gems_Tracker_Model_StandardTokenModel) {
            // Track who changed what, and apply insert formatting when
            // this snippet is used to create a new token.
            $model->addEditTracking();
            if ($this->createData) {
                $model->applyInsertionFormatting();
            }
        }
        return $model;
    }

    /**
     * Create the snippets content
     *
     * This is a stub function either override getHtmlOutput() or override render()
     *
     * Renders the form only when a token id was supplied and the token
     * actually exists; otherwise an error message is added instead.
     *
     * @param \Zend_View_Abstract $view Just in case it is needed here
     * @return \MUtil_Html_HtmlInterface Something that can be rendered
     */
    public function getHtmlOutput(\Zend_View_Abstract $view)
    {
        if ($this->tokenId) {
            if ($this->token->exists) {
                return parent::getHtmlOutput($view);
            } else {
                $this->addMessage(sprintf($this->_('Token %s not found.'), $this->tokenId));
            }
        } else {
            $this->addMessage($this->_('No token specified.'));
        }
    }

    /**
     * Helper function to allow generalized statements about the items in the model
     * to use specific item names.
     *
     * @param int $count
     * @return string
     */
    public function getTopic($count = 1)
    {
        return $this->plural('token', 'tokens', $count);
    }

    /**
     * The place to check if the data set in the snippet is valid
     * to generate the snippet.
     *
     * When invalid data should result in an error, you can throw it
     * here but you can also perform the check in the
     * checkRegistryRequestsAnswers() function from the
     * {@see \MUtil_Registry_TargetInterface}.
     *
     * Derives $tokenId from $token or the request, then loads $token
     * from the tracker when only the id was known.
     *
     * @return boolean
     */
    public function hasHtmlOutput()
    {
        if (! $this->tokenId) {
            if ($this->token) {
                $this->tokenId = $this->token->getTokenId();
            } elseif ($this->request) {
                $this->tokenId = $this->request->getParam(\MUtil_Model::REQUEST_ID);
            }
        }
        if ($this->tokenId && (! $this->token)) {
            $this->token = $this->loader->getTracker()->getToken($this->tokenId);
        }
        // Output always true, returns an error message as html when anything is wrong
        return parent::hasHtmlOutput();
    }
}
| GemsTracker/gemstracker-library | classes/Gems/Tracker/Snippets/EditTokenSnippetAbstract.php | PHP | bsd-3-clause | 3,957 |
<?php
/**
* FileName:cache.test.php
* Author:liupeng
* Create Date:2015-09-16
*/
return [
    // File-backed cache; values are serialized with igbinary for speed/size.
    'fileCache' => [
        'class' => '\yii\caching\FileCache',
        'keyPrefix' => 'common.file.',
        'serializer' => [
            'igbinary_serialize', 'igbinary_unserialize'
        ]
    ],
    // Default cache component: memcached via the custom MemCache wrapper.
    'cache' => [
        'class' => '\yii\liuxy\MemCache',
        'keyPrefix' => 'common.',
        'serializer' => [
            'igbinary_serialize', 'igbinary_unserialize'
        ],
        'servers' => [
            [
                'host' => '127.0.0.1',
                'port' => 11211,
                'weight' => 1
            ]
        ]
    ]
]; | liupdhc/yii2-liuxy-demo | common/config/env/cache.test.php | PHP | bsd-3-clause | 646 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Author: Jiajie Zhang
Email: zhangjiajie043@gmail.com
"""
import logging
import os
from math import log
import cv2
import numpy as np
import numpy.linalg as la
from scipy.spatial import distance as spd
import pywt
import Tyf
import tifffile as tiff
class Image(object):
    """ Image class
    Attributes:
        full path
        original image, may be uint16 type
        fullsize gray image
        exif info, Tyf.TiffFile type
        image features
    """

    def __init__(self, full_path):
        # Only .tif/.tiff files are actually loaded; anything else leaves the
        # image/exif attributes as None.
        super(Image, self).__init__()
        self.full_path = full_path
        self.dir, self.name = os.path.split(full_path)
        self.focal_len = None
        self.features = {}
        self.tf = None  # transform w.r.t. the reference image
        _, ext = os.path.splitext(full_path)
        if ext.lower() in (".tiff", ".tif") and os.path.isfile(full_path):
            self.original_image, self.exif_info = ImageProcessing.read_tif_image(full_path)
            gray_img = cv2.cvtColor(self.original_image, cv2.COLOR_RGB2GRAY)
            self.fullsize_gray_image = ImageProcessing.convert_to_float(gray_img)
        else:
            self.original_image = None
            self.fullsize_gray_image = None
            self.exif_info = None
        self.reset_all()

    def reset_focal_length(self):
        # EXIF FocalLength may be a rational (num, den) pair or a single value.
        f = self.get_exif_value("FocalLength")
        if f and len(f) == 2:
            self.focal_len = f[0] * 1.0 / f[1]
        elif f and len(f) == 1:
            self.focal_len = f[0]
        else:
            self.focal_len = None

    def reset_all(self):
        # Drop any computed state (features, transform) and re-read focal length.
        self.reset_focal_length()
        self.features = {}
        self.tf = None

    def get_exif_value(self, name):
        # Returns the value of the named EXIF tag, or None when missing.
        if not self.exif_info:
            return None
        info = self.exif_info[0].find(name)
        if not info:
            return None
        else:
            return info.value
class DataModel(object):
    """Application state: loaded images plus incrementally merged results.

    ``final_sky_img`` / ``final_ground_img`` hold running averages (float
    arrays) of the frames merged so far; ``final_sky_num`` /
    ``final_ground_num`` count how many frames went into each average.
    """

    # Align options
    AUTO_MASK = 1
    ALIGN_STARS = 2
    ALIGN_GROUND = 3
    # Display options
    ORIGINAL_IMAGE = 1

    def __init__(self):
        super(DataModel, self).__init__()
        self.logger = logging.getLogger(self.__class__.__name__)
        self.images = []
        self.ref_ind = 0
        self.image_dir = None
        # Bug fix: initialize the result images *and* their frame counters via
        # the reset helpers. Previously final_sky_num / final_ground_num were
        # only created inside reset_final_*(), so calling update_final_sky()
        # or update_final_ground() on a fresh model raised AttributeError.
        self.reset_final_sky()
        self.reset_final_ground()
        # For concurrency issue
        self.is_adding_image = False
        # Other GUI options
        self.merge_option_type = self.ALIGN_STARS

    def add_image(self, path):
        """Load the TIFF at ``path`` and append it; returns True on success."""
        self.logger.debug("add_image()")
        img_dir, name = os.path.split(path)
        if not os.path.exists(path) or not os.path.isfile(path):
            self.logger.error("File %s not exists!", path)
            return False
        for img in self.images:
            if path == img.full_path:
                self.logger.info("Image is already open. File: %s", path)
                return False
        if self.is_adding_image:
            return False
        self.is_adding_image = True
        img = Image(path)
        focal_len = img.get_exif_value("FocalLength")
        self.images.append(img)
        self.logger.debug("Loading image %s... Focal length = %s", name, focal_len)
        if not self.image_dir:
            # First image decides the working directory.
            self.image_dir = img_dir
        self.is_adding_image = False
        return True

    def update_final_sky(self, img):
        """Merge one aligned sky frame into the running average."""
        self.logger.debug("update_final_sky()")
        self.final_sky_num += 1
        if self.final_sky_img is None and self.final_sky_num == 1:
            self.final_sky_img = np.copy(img)
        elif self.final_sky_img is not None and self.final_sky_num > 0:
            # Incremental mean: old * (n - 1) / n + new / n
            self.final_sky_img = self.final_sky_img / self.final_sky_num * (self.final_sky_num - 1) + img / self.final_sky_num

    def update_final_ground(self, img):
        """Merge one aligned ground frame into the running average."""
        self.logger.debug("update_final_ground()")
        self.final_ground_num += 1
        if self.final_ground_img is None and self.final_ground_num == 1:
            self.final_ground_img = np.copy(img)
        elif self.final_ground_img is not None and self.final_ground_num > 0:
            self.final_ground_img = self.final_ground_img / self.final_ground_num * (self.final_ground_num - 1) + img / self.final_ground_num

    def clear_images(self):
        """Drop all images and results, returning to the pristine state."""
        self.logger.debug("clear_images()")
        self.images = []
        self.reset_final_sky()
        self.reset_final_ground()
        self.image_dir = None
        self.ref_ind = 0
        self.is_adding_image = False

    def reset_final_sky(self):
        self.logger.debug("reset_final_sky()")
        self.final_sky_img = None
        self.final_sky_num = 0

    def reset_final_ground(self):
        self.logger.debug("reset_final_ground()")
        self.final_ground_img = None
        self.final_ground_num = 0

    def reset_result(self):
        """Clear merged results and every image's computed features."""
        self.logger.debug("reset_result()")
        self.reset_final_sky()
        self.reset_final_ground()
        for img in self.images:
            img.features = {}

    def has_image(self):
        res = len(self.images) > 0
        self.logger.debug("has_image(): %s", res)
        return res

    def iter_images(self):
        self.logger.debug("iter_images()")
        return iter(self.images)

    def total_images(self):
        res = len(self.images)
        self.logger.debug("total_images(): %s", res)
        return res

    def has_sky_result(self):
        res = self.final_sky_img is not None
        self.logger.debug("has_sky_result(): %s", res)
        return res

    def has_ground_result(self):
        res = self.final_ground_img is not None
        self.logger.debug("has_ground_result(): %s", res)
        return res
class ImageProcessing(object):
    """Static helpers: star detection, feature matching, alignment, TIFF I/O.

    NOTE(review): this module uses Python 2 syntax (``raise E, "msg"``,
    list-returning ``map``/``filter``) — it will not run under Python 3 as-is.
    """

    def __init__(self):
        super(ImageProcessing, self).__init__()

    @staticmethod
    def _try_wavedec(img_blr, resize_factor=0.25):
        # Wavelet band-pass: zero out the coarsest approximation and the finest
        # detail coefficients, keeping mid-frequency structure (star-sized blobs).
        img_shape = img_blr.shape
        need_resize = abs(resize_factor - 1) > 0.001
        level = int(6 - log(1 / resize_factor, 2))
        if need_resize:
            img_blr_resize = cv2.resize(img_blr, None, fx=resize_factor, fy=resize_factor)
        else:
            img_blr_resize = img_blr
        coeffs = pywt.wavedec2(img_blr_resize, "db8", level=level)
        coeffs[0].fill(0)
        coeffs[-1][0].fill(0)
        coeffs[-1][1].fill(0)
        coeffs[-1][2].fill(0)
        img_rec_resize = pywt.waverec2(coeffs, "db8")
        if need_resize:
            # Resize back to the original resolution.
            img_rec = cv2.resize(img_rec_resize, (img_shape[1], img_shape[0]))
        else:
            img_rec = img_rec_resize
        return img_rec

    @staticmethod
    def detect_star_points(img_gray, mask=None, resize_length=2200):
        """Detect star-like points; returns (centroids [x, y], weights)."""
        logging.debug("detect_star_point()")
        logging.debug("resize_length = %s", resize_length)
        sigma = 3
        img_shape = img_gray.shape
        img_blr = cv2.GaussianBlur(img_gray, (9, 9), sigma)
        img_blr = (img_blr - np.mean(img_blr)) / (np.max(img_blr) - np.min(img_blr))
        resize_factor = 1
        while max(img_shape) * resize_factor > resize_length:
            resize_factor *= 0.5
        # Build a mask that excludes large dark regions (presumably foreground /
        # ground); if it would reject more than half the frame, disable it.
        logging.debug("calc mask...")
        s = int(max(img_shape) * 0.02 * resize_factor * 2)
        tmp_mask = cv2.resize(img_gray, None, fx=resize_factor, fy=resize_factor)
        tmp_mask = np.logical_and(tmp_mask < np.percentile(tmp_mask, 10), tmp_mask < 0.15).astype(np.uint8) * 255
        logging.debug("calc mask logical select done")
        tmp_mask = 255 - cv2.dilate(tmp_mask, cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (s, s)))
        tmp_mask = cv2.resize(tmp_mask, (img_shape[1], img_shape[0]))
        if mask is None:
            mask = tmp_mask > 127
        else:
            mask = np.logical_and(tmp_mask > 127, mask > 0)
        logging.debug("calc mask done")
        mask_rate = np.sum(mask) * 100.0 / np.prod(mask.shape)
        logging.debug("mask rate: %.2f", mask_rate)
        if mask_rate < 50:
            mask = np.ones(tmp_mask.shape, dtype="bool")
        # Retry with less downscaling until enough candidate points are found.
        while True:
            try:
                img_rec = ImageProcessing._try_wavedec(img_blr, resize_factor=resize_factor) * mask
                bw = ((img_rec > np.percentile(img_rec[mask], 99.5)) * mask).astype(np.uint8) * 255
                # img_rec = ImageProcessing._try_wavedec(img_blr, resize_factor=resize_factor)
                # bw = ((img_rec > np.percentile(img_rec, 99.5))).astype(np.uint8) * 255
                bw = cv2.morphologyEx(bw, cv2.MORPH_OPEN, np.ones((3, 3), np.uint8))
                _, contours, _ = cv2.findContours(np.copy(bw), cv2.RETR_LIST, cv2.CHAIN_APPROX_NONE)
                contours = filter(lambda x: len(x) > 5, contours)
                logging.debug("%d star points detected", len(contours))
                if len(contours) > 400:
                    break
                else:
                    raise ValueError, "No enough points"
            except ValueError as e:
                if resize_factor >= 1:
                    raise ValueError, "Cannot detect enough star points"
                else:
                    resize_factor *= 2
        logging.debug("resize factor = %f", resize_factor)
        # Fit an ellipse per contour; keep compact, bright, round blobs.
        elps = map(cv2.fitEllipse, contours)
        centroids = np.array(map(lambda e: e[0], elps))
        areas = np.array(map(lambda x: cv2.contourArea(x) + 0.5 * len(x), contours))
        eccentricities = np.sqrt(np.array(map(lambda x: 1 - (x[1][0] / x[1][1]) ** 2, elps)))
        mask = np.zeros(bw.shape, np.uint8)
        intensities = np.zeros(areas.shape)
        for i in range(len(contours)):
            # Mean reconstructed intensity within each contour's bounding box.
            cv2.drawContours(mask, contours[i], 0, 255, -1)
            rect = cv2.boundingRect(contours[i])
            val = cv2.mean(img_rec[rect[1]:rect[1] + rect[3] + 1, rect[0]:rect[0] + rect[2] + 1],
                           mask[rect[1]:rect[1] + rect[3] + 1, rect[0]:rect[0] + rect[2] + 1])
            mask[rect[1]:rect[1] + rect[3] + 1, rect[0]:rect[0] + rect[2] + 1] = 0
            intensities[i] = val[0]
        # NOTE(review): np.logical_and takes only two input arrays; the third
        # argument here is interpreted as the `out` parameter, so the
        # eccentricity / intensity conditions are likely NOT applied as
        # intended — TODO confirm and rewrite as chained logical_and calls.
        inds = np.logical_and(areas > 10, areas < 200, eccentricities < .9)
        inds = np.logical_and(inds, areas > np.percentile(areas, 20), intensities > np.percentile(intensities, 20))
        star_pts = centroids[inds]  # [x, y]
        areas = areas[inds]
        intensities = intensities[inds]
        return star_pts, areas * intensities

    @staticmethod
    def convert_to_spherical_coord(star_pts, img_size, f):
        """Map pixel coordinates to (lambda, phi) angles, given focal length f (mm)."""
        logging.debug("convert_coord_img_sph()")
        p0 = (star_pts - img_size / 2.0) / (np.max(img_size) / 2)
        p = p0 * 18  # Fullframe half size, 18mm
        lam = np.arctan2(p[:, 0], f)
        phi = np.arcsin(p[:, 1] / np.sqrt(np.sum(p ** 2, axis=1) + f ** 2))
        return np.stack((lam, phi), axis=-1)

    @staticmethod
    def extract_point_features(sph, vol, k=15):
        """Build a rotation-invariant descriptor per star from its k neighbours."""
        logging.debug("extract_point_features()")
        pts_num = len(sph)
        vec = np.stack((np.cos(sph[:, 1]) * np.cos(sph[:, 0]),
                        np.cos(sph[:, 1]) * np.sin(sph[:, 0]),
                        np.sin(sph[:, 1])), axis=-1)
        dist_mat = 1 - spd.cdist(vec, vec, "cosine")
        vec_dist_ind = np.argsort(-dist_mat)
        dist_mat = np.where(dist_mat < -1, -1, np.where(dist_mat > 1, 1, dist_mat))
        dist_mat = np.arccos(dist_mat[np.array(range(pts_num))[:, np.newaxis], vec_dist_ind[:, :2 * k]])
        vol = vol[vec_dist_ind[:, :2 * k]]
        vol_ind = np.argsort(-vol * dist_mat)

        def make_cross_mat(v):
            # Skew-symmetric matrix so that make_cross_mat(v) @ u == v x u.
            return np.array([[0, -v[2], v[1]], [v[2], 0, -v[0]], [-v[1], v[0], 0]])

        theta_feature = np.zeros((pts_num, k))
        rho_feature = np.zeros((pts_num, k))
        vol_feature = np.zeros((pts_num, k))
        for i in range(pts_num):
            v0 = vec[i]
            vs = vec[vec_dist_ind[i, vol_ind[i, :k]]]
            angles = np.inner(vs, make_cross_mat(v0))
            angles = angles / la.norm(angles, axis=1)[:, np.newaxis]
            cr = np.inner(angles, make_cross_mat(angles[0]))
            s = la.norm(cr, axis=1) * np.sign(np.inner(cr, v0))
            c = np.inner(angles, angles[0])
            theta_feature[i] = np.arctan2(s, c)
            rho_feature[i] = dist_mat[i, vol_ind[i, :k]]
            vol_feature[i] = vol[i, vol_ind[i, :k]]
        # Accumulate neighbours into a smoothed angular histogram (3-degree bins).
        fx = np.arange(-np.pi, np.pi, 3 * np.pi / 180)
        features = np.zeros((pts_num, len(fx)))
        for i in range(k):
            sigma = 2.5 * np.exp(-rho_feature[:, i] * 100) + .04
            tmp = np.exp(-np.subtract.outer(theta_feature[:, i], fx) ** 2 / 2 / sigma[:, np.newaxis] ** 2)
            tmp = tmp * (vol_feature[:, i] * rho_feature[:, i] ** 2 / sigma)[:, np.newaxis]
            features += tmp
        # L2-normalize each descriptor.
        features = features / np.sqrt(np.sum(features ** 2, axis=1)).reshape((pts_num, 1))
        return features

    @staticmethod
    def find_initial_match(feature1, feature2):
        """Mutual-nearest-neighbour matching of descriptors, with outlier pruning."""
        logging.debug("find_initial_match()")
        measure_dist_mat = spd.cdist(feature1["feature"], feature2["feature"], "cosine")
        pts1, pts2 = feature1["pts"], feature2["pts"]
        pts_mean = np.mean(np.vstack((pts1, pts2)), axis=0)
        pts_min = np.min(np.vstack((pts1, pts2)), axis=0)
        pts_max = np.max(np.vstack((pts1, pts2)), axis=0)
        pts_dist_mat = spd.cdist((pts1 - pts_mean) / (pts_max - pts_min), (pts2 - pts_mean) / (pts_max - pts_min),
                                 "euclidean")
        alpha = 0.00  # weight of the positional term (currently disabled)
        dist_mat = measure_dist_mat * (1 - alpha) + pts_dist_mat * alpha
        num1, num2 = dist_mat.shape
        # For a given point p1 in image1, find the most similar point p12 in image2,
        # then find the point p21 in image1 that most similar to p12, check the
        # distance between p1 and p21.
        idx12 = np.argsort(dist_mat, axis=1)
        idx21 = np.argsort(dist_mat, axis=0)
        ind = idx21[0, idx12[:, 0]] == range(num1)
        # Check Euclidean distance between the nearest pair
        d_th = min(np.percentile(dist_mat[range(num1), idx12[:, 0]], 30),
                   np.percentile(dist_mat[idx21[0, :], range(num2)], 30))
        ind = np.logical_and(ind, dist_mat[range(num1), idx12[:, 0]] < d_th)
        pair_idx = np.stack((np.where(ind)[0], idx12[ind, 0]), axis=-1)
        # Check angular distance between the nearest pair
        xyz1 = np.stack((np.cos(feature1["sph"][:, 1]) * np.cos(feature1["sph"][:, 0]),
                         np.cos(feature1["sph"][:, 1]) * np.sin(feature1["sph"][:, 0]),
                         np.sin(feature1["sph"][:, 1])), axis=-1)
        xyz2 = np.stack((np.cos(feature2["sph"][:, 1]) * np.cos(feature2["sph"][:, 0]),
                         np.cos(feature2["sph"][:, 1]) * np.sin(feature2["sph"][:, 0]),
                         np.sin(feature2["sph"][:, 1])), axis=-1)
        theta = np.arccos(np.sum(xyz1[pair_idx[:, 0]] * xyz2[pair_idx[:, 1]], axis=1))
        theta_th = min(np.percentile(theta, 75), np.pi / 6)
        pts_dist = la.norm(feature1["pts"][pair_idx[:, 0]] - feature2["pts"][pair_idx[:, 1]], axis=1)
        dist_th = max(np.max(feature1["pts"]), np.max(feature2["pts"])) * 0.3
        pair_idx = pair_idx[np.logical_and(theta < theta_th, pts_dist < dist_th)]
        logging.debug("find {0} pairs for initial".format(len(pair_idx)))
        return pair_idx

    @staticmethod
    def fine_tune_transform(feature1, feature2, init_pair_idx):
        """Iteratively refine a RANSAC homography seeded by the initial matches."""
        ind = []
        k = 1
        while len(ind) < 0.6 * min(len(feature1["pts"]), len(feature2["pts"])) and k < 10:
            # Step 1. Randomly choose 20 points evenly distributed on the image
            rand_pts = np.random.rand(20, 2) * (np.amax(feature1["pts"], axis=0) - np.amin(feature1["pts"], axis=0)) * \
                       np.array([1, 0.8]) + np.amin(feature1["pts"], axis=0)
            # Step 2. Find nearest points from feature1
            dist_mat = spd.cdist(rand_pts, feature1["pts"][init_pair_idx[:, 0]])
            tmp_ind = np.argmin(dist_mat, axis=1)
            # Step 3. Use these points to find a homography
            tf = cv2.findHomography(feature1["pts"][init_pair_idx[tmp_ind, 0]], feature2["pts"][init_pair_idx[tmp_ind, 1]],
                                    method=cv2.RANSAC, ransacReprojThreshold=5)
            # Then use the transform find more matched points
            pts12 = cv2.perspectiveTransform(np.array([[p] for p in feature1["pts"]], dtype="float32"), tf[0])[:, 0, :]
            dist_mat = spd.cdist(pts12, feature2["pts"])
            num1, num2 = dist_mat.shape
            idx12 = np.argsort(dist_mat, axis=1)
            tmp_ind = np.argwhere(np.array([dist_mat[i, idx12[i, 0]] for i in range(num1)]) < 5)
            if len(tmp_ind) > len(ind):
                ind = tmp_ind
            logging.debug("len(ind) = %d, len(feature) = %d", len(ind), min(len(feature1["pts"]), len(feature2["pts"])))
            k += 1
        # Final homography from all accepted correspondences.
        pair_idx = np.hstack((ind, idx12[ind, 0]))
        tf = cv2.findHomography(feature1["pts"][pair_idx[:, 0]], feature2["pts"][pair_idx[:, 1]],
                                method=cv2.RANSAC, ransacReprojThreshold=5)
        return tf, pair_idx

    @staticmethod
    def convert_to_float(np_image):
        # Integer images are scaled to [0, 1]; float images are copied as-is.
        if np_image.dtype == np.float32 or np_image.dtype == np.float64:
            return np.copy(np_image)
        else:
            return np_image.astype("float32") / np.iinfo(np_image.dtype).max

    @staticmethod
    def read_tif_image(full_path):
        # Returns (pixel array, Tyf exif object) for a TIFF file.
        img = tiff.imread(full_path)
        exif_info = Tyf.open(full_path)
        return img, exif_info

    @staticmethod
    def save_tif_image(full_path, img, exif=None):
        # Silently refuses anything but 8/16-bit integer images.
        if img.dtype != np.uint8 and img.dtype != np.uint16:
            return
        logging.debug("saving image...")
        tiff.imsave(full_path, img)
        tmp_exif = Tyf.open(full_path)
        tmp_exif.load_raster()
        if exif and isinstance(exif, Tyf.TiffFile):
            # Re-attach the original exif, keeping the freshly written raster.
            logging.debug("saving exif...")
            exif[0].stripes = tmp_exif[0].stripes
            exif.save(full_path)
if __name__ == "__main__":
    # Manual smoke test: load four frames from a hard-coded developer path,
    # align the second onto the first via star matching, average them and
    # write the result to test.tif.
    logging_level = logging.DEBUG
    logging_format = "%(asctime)s (%(name)s) [%(levelname)s] line %(lineno)d: %(message)s"
    logging.basicConfig(format=logging_format, level=logging_level)
    data_model = DataModel()
    # NOTE(review): machine-specific absolute path; will no-op elsewhere.
    img_tmpl = u"/Volumes/ZJJ-4TB/Photos/17.08.21 Eclipse Trip/6DII/IMG_{:04d}_0.tif"
    for p in [img_tmpl.format(i) for i in (79, 80, 81, 82)]:
        logging.debug("image: %s", p)
        data_model.add_image(p)
    ref_img = data_model.images[0]
    f = ref_img.focal_len
    img_shape = ref_img.fullsize_gray_image.shape
    img_size = np.array([img_shape[1], img_shape[0]])  # (width, height)
    data_model.reset_result()
    # Features for the reference frame.
    pts, vol = ImageProcessing.detect_star_points(ref_img.fullsize_gray_image)
    sph = ImageProcessing.convert_to_spherical_coord(pts, np.array((img_shape[1], img_shape[0])), f)
    feature = ImageProcessing.extract_point_features(sph, vol)
    ref_img.features["pts"] = pts
    ref_img.features["sph"] = sph
    ref_img.features["vol"] = vol
    ref_img.features["feature"] = feature
    data_model.final_sky_img = np.copy(ref_img.original_image).astype("float32") / np.iinfo(
        ref_img.original_image.dtype).max
    # Features for the second frame, then match and warp it onto the reference.
    img = data_model.images[1]
    pts, vol = ImageProcessing.detect_star_points(img.fullsize_gray_image)
    sph = ImageProcessing.convert_to_spherical_coord(pts, img_size, f)
    feature = ImageProcessing.extract_point_features(sph, vol)
    img.features["pts"] = pts
    img.features["sph"] = sph
    img.features["vol"] = vol
    img.features["feature"] = feature
    pair_idx = ImageProcessing.find_initial_match(img.features, ref_img.features)
    tf, pair_idx = ImageProcessing.fine_tune_transform(img.features, ref_img.features, pair_idx)
    img_tf = cv2.warpPerspective(img.original_image, tf[0], tuple(img_size))
    img_tf = img_tf.astype("float32") / np.iinfo(img_tf.dtype).max
    # Simple two-frame average, written back as 16-bit with the original exif.
    data_model.final_sky_img = data_model.final_sky_img / 2 + img_tf / 2
    result_img = (data_model.final_sky_img * np.iinfo("uint16").max).astype("uint16")
    ImageProcessing.save_tif_image("test.tif", result_img, data_model.images[0].exif_info)
| LoveDaisy/star_alignment | python/DataModel.py | Python | bsd-3-clause | 20,257 |
package net.minecraft.server;
import forge.Configuration;
import forge.IGuiHandler;
import forge.MinecraftForge;
import forge.NetworkMod;
import ic2.api.Ic2Recipes;
import ic2.api.Items;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
import nuclearcontrol.BlockNuclearControlMain;
import nuclearcontrol.ContainerRemoteThermo;
import nuclearcontrol.ItemNuclearControlMain;
import nuclearcontrol.ItemRemoteSensorKit;
import nuclearcontrol.ItemSensorLocationCard;
import nuclearcontrol.ItemToolDigitalThermometer;
import nuclearcontrol.ItemToolThermometer;
import nuclearcontrol.ThermometerVersion;
import nuclearcontrol.TileEntityHowlerAlarm;
import nuclearcontrol.TileEntityIC2Thermo;
import nuclearcontrol.TileEntityIndustrialAlarm;
import nuclearcontrol.TileEntityRemoteThermo;
public class mod_IC2NuclearControl extends NetworkMod implements IGuiHandler
{
private static final String CONFIG_NUCLEAR_CONTROL = "IC2NuclearControl.cfg";
private static final String CONFIG_THERMO_BLOCK = "mod_thermo.cfg";
private static final String CONFIG_THERMOMETER = "IC2Thermometer.cfg";
public static Item itemToolThermometer;
public static Item itemToolDigitalThermometer;
public static Item itemRemoteSensorKit;
public static Item itemSensorLocationCard;
public static Block blockNuclearControlMain;
public static int modelId;
public static float alarmRange;
private static mod_IC2NuclearControl instance;
public static boolean isClient()
{
return false;
}
public boolean clientSideRequired()
{
return true;
}
public boolean serverSideRequired()
{
return false;
}
private static File getConfigFile(String var0)
{
return new File(new File("config"), var0);
}
private static Configuration importConfig() throws IOException
{
int var0 = -1;
int var1 = -1;
int var2 = -1;
File var4 = getConfigFile("mod_thermo.cfg");
if (var4.exists() && var4.canRead())
{
Properties var5 = new Properties();
var5.load(new FileInputStream(var4));
var0 = Integer.parseInt(var5.getProperty("thermo_blockid", "192"));
}
var4 = getConfigFile("IC2Thermometer.cfg");
Configuration var3;
if (var4.exists() && var4.canRead())
{
var3 = new Configuration(var4);
var3.load();
var1 = getOldIdFor(var3, "itemToolThermometer", 31000);
var2 = getOldIdFor(var3, "itemToolDigitalThermometer", 31001);
}
var4 = getConfigFile("IC2NuclearControl.cfg");
var3 = new Configuration(var4);
var3.load();
if (var1 != -1)
{
getIdFor(var3, "itemToolThermometer", var1, false);
}
if (var2 != -1)
{
getIdFor(var3, "itemToolDigitalThermometer", var2, false);
}
if (var0 != -1)
{
getIdFor(var3, "blockNuclearControlMain", var0, true);
}
var3.save();
return var3;
}
public void load()
{
instance = this;
Configuration var1;
try
{
File var2 = getConfigFile("IC2NuclearControl.cfg");
if (!var2.exists())
{
var1 = importConfig();
}
else
{
var1 = new Configuration(var2);
var1.load();
}
}
catch (IOException var3)
{
var3.printStackTrace();
var1 = null;
}
ModLoader.setInGameHook(this, true, false);
this.initBlocks(var1);
this.registerBlocks();
ModLoader.registerTileEntity(TileEntityIC2Thermo.class, "IC2Thermo");
ModLoader.registerTileEntity(TileEntityHowlerAlarm.class, "IC2HowlerAlarm");
ModLoader.registerTileEntity(TileEntityIndustrialAlarm.class, "IC2IndustrialAlarm");
ModLoader.registerTileEntity(TileEntityRemoteThermo.class, "IC2RemoteThermo");
MinecraftForge.setGuiHandler(this, this);
if (var1 != null)
{
var1.save();
}
}
public void modsLoaded()
{
super.modsLoaded();
this.addRecipes();
}
private static int getIdFor(Configuration var0, String var1, int var2, boolean var3)
{
try
{
return var3 ? (new Integer(var0.getOrCreateBlockIdProperty(var1, var2).value)).intValue() : (new Integer(var0.getOrCreateIntProperty(var1, "item", var2).value)).intValue();
}
catch (Exception var5)
{
System.out.println("Can\'t get id for :" + var1);
return var2;
}
}
private static int getOldIdFor(Configuration var0, String var1, int var2)
{
try
{
return (new Integer(var0.getOrCreateIntProperty(var1, "general", var2).value)).intValue();
}
catch (Exception var4)
{
System.out.println("Can\'t get id for :" + var1);
return var2;
}
}
public void initBlocks(Configuration var1)
{
blockNuclearControlMain = (new BlockNuclearControlMain(getIdFor(var1, "blockNuclearControlMain", 192, true), 0)).c(0.5F).a("blockThermalMonitor").j();
itemToolThermometer = (new ItemToolThermometer(getIdFor(var1, "itemToolThermometer", 31000, false), 2, ThermometerVersion.ANALOG)).a("ItemToolThermometer");
itemToolDigitalThermometer = (new ItemToolDigitalThermometer(getIdFor(var1, "itemToolDigitalThermometer", 31001, false), 18, ThermometerVersion.DIGITAL, 1, 80, 80)).a("ItemToolDigitalThermometer");
itemRemoteSensorKit = (new ItemRemoteSensorKit(getIdFor(var1, "itemRemoteSensorKit", 31002, false), 34)).a("ItemRemoteSensorKit");
itemSensorLocationCard = (new ItemSensorLocationCard(getIdFor(var1, "itemSensorLocationCard", 31003, false), 50)).a("ItemSensorLocationCard");
}
public void registerBlocks()
{
ModLoader.registerBlock(blockNuclearControlMain, ItemNuclearControlMain.class);
}
public void addRecipes()
{
ItemStack var1 = new ItemStack(blockNuclearControlMain, 1, 0);
Ic2Recipes.addCraftingRecipe(var1, new Object[] {"GGG", "GCG", "GRG", 'G', Items.getItem("reinforcedGlass"), 'R', Item.REDSTONE, 'C', Items.getItem("advancedCircuit")});
ItemStack var2 = new ItemStack(blockNuclearControlMain, 1, 2);
Ic2Recipes.addCraftingRecipe(var2, new Object[] {"NNN", "ICI", "IRI", 'I', Item.IRON_INGOT, 'R', Item.REDSTONE, 'N', Block.NOTE_BLOCK, 'C', Items.getItem("electronicCircuit")});
ItemStack var3 = new ItemStack(blockNuclearControlMain, 1, 1);
Ic2Recipes.addCraftingRecipe(var3, new Object[] {"GOG", "GHG", "GRG", 'G', Items.getItem("reinforcedGlass"), 'O', new ItemStack(Item.INK_SACK, 1, 14), 'R', Item.REDSTONE, 'H', var2});
Ic2Recipes.addCraftingRecipe(new ItemStack(blockNuclearControlMain, 1, 3), new Object[] {" F ", " M ", " T ", 'T', var1, 'M', Items.getItem("machine"), 'F', Items.getItem("frequencyTransmitter")});
Ic2Recipes.addCraftingRecipe(new ItemStack(itemToolThermometer, 1), new Object[] {"IG ", "GWG", " GG", 'G', Block.GLASS, 'I', Item.IRON_INGOT, 'W', Items.getItem("waterCell")});
ItemStack var4 = new ItemStack(itemToolDigitalThermometer, 1);
Ic2Recipes.addCraftingRecipe(var4, new Object[] {"I ", "IC ", " GI", 'G', Item.GLOWSTONE_DUST, 'I', Items.getItem("refinedIronIngot"), 'C', Items.getItem("electronicCircuit")});
Ic2Recipes.addCraftingRecipe(new ItemStack(itemRemoteSensorKit, 1), new Object[] {" F", " D ", "P ", 'P', Item.PAPER, 'D', var4, 'F', Items.getItem("frequencyTransmitter")});
}
    // Static helper used by block/tile code to open one of this mod's GUIs
    // (dispatched back through getGuiElement via the gui id var5) at the given
    // block coordinates for the given player.
    public static void launchGui(World var0, int var1, int var2, int var3, EntityHuman var4, int var5)
    {
        var4.openGui(instance, var5, var0, var1, var2, var3);
    }
    // Sends a chat line directly to a single player by writing a Packet3Chat
    // on that player's network handler. NOTE(review): the unchecked cast
    // assumes var0 is always a server-side EntityPlayer — confirm callers.
    public static void chatMessage(EntityHuman var0, String var1)
    {
        ((EntityPlayer)var0).netServerHandler.sendPacket(new Packet3Chat(var1));
    }
public Object getGuiElement(int var1, EntityHuman var2, World var3, int var4, int var5, int var6)
{
switch (var1)
{
case 0:
return null;
case 3:
TileEntity var7 = var3.getTileEntity(var4, var5, var6);
return new ContainerRemoteThermo(var2, (TileEntityRemoteThermo)var7);
default:
return null;
}
}
public String getVersion()
{
return "v1.1.6";
}
public String getPriorities()
{
return "after:mod_IC2";
}
}
| mushroomhostage/ic2-nuclear-control | 1.1.6bukkit/mod_IC2NuclearControl.java | Java | bsd-3-clause | 8,747 |
'''
A WSGI Middleware is a function or callable object similar to a
:ref:`WSGI application handlers <wsgi-handlers>`
with the only difference that it can return nothing (``None``).
Middleware can be used in conjunction with a
:ref:`WsgiHandler <wsgi-handler>` or any
other handlers which iterate through a list of middleware in a similar
way (for example the django wsgi handler).
.. important::
An asynchronous WSGI middleware is a callable accepting a WSGI
``environ`` and ``start_response`` as the only input parameters and
    it must return an :ref:`asynchronous iterator <wsgi-async-iter>`
or nothing.
The two most important wsgi middleware in pulsar are:
* the :ref:`Router <wsgi-router>` for serving dynamic web applications
* the :ref:`MediaRouter <wsgi-media-router>` for serving static files
In addition, pulsar provides the following four middlewares, which don't
serve requests; instead they perform initialisation and sanity checks.
.. _wsgi-additional-middleware:
Clean path
~~~~~~~~~~~~~~~~~~
.. autofunction:: clean_path_middleware
Authorization
~~~~~~~~~~~~~~~~~~
.. autofunction:: authorization_middleware
.. _wait-for-body-middleware:
Wait for request body
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: wait_for_body_middleware
Middleware in Executor
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: middleware_in_executor
'''
import re
from io import BytesIO
from functools import wraps
from asyncio import get_event_loop
from pulsar.api import HttpRedirect
from .auth import parse_authorization_header
def clean_path_middleware(environ, start_response=None):
    '''Collapse duplicated slashes in the request path.

    Does nothing when ``PATH_INFO`` is empty or already clean; otherwise
    raises :class:`HttpRedirect` pointing at the normalised url, with the
    query string preserved.
    '''
    path = environ['PATH_INFO']
    if not path or '//' not in path:
        return
    target = re.sub("/+", '/', path)
    if not target.startswith('/'):
        target = '/%s' % target
    query = environ['QUERY_STRING']
    if query:
        target = '%s?%s' % (target, query)
    raise HttpRedirect(target)
def authorization_middleware(environ, start_response=None):
    '''Parse the ``HTTP_AUTHORIZATION`` key in the ``environ``.

    When the header is present and ``http.authorization`` has not been set
    yet, store the value produced by
    :func:`~.parse_authorization_header` under that key.
    '''
    key = 'http.authorization'
    if environ.get(key) is not None:
        return
    header_name = 'HTTP_AUTHORIZATION'
    if header_name in environ:
        environ[key] = parse_authorization_header(environ[header_name])
async def wait_for_body_middleware(environ, start_response=None):
    '''Use this middleware to wait for the full body.

    When the request is marked asynchronous (``wsgi.async``), await the whole
    body, replace ``wsgi.input`` with an in-memory :class:`BytesIO` and drop
    the ``wsgi.async`` flag so downstream (synchronous) middleware can read
    the body without blocking the event loop.
    '''
    if not environ.get('wsgi.async'):
        return
    try:
        body = await environ['wsgi.input'].read()
    except TypeError:
        # read() was not awaitable — treat as an empty body
        body = b''
    environ['wsgi.input'] = BytesIO(body)
    environ.pop('wsgi.async')
def middleware_in_executor(middleware):
    '''Use this middleware to run a synchronous middleware in the event loop
    executor.

    Returns a wrapper with the same signature; calling it schedules
    ``middleware(environ, start_response)`` on the loop's default executor
    and returns the resulting future.
    '''
    @wraps(middleware)
    def wrapper(environ, start_response):
        return get_event_loop().run_in_executor(
            None, middleware, environ, start_response)
    return wrapper
| quantmind/pulsar | pulsar/apps/wsgi/middleware.py | Python | bsd-3-clause | 3,415 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/device_monitors/system_message_window_win.h"
#include <dbt.h>
#include <string>
#include <vector>
#include "base/files/file_path.h"
#include "base/message_loop/message_loop.h"
#include "base/system_monitor/system_monitor.h"
#include "base/test/mock_devices_changed_observer.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace media {
// Fixture that registers a MockDevicesChangedObserver with the process-wide
// base::SystemMonitor so the tests below can assert on notifications
// triggered through SystemMessageWindowWin::OnDeviceChange.
class SystemMessageWindowWinTest : public testing::Test {
 public:
  ~SystemMessageWindowWinTest() override {}

 protected:
  void SetUp() override {
    system_monitor_.AddDevicesChangedObserver(&observer_);
  }

  // NOTE(review): members are constructed in declaration order — the
  // MessageLoop presumably must exist before SystemMessageWindowWin is
  // created; confirm before reordering.
  base::MessageLoop message_loop_;
  base::SystemMonitor system_monitor_;
  base::MockDevicesChangedObserver observer_;
  SystemMessageWindowWin window_;
};
TEST_F(SystemMessageWindowWinTest, DevicesChanged) {
  // A DBT_DEVNODES_CHANGED broadcast must produce exactly one
  // OnDevicesChanged notification to registered observers.
  EXPECT_CALL(observer_, OnDevicesChanged(testing::_)).Times(1);

  window_.OnDeviceChange(DBT_DEVNODES_CHANGED, NULL);
  message_loop_.RunUntilIdle();
}
TEST_F(SystemMessageWindowWinTest, RandomMessage) {
  // An unrelated device event (DBT_DEVICEQUERYREMOVE) is processed without
  // crashing; no EXPECT_CALL is set, so no observer notification is expected.
  window_.OnDeviceChange(DBT_DEVICEQUERYREMOVE, NULL);
  message_loop_.RunUntilIdle();
}
| danakj/chromium | media/device_monitors/system_message_window_win_unittest.cc | C++ | bsd-3-clause | 1,322 |
<?php
/*
* This file is part of the 'octris/php-tmdialog' package.
*
* (c) Harald Lapp <harald@octris.org>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Octris\TMDialog;
/**
* Library reading plist XML files.
*
* @copyright copyright (c) 2014 by Harald Lapp
* @author Harald Lapp <harald@octris.org>
*/
class Plist
{
    /**
     * Constructor.
     */
    public function __construct()
    {
    }

    /**
     * Main parse method. Dispatches on the plist node name: scalar nodes
     * (integer, real, string, data, date) return their raw text content,
     * booleans return a PHP bool and container nodes (dict, array) are
     * delegated to the matching parse<Type> method.
     *
     * Fix: the 'real' node type previously fell through to the default
     * branch and — lacking a parseReal method — was silently returned as
     * null, dropping every float value in the plist.
     *
     * @param   \DOMNode        $node           DOMNode to parse.
     * @return  mixed                           Data of parsed node.
     */
    protected function parse(\DOMNode $node)
    {
        $type = $node->nodeName;
        $name = 'parse' . ucfirst($type);

        switch ($type) {
            case 'integer':
            case 'real':
            case 'string':
            case 'data':
            case 'date':
                // Scalars are returned verbatim as text; callers cast as needed.
                $return = $node->textContent;
                break;
            case 'true':
            case 'false':
                $return = ($type == 'true');
                break;
            default:
                // Container types resolve to parseDict / parseArray via
                // dynamic dispatch; unknown node names yield null.
                if ($type != '' && method_exists($this, $name)) {
                    $return = $this->{$name}($node);
                } else {
                    $return = null;
                }
        }

        return $return;
    }

    /**
     * Parse a plist dictionary: alternating <key>/<value> children become an
     * associative array, recursively parsing each value node.
     *
     * @param   \DOMNode        $node           DOMNode to parse.
     * @return  array                           Data of parsed node.
     */
    public function parseDict(\DOMNode $node)
    {
        $dict = array();

        // for each child of this node
        for ($child = $node->firstChild; $child != null; $child = $child->nextSibling) {
            if ($child->nodeName == 'key') {
                $key = $child->textContent;

                $vnode = $child->nextSibling;

                // skip text nodes (whitespace between elements)
                while ($vnode->nodeType == XML_TEXT_NODE) {
                    $vnode = $vnode->nextSibling;
                }

                // recursively parse the value node
                $value = $this->parse($vnode);

                $dict[$key] = $value;
            }
        }

        return $dict;
    }

    /**
     * Parse a plist array: every element child is recursively parsed and
     * appended to a numerically indexed PHP array.
     *
     * @param   \DOMNode        $node           DOMNode to parse.
     * @return  array                           Data of parsed node.
     */
    protected function parseArray(\DOMNode $node)
    {
        $array = array();

        for ($child = $node->firstChild; $child != null; $child = $child->nextSibling) {
            if ($child->nodeType == XML_ELEMENT_NODE) {
                $array[] = $this->parse($child);
            }
        }

        return $array;
    }

    /**
     * Process plist XML: loads the document and parses the first element
     * child of the root <plist> node.
     *
     * @param   string          $xml            Plist XML to process.
     * @return  mixed                           Data of parsed plist XML.
     */
    public function process($xml)
    {
        $plist = new \DOMDocument();
        $plist->loadXML($xml);

        $root = $plist->documentElement->firstChild;

        // skip whitespace text nodes before the payload element
        while ($root->nodeName == '#text') {
            $root = $root->nextSibling;
        }

        return $this->parse($root);
    }
}
| octris/php-tmdialog | libs/Plist.php | PHP | bsd-3-clause | 3,308 |
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.template.context_processors import csrf
from django.http import HttpResponse
import cStringIO
from data.models import RadiosondeMeasurement, WeatherMeasurement, MeasuringDevice
from forms import WeatherMeasurementForm, RadiosondeMeasurementForm
import numpy as np
import scipy.io
import xlsxwriter
@login_required
def index(request):
    """
    Main view for the page, handles the forms.

    On POST, each submit button is matched against a dispatch table; the
    submitted data is cached in the session and, when the bound form
    validates, control is handed to the matching export view. Otherwise the
    page is re-rendered with forms seeded from the session.
    """
    # (button name, form class, session key, export view) — same order as the
    # original button checks.
    dispatch = [
        ('matlab-weather', WeatherMeasurementForm, 'POSTweather', matlab_weather),
        ('matlab-radiosonde', RadiosondeMeasurementForm, 'POSTradiosonde', matlab_radiosonde),
        ('excel-weather', WeatherMeasurementForm, 'POSTweather', excel_weather),
        ('excel-radiosonde', RadiosondeMeasurementForm, 'POSTradiosonde', excel_radiosonde),
    ]
    if request.POST:
        form_values = request.POST
        for button, form_cls, session_key, handler in dispatch:
            if button in form_values:
                bound_form = form_cls(form_values)
                request.session[session_key] = form_values
                if bound_form.is_valid():
                    return handler(request, bound_form)
    # Rebuild both forms, seeded from session data when previously submitted.
    if 'POSTweather' in request.session:
        form_weather = WeatherMeasurementForm(request.session['POSTweather'])
    else:
        form_weather = WeatherMeasurementForm()
    if 'POSTradiosonde' in request.session:
        form_radiosonde = RadiosondeMeasurementForm(request.session['POSTradiosonde'])
    else:
        form_radiosonde = RadiosondeMeasurementForm()
    args = {}
    args.update(csrf(request))
    args['form_weather'] = form_weather
    args['form_radiosonde'] = form_radiosonde
    # Only show each form when a matching device and some data exist.
    args['radiosonde_data_available'] = bool(
        MeasuringDevice.objects.filter(type='R').exists()
        and RadiosondeMeasurement.objects.exists())
    args['weather_data_available'] = bool(
        MeasuringDevice.objects.filter(type='S').exists()
        and WeatherMeasurement.objects.exists())
    args['title'] = 'Data downloads'
    return render(request, 'downloads/index.html', args)
@login_required
def matlab_radiosonde(request, form):
    """
    Reads the radiosonde form and converts the data into a matlab file.

    Builds a nested numpy record structure:
    day field (yYYYYmMMdDD) -> AM/PM field -> structured array with one
    float64 column per selected measurement field, then streams it out as a
    .mat attachment named 'radiosonde.mat'.
    """
    start = form.cleaned_data['start_date_radiosonde']
    end = form.cleaned_data['end_date_radiosonde']
    time = form.cleaned_data['time_radiosonde']
    fields = form.cleaned_data['fields_radiosonde']
    query = RadiosondeMeasurement.objects.filter(date__gte = start, date__lte = end, time__in = time).values()
    # Group raw rows by formatted date, then by measurement time.
    radiosonde = dict()
    for elem in query:
        date = elem['date'].strftime('y%Ym%md%d')
        if date not in radiosonde:
            radiosonde[date] = dict()
        # NOTE(review): bucket is created under str(elem['time']) but probed
        # and appended under elem['time'] — identical only if 'time' is
        # already a string ('AM'/'PM'); confirm against the model definition.
        if elem['time'] not in radiosonde[date]:
            radiosonde[date][str(elem['time'])] = []
        radiosonde[date][elem['time']].append(elem)
    # One float64 field per requested measurement column.
    dtfields = []
    for f in fields:
        dtfields.append((str(f), 'f8'))
    # Convert each (date, time) bucket into a structured numpy array.
    for d in radiosonde:
        for t in radiosonde[d]:
            nbElems = len(radiosonde[d][t])
            res = np.zeros((nbElems,), dtype=dtfields)
            idx = 0
            for elem in radiosonde[d][t]:
                for f in fields:
                    res[idx][str(f)] = elem[str(f)]
                idx = idx + 1
            radiosonde[d][t] = res
    # Wrap the AM/PM arrays into a one-element record per day, so MATLAB sees
    # radiosonde.<day>.AM / radiosonde.<day>.PM struct fields.
    for d in radiosonde:
        if 'AM' in radiosonde[d] and 'PM' in radiosonde[d]:
            dtAMPM = [('AM', np.object), ('PM', np.object)]
            res = np.zeros((1,), dtype=dtAMPM)
            res[0]['AM'] = radiosonde[d]['AM']
            res[0]['PM'] = radiosonde[d]['PM']
            radiosonde[d] = res
        elif 'AM' in radiosonde[d]:
            dtAM = [('AM', np.object)]
            res = np.zeros((1,), dtype=dtAM)
            res[0]['AM'] = radiosonde[d]['AM']
            radiosonde[d] = res
        elif 'PM' in radiosonde[d]:
            dtAM = [('PM', np.object)]
            res = np.zeros((1,), dtype=dtAM)
            res[0]['PM'] = radiosonde[d]['PM']
            radiosonde[d] = res
    # Top-level record: one object field per day, sorted chronologically
    # (the yYmMdD formatting makes lexicographic order chronological).
    dtdays = []
    for d in radiosonde:
        dtdays.append((d, np.object))
    dtdays.sort()
    result = np.zeros((1,), dtype=dtdays)
    for d in radiosonde:
        result[0][d] = radiosonde[d]
    # Serialise to .mat in memory and stream as a download.
    fobj = cStringIO.StringIO()
    response = HttpResponse(content_type='application/matlab-mat')
    response['Content-Disposition'] = 'attachment; filename=radiosonde.mat'
    scipy.io.savemat(fobj, {'radiosonde': result}, oned_as='column')
    response.write(fobj.getvalue())
    return response
@login_required
def matlab_weather(request, form):
    """
    Reads the weather form and converts the data into a matlab file.

    Builds a nested numpy record structure:
    day field (yYYYYmMMdDD) -> time field (hHHmMMsSS) -> structured array
    with one float64 column per selected measurement field, then streams it
    out as a .mat attachment named 'weather.mat'.
    """
    start_date = form.cleaned_data['start_date_weather']
    end_date = form.cleaned_data['end_date_weather']
    start_time = form.cleaned_data['start_time_weather']
    end_time = form.cleaned_data['end_time_weather']
    measuring_device = MeasuringDevice.objects.get(id = form.cleaned_data['measuring_device_weather'])
    fields = form.cleaned_data['fields_weather']
    query = WeatherMeasurement.objects.filter(date__gte = start_date, date__lte = end_date, time__gte = start_time, time__lte = end_time, device = measuring_device).values()
    # Group raw rows by formatted date, then by formatted time.
    weather = dict()
    for elem in query:
        date = elem['date'].strftime('y%Ym%md%d')
        time = elem['time'].strftime('h%Hm%Ms%S')
        if date not in weather:
            weather[date] = dict()
        # BUGFIX: previously probed with the raw time object
        # (elem['time'] not in weather[date]) against string keys, which was
        # always True and re-created the bucket, dropping any earlier rows
        # sharing the same formatted timestamp. Probe with the string key.
        if time not in weather[date]:
            weather[date][time] = []
        weather[date][time].append(elem)
    # One float64 field per requested measurement column.
    dtfields = []
    for f in fields:
        dtfields.append((str(f), 'f8'))
    # Convert each (date, time) bucket into a structured numpy array.
    for d in weather:
        for t in weather[d]:
            nbElems = len(weather[d][t])
            res = np.zeros((nbElems,), dtype=dtfields)
            idx = 0
            for elem in weather[d][t]:
                for f in fields:
                    res[idx][str(f)] = elem[str(f)]
                idx = idx + 1
            weather[d][t] = res
    # Wrap each day's times into a one-element record so MATLAB sees
    # weather.<day>.<time> struct fields, sorted chronologically
    # (the fixed-width formatting makes lexicographic order chronological).
    for d in weather:
        dttime = []
        for t in weather[d]:
            dttime.append((t, np.object))
        dttime.sort()
        resultTime = np.zeros((1,), dtype=dttime)
        for t in weather[d]:
            resultTime[0][t] = weather[d][t]
        weather[d] = resultTime
    # Top-level record: one object field per day.
    dtdays = []
    for d in weather:
        dtdays.append((d, np.object))
    dtdays.sort()
    result = np.zeros((1,), dtype=dtdays)
    for d in weather:
        result[0][d] = weather[d]
    # Serialise to .mat in memory and stream as a download.
    fobj = cStringIO.StringIO()
    response = HttpResponse(content_type='application/matlab-mat')
    response['Content-Disposition'] = 'attachment; filename=weather.mat'
    scipy.io.savemat(fobj, {'weather': result}, oned_as='column')
    response.write(fobj.getvalue())
    return response
@login_required
def excel_radiosonde(request, form):
    """
    Reads the radiosonde form and converts the data into an excel file.

    One row per measurement (ordered by date) with a Date/Time prefix and one
    column per selected field, streamed as 'radiosonde.xlsx'.
    """
    start = form.cleaned_data['start_date_radiosonde']
    end = form.cleaned_data['end_date_radiosonde']
    time = form.cleaned_data['time_radiosonde']
    fields = form.cleaned_data['fields_radiosonde']
    query = RadiosondeMeasurement.objects.filter(date__gte = start, date__lte = end, time__in = time).order_by('date').values()
    # Build the workbook in memory and stream it as a download.
    fobj = cStringIO.StringIO()
    response = HttpResponse(content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
    response['Content-Disposition'] = 'attachment; filename=radiosonde.xlsx'
    workbook = xlsxwriter.Workbook(fobj)
    worksheet = workbook.add_worksheet()
    # Adjust the column width.
    worksheet.set_column(0, 0, 10)
    # Adjust the column width.
    worksheet.set_column(1, 1, 5)
    bold = workbook.add_format({'bold': 1})
    # Use the same date display format as the weather export (was
    # 'dd mm yyyy', inconsistent with excel_weather's 'dd/mm/yyyy').
    date_format = workbook.add_format({'num_format': 'dd/mm/yyyy'})
    # Header row: Date/Time plus one bold header per selected field.
    worksheet.write(0,0, 'Date', bold)
    worksheet.write(0,1, 'Time', bold)
    col = 2
    if 'pressure' in fields:
        worksheet.write(0, col, 'Atmospheric pressure (hPa)', bold)
        col = col + 1
    if 'height' in fields:
        worksheet.write(0, col, 'Geopotential height (m)', bold)
        col = col + 1
    if 'temperature' in fields:
        worksheet.write(0, col, 'Temperature (C)', bold)
        col = col + 1
    if 'dew_point' in fields:
        worksheet.write(0, col, 'Dewpoint temperature (C)', bold)
        col = col + 1
    if 'rel_humidity' in fields:
        worksheet.write(0, col, 'Relative humidity (%)', bold)
        col = col + 1
    if 'wind_direction' in fields:
        worksheet.write(0, col, 'Wind direction (deg)', bold)
        col = col + 1
    if 'wind_speed' in fields:
        worksheet.write(0, col, 'Wind speed (m/s)', bold)
        col = col + 1
    # Data rows, one per measurement. (Removed a dead `col = 2` that was
    # left inside the loop after the enumerate() refactor.)
    for row, elem in enumerate(query, start = 1):
        worksheet.write_datetime(row, 0, elem['date'], date_format)
        worksheet.write_string(row, 1, elem['time'])
        for col, f in enumerate(fields, start = 2):
            worksheet.write(row, col, elem[f])
    workbook.close()
    response.write(fobj.getvalue())
    return response
@login_required
def excel_weather(request, form):
    """
    Reads the weather form and converts the data into an excel file.

    One row per measurement with a Date/Time prefix and one column per
    selected field, streamed as 'weather.xlsx'.
    """
    start_date = form.cleaned_data['start_date_weather']
    end_date = form.cleaned_data['end_date_weather']
    start_time = form.cleaned_data['start_time_weather']
    end_time = form.cleaned_data['end_time_weather']
    measuring_device = MeasuringDevice.objects.get(id = form.cleaned_data['measuring_device_weather'])
    fields = form.cleaned_data['fields_weather']
    query = WeatherMeasurement.objects.filter(date__gte = start_date, date__lte = end_date, time__gte = start_time, time__lte = end_time, device = measuring_device).values()
    # Build the workbook in memory and stream it as a download.
    fobj = cStringIO.StringIO()
    response = HttpResponse(content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
    # BUGFIX: the attachment was wrongly named 'radiosonde.xlsx'
    # (copy/paste from excel_radiosonde).
    response['Content-Disposition'] = 'attachment; filename=weather.xlsx'
    workbook = xlsxwriter.Workbook(fobj)
    worksheet = workbook.add_worksheet()
    # Adjust the column widths.
    worksheet.set_column(0, 0, 10)
    worksheet.set_column(1, 1, 5)
    bold = workbook.add_format({'bold': 1})
    date_format = workbook.add_format({'num_format': 'dd/mm/yyyy'})
    time_format = workbook.add_format({'num_format': 'hh:mm'})
    worksheet.write(0,0, 'Date', bold)
    worksheet.write(0,1, 'Time', bold)
    # Human-readable header text per field key.
    texts = {'temperature':'Temperature (C)',
             'humidity':'Humidity (%)',
             'dew_point':'Dew point (C)',
             'wind_speed':'Wind speed (m/s)',
             'wind_direction':'Wind direction (deg)',
             'pressure':'Pressure (hPa)',
             'rainfall_rate':'Rainfall rate (mm/hr)',
             'solar_radiation':'Solar radiation (W/m2)',
             'uv_index':'UV Index'}
    # CONSISTENCY: apply the bold header format like excel_radiosonde does.
    for col, f in enumerate(fields, start = 2):
        worksheet.write(0, col, texts[f], bold)
    for row, elem in enumerate(query, start = 1):
        worksheet.write_datetime(row, 0, elem['date'], date_format)
        worksheet.write_datetime(row, 1, elem['time'], time_format)
        for col, f in enumerate(fields, start = 2):
            worksheet.write(row, col, elem[f])
    workbook.close()
    response.write(fobj.getvalue())
    return response
| FSavoy/visuo-server | downloads/views.py | Python | bsd-3-clause | 12,500 |
import type Set from "../Set.js";
import everySet from "./everySet";
function equalsSet<T>(one: Set<T>, two: Set<T>): boolean {
if (one.size !== two.size) {
return false;
}
return everySet(one, value => two.has(value));
}
export default equalsSet;
| teasim/helpers | source/functional/equalsSet.js | JavaScript | bsd-3-clause | 255 |
""" test with the .transform """
from io import StringIO
import numpy as np
import pytest
from pandas.core.dtypes.common import ensure_platform_int, is_timedelta64_dtype
import pandas as pd
from pandas import (
Categorical,
DataFrame,
MultiIndex,
Series,
Timestamp,
concat,
date_range,
)
import pandas._testing as tm
from pandas.core.groupby.groupby import DataError
def assert_fp_equal(a, b):
    """Assert element-wise equality of two float arrays within 1e-12."""
    assert np.all(np.abs(a - b) < 1e-12)
def test_transform():
    """Basic transform semantics: alignment, input-order preservation (GH 8046),
    frame-level demeaning, Grouper support (GH 8430) and builtin max (GH 9700)."""
    data = Series(np.arange(9) // 3, index=np.arange(9))

    index = np.arange(9)
    np.random.shuffle(index)
    data = data.reindex(index)

    grouped = data.groupby(lambda x: x // 3)

    transformed = grouped.transform(lambda x: x * x.sum())
    assert transformed[7] == 12

    # GH 8046
    # make sure that we preserve the input order
    df = DataFrame(
        np.arange(6, dtype="int64").reshape(3, 2), columns=["a", "b"], index=[0, 2, 1]
    )
    key = [0, 0, 1]
    expected = (
        df.sort_index()
        .groupby(key)
        .transform(lambda x: x - x.mean())
        .groupby(key)
        .mean()
    )
    result = df.groupby(key).transform(lambda x: x - x.mean()).groupby(key).mean()
    tm.assert_frame_equal(result, expected)

    def demean(arr):
        return arr - arr.mean()

    people = DataFrame(
        np.random.randn(5, 5),
        columns=["a", "b", "c", "d", "e"],
        index=["Joe", "Steve", "Wes", "Jim", "Travis"],
    )
    key = ["one", "two", "one", "two", "one"]
    result = people.groupby(key).transform(demean).groupby(key).mean()
    expected = people.groupby(key).apply(demean).groupby(key).mean()
    tm.assert_frame_equal(result, expected)

    # GH 8430
    df = tm.makeTimeDataFrame()
    g = df.groupby(pd.Grouper(freq="M"))
    g.transform(lambda x: x - 1)

    # GH 9700
    df = DataFrame({"a": range(5, 10), "b": range(5)})
    result = df.groupby("a").transform(max)
    expected = DataFrame({"b": range(5)})
    tm.assert_frame_equal(result, expected)
def test_transform_fast():
    """Fast-path transform: string/callable parity, first-aggregation
    broadcasting with mixed dtypes (GH 12737), selection and duplicate columns."""
    df = DataFrame({"id": np.arange(100000) / 3, "val": np.random.randn(100000)})

    grp = df.groupby("id")["val"]

    # manually broadcast the group means back to the original index
    values = np.repeat(grp.mean().values, ensure_platform_int(grp.count().values))
    expected = Series(values, index=df.index, name="val")

    result = grp.transform(np.mean)
    tm.assert_series_equal(result, expected)

    result = grp.transform("mean")
    tm.assert_series_equal(result, expected)

    # GH 12737
    df = DataFrame(
        {
            "grouping": [0, 1, 1, 3],
            "f": [1.1, 2.1, 3.1, 4.5],
            "d": pd.date_range("2014-1-1", "2014-1-4"),
            "i": [1, 2, 3, 4],
        },
        columns=["grouping", "f", "i", "d"],
    )
    result = df.groupby("grouping").transform("first")

    dates = [
        Timestamp("2014-1-1"),
        Timestamp("2014-1-2"),
        Timestamp("2014-1-2"),
        Timestamp("2014-1-4"),
    ]
    expected = DataFrame(
        {"f": [1.1, 2.1, 2.1, 4.5], "d": dates, "i": [1, 2, 2, 4]},
        columns=["f", "i", "d"],
    )
    tm.assert_frame_equal(result, expected)

    # selection
    result = df.groupby("grouping")[["f", "i"]].transform("first")
    expected = expected[["f", "i"]]
    tm.assert_frame_equal(result, expected)

    # dup columns
    df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=["g", "a", "a"])
    result = df.groupby("g").transform("first")
    expected = df.drop("g", axis=1)
    tm.assert_frame_equal(result, expected)
def test_transform_broadcast(tsframe, ts):
    """Transform broadcasts group aggregates back onto the original index,
    for Series, DataFrame, and column-axis grouping."""
    grouped = ts.groupby(lambda x: x.month)
    result = grouped.transform(np.mean)

    tm.assert_index_equal(result.index, ts.index)
    for _, gp in grouped:
        assert_fp_equal(result.reindex(gp.index), gp.mean())

    grouped = tsframe.groupby(lambda x: x.month)
    result = grouped.transform(np.mean)
    tm.assert_index_equal(result.index, tsframe.index)
    for _, gp in grouped:
        agged = gp.mean()
        res = result.reindex(gp.index)
        for col in tsframe:
            assert_fp_equal(res[col], agged[col])

    # group columns
    grouped = tsframe.groupby({"A": 0, "B": 0, "C": 1, "D": 1}, axis=1)
    result = grouped.transform(np.mean)
    tm.assert_index_equal(result.index, tsframe.index)
    tm.assert_index_equal(result.columns, tsframe.columns)
    for _, gp in grouped:
        agged = gp.mean(1)
        res = result.reindex(columns=gp.columns)
        for idx in gp.index:
            assert_fp_equal(res.xs(idx), agged[idx])
def test_transform_axis_1(request, transformation_func):
    """axis=1 transform matches transposed axis=0 transform (GH 36308)."""
    # GH 36308
    if transformation_func == "tshift":
        request.node.add_marker(pytest.mark.xfail(reason="tshift is deprecated"))
    args = ("ffill",) if transformation_func == "fillna" else ()

    df = DataFrame({"a": [1, 2], "b": [3, 4], "c": [5, 6]}, index=["x", "y"])
    result = df.groupby([0, 0, 1], axis=1).transform(transformation_func, *args)
    expected = df.T.groupby([0, 0, 1]).transform(transformation_func, *args).T

    if transformation_func == "diff":
        # Result contains nans, so transpose coerces to float
        expected["b"] = expected["b"].astype("int64")

    # cumcount returns Series; the rest are DataFrame
    tm.assert_equal(result, expected)
def test_transform_axis_ts(tsframe):
    """Axes are set correctly on axis=0/1 with monotonic and non-monotonic
    indexes (GH 12713)."""
    # make sure that we are setting the axes
    # correctly when on axis=0 or 1
    # in the presence of a non-monotonic indexer
    # GH12713

    base = tsframe.iloc[0:5]
    r = len(base.index)
    c = len(base.columns)
    tso = DataFrame(
        np.random.randn(r, c), index=base.index, columns=base.columns, dtype="float64"
    )
    # monotonic
    ts = tso
    grouped = ts.groupby(lambda x: x.weekday())
    result = ts - grouped.transform("mean")
    expected = grouped.apply(lambda x: x - x.mean())
    tm.assert_frame_equal(result, expected)

    ts = ts.T
    grouped = ts.groupby(lambda x: x.weekday(), axis=1)
    result = ts - grouped.transform("mean")
    expected = grouped.apply(lambda x: (x.T - x.mean(1)).T)
    tm.assert_frame_equal(result, expected)

    # non-monotonic
    ts = tso.iloc[[1, 0] + list(range(2, len(base)))]
    grouped = ts.groupby(lambda x: x.weekday())
    result = ts - grouped.transform("mean")
    expected = grouped.apply(lambda x: x - x.mean())
    tm.assert_frame_equal(result, expected)

    ts = ts.T
    grouped = ts.groupby(lambda x: x.weekday(), axis=1)
    result = ts - grouped.transform("mean")
    expected = grouped.apply(lambda x: (x.T - x.mean(1)).T)
    tm.assert_frame_equal(result, expected)
def test_transform_dtype():
    """Transform preserves/upgrades dtype of the output (GH 9807)."""
    # GH 9807
    # Check transform dtype output is preserved
    df = DataFrame([[1, 3], [2, 3]])
    result = df.groupby(1).transform("mean")
    expected = DataFrame([[1.5], [1.5]])
    tm.assert_frame_equal(result, expected)
def test_transform_bug():
    """Rank transform on a frame grouped by a datetime column (GH 5712)."""
    # GH 5712
    # transforming on a datetime column
    df = DataFrame({"A": Timestamp("20130101"), "B": np.arange(5)})
    result = df.groupby("A")["B"].transform(lambda x: x.rank(ascending=False))
    expected = Series(np.arange(5, 0, step=-1), name="B")
    tm.assert_series_equal(result, expected)
def test_transform_numeric_to_boolean():
    """Transforming numeric columns to booleans is consistent for float and
    int inputs (GH 16875)."""
    # GH 16875
    # inconsistency in transforming boolean values
    expected = Series([True, True], name="A")

    df = DataFrame({"A": [1.1, 2.2], "B": [1, 2]})
    result = df.groupby("B").A.transform(lambda x: True)
    tm.assert_series_equal(result, expected)

    df = DataFrame({"A": [1, 2], "B": [1, 2]})
    result = df.groupby("B").A.transform(lambda x: True)
    tm.assert_series_equal(result, expected)
def test_transform_datetime_to_timedelta():
    """Datetime arithmetic inside transform may keep datetime or yield
    timedelta results (GH 15429)."""
    # GH 15429
    # transforming a datetime to timedelta
    df = DataFrame({"A": Timestamp("20130101"), "B": np.arange(5)})
    expected = Series([Timestamp("20130101") - Timestamp("20130101")] * 5, name="A")

    # this does date math without changing result type in transform
    base_time = df["A"][0]
    result = (
        df.groupby("A")["A"].transform(lambda x: x.max() - x.min() + base_time)
        - base_time
    )
    tm.assert_series_equal(result, expected)

    # this does date math and causes the transform to return timedelta
    result = df.groupby("A")["A"].transform(lambda x: x.max() - x.min())
    tm.assert_series_equal(result, expected)
def test_transform_datetime_to_numeric():
    """Datetime columns can be transformed to float or int results (GH 10972)."""
    # GH 10972
    # convert dt to float
    df = DataFrame({"a": 1, "b": date_range("2015-01-01", periods=2, freq="D")})
    result = df.groupby("a").b.transform(
        lambda x: x.dt.dayofweek - x.dt.dayofweek.mean()
    )

    expected = Series([-0.5, 0.5], name="b")
    tm.assert_series_equal(result, expected)

    # convert dt to int
    df = DataFrame({"a": 1, "b": date_range("2015-01-01", periods=2, freq="D")})
    result = df.groupby("a").b.transform(
        lambda x: x.dt.dayofweek - x.dt.dayofweek.min()
    )

    expected = Series([0, 1], name="b")
    tm.assert_series_equal(result, expected)
def test_transform_casting():
    """diff() on a grouped datetime column yields timedelta64 for both
    SeriesGroupBy and DataFrameGroupBy (GH 13046)."""
    # 13046
    data = """
    idx     A         ID3              DATETIME
    0   B-028  b76cd912ff "2014-10-08 13:43:27"
    1   B-054  4a57ed0b02 "2014-10-08 14:26:19"
    2   B-076  1a682034f8 "2014-10-08 14:29:01"
    3   B-023  b76cd912ff "2014-10-08 18:39:34"
    4   B-023  f88g8d7sds "2014-10-08 18:40:18"
    5   B-033  b76cd912ff "2014-10-08 18:44:30"
    6   B-032  b76cd912ff "2014-10-08 18:46:00"
    7   B-037  b76cd912ff "2014-10-08 18:52:15"
    8   B-046  db959faf02 "2014-10-08 18:59:59"
    9   B-053  b76cd912ff "2014-10-08 19:17:48"
    10  B-065  b76cd912ff "2014-10-08 19:21:38"
    """
    df = pd.read_csv(
        StringIO(data), sep=r"\s+", index_col=[0], parse_dates=["DATETIME"]
    )

    result = df.groupby("ID3")["DATETIME"].transform(lambda x: x.diff())
    assert is_timedelta64_dtype(result.dtype)

    result = df[["ID3", "DATETIME"]].groupby("ID3").transform(lambda x: x.diff())
    assert is_timedelta64_dtype(result.DATETIME.dtype)
def test_transform_multiple(ts):
    """Smoke test: transform works when grouping by multiple key functions."""
    grouped = ts.groupby([lambda x: x.year, lambda x: x.month])

    grouped.transform(lambda x: x * 2)
    grouped.transform(np.mean)
def test_dispatch_transform(tsframe):
    """Method dispatch (grouped.fillna) matches an explicit transform call."""
    df = tsframe[::5].reindex(tsframe.index)

    grouped = df.groupby(lambda x: x.month)

    filled = grouped.fillna(method="pad")
    fillit = lambda x: x.fillna(method="pad")
    expected = df.groupby(lambda x: x.month).transform(fillit)
    tm.assert_frame_equal(filled, expected)
def test_transform_transformation_func(request, transformation_func):
    """Each named transformation function matches applying the equivalent
    method per group (GH 30918)."""
    # GH 30918
    df = DataFrame(
        {
            "A": ["foo", "foo", "foo", "foo", "bar", "bar", "baz"],
            "B": [1, 2, np.nan, 3, 3, np.nan, 4],
        },
        index=pd.date_range("2020-01-01", "2020-01-07"),
    )

    if transformation_func == "cumcount":
        test_op = lambda x: x.transform("cumcount")
        mock_op = lambda x: Series(range(len(x)), x.index)
    elif transformation_func == "fillna":
        test_op = lambda x: x.transform("fillna", value=0)
        mock_op = lambda x: x.fillna(value=0)
    elif transformation_func == "tshift":
        msg = (
            "Current behavior of groupby.tshift is inconsistent with other "
            "transformations. See GH34452 for more details"
        )
        request.node.add_marker(pytest.mark.xfail(reason=msg))
    else:
        test_op = lambda x: x.transform(transformation_func)
        mock_op = lambda x: getattr(x, transformation_func)()

    result = test_op(df.groupby("A"))
    groups = [df[["B"]].iloc[:4], df[["B"]].iloc[4:6], df[["B"]].iloc[6:]]
    expected = concat([mock_op(g) for g in groups])

    if transformation_func == "cumcount":
        tm.assert_series_equal(result, expected)
    else:
        tm.assert_frame_equal(result, expected)
def test_transform_select_columns(df):
    """Transform on a column selection matches transforming the selection."""
    f = lambda x: x.mean()
    result = df.groupby("A")[["C", "D"]].transform(f)

    selection = df[["C", "D"]]
    expected = selection.groupby(df["A"]).transform(f)

    tm.assert_frame_equal(result, expected)
def test_transform_exclude_nuisance(df):
    """Non-numeric (nuisance) columns are excluded from np.mean transform,
    and Series/frame orderings agree."""
    # this also tests orderings in transform between
    # series/frame to make sure it's consistent
    expected = {}
    grouped = df.groupby("A")
    expected["C"] = grouped["C"].transform(np.mean)
    expected["D"] = grouped["D"].transform(np.mean)
    expected = DataFrame(expected)
    result = df.groupby("A").transform(np.mean)

    tm.assert_frame_equal(result, expected)
def test_transform_function_aliases(df):
    """String aliases ('mean') are equivalent to the numpy callables."""
    result = df.groupby("A").transform("mean")
    expected = df.groupby("A").transform(np.mean)
    tm.assert_frame_equal(result, expected)

    result = df.groupby("A")["C"].transform("mean")
    expected = df.groupby("A")["C"].transform(np.mean)
    tm.assert_series_equal(result, expected)
def test_series_fast_transform_date():
    """Fast-path 'first' transform on datetimes with a NaN group key
    (GH 13191): rows in the NaN group stay NaT."""
    # GH 13191
    df = DataFrame(
        {"grouping": [np.nan, 1, 1, 3], "d": pd.date_range("2014-1-1", "2014-1-4")}
    )
    result = df.groupby("grouping")["d"].transform("first")
    dates = [
        pd.NaT,
        Timestamp("2014-1-2"),
        Timestamp("2014-1-2"),
        Timestamp("2014-1-4"),
    ]
    expected = Series(dates, name="d")
    tm.assert_series_equal(result, expected)
def test_transform_length():
    """Transform output length equals input length regardless of NaNs or
    builtin vs nan-aware sum (GH 9697)."""
    # GH 9697
    df = DataFrame({"col1": [1, 1, 2, 2], "col2": [1, 2, 3, np.nan]})
    expected = Series([3.0] * 4)

    def nsum(x):
        return np.nansum(x)

    results = [
        df.groupby("col1").transform(sum)["col2"],
        df.groupby("col1")["col2"].transform(sum),
        df.groupby("col1").transform(nsum)["col2"],
        df.groupby("col1")["col2"].transform(nsum),
    ]
    for result in results:
        tm.assert_series_equal(result, expected, check_names=False)
def test_transform_coercion():
    """Lambda-wrapped np.mean is not coerced differently from np.mean (GH 14457)."""
    # 14457
    # when we are transforming be sure to not coerce
    # via assignment
    df = DataFrame({"A": ["a", "a"], "B": [0, 1]})
    g = df.groupby("A")

    expected = g.transform(np.mean)
    result = g.transform(lambda x: np.mean(x))
    tm.assert_frame_equal(result, expected)
def test_groupby_transform_with_int():
    """Item-by-item transform may upcast int columns to float, and downcast
    where the result is integral (GH 3740)."""
    # GH 3740, make sure that we might upcast on item-by-item transform

    # floats
    df = DataFrame(
        {
            "A": [1, 1, 1, 2, 2, 2],
            "B": Series(1, dtype="float64"),
            "C": Series([1, 2, 3, 1, 2, 3], dtype="float64"),
            "D": "foo",
        }
    )
    with np.errstate(all="ignore"):
        result = df.groupby("A").transform(lambda x: (x - x.mean()) / x.std())
    expected = DataFrame(
        {"B": np.nan, "C": Series([-1, 0, 1, -1, 0, 1], dtype="float64")}
    )
    tm.assert_frame_equal(result, expected)

    # int case
    df = DataFrame(
        {
            "A": [1, 1, 1, 2, 2, 2],
            "B": 1,
            "C": [1, 2, 3, 1, 2, 3],
            "D": "foo",
        }
    )
    with np.errstate(all="ignore"):
        result = df.groupby("A").transform(lambda x: (x - x.mean()) / x.std())
    expected = DataFrame({"B": np.nan, "C": [-1, 0, 1, -1, 0, 1]})
    tm.assert_frame_equal(result, expected)

    # int that needs float conversion
    s = Series([2, 3, 4, 10, 5, -1])
    df = DataFrame({"A": [1, 1, 1, 2, 2, 2], "B": 1, "C": s, "D": "foo"})
    with np.errstate(all="ignore"):
        result = df.groupby("A").transform(lambda x: (x - x.mean()) / x.std())

    s1 = s.iloc[0:3]
    s1 = (s1 - s1.mean()) / s1.std()
    s2 = s.iloc[3:6]
    s2 = (s2 - s2.mean()) / s2.std()
    expected = DataFrame({"B": np.nan, "C": concat([s1, s2])})
    tm.assert_frame_equal(result, expected)

    # int downcasting
    result = df.groupby("A").transform(lambda x: x * 2 / 2)
    expected = DataFrame({"B": 1, "C": [2, 3, 4, 10, 5, -1]})
    tm.assert_frame_equal(result, expected)
def test_groupby_transform_with_nan_group():
    # GH 9941: rows whose group key is NaN receive NaN from transform
    # rather than being dropped from the result.
    keys = [1, 1, 2, 3, np.nan, 4, 4, 5, 5, 5]
    frame = DataFrame({"a": range(10), "b": keys})
    result = frame.groupby(frame.b)["a"].transform(max)
    expected = Series(
        [1.0, 1.0, 2.0, 3.0, np.nan, 6.0, 6.0, 9.0, 9.0, 9.0], name="a"
    )
    tm.assert_series_equal(result, expected)
def test_transform_mixed_type():
    # apply() with a function that both mutates the group and truncates it:
    # the result dtype of the numeric column must survive.
    index = MultiIndex.from_arrays([[0, 0, 0, 1, 1, 1], [1, 2, 3, 1, 2, 3]])
    df = DataFrame(
        {
            "d": [1.0, 1.0, 1.0, 2.0, 2.0, 2.0],
            "c": np.tile(["a", "b", "c"], 2),
            "v": np.arange(1.0, 7.0),
        },
        index=index,
    )
    def f(group):
        # adds a derived column in place, then keeps only the first row
        group["g"] = group["d"] * 2
        return group[:1]
    grouped = df.groupby("c")
    result = grouped.apply(f)
    assert result["d"].dtype == np.float64
    # this is by definition a mutating operation!
    with pd.option_context("mode.chained_assignment", None):
        for key, group in grouped:
            res = f(group)
            tm.assert_frame_equal(res, result.loc[key])
@pytest.mark.parametrize(
    "op, args, targop",
    [
        ("cumprod", (), lambda x: x.cumprod()),
        ("cumsum", (), lambda x: x.cumsum()),
        ("shift", (-1,), lambda x: x.shift(-1)),
        ("shift", (1,), lambda x: x.shift()),
    ],
)
def test_cython_transform_series(op, args, targop):
    # GH 4095
    # The cythonized groupby op (by name or attribute) must match the
    # generic transform of the equivalent Series method (targop).
    s = Series(np.random.randn(1000))
    s_missing = s.copy()
    s_missing.iloc[2:10] = np.nan
    labels = np.random.randint(0, 50, size=1000).astype(float)
    # series
    for data in [s, s_missing]:
        # print(data.head())
        expected = data.groupby(labels).transform(targop)
        tm.assert_series_equal(expected, data.groupby(labels).transform(op, *args))
        tm.assert_series_equal(expected, getattr(data.groupby(labels), op)(*args))
@pytest.mark.parametrize("op", ["cumprod", "cumsum"])
@pytest.mark.parametrize("skipna", [False, True])
@pytest.mark.parametrize(
    "input, exp",
    [
        # When everything is NaN
        ({"key": ["b"] * 10, "value": np.nan}, Series([np.nan] * 10, name="value")),
        # When there is a single NaN
        (
            {"key": ["b"] * 10 + ["a"] * 2, "value": [3] * 3 + [np.nan] + [3] * 8},
            {
                ("cumprod", False): [3.0, 9.0, 27.0] + [np.nan] * 7 + [3.0, 9.0],
                ("cumprod", True): [
                    3.0,
                    9.0,
                    27.0,
                    np.nan,
                    81.0,
                    243.0,
                    729.0,
                    2187.0,
                    6561.0,
                    19683.0,
                    3.0,
                    9.0,
                ],
                ("cumsum", False): [3.0, 6.0, 9.0] + [np.nan] * 7 + [3.0, 6.0],
                ("cumsum", True): [
                    3.0,
                    6.0,
                    9.0,
                    np.nan,
                    12.0,
                    15.0,
                    18.0,
                    21.0,
                    24.0,
                    27.0,
                    3.0,
                    6.0,
                ],
            },
        ),
    ],
)
def test_groupby_cum_skipna(op, skipna, input, exp):
    # skipna=False poisons everything after the first NaN in a group;
    # skipna=True skips the NaN and keeps accumulating.
    df = DataFrame(input)
    result = df.groupby("key")["value"].transform(op, skipna=skipna)
    # exp is either a ready Series (all-NaN case) or a dict keyed by
    # (op, skipna) holding the expected values
    if isinstance(exp, dict):
        expected = exp[(op, skipna)]
    else:
        expected = exp
    expected = Series(expected, name="value")
    tm.assert_series_equal(expected, result)
@pytest.mark.arm_slow
@pytest.mark.parametrize(
    "op, args, targop",
    [
        ("cumprod", (), lambda x: x.cumprod()),
        ("cumsum", (), lambda x: x.cumsum()),
        ("shift", (-1,), lambda x: x.shift(-1)),
        ("shift", (1,), lambda x: x.shift()),
    ],
)
def test_cython_transform_frame(op, args, targop):
    # Frame-level analogue of test_cython_transform_series: the cython
    # transform must match apply(targop) over a mixed-dtype frame.
    s = Series(np.random.randn(1000))
    s_missing = s.copy()
    s_missing.iloc[2:10] = np.nan
    labels = np.random.randint(0, 50, size=1000).astype(float)
    strings = list("qwertyuiopasdfghjklz")
    strings_missing = strings[:]
    strings_missing[5] = np.nan
    df = DataFrame(
        {
            "float": s,
            "float_missing": s_missing,
            "int": [1, 1, 1, 1, 2] * 200,
            "datetime": pd.date_range("1990-1-1", periods=1000),
            "timedelta": pd.timedelta_range(1, freq="s", periods=1000),
            "string": strings * 50,
            "string_missing": strings_missing * 50,
        },
        columns=[
            "float",
            "float_missing",
            "int",
            "datetime",
            "timedelta",
            "string",
            "string_missing",
        ],
    )
    df["cat"] = df["string"].astype("category")
    df2 = df.copy()
    df2.index = pd.MultiIndex.from_product([range(100), range(10)])
    # DataFrame - Single and MultiIndex,
    # group by values, index level, columns
    for df in [df, df2]:
        for gb_target in [
            {"by": labels},
            {"level": 0},
            {"by": "string"},
        ]: # {"by": 'string_missing'}]:
            # {"by": ['int','string']}]:
            gb = df.groupby(**gb_target)
            # allowlisted methods set the selection before applying
            # bit a of hack to make sure the cythonized shift
            # is equivalent to pre 0.17.1 behavior
            if op == "shift":
                gb._set_group_selection()
            if op != "shift" and "int" not in gb_target:
                # numeric apply fastpath promotes dtype so have
                # to apply separately and concat
                i = gb[["int"]].apply(targop)
                f = gb[["float", "float_missing"]].apply(targop)
                expected = pd.concat([f, i], axis=1)
            else:
                expected = gb.apply(targop)
            # column order differs between apply and transform paths
            expected = expected.sort_index(axis=1)
            tm.assert_frame_equal(expected, gb.transform(op, *args).sort_index(axis=1))
            tm.assert_frame_equal(expected, getattr(gb, op)(*args).sort_index(axis=1))
            # individual columns
            for c in df:
                # non-numeric columns must raise for cum* ops (shift works
                # on any dtype)
                if c not in ["float", "int", "float_missing"] and op != "shift":
                    msg = "No numeric types to aggregate"
                    with pytest.raises(DataError, match=msg):
                        gb[c].transform(op)
                    with pytest.raises(DataError, match=msg):
                        getattr(gb[c], op)()
                else:
                    expected = gb[c].apply(targop)
                    expected.name = c
                    tm.assert_series_equal(expected, gb[c].transform(op, *args))
                    tm.assert_series_equal(expected, getattr(gb[c], op)(*args))
def test_transform_with_non_scalar_group():
    # GH 10165
    # transform along axis=1 with a function returning a frame (not a
    # scalar per group) must raise a clear ValueError.
    cols = pd.MultiIndex.from_tuples(
        [
            ("syn", "A"),
            ("mis", "A"),
            ("non", "A"),
            ("syn", "C"),
            ("mis", "C"),
            ("non", "C"),
            ("syn", "T"),
            ("mis", "T"),
            ("non", "T"),
            ("syn", "G"),
            ("mis", "G"),
            ("non", "G"),
        ]
    )
    df = DataFrame(
        np.random.randint(1, 10, (4, 12)), columns=cols, index=["A", "C", "G", "T"]
    )
    msg = "transform must return a scalar value for each group.*"
    with pytest.raises(ValueError, match=msg):
        df.groupby(axis=1, level=1).transform(lambda z: z.div(z.sum(axis=1), axis=0))
@pytest.mark.parametrize(
    "cols,exp,comp_func",
    [
        ("a", Series([1, 1, 1], name="a"), tm.assert_series_equal),
        (
            ["a", "c"],
            DataFrame({"a": [1, 1, 1], "c": [1, 1, 1]}),
            tm.assert_frame_equal,
        ),
    ],
)
@pytest.mark.parametrize("agg_func", ["count", "rank", "size"])
def test_transform_numeric_ret(cols, exp, comp_func, agg_func, request):
    # transform of count/rank/size on a datetime column must return a
    # numeric result (GH 19200)
    if agg_func == "size" and isinstance(cols, list):
        # https://github.com/pytest-dev/pytest/issues/6300
        # workaround to xfail fixture/param permutations
        reason = "'size' transformation not supported with NDFrameGroupy"
        request.node.add_marker(pytest.mark.xfail(reason=reason))
    # GH 19200
    df = DataFrame(
        {"a": pd.date_range("2018-01-01", periods=3), "b": range(3), "c": range(7, 10)}
    )
    result = df.groupby("b")[cols].transform(agg_func)
    if agg_func == "rank":
        # rank always yields float
        exp = exp.astype("float")
    comp_func(result, exp)
@pytest.mark.parametrize("mix_groupings", [True, False])
@pytest.mark.parametrize("as_series", [True, False])
@pytest.mark.parametrize("val1,val2", [("foo", "bar"), (1, 2), (1.0, 2.0)])
@pytest.mark.parametrize(
    "fill_method,limit,exp_vals",
    [
        (
            "ffill",
            None,
            [np.nan, np.nan, "val1", "val1", "val1", "val2", "val2", "val2"],
        ),
        ("ffill", 1, [np.nan, np.nan, "val1", "val1", np.nan, "val2", "val2", np.nan]),
        (
            "bfill",
            None,
            ["val1", "val1", "val1", "val2", "val2", "val2", np.nan, np.nan],
        ),
        ("bfill", 1, [np.nan, "val1", "val1", np.nan, "val2", "val2", np.nan, np.nan]),
    ],
)
def test_group_fill_methods(
    mix_groupings, as_series, val1, val2, fill_method, limit, exp_vals
):
    # exp_vals uses the placeholder strings "val1"/"val2" so the same
    # expectation template works for str, int and float parametrizations
    vals = [np.nan, np.nan, val1, np.nan, np.nan, val2, np.nan, np.nan]
    _exp_vals = list(exp_vals)
    # Overwrite placeholder values
    for index, exp_val in enumerate(_exp_vals):
        if exp_val == "val1":
            _exp_vals[index] = val1
        elif exp_val == "val2":
            _exp_vals[index] = val2
    # Need to modify values and expectations depending on the
    # Series / DataFrame that we ultimately want to generate
    if mix_groupings: # ['a', 'b', 'a, 'b', ...]
        keys = ["a", "b"] * len(vals)
        def interweave(list_obj):
            # duplicate every element in place: [x, y] -> [x, x, y, y]
            temp = []
            for x in list_obj:
                temp.extend([x, x])
            return temp
        _exp_vals = interweave(_exp_vals)
        vals = interweave(vals)
    else: # ['a', 'a', 'a', ... 'b', 'b', 'b']
        keys = ["a"] * len(vals) + ["b"] * len(vals)
        _exp_vals = _exp_vals * 2
        vals = vals * 2
    df = DataFrame({"key": keys, "val": vals})
    if as_series:
        result = getattr(df.groupby("key")["val"], fill_method)(limit=limit)
        exp = Series(_exp_vals, name="val")
        tm.assert_series_equal(result, exp)
    else:
        result = getattr(df.groupby("key"), fill_method)(limit=limit)
        exp = DataFrame({"val": _exp_vals})
        tm.assert_frame_equal(result, exp)
@pytest.mark.parametrize("fill_method", ["ffill", "bfill"])
def test_pad_stable_sorting(fill_method):
    # GH 21207: group filling must preserve the stable row order
    # within each group.
    x = [0] * 20
    y = [np.nan] * 10 + [1] * 10
    if fill_method == "bfill":
        # mirror the data so the filled half comes first
        y = y[::-1]
    df = DataFrame({"x": x, "y": y})
    # keyword form instead of the deprecated positional-axis drop("x", 1)
    expected = df.drop(columns="x")
    result = getattr(df.groupby("x"), fill_method)()
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("test_series", [True, False])
@pytest.mark.parametrize(
    "freq",
    [
        None,
        pytest.param(
            "D",
            marks=pytest.mark.xfail(
                reason="GH#23918 before method uses freq in vectorized approach"
            ),
        ),
    ],
)
@pytest.mark.parametrize("periods", [1, -1])
@pytest.mark.parametrize("fill_method", ["ffill", "bfill", None])
@pytest.mark.parametrize("limit", [None, 1])
def test_pct_change(test_series, freq, periods, fill_method, limit):
    # GH 21200, 21621, 30463
    vals = [3, np.nan, np.nan, np.nan, 1, 2, 4, 10, np.nan, 4]
    keys = ["a", "b"]
    key_v = np.repeat(keys, len(vals))
    df = DataFrame({"key": key_v, "vals": vals * 2})
    # expected: apply the fill first (if any), then the shift-and-divide
    # definition of pct_change on the filled values
    df_g = df
    if fill_method is not None:
        df_g = getattr(df.groupby("key"), fill_method)(limit=limit)
    grp = df_g.groupby(df.key)
    expected = grp["vals"].obj / grp["vals"].shift(periods) - 1
    if test_series:
        result = df.groupby("key")["vals"].pct_change(
            periods=periods, fill_method=fill_method, limit=limit, freq=freq
        )
        tm.assert_series_equal(result, expected)
    else:
        result = df.groupby("key").pct_change(
            periods=periods, fill_method=fill_method, limit=limit, freq=freq
        )
        tm.assert_frame_equal(result, expected.to_frame("vals"))
@pytest.mark.parametrize(
    "func, expected_status",
    [
        ("ffill", ["shrt", "shrt", "lng", np.nan, "shrt", "ntrl", "ntrl"]),
        ("bfill", ["shrt", "lng", "lng", "shrt", "shrt", "ntrl", np.nan]),
    ],
)
def test_ffill_bfill_non_unique_multilevel(func, expected_status):
    # GH 19437
    # fill must work on a MultiIndex with duplicate (date, symbol) labels
    date = pd.to_datetime(
        [
            "2018-01-01",
            "2018-01-01",
            "2018-01-01",
            "2018-01-01",
            "2018-01-02",
            "2018-01-01",
            "2018-01-02",
        ]
    )
    symbol = ["MSFT", "MSFT", "MSFT", "AAPL", "AAPL", "TSLA", "TSLA"]
    status = ["shrt", np.nan, "lng", np.nan, "shrt", "ntrl", np.nan]
    df = DataFrame({"date": date, "symbol": symbol, "status": status})
    df = df.set_index(["date", "symbol"])
    result = getattr(df.groupby("symbol")["status"], func)()
    index = MultiIndex.from_tuples(
        tuples=list(zip(*[date, symbol])), names=["date", "symbol"]
    )
    expected = Series(expected_status, index=index, name="status")
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("func", [np.any, np.all])
def test_any_all_np_func(func):
    # GH 20653
    # np.any/np.all dispatch through transform; the row whose key is NaN
    # stays NaN in the output
    df = DataFrame(
        [["foo", True], [np.nan, True], ["foo", True]], columns=["key", "val"]
    )
    exp = Series([True, np.nan, True], name="val")
    res = df.groupby("key")["val"].transform(func)
    tm.assert_series_equal(res, exp)
def test_groupby_transform_rename():
    # https://github.com/pandas-dev/pandas/issues/23461
    # transform must keep the caller's column labels even when the UDF
    # renames the columns of the frame it returns.
    def demean_rename(x):
        result = x - x.mean()
        if isinstance(x, pd.Series):
            return result
        # BUG FIX: the f prefix was missing, so every column was renamed
        # to the literal string "{c}_demeaned" (colliding for >1 column)
        result = result.rename(columns={c: f"{c}_demeaned" for c in result.columns})
        return result
    df = DataFrame({"group": list("ababa"), "value": [1, 1, 1, 2, 2]})
    expected = DataFrame({"value": [-1.0 / 3, -0.5, -1.0 / 3, 0.5, 2.0 / 3]})
    result = df.groupby("group").transform(demean_rename)
    tm.assert_frame_equal(result, expected)
    result_single = df.groupby("group").value.transform(demean_rename)
    tm.assert_series_equal(result_single, expected["value"])
@pytest.mark.parametrize("func", [min, max, np.min, np.max, "first", "last"])
def test_groupby_transform_timezone_column(func):
    # GH 24198
    # transform of min/max/first/last must preserve the timezone of a
    # tz-aware datetime column
    ts = pd.to_datetime("now", utc=True).tz_convert("Asia/Singapore")
    result = DataFrame({"end_time": [ts], "id": [1]})
    result["max_end_time"] = result.groupby("id").end_time.transform(func)
    expected = DataFrame([[ts, 1, ts]], columns=["end_time", "id", "max_end_time"])
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
    "func, values",
    [
        ("idxmin", ["1/1/2011"] * 2 + ["1/3/2011"] * 7 + ["1/10/2011"]),
        ("idxmax", ["1/2/2011"] * 2 + ["1/9/2011"] * 7 + ["1/10/2011"]),
    ],
)
def test_groupby_transform_with_datetimes(func, values):
    # GH 15306
    # idxmin/idxmax transform on a DatetimeIndex broadcasts the index
    # label (a datetime) of the extreme row across each week group
    dates = pd.date_range("1/1/2011", periods=10, freq="D")
    stocks = DataFrame({"price": np.arange(10.0)}, index=dates)
    stocks["week_id"] = dates.isocalendar().week
    result = stocks.groupby(stocks["week_id"])["price"].transform(func)
    expected = Series(data=pd.to_datetime(values), index=dates, name="price")
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("func", ["cumsum", "cumprod", "cummin", "cummax"])
def test_transform_absent_categories(func):
    # GH 16771
    # cython transforms with more groups than rows
    # (the unobserved category must not break the op or change the output)
    x_vals = [1]
    x_cats = range(2)
    y = [1]
    df = DataFrame({"x": Categorical(x_vals, x_cats), "y": y})
    result = getattr(df.y.groupby(df.x), func)()
    expected = df.y
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("func", ["ffill", "bfill", "shift"])
@pytest.mark.parametrize("key, val", [("level", 0), ("by", Series([0]))])
def test_ffill_not_in_axis(func, key, val):
    # GH 21521
    # grouping by a level or an external Series (not a frame column)
    # must not drop or reorder anything during fill/shift
    df = DataFrame([[np.nan]])
    result = getattr(df.groupby(**{key: val}), func)()
    expected = df
    tm.assert_frame_equal(result, expected)
def test_transform_invalid_name_raises():
    # GH#27486
    # transform(name) must reject both unknown names and real methods
    # that are not valid transformations
    df = DataFrame({"a": [0, 1, 1, 2]})
    g = df.groupby(["a", "b", "b", "c"])
    with pytest.raises(ValueError, match="not a valid function name"):
        g.transform("some_arbitrary_name")
    # method exists on the object, but is not a valid transformation/agg
    assert hasattr(g, "aggregate")  # make sure the method exists
    with pytest.raises(ValueError, match="not a valid function name"):
        g.transform("aggregate")
    # Test SeriesGroupBy
    g = df["a"].groupby(["a", "b", "b", "c"])
    with pytest.raises(ValueError, match="not a valid function name"):
        g.transform("some_arbitrary_name")
@pytest.mark.parametrize(
    "obj",
    [
        DataFrame(
            {"a": [0, 0, 0, 1, 1, 1], "b": range(6)},
            index=["A", "B", "C", "D", "E", "F"],
        ),
        Series([0, 0, 0, 1, 1, 1], index=["A", "B", "C", "D", "E", "F"]),
    ],
)
def test_transform_agg_by_name(request, reduction_func, obj):
    # Every named reduction used via transform must broadcast its result
    # back to the original index (the definition of a transformation).
    func = reduction_func
    g = obj.groupby(np.repeat([0, 1], 3))
    if func == "ngroup":  # GH#27468
        request.node.add_marker(
            pytest.mark.xfail(reason="TODO: g.transform('ngroup') doesn't work")
        )
    if func == "size" and obj.ndim == 2:  # GH#27469
        request.node.add_marker(
            pytest.mark.xfail(reason="TODO: g.transform('size') doesn't work")
        )
    if func == "corrwith" and isinstance(obj, Series):  # GH#32293
        request.node.add_marker(
            pytest.mark.xfail(reason="TODO: implement SeriesGroupBy.corrwith")
        )
    # reductions that require positional arguments
    args = {"nth": [0], "quantile": [0.5], "corrwith": [obj]}.get(func, [])
    result = g.transform(func, *args)
    # this is the *definition* of a transformation
    tm.assert_index_equal(result.index, obj.index)
    if hasattr(obj, "columns"):
        tm.assert_index_equal(result.columns, obj.columns)
    # verify that values were broadcasted across each group
    assert len(set(DataFrame(result).iloc[-3:, -1])) == 1
def test_transform_lambda_with_datetimetz():
    # GH 27496
    # a lambda that localizes naive timestamps per group (using the group
    # key as the timezone name) must round-trip through transform
    df = DataFrame(
        {
            "time": [
                Timestamp("2010-07-15 03:14:45"),
                Timestamp("2010-11-19 18:47:06"),
            ],
            "timezone": ["Etc/GMT+4", "US/Eastern"],
        }
    )
    result = df.groupby(["timezone"])["time"].transform(
        lambda x: x.dt.tz_localize(x.name)
    )
    expected = Series(
        [
            Timestamp("2010-07-15 03:14:45", tz="Etc/GMT+4"),
            Timestamp("2010-11-19 18:47:06", tz="US/Eastern"),
        ],
        name="time",
    )
    tm.assert_series_equal(result, expected)
def test_transform_fastpath_raises():
    # GH#29631 case where fastpath defined in groupby.generic _choose_path
    # raises, but slow_path does not
    df = DataFrame({"A": [1, 1, 2, 2], "B": [1, -1, 1, 2]})
    gb = df.groupby("A")
    def func(grp):
        # we want a function such that func(frame) fails but func.apply(frame)
        # works
        if grp.ndim == 2:
            # Ensure that fast_path fails
            raise NotImplementedError("Don't cross the streams")
        return grp * 2
    # Check that the fastpath raises, see _transform_general
    obj = gb._obj_with_exclusions
    gen = gb.grouper.get_iterator(obj, axis=gb.axis)
    fast_path, slow_path = gb._define_paths(func)
    _, group = next(gen)
    with pytest.raises(NotImplementedError, match="Don't cross the streams"):
        fast_path(group)
    # the public transform must fall back to the slow path and succeed
    result = gb.transform(func)
    expected = DataFrame([2, -2, 2, 4], columns=["B"])
    tm.assert_frame_equal(result, expected)
def test_transform_lambda_indexing():
    # GH 7883
    # transform(lambda x: x.iloc[-1]) broadcasts each group's last row
    # across the whole group on a sorted MultiIndex
    df = DataFrame(
        {
            "A": ["foo", "bar", "foo", "bar", "foo", "flux", "foo", "flux"],
            "B": ["one", "one", "two", "three", "two", "six", "five", "three"],
            "C": range(8),
            "D": range(8),
            "E": range(8),
        }
    )
    df = df.set_index(["A", "B"])
    df = df.sort_index()
    result = df.groupby(level="A").transform(lambda x: x.iloc[-1])
    expected = DataFrame(
        {
            "C": [3, 3, 7, 7, 4, 4, 4, 4],
            "D": [3, 3, 7, 7, 4, 4, 4, 4],
            "E": [3, 3, 7, 7, 4, 4, 4, 4],
        },
        index=MultiIndex.from_tuples(
            [
                ("bar", "one"),
                ("bar", "three"),
                ("flux", "six"),
                ("flux", "three"),
                ("foo", "five"),
                ("foo", "one"),
                ("foo", "two"),
                ("foo", "two"),
            ],
            names=["A", "B"],
        ),
    )
    tm.assert_frame_equal(result, expected)
def test_categorical_and_not_categorical_key(observed):
    # Checks that groupby-transform, when grouping by both a categorical
    # and a non-categorical key, doesn't try to expand the output to include
    # non-observed categories but instead matches the input shape.
    # GH 32494
    df_with_categorical = DataFrame(
        {
            "A": Categorical(["a", "b", "a"], categories=["a", "b", "c"]),
            "B": [1, 2, 3],
            "C": ["a", "b", "a"],
        }
    )
    df_without_categorical = DataFrame(
        {"A": ["a", "b", "a"], "B": [1, 2, 3], "C": ["a", "b", "a"]}
    )
    # DataFrame case
    result = df_with_categorical.groupby(["A", "C"], observed=observed).transform("sum")
    expected = df_without_categorical.groupby(["A", "C"]).transform("sum")
    tm.assert_frame_equal(result, expected)
    # also pin the concrete values, not just equality with the plain path
    expected_explicit = DataFrame({"B": [4, 2, 4]})
    tm.assert_frame_equal(result, expected_explicit)
    # Series case
    result = df_with_categorical.groupby(["A", "C"], observed=observed)["B"].transform(
        "sum"
    )
    expected = df_without_categorical.groupby(["A", "C"])["B"].transform("sum")
    tm.assert_series_equal(result, expected)
    expected_explicit = Series([4, 2, 4], name="B")
    tm.assert_series_equal(result, expected_explicit)
| jreback/pandas | pandas/tests/groupby/transform/test_transform.py | Python | bsd-3-clause | 38,188 |
"""
Commands
Commands describe the input the account can do to the game.
"""
from evennia import Command as BaseCommand
from evennia import default_cmds
from evennia.commands import cmdset
class Command(BaseCommand):
    """
    Inherit from this if you want to create your own command styles
    from scratch. Note that Evennia's default commands inherits from
    MuxCommand instead.
    Note that the class's `__doc__` string (this text) is
    used by Evennia to create the automatic help entry for
    the command, so make sure to document consistently here.
    Each Command implements the following methods, called
    in this order (only func() is actually required):
    - at_pre_command(): If this returns True, execution is aborted.
    - parse(): Should perform any extra parsing needed on self.args
        and store the result on self.
    - func(): Performs the actual work.
    - at_post_command(): Extra actions, often things done after
        every command, like prompts.
    """
    # No overrides: this subclass exists only as a project-local hook
    # point and inherits all behavior from evennia's BaseCommand.
    pass
class CmdNoLimbo(default_cmds.MuxCommand):
    """
    This command is not available in Limbo. Go to the |ySandbox|n to experiment and get the full help text.
    """
    key = "build"
    locks = "cmd:perm(desc) or perm(Builders)"
    help_category = "Building"
    def func(self):
        # BUG FIX: the message used "|ySandbox|" without the |n reset, so
        # the yellow color code stayed open for the rest of the line
        # (the docstring above uses the correct |ySandbox|n form).
        self.caller.msg("Building is not available in Limbo. "
                        "Go to the |ySandbox|n to experiment and get all build commands.")
class CmdTap(BaseCommand):
    """
    Inspect character actions for debug purposes.

    Usage:
        tap <object or #dbref>
        untap

    Monkey-patches the target session's `data_in`/`data_out` methods so
    that every command the target sends and everything the target sees
    is echoed to the caller with a TAP prefix. `untap` restores the
    original (saved) methods. Only one tap can be active per caller.
    """
    key = "tap"
    aliases = ["untap"]
    # superuser-only: this is effectively wiretapping another account
    locks = "cmd:superuser()"
    def parse(self):
        # only whitespace-stripping is needed; the arg is a name/#dbref
        self.args = self.args.strip()
    def func(self):
        caller = self.caller
        if self.cmdname == "untap":
            if caller.ndb.tapped_data:
                # restore the original un-patched session methods
                targetsess, orig_data_in, orig_data_out = caller.ndb.tapped_data
                targetsess.data_in = orig_data_in
                targetsess.data_out = orig_data_out
                caller.msg(f"|rUntapped {targetsess.account.name}.|n")
                del caller.ndb.tapped_data
            else:
                caller.msg("No tap to untap.")
            return
        if not self.args:
            caller.msg("Usage: tap <object or #dbref> or untap")
            return
        if caller.ndb.tapped_data:
            # refuse to stack taps; the saved originals would be lost
            targetsess, _, _ = caller.ndb.tapped_data
            caller.msg(f"|rYou are already tapping {targetsess.account.name}. Untap first.")
            return
        target = caller.search(self.args, global_search=True)
        if not target:
            return
        # NOTE(review): assumes the target has at least one connected
        # session -- this raises IndexError for an offline target; confirm
        # whether that is acceptable for a superuser-only debug command.
        targetsess = target.sessions.get()[0]
        def _patched_data_in(*args, **kwargs):
            # echo the target's inbound command to the tapper, then forward
            # it to the normal input handling
            try:
                text = kwargs["text"][0][0].strip('\n')
            except (IndexError, KeyError, ValueError):
                text = kwargs
            taptxt = f"|wTAP|||g {targetsess.account.name} cmd:>|n '{text}'"
            if text != 'idle':
                caller.msg(taptxt)
            targetsess.sessionhandler.call_inputfuncs(targetsess, **kwargs)
        def _patched_data_out(*args, **kwargs):
            # echo the target's outbound text to the tapper (one TAP prefix
            # per line), then forward it to the real sender
            try:
                text = kwargs["text"]
                if not isinstance(text, str):
                    text = text[0] # a tuple
                text = text.strip("\n")
                text = "|wTAP|||n " + "\n|wTAP|||n ".join(text.split("\n"))
            except (IndexError, KeyError, ValueError):
                text = kwargs
            taptxt = f"|wTAP|||y {targetsess.account.name} sees:|n\n{text}"
            caller.msg(taptxt)
            targetsess.sessionhandler.data_out(targetsess, **kwargs)
        # patch object with custom version
        # (originals are saved on caller.ndb so untap can restore them)
        caller.ndb.tapped_data = (targetsess, targetsess.data_in, targetsess.data_out)
        targetsess.data_in = _patched_data_in
        targetsess.data_out = _patched_data_out
        caller.msg(f"|gStart tapping {targetsess.account.name}...|n")
#------------------------------------------------------------
#
# The default commands inherit from
#
# evennia.commands.default.muxcommand.MuxCommand.
#
# If you want to make sweeping changes to default commands you can
# uncomment this copy of the MuxCommand parent and add
#
# COMMAND_DEFAULT_CLASS = "commands.command.MuxCommand"
#
# to your settings file. Be warned that the default commands expect
# the functionality implemented in the parse() method, so be
# careful with what you change.
#
#------------------------------------------------------------
#from evennia.utils import utils
#class MuxCommand(Command):
# """
# This sets up the basis for a MUX command. The idea
# is that most other Mux-related commands should just
# inherit from this and don't have to implement much
# parsing of their own unless they do something particularly
# advanced.
#
# Note that the class's __doc__ string (this text) is
# used by Evennia to create the automatic help entry for
# the command, so make sure to document consistently here.
# """
# def has_perm(self, srcobj):
# """
# This is called by the cmdhandler to determine
# if srcobj is allowed to execute this command.
# We just show it here for completeness - we
# are satisfied using the default check in Command.
# """
# return super(MuxCommand, self).has_perm(srcobj)
#
# def at_pre_cmd(self):
# """
# This hook is called before self.parse() on all commands
# """
# pass
#
# def at_post_cmd(self):
# """
# This hook is called after the command has finished executing
# (after self.func()).
# """
# pass
#
# def parse(self):
# """
# This method is called by the cmdhandler once the command name
# has been identified. It creates a new set of member variables
# that can be later accessed from self.func() (see below)
#
# The following variables are available for our use when entering this
# method (from the command definition, and assigned on the fly by the
# cmdhandler):
# self.key - the name of this command ('look')
# self.aliases - the aliases of this cmd ('l')
# self.permissions - permission string for this command
# self.help_category - overall category of command
#
# self.caller - the object calling this command
# self.cmdstring - the actual command name used to call this
# (this allows you to know which alias was used,
# for example)
# self.args - the raw input; everything following self.cmdstring.
# self.cmdset - the cmdset from which this command was picked. Not
# often used (useful for commands like 'help' or to
# list all available commands etc)
# self.obj - the object on which this command was defined. It is often
# the same as self.caller.
#
# A MUX command has the following possible syntax:
#
# name[ with several words][/switch[/switch..]] arg1[,arg2,...] [[=|,] arg[,..]]
#
# The 'name[ with several words]' part is already dealt with by the
# cmdhandler at this point, and stored in self.cmdname (we don't use
# it here). The rest of the command is stored in self.args, which can
# start with the switch indicator /.
#
# This parser breaks self.args into its constituents and stores them in the
# following variables:
# self.switches = [list of /switches (without the /)]
# self.raw = This is the raw argument input, including switches
# self.args = This is re-defined to be everything *except* the switches
# self.lhs = Everything to the left of = (lhs:'left-hand side'). If
# no = is found, this is identical to self.args.
# self.rhs: Everything to the right of = (rhs:'right-hand side').
# If no '=' is found, this is None.
# self.lhslist - [self.lhs split into a list by comma]
# self.rhslist - [list of self.rhs split into a list by comma]
# self.arglist = [list of space-separated args (stripped, including '=' if it exists)]
#
# All args and list members are stripped of excess whitespace around the
# strings, but case is preserved.
# """
# raw = self.args
# args = raw.strip()
#
# # split out switches
# switches = []
# if args and len(args) > 1 and args[0] == "/":
# # we have a switch, or a set of switches. These end with a space.
# switches = args[1:].split(None, 1)
# if len(switches) > 1:
# switches, args = switches
# switches = switches.split('/')
# else:
# args = ""
# switches = switches[0].split('/')
# arglist = [arg.strip() for arg in args.split()]
#
# # check for arg1, arg2, ... = argA, argB, ... constructs
# lhs, rhs = args, None
# lhslist, rhslist = [arg.strip() for arg in args.split(',')], []
# if args and '=' in args:
# lhs, rhs = [arg.strip() for arg in args.split('=', 1)]
# lhslist = [arg.strip() for arg in lhs.split(',')]
# rhslist = [arg.strip() for arg in rhs.split(',')]
#
# # save to object properties:
# self.raw = raw
# self.switches = switches
# self.args = args.strip()
# self.arglist = arglist
# self.lhs = lhs
# self.lhslist = lhslist
# self.rhs = rhs
# self.rhslist = rhslist
#
# # if the class has the account_caller property set on itself, we make
# # sure that self.caller is always the account if possible. We also create
# # a special property "character" for the puppeted object, if any. This
# # is convenient for commands defined on the Account only.
# if hasattr(self, "account_caller") and self.account_caller:
# if utils.inherits_from(self.caller, "evennia.objects.objects.DefaultObject"):
# # caller is an Object/Character
# self.character = self.caller
# self.caller = self.caller.account
# elif utils.inherits_from(self.caller, "evennia.accounts.accounts.DefaultAccount"):
# # caller was already an Account
# self.character = self.caller.get_puppet(self.session)
# else:
# self.character = None
#
| evennia/evdemo | evdemo/commands/command.py | Python | bsd-3-clause | 10,623 |
<?php
namespace Book\Controller;
use Doctrine\Common\Collections\ArrayCollection as ArrayCollection;
use DoctrineModule\Paginator\Adapter\Collection as Collection;
use Zend\Paginator\Paginator;
use Zend\Mvc\Controller\AbstractActionController;
use Zend\View\Model\ViewModel;
use Zend\Session\Container;
use Book\Form\BookForm as BookForm;
use Book\Form\BookFormValidator as BookFormValidator;
class BookController extends AbstractActionController
{
    /**
     * Lists books with pagination.
     *
     * NOTE(review): both branches of the old if/else reset the session
     * search filter to '', so the filter was effectively always cleared
     * here; kept as a single assignment with identical behavior.
     */
    public function indexAction()
    {
        $routeMatch = $this->getEvent()->getRouteMatch();
        $lang = $routeMatch->getParam('lang', 'en');
        $page = $routeMatch->getParam('page', 1);
        $itemCountPerPage = 8;

        $session = new Container('POSBookIndex');
        $session->searchFilter = '';

        $books = $this->posDatabasePlugin()->getBooks($session->searchFilter);
        $collection = new ArrayCollection($books);
        $paginator = new Paginator(new Collection($collection));
        $paginator->setCurrentPageNumber($page);
        $paginator->setItemCountPerPage($itemCountPerPage);

        return new ViewModel(array(
            'lang' => $lang,
            'page' => $page,
            'itemCountPerPage' => $itemCountPerPage,
            'paginator' => $paginator,
            'messages' => $this->flashMessenger()->getMessages(),
            'utilityPlugin' => $this->utilityPlugin()
        ));
    }

    /**
     * Shows the creation form and saves a new book on valid POST.
     */
    public function newAction()
    {
        $routeMatch = $this->getEvent()->getRouteMatch();
        $lang = $routeMatch->getParam('lang', 'en');
        // kept for parity with the original code although it is unused here
        $session = new Container('POSBookEdit');
        $bookForm = new BookForm();
        $request = $this->getRequest();
        if ($request->isPost()) {
            $bookFormValidator = new BookFormValidator();
            $bookForm->setInputFilter($bookFormValidator->getInputFilter());
            $bookForm->setData($request->getPost());
            if ($bookForm->isValid()) {
                $name = $request->getPost('name');
                $stock = $request->getPost('stock');
                $price = $request->getPost('price');
                $this->posDatabasePlugin()->saveBook($name, $stock, $price);
                $this->flashMessenger()->addMessage('New Book Has Been Added !');
                return $this->redirect()->toRoute('book', array('lang' => $lang, 'action' => 'index'));
            }
        }
        return new ViewModel(array(
            'bookForm' => $bookForm,
            'lang' => $lang
        ));
    }

    /**
     * Edits the book previously selected by prepareEditAction().
     */
    public function editAction()
    {
        $routeMatch = $this->getEvent()->getRouteMatch();
        $lang = $routeMatch->getParam('lang', 'en');
        $session = new Container('POSBookEdit');
        if (!isset($session->book)) {
            $this->flashMessenger()->addMessage('Book with the ID specified not found !');
            // BUG FIX: this redirect used $this->lang (an undefined
            // property); the local $lang from the route is what was meant.
            return $this->redirect()->toRoute('book', array('lang' => $lang, 'action' => 'index'));
        }
        $bookForm = new BookForm();
        $request = $this->getRequest();
        if ($request->isPost()) {
            $bookFormValidator = new BookFormValidator();
            $bookForm->setInputFilter($bookFormValidator->getInputFilter());
            $bookForm->setData($request->getPost());
            if ($bookForm->isValid()) {
                $this->posDatabasePlugin()->updateBook($session->book['id'], $request->getPost('name'), $request->getPost('stock'), $request->getPost('price'));
                $this->flashMessenger()->addMessage('Book Data Has Been Updated !');
                return $this->redirect()->toRoute('book', array('lang' => $lang, 'action' => 'index'));
            }
        } else {
            // pre-fill the form with the stored book data on GET
            $bookForm->get('name')->setValue($session->book['name']);
            $bookForm->get('stock')->setValue($session->book['stock']);
            $bookForm->get('price')->setValue($session->book['price']);
        }
        return new ViewModel(array(
            'bookForm' => $bookForm,
            'lang' => $lang
        ));
    }

    /**
     * Loads the book for the given route id into the session, then
     * redirects to the edit form.
     */
    public function prepareEditAction()
    {
        $routeMatch = $this->getEvent()->getRouteMatch();
        $id = $routeMatch->getParam('id', null);
        $lang = $routeMatch->getParam('lang', 'en');
        $session = new Container('POSBookEdit');
        $session->book = array();
        if ($id === null) {
            $this->flashMessenger()->addMessage('Book with the ID specified not found !');
            return $this->redirect()->toRoute('book', array('lang' => $lang, 'action' => 'index'));
        }
        $session->book = $this->posDatabasePlugin()->getBook($id);
        return $this->redirect()->toRoute('edit_entry', array('lang' => $lang, 'action' => 'edit'));
    }

    /**
     * Placeholder: deletion is not implemented yet.
     */
    public function deleteAction()
    {
        return new ViewModel(array(
        ));
    }
}
?> | frznkyo/Ujian | module/Book/src/Book/Controller/BookController.php | PHP | bsd-3-clause | 4,532 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Auto-generated migration: drops LetterFile and adds name/timestamp
    # fields to ContentTemplate/Logo and tightens LetterText.barcode.
    # The hard-coded datetime defaults are the one-off values entered when
    # makemigrations prompted for a default on existing rows; they are not
    # used after this migration runs (auto_now_add takes over).
    dependencies = [
        ('letters', '0011_auto_20150215_1830'),
    ]
    operations = [
        migrations.DeleteModel(
            name='LetterFile',
        ),
        migrations.AddField(
            model_name='contenttemplate',
            name='template_name',
            field=models.CharField(default='Test template', max_length=100),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='logo',
            name='created',
            field=models.DateTimeField(default=datetime.datetime(2015, 2, 15, 22, 22, 25, 364812, tzinfo=utc), auto_now_add=True),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='logo',
            name='end_time',
            field=models.DateTimeField(default=datetime.datetime(2015, 2, 15, 22, 22, 33, 653083, tzinfo=utc), auto_now_add=True),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='logo',
            name='name',
            field=models.CharField(default='Test logo', max_length=100),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='logo',
            name='start_time',
            field=models.DateTimeField(default=datetime.datetime(2015, 2, 15, 22, 22, 52, 14459, tzinfo=utc), auto_now_add=True),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='lettertext',
            name='barcode',
            field=models.CharField(default='abc123', max_length=100),
            preserve_default=False,
        ),
    ]
| garry-cairns/correspondence | api/correspondence/letters/migrations/0012_auto_20150215_2223.py | Python | bsd-3-clause | 1,825 |
<?php
// Yii2 view for the Oncall index page: breadcrumb/title setup, a hidden
// Highcharts widget (rendered only to register its JS assets), and a
// Pjax-wrapped GridView over the provided data provider.
use yii\helpers\Html;
use yii\grid\GridView;
use yii\widgets\Pjax;
/* @var $this yii\web\View */
/* @var $searchModel common\models\OncallSearch */
/* @var $dataProvider yii\data\ActiveDataProvider */
use miloschuman\highcharts\Highcharts;
$this->title = 'Oncalls';
$this->params['breadcrumbs'][] = $this->title;
?>
<div style='display: none'>
<?= Highcharts::widget([
    // no chart config here on purpose: the widget is hidden and exists
    // only so these Highcharts script modules are loaded on the page
    'scripts' => [
        'highcharts-more',
        'themes/grid-grid',
        'modules/exporting',
        'modules/solid-gauge',
        'modules/drilldown',
    ]
]);
?>
</div>
<div class="oncall-index">
    <h1><?= Html::encode($this->title) ?></h1>
    <?php // echo $this->render('_search', ['model' => $searchModel]); ?>
    <p>
        <?= Html::a('Create Oncall', ['create'], ['class' => 'btn btn-success']) ?>
    </p>
<?php Pjax::begin(); ?>    <?= GridView::widget([
        'dataProvider' => $dataProvider,
        'filterModel' => $searchModel,
        'columns' => [
            ['class' => 'yii\grid\SerialColumn'],
            'id',
            'title',
            'detail:ntext',
            'docs:ntext',
            'status',
            // 'ref',
            // 'created_date',
            // 'updated_date',
            // 'employee_id',
            ['class' => 'yii\grid\ActionColumn'],
        ],
    ]); ?>
<?php Pjax::end(); ?></div>
<div class="row">
<div class="chart" id="charttt">
<?php
// Build the data series for the polar chart: one value per task status,
// counting how many tickets are currently in that status.
$sqlsprider = "SELECT count(statustask_id) as datasprider from tickets
group by statustask_id";
$datasprider = Yii::$app->db->createCommand($sqlsprider)->queryAll();
$main_data = [];
foreach ($datasprider as $data) {
    // Fix: Highcharts line series expect plain numbers (or {y: n} points).
    // The previous ['sprider' => n] objects could never be plotted.
    $main_data[] = (int) $data['datasprider'];
}
$main = json_encode($main_data);
// Fixes in the script below:
//  - selector changed from '#chartttt' (four t's) to '#charttt' so it
//    actually matches the container div rendered above;
//  - added the missing comma after the yAxis object (it was a JS syntax
//    error, so the chart never rendered at all).
$this->registerJs(" $(function () {
    $('#charttt').highcharts({
        chart: {
            polar: true,
            type: 'line'
        },
        title: {
            text: 'SAP System'
        },
        pane: {
            size: '80%'
        },
        xAxis: {
            categories: ['Sales', 'Marketing', 'Development', 'Customer Support',
                'Information Technology', 'Administration'],
            tickmarkPlacement: 'on',
            lineWidth: 0
        },
        yAxis: {
            gridLineInterpolation: 'polygon',
            lineWidth: 0,
            min: 0
        },
        tooltip: {
            shared: true,
        },
        legend: {
            align: 'right',
            verticalAlign: 'top',
            y: 70,
            layout: 'vertical'
        },
        series: [{
            name: 'Allocated Budget',
            data: $main,
            pointPlacement: 'on'
        }]
    });
});
");
?>
</div>
| reachz/itask | frontend/views/oncall/index.php | PHP | bsd-3-clause | 4,730 |
#include <boost/thread.hpp>
#include <QFileInfo>
#include <QApplication>
#include "image/image.hpp"
#include "boost/program_options.hpp"
#include "mapping/fa_template.hpp"
#include "libs/gzip_interface.hpp"
#include "mapping/atlas.hpp"
namespace po = boost::program_options;
extern fa_template fa_template_imp;
extern std::vector<atlas> atlas_list;
std::string get_fa_template_path(void);
bool atl_load_atlas(std::string atlas_name)
{
std::cout << "loading atlas..." << std::endl;
std::replace(atlas_name.begin(),atlas_name.end(),',',' ');
std::istringstream in(atlas_name);
std::vector<std::string> name_list;
std::copy(std::istream_iterator<std::string>(in),
std::istream_iterator<std::string>(),std::back_inserter(name_list));
for(unsigned int index = 0;index < name_list.size();++index)
{
std::string atlas_path = QCoreApplication::applicationDirPath().toLocal8Bit().begin();
atlas_path += "/atlas/";
atlas_path += name_list[index];
atlas_path += ".nii.gz";
atlas_list.push_back(atlas());
if(!atlas_list.back().load_from_file(atlas_path.c_str()))
{
std::cout << "Cannot load atlas " << atlas_path << std::endl;
return false;
}
std::cout << name_list[index] << " loaded." << std::endl;
atlas_list.back().name = name_list[index];
}
return true;
}
// Compute a voxel-wise mapping from subject space ("from", an FA map) into
// MNI space using the FA template, in two stages:
//   1. affine registration (mutual information), printed to stdout;
//   2. non-linear bfnorm registration of the affinely-resampled image.
// Outputs:
//   mapping   - for every subject voxel with signal (value > 0), its MNI
//               coordinate; other voxels are left default-initialized.
//   out_trans - 4x4 product template_transform * affine (16 floats).
// NOTE: "from" is modified in place (smoothed, background-subtracted,
// normalized); pass a scratch copy if the caller still needs the original.
void atl_get_mapping(image::basic_image<float,3>& from,
                     image::basic_image<float,3>& to,
                     const image::vector<3>& vs,
                     unsigned int factor,
                     unsigned int thread_count,
                     image::basic_image<image::vector<3>,3>& mapping,
                     float* out_trans)
{
    std::cout << "perform image registration..." << std::endl;
    image::affine_transform<3,float> arg;
    // Seed the affine scaling with the ratio of subject voxel size to the
    // template voxel size (diagonal entries 0/5/10 of the 4x4 template
    // transform hold the template spacing, possibly negated).
    arg.scaling[0] = vs[0] / std::fabs(fa_template_imp.tran[0]);
    arg.scaling[1] = vs[1] / std::fabs(fa_template_imp.tran[5]);
    arg.scaling[2] = vs[2] / std::fabs(fa_template_imp.tran[10]);
    image::reg::align_center(from,to,arg);
    // Pre-process the subject image: smooth, remove background (Otsu),
    // clip negatives, and normalize both images to [0,1].
    image::filter::gaussian(from);
    from -= image::segmentation::otsu_threshold(from);
    image::lower_threshold(from,0.0);
    image::normalize(from,1.0);
    image::normalize(to,1.0);
    bool terminated = false;
    std::cout << "perform linear registration..." << std::endl;
    image::reg::linear(from,to,arg,image::reg::affine,image::reg::mutual_information(),terminated);
    // T maps subject -> template; iT is its inverse used for resampling.
    image::transformation_matrix<3,float> T(arg,from.geometry(),to.geometry()),iT(arg,from.geometry(),to.geometry());
    iT.inverse();
    // output linear registration
    float T_buf[16];
    T.save_to_transform(T_buf);
    T_buf[15] = 1.0;
    std::copy(T_buf,T_buf+4,std::ostream_iterator<float>(std::cout," "));
    std::cout << std::endl;
    std::copy(T_buf+4,T_buf+8,std::ostream_iterator<float>(std::cout," "));
    std::cout << std::endl;
    std::copy(T_buf+8,T_buf+12,std::ostream_iterator<float>(std::cout," "));
    std::cout << std::endl;
    // Resample the subject into template space before the non-linear stage.
    image::basic_image<float,3> new_from(to.geometry());
    image::resample(from,new_from,iT);
    std::cout << "perform nonlinear registration..." << std::endl;
    //image::reg::bfnorm(new_from,to,*bnorm_data,*terminated);
    std::cout << "order=" << factor << std::endl;
    std::cout << "thread count=" << thread_count << std::endl;
    // Basis-function grid grows with the normalization order (factor).
    image::reg::bfnorm_mapping<float,3> mni(new_from.geometry(),image::geometry<3>(factor*7,factor*9,factor*7));
    multi_thread_reg(mni,new_from,to,thread_count,terminated);
    mapping.resize(from.geometry());
    for(image::pixel_index<3> index;from.geometry().is_valid(index);index.next(from.geometry()))
        if(from[index.index()] > 0)
        {
            image::vector<3,float> pos;
            T(index,pos);// from -> new_from
            mni(pos,mapping[index.index()]); // new_from -> to
            fa_template_imp.to_mni(mapping[index.index()]);
        }
    // Combine the template transform with the affine for the caller.
    image::matrix::product(fa_template_imp.tran.begin(),T_buf,out_trans,image::dyndim(4,4),image::dyndim(4,4));
}
// Export ROI volumes for every loaded atlas using the voxel->MNI "mapping".
// For each atlas this writes:
//   <file_name>.<atlas>.nii.gz      - one labeled volume with all regions
//   <file_name>.<atlas>.txt         - "label_number label_name" lines
//   <file_name>.<atlas>.<region>.nii.gz - one binary mask per region,
//                                         only when multiple == true.
// If "trans" is given it is stored as the NIfTI transform; otherwise the
// volumes are flipped in x/y to match the expected orientation.
void atl_save_mapping(const std::string& file_name,const image::geometry<3>& geo,
                      const image::basic_image<image::vector<3>,3>& mapping,const float* trans,const float* vs,
                      bool multiple)
{
    for(unsigned int i = 0;i < atlas_list.size();++i)
    {
        std::string base_name = file_name;
        base_name += ".";
        base_name += atlas_list[i].name;
        // Accumulates the label value of every region for the combined volume.
        image::basic_image<short,3> all_roi(geo);
        for(unsigned int j = 0;j < atlas_list[i].get_list().size();++j)
        {
            std::string output = base_name;
            output += ".";
            output += atlas_list[i].get_list()[j];
            output += ".nii.gz";
            image::basic_image<unsigned char,3> roi(geo);
            // Mark every voxel whose MNI coordinate falls inside region j.
            for(unsigned int k = 0;k < mapping.size();++k)
                if (atlas_list[i].is_labeled_as(mapping[k], j))
                {
                    roi[k] = 1;
                    all_roi[k] = atlas_list[i].get_label_at(mapping[k]);
                }
            if(multiple)
            {
                image::io::nifti out;
                out.set_voxel_size(vs);
                if(trans)
                    out.set_image_transformation(trans);
                else
                    image::flip_xy(roi);
                out << roi;
                out.save_to_file(output.c_str());
                std::cout << "save " << output << std::endl;
            }
        }
        {
            // Write the label-number -> region-name lookup table.
            std::string label_name = base_name;
            label_name += ".txt";
            std::ofstream txt_out(label_name.c_str());
            for(unsigned int j = 0;j < atlas_list[i].get_list().size();++j)
                txt_out << atlas_list[i].get_num()[j] << " " << atlas_list[i].get_list()[j] << std::endl;
        }
        base_name += ".nii.gz";
        image::io::nifti out;
        out.set_voxel_size(vs);
        if(trans)
            out.set_image_transformation(trans);
        else
            image::flip_xy(all_roi);
        out << all_roi;
        out.save_to_file(base_name.c_str());
        std::cout << "save " << base_name << std::endl;
    }
}
int atl(int ac, char *av[])
{
po::options_description norm_desc("fiber tracking options");
norm_desc.add_options()
("help", "help message")
("action", po::value<std::string>(), "atl: output atlas")
("source", po::value<std::string>(), "assign the .fib file name")
("order", po::value<int>()->default_value(0), "normalization order (0~3)")
("thread_count", po::value<int>()->default_value(4), "thread count")
("atlas", po::value<std::string>(), "atlas name")
("output", po::value<std::string>()->default_value("multiple"), "output files")
;
if(!ac)
{
std::cout << norm_desc << std::endl;
return 1;
}
po::variables_map vm;
po::store(po::command_line_parser(ac, av).options(norm_desc).run(), vm);
po::notify(vm);
gz_mat_read mat_reader;
std::string file_name = vm["source"].as<std::string>();
std::cout << "loading " << file_name << "..." <<std::endl;
if(!QFileInfo(file_name.c_str()).exists())
{
std::cout << file_name << " does not exist. terminating..." << std::endl;
return 0;
}
if (!mat_reader.load_from_file(file_name.c_str()))
{
std::cout << "Invalid MAT file format" << std::endl;
return 0;
}
unsigned int col,row;
const unsigned short* dim = 0;
const float* vs = 0;
const float* fa0 = 0;
if(!mat_reader.read("dimension",row,col,dim) ||
!mat_reader.read("voxel_size",row,col,vs) ||
!mat_reader.read("fa0",row,col,fa0))
{
std::cout << "Invalid file format" << std::endl;
return 0;
}
image::geometry<3> geo(dim);
if(!fa_template_imp.load_from_file(get_fa_template_path().c_str()) ||
!atl_load_atlas(vm["atlas"].as<std::string>()))
return -1;
const float* trans = 0;
//QSDR
if(mat_reader.read("trans",row,col,trans))
{
std::cout << "Transformation matrix found." << std::endl;
image::basic_image<image::vector<3>,3> mapping(geo);
for(image::pixel_index<3> index;geo.is_valid(index);index.next(geo))
{
image::vector<3,float> pos(index),mni;
image::vector_transformation(pos.begin(),mni.begin(),trans,image::vdim<3>());
mapping[index.index()] = mni;
}
atl_save_mapping(file_name,geo,mapping,trans,vs,vm["output"].as<std::string>() == "multiple");
return 0;
}
image::basic_image<float,3> from(fa0,geo);
image::basic_image<image::vector<3>,3> mapping;
unsigned int factor = vm["order"].as<int>() + 1;
unsigned int thread_count = vm["thread_count"].as<int>();
image::vector<3> vs_(vs);
float out_trans[16];
atl_get_mapping(from,fa_template_imp.I,vs_,factor,thread_count,mapping,out_trans);
atl_save_mapping(file_name,geo,mapping,0,vs,vm["output"].as<std::string>() == "multiple");
}
| sbaete/DSI-Studio-qt5 | cmd/atl.cpp | C++ | bsd-3-clause | 9,073 |
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @emails oncall+react_native
* @format
*/
/* global device, element, by, expect */
const {
openComponentWithLabel,
openExampleWithTitle,
} = require('../e2e-helpers');
// Detox E2E tests for the RNTester Button examples: each case opens one
// example, taps its button(s), and verifies the resulting alert dialog.
describe('Button', () => {
  // Navigate to the Button component screen once; individual tests then
  // scroll to their specific example.
  beforeAll(async () => {
    await device.reloadReactNative();
    await openComponentWithLabel(
      'Button',
      'Button Simple React Native button component.',
    );
  });

  it('Simple button should be tappable', async () => {
    await openExampleWithTitle('Simple Button');
    await element(by.id('simple_button')).tap();
    await expect(element(by.text('Simple has been pressed!'))).toBeVisible();
    // Dismiss the alert so it does not block the next test.
    await element(by.text('OK')).tap();
  });

  it('Adjusted color button should be tappable', async () => {
    await openExampleWithTitle('Adjusted color');
    await element(by.id('purple_button')).tap();
    await expect(element(by.text('Purple has been pressed!'))).toBeVisible();
    await element(by.text('OK')).tap();
  });

  it("Two buttons with JustifyContent:'space-between' should be tappable", async () => {
    await openExampleWithTitle('Fit to text layout');
    await element(by.id('left_button')).tap();
    await expect(element(by.text('Left has been pressed!'))).toBeVisible();
    await element(by.text('OK')).tap();

    await element(by.id('right_button')).tap();
    await expect(element(by.text('Right has been pressed!'))).toBeVisible();
    await element(by.text('OK')).tap();
  });

  // Tapping a disabled button must not fire its onPress handler.
  it('Disabled button should not interact', async () => {
    await openExampleWithTitle('Disabled Button');
    await element(by.id('disabled_button')).tap();
    await expect(
      element(by.text('Disabled has been pressed!')),
    ).toBeNotVisible();
  });
});
| hoangpham95/react-native | packages/rn-tester/e2e/__tests__/Button-test.js | JavaScript | bsd-3-clause | 1,885 |
package verifiers
import (
"context"
"encoding/json"
"fmt"
"net/http"
"go.skia.org/infra/go/gerrit"
"go.skia.org/infra/go/git"
"go.skia.org/infra/go/skerr"
"go.skia.org/infra/skcq/go/footers"
"go.skia.org/infra/skcq/go/types"
tree_status_types "go.skia.org/infra/tree_status/go/types"
)
// NewTreeStatusVerifier returns an instance of TreeStatusVerifier.
func NewTreeStatusVerifier(httpClient *http.Client, treeStatusURL string, footersMap map[string]string) (types.Verifier, error) {
	v := &TreeStatusVerifier{
		httpClient:    httpClient,
		treeStatusURL: treeStatusURL,
		footersMap:    footersMap,
	}
	return v, nil
}
// TreeStatusVerifier implements the types.Verifier interface.
type TreeStatusVerifier struct {
	// httpClient is used to query the tree status endpoint.
	httpClient *http.Client
	// treeStatusURL returns the current tree status as JSON (see TreeStatus).
	treeStatusURL string
	// footersMap holds the CL's parsed commit-message footers.
	footersMap map[string]string
}

// TreeStatus mirrors the JSON document served by the tree status endpoint.
// NOTE(review): the datastore tags suggest this shape is also persisted by
// the tree_status service itself — confirm before changing field names.
type TreeStatus struct {
	Message      string `json:"message" datastore:"message"`
	GeneralState string `json:"general_state" datastore:"general_state,omitempty"`
}

// Name implements the types.Verifier interface.
func (tv *TreeStatusVerifier) Name() string {
	return "TreeStatusVerifier"
}
// Verify implements the types.Verifier interface. It succeeds immediately if
// the CL opts out via the NoTreeChecksFooter, succeeds when the tree is open,
// and otherwise reports a waiting state with the tree's current message.
func (tv *TreeStatusVerifier) Verify(ctx context.Context, ci *gerrit.ChangeInfo, startTime int64) (state types.VerifierState, reason string, err error) {
	// Check to see if NoTreeChecksFooter has been specified.
	noTreeChecks := git.GetBoolFooterVal(tv.footersMap, footers.NoTreeChecksFooter, ci.Issue)
	if noTreeChecks {
		return types.VerifierSuccessState, fmt.Sprintf("Tree check is skipped because \"%s: %t\" has been specified", footers.NoTreeChecksFooter, noTreeChecks), nil
	}

	resp, err := tv.httpClient.Get(tv.treeStatusURL)
	if err != nil {
		return "", "", skerr.Wrapf(err, "Could not get response from %s", tv.treeStatusURL)
	}
	// Fix: the response body was never closed, leaking the underlying
	// connection on every poll.
	defer resp.Body.Close()
	// Fail with a clear error instead of a confusing JSON decode error when
	// the endpoint returns an error page.
	if resp.StatusCode != http.StatusOK {
		return "", "", skerr.Fmt("Got status %q from %s", resp.Status, tv.treeStatusURL)
	}
	var treeStatus TreeStatus
	if err := json.NewDecoder(resp.Body).Decode(&treeStatus); err != nil {
		return "", "", skerr.Wrapf(err, "Could not decode response from %s", tv.treeStatusURL)
	}

	if treeStatus.GeneralState == tree_status_types.OpenState {
		return types.VerifierSuccessState, fmt.Sprintf("Tree is open: \"%s\"", treeStatus.Message), nil
	} else {
		return types.VerifierWaitingState, fmt.Sprintf("Waiting for tree to be open. Tree is currently in %s state: \"%s\"", treeStatus.GeneralState, treeStatus.Message), nil
	}
}
// Cleanup implements the types.Verifier interface. TreeStatusVerifier keeps
// no per-changelist state, so there is nothing to clean up.
// (Receiver renamed cv -> tv for consistency with the other methods, and the
// redundant bare return removed.)
func (tv *TreeStatusVerifier) Cleanup(ctx context.Context, ci *gerrit.ChangeInfo, cleanupPatchsetID int64) {
}
| google/skia-buildbot | skcq/go/verifiers/tree_status_verifier.go | GO | bsd-3-clause | 2,503 |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@magentocommerce.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magentocommerce.com for more information.
*
* @category Mage
* @package Mage_Customer
* @copyright Copyright (c) 2011 Magento Inc. (http://www.magentocommerce.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
$installer = $this;
/* @var $installer Mage_Customer_Model_Entity_Setup */

$installer->startSetup();

// Register the "confirmation" EAV attribute on the customer entity.
// It is hidden from admin forms (visible => false) and optional;
// presumably used by the account email-confirmation flow — verify against
// Mage_Customer before repurposing.
$installer->addAttribute('customer', 'confirmation', array(
    'label' => 'Is confirmed',
    'visible' => false,
    'required' => false,
));

$installer->endSetup();
| 5452/durex | app/code/core/Mage/Customer/sql/customer_setup/mysql4-upgrade-0.8.7-0.8.8.php | PHP | bsd-3-clause | 1,241 |
<?php
/**
* Password
*
* @category
* @package phalconskeleton
* @author Tim Marshall <Tim@CodingBeard.com>
* @copyright (c) 2015, Tim Marshall
* @license New BSD License
*/
namespace CodingBeard\Forms\Fields;
use CodingBeard\Forms\Fields\Field;
/**
 * Password form field: renders via the 'password' Volt template and supports
 * an optional matching "repeat password" input.
 */
class Password extends Field
{

    /**
     * Volt template used to render this field
     * @var string
     */
    public $template = 'password';

    /**
     * Field key (form input name)
     * @var string
     */
    public $key = 'key';

    /**
     * Field Label
     * @var string
     */
    public $label = '';

    /**
     * Field Sublabel (helper text shown under the label)
     * @var string
     */
    public $sublabel = '';

    /**
     * Whether the field is required or not
     * @var bool
     */
    public $required = false;

    /**
     * Regex pattern to match against (false disables pattern validation)
     * @var bool|string
     */
    public $pattern = false;

    /**
     * Classes to be added to the input tag
     * @var string
     */
    public $class = '';

    /**
     * Ratio of element size on a large screen (out of 12 grid columns)
     * @var int
     */
    public $size = 12;

    /**
     * Whether the field needs a matching repeat field
     * @var bool
     */
    public $repeat;

    /**
     * Whether this field is a repeated (confirmation) field
     * @var bool
     */
    public $isRepeat;

    /**
     * Default value for the field
     * @var string
     */
    public $default;

    /**
     * Create a password field. All configuration is passed through to the
     * parent Field constructor.
     * $properties = [
     *   'key' => '',
     *   'label' => '',
     *   'sublabel' => '',
     *   'required' => false,
     *   'pattern' => '',
     *   'class' => '',
     *   'size' => 12,
     *   'repeat' => false,
     *   'default' => ''
     * ]
     * @param array $properties
     */
    public function __construct($properties)
    {
        parent::__construct($properties);
    }

    /**
     * Set the default value for the field.
     * @param string $value
     */
    public function setDefault($value)
    {
        $this->default = $value;
    }

}
| CodingBeard/phalconskeleton | app/plugins/CodingBeard/Forms/Fields/Password.php | PHP | bsd-3-clause | 1,893 |
import {Protocol} from "devtools-protocol"
import CDP = require("chrome-remote-interface")
import cp = require("child_process")
import fs = require("fs")
import path = require("path")
const url = `file://${path.resolve(process.argv[2])}`
interface CallFrame {
name: string
url: string
line: number
col: number
}
interface Err {
text: string
url: string
line: number
col: number
trace: CallFrame[]
}
interface Msg {
level: string
text: string
url: string
line: number
col: number
trace: CallFrame[]
}
// Signals that a test exceeded its time budget; callers distinguish it from
// real failures via `instanceof TimeoutError` (see run_tests).
class TimeoutError extends Error {
  constructor() {
    super("timeout")
  }
}
// Returns a promise that never resolves and rejects with TimeoutError after
// `ms` milliseconds. The timer is unref'd so it doesn't keep the process alive.
function timeout(ms: number): Promise<void> {
  return new Promise<void>((_resolve, reject) => {
    setTimeout(() => {
      reject(new TimeoutError())
    }, ms).unref()
  })
}
type Box = {x: number, y: number, width: number, height: number}
type State = {type: string, bbox?: Box, children?: State[]}

// Render a tree of States as an indented text baseline: one node per line,
// two spaces of indent per level, and an optional " bbox=[x, y, w, h]"
// suffix for nodes that carry a bounding box.
function create_baseline(items: State[]): string {
  const lines: string[] = []
  const walk = (nodes: State[], depth: number): void => {
    const indent = "  ".repeat(depth)
    for (const node of nodes) {
      let line = `${indent}${node.type}`
      if (node.bbox != null) {
        const {x, y, width, height} = node.bbox
        line += ` bbox=[${x}, ${y}, ${width}, ${height}]`
      }
      lines.push(line)
      if (node.children != null)
        walk(node.children, depth + 1)
    }
  }
  walk(items, 0)
  return lines.map((line) => `${line}\n`).join("")
}
// Read the committed (git index) version of `baseline_path`.
// Returns null when the file isn't tracked or git is unavailable.
function load_baseline(baseline_path: string): string | null {
  // Fix: without {encoding: "utf8"} spawnSync returns Buffers, which does not
  // match the declared `string | null` return type (the sibling diff_baseline
  // already passes it); downstream string comparisons then rely on coercion.
  const proc = cp.spawnSync("git", ["show", `:./${baseline_path}`], {encoding: "utf8"})
  return proc.status == 0 ? proc.stdout : null
}
// Return a colorized `git diff` for the baseline file, or null if the
// working copy matches the committed version (exit code 0).
function diff_baseline(baseline_path: string): string | null {
  const proc = cp.spawnSync("git", ["diff", "--color", "--exit-code", baseline_path], {encoding: "utf8"})
  return proc.status == 0 ? null : diff_highlight(proc.stdout)
}

// Pipe a diff through git's contrib diff-highlight perl script for word-level
// highlighting; falls back to the raw diff if perl/the script is unavailable.
// NOTE(review): the script path is Debian/Ubuntu-specific.
function diff_highlight(diff: string): string {
  const hl_path = "/usr/share/doc/git/contrib/diff-highlight/diff-highlight"
  const proc = cp.spawnSync("perl", [hl_path], {input: diff, encoding: "utf8"})
  return proc.status == 0 ? proc.stdout : diff
}
// Drive the in-page test suite over the Chrome DevTools Protocol:
// navigate to `url`, wait for Bokeh, fetch the suite tree exported by the
// page, run every test, and compare each test's serialized state against a
// git-tracked baseline file. Exits non-zero on any failure.
async function run_tests(): Promise<void> {
  let client
  try {
    client = await CDP()
    const {Network, Page, Runtime, Log} = client

    // Console output / errors captured from the page, reset per test.
    let messages: Msg[] = []
    let errors: Err[] = []

    // Convert a CDP stack trace to our 1-based CallFrame representation.
    function collect_trace(stackTrace: Protocol.Runtime.StackTrace): CallFrame[] {
      return stackTrace.callFrames.map(({functionName, url, lineNumber, columnNumber}) => {
        return {name: functionName || "(anonymous)", url, line: lineNumber+1, col: columnNumber+1}
      })
    }

    // Normalize a CDP exception into an Err record.
    function handle_exception(exceptionDetails: Protocol.Runtime.ExceptionDetails): Err {
      const {text, exception, url, lineNumber, columnNumber, stackTrace} = exceptionDetails
      return {
        text: exception != null && exception.description != null ? exception.description : text,
        url: url || "(inline)",
        line: lineNumber+1,
        col: columnNumber+1,
        trace: stackTrace ? collect_trace(stackTrace) : [],
      }
    }

    // Mirror the page's console.* calls into `messages`.
    Runtime.consoleAPICalled(({type, args, stackTrace}) => {
      const text = args.map(({value}) => value ? value.toString() : "").join(" ")

      let msg: Msg
      if (stackTrace != null) {
        const trace = collect_trace(stackTrace)
        const {url, line, col} = trace[0]
        msg = {level: type, text, url, line, col, trace}
      } else
        msg = {level: type, text, url: "(inline)", line: 1, col: 1, trace: []}

      messages.push(msg)
    })

    //Runtime.exceptionThrown(({exceptionDetails}) => errors.push(handle_exception(exceptionDetails))

    // Record network-level errors (failed resource loads) as test errors.
    Log.entryAdded(({entry}) => {
      const {source, level, text, url, lineNumber, stackTrace} = entry
      if (source === "network" && level === "error") {
        errors.push({
          text,
          url: url || "(inline)",
          line: lineNumber != null ? lineNumber+1 : 1,
          col: 1,
          trace: stackTrace != null ? collect_trace(stackTrace) : [],
        })
      }
    })

    // Evaluate an expression in the page; returns null (and records the
    // exception) on failure, else a {value} wrapper so `undefined` results
    // are distinguishable.
    async function evaluate<T>(expression: string): Promise<{value: T} | null> {
      const {result, exceptionDetails} = await Runtime.evaluate({expression, awaitPromise: true}) //, returnByValue: true})

      if (exceptionDetails == null)
        return result.value !== undefined ? {value: result.value} : null
      else {
        errors.push(handle_exception(exceptionDetails))
        return null
      }
    }

    // True once the page has loaded Bokeh.
    // NOTE(review): the result is not awaited in a retry loop below — a slow
    // page could race past this check; confirm load ordering.
    async function is_ready(): Promise<boolean> {
      const expr = "typeof Bokeh !== 'undefined'"
      const result = await evaluate(expr)
      return result != null && result.value === true
    }

    await Network.enable()
    await Runtime.enable()
    await Page.enable()
    await Log.enable()

    await Page.navigate({url})
    await Page.loadEventFired()
    await is_ready()

    const ret = await evaluate<string>("JSON.stringify(exports.top_level)")
    if (ret != null) {
      const top_level = JSON.parse(ret.value) as Suite

      type Suite = {description: string, suites: Suite[], tests: Test[]}
      type Test = {description: string}
      type Result = {state: State, bbox: DOMRect, time: number}

      // Guards against two tests mapping to the same baseline file name.
      const baseline_names = new Set<string>()

      let failures = 0

      // Depth-first traversal: `parents` is the suite chain (for baseline
      // names), `seq` the index path used to address the test in the page.
      async function run({suites, tests}: Suite, parents: Suite[], seq: number[]) {
        for (let i = 0; i < suites.length; i++) {
          console.log(`${"  ".repeat(seq.length)}${suites[i].description}`)
          await run(suites[i], parents.concat(suites[i]), seq.concat(i))
        }

        for (let i = 0; i < tests.length; i++) {
          messages = []
          errors = []
          const prefix = "  ".repeat(seq.length)
          console.log(`${prefix}${tests[i].description}`)
          //const start = Date.now()
          // Race the in-page test against a 5s timeout.
          const x0 = evaluate<string>(`exports.run_test(${JSON.stringify(seq.concat(i))})`)
          const x1 = timeout(5000)
          let output
          try {
            output = await Promise.race([x0, x1])
          } catch(err) {
            if (err instanceof TimeoutError) {
              console.log("timeout")
              continue
            }
            // NOTE(review): non-timeout errors are silently swallowed here,
            // leaving `output` undefined and crashing on the next line —
            // presumably they should be rethrown or reported.
          }
          const result = JSON.parse((output as {value: string}).value) as Result
          //console.log(result)

          //const image = await Page.captureScreenshot({format: "png", clip: {...result.bbox, scale: 1.0}})
          //console.log(image.data.length)

          // Make a description safe for use as a file name.
          function encode(s: string): string {
            return s.replace(/[ \/]/g, "_")
          }

          let failure = false

          if (errors.length != 0) {
            failure = true

            for (const {text} of messages) {
              console.log(`${prefix}${text}`)
            }
            for (const {text} of errors) {
              console.log(`${prefix}${text}`)
            }
          }

          const baseline_name = parents.map((suite) => suite.description).concat(tests[i].description).map(encode).join("__")

          if (baseline_names.has(baseline_name)) {
            console.log(`${prefix}duplicated description`)
            failure = true
          } else {
            baseline_names.add(baseline_name)

            // Write the current state and compare against the committed
            // baseline; a differing or missing baseline is a failure.
            const baseline_path = path.join("test", "baselines", baseline_name)
            const baseline = create_baseline([result.state])
            fs.writeFileSync(baseline_path, baseline)

            const existing = load_baseline(baseline_path)
            if (existing == null) {
              console.log(`${prefix}no baseline`)
              failure = true
            } else if (existing != baseline) {
              const diff = diff_baseline(baseline_path)
              console.log(diff)
              failure = true
            } else {
            }
          }

          if (failure)
            failures++

          /*
          console.log(`${prefix}test run in ${result.time} ms`)
          console.log(`${prefix}total run in ${Date.now() - start} ms`)
          */
        }
      }

      await run(top_level, [], [])

      if (failures != 0)
        process.exit(1)
    }
  } catch (err) {
    console.error(err.message)
    process.exit(1)
  } finally {
    if (client) {
      await client.close()
    }
  }
}
| timsnyder/bokeh | bokehjs/test/devtools.ts | TypeScript | bsd-3-clause | 8,212 |
require 'spec_helper'
describe Spree::Taxjar do
let(:reimbursement) { create(:reimbursement) }
let!(:country) { create(:country) }
let!(:state) { create(:state, country: country, abbr: "TX") }
let!(:zone) { create(:zone, name: "Country Zone", default_tax: true, zone_members: []) }
let!(:ship_address) { create(:ship_address, city: "Adrian", zipcode: "79001", state: state) }
let!(:tax_category) { create(:tax_category, tax_rates: []) }
let!(:order) { create(:order,ship_address_id: ship_address.id) }
let!(:line_item) { create(:line_item, price: 10, quantity: 3, order_id: order.id) }
let!(:state_al) { create(:state, country: country, abbr: "AL") }
let!(:ship_address_al) { create(:ship_address, city: "Adrian", zipcode: "79001", state: state_al) }
let!(:order_al) { create(:order,ship_address_id: ship_address_al.id) }
let!(:line_item_al) { create(:line_item, price: 10, quantity: 3, order_id: order_al.id) }
let!(:shipment_al) { create(:shipment, cost: 10, order: order_al) }
let!(:taxjar_api_key) { Spree::Config[:taxjar_api_key] = '04d828b7374896d7867b03289ea20957' }
let(:client) { double(Taxjar::Client) }
let(:spree_taxjar) { Spree::Taxjar.new(order) }
describe '#has_nexus?' do
context 'nexus_regions is not present' do
it 'should return false' do
VCR.use_cassette "no_nexuses" do
expect(spree_taxjar.has_nexus?).to eq false
end
end
end
context 'nexus_regions is present' do
context 'tax_address is present in nexus regions' do
it 'should return true' do
VCR.use_cassette "has_nexuses" do
expect(spree_taxjar.has_nexus?).to eq true
end
end
end
context 'tax_address is not present in nexus regions' do
before :each do
@spree_taxjar_new = Spree::Taxjar.new(order_al)
end
it 'should return false' do
VCR.use_cassette "has_nexuses" do
expect(@spree_taxjar_new.has_nexus?).to eq false
end
end
end
end
end
context 'When reimbursement is not present' do
before :each do
Spree::Config[:taxjar_api_key] = '04d828b7374896d7867b03289ea20957'
allow(::Taxjar::Client).to receive(:new).with(api_key: Spree::Config[:taxjar_api_key]).and_return(client)
end
let(:spree_taxjar) { Spree::Taxjar.new(order) }
describe '#initialize' do
it 'expects spree_taxjar to set instance variable order' do
expect(spree_taxjar.instance_variable_get(:@order)).to eq order
end
it 'expects spree_taxjar to set instance variable client' do
expect(spree_taxjar.instance_variable_get(:@client)).to eq client
end
it 'expects spree_taxjar to set instance variable reimbursement to nil' do
expect(spree_taxjar.instance_variable_get(:@reimbursement)).to eq nil
end
end
describe '#create_transaction_for_order' do
context 'when has_nexus? returns false' do
before do
allow(spree_taxjar).to receive(:has_nexus?).and_return(false)
end
it 'should return nil' do
expect(spree_taxjar.create_transaction_for_order).to eq nil
end
end
context 'when has_nexus? returns true' do
before do
allow(::Taxjar::Client).to receive(:new).with(api_key: Spree::Config[:taxjar_api_key]).and_return(client)
allow(spree_taxjar).to receive(:has_nexus?).and_return(true)
allow(client).to receive(:create_order).and_return(true)
end
it 'should return create order for the transaction' do
expect(client).to receive(:create_order).and_return(true)
end
after { spree_taxjar.create_transaction_for_order }
end
end
describe '#delete_transaction_for_order' do
context 'when has_nexus? returns false' do
before do
allow(spree_taxjar).to receive(:has_nexus?).and_return(false)
end
it 'should return nil' do
expect(spree_taxjar.delete_transaction_for_order).to eq nil
end
end
context 'when has_nexus? returns true' do
before do
@transaction_parameters = {}
allow(spree_taxjar).to receive(:has_nexus?).and_return(true)
allow(client).to receive(:delete_order).with(order.number).and_return(true)
end
it { expect(spree_taxjar).to receive(:has_nexus?).and_return(true) }
it 'should return create order for the transaction' do
expect(client).to receive(:delete_order).with(order.number).and_return(true)
end
after { spree_taxjar.delete_transaction_for_order }
end
end
describe '#calculate_tax_for_order' do
context 'when has_nexus? returns false' do
before do
allow(spree_taxjar).to receive(:has_nexus?).and_return(false)
end
it 'should return nil' do
expect(spree_taxjar.calculate_tax_for_order).to eq 0
end
end
context 'when has_nexus? returns true' do
before do
allow(spree_taxjar).to receive(:has_nexus?).and_return(true)
allow(client).to receive(:tax_for_order).and_return(true)
end
it { expect(spree_taxjar).to receive(:has_nexus?).and_return(true) }
it 'should return create order for the transaction' do
expect(client).to receive(:tax_for_order).and_return(true)
end
after { spree_taxjar.calculate_tax_for_order }
end
end
end
context 'When reimbursement is present' do
let(:spree_taxjar) { Spree::Taxjar.new(order, reimbursement) }
before do
allow(::Taxjar::Client).to receive(:new).with(api_key: Spree::Config[:taxjar_api_key]).and_return(client)
end
describe '#create_refund_transaction_for_order' do
context 'when has_nexus? returns false' do
before do
allow(spree_taxjar).to receive(:has_nexus?).and_return(false)
end
it 'should return nil' do
expect(spree_taxjar.create_refund_transaction_for_order).to eq nil
end
end
context 'when reimbursement is present' do
before do
allow(spree_taxjar).to receive(:has_nexus?).and_return(true)
allow(spree_taxjar).to receive(:reimbursement_present?).and_return(true)
end
it 'should return nil' do
expect(spree_taxjar.create_refund_transaction_for_order).to eq nil
end
end
context 'when has_nexus? returns true & reimbursement is not present' do
before do
allow(spree_taxjar).to receive(:has_nexus?).and_return(true)
allow(spree_taxjar).to receive(:reimbursement_present?).and_return(false)
allow(client).to receive(:create_refund).with(:refund_params).and_return(true)
end
it 'should return create order for the transaction' do
expect(client).to receive(:create_refund).and_return(true)
end
after { spree_taxjar.create_refund_transaction_for_order }
end
end
end
end
| nimish13/spree_taxjar | spec/models/spree/taxjar_spec.rb | Ruby | bsd-3-clause | 7,073 |
<?php
namespace backend\controllers;
use Yii;
use app\models\Album;
use app\models\AlbumSearch;
use app\models\Gallery;
use app\models\GallerySearch;
use yii\web\Controller;
use yii\web\NotFoundHttpException;
use yii\filters\VerbFilter;
use yii\web\UploadedFile;
use app\models\UploadForm;
/**
* AlbumController implements the CRUD actions for Album model.
*/
class AlbumController extends Controller
{
    /**
     * @inheritdoc
     * Restricts the delete action to POST requests only.
     */
    public function behaviors()
    {
        return [
            'verbs' => [
                'class' => VerbFilter::className(),
                'actions' => [
                    'delete' => ['post'],
                ],
            ],
        ];
    }
    /**
     * Lists all Album models.
     * @return mixed
     */
    public function actionIndex()
    {
        $searchModel = new AlbumSearch();
        $dataProvider = $searchModel->search(Yii::$app->request->queryParams);
        return $this->render('index', [
            'searchModel' => $searchModel,
            'dataProvider' => $dataProvider,
        ]);
    }
    /**
     * Displays a single Album model together with a filtered list of its
     * gallery pictures (GallerySearch is pre-filtered on the album id).
     * @param integer $id
     * @return mixed
     */
    public function actionView($id)
    {
        $searchModel = new GallerySearch();
        $searchModel->id_album=$id;
        $dataProvider = $searchModel->search(Yii::$app->request->queryParams);
        return $this->render('view', [
            'searchModel' => $searchModel,
            'dataProvider' => $dataProvider,
            'album'=>$this->findModel($id),
        ]);
    }
    /**
     * Creates a new Album model.
     * If creation is successful, the browser will be redirected to the 'view' page.
     * An optional cover image is stored under picture/album/.
     * @return mixed
     */
    public function actionCreate()
    {
        $model = new Album();
        if ($model->load(Yii::$app->request->post()) && $model->validate()) {
            // Fetch the uploaded file once; the old code called getInstance()
            // twice, doing the upload lookup a second time just for the check.
            $model->gbr_album = UploadedFile::getInstance($model, 'gbr_album');
            if ($model->gbr_album) {
                $model->gbr_album->saveAs('picture/album/' . $model->gbr_album->baseName . '.' . $model->gbr_album->extension);
            }
            $model->save();
            return $this->redirect(['view', 'id' => $model->id_album]);
        } else {
            return $this->render('create', [
                'model' => $model,
            ]);
        }
    }
    /*
    public function actionAddpicture($id)
    {
        $model = new Gallery();
        $model->id_album=$id;
        if ($model->load(Yii::$app->request->post()) && $model->validate()) {
            $model->gbr_gallery = UploadedFile::getInstance($model, 'gbr_gallery');
            if (UploadedFile::getInstance($model,'gbr_gallery'))
            {
                $model->gbr_gallery->saveAs('picture/gallery/' . $model->gbr_gallery->baseName . '.' . $model->gbr_gallery->extension);
            }
            $model->save();
            return $this->redirect(['view', 'id' => $id]);
        } else {
            return $this->render('create_picture', [
                'model' => $model,
            ]);
        }
    }
    */
    /**
     * Updates an existing Album model.
     * If update is successful, the browser will be redirected to the 'view' page.
     * When no new image is uploaded, the previously stored image path is kept.
     * @param integer $id
     * @return mixed
     */
    public function actionUpdate($id)
    {
        $model = $this->findModel($id);
        // Remember the current image path so it survives a save without upload.
        $gambar_old=$model->gbr_album;
        if ($model->load(Yii::$app->request->post()) && $model->validate()) {
            // Single getInstance() call instead of the former duplicated pair.
            $upload = UploadedFile::getInstance($model, 'gbr_album');
            if ($upload){
                $model->gbr_album = $upload;
                $model->gbr_album->saveAs('picture/album/' . $model->gbr_album->baseName . '.' . $model->gbr_album->extension);
            }
            else{
                $model->gbr_album=$gambar_old;
            }
            $model->save();
            return $this->redirect(['view', 'id' => $model->id_album]);
        } else {
            return $this->render('update', [
                'model' => $model,
            ]);
        }
    }
    /**
     * Deletes an existing Album model.
     * If deletion is successful, the browser will be redirected to the 'index' page.
     * @param integer $id
     * @return mixed
     */
    public function actionDelete($id)
    {
        $this->findModel($id)->delete();
        return $this->redirect(['index']);
    }
    /**
     * Finds the Album model based on its primary key value.
     * If the model is not found, a 404 HTTP exception will be thrown.
     * @param integer $id
     * @return Album the loaded model
     * @throws NotFoundHttpException if the model cannot be found
     */
    protected function findModel($id)
    {
        if (($model = Album::findOne($id)) !== null) {
            return $model;
        } else {
            throw new NotFoundHttpException('The requested page does not exist.');
        }
    }
}
| ciptohs/yiicms | backend/controllers/AlbumController.php | PHP | bsd-3-clause | 4,899 |
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is configman
#
# The Initial Developer of the Original Code is
# Mozilla Foundation
# Portions created by the Initial Developer are Copyright (C) 2011
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# K Lars Lohn, lars@mozilla.com
# Peter Bengtsson, peterbe@mozilla.com
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import sys
import os
import unittest
from contextlib import contextmanager
import ConfigParser
import io
from cStringIO import StringIO
import getopt
import configman.config_manager as config_manager
from configman.dotdict import DotDict, DotDictWithAcquisition
import configman.datetime_util as dtu
from configman.config_exceptions import NotAnOptionError
from configman.value_sources.source_exceptions import \
AllHandlersFailedException
import configman.value_sources
import configman.value_sources.for_configparse
class TestCase(unittest.TestCase):
    def test_empty_ConfigurationManager_constructor(self):
        """a manager built with no option definitions ends up with an
        empty Namespace."""
        # because the default option argument defaults to using sys.argv we
        # have to mock that
        c = config_manager.ConfigurationManager(
            use_admin_controls=False,
            #use_config_files=False,
            use_auto_help=False,
            argv_source=[]
        )
        self.assertEqual(c.option_definitions, config_manager.Namespace())
    def test_get_config_1(self):
        """get_config on a flat namespace returns a DotDict of defaults."""
        n = config_manager.Namespace()
        n.add_option('a', 1, 'the a')
        n.add_option('b', 17)
        c = config_manager.ConfigurationManager(
            [n],
            use_admin_controls=False,
            #use_config_files=False,
            use_auto_help=False,
            argv_source=[]
        )
        d = c.get_config()
        e = DotDict()
        e.a = 1
        e.b = 17
        self.assertEqual(d, e)
    def test_get_config_2(self):
        """get_config on a nested namespace returns nested DotDicts,
        including options implicitly created from bare assignments."""
        n = config_manager.Namespace()
        n.add_option('a', 1, 'the a')
        n.b = 17
        n.c = c = config_manager.Namespace()
        c.x = 'fred'
        c.y = 3.14159
        c.add_option('z', 99, 'the 99')
        c = config_manager.ConfigurationManager(
            [n],
            use_admin_controls=False,
            #use_config_files=False,
            use_auto_help=False,
            argv_source=[]
        )
        d = c.get_config()
        e = DotDict()
        e.a = 1
        e.b = 17
        e.c = DotDict()
        e.c.x = 'fred'
        e.c.y = 3.14159
        e.c.z = 99
        self.assertEqual(d, e)
    def test_walk_config(self):
        """_walk_config visits every option and sub-namespace, yielding
        (qualified_key, key, value) triples; order is not guaranteed, so
        both sides are sorted before comparison."""
        n = config_manager.Namespace(doc='top')
        n.add_option('aaa', False, 'the a', short_form='a')
        n.c = config_manager.Namespace(doc='c space')
        n.c.add_option('fred', doc='husband from Flintstones')
        n.c.add_option('wilma', doc='wife from Flintstones')
        n.d = config_manager.Namespace(doc='d space')
        n.d.add_option('fred', doc='male neighbor from I Love Lucy')
        n.d.add_option('ethel', doc='female neighbor from I Love Lucy')
        n.d.x = config_manager.Namespace(doc='x space')
        n.d.x.add_option('size', 100, 'how big in tons', short_form='s')
        n.d.x.add_option('password', 'secrets', 'the password')
        c = config_manager.ConfigurationManager(
            [n],
            use_admin_controls=True,
            #use_config_files=False,
            use_auto_help=False,
            argv_source=[]
        )
        e = [('aaa', 'aaa', n.aaa.name),
             ('c', 'c', n.c._doc),
             ('c.wilma', 'wilma', n.c.wilma.name),
             ('c.fred', 'fred', n.c.fred.name),
             ('d', 'd', n.d._doc),
             ('d.ethel', 'ethel', n.d.ethel.name),
             ('d.fred', 'fred', n.d.fred.name),
             ('d.x', 'x', n.d.x._doc),
             ('d.x.size', 'size', n.d.x.size.name),
             ('d.x.password', 'password', n.d.x.password.name),
             ]
        e.sort()
        # namespaces are compared by their _doc, options by their name
        r = [(q, k, v.name if isinstance(v, config_manager.Option) else v._doc)
             for q, k, v in c._walk_config()]
        r.sort()
        for expected, received in zip(e, r):
            self.assertEqual(received, expected)
    def _some_namespaces(self):
        """helper: build a small namespace tree (datetime, string and int
        options across three sub-namespaces) for reuse by other tests."""
        n = config_manager.Namespace(doc='top')
        n.add_option('aaa', '2011-05-04T15:10:00', 'the a',
            short_form='a',
            from_string_converter=dtu.datetime_from_ISO_string
        )
        n.c = config_manager.Namespace(doc='c space')
        n.c.add_option('fred', 'stupid', 'husband from Flintstones')
        n.c.add_option('wilma', 'waspish', 'wife from Flintstones')
        n.d = config_manager.Namespace(doc='d space')
        n.d.add_option('fred', 'crabby', 'male neighbor from I Love Lucy')
        n.d.add_option('ethel', 'silly', 'female neighbor from I Love Lucy')
        n.x = config_manager.Namespace(doc='x space')
        n.x.add_option('size', 100, 'how big in tons', short_form='s')
        n.x.add_option('password', 'secret', 'the password')
        return n
    def test_overlay_config_1(self):
        """overlaying a flat dotted-key dict replaces defaults and applies
        each option's from-string converter (e.g. "2.89" -> 2.89)."""
        n = config_manager.Namespace()
        n.add_option('a')
        n.a.default = 1
        n.a.doc = 'the a'
        n.b = 17
        n.c = c = config_manager.Namespace()
        c.x = 'fred'
        c.y = 3.14159
        c.add_option('z')
        c.z.default = 99
        c.z.doc = 'the 99'
        c = config_manager.ConfigurationManager([n],
                                    use_admin_controls=False,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[])
        o = {"a": 2, "c.z": 22, "c.x": 'noob', "c.y": "2.89"}
        c._overlay_value_sources_recurse(o)
        d = c._generate_config(DotDict)
        e = DotDict()
        e.a = 2
        e.b = 17
        e.c = DotDict()
        e.c.x = 'noob'
        e.c.y = 2.89
        e.c.z = 22
        self.assertEqual(d, e)
    def test_overlay_config_2(self):
        """with ignore_mismatches=True, overlay keys that match no option
        (here "n") are silently dropped."""
        n = config_manager.Namespace()
        n.add_option('a')
        n.a.default = 1
        n.a.doc = 'the a'
        n.b = 17
        n.c = c = config_manager.Namespace()
        c.x = 'fred'
        c.y = 3.14159
        c.add_option('z')
        c.z.default = 99
        c.z.doc = 'the 99'
        c = config_manager.ConfigurationManager([n],
                                    use_admin_controls=False,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[])
        o = {"a": 2, "c.z": 22, "c.x": 'noob', "c.y": "2.89", "n": "not here"}
        c._overlay_value_sources_recurse(o, ignore_mismatches=True)
        d = c._generate_config(DotDict)
        e = DotDict()
        e.a = 2
        e.b = 17
        e.c = DotDict()
        e.c.x = 'noob'
        e.c.y = 2.89
        e.c.z = 22
        self.assertEqual(d, e)
    def test_overlay_config_3(self):
        """with ignore_mismatches=False, an unknown overlay key ("c.n")
        raises NotAnOptionError."""
        n = config_manager.Namespace()
        n.add_option('a')
        n.a.default = 1
        n.a.doc = 'the a'
        n.b = 17
        n.c = c = config_manager.Namespace()
        c.x = 'fred'
        c.y = 3.14159
        c.add_option('z')
        c.z.default = 99
        c.z.doc = 'the 99'
        c = config_manager.ConfigurationManager([n],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[])
        output = {
            "a": 2,
            "c.z": 22,
            "c.x": 'noob',
            "c.y": "2.89",
            "c.n": "not here"
        }
        self.assertRaises(NotAnOptionError,
                          c._overlay_value_sources_recurse, output,
                          ignore_mismatches=False)
    def test_overlay_config_4(self):
        """test overlay dict w/flat source dict: a dict value source with
        dotted keys passed to the constructor overrides defaults."""
        n = config_manager.Namespace()
        n.add_option('a', doc='the a', default=1)
        n.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('extra', doc='the x', default=3.14159)
        g = {'a': 2, 'c.extra': 2.89}
        c = config_manager.ConfigurationManager([n], [g],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[])
        self.assertEqual(c.option_definitions.a, n.a)
        self.assertTrue(isinstance(c.option_definitions.b,
                                   config_manager.Option))
        self.assertEqual(c.option_definitions.a.value, 2)
        self.assertEqual(c.option_definitions.b.value, 17)
        self.assertEqual(c.option_definitions.b.default, 17)
        self.assertEqual(c.option_definitions.b.name, 'b')
        self.assertEqual(c.option_definitions.c.extra.name, 'extra')
        self.assertEqual(c.option_definitions.c.extra.doc, 'the x')
        self.assertEqual(c.option_definitions.c.extra.default, 3.14159)
        self.assertEqual(c.option_definitions.c.extra.value, 2.89)
    def test_overlay_config_4a(self):
        """test overlay dict w/deep source dict: same as _4 but the value
        source dict is nested instead of using dotted keys."""
        n = config_manager.Namespace()
        n.add_option('a', 1, doc='the a')
        n.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('extra', doc='the x', default=3.14159)
        g = {'a': 2, 'c': {'extra': 2.89}}
        c = config_manager.ConfigurationManager([n], [g],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[])
        self.assertEqual(c.option_definitions.a, n.a)
        self.assertTrue(isinstance(c.option_definitions.b,
                                   config_manager.Option))
        self.assertEqual(c.option_definitions.a.value, 2)
        self.assertEqual(c.option_definitions.b.value, 17)
        self.assertEqual(c.option_definitions.b.default, 17)
        self.assertEqual(c.option_definitions.b.name, 'b')
        self.assertEqual(c.option_definitions.c.extra.name, 'extra')
        self.assertEqual(c.option_definitions.c.extra.doc, 'the x')
        self.assertEqual(c.option_definitions.c.extra.default, 3.14159)
        self.assertEqual(c.option_definitions.c.extra.value, 2.89)
    def test_overlay_config_5(self):
        """test namespace definition w/getopt: long options set values;
        a boolean option given with no argument flips to True."""
        n = config_manager.Namespace()
        n.add_option('a', doc='the a', default=1)
        n.b = 17
        n.add_option('c', doc='the c', default=False)
        c = config_manager.ConfigurationManager([n], [getopt],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=['--a', '2', '--c'])
        self.assertEqual(c.option_definitions.a, n.a)
        self.assertTrue(isinstance(c.option_definitions.b,
                                   config_manager.Option))
        self.assertEqual(c.option_definitions.a.value, 2)
        self.assertEqual(c.option_definitions.b.value, 17)
        self.assertEqual(c.option_definitions.b.default, 17)
        self.assertEqual(c.option_definitions.b.name, 'b')
        self.assertEqual(c.option_definitions.c.name, 'c')
        self.assertEqual(c.option_definitions.c.value, True)
    def test_overlay_config_6(self):
        """test namespace definition w/getopt: nested options are
        addressable on the command line via dotted long names."""
        n = config_manager.Namespace()
        n.add_option('a', doc='the a', default=1)
        n.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('extra', short_form='e', doc='the x', default=3.14159)
        c = config_manager.ConfigurationManager([n], [getopt],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=['--a', '2', '--c.extra',
                                                 '11.0'])
        self.assertEqual(c.option_definitions.a, n.a)
        self.assertEqual(type(c.option_definitions.b), config_manager.Option)
        self.assertEqual(c.option_definitions.a.value, 2)
        self.assertEqual(c.option_definitions.b.value, 17)
        self.assertEqual(c.option_definitions.b.default, 17)
        self.assertEqual(c.option_definitions.b.name, 'b')
        self.assertEqual(c.option_definitions.c.extra.name, 'extra')
        self.assertEqual(c.option_definitions.c.extra.doc, 'the x')
        self.assertEqual(c.option_definitions.c.extra.default, 3.14159)
        self.assertEqual(c.option_definitions.c.extra.value, 11.0)
    def test_overlay_config_6a(self):
        """test namespace w/getopt w/short form: a nested option's
        short_form ('-e') works from the command line."""
        n = config_manager.Namespace()
        n.add_option('a', doc='the a', default=1)
        n.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('extra', 3.14159, 'the x', short_form='e')
        c = config_manager.ConfigurationManager([n], [getopt],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=['--a', '2', '-e', '11.0'])
        self.assertEqual(c.option_definitions.a, n.a)
        self.assertEqual(type(c.option_definitions.b), config_manager.Option)
        self.assertEqual(c.option_definitions.a.value, 2)
        self.assertEqual(c.option_definitions.b.value, 17)
        self.assertEqual(c.option_definitions.b.default, 17)
        self.assertEqual(c.option_definitions.b.name, 'b')
        self.assertEqual(c.option_definitions.c.extra.name, 'extra')
        self.assertEqual(c.option_definitions.c.extra.doc, 'the x')
        self.assertEqual(c.option_definitions.c.extra.default, 3.14159)
        self.assertEqual(c.option_definitions.c.extra.value, 11.0)
    def test_overlay_config_7(self):
        """test namespace definition flat file: a callable that yields
        key=value lines acts as a conf-file value source; comments and
        blank lines are skipped."""
        n = config_manager.Namespace()
        n.add_option('a', doc='the a', default=1)
        n.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('extra', 3.14159, 'the x')
        n.c.add_option('string', 'fred', doc='str')
        @contextmanager
        def dummy_open():
            # stands in for an open conf file
            yield ['# comment line to be ignored\n',
                   '\n',  # blank line to be ignored
                   'a=22\n',
                   'b = 33\n',
                   'c.extra = 2.0\n',
                   'c.string = wilma\n'
                   ]
        #g = config_manager.ConfValueSource('dummy-filename', dummy_open)
        c = config_manager.ConfigurationManager([n], [dummy_open],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False)
        self.assertEqual(c.option_definitions.a, n.a)
        self.assertEqual(type(c.option_definitions.b), config_manager.Option)
        self.assertEqual(c.option_definitions.a.value, 22)
        self.assertEqual(c.option_definitions.b.value, 33)
        self.assertEqual(c.option_definitions.b.default, 17)
        self.assertEqual(c.option_definitions.b.name, 'b')
        self.assertEqual(c.option_definitions.c.extra.name, 'extra')
        self.assertEqual(c.option_definitions.c.extra.doc, 'the x')
        self.assertEqual(c.option_definitions.c.extra.default, 3.14159)
        self.assertEqual(c.option_definitions.c.extra.value, 2.0)
        self.assertEqual(c.option_definitions.c.string.name, 'string')
        self.assertEqual(c.option_definitions.c.string.doc, 'str')
        self.assertEqual(c.option_definitions.c.string.default, 'fred')
        self.assertEqual(c.option_definitions.c.string.value, 'wilma')
    def test_overlay_config_8(self):
        """test namespace definition ini file: an in-memory stream of
        ini-style dotted keys overlays the defaults."""
        n = config_manager.Namespace()
        n.other = config_manager.Namespace()
        n.other.add_option('t', 'tee', 'the t')
        n.d = config_manager.Namespace()
        n.d.add_option('a', 1, doc='the a')
        n.d.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('extra', 3.14159, 'the x')
        n.c.add_option('string', 'fred', doc='str')
        ini_data = """
other.t=tea
# blank line to be ignored

d.a=22
d.b=33
c.extra = 2.0
c.string =   wilma
"""
        def strio():
            # fresh readable stream each time the source is consumed
            return io.BytesIO(ini_data)
        c = config_manager.ConfigurationManager([n], [strio],
                                    use_admin_controls=True,
                                    use_auto_help=False)
        self.assertEqual(c.option_definitions.other.t.name, 't')
        self.assertEqual(c.option_definitions.other.t.value, 'tea')
        self.assertEqual(c.option_definitions.d.a, n.d.a)
        self.assertEqual(type(c.option_definitions.d.b), config_manager.Option)
        self.assertEqual(c.option_definitions.d.a.value, 22)
        self.assertEqual(c.option_definitions.d.b.value, 33)
        self.assertEqual(c.option_definitions.d.b.default, 17)
        self.assertEqual(c.option_definitions.d.b.name, 'b')
        self.assertEqual(c.option_definitions.c.extra.name, 'extra')
        self.assertEqual(c.option_definitions.c.extra.doc, 'the x')
        self.assertEqual(c.option_definitions.c.extra.default, 3.14159)
        self.assertEqual(c.option_definitions.c.extra.value, 2.0)
        self.assertEqual(c.option_definitions.c.string.name, 'string')
        self.assertEqual(c.option_definitions.c.string.doc, 'str')
        self.assertEqual(c.option_definitions.c.string.default, 'fred')
        self.assertEqual(c.option_definitions.c.string.value, 'wilma')
    def test_overlay_config_9(self):
        """value-source precedence: environment, then ini stream, then
        command line -- later sources win; unknown environment keys
        ("fred.t") are ignored."""
        n = config_manager.Namespace()
        n.other = config_manager.Namespace()
        n.other.add_option('t', 'tee', 'the t')
        n.d = config_manager.Namespace()
        n.d.add_option('a', 1, doc='the a')
        n.d.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('extra', 3.14159, 'the x')
        n.c.add_option('string', 'fred', 'str')
        ini_data = """
other.t=tea
# blank line to be ignored

d.a=22
c.extra = 2.0
c.string =   from ini
"""
        def strio():
            # fresh readable stream each time the source is consumed
            return io.BytesIO(ini_data)
        e = DotDict()
        e.fred = DotDict()  # should be ignored
        e.fred.t = 'T'  # should be ignored
        e.d = DotDict()
        e.d.a = 16
        e.c = DotDict()
        e.c.extra = 18.6
        e.c.string = 'from environment'
        #fake_os_module = DotDict()
        #fake_os_module.environ = e
        #import configman.value_sources.for_mapping as fm
        #saved_os = fm.os
        #fm.os = fake_os_module
        # temporarily substitute our mapping for the process environment
        saved_environ = os.environ
        os.environ = e
        try:
            c = config_manager.ConfigurationManager([n], [e, strio, getopt],
                                        use_admin_controls=True,
                                        use_auto_help=False,
                                        argv_source=['--other.t', 'TTT',
                                                     '--c.extra', '11.0'])
        finally:
            os.environ = saved_environ
        #fm.os = saved_os
        self.assertEqual(c.option_definitions.other.t.name, 't')
        self.assertEqual(c.option_definitions.other.t.value, 'TTT')
        self.assertEqual(c.option_definitions.d.a, n.d.a)
        self.assertEqual(type(c.option_definitions.d.b), config_manager.Option)
        self.assertEqual(c.option_definitions.d.a.value, 22)
        self.assertEqual(c.option_definitions.d.b.value, 17)
        self.assertEqual(c.option_definitions.d.b.default, 17)
        self.assertEqual(c.option_definitions.d.b.name, 'b')
        self.assertEqual(c.option_definitions.c.extra.name, 'extra')
        self.assertEqual(c.option_definitions.c.extra.doc, 'the x')
        self.assertEqual(c.option_definitions.c.extra.default, 3.14159)
        self.assertEqual(c.option_definitions.c.extra.value, 11.0)
        self.assertEqual(c.option_definitions.c.string.name, 'string')
        self.assertEqual(c.option_definitions.c.string.doc, 'str')
        self.assertEqual(c.option_definitions.c.string.default, 'fred')
        self.assertEqual(c.option_definitions.c.string.value, 'from ini')
    def test_overlay_config_10(self):
        """same precedence chain as _9 (mapping, ini stream, getopt) but
        the mapping is passed directly as a value source rather than via
        os.environ."""
        n = config_manager.Namespace()
        n.other = config_manager.Namespace()
        n.other.add_option('t', 'tee', 'the t')
        n.d = config_manager.Namespace()
        n.d.add_option('a', 1, 'the a')
        n.d.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('extra', 3.14159, 'the x')
        n.c.add_option('string', 'fred', doc='str')
        ini_data = """
other.t=tea
# blank line to be ignored

d.a=22
c.extra = 2.0
c.string =   from ini
"""
        def strio():
            # fresh readable stream each time the source is consumed
            return io.BytesIO(ini_data)
        e = DotDict()
        e.other = DotDict()
        e.other.t = 'T'
        e.d = DotDict()
        e.d.a = 16
        e.c = DotDict()
        e.c.extra = 18.6
        e.c.string = 'from environment'
        #v = config_manager.GetoptValueSource(
            #argv_source=['--c.extra', '11.0']
        #)
        c = config_manager.ConfigurationManager([n], [e, strio, getopt],
                                    use_admin_controls=True,
                                    argv_source=['--c.extra', '11.0'],
                                    #use_config_files=False,
                                    use_auto_help=False)
        self.assertEqual(c.option_definitions.other.t.name, 't')
        self.assertEqual(c.option_definitions.other.t.value, 'tea')
        self.assertEqual(c.option_definitions.d.a, n.d.a)
        self.assertEqual(type(c.option_definitions.d.b), config_manager.Option)
        self.assertEqual(c.option_definitions.d.a.value, 22)
        self.assertEqual(c.option_definitions.d.b.value, 17)
        self.assertEqual(c.option_definitions.d.b.default, 17)
        self.assertEqual(c.option_definitions.d.b.name, 'b')
        self.assertEqual(c.option_definitions.c.extra.name, 'extra')
        self.assertEqual(c.option_definitions.c.extra.doc, 'the x')
        self.assertEqual(c.option_definitions.c.extra.default, 3.14159)
        self.assertEqual(c.option_definitions.c.extra.value, 11.0)
        self.assertEqual(c.option_definitions.c.string.name, 'string')
        self.assertEqual(c.option_definitions.c.string.doc, 'str')
        self.assertEqual(c.option_definitions.c.string.default, 'fred')
        self.assertEqual(c.option_definitions.c.string.value, 'from ini')
    def test_mapping_types_1(self):
        """_generate_config can emit the same configuration as a plain
        dict, a DotDict, or a DotDictWithAcquisition (where missing keys
        in a sub-namespace are acquired from the parent)."""
        n = config_manager.Namespace()
        n.add_option('a')
        n.a.default = 1
        n.a.doc = 'the a'
        n.b = 17
        n.c = c = config_manager.Namespace()
        c.x = 'fred'
        c.y = 3.14159
        c.add_option('z')
        c.z.default = 99
        c.z.doc = 'the 99'
        c = config_manager.ConfigurationManager([n],
                                    use_admin_controls=False,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[])
        o = {"a": 2, "c.z": 22, "c.x": 'noob', "c.y": "2.89"}
        c._overlay_value_sources_recurse(o)
        e = DotDict()
        e.a = 2
        e.b = 17
        e.c = DotDict()
        e.c.x = 'noob'
        e.c.y = 2.89
        e.c.z = 22
        d = c._generate_config(dict)
        self.assertTrue(isinstance(d, dict))
        self.assertTrue(isinstance(d['c'], dict))
        self.assertEqual(d, e)
        d = c._generate_config(DotDict)
        self.assertTrue(isinstance(d, DotDict))
        self.assertTrue(isinstance(d.c, DotDict))
        self.assertEqual(d, e)
        d = c._generate_config(DotDictWithAcquisition)
        self.assertTrue(isinstance(d, DotDictWithAcquisition))
        self.assertTrue(isinstance(d.c, DotDictWithAcquisition))
        self.assertEqual(d, e)
        # acquisition: d.c has no 'a' or 'b', so lookup falls back to d
        self.assertEqual(d.a, 2)
        self.assertEqual(d.c.a, 2)
        self.assertEqual(d.c.b, 17)
    def test_get_option_names(self):
        """get_option_names returns the fully-qualified dotted names of
        every option, at any nesting depth."""
        n = config_manager.Namespace()
        n.add_option('a', 1, 'the a')
        n.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('fred')
        n.c.add_option('wilma')
        n.d = config_manager.Namespace()
        n.d.add_option('fred')
        n.d.add_option('wilma')
        n.d.x = config_manager.Namespace()
        n.d.x.add_option('size')
        c = config_manager.ConfigurationManager([n],
                                    use_admin_controls=False,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[])
        names = c.get_option_names()
        names.sort()
        e = ['a', 'b', 'c.fred', 'c.wilma', 'd.fred', 'd.wilma', 'd.x.size']
        e.sort()
        self.assertEqual(names, e)
    def test_get_option(self):
        """_get_option resolves a dotted name to the Option object that
        was registered under it."""
        n = config_manager.Namespace()
        n.add_option('a', 1, 'the a')
        n.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('fred')
        n.c.add_option('wilma')
        n.d = config_manager.Namespace()
        n.d.add_option('fred')
        n.d.add_option('wilma')
        n.d.x = config_manager.Namespace()
        n.d.x.add_option('size')
        c = config_manager.ConfigurationManager([n],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[])
        self.assertEqual(c._get_option('a'), n.a)
        self.assertEqual(c._get_option('b').name, 'b')
        self.assertEqual(c._get_option('c.fred'), n.c.fred)
        self.assertEqual(c._get_option('c.wilma'), n.c.wilma)
        self.assertEqual(c._get_option('d.fred'), n.d.fred)
        self.assertEqual(c._get_option('d.wilma'), n.d.wilma)
        self.assertEqual(c._get_option('d.wilma'), n.d.wilma)
        self.assertEqual(c._get_option('d.x.size'), n.d.x.size)
    def test_output_summary(self):
        """test_output_summary: the output from help -- options appear in
        sorted order with doc and default; password defaults are masked."""
        n = config_manager.Namespace()
        n.add_option('aaa', False, 'the a', short_form='a')
        n.add_option('bee', True)
        n.b = 17
        n.c = config_manager.Namespace()
        n.c.add_option('fred', doc='husband from Flintstones')
        n.d = config_manager.Namespace()
        n.d.add_option('fred', doc='male neighbor from I Love Lucy')
        n.d.x = config_manager.Namespace()
        n.d.x.add_option('size', 100, 'how big in tons', short_form='s')
        n.d.x.add_option('password', 'secrets', 'the password')
        c = config_manager.ConfigurationManager([n],
                                    use_admin_controls=True,
                                    #use_config_files=False,
                                    use_auto_help=False,
                                    argv_source=[],
                                    #app_name='foo',
                                    #app_version='1.0',
                                    #app_description='This app is cool.'
                                    )
        s = StringIO()
        c.output_summary(output_stream=s)
        r = s.getvalue()
        self.assertTrue('Options:\n' in r)
        options = r.split('Options:\n')[1]
        s.close()
        expect = [
            ('-a, --aaa', 'the a (default: False)'),
            ('--b', '(default: 17)'),
            ('--bee', '(default: True)'),
            ('--c.fred', 'husband from Flintstones'),
            ('--d.fred', 'male neighbor from I Love Lucy'),
            ('--d.x.password', 'the password (default: *********)'),
            ('-s, --d.x.size', 'how big in tons (default: 100)'),
        ]
        point = -1  # used to assert the sort order
        for i, (start, end) in enumerate(expect):
            self.assertTrue(point < options.find(start + ' ')
                            < options.find(' ' + end))
            point = options.find(end)
    def test_output_summary_header(self):
        """a config with an app_name, app_version and app_description is
        printed on the output summary; each attribute is added
        incrementally and the header grows accordingly.
        """
        n = config_manager.Namespace()
        n.add_option('aaa', False, 'the a', short_form='a')
        c = config_manager.ConfigurationManager(n,
                                    use_admin_controls=True,
                                    use_auto_help=False,
                                    argv_source=[],
                                    )
        def get_output(conf):
            # render the summary into a string for inspection
            s = StringIO()
            conf.output_summary(output_stream=s)
            return s.getvalue()
        output = get_output(c)
        assert 'Options:' in output
        self.assertTrue('Application:' not in output)
        c.app_name = 'foobar'
        output = get_output(c)
        assert 'Options:' in output
        self.assertTrue('Application: foobar' in output)
        c.app_version = '1.0'
        output = get_output(c)
        assert 'Options:' in output
        self.assertTrue('Application: foobar 1.0' in output)
        c.app_description = "This ain't your mama's app"
        output = get_output(c)
        assert 'Options:' in output
        self.assertTrue('Application: foobar 1.0\n' in output)
        self.assertTrue("This ain't your mama's app\n\n" in output)
    def test_eval_as_converter(self):
        """does eval work as a to string converter on an Option object?
        An empty-string default must survive conversion unchanged."""
        n = config_manager.Namespace()
        n.add_option('aaa', doc='the a', default='', short_form='a')
        self.assertEqual(n.aaa.value, '')
    def test_RequiredConfig_get_required_config(self):
        """get_required_config merges required_config dicts from every
        base class; config_assert raises on a mapping missing any key."""
        class Foo:
            required_config = {'foo': True}
        class Bar:
            required_config = {'bar': False}
        class Poo:
            pass
        class Combined(config_manager.RequiredConfig, Foo, Poo, Bar):
            pass
        result = Combined.get_required_config()
        self.assertEqual(result.foo.value, True)
        self.assertEqual(result.bar.value, False)
        c = Combined()
        c.config_assert({'foo': True, 'bar': False})
        self.assertRaises(AssertionError, c.config_assert, ({},))
    def test_app_name_from_app_obj(self):
        """the manager adopts app_name/app_version/app_description from
        the class registered as admin.application."""
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        c = config_manager.ConfigurationManager([n],
                                    use_admin_controls=True,
                                    use_auto_help=False,
                                    argv_source=[])
        self.assertEqual(c.app_name, MyApp.app_name)
        self.assertEqual(c.app_version, MyApp.app_version)
        self.assertEqual(c.app_description, MyApp.app_description)
    def test_help_out(self):
        """--help triggers output_summary: header, --help entry, masked
        password, and no listing for the admin.application option.
        sys.exit is stubbed out so auto-help doesn't kill the test run."""
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_option('password', 'fred', 'the password')
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        class MyConfigManager(config_manager.ConfigurationManager):
            def output_summary(inner_self):
                # intercept the summary and run the assertions on its text
                output_stream = StringIO()
                r = super(MyConfigManager, inner_self).output_summary(
                    output_stream=output_stream,
                    block_password=False)
                r = output_stream.getvalue()
                output_stream.close()
                self.assertTrue('Application: fred 1.0' in r)
                self.assertTrue('my app\n\n' in r)
                self.assertTrue('Options:\n' in r)
                self.assertTrue('  --help' in r and 'print this' in r)
                self.assertTrue('print this (default: True)' not in r)
                self.assertTrue('  --password' in r)
                self.assertTrue('the password (default: *********)' in r)
                self.assertTrue('  --admin.application' not in r)
        def my_exit():
            pass
        old_sys_exit = sys.exit
        sys.exit = my_exit
        try:
            MyConfigManager(n,
                            [getopt],
                            use_admin_controls=True,
                            use_auto_help=True,
                            argv_source=['--password=wilma', '--help'])
        finally:
            sys.exit = old_sys_exit
    def test_write_gets_called(self):
        """--admin.dump_conf=... causes the manager to call dump_conf
        during construction (verified via an overriding subclass)."""
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_option('password', 'fred', 'the password')
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        class MyConfigManager(config_manager.ConfigurationManager):
            def __init__(inner_self, *args, **kwargs):
                inner_self.write_called = False
                super(MyConfigManager, inner_self).__init__(*args, **kwargs)
            def dump_conf(inner_self):
                # record the call instead of writing a file
                inner_self.dump_conf_called = True
        def my_exit():
            pass
        old_sys_exit = sys.exit
        sys.exit = my_exit
        try:
            c = MyConfigManager(n,
                                [getopt],
                                use_admin_controls=True,
                                use_auto_help=True,
                                argv_source=['--password=wilma',
                                             '--admin.dump_conf=x.ini'])
            self.assertEqual(c.dump_conf_called, True)
        finally:
            sys.exit = old_sys_exit
    def test_get_options(self):
        """_get_options yields (qualified_key, Option) pairs covering the
        built-in admin options plus those contributed by the app class."""
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_option('password', 'fred', 'the password')
            required_config.sub = config_manager.Namespace()
            required_config.sub.add_option('name',
                                           'ethel',
                                           'the name')
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        c = config_manager.ConfigurationManager(n,
                                    use_admin_controls=True,
                                    use_auto_help=False,
                                    argv_source=[])
        r = c._get_options()
        e = (
             ('admin.print_conf', 'print_conf', None),
             ('admin.application', 'application', MyApp),
             ('admin.dump_conf', 'dump_conf', ''),
             ('admin.conf', 'conf', './config.ini'),
             ('password', 'password', 'fred'),
             ('sub.name', 'name', 'ethel'))
        for expected, result in zip(e, r):
            expected_key, expected_name, expected_default = expected
            result_key, result_option = result
            self.assertEqual(expected_key, result_key)
            self.assertEqual(expected_name, result_option.name)
            self.assertEqual(expected_default, result_option.default)
    def test_log_config(self):
        """log_config writes app name/version and each option to the
        given logger's info(); password values are masked with stars."""
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_option('password', 'fred', 'the password')
            required_config.sub = config_manager.Namespace()
            required_config.sub.add_option('name',
                                           'ethel',
                                           'the name')
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        c = config_manager.ConfigurationManager(n,
                                    [getopt],
                                    use_admin_controls=True,
                                    use_auto_help=False,
                                    argv_source=['--sub.name=wilma'])
        class FakeLogger(object):
            # captures formatted info() lines for inspection
            def __init__(self):
                self.log = []
            def info(self, *args):
                self.log.append(args[0] % args[1:])
        fl = FakeLogger()
        c.log_config(fl)
        e = ["app_name: fred",
             "app_version: 1.0",
             "current configuration:",
             "password: *********",
             "sub.name: wilma"]
        for expected, received in zip(e, fl.log):
            self.assertEqual(expected, received)
    def test_extra_commandline_parameters(self):
        """positional command-line arguments that are not options are
        collected, in order, in the manager's .args attribute."""
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_option('password', 'fred', 'the password')
            required_config.sub = config_manager.Namespace()
            required_config.sub.add_option('name',
                                           'ethel',
                                           'the name')
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        c = config_manager.ConfigurationManager(n,
                                    [getopt],
                                    use_admin_controls=True,
                                    use_auto_help=False,
                                    argv_source=['--sub.name=wilma',
                                                 'argument 1',
                                                 'argument 2',
                                                 'argument 3'])
        expected = ['argument 1',
                    'argument 2',
                    'argument 3']
        self.assertEqual(c.args, expected)
    def test_print_conf_called(self):
        """Passing --admin.print_conf=ini must trigger print_conf() during
        ConfigurationManager construction."""
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_option('password', 'fred', 'the password')
            required_config.sub = config_manager.Namespace()
            required_config.sub.add_option('name',
                                           'ethel',
                                           'the name')
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        class MyConfigManager(config_manager.ConfigurationManager):
            def __init__(inner_self, *args, **kwargs):
                inner_self.write_called = False
                super(MyConfigManager, inner_self).__init__(*args, **kwargs)
            # overrides the real printer; super().__init__ above is expected
            # to invoke it, which sets the flag asserted below
            def print_conf(inner_self):
                inner_self.print_conf_called = True
        c = MyConfigManager(n,
                            [getopt],
                            use_admin_controls=True,
                            use_auto_help=False,
                            quit_after_admin=False,
                            argv_source=['--admin.print_conf=ini',
                                         'argument 1',
                                         'argument 2',
                                         'argument 3'])
        self.assertEqual(c.print_conf_called, True)
    def test_non_compliant_app_object(self):
        """An application class without a required_config attribute must not
        contribute any options: only the 'admin' namespace should remain."""
        # the MyApp class doesn't define required config
        class MyApp():
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        c = config_manager.ConfigurationManager(n,
                                                [getopt],
                                                use_admin_controls=True,
                                                use_auto_help=False,
                                                argv_source=['argument 1',
                                                             'argument 2',
                                                             'argument 3'])
        conf = c.get_config()
        # NOTE: Python 2 style — keys() returns a list here
        self.assertEqual(conf.keys(), ['admin'])  # there should be nothing but
                                                  # the admin key
    def test_print_conf(self):
        """print_conf() should write an 'ini' dump to sys.stdout via
        write_conf(); stdout is replaced with the sentinel 17 so the opener
        handed to write_conf can be identity-checked."""
        n = config_manager.Namespace()
        class MyConfigManager(config_manager.ConfigurationManager):
            def __init__(inner_self, *args, **kwargs):
                inner_self.write_called = False
                super(MyConfigManager, inner_self).__init__(*args, **kwargs)
            def print_conf(self):
                # swap stdout for a sentinel value so write_conf below can
                # verify that the base class really targets sys.stdout
                temp_stdout = sys.stdout
                sys.stdout = 17
                try:
                    super(MyConfigManager, self).print_conf()
                finally:
                    sys.stdout = temp_stdout
            def write_conf(inner_self, file_type, opener, skip_keys=None):
                self.assertEqual(file_type, 'ini')
                with opener() as f:
                    self.assertEqual(f, 17)
        MyConfigManager(
            n,
            [getopt],
            use_admin_controls=True,
            use_auto_help=False,
            quit_after_admin=False,
            argv_source=['--admin.print_conf=ini',
                         'argument 1',
                         'argument 2',
                         'argument 3'],
            config_pathname='fred')
    def test_dump_conf(self):
        """--admin.dump_conf=fred.ini must route to write_conf() with the
        'ini' file type and an opener bound to ('fred.ini', 'w')."""
        n = config_manager.Namespace()
        class MyConfigManager(config_manager.ConfigurationManager):
            def __init__(inner_self, *args, **kwargs):
                inner_self.write_called = False
                super(MyConfigManager, inner_self).__init__(*args, **kwargs)
            def write_conf(inner_self, file_type, opener, skip_keys=None):
                self.assertEqual(file_type, 'ini')
                # the opener must be a partial over open('fred.ini', 'w')
                self.assertEqual(opener.args, ('fred.ini', 'w'))
        MyConfigManager(
            n,
            [getopt],
            use_admin_controls=True,
            use_auto_help=False,
            quit_after_admin=False,
            argv_source=['--admin.dump_conf=fred.ini',
                         'argument 1',
                         'argument 2',
                         'argument 3'],
            config_pathname='fred'
        )
    def test_print_conf_some_options_excluded(self):
        """Options flagged exclude_from_print_conf must be omitted from the
        --admin.print_conf output while ordinary options still appear."""
        n = config_manager.Namespace()
        n.add_option('gender',
                     default='Male',
                     doc='What kind of genitalia?')
        n.add_option('salary',
                     default=10000,
                     doc='How much do you earn?',
                     exclude_from_print_conf=True
                     )
        # capture stdout while the constructor performs the print_conf action
        old_stdout = sys.stdout
        temp_output = StringIO()
        sys.stdout = temp_output
        try:
            config_manager.ConfigurationManager(
                n,
                [getopt],
                use_admin_controls=True,
                use_auto_help=False,
                quit_after_admin=False,
                argv_source=['--admin.print_conf=ini'],
                config_pathname='fred'
            )
        finally:
            sys.stdout = old_stdout
        printed = temp_output.getvalue()
        self.assertTrue('gender' in printed)
        self.assertTrue('salary' not in printed)
    def test_dump_conf_some_options_excluded(self):
        """exclude_from_dump_conf must suppress an option in the dumped file;
        exclude_from_print_conf alone ('gender') must NOT suppress it."""
        n = config_manager.Namespace()
        n.add_option('gender',
                     default='Male',
                     doc='What kind of genitalia?',
                     exclude_from_print_conf=True)
        n.add_option('salary',
                     default=10000,
                     doc='How much do you earn?',
                     exclude_from_dump_conf=True
                     )
        try:
            config_manager.ConfigurationManager(
                n,
                [getopt],
                use_admin_controls=True,
                use_auto_help=False,
                quit_after_admin=False,
                argv_source=['--admin.dump_conf=foo.conf'],
                config_pathname='fred'
            )
            printed = open('foo.conf').read()
            self.assertTrue('gender' in printed)
            self.assertTrue('salary' not in printed)
        finally:
            # always remove the file the dump action wrote to disk
            if os.path.isfile('foo.conf'):
                os.remove('foo.conf')
    def test_config_pathname_set(self):
        """With no value sources and a config_pathname that is not a
        directory, construction must raise AllHandlersFailedException."""
        class MyConfigManager(config_manager.ConfigurationManager):
            def __init__(inner_self, *args, **kwargs):
                inner_self.write_called = False
                super(MyConfigManager, inner_self).__init__(*args, **kwargs)
            # NOTE(review): this override is named get_config_pathname but the
            # super call targets _get_config_pathname — confirm the base class
            # actually invokes this method, otherwise it is dead code here
            def get_config_pathname(self):
                temp_fn = os.path.isdir
                os.path.isdir = lambda x: False
                try:
                    r = super(MyConfigManager, self)._get_config_pathname()
                finally:
                    os.path.isdir = temp_fn
                return r
        self.assertRaises(AllHandlersFailedException,
                          MyConfigManager,
                          use_admin_controls=True,
                          use_auto_help=False,
                          quit_after_admin=False,
                          argv_source=['argument 1',
                                       'argument 2',
                                       'argument 3'],
                          config_pathname='fred')
def test_ConfigurationManager_block_password(self):
function = config_manager.ConfigurationManager._block_password
self.assertEqual(function('foo', 'bar', 'peter', block_password=False),
('foo', 'bar', 'peter'))
self.assertEqual(function('foo', 'bar', 'peter', block_password=True),
('foo', 'bar', 'peter'))
self.assertEqual(function('foo', 'password', 'peter',
block_password=True),
('foo', 'password', '*********'))
self.assertEqual(function('foo', 'my_password', 'peter',
block_password=True),
('foo', 'my_password', '*********'))
    def test_do_aggregations(self):
        """An aggregation registered on a sub-namespace must be computed from
        the merged config, its local namespace and the positional args."""
        def aggregation_test(all_config, local_namespace, args):
            # the aggregator receives the full config tree...
            self.assertTrue('password' in all_config)
            self.assertTrue('sub1' in all_config)
            self.assertTrue('name' in all_config.sub1)
            # ...its own namespace flattened...
            self.assertTrue('name' in local_namespace)
            self.assertTrue('spouse' in local_namespace)
            # ...and the extra command-line arguments
            self.assertEqual(len(args), 2)
            return ('%s married %s using password %s but '
                    'divorced because of %s.' % (local_namespace.name,
                                                 local_namespace.spouse,
                                                 all_config.password,
                                                 args[1]))
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_option('password', '@$*$&26Ht', 'the password')
            required_config.namespace('sub1')
            required_config.sub1.add_option('name', 'ethel', 'the name')
            required_config.sub1.add_option('spouse', 'fred', 'the spouse')
            required_config.sub1.add_aggregation('statement', aggregation_test)
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        c = config_manager.ConfigurationManager(n,
                                                [getopt],
                                                use_admin_controls=True,
                                                use_auto_help=False,
                                                argv_source=['--sub1.name=wilma',
                                                             'arg1',
                                                             'arg2'])
        config = c.get_config()
        self.assertEqual(config.sub1.statement,
                         'wilma married fred using password @$*$&26Ht '
                         'but divorced because of arg2.')
    def test_context(self):
        """ConfigurationManager.context() must yield a config whose aggregated
        values are live inside the 'with' block and close()d on exit."""
        class AggregatedValue(object):
            # value object with a close() hook so the context manager can
            # demonstrate cleanup on exit
            def __init__(self, value):
                self.value = value
            def close(self):
                self.value = None
        def aggregation_test(all_config, local_namespace, args):
            self.assertTrue('password' in all_config)
            self.assertTrue('sub1' in all_config)
            self.assertTrue('name' in all_config.sub1)
            self.assertTrue('name' in local_namespace)
            self.assertTrue('spouse' in local_namespace)
            self.assertEqual(len(args), 2)
            return AggregatedValue('%s married %s using password %s but '
                                   'divorced because of %s.' %
                                   (local_namespace.name,
                                    local_namespace.spouse,
                                    all_config.password,
                                    args[1]))
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_option('password', '@$*$&26Ht', 'the password')
            required_config.namespace('sub1')
            required_config.sub1.add_option('name', 'ethel', 'the name')
            required_config.sub1.add_option('spouse', 'fred', 'the spouse')
            required_config.sub1.add_aggregation('statement', aggregation_test)
            def __init__(inner_self, config):
                inner_self.config = config
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        c = config_manager.ConfigurationManager(n,
                                                [getopt],
                                                use_admin_controls=True,
                                                use_auto_help=False,
                                                argv_source=['--sub1.name=wilma',
                                                             'arg1',
                                                             'arg2'])
        with c.context() as config:
            statement = config.sub1.statement
            self.assertEqual(statement.value,
                             'wilma married fred using password @$*$&26Ht '
                             'but divorced because of arg2.')
        # leaving the context must have called close() on the aggregate
        self.assertTrue(statement.value is None)
    def test_failing_aggregate_error_bubbling(self):
        """An exception raised inside an aggregator must propagate out of
        context().__enter__ instead of being swallowed.

        Reproduces and assures this issue
        https://github.com/mozilla/configman/issues/21
        """
        class AggregatedValue(object):
            def __init__(self, value):
                self.value = value
            def close(self):
                self.value = None
        class SomeException(Exception):
            pass
        def aggregation_test(all_config, local_namespace, args):
            # the aggregator might be broken
            raise SomeException('anything')
        class MyApp(config_manager.RequiredConfig):
            app_name = 'fred'
            app_version = '1.0'
            app_description = "my app"
            required_config = config_manager.Namespace()
            required_config.add_aggregation('statement', aggregation_test)
        n = config_manager.Namespace()
        n.admin = config_manager.Namespace()
        n.admin.add_option('application',
                           MyApp,
                           'the app object class')
        c = config_manager.ConfigurationManager(n,
                                                [getopt],
                                                use_admin_controls=True,
                                                use_auto_help=False,
                                                argv_source=[])
        # enter the context manually so the raised exception is observable
        contextmanager_ = c.context()
        self.assertRaises(SomeException, contextmanager_.__enter__)
| peterbe/configman | configman/tests/test_config_manager.py | Python | bsd-3-clause | 56,350 |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/android/dialog_overlay_impl.h"
#include "base/task/post_task.h"
#include "content/browser/frame_host/render_frame_host_impl.h"
#include "content/browser/renderer_host/render_widget_host_view_base.h"
#include "content/browser/web_contents/web_contents_impl.h"
#include "content/public/android/content_jni_headers/DialogOverlayImpl_jni.h"
#include "content/public/browser/browser_task_traits.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/web_contents_delegate.h"
#include "gpu/ipc/common/gpu_surface_tracker.h"
#include "ui/android/view_android_observer.h"
#include "ui/android/window_android.h"
using base::android::AttachCurrentThread;
using base::android::JavaParamRef;
using base::android::ScopedJavaLocalRef;
namespace content {
// JNI entry point: creates the native peer for a Java DialogOverlayImpl.
// Returns 0 (failure) whenever an overlay could not be used immediately:
// unknown routing token, VR mode, non-current frame, hidden contents,
// persistent video, or (when |power_efficient|) a non-fullscreen tab.
static jlong JNI_DialogOverlayImpl_Init(JNIEnv* env,
                                        const JavaParamRef<jobject>& obj,
                                        jlong high,
                                        jlong low,
                                        jboolean power_efficient) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  RenderFrameHostImpl* rfhi =
      content::RenderFrameHostImpl::FromOverlayRoutingToken(
          base::UnguessableToken::Deserialize(high, low));
  if (!rfhi)
    return 0;
  // TODO(http://crbug.com/673886): Support overlay surfaces in VR using GVR
  // reprojection video surface.
  RenderWidgetHostViewBase* rwhvb =
      static_cast<RenderWidgetHostViewBase*>(rfhi->GetView());
  if (!rwhvb || rwhvb->IsInVR())
    return 0;
  WebContentsImpl* web_contents_impl = static_cast<WebContentsImpl*>(
      content::WebContents::FromRenderFrameHost(rfhi));
  // If the overlay would not be immediately used, fail the request.
  if (!rfhi->IsCurrent() || !web_contents_impl || web_contents_impl->IsHidden())
    return 0;
  // Dialog-based overlays are not supported for persistent video.
  if (web_contents_impl->HasPersistentVideo())
    return 0;
  // If we require a power-efficient overlay, then approximate that with "is
  // fullscreen".  The reason is that we want to be somewhat sure that we don't
  // have more layers than HWC can support, else SurfaceFlinger will fall back
  // to GLES composition.  In fullscreen mode, the android status bar is hidden,
  // as is the nav bar (if present).  The chrome activity surface also gets
  // hidden when possible.
  if (power_efficient && !web_contents_impl->IsFullscreen())
    return 0;
  // Ownership passes to the Java side, which calls Destroy() later.
  return reinterpret_cast<jlong>(
      new DialogOverlayImpl(obj, rfhi, web_contents_impl, power_efficient));
}
// Starts observing |web_contents|' native view; callbacks into |obj| are
// deferred until the Java side calls CompleteInit().
DialogOverlayImpl::DialogOverlayImpl(const JavaParamRef<jobject>& obj,
                                     RenderFrameHostImpl* rfhi,
                                     WebContents* web_contents,
                                     bool power_efficient)
    : WebContentsObserver(web_contents),
      rfhi_(rfhi),
      power_efficient_(power_efficient),
      observed_window_android_(false) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  DCHECK(rfhi_);
  JNIEnv* env = AttachCurrentThread();
  obj_ = JavaObjectWeakGlobalRef(env, obj);
  web_contents->GetNativeView()->AddObserver(this);
  // Note that we're not allowed to call back into |obj| before it calls
  // CompleteInit.  However, the observer won't actually call us back until the
  // token changes.  As long as the java side calls us from the ui thread before
  // returning, we won't send a callback before then.
}
// Second-phase init, invoked by the Java peer once it is ready to receive
// callbacks.  Enables overlay mode and sends the initial window token.
void DialogOverlayImpl::CompleteInit(JNIEnv* env,
                                     const JavaParamRef<jobject>& obj) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  WebContentsDelegate* delegate = web_contents()->GetDelegate();
  if (!delegate) {
    Stop();
    return;
  }
  // Note: It's ok to call SetOverlayMode() directly here, because there can be
  // at most one overlay alive at the time. This logic needs to be updated if
  // ever AndroidOverlayProviderImpl.MAX_OVERLAYS > 1.
  delegate->SetOverlayMode(true);
  // Send the initial token, if there is one. The observer will notify us about
  // changes only.
  if (auto* window = web_contents()->GetNativeView()->GetWindowAndroid()) {
    RegisterWindowObserverIfNeeded(window);
    ScopedJavaLocalRef<jobject> token = window->GetWindowToken();
    if (!token.is_null()) {
      Java_DialogOverlayImpl_onWindowToken(env, obj, token);
    }
    // else we will send one if we get a callback from ViewAndroid.
  }
}
DialogOverlayImpl::~DialogOverlayImpl() {
  // Cleanup happened earlier in Destroy()/UnregisterCallbacksIfNeeded().
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
}
// Tears down observers and notifies the Java peer that the overlay is gone.
// Safe to call multiple times; obj_ is reset so onDismissed fires only once.
void DialogOverlayImpl::Stop() {
  UnregisterCallbacksIfNeeded();
  JNIEnv* env = AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = obj_.get(env);
  if (!obj.is_null())
    Java_DialogOverlayImpl_onDismissed(env, obj);
  obj_.reset();
}
// JNI: the Java peer is done with us; schedule deletion of the native object.
void DialogOverlayImpl::Destroy(JNIEnv* env, const JavaParamRef<jobject>& obj) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  UnregisterCallbacksIfNeeded();
  // We delete soon since this might be part of an onDismissed callback.
  base::DeleteSoon(FROM_HERE, {BrowserThread::UI}, this);
}
// JNI: reports the container view's position in the window back into |rect|.
void DialogOverlayImpl::GetCompositorOffset(
    JNIEnv* env,
    const base::android::JavaParamRef<jobject>& obj,
    const base::android::JavaParamRef<jobject>& rect) {
  gfx::Point point =
      web_contents()->GetNativeView()->GetLocationOfContainerViewInWindow();
  Java_DialogOverlayImpl_receiveCompositorOffset(env, rect, point.x(),
                                                 point.y());
}
// Idempotent teardown of overlay mode and of view/window observers.
// |rfhi_| doubles as the "already unregistered" flag.
void DialogOverlayImpl::UnregisterCallbacksIfNeeded() {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  if (!rfhi_)
    return;
  // We clear overlay mode here rather than in Destroy(), because we may have
  // been called via a WebContentsDestroyed() event, and this might be the last
  // opportunity we have to access web_contents().
  WebContentsDelegate* delegate = web_contents()->GetDelegate();
  if (delegate)
    delegate->SetOverlayMode(false);
  if (observed_window_android_) {
    auto* window_android = web_contents()->GetNativeView()->GetWindowAndroid();
    if (window_android)
      window_android->RemoveObserver(this);
    observed_window_android_ = false;
  }
  web_contents()->GetNativeView()->RemoveObserver(this);
  rfhi_ = nullptr;
}
// WebContentsObserver: dismiss when the frame we are attached to goes away.
void DialogOverlayImpl::RenderFrameDeleted(RenderFrameHost* render_frame_host) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  if (render_frame_host == rfhi_)
    Stop();
}
// WebContentsObserver: dismiss when our frame is swapped out for another.
void DialogOverlayImpl::RenderFrameHostChanged(RenderFrameHost* old_host,
                                               RenderFrameHost* new_host) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  if (old_host == rfhi_)
    Stop();
}
// WebContentsObserver: same dismissal as RenderFrameDeleted for this event.
void DialogOverlayImpl::FrameDeleted(RenderFrameHost* render_frame_host) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  if (render_frame_host == rfhi_)
    Stop();
}
// WebContentsObserver: overlays cannot outlive a hidden tab.
void DialogOverlayImpl::OnVisibilityChanged(content::Visibility visibility) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  if (visibility == content::Visibility::HIDDEN)
    Stop();
}
// WindowAndroidObserver: dismiss when the root window is hidden.
void DialogOverlayImpl::OnRootWindowVisibilityChanged(bool visible) {
  if (!visible)
    Stop();
}
void DialogOverlayImpl::WebContentsDestroyed() {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  Stop();
}
// Forwards fullscreen transitions to Java as power-efficient state changes,
// but only when the client asked for a power-efficient overlay.
void DialogOverlayImpl::DidToggleFullscreenModeForTab(bool entered_fullscreen,
                                                      bool will_cause_resize) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  // If the caller doesn't care about power-efficient overlays, then don't send
  // any callbacks about state change.
  if (!power_efficient_)
    return;
  JNIEnv* env = AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = obj_.get(env);
  if (!obj.is_null())
    Java_DialogOverlayImpl_onPowerEfficientState(env, obj, entered_fullscreen);
}
// ViewAndroidObserver: the view gained a window; forward its token (possibly
// null) to Java so the dialog can be (re)parented.
void DialogOverlayImpl::OnAttachedToWindow() {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  JNIEnv* env = AttachCurrentThread();
  ScopedJavaLocalRef<jobject> token;
  if (auto* window = web_contents()->GetNativeView()->GetWindowAndroid()) {
    RegisterWindowObserverIfNeeded(window);
    token = window->GetWindowToken();
  }
  ScopedJavaLocalRef<jobject> obj = obj_.get(env);
  if (!obj.is_null())
    Java_DialogOverlayImpl_onWindowToken(env, obj, token);
}
// ViewAndroidObserver: losing the window invalidates the token and the
// overlay itself.
void DialogOverlayImpl::OnDetachedFromWindow() {
  JNIEnv* env = AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = obj_.get(env);
  if (!obj.is_null())
    Java_DialogOverlayImpl_onWindowToken(env, obj, nullptr);
  Stop();
}
// Adds |this| as a WindowAndroid observer exactly once.
void DialogOverlayImpl::RegisterWindowObserverIfNeeded(
    ui::WindowAndroid* window) {
  if (!observed_window_android_) {
    observed_window_android_ = true;
    window->AddObserver(this);
  }
}
// JNI: registers |surface| with the GPU surface tracker, returning its id.
static jint JNI_DialogOverlayImpl_RegisterSurface(
    JNIEnv* env,
    const JavaParamRef<jobject>& surface) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  return gpu::GpuSurfaceTracker::Get()->AddSurfaceForNativeWidget(
      gpu::GpuSurfaceTracker::SurfaceRecord(
          gfx::kNullAcceleratedWidget, surface.obj(),
          false /* can_be_used_with_surface_control */));
}
// JNI: releases a surface previously added via RegisterSurface.
static void JNI_DialogOverlayImpl_UnregisterSurface(
    JNIEnv* env,
    jint surface_id) {
  DCHECK_CURRENTLY_ON(BrowserThread::UI);
  gpu::GpuSurfaceTracker::Get()->RemoveSurface(surface_id);
}
// JNI (test only): fetches back the Java surface for |surfaceId|.
static ScopedJavaLocalRef<jobject>
JNI_DialogOverlayImpl_LookupSurfaceForTesting(
    JNIEnv* env,
    jint surfaceId) {
  bool can_be_used_with_surface_control = false;
  gl::ScopedJavaSurface surface =
      gpu::GpuSurfaceTracker::Get()->AcquireJavaSurface(
          surfaceId, &can_be_used_with_surface_control);
  return ScopedJavaLocalRef<jobject>(surface.j_surface());
}
}  // namespace content
| endlessm/chromium-browser | content/browser/android/dialog_overlay_impl.cc | C++ | bsd-3-clause | 9,829 |
<?php
/**
* Created by PhpStorm.
* User: CristianoGD
* Date: 27/09/2015
* Time: 08:44
*/
namespace Aluno\Model;
//imports Zend/InputFilter
use Zend\InputFilter\InputFilterAwareInterface,
Zend\InputFilter\InputFilter,
Zend\InputFilter\InputFilterInterface;
use Zend\Validator\NotEmpty,
Zend\Validator\StringLength;
/**
 * Aluno: student record model plus the Zend\InputFilter specification used
 * to validate the student CRUD forms.
 */
class Aluno
{
    // Student fields (hydrated by exchangeArray from form/DB data)
    public $id_aluno;
    public $nome_aluno;
    public $matricula_aluno;
    public $curso_aluno;
    public $serie_aluno;
    public $materia_aluno;
    public $professor_aluno;
    public $prova1;
    public $prova2;
    public $prova3;
    public $prova4;
    public $trabalho1;
    public $trabalho2;
    public $trabalho3;
    public $trabalho4;
    public $faltas;
    public $situacao_aluno;
    // lazily-built InputFilter cache, see getInputFilter()
    protected $inputFilter;
    /**
     * Hydrates this object from an associative array.
     *
     * NOTE(review): !empty() treats 0 and '0' as empty, so a legitimate grade
     * of 0 (prova*/trabalho*) or zero faltas is silently stored as null —
     * confirm whether isset()/'' checks were intended for numeric fields.
     */
    public function exchangeArray($data)
    {
        // students
        $this->id_aluno = (!empty($data['id_aluno'])) ? $data['id_aluno'] : null;
        $this->nome_aluno = (!empty($data['nome_aluno'])) ? $data['nome_aluno'] : null;
        $this->matricula_aluno = (!empty($data['matricula_aluno'])) ? $data['matricula_aluno'] : null;
        $this->curso_aluno = (!empty($data['curso_aluno'])) ? $data['curso_aluno'] : null;
        $this->serie_aluno = (!empty($data['serie_aluno'])) ? $data['serie_aluno'] : null;
        $this->materia_aluno = (!empty($data['materia_aluno'])) ? $data['materia_aluno'] : null;
        $this->professor_aluno = (!empty($data['professor_aluno'])) ? $data['professor_aluno'] : null;
        $this->prova1 = (!empty($data['prova1'])) ? $data['prova1'] : null;
        $this->prova2 = (!empty($data['prova2'])) ? $data['prova2'] : null;
        $this->prova3 = (!empty($data['prova3'])) ? $data['prova3'] : null;
        $this->prova4 = (!empty($data['prova4'])) ? $data['prova4'] : null;
        $this->trabalho1 = (!empty($data['trabalho1'])) ? $data['trabalho1'] : null;
        $this->trabalho2 = (!empty($data['trabalho2'])) ? $data['trabalho2'] : null;
        $this->trabalho3 = (!empty($data['trabalho3'])) ? $data['trabalho3'] : null;
        $this->trabalho4 = (!empty($data['trabalho4'])) ? $data['trabalho4'] : null;
        $this->faltas = (!empty($data['faltas'])) ? $data['faltas'] : null;
        $this->situacao_aluno = (!empty($data['situacao_aluno'])) ? $data['situacao_aluno'] : null;
    }
    /**
     * Required by InputFilterAwareInterface but intentionally unsupported:
     * the filter is always built internally by getInputFilter().
     *
     * @param InputFilterInterface $inputFilter
     * @throws \Exception always
     */
    public function setInputFilter(InputFilterInterface $inputFilter){
        throw new \Exception('Não utilizado.');
    }
    /**
     * Builds (once) and returns the validation/filter chain for all fields.
     *
     * @return InputFilter
     */
    public function getInputFilter(){
        if(!$this->inputFilter){
            $inputFilter = new InputFilter();
            // input filter for the id field
            $inputFilter->add(array(
                'name'=>'id_aluno',
                'required'=>true,
                'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),
            ));
            // input filter for the name field
            $inputFilter->add(array(
                'name'=>'nome_aluno',
                'required'=>true,
                'filters'=>array(
                    array('name'=>'StripTags'),# strips XML/HTML from the string
                    array('name'=>'StringTrim'),# trims leading/trailing whitespace
                    //array('name'=>'StringToUpper'),# uppercases the string
                ),
                'validators'=>array(
                    array(
                        'name'=>'NotEmpty',
                        'options'=>array(
                            'messages'=>array(
                                NotEmpty::IS_EMPTY=>'Campo obrigatório.'
                            ),
                        ),
                    ),
                    array(
                        'name'=>'StringLength',
                        'options'=>array(
                            'encoding'=>'UTF-8',
                            'min'=>3,
                            'max'=>100,
                            'messages'=>array(
                                StringLength::TOO_SHORT=>'Mínimo de caracteres aceitáveis %min%.',
                                StringLength::TOO_LONG=>'Máximo de caracteres aceitáveis %max%.',
                            ),
                        ),
                    ),
                ),
            ));
            // input filter for the matricula (enrollment) field
            $inputFilter->add(array(
                'name'=>'matricula_aluno',
                'required'=>true,
                'filters'=>array(
                    array('name'=>'StripTags'),# strips XML/HTML from the string
                    array('name'=>'StringTrim'),# trims leading/trailing whitespace
                    //array('name'=>'StringToUpper'),# uppercases the string
                ),
                'validators'=>array(
                    array(
                        'name'=>'NotEmpty',
                        'options'=>array(
                            'messages'=>array(
                                NotEmpty::IS_EMPTY=>'Campo obrigatório.'
                            ),
                        ),
                    ),
                    array(
                        'name'=>'StringLength',
                        'options'=>array(
                            'encoding'=>'UTF-8',
                            'min'=>3,
                            'max'=>100,
                            'messages'=>array(
                                StringLength::TOO_SHORT=>'Mínimo de caracteres aceitáveis %min%.',
                                StringLength::TOO_LONG=>'Máximo de caracteres aceitáveis %max%.',
                            ),
                        ),
                    ),
                ),
            ));
            // input filter for the course field (no length constraint)
            $inputFilter->add(array(
                'name'=>'curso_aluno',
                'required'=>true,
                'filters'=>array(
                    array('name'=>'StripTags'),# strips XML/HTML from the string
                    array('name'=>'StringTrim'),# trims leading/trailing whitespace
                    //array('name'=>'StringToUpper'),# uppercases the string
                ),
                'validators'=>array(
                    array(
                        'name'=>'NotEmpty',
                        'options'=>array(
                            'messages'=>array(
                                NotEmpty::IS_EMPTY=>'Campo obrigatório.'
                            ),
                        ),
                    ),
                    array(
                        'name'=>'StringLength',
                        'options'=>array(
                            'encoding'=>'UTF-8',
                            /*'min'=>3,
                            'max'=>100,
                            'messages'=>array(
                                StringLength::TOO_SHORT=>'Mínimo de caracteres aceitáveis %min%.',
                                StringLength::TOO_LONG=>'Máximo de caracteres aceitáveis %max%.',
                            ),*/
                        ),
                    ),
                ),
            ));
            // input filter for the grade/series field
            $inputFilter->add(array(
                'name'=>'serie_aluno',
                'required'=>true,
                'filters'=>array(
                    array('name'=>'StripTags'),# strips XML/HTML from the string
                    array('name'=>'StringTrim'),# trims leading/trailing whitespace
                    //array('name'=>'StringToUpper'),# uppercases the string
                ),
                'validators'=>array(
                    array(
                        'name'=>'NotEmpty',
                        'options'=>array(
                            'messages'=>array(
                                NotEmpty::IS_EMPTY=>'Campo obrigatório.'
                            ),
                        ),
                    ),
                    array(
                        'name'=>'StringLength',
                        'options'=>array(
                            'encoding'=>'UTF-8',
                            /*'min'=>3,
                            'max'=>100,
                            'messages'=>array(
                                StringLength::TOO_SHORT=>'Mínimo de caracteres aceitáveis %min%.',
                                StringLength::TOO_LONG=>'Máximo de caracteres aceitáveis %max%.',
                            ),*/
                        ),
                    ),
                ),
            ));
            // input filter for the subject field
            $inputFilter->add(array(
                'name'=>'materia_aluno',
                'required'=>true,
                'filters'=>array(
                    array('name'=>'StripTags'),# strips XML/HTML from the string
                    array('name'=>'StringTrim'),# trims leading/trailing whitespace
                    //array('name'=>'StringToUpper'),# uppercases the string
                ),
                'validators'=>array(
                    array(
                        'name'=>'NotEmpty',
                        'options'=>array(
                            'messages'=>array(
                                NotEmpty::IS_EMPTY=>'Campo obrigatório.'
                            ),
                        ),
                    ),
                    array(
                        'name'=>'StringLength',
                        'options'=>array(
                            'encoding'=>'UTF-8',
                            /*'min'=>3,
                            'max'=>100,
                            'messages'=>array(
                                StringLength::TOO_SHORT=>'Mínimo de caracteres aceitáveis %min%.',
                                StringLength::TOO_LONG=>'Máximo de caracteres aceitáveis %max%.',
                            ),*/
                        ),
                    ),
                ),
            ));
            // input filter for the teacher field
            $inputFilter->add(array(
                'name'=>'professor_aluno',
                'required'=>true,
                'filters'=>array(
                    array('name'=>'StripTags'),# strips XML/HTML from the string
                    array('name'=>'StringTrim'),# trims leading/trailing whitespace
                    //array('name'=>'StringToUpper'),# uppercases the string
                ),
                'validators'=>array(
                    array(
                        'name'=>'NotEmpty',
                        'options'=>array(
                            'messages'=>array(
                                NotEmpty::IS_EMPTY=>'Campo obrigatório.'
                            ),
                        ),
                    ),
                    array(
                        'name'=>'StringLength',
                        'options'=>array(
                            'encoding'=>'UTF-8',
                            /*'min'=>3,
                            'max'=>100,
                            'messages'=>array(
                                StringLength::TOO_SHORT=>'Mínimo de caracteres aceitáveis %min%.',
                                StringLength::TOO_LONG=>'Máximo de caracteres aceitáveis %max%.',
                            ),*/
                        ),
                    ),
                ),
            ));
            // exam grades: optional, no filtering (decimal values allowed)
            $inputFilter->add(array(
                'name'=>'prova1',
                'required'=>false,
                /*'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),*/
            ));
            $inputFilter->add(array(
                'name'=>'prova2',
                'required'=>false,
                /*'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),*/
            ));
            $inputFilter->add(array(
                'name'=>'prova3',
                'required'=>false,
                /*'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),*/
            ));
            $inputFilter->add(array(
                'name'=>'prova4',
                'required'=>false,
                /*'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),*/
            ));
            // assignment grades: optional, no filtering
            $inputFilter->add(array(
                'name'=>'trabalho1',
                'required'=>false,
                /*'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),*/
            ));
            $inputFilter->add(array(
                'name'=>'trabalho2',
                'required'=>false,
                /*'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),*/
            ));
            $inputFilter->add(array(
                'name'=>'trabalho3',
                'required'=>false,
                /*'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),*/
            ));
            $inputFilter->add(array(
                'name'=>'trabalho4',
                'required'=>false,
                /*'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),*/
            ));
            // absences: optional, coerced to integer
            $inputFilter->add(array(
                'name'=>'faltas',
                'required'=>false,
                'filters'=>array(
                    array('name'=>'Int'),# casts string to integer
                ),
            ));
            // student status: required, non-empty
            $inputFilter->add(array(
                'name' => 'situacao_aluno',
                'required' => true,
                /*'filters' => array(
                    array('name' => 'Int'),
                ),*/
                'validators' => array(
                    array(
                        'name' => 'NotEmpty',
                        'options' => array(
                            'messages' => array(
                                NotEmpty::IS_EMPTY => 'Campo obrigatório.'
                            ),
                        ),
                    ),
                ),
            ));
            // NOTE(review): this setData() call looks like leftover sample /
            // debug data (keys 'float'/'integer' do not match any input above
            // and are overwritten when the controller calls setData with the
            // real form data) — confirm it can be removed.
            $inputFilter->setData(array(
                'float'    => '1.234,56', // (float) 1.234; should be 1,234.56 (it_IT to en_US)
                'integer'  => '1.234',    // (float) 1.234; should be 1,234 (it_IT to en_US)
                'nfloat'   => '-1.234,56', // (float) -1.234; should be -1,234.56 (it_IT to en_US)
                'ninteger' => '-1.234'    // (float) -1.234; should be -1,234 (it_IT to en_US)
            ));
            $this->inputFilter=$inputFilter;
        }
        return $this->inputFilter;
    }
} | breakx/SistemaEscolar | module/Aluno/src/Aluno/Model/Aluno.php | PHP | bsd-3-clause | 15185
// Copyright 2015 Google Inc. All Rights Reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package acme
import (
"crypto"
"crypto/rand"
"crypto/rsa"
"crypto/sha256"
"encoding/base64"
"encoding/json"
"fmt"
"math/big"
)
// jwsEncodeJSON signs claimset using provided key and a nonce.
// The result is serialized in JSON format.
// See https://tools.ietf.org/html/rfc7515#section-7.
// jwsEncodeJSON signs claimset using provided key and a nonce.
// The result is serialized in JSON format.
// See https://tools.ietf.org/html/rfc7515#section-7.
func jwsEncodeJSON(claimset interface{}, key *rsa.PrivateKey, nonce string) ([]byte, error) {
	// Protected header carries the algorithm, the embedded public JWK and
	// the anti-replay nonce; it is signed together with the payload.
	header := fmt.Sprintf(`{"alg":"RS256","jwk":%s,"nonce":%q}`, jwkEncode(&key.PublicKey), nonce)
	protected := base64.RawURLEncoding.EncodeToString([]byte(header))
	body, err := json.Marshal(claimset)
	if err != nil {
		return nil, err
	}
	payload := base64.RawURLEncoding.EncodeToString(body)
	// Sign SHA-256(protected || "." || payload) with PKCS#1 v1.5 (RS256).
	digest := sha256.Sum256([]byte(protected + "." + payload))
	sig, err := rsa.SignPKCS1v15(rand.Reader, key, crypto.SHA256, digest[:])
	if err != nil {
		return nil, err
	}
	// Flattened JWS JSON serialization.
	jws := struct {
		Protected string `json:"protected"`
		Payload   string `json:"payload"`
		Sig       string `json:"signature"`
	}{
		Protected: protected,
		Payload:   payload,
		Sig:       base64.RawURLEncoding.EncodeToString(sig),
	}
	return json.Marshal(&jws)
}
// jwkEncode encodes public part of an RSA key into a JWK.
// The result is also suitable for creating a JWK thumbprint.
func jwkEncode(pub *rsa.PublicKey) string {
n := pub.N
e := big.NewInt(int64(pub.E))
// fields order is important
// see https://tools.ietf.org/html/rfc7638#section-3.3 for details
return fmt.Sprintf(`{"e":"%s","kty":"RSA","n":"%s"}`,
base64.RawURLEncoding.EncodeToString(e.Bytes()),
base64.RawURLEncoding.EncodeToString(n.Bytes()),
)
}
// JWKThumbprint creates a JWK thumbprint out of pub
// as specified in https://tools.ietf.org/html/rfc7638.
func JWKThumbprint(pub *rsa.PublicKey) string {
	// SHA-256 over the canonical JWK, then base64url without padding.
	digest := sha256.Sum256([]byte(jwkEncode(pub)))
	return base64.RawURLEncoding.EncodeToString(digest[:])
}
| upmc-enterprises/kubernetes-secret-manager | vendor/github.com/google/acme/jws.go | GO | bsd-3-clause | 2,473 |
<?php
// Auto-restart extension controller.  $params, $arrRow and $arrProcessInfo
// are provided by the including CuteFlow page.
// NOTE(review): the values unpacked below are later interpolated into SQL
// strings without escaping — confirm they can only originate from trusted
// database rows, otherwise this script is SQL-injectable.
$hookValue = $params['hookValue'];
$oldCfId = $arrRow['nID'];
$senderId = $arrRow['nSenderId'];
$circulationTitle = $arrRow['strName'];
$mailinglistId = $arrRow['nMailingListId'];
$endAction = $arrRow['nEndAction'] - $hookValue;
$cpId = $arrProcessInfo['nID'];
$oldChId = $arrProcessInfo['nCirculationHistoryId'];
$slotId = $arrProcessInfo['nSlotId'];
$userId = $arrProcessInfo['nUserId'];
$dateInProcessSince = $arrProcessInfo['dateInProcessSince'];
$decissionState = $arrProcessInfo['nDecissionState'];
$dateDecission = $arrProcessInfo['dateDecission'];
/**
 * Fetches one cf_inputfield row by its numeric primary key.
 *
 * @param int|false $inputfieldId primary key (nID) of the input field
 * @return array|null the row as an associative array, or null when the id is
 *                    falsy, the query fails, or no row matches
 */
function arGetInputfield($inputfieldId = false)
{
    if ($inputfieldId)
    {
        // nID is numeric: cast to int so a crafted string id cannot break
        // out of the quoted literal (SQL injection hardening).
        $inputfieldId = (int) $inputfieldId;
        $query = "SELECT * FROM cf_inputfield WHERE nID = '$inputfieldId' LIMIT 1;";
        $result = mysql_query($query);
        $result = @mysql_fetch_array($result, MYSQL_ASSOC);
        if ($result) return $result;
    }
}
// Look up the history row of the circulation being restarted.
// read cf_circulationhistory
$query = "SELECT * FROM cf_circulationhistory WHERE nCirculationFormId = '$oldCfId' LIMIT 1;";
$result = mysql_query($query);
$circulationHistory = @mysql_fetch_array($result, MYSQL_ASSOC);
// Clone the circulation form.
// NOTE(review): $circulationTitle and strAdditionalText below are interpolated
// unescaped into INSERT statements — a title containing a quote will break the
// query (and is injectable); consider mysql_real_escape_string().
// write table cf_circulationform
$query = "INSERT INTO cf_circulationform values (null, '$senderId', '$circulationTitle', '$mailinglistId', 0, '$endAction', 0)";
$result = @mysql_query($query);
// get the circulationform Id
// NOTE(review): MAX(nID) instead of mysql_insert_id() is racy under
// concurrent inserts — confirm single-writer assumption.
$query = "SELECT MAX(nID) as cfId FROM cf_circulationform WHERE bDeleted = 0";
$result = @mysql_query($query);
$row = @mysql_fetch_array($result, MYSQL_ASSOC);
$cfId = $row['cfId'];
// Create a fresh history entry for the cloned form.
// write table cf_circulationhistory
$query = "INSERT INTO cf_circulationhistory values (null, 1, ".time().", '".$circulationHistory['strAdditionalText']."', '$cfId')";
$result = @mysql_query($query);
// get the circulationhistory Id
$query = "SELECT MAX(nID) as chId FROM cf_circulationhistory";
$result = @mysql_query($query);
$row = @mysql_fetch_array($result, MYSQL_ASSOC);
$chId = $row['chId'];
// Re-create all field values of the old circulation, resetting each one to
// the input field's configured default (strStandardValue).
$fieldvalues = $circulation->getFieldValues($oldCfId, $oldChId);
foreach ($fieldvalues as $key => $value)
{
    $inputfieldId = $value['nInputFieldId'];
    $inputfield = arGetInputfield($inputfieldId);
    $fieldValue = $inputfield['strStandardValue'];
    // keys look like "<something>_<slotId>"; extract the slot part
    $split = explode('_', $key);
    $slotId = $split[1];
    $query = "INSERT INTO cf_fieldvalue values (null, '$inputfieldId', '$fieldValue', '$slotId', '$cfId', '$chId')";
    $result = @mysql_query($query);
}
// send the circulation to the first receiver
require_once '../pages/send_circulation.php';
$arrNextUser = getNextUserInList(-1, $mailinglistId, -1);
sendToUser($arrNextUser[0], $cfId, $arrNextUser[1], 0, $chId);
?> | UMD-SEAM/bugbox | framework/Targets/cuteflow_2_11_2/application/extensions/autoRestart/ar_controller.php | PHP | bsd-3-clause | 2717
/*
* This file is part of Maliit Plugins
*
* Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies). All rights reserved.
* Copyright (C) 2012-2013 Canonical Ltd
*
* Contact: Mohammad Anwari <Mohammad.Anwari@nokia.com>
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
* Neither the name of Nokia Corporation nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "setup.h"
#include "abstracttexteditor.h"
#include "abstractfeedback.h"
#include "models/layout.h"
#include "models/key.h"
#include "models/wordcandidate.h"
#include "models/text.h"
#include "logic/layouthelper.h"
#include "logic/layoutupdater.h"
#include "logic/eventhandler.h"
namespace MaliitKeyboard {
namespace Setup {
// Wires up one event-handler / layout-updater pair with the given text
// editor. Feedback wiring is still missing (see TODO below).
void connectAll(Logic::EventHandler *event_handler,
                Logic::LayoutUpdater *updater,
                AbstractTextEditor *editor)
{
    // TODO: Connect event handler to feedback.
    connectEventHandlerToTextEditor(event_handler, editor);
    connectLayoutUpdaterToTextEditor(updater, editor);
}
// Forwards key press/release/enter/exit events from the event handler to the
// text editor's matching slots.
void connectEventHandlerToTextEditor(Logic::EventHandler *event_handler,
                                     AbstractTextEditor *editor)
{
    QObject::connect(event_handler, SIGNAL(keyPressed(Key)),
                     editor,        SLOT(onKeyPressed(Key)));
    QObject::connect(event_handler, SIGNAL(keyReleased(Key)),
                     editor,        SLOT(onKeyReleased(Key)));
    QObject::connect(event_handler, SIGNAL(keyEntered(Key)),
                     editor,        SLOT(onKeyEntered(Key)));
    QObject::connect(event_handler, SIGNAL(keyExited(Key)),
                     editor,        SLOT(onKeyExited(Key)));
}
// Connects word-candidate and user-dictionary traffic from the layout updater
// to the editor, and preedit/candidate/auto-caps state notifications flowing
// back from the editor to the updater.
void connectLayoutUpdaterToTextEditor(Logic::LayoutUpdater *updater,
                                      AbstractTextEditor *editor)
{
    QObject::connect(updater, SIGNAL(wordCandidateSelected(QString)),
                     editor,  SLOT(replaceAndCommitPreedit(QString)));
    QObject::connect(updater, SIGNAL(addToUserDictionary()),
                     editor,  SLOT(showUserCandidate()));
    QObject::connect(updater, SIGNAL(userCandidateSelected(QString)),
                     editor,  SLOT(addToUserDictionary(QString)));
    QObject::connect(editor,  SIGNAL(preeditEnabledChanged(bool)),
                     updater, SLOT(setWordRibbonVisible(bool)));
    QObject::connect(editor,  SIGNAL(wordCandidatesChanged(WordCandidateList)),
                     updater, SLOT(onWordCandidatesChanged(WordCandidateList)));
    // Signal-to-signal connection: auto-caps is simply re-emitted by the updater.
    QObject::connect(editor,  SIGNAL(autoCapsActivated()),
                     updater, SIGNAL(autoCapsActivated()));
}
}} // namespace Setup, MaliitKeyboard
| develersrl/maliit-demo-qml | maliit-keyboard/view/setup.cpp | C++ | bsd-3-clause | 3,998 |
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { Injector } from '@angular/core';
import { fakeAsync, getTestBed, TestBed } from '@angular/core/testing';
import { ActivatedRoute, ActivatedRouteSnapshot, Router } from '@angular/router';
import { RouterTestingModule } from '@angular/router/testing';
import { TranslateLoader, TranslateModule, TranslateParser, TranslateService } from '@ngx-translate/core';
import { Store } from '@ngxs/store';
import { ApplicationService } from 'app/service/application/application.service';
import { AuthenticationService } from 'app/service/authentication/authentication.service';
import { MonitoringService } from 'app/service/monitoring/monitoring.service';
import { RouterService } from 'app/service/router/router.service';
import { UserService } from 'app/service/user/user.service';
import { WorkflowRunService } from 'app/service/workflow/run/workflow.run.service';
import { WorkflowService } from 'app/service/workflow/workflow.service';
import { AddPipeline } from 'app/store/pipelines.action';
import { NgxsStoreModule } from 'app/store/store.module';
import { of } from 'rxjs';
import 'rxjs/add/observable/of';
import { Application } from '../../../model/application.model';
import { Pipeline } from '../../../model/pipeline.model';
import { Project } from '../../../model/project.model';
import { EnvironmentService } from '../../../service/environment/environment.service';
import { NavbarService } from '../../../service/navbar/navbar.service';
import { PipelineService } from '../../../service/pipeline/pipeline.service';
import { ProjectService } from '../../../service/project/project.service';
import { ProjectStore } from '../../../service/project/project.store';
import { VariableService } from '../../../service/variable/variable.service';
import { SharedModule } from '../../../shared/shared.module';
import { ToastService } from '../../../shared/toast/ToastService';
import { PipelineModule } from '../pipeline.module';
import { PipelineAddComponent } from './pipeline.add.component';
// Unit test suite for PipelineAddComponent. Boots a TestBed with the real
// pipeline module and NGXS store, but stubs the router, toast service and
// activated route (see the Mock* classes below).
describe('CDS: Pipeline Add Component', () => {
    let injector: Injector;
    let store: Store;
    beforeEach(async () => {
        await TestBed.configureTestingModule({
            declarations: [
            ],
            providers: [
                ApplicationService,
                EnvironmentService,
                ProjectStore,
                ProjectService,
                MonitoringService,
                { provide: ActivatedRoute, useClass: MockActivatedRoutes },
                { provide: Router, useClass: MockRouter },
                { provide: ToastService, useClass: MockToast },
                TranslateService,
                TranslateLoader,
                TranslateParser,
                NavbarService,
                PipelineService,
                EnvironmentService,
                VariableService,
                WorkflowService,
                WorkflowRunService,
                UserService,
                RouterService,
                AuthenticationService
            ],
            imports: [
                PipelineModule,
                NgxsStoreModule,
                RouterTestingModule.withRoutes([]),
                SharedModule,
                TranslateModule.forRoot(),
                HttpClientTestingModule
            ]
        }).compileComponents();
        injector = getTestBed();
        store = injector.get(Store);
    });
    afterEach(() => {
        // Drop references so every spec starts from a fresh injector/store.
        injector = undefined;
        store = undefined;
    });
    it('should create an empty pipeline', fakeAsync(() => {
        // Create component
        let fixture = TestBed.createComponent(PipelineAddComponent);
        let component = fixture.debugElement.componentInstance;
        expect(component).toBeTruthy();
        // Arrange: a project with two applications and a named new pipeline.
        let project: Project = new Project();
        project.key = 'key1';
        project.applications = new Array<Application>();
        let app1 = new Application();
        app1.name = 'app1';
        let app2 = new Application();
        app2.name = 'app2';
        project.applications.push(app1, app2);
        fixture.componentInstance.project = project;
        fixture.componentInstance.newPipeline = new Pipeline();
        fixture.componentInstance.newPipeline.name = 'myPip';
        // Act + assert: creating must dispatch AddPipeline for this project.
        spyOn(store, 'dispatch').and.callFake(() => of(null));
        fixture.componentInstance.createPipeline();
        expect(store.dispatch).toHaveBeenCalledWith(new AddPipeline({
            projectKey: 'key1',
            pipeline: fixture.componentInstance.newPipeline
        }));
    }));
});
// Toast stub: swallows success notifications emitted by the component.
class MockToast {
    success(title: string, msg: string) {
    }
}
// Router stub: navigation becomes a no-op.
class MockRouter {
    public navigate() {
    }
}
// ActivatedRoute stub carrying the route params, query params and snapshot
// data (a project with key 'key1') the component reads at construction time.
class MockActivatedRoutes extends ActivatedRoute {
    constructor() {
        super();
        this.params = of({ key: 'key1', appName: 'app1' });
        this.queryParams = of({ key: 'key1', appName: 'app1' });
        this.snapshot = new ActivatedRouteSnapshot();
        let project = new Project();
        project.key = 'key1';
        this.snapshot.data = {
            project
        };
        this.data = of({ project });
    }
}
| ovh/cds | ui/src/app/views/pipeline/add/pipeline.add.component.spec.ts | TypeScript | bsd-3-clause | 5,196 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "extensions/browser/api/document_scan/document_scan_interface_chromeos.h"
#include <string>
#include <vector>
#include "base/bind.h"
#include "base/run_loop.h"
#include "base/test/task_environment.h"
#include "chromeos/dbus/dbus_thread_manager.h"
#include "chromeos/dbus/fake_lorgnette_manager_client.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/cros_system_api/dbus/service_constants.h"
namespace extensions {
namespace api {
// Tests for the Chrome OS document scan interface used by the documentScan
// extension API, exercised against a fake lorgnette D-Bus client.
class DocumentScanInterfaceChromeosTest : public testing::Test {
 public:
  DocumentScanInterfaceChromeosTest() = default;
  ~DocumentScanInterfaceChromeosTest() override = default;
  // Each test gets a fresh fake D-Bus layer.
  void SetUp() override { chromeos::DBusThreadManager::Initialize(); }
  void TearDown() override { chromeos::DBusThreadManager::Shutdown(); }
  // Convenience accessor for the fake client installed by DBusThreadManager,
  // used to seed scanner entries and canned scan data.
  chromeos::FakeLorgnetteManagerClient* GetLorgnetteManagerClient() {
    return static_cast<chromeos::FakeLorgnetteManagerClient*>(
        chromeos::DBusThreadManager::Get()->GetLorgnetteManagerClient());
  }
 protected:
  base::test::TaskEnvironment task_environment_;
  DocumentScanInterfaceChromeos scan_interface_;
};
// ListScanners reports the scanner seeded into the fake client, with PNG as
// the advertised image MIME type and no error.
TEST_F(DocumentScanInterfaceChromeosTest, ListScanners) {
  // Use constexpr const char* instead of constexpr char[] because implicit
  // conversion in lambda doesn't work.
  constexpr const char* kScannerName = "Monet";
  constexpr const char* kScannerManufacturer = "Jacques-Louis David";
  constexpr const char* kScannerModel = "Le Havre";
  constexpr const char* kScannerType = "Impressionism";
  GetLorgnetteManagerClient()->AddScannerTableEntry(
      kScannerName,
      {{lorgnette::kScannerPropertyManufacturer, kScannerManufacturer},
       {lorgnette::kScannerPropertyModel, kScannerModel},
       {lorgnette::kScannerPropertyType, kScannerType}});
  base::RunLoop run_loop;
  scan_interface_.ListScanners(base::BindOnce(
      [](base::RunLoop* run_loop,
         const std::vector<DocumentScanInterface::ScannerDescription>&
             descriptions,
         const std::string& error) {
        run_loop->Quit();
        ASSERT_EQ(1u, descriptions.size());
        // Wrap by std::string explicitly, because const reference of the
        // constexpr in the enclosing scope, which EXPECT_EQ macro uses,
        // cannot be taken.
        EXPECT_EQ(std::string(kScannerName), descriptions[0].name);
        EXPECT_EQ(std::string(kScannerManufacturer),
                  descriptions[0].manufacturer);
        EXPECT_EQ(std::string(kScannerModel), descriptions[0].model);
        EXPECT_EQ(std::string(kScannerType), descriptions[0].scanner_type);
        EXPECT_EQ("image/png", descriptions[0].image_mime_type);
        EXPECT_EQ("", error);
      },
      &run_loop));
  run_loop.Run();
}
// Scanning an unknown scanner (no data seeded) fails with an error and
// produces no image.
TEST_F(DocumentScanInterfaceChromeosTest, ScanFailure) {
  base::RunLoop run_loop;
  scan_interface_.Scan(
      "Monet", DocumentScanInterface::kScanModeColor, 4096,
      base::BindOnce(
          [](base::RunLoop* run_loop, const std::string& scanned_image,
             const std::string& mime_type, const std::string& error) {
            run_loop->Quit();
            EXPECT_EQ("", scanned_image);
            EXPECT_EQ("", mime_type);
            EXPECT_EQ("Image scan failed", error);
          },
          &run_loop));
  run_loop.Run();
}
// A seeded scan returns the canned bytes as a base64 PNG data URL.
TEST_F(DocumentScanInterfaceChromeosTest, ScanSuccess) {
  constexpr char kScannerName[] = "Monet";
  constexpr int kResolution = 4096;
  GetLorgnetteManagerClient()->AddScanData(
      kScannerName,
      chromeos::LorgnetteManagerClient::ScanProperties{
          lorgnette::kScanPropertyModeColor, kResolution},
      "PrettyPicture");
  base::RunLoop run_loop;
  scan_interface_.Scan(
      kScannerName, DocumentScanInterface::kScanModeColor, kResolution,
      base::BindOnce(
          [](base::RunLoop* run_loop, const std::string& scanned_image,
             const std::string& mime_type, const std::string& error) {
            run_loop->Quit();
            // Data URL plus base64 representation of "PrettyPicture".
            EXPECT_EQ("data:image/png;base64,UHJldHR5UGljdHVyZQ==",
                      scanned_image);
            EXPECT_EQ("image/png", mime_type);
            EXPECT_EQ("", error);
          },
          &run_loop));
  run_loop.Run();
}
} // namespace api
} // namespace extensions
| endlessm/chromium-browser | extensions/browser/api/document_scan/document_scan_interface_chromeos_unittest.cc | C++ | bsd-3-clause | 4,540 |
/*============================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center (DKFZ)
All rights reserved.
Use of this source code is governed by a 3-clause BSD license that can be
found in the LICENSE file.
============================================================================*/
#include "QmitkStoreSCPLauncher.h"
#include <QMessageBox>
#include <QProcessEnvironment>
#include <mitkLogMacros.h>
#include <fstream>
#include <iostream>
#include <QFile>
#include <QTextStream>
#include <QIODevice>
#include <QDir>
#include <QDirIterator>
#include <QCoreApplication>
#include "org_mitk_gui_qt_dicom_config.h"
// Creates the launcher: merges stdout/stderr of the child process into one
// channel, hooks up error/state/output notifications, and computes the
// storescp argument list from the builder.
QmitkStoreSCPLauncher::QmitkStoreSCPLauncher(QmitkStoreSCPLauncherBuilder* builder)
    : m_StoreSCP(new QProcess())
{
    m_StoreSCP->setProcessChannelMode(QProcess::MergedChannels);
    connect( m_StoreSCP, SIGNAL(error(QProcess::ProcessError)),this, SLOT(OnProcessError(QProcess::ProcessError)));
    connect( m_StoreSCP, SIGNAL(stateChanged(QProcess::ProcessState)),this, SLOT(OnStateChanged(QProcess::ProcessState)));
    connect( m_StoreSCP, SIGNAL(readyReadStandardOutput()),this, SLOT(OnReadyProcessOutput()));
    SetArgumentList(builder);
}
// Disconnects all signals first (so shutdown does not fire notifications),
// then closes the child process, waits up to one second for it to finish,
// and deletes it.
QmitkStoreSCPLauncher::~QmitkStoreSCPLauncher()
{
    disconnect( m_StoreSCP, SIGNAL(error(QProcess::ProcessError)),this, SLOT(OnProcessError(QProcess::ProcessError)));
    disconnect( m_StoreSCP, SIGNAL(stateChanged(QProcess::ProcessState)),this, SLOT(OnStateChanged(QProcess::ProcessState)));
    disconnect( m_StoreSCP, SIGNAL(readyReadStandardOutput()),this, SLOT(OnReadyProcessOutput()));
    m_StoreSCP->close();
    m_StoreSCP->waitForFinished(1000);
    delete m_StoreSCP;
}
// Resolves the storescp executable path and launches it asynchronously with
// the argument list prepared in the constructor.
void QmitkStoreSCPLauncher::StartStoreSCP()
{
    FindPathToStoreSCP();
    m_StoreSCP->start(m_PathToStoreSCP,m_ArgumentList);
}
// Locates the storescp binary. Prefers the copy next to the application
// executable; falls back to the build-time DCMTK location (DCMTK_STORESCP)
// used during development, where storescp is not copied into the bin dir.
// Only computes the path once (skipped when m_PathToStoreSCP is already set).
void QmitkStoreSCPLauncher::FindPathToStoreSCP()
{
    QString appPath= QCoreApplication::applicationDirPath();
    if(m_PathToStoreSCP.isEmpty())
    {
        QString fileName;
#ifdef _WIN32
        fileName = "/storescp.exe";
#else
        fileName = "/storescp";
#endif
        m_PathToStoreSCP = appPath + fileName;
        //In developement the storescp isn't copied into bin directory
        if(!QFile::exists(m_PathToStoreSCP))
        {
            m_PathToStoreSCP = static_cast<QString>(DCMTK_STORESCP);
        }
    }
}
// Parses buffered storescp output line by line:
//  - a line containing "E: " is treated as a fatal error: the tag is
//    stripped, the remainder becomes m_ErrorText, and parsing stops;
//  - lines containing "I: storing DICOM file: " yield stored-file paths,
//    normalized to forward slashes with carriage returns removed, which are
//    emitted as one batch via SignalStartImport.
void QmitkStoreSCPLauncher::OnReadyProcessOutput()
{
    QString out(m_StoreSCP->readAllStandardOutput());
    QStringList allDataList,importList;
    allDataList = out.split("\n",QString::SkipEmptyParts);
    QStringListIterator it(allDataList);
    while(it.hasNext())
    {
        QString output = it.next();
        if (output.contains("E: "))
        {
            // Strip the severity tag and surface the rest as the error text.
            output.replace("E: ","");
            m_ErrorText = output;
            OnProcessError(QProcess::UnknownError);
            return;
        }
        if(output.contains("I: storing DICOM file: "))
        {
            output.replace("I: storing DICOM file: ","");
            output.replace("\\", "/"); // cannot handle backslashes
            output.replace("\r", ""); // cannot handle carriage return
            importList += output;
        }
    }
    if(!importList.isEmpty())
    {
        emit SignalStartImport(importList);
    }
}
// Reports a QProcess error to listeners.
//
// Maps the error code to a human-readable prefix, prepends it to any error
// text already accumulated (e.g. by OnReadyProcessOutput), appends Qt's own
// errorString(), emits SignalStoreSCPError, and clears the buffer so the
// next error starts fresh. Behavior is identical for every error code apart
// from the prefix, so the switch only selects the prefix (the original
// repeated the full emit sequence in each case).
void QmitkStoreSCPLauncher::OnProcessError(QProcess::ProcessError err)
{
    QString prefix;
    switch(err)
    {
    case QProcess::FailedToStart:
        prefix = "Failed to start storage provider: ";
        break;
    case QProcess::Crashed:
        prefix = "Storage provider closed: ";
        break;
    case QProcess::Timedout:
        prefix = "Storage provider timeout: ";
        break;
    case QProcess::WriteError:
        prefix = "Storage provider write error: ";
        break;
    case QProcess::ReadError:
        prefix = "Storage provider read error: ";
        break;
    case QProcess::UnknownError:
    default:
        // Unknown and unexpected codes share the same message, as before.
        prefix = "Storage provider unknown error: ";
        break;
    }
    m_ErrorText.prepend(prefix);
    m_ErrorText.append(m_StoreSCP->errorString());
    emit SignalStoreSCPError(m_ErrorText);
    m_ErrorText.clear();
}
// Translates QProcess state transitions into human-readable status messages
// and broadcasts them via SignalStatusOfStoreSCP. The buffer is cleared
// after each emission.
void QmitkStoreSCPLauncher::OnStateChanged(QProcess::ProcessState status)
{
    switch(status)
    {
    case QProcess::NotRunning:
        m_StatusText.prepend("Storage provider not running!");
        emit SignalStatusOfStoreSCP(m_StatusText);
        m_StatusText.clear();
        break;
    case QProcess::Starting:
        m_StatusText.prepend("Starting storage provider!");
        emit SignalStatusOfStoreSCP(m_StatusText);
        m_StatusText.clear();
        break;
    case QProcess::Running:
        // Argument-list layout comes from SetArgumentList():
        // index 0 = port, index 2 = AE title. The chained prepends build
        // "Storage provider running!  AET: <aet> Port: <port>".
        m_StatusText.prepend(m_ArgumentList[0]).prepend(" Port: ").prepend(m_ArgumentList[2]).prepend(" AET: ").prepend("Storage provider running! ");
        emit SignalStatusOfStoreSCP(m_StatusText);
        m_StatusText.clear();
        break;
    default:
        m_StatusText.prepend("Storage provider unknown error!");
        emit SignalStatusOfStoreSCP(m_StatusText);
        m_StatusText.clear();
        break;
    }
}
// Assembles the storescp command line from the builder in this order:
//   <port> -aet <AE title> <transfer syntax> <other network options> <mode> -od <output dir>
// OnStateChanged() relies on this ordering (index 0 = port, index 2 = AE title).
void QmitkStoreSCPLauncher::SetArgumentList(QmitkStoreSCPLauncherBuilder* builder)
{
    m_ArgumentList << *builder->GetPort() << QString("-aet") <<*builder->GetAETitle() << *builder->GetTransferSyntax()
        << *builder->GetOtherNetworkOptions() << *builder->GetMode() << QString("-od") << *builder->GetOutputDirectory();
}
// Returns the storescp arguments concatenated into a single string, each
// argument preceded by one space (so the result starts with a space when
// the list is non-empty).
QString QmitkStoreSCPLauncher::ArgumentListToQString()
{
    QString joined;
    for (const QString &argument : m_ArgumentList)
    {
        joined.append(' ');
        joined.append(argument);
    }
    return joined;
}
| fmilano/mitk | Plugins/org.mitk.gui.qt.dicom/src/internal/QmitkStoreSCPLauncher.cpp | C++ | bsd-3-clause | 6,664 |
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview
* 'settings-users-add-user-dialog' is the dialog shown for adding new allowed
* users to a ChromeOS device.
*/
(function() {
/**
 * Regular expression for adding a user where the string provided is just
 * the part before the "@".
 * Email alias only, assuming it's a gmail address.
 * e.g. 'john'
 * @type {!RegExp}
 */
const NAME_ONLY_REGEX =
    new RegExp('^\\s*([\\w\\.!#\\$%&\'\\*\\+-\\/=\\?\\^`\\{\\|\\}~]+)\\s*$');
/**
 * Regular expression for adding a user where the string provided is a full
 * email address.
 * e.g. 'john@chromium.org'
 * @type {!RegExp}
 */
const EMAIL_REGEX = new RegExp(
    '^\\s*([\\w\\.!#\\$%&\'\\*\\+-\\/=\\?\\^`\\{\\|\\}~]+)@' +
    '([A-Za-z0-9\-]{2,63}\\..+)\\s*$');
/** @enum {number} */
const UserAddError = {
  NO_ERROR: 0,
  INVALID_EMAIL: 1,
  USER_EXISTS: 2,
};
Polymer({
  is: 'settings-users-add-user-dialog',

  behaviors: [I18nBehavior],

  properties: {
    /** @private Current validation error shown below the input. */
    errorCode_: {
      type: Number,
      value: UserAddError.NO_ERROR,
    },

    /** @private True when the input parses as an alias or full address. */
    isEmail_: {
      type: Boolean,
      value: false,
    },

    /** @private True while the input field is empty. */
    isEmpty_: {
      type: Boolean,
      value: true,
    },
  },

  // Extension-API handle held on the element — presumably so tests can stub
  // it; verify before relying on that.
  usersPrivate_: chrome.usersPrivate,

  /** Resets the input and shows the dialog. */
  open() {
    this.$.addUserInput.value = '';
    this.onInput_();
    this.$.dialog.showModal();
    // Set to valid initially since the user has not typed anything yet.
    this.$.addUserInput.invalid = false;
  },

  /**
   * Normalizes the input to a full email (bare aliases get "@gmail.com"),
   * rejects duplicates, and adds the address to the allowed-user list.
   * @private
   */
  addUser_() {
    // May be submitted by the Enter key even if the input value is invalid.
    if (this.$.addUserInput.disabled) {
      return;
    }
    const input = this.$.addUserInput.value;
    const nameOnlyMatches = NAME_ONLY_REGEX.exec(input);
    let userEmail;
    if (nameOnlyMatches) {
      userEmail = nameOnlyMatches[1] + '@gmail.com';
    } else {
      const emailMatches = EMAIL_REGEX.exec(input);
      // Assuming the input validated, one of these two must match.
      assert(emailMatches);
      userEmail = emailMatches[1] + '@' + emailMatches[2];
    }
    this.usersPrivate_.isWhitelistedUser(userEmail, doesUserExist => {
      if (doesUserExist) {
        // This user email had been saved previously
        this.errorCode_ = UserAddError.USER_EXISTS;
        return;
      }
      this.$.dialog.close();
      this.usersPrivate_.addWhitelistedUser(
          userEmail,
          /* callback */ function(success) {});
      this.$.addUserInput.value = '';
    });
  },

  /**
   * Whether the "add" button should be enabled.
   * @return {boolean}
   * @private
   */
  canAddUser_() {
    return this.isEmail_ && !this.isEmpty_;
  },

  /** @private */
  onCancelTap_() {
    this.$.dialog.cancel();
  },

  /** @private Re-validates on every keystroke and updates the error state. */
  onInput_() {
    const input = this.$.addUserInput.value;
    this.isEmail_ = NAME_ONLY_REGEX.test(input) || EMAIL_REGEX.test(input);
    this.isEmpty_ = input.length == 0;
    if (!this.isEmail_ && !this.isEmpty_) {
      this.errorCode_ = UserAddError.INVALID_EMAIL;
      return;
    }
    this.errorCode_ = UserAddError.NO_ERROR;
  },

  /**
   * @private
   * @return {boolean}
   */
  shouldShowError_() {
    return this.errorCode_ != UserAddError.NO_ERROR;
  },

  /**
   * Maps an error code to its user-visible message. Takes the code as a
   * parameter (rather than reading this.errorCode_) so the HTML computed
   * binding re-evaluates when the property changes.
   * @private
   * @return {string}
   */
  getErrorString_(errorCode_) {
    if (errorCode_ == UserAddError.USER_EXISTS) {
      return this.i18n('userExistsError');
    }
    // TODO errorString for UserAddError.INVALID_EMAIL crbug/1007481
    return '';
  },
});
})();
| endlessm/chromium-browser | chrome/browser/resources/settings/chromeos/os_people_page/users_add_user_dialog.js | JavaScript | bsd-3-clause | 3,625 |
<?php
namespace frontend\models;
use Yii;
/**
* This is the model class for collection "invite".
*
* @property \MongoId|string $_id
* @property mixed $user_id
* @property mixed $patient_id
* @property mixed $description
* @property mixed $status
* @property mixed $msg_status
* @property mixed $created_at
*/
class Invite extends \yii\mongodb\ActiveRecord
{
    /**
     * @inheritdoc
     */
    // Display-only attributes (not part of the Mongo document); presumably
    // filled from related user/patient records by callers — verify.
    public $patientname;
    public $doctorname;
    public static function collectionName()
    {
        // Database "medieasy", collection "invite".
        return ['medieasy', 'invite'];
    }
    /**
     * @inheritdoc
     */
    public function attributes()
    {
        return [
            '_id',
            'user_id',
            'patient_id',
            'description',
            'status',
            'refered_by',
            'doc_comments',
            'msg_status',
            'created_at',
        ];
    }
    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            // All attributes are massively assignable; no format validation here.
            [['user_id', 'patient_id', 'description', 'status', 'msg_status', 'created_at','refered_by','doc_comments'], 'safe'],
        ];
    }
    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            '_id' => 'ID',
            'user_id' => 'User ID',
            'patient_id' => 'Patient ID',
            'description' => 'Description',
            'status' => 'Status',
            'msg_status' => 'Msg Status',
            'created_at' => 'Created At',
            'refered_by'=>'Refered by',
            'doc_comments'=>'Comments',
        ];
    }
    /**
     * Creates an invite from the current POST request: the logged-in user is
     * the patient, 'recipient-id' is the invited doctor's id and
     * 'message-text' becomes the description. Status flags start at 1 and
     * created_at is stored as a "Y-m-d H:i:s" string.
     *
     * Returns the saved Invite, or null (implicitly) when save() fails.
     */
    public function invite()
    {
        $post = Yii::$app->request->post();
        $user = new Invite();
        $user->user_id = new \MongoId($post["recipient-id"]);
        $user->patient_id = new \MongoId(Yii::$app->user->identity->_id);
        $user->description = $post['message-text'];
        $user->created_at = date ("Y-m-d H:i:s");
        $user->status=1;
        $user->msg_status=1;
        if ($user->save()) {
            return $user;
        }
    }
}
| jaibabu9/987654321 | frontend/models/Invite.php | PHP | bsd-3-clause | 2,130 |
// Part of Arac Neural Network Composition Library.
// (c) 2008 by Justin S Bayer, <bayer.justin@googlemail.com>
#include <iostream>
#include "blockpermutation.h"
#include "../../utilities/utilities.h"
using arac::structure::connections::BlockPermutationConnection;
using arac::structure::connections::Connection;
using arac::structure::modules::Module;
using arac::utilities::block_permutation;
// Builds a permutation connection whose permutation rearranges a flat buffer
// of shape sequence_shape into blocks of shape block_shape (computed by
// arac::utilities::block_permutation) and installs it on the base class.
BlockPermutationConnection::BlockPermutationConnection(
    Module* incoming_p, Module* outgoing_p,
    std::vector<int> sequence_shape,
    std::vector<int> block_shape) :
    PermutationConnection(incoming_p, outgoing_p)
{
    // TODO: Check that modules are of the same size and that sequence and block
    // shapes divide each other nicely.
    std::vector<int> permutation;
    block_permutation(permutation, sequence_shape, block_shape);
    set_permutation(permutation);
}
// Nothing to release: the permutation vector is owned by the base class.
BlockPermutationConnection::~BlockPermutationConnection()
{
}
| bayerj/arac | src/cpp/structure/connections/blockpermutation.cpp | C++ | bsd-3-clause | 954 |
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
/* @var $this yii\web\View */
/* @var $model app\models\Groupes */
/* @var $form yii\widgets\ActiveForm */
?>
<div class="groupes-form">
    <?php /* Shared create/update form for the Groupes model. */ $form = ActiveForm::begin(); ?>
    <?= $form->field($model, 'groupe-deno')->textarea(['rows' => 6]) ?>
    <div class="form-group">
        <?= Html::submitButton($model->isNewRecord ? 'Create' : 'Update', ['class' => $model->isNewRecord ? 'btn btn-success' : 'btn btn-primary']) ?>
    </div>
    <?php ActiveForm::end(); ?>
</div>
| LorenzoPicon/gestotvfreebox | view/Groupes/_form.php | PHP | bsd-3-clause | 545 |
import { ChangelistSummaryResponse } from '../rpc_types';
// Canned ChangelistSummaryResponse with two patchsets (orders 1 and 4): the
// first carries a single tryjob, the second carries four. Used as fixture
// data by the changelist-controls-sk tests and demo page.
export const twoPatchsets: ChangelistSummaryResponse = {
  cl: {
    system: 'gerrit',
    id: '1805837',
    owner: 'chromium-autoroll@example.google.com.iam.gserviceaccount.com',
    status: 'Open',
    subject: 'Roll src-internal da33810f35a7..af6fbc37d76b (1 commits)',
    updated: '2019-09-15T14:25:22Z',
    url: 'https://chromium-review.googlesource.com/1805837',
  },
  patch_sets: [
    {
      id: 'bd92c1d223172fe846fdd8f0fa6532ec2cd2ed72',
      order: 1,
      try_jobs: [
        {
          id: '8102241932564492368',
          name: 'android-nougat-arm64-rel',
          updated: '2019-09-15T13:25:32.686534Z',
          system: 'buildbucket',
          url: 'https://cr-buildbucket.appspot.com/build/8102241932564492368',
        },
      ],
    },
    {
      id: '0d88927361c931267cfa152c6c0ac87bd3e9a1c7',
      order: 4,
      try_jobs: [
        {
          id: '8902241932564492368',
          name: 'android-marshmallow-arm64-rel',
          updated: '2019-09-15T14:25:32.686534Z',
          system: 'buildbucket',
          url: 'https://cr-buildbucket.appspot.com/build/8902241932564492368',
        },
        {
          id: '8902241932564492048',
          name: 'linux-rel',
          updated: '2019-09-15T14:25:32.686534Z',
          system: 'buildbucket',
          url: 'https://cr-buildbucket.appspot.com/build/8902241932564492048',
        },
        {
          id: '8902241932564492512',
          name: 'mac-rel',
          updated: '2019-09-15T14:25:32.686534Z',
          system: 'buildbucket',
          url: 'https://cr-buildbucket.appspot.com/build/8902241932564492512',
        },
        {
          id: '8902241932564492144',
          name: 'win10_chromium_x64_rel_ng',
          updated: '2019-09-15T14:25:32.686534Z',
          system: 'buildbucket',
          url: 'https://cr-buildbucket.appspot.com/build/8902241932564492144',
        },
      ],
    },
  ],
  // Note: intentionally larger than patch_sets.length in some fixtures; here
  // it matches the two patchsets above.
  num_total_patch_sets: 2,
};
| google/skia-buildbot | golden/modules/changelist-controls-sk/test_data.ts | TypeScript | bsd-3-clause | 2,001 |
// Prexonite
//
// Copyright (c) 2014, Christian Klauser
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// The names of the contributors may be used to endorse or
// promote products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
// IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
using System;
using JetBrains.Annotations;
using Prexonite.Modular;
using Prexonite.Types;
namespace Prexonite.Compiler.Ast
{
/// <summary>
/// AST node for a <c>using</c> block: evaluates a resource expression, runs
/// the inner block, and guarantees the resource is disposed afterwards by
/// compiling to a try/finally that calls the <c>dispose</c> command.
/// </summary>
public class AstUsing : AstScopedBlock,
    IAstHasBlocks
{
    // Prefix used for labels generated inside the inner block.
    private const string LabelPrefix = "using";
    public AstUsing([NotNull] ISourcePosition p,
        [NotNull] AstBlock lexicalScope)
        : base(p, lexicalScope)
    {
        _block = new AstScopedBlock(p, this,prefix:LabelPrefix);
    }
    // Expression yielding the resource to guard; must be set before emission.
    private AstExpr _resourceExpression;
    // The body executed while the resource is held.
    private readonly AstScopedBlock _block;
    #region IAstHasBlocks Members
    public AstBlock[] Blocks
    {
        get { return new AstBlock[] {_block}; }
    }
    #region IAstHasExpressions Members
    /// <summary>Base expressions plus the resource expression (appended last).</summary>
    public override AstExpr[] Expressions
    {
        get
        {
            var b = base.Expressions;
            var r = new AstExpr[b.Length + 1];
            b.CopyTo(r,0);
            r[b.Length] = _resourceExpression;
            return r;
        }
    }
    [PublicAPI]
    public AstScopedBlock Block
    {
        get { return _block; }
    }
    [PublicAPI]
    public AstExpr ResourceExpression
    {
        get { return _resourceExpression; }
        set { _resourceExpression = value; }
    }
    #endregion
    #endregion
    /// <summary>
    /// Emits: a hidden local holds the resource; try { local = expr; body }
    /// finally { dispose(local) }. Value semantics are rejected because a
    /// using block produces no value.
    /// </summary>
    protected override void DoEmitCode(CompilerTarget target, StackSemantics stackSemantics)
    {
        if(stackSemantics == StackSemantics.Value)
            throw new NotSupportedException("Using blocks do not produce values and can thus not be used as expressions.");
        if (_resourceExpression == null)
            throw new PrexoniteException("AstUsing requires Expression to be initialized.");
        var tryNode = new AstTryCatchFinally(Position, this);
        // Fresh local variable that keeps the resource across try/finally.
        var vContainer = _block.CreateLabel("container");
        target.Function.Variables.Add(vContainer);
        //Try block => Container = {Expression}; {Block};
        var setCont = target.Factory.Call(Position, EntityRef.Variable.Local.Create(vContainer),PCall.Set);
        setCont.Arguments.Add(_resourceExpression);
        var getCont = target.Factory.Call(Position, EntityRef.Variable.Local.Create(vContainer));
        var tryBlock = tryNode.TryBlock;
        tryBlock.Add(setCont);
        tryBlock.AddRange(_block);
        //Finally block => dispose( Container );
        var dispose = target.Factory.Call(Position, EntityRef.Command.Create(Engine.DisposeAlias));
        dispose.Arguments.Add(getCont);
        tryNode.FinallyBlock.Add(dispose);
        //Emit code!
        tryNode.EmitEffectCode(target);
    }
}
} | SealedSun/prx | Prexonite/Compiler/AST/AstUsing.cs | C# | bsd-3-clause | 4,537 |
#ifndef SM_KINEMATICS_PROPERTY_TREE_HPP
#define SM_KINEMATICS_PROPERTY_TREE_HPP
#include <sm/eigen/property_tree.hpp>
#include <sm/kinematics/Transformation.hpp>
namespace sm {
namespace kinematics {
// Builds a Transformation from a property tree: reads the rotation
// quaternion from key "q_a_b" (renormalized before use) and the translation
// from key "t_a_b_a".
inline sm::kinematics::Transformation transformationFromPropertyTree(const sm::ConstPropertyTree & config)
{
  Eigen::Vector4d q_a_b = sm::eigen::quaternionFromPropertyTree( sm::ConstPropertyTree(config, "q_a_b") );
  q_a_b.normalize();
  Eigen::Vector3d t_a_b_a = sm::eigen::vector3FromPropertyTree( sm::ConstPropertyTree(config, "t_a_b_a" ) );
  return sm::kinematics::Transformation(q_a_b, t_a_b_a);
}
} // namespace kinematics
} // namespace sm
#endif /* SM_KINEMATICS_PROPERTY_TREE_HPP */
| ethz-asl/Schweizer-Messer | sm_kinematics/include/sm/kinematics/property_tree.hpp | C++ | bsd-3-clause | 767 |
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/types.h>
#ifdef DG_DIAGNOSE
#include "diagnose/dg.h"
#endif
#include "n_srm.h"
#include "srm2api.h"
#include "srm_soap27.h"
#include "constants.h"
#include "i18.h"
#include "sysdep.h" /* BOOL, STD_BUF, ... */
#include "free.h" /* FREE(), DELETE() */
#include "str.h" /* dq_param() */
#include <iostream> /* std::string, cout, endl, ... */
#include <sstream> /* ostringstream */
using namespace std;
/*
 * srmMkdir request constructor.
 * Delegates all member initialization to init().
 */
srmMkdir::srmMkdir()
{
init();
}
/*
 * Initialise srmMkdir request members to a known empty state.
 * Called by every constructor before optional field population.
 */
void
srmMkdir::init()
{
/* request (parser/API) */
SURL = NULL; /* target directory SURL; owned by this object, released via DELETE() in the destructor */
/* response (parser) */
}
/*
 * srmMkdir converting constructor from a parser Node.
 * (Not a true copy constructor: it zeroes this request's members via
 * init() and then copies the base Node state via Node::init(node).)
 */
srmMkdir::srmMkdir(Node &node)
{
init();
Node::init(node);
}
/*
 * srmMkdir request destructor.
 * Releases the request-owned SURL string and the storageSystemInfo
 * key/value vectors.  DM_DBG_I/DM_DBG_O are debug-trace entry/exit
 * macros (defined elsewhere — presumably no side effects; verify).
 */
srmMkdir::~srmMkdir()
{
DM_DBG_I;
/* request (parser/API) */
DELETE(SURL);
DELETE_VEC(storageSystemInfo.key);
DELETE_VEC(storageSystemInfo.value);
/* response (parser) */
DM_DBG_O;
}
/*
 * Free process-related structures.
 * FREE_SRM_RET(Mkdir) releases the SRM response structure allocated by
 * NEW_SRM_RET(Mkdir) in exec().
 */
void
srmMkdir::finish(Process *proc)
{
DM_DBG_I;
FREE_SRM_RET(Mkdir);
}
/*
 * Execute the srmMkdir request: evaluate the request parameters in the
 * context of `proc`, perform the SOAP Mkdir call (only when compiled
 * with SRM2_CALL) and match the SRM return status.
 * Returns ERR_ERR when no SRM response was received.
 */
int
srmMkdir::exec(Process *proc)
{
/* Evaluate a stored vector expression in the context of `proc`. */
#define EVAL_VEC_STR_MK(vec) vec = proc->eval_vec_str(srmMkdir::vec)
DM_DBG_I;
tStorageSystemInfo storageSystemInfo;
/* Local, evaluated copies of the extra-info key/value vectors; freed below. */
EVAL_VEC_STR_MK(storageSystemInfo.key);
EVAL_VEC_STR_MK(storageSystemInfo.value);
#ifdef SRM2_CALL
NEW_SRM_RET(Mkdir); /* allocates `resp`; released later in finish() */
/* Perform the actual SOAP call against the SRM endpoint. */
Mkdir(
soap,
EVAL2CSTR(srm_endpoint),
EVAL2CSTR(authorizationID),
EVAL2CSTR(SURL),
storageSystemInfo,
resp
);
#endif
/* The evaluated vectors are no longer needed once the call returned. */
DELETE_VEC(storageSystemInfo.key);
DELETE_VEC(storageSystemInfo.value);
/* matching */
if(!resp || !resp->srmMkdirResponse) {
DM_LOG(DM_N(1), "no SRM response\n");
RETURN(ERR_ERR);
}
/* Compare the returned status against the expected one for this process. */
RETURN(matchReturnStatus(resp->srmMkdirResponse->returnStatus, proc));
#undef EVAL_VEC_STR_MK
}
/*
 * Render this srmMkdir request (and, when available, the SRM response)
 * as a single string via the SS_* stream macros.  Used for logging /
 * script round-tripping — presumably; verify against callers.
 */
std::string
srmMkdir::toString(Process *proc)
{
#define EVAL_VEC_STR_MK(vec) EVAL_VEC_STR(srmMkdir,vec)
DM_DBG_I;
GET_SRM_RESP(Mkdir);
BOOL quote = TRUE; /* read by the SS_P_DQ quoting macros below */
std::stringstream ss;
tStorageSystemInfo_ storageSystemInfo;
EVAL_VEC_STR_MK(storageSystemInfo.key);
EVAL_VEC_STR_MK(storageSystemInfo.value);
/* request */
SS_SRM("srmMkdir");
SS_P_DQ(authorizationID);
SS_P_DQ(SURL);
SS_VEC_DEL(storageSystemInfo.key);
SS_VEC_DEL(storageSystemInfo.value);
/* response (parser) */
SS_P_DQ(returnStatus.explanation);
SS_P_DQ(returnStatus.statusCode);
/* response (API): only rendered when a SOAP response is present */
if(!resp || !resp->srmMkdirResponse) RETURN(ss.str());
SS_P_SRM_RETSTAT(resp->srmMkdirResponse);
RETURN(ss.str());
#undef EVAL_VEC_STR_MK
}
| dCache/s2 | protos/srm/2.2/n/n_srmMkdir.cpp | C++ | bsd-3-clause | 2,686 |