code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import time
import numpy
from nupic.bindings.math import GetNTAReal
from nupic.research.monitor_mixin.monitor_mixin_base import MonitorMixinBase
from nupic.research.monitor_mixin.temporal_memory_monitor_mixin import (
TemporalMemoryMonitorMixin)
from sensorimotor.fast_general_temporal_memory import (
FastGeneralTemporalMemory as GeneralTemporalMemory)
# Uncomment the line below to use GeneralTemporalMemory
# from sensorimotor.general_temporal_memory import GeneralTemporalMemory
from sensorimotor.temporal_pooler import TemporalPooler
# Uncomment the line below to use SpatialTemporalPooler
# from sensorimotor.spatial_temporal_pooler import SpatialTemporalPooler as TemporalPooler
from sensorimotor.temporal_pooler_monitor_mixin import (
TemporalPoolerMonitorMixin)
# Mixin-instrumented variants of the two algorithms: the monitor mixins record
# traces and metrics for every compute call so experiments can inspect and
# pretty-print them afterwards (see feedLayers below).
class MonitoredGeneralTemporalMemory(TemporalMemoryMonitorMixin,
                                     GeneralTemporalMemory): pass


class MonitoredTemporalPooler(TemporalPoolerMonitorMixin, TemporalPooler): pass
"""
Experiment runner class for running networks with layer 4 and layer 3. The
client is responsible for setting up universes, agents, and worlds. This
class just sets up and runs the HTM learning algorithms.
"""
realDType = GetNTAReal()
class SensorimotorExperimentRunner(object):
  """
  Experiment runner for a two-layer HTM network: a layer 4 temporal memory
  feeding a layer 3 temporal pooler. The client is responsible for setting up
  universes, agents, and worlds; this class only sets up and runs the HTM
  learning algorithms.
  """

  # Layer 4 (temporal memory) defaults. Entries set to the sentinel string
  # "Sorry" are required and must be overridden by the client; _checkParams
  # rejects any parameter left at that placeholder.
  DEFAULT_TM_PARAMS = {
    # These should be decent for most experiments, shouldn't need to override
    # these too often. Might want to increase cellsPerColumn for capacity
    # experiments.
    "cellsPerColumn": 8,
    "initialPermanence": 0.5,
    "connectedPermanence": 0.6,
    "permanenceIncrement": 0.1,
    "permanenceDecrement": 0.02,

    # We will force client to override these
    "columnDimensions": "Sorry",
    "minThreshold": "Sorry",
    "maxNewSynapseCount": "Sorry",
    "activationThreshold": "Sorry",
  }

  # Layer 3 (temporal pooler) defaults; same "Sorry" sentinel convention.
  DEFAULT_TP_PARAMS = {
    # Need to check these parameters and find stable values that will be
    # consistent across most experiments.
    "synPermInactiveDec": 0,  # TODO: Check we can use class default here.
    "synPermActiveInc": 0.001,  # TODO: Check we can use class default here.
    "synPredictedInc": 0.5,  # TODO: Why so high??
    "potentialPct": 0.9,  # TODO: need to check impact of this for pooling
    "initConnectedPct": 0.5,  # TODO: need to check impact of this for pooling
    "poolingThreshUnpredicted": 0.0,

    # We will force client to override these
    "numActiveColumnsPerInhArea": "Sorry",
  }
def __init__(self, tmOverrides=None, tpOverrides=None, seed=42):
  """
  Set up the layer 4 temporal memory and layer 3 temporal pooler.

  @param tmOverrides (dict) Overrides for DEFAULT_TM_PARAMS
  @param tpOverrides (dict) Overrides for DEFAULT_TP_PARAMS
  @param seed        (int)  Default RNG seed for both algorithms; a seed
                            supplied in an override dict takes precedence

  @raises RuntimeError if a required ("Sorry") parameter was not overridden
  """
  # Initialize layer 4 temporal memory. Defaults (including the seed) are
  # applied first so any value in tmOverrides wins; previously the seed was
  # re-applied after the overrides, silently clobbering a client-supplied
  # seed, which was inconsistent with how tpOverrides were handled below.
  params = dict(self.DEFAULT_TM_PARAMS)
  params["seed"] = seed
  params.update(tmOverrides or {})
  self._checkParams(params)
  self.tm = MonitoredGeneralTemporalMemory(mmName="TM", **params)

  # Initialize layer 3 temporal pooler. Its input is the full set of layer 4
  # cells, so input dimensions and potential radius are derived from the TM;
  # clients may still override them via tpOverrides.
  params = dict(self.DEFAULT_TP_PARAMS)
  params["inputDimensions"] = [self.tm.numberOfCells()]
  params["potentialRadius"] = self.tm.numberOfCells()
  params["seed"] = seed
  params.update(tpOverrides or {})
  self._checkParams(params)
  self.tp = MonitoredTemporalPooler(mmName="TP", **params)
def _checkParams(self, params):
for k,v in params.iteritems():
if v == "Sorry":
raise RuntimeError("Param "+k+" must be specified")
def feedTransition(self, sensorPattern, motorPattern, sensorimotorPattern,
                   tmLearn=True, tpLearn=None, sequenceLabel=None):
  """
  Feed one (sensor, motor) transition to the temporal memory and, optionally,
  to the temporal pooler.

  @param sensorPattern       (set)  Active sensor input bits, or None to mark
                                    a sequence boundary (resets both layers)
  @param motorPattern        (set)  Active motor input bits
  @param sensorimotorPattern (set)  Unused here; kept for interface symmetry
  @param tmLearn             (bool) Whether the temporal memory learns
  @param tpLearn             (None or bool) None skips the temporal pooler
                                    entirely; otherwise its learning flag
  @param sequenceLabel       Label forwarded to the monitor mixins
  """
  if sensorPattern is None:
    # Sequence boundary: reset both layers and do no compute this step.
    self.tm.reset()
    self.tp.reset()
    return

  # Layer 4: temporal memory consumes the sensor bits with the motor bits as
  # external (distal) input.
  self.tm.compute(sensorPattern,
                  activeExternalCells=motorPattern,
                  formInternalConnections=True,
                  learn=tmLearn,
                  sequenceLabel=sequenceLabel)

  if tpLearn is None:
    return

  # Layer 3: temporal pooler pools over the temporal memory's cell state.
  pooledInput, burstingCols, predictedCells = self.formatInputForTP()
  activeArray = numpy.zeros(self.tp.getNumColumns())
  self.tp.compute(pooledInput,
                  tpLearn,
                  activeArray,
                  burstingCols,
                  predictedCells,
                  sequenceLabel=sequenceLabel)
def feedLayers(self, sequences, tmLearn=True, tpLearn=None, verbosity=0,
               showProgressInterval=None):
  """
  Feed the given sequences to the HTM algorithms.

  @param sequences: (tuple) As returned by generateSequences: (sensor
                    sequence, motor sequence, sensorimotor sequence,
                    sequence labels); None entries mark sequence boundaries
  @param tmLearn:   (bool) Either False, or True
  @param tpLearn:   (None,bool) Either None, False, or True. If None,
                    temporal pooler will be skipped.
  @param verbosity: (int) >= 2 prints the monitor-mixin traces at the end
  @param showProgressInterval: (int) Prints progress every N iterations,
                    where N is the value of this param
  """
  (sensorSequence,
   motorSequence,
   sensorimotorSequence,
   sequenceLabels) = sequences

  currentTime = time.time()
  # Feed each element of the parallel sequences in lockstep.
  for i in xrange(len(sensorSequence)):
    sensorPattern = sensorSequence[i]
    motorPattern = motorSequence[i]
    sensorimotorPattern = sensorimotorSequence[i]
    sequenceLabel = sequenceLabels[i]
    self.feedTransition(sensorPattern, motorPattern, sensorimotorPattern,
                        tmLearn=tmLearn, tpLearn=tpLearn,
                        sequenceLabel=sequenceLabel)

    # Periodic progress report; timer restarts after each report.
    if (showProgressInterval is not None and
        i > 0 and
        i % showProgressInterval == 0):
      print ("Fed {0} / {1} elements of the sequence "
             "in {2:0.2f} seconds.".format(
             i, len(sensorSequence), time.time() - currentTime))
      currentTime = time.time()

  if verbosity >= 2:
    # Print default TM traces
    traces = self.tm.mmGetDefaultTraces(verbosity=verbosity)
    print MonitorMixinBase.mmPrettyPrintTraces(traces,
                                               breakOnResets=
                                               self.tm.mmGetTraceResets())

    if tpLearn is not None:
      # Print default TP traces
      traces = self.tp.mmGetDefaultTraces(verbosity=verbosity)
      print MonitorMixinBase.mmPrettyPrintTraces(traces,
                                                 breakOnResets=
                                                 self.tp.mmGetTraceResets())
    # Trailing blank line after the trace dump.
    print
@staticmethod
def generateSequences(length, agents, numSequences=1, verbosity=0):
  """
  Generate sensorimotor sequences by letting each agent act in its world.

  @param length       (int)  Length of each sequence to generate, one for
                             each agent
  @param agents       (AbstractAgent) Agents acting in their worlds
  @param numSequences (int)  Number of passes over the agent list
  @param verbosity    (int)  Verbosity passed through to the agents

  @return (tuple) (sensor sequence, motor sequence, sensorimotor sequence,
                   sequence labels)
  """
  sensorSequence = []
  motorSequence = []
  sensorimotorSequence = []
  sequenceLabels = []

  # range() instead of the Python-2-only xrange(): iteration behavior is
  # identical here and the code stays portable.
  for _ in range(numSequences):
    for agent in agents:
      s, m, sm = agent.generateSensorimotorSequence(length,
                                                    verbosity=verbosity)
      sensorSequence += s
      motorSequence += m
      sensorimotorSequence += sm
      # Every element of this sub-sequence is labeled with its world.
      sequenceLabels += [agent.world.toString()] * length

      # None marks the boundary between sub-sequences; feedTransition
      # interprets it as a reset of both layers.
      sensorSequence.append(None)
      motorSequence.append(None)
      sensorimotorSequence.append(None)
      sequenceLabels.append(None)

  return sensorSequence, motorSequence, sensorimotorSequence, sequenceLabels
def formatInputForTP(self):
  """
  Package the temporal memory state the temporal pooler needs.

  @return (tuple) (active-cell vector, bursting-column vector,
                   correctly-predicted-cell vector), each a numpy array of
                   realDType with 1s at the relevant indices
  """
  cellCount = self.tm.numberOfCells()
  columnCount = self.tm.numberOfColumns()

  # All currently active cells in layer 4.
  activeCellVector = numpy.zeros(cellCount, dtype=realDType)
  activeCellVector[list(self.tm.activeCellsIndices())] = 1

  # Columns in layer 4 that burst (active but unpredicted).
  burstingColumnVector = numpy.zeros(columnCount, dtype=realDType)
  burstingColumnVector[list(self.tm.unpredictedActiveColumns)] = 1

  # Layer 4 cells that were correctly predicted.
  predictedCellVector = numpy.zeros(cellCount, dtype=realDType)
  predictedCellVector[list(self.tm.predictedActiveCellsIndices())] = 1

  return activeCellVector, burstingColumnVector, predictedCellVector
def formatRow(self, x, formatString="%d", rowSize=700, groupSize=7):
  """
  Utility routine for pretty printing large vectors.

  Each value is rendered with formatString and followed by a space; an extra
  space is inserted after every groupSize values and a newline after every
  rowSize values.

  @param x            (iterable) Values to print
  @param formatString (string)   %-style format applied to each value
  @param rowSize      (int)      Number of values per printed row
  @param groupSize    (int)      Number of values per space-separated group
                                 (previously hard-coded to 7)

  @return (string) The formatted vector
  """
  # Collect pieces and join once rather than repeated string concatenation,
  # which is quadratic for large vectors.
  parts = []
  for c, v in enumerate(x):
    if c > 0 and c % groupSize == 0:
      parts.append(' ')
    if c > 0 and c % rowSize == 0:
      parts.append('\n')
    parts.append(formatString % v)
    parts.append(' ')
  return ''.join(parts)
| pford68/nupic.research | sensorimotor/sensorimotor/sensorimotor_experiment_runner.py | Python | gpl-3.0 | 9,403 |
/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3 as published by the Free Software
** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-3.0.html.
**
****************************************************************************/
#include "resizecontroller.h"
#include "formeditoritem.h"
#include "layeritem.h"
#include <resizehandleitem.h>
#include <QCursor>
#include <QGraphicsScene>
namespace QmlDesigner {
// Shared state behind ResizeController: the form editor item being resized
// plus the eight resize handles (four corners, four edge midpoints) that are
// drawn on the layer item.
class ResizeControllerData
{
public:
    ResizeControllerData(LayerItem *layerItem,
                         FormEditorItem *formEditorItem);
    ResizeControllerData(const ResizeControllerData &other);
    ~ResizeControllerData();

    // Guarded pointer: the layer item lives in the scene and may be destroyed
    // before this data block; the destructor checks it before cleanup.
    QPointer<LayerItem> layerItem;
    FormEditorItem *formEditorItem = nullptr;
    QSharedPointer<ResizeHandleItem> topLeftItem;
    QSharedPointer<ResizeHandleItem> topRightItem;
    QSharedPointer<ResizeHandleItem> bottomLeftItem;
    QSharedPointer<ResizeHandleItem> bottomRightItem;
    QSharedPointer<ResizeHandleItem> topItem;
    QSharedPointer<ResizeHandleItem> leftItem;
    QSharedPointer<ResizeHandleItem> rightItem;
    QSharedPointer<ResizeHandleItem> bottomItem;
};
// Constructs the data block for one controlled item. The eight
// QSharedPointer handle members default-construct to null, so the previous
// explicit nullptr initializers were redundant and have been dropped; the
// actual handles are created in ResizeController(LayerItem *, FormEditorItem *).
ResizeControllerData::ResizeControllerData(LayerItem *layerItem, FormEditorItem *formEditorItem)
    : layerItem(layerItem),
      formEditorItem(formEditorItem)
{
}
// Default memberwise copy: the copy shares the same handle items via the
// shared pointers rather than duplicating them.
ResizeControllerData::ResizeControllerData(const ResizeControllerData &other) = default;
// Detaches all resize handles from the graphics scene before the shared
// pointers release them. If the layer item is already gone, the scene no
// longer holds our items and there is nothing to remove.
ResizeControllerData::~ResizeControllerData()
{
    if (!layerItem)
        return;

    QGraphicsScene *scene = layerItem->scene();
    ResizeHandleItem *const handles[] = {
        topLeftItem.data(), topRightItem.data(),
        bottomLeftItem.data(), bottomRightItem.data(),
        topItem.data(), leftItem.data(),
        rightItem.data(), bottomItem.data(),
    };
    for (ResizeHandleItem *handle : handles)
        scene->removeItem(handle);
}
// Null controller: no layer item, no form editor item, no handles.
ResizeController::ResizeController()
    : m_data(new ResizeControllerData(nullptr, nullptr))
{
}

// Wraps an existing, shared data block (used when reviving a controller from
// a weak reference).
ResizeController::ResizeController(const QSharedPointer<ResizeControllerData> &data)
    : m_data(data)
{
}
// Creates the fully functional controller: builds the eight resize handles on
// the layer item and positions them around the controlled form editor item.
ResizeController::ResizeController(LayerItem *layerItem, FormEditorItem *formEditorItem)
    : m_data(new ResizeControllerData(layerItem, formEditorItem))
{
    // All eight handles are identical except for stacking order (z value) and
    // resize cursor, so create them uniformly instead of repeating the
    // three-line setup per handle.
    auto makeHandle = [&](qreal z, const QCursor &cursor) {
        QSharedPointer<ResizeHandleItem> handle(new ResizeHandleItem(layerItem, *this));
        handle->setZValue(z);
        handle->setCursor(cursor);
        return handle;
    };

    // Z values preserved exactly from the original code: corners stack above
    // the edge handles, bottom-right above the other corners.
    m_data->topLeftItem = makeHandle(302, Qt::SizeFDiagCursor);
    m_data->topRightItem = makeHandle(301, Qt::SizeBDiagCursor);
    m_data->bottomLeftItem = makeHandle(301, Qt::SizeBDiagCursor);
    m_data->bottomRightItem = makeHandle(305, Qt::SizeFDiagCursor);
    m_data->topItem = makeHandle(300, Qt::SizeVerCursor);
    m_data->leftItem = makeHandle(300, Qt::SizeHorCursor);
    m_data->rightItem = makeHandle(300, Qt::SizeHorCursor);
    m_data->bottomItem = makeHandle(300, Qt::SizeVerCursor);

    updatePosition();
}
// Copies share the underlying data block (shallow, shared-pointer copy).
ResizeController::ResizeController(const ResizeController &other) = default;

// Revives a controller from a weak reference; m_data is null if the strong
// reference has already expired.
ResizeController::ResizeController(const WeakResizeController &resizeController)
    : m_data(resizeController.m_data.toStrongRef())
{
}

ResizeController::~ResizeController() = default;

ResizeController &ResizeController::operator =(const ResizeController &other)
{
    if (this != &other)
        m_data = other.m_data;
    return *this;
}
bool ResizeController::isValid() const
{
return m_data->formEditorItem && m_data->formEditorItem->qmlItemNode().isValid();
}
void ResizeController::show()
{
m_data->topLeftItem->show();
m_data->topRightItem->show();
m_data->bottomLeftItem->show();
m_data->bottomRightItem->show();
m_data->topItem->show();
m_data->leftItem->show();
m_data->rightItem->show();
m_data->bottomItem->show();
}
void ResizeController::hide()
{
m_data->topLeftItem->hide();
m_data->topRightItem->hide();
m_data->bottomLeftItem->hide();
m_data->bottomRightItem->hide();
m_data->topItem->hide();
m_data->leftItem->hide();
m_data->rightItem->hide();
m_data->bottomItem->hide();
}
// Midpoints of the four rectangle edges; used to anchor the edge handles.
static QPointF topCenter(const QRectF &rect)
{
    return QPointF(rect.center().x(), rect.top());
}

static QPointF leftCenter(const QRectF &rect)
{
    return QPointF(rect.left(), rect.center().y());
}

static QPointF rightCenter(const QRectF &rect)
{
    return QPointF(rect.right(), rect.center().y());
}

static QPointF bottomCenter(const QRectF &rect)
{
    return QPointF(rect.center().x(), rect.bottom());
}
void ResizeController::updatePosition()
{
if (isValid()) {
QRectF boundingRect = m_data->formEditorItem->qmlItemNode().instanceBoundingRect();
QPointF topLeftPointInLayerSpace(m_data->formEditorItem->mapToItem(m_data->layerItem.data(),
boundingRect.topLeft()));
QPointF topRightPointInLayerSpace(m_data->formEditorItem->mapToItem(m_data->layerItem.data(),
boundingRect.topRight()));
QPointF bottomLeftPointInLayerSpace(m_data->formEditorItem->mapToItem(m_data->layerItem.data(),
boundingRect.bottomLeft()));
QPointF bottomRightPointInLayerSpace(m_data->formEditorItem->mapToItem(m_data->layerItem.data(),
boundingRect.bottomRight()));
QPointF topPointInLayerSpace(m_data->formEditorItem->mapToItem(m_data->layerItem.data(),
topCenter(boundingRect)));
QPointF leftPointInLayerSpace(m_data->formEditorItem->mapToItem(m_data->layerItem.data(),
leftCenter(boundingRect)));
QPointF rightPointInLayerSpace(m_data->formEditorItem->mapToItem(m_data->layerItem.data(),
rightCenter(boundingRect)));
QPointF bottomPointInLayerSpace(m_data->formEditorItem->mapToItem(m_data->layerItem.data(),
bottomCenter(boundingRect)));
m_data->topRightItem->setHandlePosition(topRightPointInLayerSpace, boundingRect.topRight());
m_data->topLeftItem->setHandlePosition(topLeftPointInLayerSpace, boundingRect.topLeft());
m_data->bottomLeftItem->setHandlePosition(bottomLeftPointInLayerSpace, boundingRect.bottomLeft());
m_data->bottomRightItem->setHandlePosition(bottomRightPointInLayerSpace, boundingRect.bottomRight());
m_data->topItem->setHandlePosition(topPointInLayerSpace, topCenter(boundingRect));
m_data->leftItem->setHandlePosition(leftPointInLayerSpace, leftCenter(boundingRect));
m_data->rightItem->setHandlePosition(rightPointInLayerSpace, rightCenter(boundingRect));
m_data->bottomItem->setHandlePosition(bottomPointInLayerSpace, bottomCenter(boundingRect));
}
}
// The form editor item this controller resizes (may be null for the default-
// constructed controller).
FormEditorItem* ResizeController::formEditorItem() const
{
    return m_data->formEditorItem;
}
// The following predicates let a grabbed ResizeHandleItem report which grip it
// is; the raw pointer is compared against the stored shared pointers.
bool ResizeController::isTopLeftHandle(const ResizeHandleItem *handle) const
{
    return handle == m_data->topLeftItem;
}

bool ResizeController::isTopRightHandle(const ResizeHandleItem *handle) const
{
    return handle == m_data->topRightItem;
}

bool ResizeController::isBottomLeftHandle(const ResizeHandleItem *handle) const
{
    return handle == m_data->bottomLeftItem;
}

bool ResizeController::isBottomRightHandle(const ResizeHandleItem *handle) const
{
    return handle == m_data->bottomRightItem;
}

bool ResizeController::isTopHandle(const ResizeHandleItem *handle) const
{
    return handle == m_data->topItem;
}

bool ResizeController::isLeftHandle(const ResizeHandleItem *handle) const
{
    return handle == m_data->leftItem;
}

bool ResizeController::isRightHandle(const ResizeHandleItem *handle) const
{
    return handle == m_data->rightItem;
}

bool ResizeController::isBottomHandle(const ResizeHandleItem *handle) const
{
    return handle == m_data->bottomItem;
}
// Produces a non-owning reference to this controller's shared data.
WeakResizeController ResizeController::toWeakResizeController() const
{
    return WeakResizeController(*this);
}
WeakResizeController::WeakResizeController() = default;

WeakResizeController::WeakResizeController(const WeakResizeController &resizeController) = default;

// Downgrades a strong controller reference to a weak one; does not extend the
// data block's lifetime.
WeakResizeController::WeakResizeController(const ResizeController &resizeController)
    : m_data(resizeController.m_data.toWeakRef())
{
}

WeakResizeController::~WeakResizeController() = default;
WeakResizeController &WeakResizeController::operator =(const WeakResizeController &other)
{
    // Assign only when the weak references differ; QWeakPointer comparison.
    if (m_data != other.m_data)
        m_data = other.m_data;
    return *this;
}
// Upgrades to a strong controller; the result's data is null if the referenced
// controller has already been destroyed.
ResizeController WeakResizeController::toResizeController() const
{
    return ResizeController(*this);
}
}
| qtproject/qt-creator | src/plugins/qmldesigner/components/formeditor/resizecontroller.cpp | C++ | gpl-3.0 | 11,008 |
/*
* Copyright 2016 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "gm.h"
#include "SkPathEffect.h"
#include "SkPictureRecorder.h"
#include "SkShadowPaintFilterCanvas.h"
#include "SkShadowShader.h"
#include "SkSurface.h"
#ifdef SK_EXPERIMENTAL_SHADOWING
// Builds the scene picture used by this GM: a full-canvas gray "ground"
// receiver plus three overlapping colored rects, each drawn after another
// translateZ(80), i.e. successively higher above the ground.
static sk_sp<SkPicture> make_test_picture(int width, int height) {
    SkPictureRecorder recorder;

    // LONG RANGE TODO: eventually add SkBBHFactory (bounding box factory)
    SkCanvas* canvas = recorder.beginRecording(SkRect::MakeIWH(width, height));

    SkASSERT(canvas->getTotalMatrix().isIdentity());
    SkPaint paint;
    paint.setColor(SK_ColorGRAY);

    // LONG RANGE TODO: tag occluders
    // LONG RANGE TODO: track number of IDs we need (hopefully less than 256)
    // and determinate the mapping from z to id

    // universal receiver, "ground"
    canvas->drawRect(SkRect::MakeIWH(width, height), paint);

    // TODO: Maybe add the ID here along with the depth
    paint.setColor(0xFFEE8888);
    canvas->translateZ(80);
    canvas->drawRect(SkRect::MakeLTRB(200,150,350,300), paint);

    paint.setColor(0xFF88EE88);
    canvas->translateZ(80);
    canvas->drawRect(SkRect::MakeLTRB(150,200,300,350), paint);

    paint.setColor(0xFF8888EE);
    canvas->translateZ(80);
    canvas->drawRect(SkRect::MakeLTRB(100,100,250,250), paint);

    // TODO: Add an assert that Z order matches painter's order
    // TODO: think about if the Z-order always matching painting order is too strict

    return recorder.finishRecordingAsPicture();
}
namespace skiagm {
// GM exercising the experimental shadow-mapping canvas API
// (SK_EXPERIMENTAL_SHADOWING): renders the test scene under two directional
// lights plus an ambient light, using the variance shadow-map type.
class ShadowMapsGM : public GM {
public:
    ShadowMapsGM() {
        this->setBGColor(sk_tool_utils::color_to_565(0xFFCCCCCC));
    }

    void onOnceBeforeDraw() override {
        // Create a light set consisting of
        //   - bluish directional light pointing more right than down
        //   - reddish directional light pointing more down than right
        //   - soft white ambient light
        SkLights::Builder builder;
        builder.add(SkLights::Light::MakeDirectional(SkColor3f::Make(0.2f, 0.3f, 0.4f),
                                                     SkVector3::Make(0.2f, 0.1f, 1.0f)));
        builder.add(SkLights::Light::MakeDirectional(SkColor3f::Make(0.4f, 0.3f, 0.2f),
                                                     SkVector3::Make(0.1f, 0.2f, 1.0f)));
        builder.setAmbientLightColor(SkColor3f::Make(0.4f, 0.4f, 0.4f));
        fLights = builder.finish();

        // Parameters for the variance shadow-map type selected below.
        // NOTE(review): values look hand-tuned for this scene — confirm
        // against SkShadowParams documentation before reusing elsewhere.
        fShadowParams.fShadowRadius = 4.0f;
        fShadowParams.fBiasingConstant = 0.3f;
        fShadowParams.fMinVariance = 1024;
        fShadowParams.fType = SkShadowParams::kVariance_ShadowType;
    }

protected:
    static constexpr int kWidth = 400;
    static constexpr int kHeight = 400;

    SkString onShortName() override {
        return SkString("shadowmaps");
    }

    SkISize onISize() override {
        return SkISize::Make(kWidth, kHeight);
    }

    void onDraw(SkCanvas* canvas) override {
        // This picture stores the picture of the scene.
        // It's used to generate the depth maps.
        sk_sp<SkPicture> pic(make_test_picture(kWidth, kHeight));
        canvas->setLights(fLights);
        canvas->drawShadowedPicture(pic, nullptr, nullptr, fShadowParams);
    }

private:
    sk_sp<SkLights> fLights;
    SkShadowParams fShadowParams;

    typedef GM INHERITED;
};
//////////////////////////////////////////////////////////////////////////////

// Register this GM with the test harness.
DEF_GM(return new ShadowMapsGM;)
}
#endif
| geminy/aidear | oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/third_party/skia/gm/shadowmaps.cpp | C++ | gpl-3.0 | 3,576 |
// -*- Mode: Go; indent-tabs-mode: t -*-
/*
* Copyright (C) 2019 Canonical Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package main
import (
"errors"
"fmt"
"strings"
"time"
"github.com/jessevdk/go-flags"
"github.com/snapcore/snapd/asserts"
"github.com/snapcore/snapd/client"
"github.com/snapcore/snapd/i18n"
)
// Help strings, sentinel errors, and the header print order shared by the
// model/serial output paths below.
var (
	shortModelHelp = i18n.G("Get the active model for this device")
	longModelHelp  = i18n.G(`
The model command returns the active model assertion information for this
device.
By default, only the essential model identification information is
included in the output, but this can be expanded to include all of an
assertion's non-meta headers.
The verbose output is presented in a structured, yaml-like format.
Similarly, the active serial assertion can be used for the output instead of the
model assertion.
`)

	invalidTypeMessage    = i18n.G("invalid type for %q header")
	errNoMainAssertion    = errors.New(i18n.G("device not ready yet (no assertions found)"))
	errNoSerial           = errors.New(i18n.G("device not registered yet (no serial assertion found)"))
	errNoVerboseAssertion = errors.New(i18n.G("cannot use --verbose with --assertion"))

	// this list is a "nice" "human" "readable" "ordering" of headers to print
	// off, sorted in lexical order with meta headers and primary key headers
	// removed, and big nasty keys such as device-key-sha3-384 and
	// device-key at the bottom
	// it also contains both serial and model assertion headers, but we
	// follow the same code path for both assertion types and some of the
	// headers are shared between the two, so it still works out correctly
	niceOrdering = [...]string{
		"architecture",
		"base",
		"classic",
		"display-name",
		"gadget",
		"kernel",
		"revision",
		"store",
		"system-user-authority",
		"timestamp",
		"required-snaps", // for uc16 and uc18 models
		"snaps",          // for uc20 models
		"device-key-sha3-384",
		"device-key",
	}
)
// cmdModel implements "snap model": it shows the active model (or serial)
// assertion of the device, either as a summary, a verbose listing, or the
// raw assertion.
type cmdModel struct {
	clientMixin
	timeMixin
	colorMixin

	// Serial selects the serial assertion instead of the model assertion;
	// Verbose prints all non-meta headers; Assertion dumps the raw assertion.
	Serial    bool `long:"serial"`
	Verbose   bool `long:"verbose"`
	Assertion bool `long:"assertion"`
}
// init registers the "model" command and its option descriptions with the
// CLI command table.
func init() {
	addCommand("model",
		shortModelHelp,
		longModelHelp,
		func() flags.Commander {
			return &cmdModel{}
		}, colorDescs.also(timeDescs).also(map[string]string{
			"assertion": i18n.G("Print the raw assertion."),
			"verbose":   i18n.G("Print all specific assertion fields."),
			"serial": i18n.G(
				"Print the serial assertion instead of the model assertion."),
		}),
		[]argDesc{},
	)
}
// Execute fetches the active model (and, when available, serial) assertion
// from snapd and renders one of three views: the raw assertion (--assertion),
// a yaml-like verbose listing (--verbose), or a terse identification summary.
func (x *cmdModel) Execute(args []string) error {
	if x.Verbose && x.Assertion {
		// can't do a verbose mode for the assertion
		return errNoVerboseAssertion
	}

	var mainAssertion asserts.Assertion
	serialAssertion, serialErr := x.client.CurrentSerialAssertion()
	modelAssertion, modelErr := x.client.CurrentModelAssertion()

	// if we didn't get a model assertion bail early
	if modelErr != nil {
		if client.IsAssertionNotFoundError(modelErr) {
			// device is not registered yet - use specific error message
			return errNoMainAssertion
		}
		return modelErr
	}

	// if the serial assertion error is anything other than not found, also
	// bail early
	// the serial assertion not being found may not be fatal
	if serialErr != nil && !client.IsAssertionNotFoundError(serialErr) {
		return serialErr
	}

	if x.Serial {
		mainAssertion = serialAssertion
	} else {
		mainAssertion = modelAssertion
	}

	if x.Assertion {
		// if we are using the serial assertion and we specifically didn't find the
		// serial assertion, bail with specific error
		if x.Serial && client.IsAssertionNotFoundError(serialErr) {
			return errNoMainAssertion
		}

		_, err := Stdout.Write(asserts.Encode(mainAssertion))
		return err
	}

	// Clamp the effective terminal width used for wrapping long keys.
	termWidth, _ := termSize()
	termWidth -= 3
	if termWidth > 100 {
		// any wider than this and it gets hard to read
		termWidth = 100
	}

	esc := x.getEscapes()

	w := tabWriter()

	if x.Serial && client.IsAssertionNotFoundError(serialErr) {
		// for serial assertion, the primary keys are output (model and
		// brand-id), but if we didn't find the serial assertion then we still
		// output the brand-id and model from the model assertion, but also
		// return a devNotReady error
		fmt.Fprintf(w, "brand-id:\t%s\n", modelAssertion.HeaderString("brand-id"))
		fmt.Fprintf(w, "model:\t%s\n", modelAssertion.HeaderString("model"))
		w.Flush()
		return errNoSerial
	}

	// the rest of this function is the main flow for outputting either the
	// model or serial assertion in normal or verbose mode

	// for the `snap model` case with no options, we don't want colons, we want
	// to be like `snap version`
	separator := ":"
	if !x.Verbose && !x.Serial {
		separator = ""
	}

	// ordering of the primary keys for model: brand, model, serial
	// ordering of primary keys for serial is brand-id, model, serial

	// output brand/brand-id
	brandIDHeader := mainAssertion.HeaderString("brand-id")
	modelHeader := mainAssertion.HeaderString("model")

	// for the serial header, if there's no serial yet, it's not an error for
	// model (and we already handled the serial error above) but need to add a
	// parenthetical about the device not being registered yet
	var serial string
	if client.IsAssertionNotFoundError(serialErr) {
		if x.Verbose || x.Serial {
			// verbose and serial are yamlish, so we need to escape the dash
			serial = esc.dash
		} else {
			serial = "-"
		}
		serial += " (device not registered yet)"
	} else {
		serial = serialAssertion.HeaderString("serial")
	}

	// handle brand/brand-id and model/model + display-name differently on just
	// `snap model` w/o opts
	if x.Serial || x.Verbose {
		fmt.Fprintf(w, "brand-id:\t%s\n", brandIDHeader)
		fmt.Fprintf(w, "model:\t%s\n", modelHeader)
	} else {
		// for the model command (not --serial) we want to show a publisher
		// style display of "brand" instead of just "brand-id"
		storeAccount, err := x.client.StoreAccount(brandIDHeader)
		if err != nil {
			return err
		}
		// use the longPublisher helper to format the brand store account
		// like we do in `snap info`
		fmt.Fprintf(w, "brand%s\t%s\n", separator, longPublisher(x.getEscapes(), storeAccount))
		// for model, if there's a display-name, we show that first with the
		// real model in parenthesis
		if displayName := modelAssertion.HeaderString("display-name"); displayName != "" {
			modelHeader = fmt.Sprintf("%s (%s)", displayName, modelHeader)
		}
		fmt.Fprintf(w, "model%s\t%s\n", separator, modelHeader)
	}

	// only output the grade if it is non-empty, either it is not in the model
	// assertion for all non-uc20 model assertions, or it is non-empty and
	// required for uc20 model assertions
	grade := modelAssertion.HeaderString("grade")
	if grade != "" {
		fmt.Fprintf(w, "grade%s\t%s\n", separator, grade)
	}

	storageSafety := modelAssertion.HeaderString("storage-safety")
	if storageSafety != "" {
		fmt.Fprintf(w, "storage-safety%s\t%s\n", separator, storageSafety)
	}

	// serial is same for all variants
	fmt.Fprintf(w, "serial%s\t%s\n", separator, serial)

	// --verbose means output more information
	if x.Verbose {
		allHeadersMap := mainAssertion.Headers()

		// walk the headers in the curated niceOrdering rather than map order
		for _, headerName := range niceOrdering {
			invalidTypeErr := fmt.Errorf(invalidTypeMessage, headerName)

			headerValue, ok := allHeadersMap[headerName]
			// make sure the header is in the map
			if !ok {
				continue
			}

			// switch on which header it is to handle some special cases
			switch headerName {
			// list of scalars
			case "required-snaps", "system-user-authority":
				headerIfaceList, ok := headerValue.([]interface{})
				if !ok {
					return invalidTypeErr
				}
				if len(headerIfaceList) == 0 {
					continue
				}
				fmt.Fprintf(w, "%s:\t\n", headerName)
				for _, elem := range headerIfaceList {
					headerStringElem, ok := elem.(string)
					if !ok {
						return invalidTypeErr
					}
					// note we don't wrap these, since for now this is
					// specifically just required-snaps and so all of these
					// will be snap names which are required to be short
					fmt.Fprintf(w, " - %s\n", headerStringElem)
				}

			//timestamp needs to be formatted with fmtTime from the timeMixin
			case "timestamp":
				timestamp, ok := headerValue.(string)
				if !ok {
					return invalidTypeErr
				}
				// parse the time string as RFC3339, which is what the format is
				// always in for assertions
				t, err := time.Parse(time.RFC3339, timestamp)
				if err != nil {
					return err
				}
				fmt.Fprintf(w, "timestamp:\t%s\n", x.fmtTime(t))

			// long string key we don't want to rewrap but can safely handle
			// on "reasonable" width terminals
			case "device-key-sha3-384":
				// also flush the writer before continuing so the previous keys
				// don't try to align with this key
				w.Flush()
				headerString, ok := headerValue.(string)
				if !ok {
					return invalidTypeErr
				}
				switch {
				case termWidth > 86:
					fmt.Fprintf(w, "device-key-sha3-384: %s\n", headerString)
				case termWidth <= 86 && termWidth > 66:
					fmt.Fprintln(w, "device-key-sha3-384: |")
					wrapLine(w, []rune(headerString), " ", termWidth)
				}

			case "snaps":
				// also flush the writer before continuing so the previous keys
				// don't try to align with this key
				w.Flush()
				snapsHeader, ok := headerValue.([]interface{})
				if !ok {
					return invalidTypeErr
				}
				if len(snapsHeader) == 0 {
					// unexpected why this is an empty list, but just ignore for
					// now
					continue
				}
				fmt.Fprintf(w, "snaps:\n")
				for _, sn := range snapsHeader {
					snMap, ok := sn.(map[string]interface{})
					if !ok {
						return invalidTypeErr
					}
					// iterate over all keys in the map in a stable, visually
					// appealing ordering
					// first do snap name, which will always be present since we
					// parsed a valid assertion
					name := snMap["name"].(string)
					fmt.Fprintf(w, " - name:\t%s\n", name)

					// the rest of these may be absent, but they are all still
					// simple strings
					for _, snKey := range []string{"id", "type", "default-channel", "presence"} {
						snValue, ok := snMap[snKey]
						if !ok {
							continue
						}
						snStrValue, ok := snValue.(string)
						if !ok {
							return invalidTypeErr
						}
						if snStrValue != "" {
							fmt.Fprintf(w, " %s:\t%s\n", snKey, snStrValue)
						}
					}

					// finally handle "modes" which is a list
					modes, ok := snMap["modes"]
					if !ok {
						continue
					}
					modesSlice, ok := modes.([]interface{})
					if !ok {
						return invalidTypeErr
					}
					if len(modesSlice) == 0 {
						continue
					}

					modeStrSlice := make([]string, 0, len(modesSlice))
					for _, mode := range modesSlice {
						modeStr, ok := mode.(string)
						if !ok {
							return invalidTypeErr
						}
						modeStrSlice = append(modeStrSlice, modeStr)
					}
					modesSliceYamlStr := "[" + strings.Join(modeStrSlice, ", ") + "]"
					fmt.Fprintf(w, " modes:\t%s\n", modesSliceYamlStr)
				}

			// long base64 key we can rewrap safely
			case "device-key":
				headerString, ok := headerValue.(string)
				if !ok {
					return invalidTypeErr
				}
				// the string value here has newlines inserted as part of the
				// raw assertion, but base64 doesn't care about whitespace, so
				// it's safe to split by newlines and re-wrap to make it
				// prettier
				headerString = strings.Join(
					strings.Split(headerString, "\n"),
					"")
				fmt.Fprintln(w, "device-key: |")
				wrapLine(w, []rune(headerString), " ", termWidth)

			// the default is all the rest of short scalar values, which all
			// should be strings
			default:
				headerString, ok := headerValue.(string)
				if !ok {
					return invalidTypeErr
				}
				fmt.Fprintf(w, "%s:\t%s\n", headerName, headerString)
			}
		}
	}

	return w.Flush()
}
| mvo5/snappy | cmd/snap/cmd_model.go | GO | gpl-3.0 | 12,390 |
using System.Collections.Generic;
using System.Linq;
namespace PKHeX.Core
{
/// <summary>
/// Editor object that unpacks <see cref="EventWork{T}"/> into flags &amp; work groups, and handles value get/set operations.
/// </summary>
/// <typeparam name="T"></typeparam>
    public sealed class SplitEventEditor<T> where T : struct
    {
        // Grouped event work (value) variables unpacked from the block.
        public readonly IList<EventVarGroup> Work;

        // Grouped event flag (boolean) variables unpacked from the block.
        public readonly IList<EventVarGroup> Flag;

        // Backing save-data block the values are read from and written back to.
        public readonly IEventVar<T> Block;

        public SplitEventEditor(IEventVar<T> block, IEnumerable<string> work, IEnumerable<string> flag)
        {
            Block = block;

            // load lines: skip blank / too-short definition lines, then parse
            // the remainder into grouped editable variables.
            var workLines = work.Where(z => !string.IsNullOrWhiteSpace(z) && z.Length > 5);
            Work = EventWorkUtil.GetVars(workLines, (index, t, data) => new EventWork<T>(index, t, data));

            var flagLines = flag.Where(z => !string.IsNullOrWhiteSpace(z) && z.Length > 5);
            Flag = EventWorkUtil.GetVars(flagLines, (index, t, data) => new EventFlag(index, t, data));

            // initialize lines: resolve each variable's raw index within the
            // block and load its current value from the save data.
            foreach (var group in Work)
            {
                foreach (var item in group.Vars)
                {
                    item.RawIndex = block.GetWorkRawIndex(item.Type, item.RelativeIndex);
                    ((EventWork<T>)item).Value = block.GetWork(item.RawIndex);
                }
            }

            foreach (var group in Flag)
            {
                foreach (var item in group.Vars)
                {
                    item.RawIndex = block.GetFlagRawIndex(item.Type, item.RelativeIndex);
                    ((EventFlag)item).Flag = block.GetFlag(item.RawIndex);
                }
            }
        }

        /// <summary>
        /// Writes all of the updated event values back to the block.
        /// </summary>
        public void Save()
        {
            foreach (var g in Work)
            {
                foreach (var item in g.Vars)
                {
                    var value = ((EventWork<T>)item).Value;
                    Block.SetWork(item.RawIndex, value);
                }
            }
            foreach (var g in Flag)
            {
                foreach (var item in g.Vars)
                {
                    var value = ((EventFlag)item).Flag;
                    Block.SetFlag(item.RawIndex, value);
                }
            }
        }
    }
}
| ReignOfComputer/PKHeX | PKHeX.Core/Editing/Saves/Editors/EventWork/SplitEventEditor.cs | C# | gpl-3.0 | 2,434 |
package net.minecraft.entity.monster;
import net.minecraft.enchantment.EnchantmentHelper;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityCreature;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.SharedMonsterAttributes;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Items;
import net.minecraft.init.SoundEvents;
import net.minecraft.item.ItemAxe;
import net.minecraft.item.ItemStack;
import net.minecraft.util.DamageSource;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.SoundEvent;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.MathHelper;
import net.minecraft.world.EnumDifficulty;
import net.minecraft.world.EnumSkyBlock;
import net.minecraft.world.World;
/**
 * Base class for hostile monsters. Provides the shared hostile behaviour:
 * despawning on Peaceful difficulty, light-level-based spawn checks, melee
 * attacks with enchantment handling, and the hostile sound category.
 */
public abstract class EntityMob extends EntityCreature implements IMob
{
    public EntityMob(World worldIn)
    {
        super(worldIn);
        // XP awarded to the killer.
        this.experienceValue = 5;
    }

    // Hostile mobs play their sounds under the "hostile" volume category.
    public SoundCategory getSoundCategory()
    {
        return SoundCategory.HOSTILE;
    }

    /**
     * Called frequently so the entity can update its state every tick as required. For example, zombies and skeletons
     * use this to react to sunlight and start to burn.
     */
    public void onLivingUpdate()
    {
        this.updateArmSwingProgress();
        float f = this.getBrightness(1.0F);

        // In bright light, age the entity faster (affects despawn timing).
        if (f > 0.5F)
        {
            this.entityAge += 2;
        }

        super.onLivingUpdate();
    }

    /**
     * Called to update the entity's position/logic.
     */
    public void onUpdate()
    {
        super.onUpdate();

        // Hostile mobs are removed server-side on Peaceful difficulty.
        if (!this.worldObj.isRemote && this.worldObj.getDifficulty() == EnumDifficulty.PEACEFUL)
        {
            this.setDead();
        }
    }

    protected SoundEvent getSwimSound()
    {
        return SoundEvents.entity_hostile_swim;
    }

    protected SoundEvent getSplashSound()
    {
        return SoundEvents.entity_hostile_splash;
    }

    /**
     * Called when the entity is attacked. Invulnerable entities ignore the hit.
     */
    public boolean attackEntityFrom(DamageSource source, float amount)
    {
        return this.isEntityInvulnerable(source) ? false : super.attackEntityFrom(source, amount);
    }

    protected SoundEvent getHurtSound()
    {
        return SoundEvents.entity_hostile_hurt;
    }

    protected SoundEvent getDeathSound()
    {
        return SoundEvents.entity_hostile_death;
    }

    // Falls of more than 4 blocks use the "big fall" sound.
    protected SoundEvent getFallSound(int heightIn)
    {
        return heightIn > 4 ? SoundEvents.entity_hostile_big_fall : SoundEvents.entity_hostile_small_fall;
    }

    /**
     * Performs this mob's melee attack on the target: base damage comes from the
     * ATTACK_DAMAGE attribute, modified by weapon enchantments (creature-specific
     * damage, knockback, fire aspect) and the axe-vs-shield interaction.
     */
    public boolean attackEntityAsMob(Entity entityIn)
    {
        float f = (float)this.getEntityAttribute(SharedMonsterAttributes.ATTACK_DAMAGE).getAttributeValue();
        int i = 0;

        if (entityIn instanceof EntityLivingBase)
        {
            // Extra damage vs the target's creature type (e.g. Smite/Bane).
            f += EnchantmentHelper.getModifierForCreature(this.getHeldItemMainhand(), ((EntityLivingBase)entityIn).getCreatureAttribute());
            i += EnchantmentHelper.getKnockbackModifier(this);
        }

        boolean flag = entityIn.attackEntityFrom(DamageSource.causeMobDamage(this), f);

        if (flag)
        {
            if (i > 0 && entityIn instanceof EntityLivingBase)
            {
                // Knock the target back along this mob's facing; slow this mob
                // down as recoil.
                ((EntityLivingBase)entityIn).knockBack(this, (float)i * 0.5F, (double)MathHelper.sin(this.rotationYaw * 0.017453292F), (double)(-MathHelper.cos(this.rotationYaw * 0.017453292F)));
                this.motionX *= 0.6D;
                this.motionZ *= 0.6D;
            }

            int j = EnchantmentHelper.getFireAspectModifier(this);

            if (j > 0)
            {
                entityIn.setFire(j * 4);
            }

            if (entityIn instanceof EntityPlayer)
            {
                EntityPlayer entityplayer = (EntityPlayer)entityIn;
                ItemStack itemstack = this.getHeldItemMainhand();
                ItemStack itemstack1 = entityplayer.isHandActive() ? entityplayer.getActiveItemStack() : null;

                // Axe attacks against a raised shield may put the shield on a
                // 100-tick cooldown; chance scales with Efficiency level.
                if (itemstack != null && itemstack1 != null && itemstack.getItem() instanceof ItemAxe && itemstack1.getItem() == Items.shield)
                {
                    float f1 = 0.25F + (float)EnchantmentHelper.getEfficiencyModifier(this) * 0.05F;

                    if (this.rand.nextFloat() < f1)
                    {
                        entityplayer.getCooldownTracker().setCooldown(Items.shield, 100);
                        // NOTE(review): state byte 30 presumably tells clients
                        // the shield was disabled -- confirm against handler.
                        this.worldObj.setEntityState(entityplayer, (byte)30);
                    }
                }
            }

            this.applyEnchantments(this, entityIn);
        }

        return flag;
    }

    // Pathfinding weight: darker positions score higher for hostile mobs.
    public float getBlockPathWeight(BlockPos pos)
    {
        return 0.5F - this.worldObj.getLightBrightness(pos);
    }

    /**
     * Checks to make sure the light is not too bright where the mob is spawning
     */
    protected boolean isValidLightLevel()
    {
        BlockPos blockpos = new BlockPos(this.posX, this.getEntityBoundingBox().minY, this.posZ);

        // Random sky-light threshold: the brighter the sky light, the less
        // likely the position is accepted.
        if (this.worldObj.getLightFor(EnumSkyBlock.SKY, blockpos) > this.rand.nextInt(32))
        {
            return false;
        }
        else
        {
            int i = this.worldObj.getLightFromNeighbors(blockpos);

            // During a thunderstorm, re-sample light as if the sky were
            // darkened (subtracted level 10), then restore the real value.
            if (this.worldObj.isThundering())
            {
                int j = this.worldObj.getSkylightSubtracted();
                this.worldObj.setSkylightSubtracted(10);
                i = this.worldObj.getLightFromNeighbors(blockpos);
                this.worldObj.setSkylightSubtracted(j);
            }

            return i <= this.rand.nextInt(8);
        }
    }

    /**
     * Checks if the entity's current position is a valid location to spawn this entity.
     */
    public boolean getCanSpawnHere()
    {
        return this.worldObj.getDifficulty() != EnumDifficulty.PEACEFUL && this.isValidLightLevel() && super.getCanSpawnHere();
    }

    // Hostile mobs additionally carry an attack-damage attribute.
    protected void applyEntityAttributes()
    {
        super.applyEntityAttributes();
        this.getAttributeMap().registerAttribute(SharedMonsterAttributes.ATTACK_DAMAGE);
    }

    /**
     * Entity won't drop items or experience points if this returns false
     */
    protected boolean canDropLoot()
    {
        return true;
    }
} | aebert1/BigTransport | build/tmp/recompileMc/sources/net/minecraft/entity/monster/EntityMob.java | Java | gpl-3.0 | 6,264 |
package system // import "github.com/docker/docker/pkg/system"
import (
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"sync"
"syscall"
"time"
"unsafe"
winio "github.com/Microsoft/go-winio"
"golang.org/x/sys/windows"
)
const (
// SddlAdministratorsLocalSystem is local administrators plus NT AUTHORITY\System
SddlAdministratorsLocalSystem = "D:P(A;OICI;GA;;;BA)(A;OICI;GA;;;SY)"
)
// MkdirAllWithACL is a wrapper for MkdirAll that creates a directory
// with an appropriate SDDL defined ACL (e.g. SddlAdministratorsLocalSystem).
// The perm argument is ignored: on Windows, access is governed by the ACL,
// not Unix permission bits.
func MkdirAllWithACL(path string, perm os.FileMode, sddl string) error {
	return mkdirall(path, true, sddl)
}
// MkdirAll implementation that is volume path aware for Windows. It can be used
// as a drop-in replacement for os.MkdirAll(). The permission argument is
// ignored (no ACL is applied; see MkdirAllWithACL for that).
func MkdirAll(path string, _ os.FileMode) error {
	return mkdirall(path, false, "")
}
// mkdirall is a custom version of os.MkdirAll modified for use on Windows
// so that it is both volume path aware, and can create a directory with
// a DACL.

// volumePathRe matches raw volume GUID paths (\\?\Volume{guid}), which name a
// volume root and must not be Mkdir'd. It is compiled once at package init
// rather than on every (recursive) mkdirall call, which previously paid the
// regexp-compilation cost for each path element.
var volumePathRe = regexp.MustCompile(`^\\\\\?\\Volume{[a-z0-9-]+}$`)

func mkdirall(path string, applyACL bool, sddl string) error {
	if volumePathRe.MatchString(path) {
		return nil
	}

	// The rest of this method is largely copied from os.MkdirAll and should be kept
	// as-is to ensure compatibility.

	// Fast path: if we can tell whether path is a directory or file, stop with success or error.
	dir, err := os.Stat(path)
	if err == nil {
		if dir.IsDir() {
			return nil
		}
		return &os.PathError{
			Op:   "mkdir",
			Path: path,
			Err:  syscall.ENOTDIR,
		}
	}

	// Slow path: make sure parent exists and then call Mkdir for path.
	i := len(path)
	for i > 0 && os.IsPathSeparator(path[i-1]) { // Skip trailing path separator.
		i--
	}

	j := i
	for j > 0 && !os.IsPathSeparator(path[j-1]) { // Scan backward over element.
		j--
	}

	if j > 1 {
		// Create parent recursively; the ACL (if any) is only applied to the
		// leaf directory, matching the original behaviour.
		err = mkdirall(path[0:j-1], false, sddl)
		if err != nil {
			return err
		}
	}

	// Parent now exists; invoke os.Mkdir or mkdirWithACL and use its result.
	if applyACL {
		err = mkdirWithACL(path, sddl)
	} else {
		err = os.Mkdir(path, 0)
	}

	if err != nil {
		// Handle arguments like "foo/." by
		// double-checking that directory doesn't exist.
		dir, err1 := os.Lstat(path)
		if err1 == nil && dir.IsDir() {
			return nil
		}
		return err
	}
	return nil
}
// mkdirWithACL creates a new directory. If there is an error, it will be of
// type *PathError. .
//
// This is a modified and combined version of os.Mkdir and windows.Mkdir
// in golang to cater for creating a directory am ACL permitting full
// access, with inheritance, to any subfolder/file for Built-in Administrators
// and Local System.
func mkdirWithACL(name string, sddl string) error {
	sa := windows.SecurityAttributes{Length: 0}
	// Convert the textual SDDL into a binary security descriptor.
	sd, err := winio.SddlToSecurityDescriptor(sddl)
	if err != nil {
		return &os.PathError{Op: "mkdir", Path: name, Err: err}
	}
	sa.Length = uint32(unsafe.Sizeof(sa))
	sa.InheritHandle = 1
	sa.SecurityDescriptor = uintptr(unsafe.Pointer(&sd[0]))

	// CreateDirectory wants a UTF-16 path.
	namep, err := windows.UTF16PtrFromString(name)
	if err != nil {
		return &os.PathError{Op: "mkdir", Path: name, Err: err}
	}

	e := windows.CreateDirectory(namep, &sa)
	if e != nil {
		return &os.PathError{Op: "mkdir", Path: name, Err: e}
	}
	return nil
}
// IsAbs is a platform-specific wrapper for filepath.IsAbs. On Windows,
// golang filepath.IsAbs does not consider a path \windows\system32 as absolute
// as it doesn't start with a drive-letter/colon combination. However, in
// docker we need to verify things such as WORKDIR /windows/system32 in
// a Dockerfile (which gets translated to \windows\system32 when being processed
// by the daemon. This SHOULD be treated as absolute from a docker processing
// perspective.
func IsAbs(path string) bool {
if !filepath.IsAbs(path) {
if !strings.HasPrefix(path, string(os.PathSeparator)) {
return false
}
}
return true
}
// The origin of the functions below here are the golang OS and windows packages,
// slightly modified to only cope with files, not directories due to the
// specific use case.
//
// The alteration is to allow a file on Windows to be opened with
// FILE_FLAG_SEQUENTIAL_SCAN (particular for docker load), to avoid eating
// the standby list, particularly when accessing large files such as layer.tar.
// CreateSequential creates the named file with mode 0666 (before umask), truncating
// it if it already exists. If successful, methods on the returned
// File can be used for I/O; the associated file descriptor has mode
// O_RDWR.
// If there is an error, it will be of type *PathError.
func CreateSequential(name string) (*os.File, error) {
	// Delegates to the sequential-scan open with create+truncate semantics.
	return OpenFileSequential(name, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0)
}
// OpenSequential opens the named file for reading. If successful, methods on
// the returned file can be used for reading; the associated file
// descriptor has mode O_RDONLY.
// If there is an error, it will be of type *PathError.
func OpenSequential(name string) (*os.File, error) {
	// Read-only variant of the sequential-scan open.
	return OpenFileSequential(name, os.O_RDONLY, 0)
}
// OpenFileSequential is the generalized open call; most users will use Open
// or Create instead.
// The file is opened with FILE_FLAG_SEQUENTIAL_SCAN; the FileMode argument is
// ignored. If there is an error, it will be of type *PathError.
func OpenFileSequential(name string, flag int, _ os.FileMode) (*os.File, error) {
	if name == "" {
		return nil, &os.PathError{Op: "open", Path: name, Err: syscall.ENOENT}
	}
	f, err := windowsOpenFileSequential(name, flag, 0)
	if err != nil {
		return nil, &os.PathError{Op: "open", Path: name, Err: err}
	}
	return f, nil
}
// windowsOpenFileSequential opens the file via the Win32 CreateFile path
// (always with O_CLOEXEC so the handle is not inherited) and wraps the raw
// handle in an *os.File.
func windowsOpenFileSequential(name string, flag int, _ os.FileMode) (file *os.File, err error) {
	r, e := windowsOpenSequential(name, flag|windows.O_CLOEXEC, 0)
	if e != nil {
		return nil, e
	}
	return os.NewFile(uintptr(r), name), nil
}
// makeInheritSa builds a SecurityAttributes struct marking the handle as
// inheritable by child processes (used when O_CLOEXEC is not requested).
func makeInheritSa() *windows.SecurityAttributes {
	var sa windows.SecurityAttributes
	sa.Length = uint32(unsafe.Sizeof(sa))
	sa.InheritHandle = 1
	return &sa
}
// windowsOpenSequential translates POSIX-style O_* open flags into a Win32
// CreateFile call, passing FILE_FLAG_SEQUENTIAL_SCAN instead of
// FILE_ATTRIBUTE_NORMAL so the OS cache favours sequential reads.
func windowsOpenSequential(path string, mode int, _ uint32) (fd windows.Handle, err error) {
	if len(path) == 0 {
		return windows.InvalidHandle, windows.ERROR_FILE_NOT_FOUND
	}
	pathp, err := windows.UTF16PtrFromString(path)
	if err != nil {
		return windows.InvalidHandle, err
	}
	// Map the read/write mode bits onto Win32 generic access rights.
	var access uint32
	switch mode & (windows.O_RDONLY | windows.O_WRONLY | windows.O_RDWR) {
	case windows.O_RDONLY:
		access = windows.GENERIC_READ
	case windows.O_WRONLY:
		access = windows.GENERIC_WRITE
	case windows.O_RDWR:
		access = windows.GENERIC_READ | windows.GENERIC_WRITE
	}
	if mode&windows.O_CREAT != 0 {
		access |= windows.GENERIC_WRITE
	}
	if mode&windows.O_APPEND != 0 {
		// Appending replaces generic write access with FILE_APPEND_DATA.
		access &^= windows.GENERIC_WRITE
		access |= windows.FILE_APPEND_DATA
	}
	// Allow other processes to read/write the file concurrently.
	sharemode := uint32(windows.FILE_SHARE_READ | windows.FILE_SHARE_WRITE)
	var sa *windows.SecurityAttributes
	if mode&windows.O_CLOEXEC == 0 {
		// Without close-on-exec, the handle must be inheritable.
		sa = makeInheritSa()
	}
	// Translate O_CREAT/O_EXCL/O_TRUNC combinations to a creation disposition.
	var createmode uint32
	switch {
	case mode&(windows.O_CREAT|windows.O_EXCL) == (windows.O_CREAT | windows.O_EXCL):
		createmode = windows.CREATE_NEW
	case mode&(windows.O_CREAT|windows.O_TRUNC) == (windows.O_CREAT | windows.O_TRUNC):
		createmode = windows.CREATE_ALWAYS
	case mode&windows.O_CREAT == windows.O_CREAT:
		createmode = windows.OPEN_ALWAYS
	case mode&windows.O_TRUNC == windows.O_TRUNC:
		createmode = windows.TRUNCATE_EXISTING
	default:
		createmode = windows.OPEN_EXISTING
	}
	// Use FILE_FLAG_SEQUENTIAL_SCAN rather than FILE_ATTRIBUTE_NORMAL as implemented in golang.
	//https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858(v=vs.85).aspx
	const fileFlagSequentialScan = 0x08000000 // FILE_FLAG_SEQUENTIAL_SCAN
	h, e := windows.CreateFile(pathp, access, sharemode, sa, createmode, fileFlagSequentialScan, 0)
	return h, e
}
// Helpers for TempFileSequential

// rand holds the state of a simple linear congruential generator used to
// derive candidate temp-file name suffixes; guarded by randmu.
var rand uint32
var randmu sync.Mutex

// reseed derives a fresh LCG seed from the current time and process id.
func reseed() uint32 {
	return uint32(time.Now().UnixNano() + int64(os.Getpid()))
}
// nextSuffix returns the next pseudo-random nine-digit suffix for a candidate
// temp-file name, advancing the package-level LCG state under randmu.
func nextSuffix() string {
	randmu.Lock()
	defer randmu.Unlock()
	state := rand
	if state == 0 {
		state = reseed()
	}
	// One LCG step; constants from Numerical Recipes.
	state = state*1664525 + 1013904223
	rand = state
	// Prepend 1e9 then strip the leading digit to zero-pad to nine digits.
	return strconv.Itoa(int(1e9 + state%1e9))[1:]
}
// TempFileSequential is a copy of ioutil.TempFile, modified to use sequential
// file access. Below is the original comment from golang:
// TempFile creates a new temporary file in the directory dir
// with a name beginning with prefix, opens the file for reading
// and writing, and returns the resulting *os.File.
// If dir is the empty string, TempFile uses the default directory
// for temporary files (see os.TempDir).
// Multiple programs calling TempFile simultaneously
// will not choose the same file. The caller can use f.Name()
// to find the pathname of the file. It is the caller's responsibility
// to remove the file when no longer needed.
func TempFileSequential(dir, prefix string) (f *os.File, err error) {
	if dir == "" {
		dir = os.TempDir()
	}

	// Try up to 10000 candidate names; O_EXCL guarantees we never reuse an
	// existing file.
	nconflict := 0
	for i := 0; i < 10000; i++ {
		name := filepath.Join(dir, prefix+nextSuffix())
		f, err = OpenFileSequential(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0600)
		if os.IsExist(err) {
			// After several collisions, reseed the generator to escape a
			// predictable sequence.
			if nconflict++; nconflict > 10 {
				randmu.Lock()
				rand = reseed()
				randmu.Unlock()
			}
			continue
		}
		break
	}
	return
}
| mudler/docker-companion | vendor/github.com/docker/docker/pkg/system/filesys_windows.go | GO | gpl-3.0 | 9,173 |
// Spec for the link-type button block: checks URL get/set behaviour and the
// handling of the "disabled" modifier (which must drop the href attribute).
modules.define(
    'spec',
    ['button', 'i-bem__dom', 'chai', 'jquery', 'BEMHTML'],
    function(provide, Button, BEMDOM, chai, $, BEMHTML) {

    var expect = chai.expect;

    describe('button_type_link', function() {
        var button;

        // Build a fresh link-type button pointing at '/' before every test...
        beforeEach(function() {
            button = buildButton({
                block : 'button',
                mods : { type : 'link' },
                url : '/'
            });
        });

        // ...and tear it down from the DOM afterwards.
        afterEach(function() {
            BEMDOM.destruct(button.domElem);
        });

        describe('url', function() {
            it('should properly gets url', function() {
                button.domElem.attr('href').should.be.equal('/');
                button.getUrl().should.be.equal('/');
            });

            it('should properly sets url', function() {
                button.setUrl('/bla');
                button.domElem.attr('href').should.be.equal('/bla');
                button.getUrl().should.be.equal('/bla');
            });
        });

        describe('disabled', function() {
            it('should remove "href" attribute if disabled before init', function() {
                BEMDOM.destruct(button.domElem); // we need to destruct default button from beforeEach

                button = buildButton({
                    block : 'button',
                    mods : { type : 'link', disabled : true },
                    url : '/'
                });

                // getUrl() still reports the URL even though href is removed.
                button.getUrl().should.be.equal('/');
                expect(button.domElem.attr('href')).to.be.undefined;
            });

            it('should update attributes properly', function() {
                button.setMod('disabled');
                button.domElem.attr('aria-disabled').should.be.equal('true');
                expect(button.domElem.attr('href')).to.be.undefined;

                button.delMod('disabled');
                button.domElem.attr('href').should.be.equal('/');
                expect(button.domElem.attr('aria-disabled')).to.be.undefined;
            });
        });

        // Renders the bemjson via BEMHTML, appends the markup to the page and
        // returns the initialized button block instance.
        function buildButton(bemjson) {
            return BEMDOM.init($(BEMHTML.apply(bemjson))
                .appendTo('body'))
                .bem('button');
        }
    });

    provide();

});
| dojdev/bem-components | common.blocks/button/_type/button_type_link.spec.js | JavaScript | mpl-2.0 | 2,043 |
/**
* The contents of this file are subject to the Mozilla Public License
* Version 1.1 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations under
* the License.
*
* The Original Code is OpenELIS code.
*
* Copyright (C) The Minnesota Department of Health. All Rights Reserved.
*/
package us.mn.state.health.lims.codeelementtype.action;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import us.mn.state.health.lims.codeelementtype.dao.CodeElementTypeDAO;
import us.mn.state.health.lims.codeelementtype.daoimpl.CodeElementTypeDAOImpl;
import us.mn.state.health.lims.codeelementtype.valueholder.CodeElementType;
import us.mn.state.health.lims.common.action.BaseAction;
import us.mn.state.health.lims.common.action.BaseActionForm;
import us.mn.state.health.lims.common.exception.LIMSRuntimeException;
import us.mn.state.health.lims.common.log.LogEvent;
import us.mn.state.health.lims.common.util.StringUtil;
/**
* @author diane benz
*
* To change this generated comment edit the template variable "typecomment":
* Window>Preferences>Java>Templates. To enable and disable the creation of type
* comments go to Window>Preferences>Java>Code Generation.
*/
public class CodeElementTypeNextPreviousAction extends BaseAction {

	// NOTE(review): assigned in performAction() but never read afterwards;
	// presumably vestigial from the template this action was copied from --
	// confirm before removing.
	private boolean isNew = false;

	/**
	 * Loads the next or previous CodeElementType record relative to the one
	 * identified by the request's ID parameter (record-by-record paging), and
	 * enables/disables the next/previous navigation buttons accordingly.
	 */
	protected ActionForward performAction(ActionMapping mapping,
			ActionForm form, HttpServletRequest request,
			HttpServletResponse response) throws Exception {
		// The first job is to determine if we are coming to this action with an
		// ID parameter in the request. If there is no parameter, we are
		// creating a new Analyte.
		// If there is a parameter present, we should bring up an existing
		// Analyte to edit.
		String forward = FWD_SUCCESS;
		request.setAttribute(ALLOW_EDITS_KEY, "true");
		request.setAttribute(PREVIOUS_DISABLED, "false");
		request.setAttribute(NEXT_DISABLED, "false");
		String id = request.getParameter(ID);

		// Absent or zero ID means a new (unsaved) record.
		if (StringUtil.isNullorNill(id) || "0".equals(id)) {
			isNew = true;
		} else {
			isNew = false;
		}

		BaseActionForm dynaForm = (BaseActionForm) form;

		String start = (String) request.getParameter("startingRecNo");
		String direction = (String) request.getParameter("direction");

		// System.out.println("This is ID from request " + id);
		CodeElementType codeElementType = new CodeElementType();
		codeElementType.setId(id);

		try {
			CodeElementTypeDAO codeElementTypeDAO = new CodeElementTypeDAOImpl();

			//retrieve analyte by id since the name may have changed
			codeElementTypeDAO.getData(codeElementType);

			if (FWD_NEXT.equals(direction)) {
				//bugzilla 1427 pass in name not id
				List codeElementTypes = codeElementTypeDAO.getNextCodeElementTypeRecord(codeElementType.getText());

				if (codeElementTypes != null && codeElementTypes.size() > 0) {
					codeElementType = (CodeElementType) codeElementTypes.get(0);
					codeElementTypeDAO.getData(codeElementType);
					// Fewer than two results means no record follows the one
					// just loaded.
					if (codeElementTypes.size() < 2) {
						// disable next button
						request.setAttribute(NEXT_DISABLED, "true");
					}
					id = codeElementType.getId();
				} else {
					// just disable next button
					request.setAttribute(NEXT_DISABLED, "true");
				}
			}

			if (FWD_PREVIOUS.equals(direction)) {
				//bugzilla 1427 pass in name not id
				List codeElementTypes = codeElementTypeDAO.getPreviousCodeElementTypeRecord(codeElementType.getText());

				if (codeElementTypes != null && codeElementTypes.size() > 0) {
					codeElementType = (CodeElementType) codeElementTypes.get(0);
					codeElementTypeDAO.getData(codeElementType);
					// Fewer than two results means no record precedes the one
					// just loaded.
					if (codeElementTypes.size() < 2) {
						// disable previous button
						request.setAttribute(PREVIOUS_DISABLED, "true");
					}
					id = codeElementType.getId();
				} else {
					// just disable next button
					request.setAttribute(PREVIOUS_DISABLED, "true");
				}
			}

		} catch (LIMSRuntimeException lre) {
			// On data-access failure: log, lock down editing and navigation,
			// and route to the failure forward.
			//bugzilla 2154
			LogEvent.logError("CodeElementTypeNextPreviousAction","performAction()",lre.toString());

			request.setAttribute(ALLOW_EDITS_KEY, "false");
			// disable previous and next
			request.setAttribute(PREVIOUS_DISABLED, "true");
			request.setAttribute(NEXT_DISABLED, "true");
			forward = FWD_FAIL;
		}

		if (forward.equals(FWD_FAIL))
			return mapping.findForward(forward);

		// Expose the (possibly updated) record id to the next page.
		if (codeElementType.getId() != null && !codeElementType.getId().equals("0")) {
			request.setAttribute(ID, codeElementType.getId());
		}

		return getForward(mapping.findForward(forward), id, start);
	}

	// No custom page title for this action.
	protected String getPageTitleKey() {
		return null;
	}

	// No custom page subtitle for this action.
	protected String getPageSubtitleKey() {
		return null;
	}
} | mark47/OESandbox | app/src/us/mn/state/health/lims/codeelementtype/action/CodeElementTypeNextPreviousAction.java | Java | mpl-2.0 | 5,173 |
import java.util.Scanner;
/**
 * Segment tree over an int array supporting range updates (add a delta to
 * every element of [p, r]) and range sum queries, using lazy propagation.
 * Indices are 0-based and inclusive; nodes are stored in heap layout
 * (children of node i are 2i+1 and 2i+2).
 */
public class SegmentTree {

    private static class Node {
        // left/right: inclusive bounds of the segment covered by this node.
        public int left, right;
        // add: pending lazy delta not yet pushed to children; sum: segment sum.
        public long add, sum;

        public Node(int left, int right, long sum) {
            this.left = left;
            this.right = right;
            this.sum = sum;
        }
    }

    private Node[] tree;
    private int size;

    public SegmentTree(int n,int[] arr) {
        // 4n nodes is a safe upper bound for the heap-layout tree.
        size = (n<<2);
        tree = new Node[size];
        build(0, 0, n-1, arr);
    }

    // Recursively builds the subtree rooted at node index pos covering [p, r].
    private void build(int pos, int p, int r, int[] arr) {
        if (p == r) {
            tree[pos] = new Node(p, r, arr[p]);
        } else {
            build(2*pos+1, p, (p+r)/2, arr);
            build(2*pos+2, (p+r)/2+1, r, arr);
            tree[pos] = new Node(p, r, tree[2*pos+1].sum + tree[2*pos+2].sum);
        }
    }

    // Adds delt to every element of [p, r], clamping the range to the tree's
    // bounds first; out-of-order ranges (p > r after clamping) are ignored.
    public void update(int p, int r, long delt) {
        p = (tree[0].left < p)?
                p : tree[0].left;
        r = (tree[0].right > r)?
                r : tree[0].right;
        if (p <= r) {
            updateHelp(0, p, r, delt);
        }
    }

    // Recursive range-add: apply lazily when the node is fully covered,
    // otherwise push pending adds down and recurse.
    private void updateHelp(int pos, int p, int r, long delt) {
        if (tree[pos].left>=p && tree[pos].right<=r) {
            tree[pos].add += delt;
            tree[pos].sum +=
                    (tree[pos].right-tree[pos].left+1)*delt;
        } else {
            if (tree[pos].add!=0) {
                pushDown(pos);
            }
            int mid = (tree[pos].left+tree[pos].right)/2;
            if (p <= mid) {
                updateHelp(2*pos+1, p, r, delt);
            }
            if (mid+1 <= r) {
                updateHelp(2*pos+2, p, r, delt);
            }
            tree[pos].sum = tree[2*pos+1].sum + tree[2*pos+2].sum;
        }
    }

    // Moves this node's pending delta down to both children.
    private void pushDown(int pos) {
        int left = 2*pos+1, right = 2*pos+2;
        tree[left].add += tree[pos].add;
        tree[right].add += tree[pos].add;
        tree[left].sum +=
                (tree[left].right-tree[left].left+1)*tree[pos].add;
        tree[right].sum +=
                (tree[right].right-tree[right].left+1)*tree[pos].add;
        tree[pos].add = 0;
    }

    // Returns the sum over [p, r]; 0 if the range is not fully inside the tree.
    public long query(int p,int r) {
        if (tree[0].left<=p && tree[0].right>=r) {
            return queryHelp(0,p,r);
        } else {
            return 0;
        }
    }

    private long queryHelp(int pos,int p,int r) {
        if (tree[pos].left>=p && tree[pos].right<=r) {
            return tree[pos].sum;
        } else {
            if (tree[pos].add!=0) {
                pushDown(pos);
            }
            long val = 0;
            int mid = (tree[pos].left+tree[pos].right)/2;
            if (p <= mid) {
                val += queryHelp(2*pos+1, p, r);
            }
            if (mid+1 <= r) {
                val += queryHelp(2*pos+2, p, r);
            }
            return val;
        }
    }

    // Entry point simply delegates to the judge driver in Main.
    public static void main(String[] args) {
        Main.main(args);
    }
}
class Main {

    /** POJ 3468: http://poj.org/problem?id=3468 */
    // Input: n and q, then n values, then q operations:
    //   "C p r d" adds d to elements p..r; "Q p r" prints the sum of p..r.
    // Positions in the input are 1-based and converted to 0-based here.
    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        int n = in.nextInt();
        int[] arr = new int[n];
        int q = in.nextInt();
        for (int i=0;i<n;i++) {
            arr[i] = in.nextInt();
        }
        SegmentTree tr = new SegmentTree(n,arr);
        for (int i=0;i<q;i++) {
            String op = in.next();
            if (op.equals("C")) {
                int p = in.nextInt()-1;
                int r = in.nextInt()-1;
                tr.update(p,r,in.nextInt());
            } else if (op.equals("Q")) {
                int p = in.nextInt()-1;
                int r = in.nextInt()-1;
                System.out.println(tr.query(p,r));
            }
        }
        in.close();
    }
}
| DevinZ1993/Pieces-of-Code | java/Sets/src/SegmentTree.java | Java | mpl-2.0 | 3,871 |
"""
Tests for Blocks api.py
"""
from django.test.client import RequestFactory
from course_blocks.tests.helpers import EnableTransformerRegistryMixin
from student.tests.factories import UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import SampleCourseFactory
from ..api import get_blocks
class TestGetBlocks(EnableTransformerRegistryMixin, SharedModuleStoreTestCase):
    """
    Tests for the get_blocks function
    """
    @classmethod
    def setUpClass(cls):
        # Build one shared sample course for the whole test class.
        super(TestGetBlocks, cls).setUpClass()
        cls.course = SampleCourseFactory.create()

        # hide the html block
        cls.html_block = cls.store.get_item(cls.course.id.make_usage_key('html', 'html_x1a_1'))
        cls.html_block.visible_to_staff_only = True
        cls.store.update_item(cls.html_block, ModuleStoreEnum.UserID.test)

    def setUp(self):
        # A non-staff user and a request carrying that user.
        super(TestGetBlocks, self).setUp()
        self.user = UserFactory.create()
        self.request = RequestFactory().get("/dummy")
        self.request.user = self.user

    def test_basic(self):
        """For a regular user, staff-only blocks are excluded from the result."""
        blocks = get_blocks(self.request, self.course.location, self.user)
        self.assertEquals(blocks['root'], unicode(self.course.location))

        # subtract for (1) the orphaned course About block and (2) the hidden Html block
        self.assertEquals(len(blocks['blocks']), len(self.store.get_items(self.course.id)) - 2)
        self.assertNotIn(unicode(self.html_block.location), blocks['blocks'])

    def test_no_user(self):
        """Without a user, access checks are skipped and hidden blocks appear."""
        blocks = get_blocks(self.request, self.course.location)
        self.assertIn(unicode(self.html_block.location), blocks['blocks'])

    def test_access_before_api_transformer_order(self):
        """
        Tests the order of transformers: access checks are made before the api
        transformer is applied.
        """
        blocks = get_blocks(self.request, self.course.location, self.user, nav_depth=5, requested_fields=['nav_depth'])
        vertical_block = self.store.get_item(self.course.id.make_usage_key('vertical', 'vertical_x1a'))
        problem_block = self.store.get_item(self.course.id.make_usage_key('problem', 'problem_x1a_1'))

        # The accessible problem is listed among the vertical's descendants,
        # while the hidden html block is not.
        vertical_descendants = blocks['blocks'][unicode(vertical_block.location)]['descendants']

        self.assertIn(unicode(problem_block.location), vertical_descendants)
        self.assertNotIn(unicode(self.html_block.location), vertical_descendants)
| antoviaque/edx-platform | lms/djangoapps/course_api/blocks/tests/test_api.py | Python | agpl-3.0 | 2,533 |
"""This *was* the parser for the current HTML format on parl.gc.ca.
But now we have XML. See parl_document.py.
This module is organized like so:
__init__.py - utility functions, simple parse interface
common.py - infrastructure used in the parsers, i.e. regexes
current.py - parser for the Hansard format used from 2006 to the present
old.py - (fairly crufty) parser for the format used from 1994 to 2006
"""
from parliament.imports.hans_old.common import *
import logging
logger = logging.getLogger(__name__)
class HansardParser2009(HansardParser):
    def __init__(self, hansard, html):
        # Apply the 2009-format-specific cleanup substitutions
        # (pattern, replacement pairs from hans_old.common.STARTUP_RE_2009)
        # before handing the page to the generic parser.
        for regex in STARTUP_RE_2009:
            html = re.sub(regex[0], regex[1], html)

        super(HansardParser2009, self).__init__(hansard, html)

        # Remove links the page marks "deleteMe", along with the <div>
        # containing each one.
        for x in self.soup.findAll('a', 'deleteMe'):
            x.findParent('div').extract()
def process_related_link(self, tag, string, current_politician=None):
#print "PROCESSING RELATED for %s" % string
resid = re.search(r'ResourceID=(\d+)', tag['href'])
restype = re.search(r'ResourceType=(Document|Affiliation)', tag['href'])
if not resid and restype:
return string
resid, restype = int(resid.group(1)), restype.group(1)
if restype == 'Document':
try:
bill = Bill.objects.get_by_legisinfo_id(resid)
except Bill.DoesNotExist:
match = re.search(r'\b[CS]\-\d+[A-E]?\b', string)
if not match:
logger.error("Invalid bill link %s" % string)
return string
bill = Bill.objects.create_temporary_bill(legisinfo_id=resid,
number=match.group(0), session=self.hansard.session)
except Exception, e:
print "Related bill search failed for callback %s" % resid
print repr(e)
return string
return u'<bill id="%d" name="%s">%s</bill>' % (bill.id, escape(bill.name), string)
elif restype == 'Affiliation':
try:
pol = Politician.objects.getByParlID(resid)
except Politician.DoesNotExist:
print "Related politician search failed for callback %s" % resid
if getattr(settings, 'PARLIAMENT_LABEL_FAILED_CALLBACK', False):
# FIXME migrate away from internalxref?
InternalXref.objects.get_or_create(schema='pol_parlid', int_value=resid, target_id=-1)
return string
if pol == current_politician:
return string # When someone mentions her riding, don't link back to her
return u'<pol id="%d" name="%s">%s</pol>' % (pol.id, escape(pol.name), string)
def get_text(self, cursor):
text = u''
for string in cursor.findAll(text=parsetools.r_hasText):
if string.parent.name == 'a' and string.parent['class'] == 'WebOption':
text += self.process_related_link(string.parent, string, self.t['politician'])
else:
text += unicode(string)
return text
def parse(self):
    """Parse a 2009-era Hansard HTML document into statements.

    Walks the soup node-by-node via BeautifulSoup's `.next` pointers,
    accumulating state (speaker, topic, timestamp, text) in a
    ParseTracker and flushing a statement whenever a new speaker or
    section is detected.  Returns the accumulated statement list.
    """
    super(HansardParser2009, self).parse()
    # Initialize variables
    t = ParseTracker()
    self.t = t
    member_refs = {}  # maps previously seen member titles -> ElectedMember
    # Get the date
    c = self.soup.find(text='OFFICIAL REPORT (HANSARD)').findNext('h2')
    self.date = datetime.datetime.strptime(c.string.strip(), "%A, %B %d, %Y").date()
    self.hansard.date = self.date
    self.hansard.save()
    # The "house met at" line gives us the sitting's starting timestamp.
    c = c.findNext(text=r_housemet)
    match = re.search(r_housemet, c.string)
    t['timestamp'] = self.houseTime(match.group(1), match.group(2))
    t.setNext('timestamp', t['timestamp'])
    # Move the pointer to the start
    c = c.next
    # And start the big loop
    while c is not None:
        # It's a string
        if not hasattr(c, 'name'):
            pass
        # Heading
        elif c.name == 'h2':
            c = c.next
            if not parsetools.isString(c): raise ParseException("Expecting string right after h2")
            t.setNext('heading', parsetools.titleIfNecessary(parsetools.tameWhitespace(c.string.strip())))
        # Topic
        elif c.name == 'h3':
            top = c.find(text=r_letter)
            #if not parsetools.isString(c):
            # check if it's an empty header
            #    if c.parent.find(text=r_letter):
            #        raise ParseException("Expecting string right after h3")
            if top is not None:
                c = top
                t['topic_set'] = True
                t.setNext('topic', parsetools.titleIfNecessary(parsetools.tameWhitespace(c.string.strip())))
        elif c.name == 'h4':
            # An APPENDIX heading marks the end of usable content.
            if c.string == 'APPENDIX':
                self.saveStatement(t)
                print "Appendix reached -- we're done!"
                break
        # Timestamp
        elif c.name == 'a' and c.has_key('name') and c['name'].startswith('T'):
            # Anchors named T<hhmm> carry the running clock time.
            match = re.search(r'^T(\d\d)(\d\d)$', c['name'])
            if match:
                t.setNext('timestamp', parsetools.time_to_datetime(
                    hour=int(match.group(1)),
                    minute=int(match.group(2)),
                    date=self.date))
            else:
                raise ParseException("Couldn't match time %s" % c.attrs['name'])
        elif c.name == 'b' and c.string:
            # Something to do with written answers
            match = r_honorific.search(c.string)
            if match:
                # It's a politician asking or answering a question
                # We don't get a proper link here, so this has to be a name match
                polname = re.sub(r'\(.+\)', '', match.group(2)).strip()
                self.saveStatement(t)
                t['member_title'] = c.string.strip()
                t['written_question'] = True
                try:
                    pol = Politician.objects.get_by_name(polname, session=self.hansard.session)
                    t['politician'] = pol
                    t['member'] = ElectedMember.objects.get_by_pol(politician=pol, date=self.date)
                except Politician.DoesNotExist:
                    print "WARNING: No name match for %s" % polname
                except Politician.MultipleObjectsReturned:
                    print "WARNING: Multiple pols for %s" % polname
            else:
                if not c.string.startswith('Question'):
                    print "WARNING: Unexplained boldness: %s" % c.string
        # div -- the biggie
        elif c.name == 'div':
            origdiv = c
            if c.find('b'):
                # We think it's a new speaker
                # Save the current buffer
                self.saveStatement(t)
                c = c.find('b')
                if c.find('a'):
                    # There's a link...
                    c = c.find('a')
                    match = re.search(r'ResourceType=Affiliation&ResourceID=(\d+)', c['href'])
                    if match and c.find(text=r_letter):
                        parlwebid = int(match.group(1))
                        # We have the parl ID. First, see if we already know this ID.
                        pol = Politician.objects.getByParlID(parlwebid, lookOnline=False)
                        if pol is None:
                            # We don't. Try to do a quick name match first (if flags say so)
                            if not GET_PARLID_ONLINE:
                                who = c.next.string
                                match = re.search(r_honorific, who)
                                if match:
                                    polname = re.sub(r'\(.+\)', '', match.group(2)).strip()
                                    try:
                                        #print "Looking for %s..." % polname,
                                        pol = Politician.objects.get_by_name(polname, session=self.hansard.session)
                                        #print "found."
                                    except Politician.DoesNotExist:
                                        pass
                                    except Politician.MultipleObjectsReturned:
                                        pass
                            if pol is None:
                                # Still no match. Go online...
                                try:
                                    pol = Politician.objects.getByParlID(parlwebid, session=self.hansard.session)
                                except Politician.DoesNotExist:
                                    print "WARNING: Couldn't find politician for ID %d" % parlwebid
                        if pol is not None:
                            t['member'] = ElectedMember.objects.get_by_pol(politician=pol, date=self.date)
                            t['politician'] = pol
                c = c.next
                if not parsetools.isString(c): raise Exception("Expecting string in b for member name")
                t['member_title'] = c.strip()
                #print c
                if t['member_title'].endswith(':'): # Remove colon in e.g. Some hon. members:
                    t['member_title'] = t['member_title'][:-1]
                # Sometimes we don't get a link for short statements -- see if we can identify by backreference
                if t['member']:
                    member_refs[t['member_title']] = t['member']
                    # Also save a backref w/o position/riding
                    member_refs[re.sub(r'\s*\(.+\)\s*', '', t['member_title'])] = t['member']
                elif t['member_title'] in member_refs:
                    t['member'] = member_refs[t['member_title']]
                    t['politician'] = t['member'].politician
                c.findParent('b').extract() # We've got the title, now get the rest of the paragraph
                c = origdiv
                t.addText(self.get_text(c))
            else:
                # There should be text in here
                if c.find('div'):
                    if c.find('div', 'Footer'):
                        # We're done!
                        self.saveStatement(t)
                        print "Footer div reached -- done!"
                        break
                    raise Exception("I wasn't expecting another div in here")
                txt = self.get_text(c).strip()
                if r_proceedings.search(txt):
                    # Procedural text is stored as its own statement.
                    self.saveStatement(t)
                    self.saveProceedingsStatement(txt, t)
                else:
                    t.addText(txt, blockquote=bool(c.find('small')))
        else:
            #print c.name
            if c.name == 'b':
                print "B: ",
                print c
            #if c.name == 'p':
            #    print "P: ",
            #    print c
        c = c.next
    return self.statements
| twhyte/openparliament | parliament/imports/hans_old/current.py | Python | agpl-3.0 | 11,722 |
require_relative 'test_helper'
# API tests for the /boxes endpoints: fetching boxes (and their contained
# blocks) for profiles, communities, people, enterprises and environments,
# plus the per-viewer block-visibility rules.
class BoxesTest < ActiveSupport::TestCase

  def setup
    create_and_activate_user
    login_api
  end

  # Generate one test per owner kind: each kind exposes its boxes under
  # its own REST collection path.
  kinds= %w[Profile Community Person Enterprise Environment]
  kinds.each do |kind|
    should "get_boxes_from_#{kind.downcase.pluralize}" do
      context_obj = fast_create(kind.constantize)
      # Environments own boxes directly; every other kind owns them as a Profile.
      box = fast_create(Box, :owner_id => context_obj.id, :owner_type => (kind == 'Environment') ? 'Environment' : 'Profile')
      get "/api/v1/#{kind.downcase.pluralize}/#{context_obj.id}/boxes?#{params.to_query}"
      json = JSON.parse(last_response.body)
      assert_equal box.id, json.first["id"]
    end
  end

  should 'get boxes from default environment' do
    Environment.delete_all
    environment = fast_create(Environment, :is_default => true)
    box = fast_create(Box, :owner_id => environment.id, :owner_type => 'Environment')
    get "/api/v1/environments/default/boxes?#{params.to_query}"
    json = JSON.parse(last_response.body)
    assert_equal box.id, json.first["id"]
  end

  should 'get boxes from context environment' do
    env = fast_create(Environment, :is_default => true)
    env2 = fast_create(Environment).domains << Domain.new(:name => 'test.host')
    # NOTE(review): `environment` is not assigned in this test (the locals
    # are `env`/`env2`); presumably it resolves to a test helper or raises.
    # Confirm which environment the box is meant to belong to.
    box = fast_create(Box, :owner_id => environment.id, :owner_type => 'Environment')
    get "/api/v1/environments/context/boxes?#{params.to_query}"
    json = JSON.parse(last_response.body)
    assert_equal box.id, json.first["id"]
  end

  should 'not display block api_content by default' do
    Environment.delete_all
    environment = fast_create(Environment, :is_default => true)
    box = fast_create(Box, :owner_id => environment.id, :owner_type => 'Environment')
    block = fast_create(Block, box_id: box.id)
    get "/api/v1/environments/default/boxes?#{params.to_query}"
    json = JSON.parse(last_response.body)
    # api_content is expensive and must only appear when explicitly requested.
    assert !json.first["blocks"].first.key?('api_content')
  end

  should 'get blocks from boxes' do
    Environment.delete_all
    environment = fast_create(Environment, :is_default => true)
    box = fast_create(Box, :owner_id => environment.id, :owner_type => 'Environment')
    block = fast_create(Block, box_id: box.id)
    get "/api/v1/environments/default/boxes?#{params.to_query}"
    json = JSON.parse(last_response.body)
    assert_equal [block.id], json.first["blocks"].map {|b| b['id']}
  end

  should 'not list a block for not logged users' do
    logout_api
    profile = fast_create(Profile)
    box = fast_create(Box, :owner_id => profile.id, :owner_type => Profile.name)
    block = fast_create(Block, box_id: box.id)
    block.display = 'never'
    block.save!
    get "/api/v1/profiles/#{profile.id}/boxes?#{params.to_query}"
    json = JSON.parse(last_response.body)
    assert_equal [], json.first["blocks"].map {|b| b['id']}
  end

  should 'list a block with logged in display_user for a logged user' do
    profile = fast_create(Profile)
    box = fast_create(Box, :owner_id => profile.id, :owner_type => Profile.name)
    block = fast_create(Block, box_id: box.id)
    block.display_user = 'logged'
    block.save!
    get "/api/v1/profiles/#{profile.id}/boxes?#{params.to_query}"
    json = JSON.parse(last_response.body)
    assert_equal [block.id], json.first["blocks"].map {|b| b['id']}
  end

  should 'list a block with not logged in display_user for an admin user' do
    profile = fast_create(Profile)
    profile.add_admin(person)
    box = fast_create(Box, :owner_id => profile.id, :owner_type => Profile.name)
    block = fast_create(Block, box_id: box.id)
    block.display_user = 'not_logged'
    block.save!
    get "/api/v1/profiles/#{profile.id}/boxes?#{params.to_query}"
    json = JSON.parse(last_response.body)
    # Admins see blocks regardless of the display_user restriction.
    assert_equal [block.id], json.first["blocks"].map {|b| b['id']}
  end

  should 'not list boxes for user without permission' do
    profile = fast_create(Profile, public_profile: false)
    box = fast_create(Box, :owner_id => profile.id, :owner_type => Profile.name)
    block = fast_create(Block, box_id: box.id)
    get "/api/v1/profiles/#{profile.id}/boxes?#{params.to_query}"
    json = JSON.parse(last_response.body)
    assert_equal 403, last_response.status
  end
end
| coletivoEITA/noosfero-ecosol | test/api/boxes_test.rb | Ruby | agpl-3.0 | 4,177 |
/*
* This file is part of ToroDB.
*
* ToroDB is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* ToroDB is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with ToroDB. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright (c) 2014, 8Kdata Technology
*
*/
package com.torodb.torod.core.language.querycriteria;
import com.torodb.torod.core.language.AttributeReference;
import com.torodb.torod.core.language.querycriteria.utils.QueryCriteriaVisitor;
import com.torodb.torod.core.subdocument.values.Value;
/**
*
*/
/**
 * Query criterion representing the comparison
 * {@code attribute >= value}.
 */
public class IsGreaterOrEqualQueryCriteria extends AttributeAndValueQueryCriteria {

    private static final long serialVersionUID = 1L;

    /**
     * Creates a "greater than or equal" criterion over the given attribute
     * and constant value.
     */
    public IsGreaterOrEqualQueryCriteria(AttributeReference attributeReference, Value<?> val) {
        super(attributeReference, val);
    }

    @Override
    protected int getBaseHash() {
        // Per-subclass constant mixed into the hash computed by the parent.
        return 5;
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder();
        text.append(getAttributeReference());
        text.append(" >= ");
        text.append(getValue());
        return text.toString();
    }

    @Override
    public <Result, Arg> Result accept(QueryCriteriaVisitor<Result, Arg> visitor, Arg arg) {
        // Visitor-pattern double dispatch.
        return visitor.visit(this, arg);
    }
}
| ahachete/torodb | torod/torod-core/src/main/java/com/torodb/torod/core/language/querycriteria/IsGreaterOrEqualQueryCriteria.java | Java | agpl-3.0 | 1,681 |
import Analyzer from 'parser/core/Analyzer';
import RESOURCE_TYPES from 'game/RESOURCE_TYPES';
class Insanity extends Analyzer {
_insanityEvents = [];
on_toPlayer_energize(event) {
if (event.resourceChangeType === RESOURCE_TYPES.INSANITY.id) {
this._insanityEvents = [
...this._insanityEvents,
event,
];
}
}
get events() {
return this._insanityEvents;
}
}
export default Insanity;
| ronaldpereira/WoWAnalyzer | src/parser/priest/shadow/modules/core/Insanity.js | JavaScript | agpl-3.0 | 435 |
/*
* This file is part of CoAnSys project.
* Copyright (c) 2012-2015 ICM-UW
*
* CoAnSys is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* CoAnSys is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with CoAnSys. If not, see <http://www.gnu.org/licenses/>.
*/
package pl.edu.icm.coansys.disambiguation.author.pig.merger;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.tools.pigstats.PigStatusReporter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pl.edu.icm.coansys.commons.java.DiacriticsRemover;
import pl.edu.icm.coansys.commons.java.StackTraceExtractor;
import pl.edu.icm.coansys.models.DocumentProtos.Author;
import pl.edu.icm.coansys.models.DocumentProtos.BasicMetadata;
import pl.edu.icm.coansys.models.DocumentProtos.DocumentMetadata;
import pl.edu.icm.coansys.models.DocumentProtos.DocumentWrapper;
import pl.edu.icm.coansys.models.DocumentProtos.KeyValue;
/**
 * Pig UDF that enhances a document's author list with ORCID identifiers
 * taken from a matching ORCID document (joined upstream, e.g. by DOI).
 *
 * <p>Input tuple: {@code (docId, documentBytes, orcidDocumentBytes)} where
 * both byte arrays are serialized {@link DocumentWrapper}s.  Output tuple:
 * {@code (docId, enhancedDocumentBytes)}.
 *
 * @author pdendek
 */
public class MergeDocumentWithOrcid extends EvalFunc<Tuple> {

    PigStatusReporter myPigStatusReporter;

    private static final Logger logger = LoggerFactory
            .getLogger(MergeDocumentWithOrcid.class);

    /**
     * Declares the output as a tuple of (chararray docId, bytearray document).
     */
    @Override
    public Schema outputSchema(Schema p_input) {
        try {
            return Schema.generateNestedSchema(DataType.TUPLE,
                    DataType.CHARARRAY, DataType.BYTEARRAY);
        } catch (FrontendException e) {
            logger.error("Error in creating output schema:", e);
            throw new IllegalStateException(e);
        }
    }

    /**
     * Merges ORCID identifiers into the document's authors.
     *
     * @param input tuple of (docId, document bytes, ORCID document bytes)
     * @return tuple of (docId, enhanced document bytes), or null for
     *         malformed input rows
     * @throws IOException wrapping any processing failure
     */
    @Override
    public Tuple exec(Tuple input) throws IOException {
        // Defensive: skip malformed rows instead of failing the whole job.
        if (input == null || input.size() != 3) {
            return null;
        }
        try {
            myPigStatusReporter = PigStatusReporter.getInstance();
            // Load the input tuple: (docId, document, ORCID document).
            String docId = (String) input.get(0);
            DataByteArray dbaD = (DataByteArray) input.get(1);
            DataByteArray dbaO = (DataByteArray) input.get(2);
            // Deserialize both documents and pull out their author lists.
            DocumentWrapper dwD = DocumentWrapper.parseFrom(dbaD.get());
            List<Author> aDL = dwD.getDocumentMetadata().getBasicMetadata().getAuthorList();
            DocumentWrapper dwO = DocumentWrapper.parseFrom(dbaO.get());
            List<Author> aOL = dwO.getDocumentMetadata().getBasicMetadata().getAuthorList();
            // Compute the merged author list (name-matched ORCID ids copied in).
            List<Author> aRL = matchAuthors(docId, aDL, aOL);
            // Rebuild the document with the enhanced author list.
            BasicMetadata.Builder bmR = BasicMetadata.newBuilder(DocumentWrapper.newBuilder(dwD).getDocumentMetadata().getBasicMetadata());
            bmR.clearAuthor();
            bmR.addAllAuthor(aRL);
            DocumentMetadata.Builder dmR = DocumentMetadata.newBuilder(DocumentWrapper.newBuilder(dwD).getDocumentMetadata());
            dmR.setBasicMetadata(bmR);
            DocumentWrapper.Builder dwR = DocumentWrapper.newBuilder(dwD);
            dwR.setDocumentMetadata(dmR);
            // Emit (docId, serialized enhanced document).
            Tuple result = TupleFactory.getInstance().newTuple();
            result.append(docId);
            result.append(new DataByteArray(dwR.build().toByteArray()));
            return result;
        } catch (Exception e) {
            logger.error("Error in processing input row:", e);
            throw new IOException("Caught exception processing input row:\n"
                    + StackTraceExtractor.getStackTrace(e));
        }
    }

    /**
     * Matches each base (document) author against the ORCID authors by
     * name (diacritics- and case-insensitive) and, for matches, merges the
     * ORCID id into the base author. Unmatched authors are copied as-is.
     *
     * @param docId  document id, used only for logging
     * @param base   the document's authors
     * @param second the ORCID document's authors
     * @return new author list, same size/order as {@code base}
     */
    protected List<Author> matchAuthors(String docId, List<Author> base,
            List<Author> second) {
        List<Author> result = new ArrayList<Author>(base.size());
        List<Author> secondCopy = new ArrayList<Author>(second);
        // NOTE(review): matched ORCID authors are never removed from
        // secondCopy, so a single ORCID author may enhance several base
        // authors -- confirm whether matches should be consumed.
        boolean changedBln = false;
        int changedInt = 0;
        // FIX: these progress messages were previously logged at ERROR level,
        // and the second message was truncated (it named but never included
        // the ORCID author count).
        logger.debug("-------------------------------------------");
        logger.debug("number of base authors: {}; number of orcid authors: {}",
                base.size(), secondCopy.size());
        for (Author author : base) {
            Author foundAuthor = null;
            for (Author secondAuthor : secondCopy) {
                // Match either on the full name, or on the surname alone.
                if (
                    equalsIgnoreCaseIgnoreDiacritics(author.getName(), secondAuthor.getName())
                    ||
                    //equalsIgnoreCaseIgnoreDiacritics(author.getForenames(), secondAuthor.getForenames()) &&
                    equalsIgnoreCaseIgnoreDiacritics(author.getSurname(), secondAuthor.getSurname())
                ){
                    foundAuthor = secondAuthor;
                    break;
                }
            }
            if (foundAuthor != null) {
                result.add(merge(author, foundAuthor));
                changedBln = true;
                changedInt++;
                if (myPigStatusReporter != null) {
                    Counter c = myPigStatusReporter.getCounter("ORCID Enhancement", "Author Enhanced");
                    if (c != null) {
                        c.increment(1);
                    }
                }
            } else {
                result.add(Author.newBuilder(author).build());
            }
        }
        if (changedBln) {
            logger.info("------------------------------------------");
            logger.info("Changed docId: {}", docId);
            if (myPigStatusReporter != null) {
                Counter c = myPigStatusReporter.getCounter("ORCID Enhancement", "Document Enhanced");
                if (c != null) {
                    c.increment(1);
                }
            }
        }
        logger.debug("number of intersections: {}", changedInt);
        return result;
    }

    /**
     * Returns a copy of {@code author} with the orcid-author-id external
     * id(s) of {@code foundAuthor} appended.
     */
    private Author merge(Author author, Author foundAuthor) {
        Author.Builder builder = Author.newBuilder(author);
        for (KeyValue kv : foundAuthor.getExtIdList()) {
            if ("orcid-author-id".equals(kv.getKey())) {
                KeyValue.Builder kvb = KeyValue.newBuilder();
                kvb.setKey(kv.getKey());
                kvb.setValue(kv.getValue());
                builder.addExtId(kvb.build());
                logger.debug("<k:{}; v:{}>", kv.getKey(), kv.getValue());
            }
        }
        Author ret = builder.build();
        logger.debug("<auth:{}>", ret.toString());
        return ret;
    }

    /**
     * Case-insensitive, diacritics-insensitive name equality; empty
     * strings never match (so absent name parts don't produce matches).
     */
    private boolean equalsIgnoreCaseIgnoreDiacritics(String firstName,
            String secondName) {
        if (firstName.isEmpty() || secondName.isEmpty()) {
            return false;
        }
        return DiacriticsRemover.removeDiacritics(firstName).equalsIgnoreCase(
                DiacriticsRemover.removeDiacritics(secondName));
    }
}
| acz-icm/coansys | disambiguation-author/disambiguation-author-logic/src/main/java/pl/edu/icm/coansys/disambiguation/author/pig/merger/MergeDocumentWithOrcid.java | Java | agpl-3.0 | 6,921 |
/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2007-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.web.command;
/**
 * Web command object used to request a single statistics report,
 * identified by its integer database id. Populated by data binding from
 * the request's query parameters.
 *
 * @author <a href="mailto:dj@opennms.org">DJ Gregor</a>
 * @since 1.8.1
 */
public class StatisticsReportCommand {

    /** Id of the requested report; null until bound from the request. */
    private Integer id;

    /**
     * Returns the requested report id, or null if none was supplied.
     */
    public Integer getId() {
        return id;
    }

    /**
     * Sets the requested report id.
     */
    public void setId(Integer id) {
        this.id = id;
    }
}
| roskens/opennms-pre-github | opennms-webapp/src/main/java/org/opennms/web/command/StatisticsReportCommand.java | Java | agpl-3.0 | 1,870 |
// Copyright 2020 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.
package azuretesting
import (
"encoding/json"
"reflect"
)
// JsonMarshalRaw marshals v to JSON exactly like json.Marshal, except
// that custom MarshalJSON methods are bypassed for every type not listed
// in allow.
//
// This matters because many Azure SDK types implement MarshalJSON in a
// way that drops READ-ONLY fields -- correct for a client, but wrong when
// these fakes are pretending to be the server.
func JsonMarshalRaw(v interface{}, allow ...reflect.Type) ([]byte, error) {
	allowed := map[reflect.Type]bool{}
	for _, typ := range allow {
		allowed[typ] = true
	}
	if v != nil {
		v = rawValueMaker{allowed}.rawValue(reflect.ValueOf(v)).Interface()
	}
	return json.Marshal(v)
}
// rawValueMaker recursively converts a value tree into plain maps,
// slices and freshly synthesized struct types so that json.Marshal never
// sees the original types' MarshalJSON methods. Types present in allowed
// are passed through untouched and keep their custom marshaling.
type rawValueMaker struct {
	allowed map[reflect.Type]bool
}
// rawValue dispatches on the kind of v, converting pointers, structs,
// maps and slices; every other kind (and any allowed type) is returned
// unchanged.
func (m rawValueMaker) rawValue(v reflect.Value) reflect.Value {
	typ := v.Type()
	if m.allowed[typ] {
		// Allowed types keep their own MarshalJSON behaviour.
		return v
	}
	kind := typ.Kind()
	if kind == reflect.Ptr {
		return m.rawPointerValue(v)
	}
	if kind == reflect.Struct {
		return m.rawStructValue(v)
	}
	if kind == reflect.Map {
		return m.rawMapValue(v)
	}
	if kind == reflect.Slice {
		return m.rawSliceValue(v)
	}
	return v
}
// rawPointerValue converts the pointee and returns a pointer to the
// converted value; nil pointers pass through unchanged.
func (m rawValueMaker) rawPointerValue(v reflect.Value) reflect.Value {
	if v.IsNil() {
		return v
	}
	elem := m.rawValue(v.Elem())
	if !elem.CanAddr() {
		// The converted value is not addressable, so copy it into a
		// freshly allocated slot and hand back that pointer instead.
		ptr := reflect.New(elem.Type())
		ptr.Elem().Set(elem)
		return ptr
	}
	return elem.Addr()
}
// rawStructValue rebuilds v as a value of a brand-new struct type (via
// reflect.StructOf) containing only the fields json.Marshal would emit,
// with each field's value (and therefore type) recursively converted.
// Because the synthesized type has no methods, json.Marshal falls back to
// its default field-by-field encoding.
func (m rawValueMaker) rawStructValue(v reflect.Value) reflect.Value {
	t := v.Type()
	fields := make([]reflect.StructField, 0, t.NumField())
	values := make([]reflect.Value, 0, t.NumField())
	for i := 0; i < t.NumField(); i++ {
		sf := t.Field(i)
		// PkgPath != "" means the field is unexported.
		if sf.PkgPath != "" || sf.Tag.Get("json") == "-" {
			// Skip fields that won't ever be marshaled.
			continue
		}
		if tag, ok := sf.Tag.Lookup("json"); ok && tag == "" {
			// Also skip fields with a present, but empty, json tag.
			continue
		}
		rv := m.rawValue(v.Field(i))
		// The field's type must match the converted value's type, and
		// embedded fields are flattened into ordinary named fields so the
		// synthesized struct stays valid for reflect.StructOf.
		sf.Type = rv.Type()
		sf.Anonymous = false
		fields = append(fields, sf)
		values = append(values, rv)
	}
	newT := reflect.StructOf(fields)
	newV := reflect.New(newT).Elem()
	for i, v := range values {
		newV.Field(i).Set(v)
	}
	return newV
}
// interfaceType is the reflect.Type of the empty interface; it is used as
// the element/value type of the generic slices and maps built below.
var interfaceType = reflect.TypeOf((*interface{})(nil)).Elem()
// rawMapValue rebuilds v as a map with the same key type but
// interface{} values, converting each entry's value recursively.
func (m rawValueMaker) rawMapValue(v reflect.Value) reflect.Value {
	out := reflect.MakeMap(reflect.MapOf(v.Type().Key(), interfaceType))
	iter := v.MapRange()
	for iter.Next() {
		out.SetMapIndex(iter.Key(), m.rawValue(iter.Value()))
	}
	return out
}
// rawSliceValue rebuilds v as a []interface{} of the same length, with
// every element converted recursively.
func (m rawValueMaker) rawSliceValue(v reflect.Value) reflect.Value {
	n := v.Len()
	out := reflect.MakeSlice(reflect.SliceOf(interfaceType), n, n)
	for i := 0; i < n; i++ {
		out.Index(i).Set(m.rawValue(v.Index(i)))
	}
	return out
}
| freyes/juju | provider/azure/internal/azuretesting/json.go | GO | agpl-3.0 | 2,870 |
<?php
/*********************************************************************************
* TimeTrex is a Payroll and Time Management program developed by
* TimeTrex Software Inc. Copyright (C) 2003 - 2014 TimeTrex Software Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by
* the Free Software Foundation with the addition of the following permission
* added to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED
* WORK IN WHICH THE COPYRIGHT IS OWNED BY TIMETREX, TIMETREX DISCLAIMS THE
* WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along
* with this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact TimeTrex headquarters at Unit 22 - 2475 Dobbin Rd. Suite
* #292 Westbank, BC V4T 2E9, Canada or at email address info@timetrex.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License
* version 3, these Appropriate Legal Notices must retain the display of the
* "Powered by TimeTrex" logo. If the display of the logo is not reasonably
* feasible for technical reasons, the Appropriate Legal Notices must display
* the words "Powered by TimeTrex".
********************************************************************************/
/**
 * Read-only API wrapper exposing accrual balance records, with
 * permission-aware column filtering and paging.
 *
 * @package API\Accrual
 */
class APIAccrualBalance extends APIFactory {
	// Factory class instantiated by the APIFactory base for data access.
	protected $main_class = 'AccrualBalanceFactory';

	public function __construct() {
		parent::__construct(); //Make sure parent constructor is always called.
		return TRUE;
	}

	/**
	 * Get options for dropdown boxes.
	 * @param string $name Name of options to return, ie: 'columns', 'type', 'status'
	 * @param mixed $parent Parent name/ID of options to return if data is in hierarchical format. (ie: Province)
	 * @return array
	 */
	function getOptions( $name, $parent = NULL ) {
		// Users without accrual view permission only get the restricted
		// column list. (Note: PHP's AND/OR keywords bind looser than &&/||.)
		if ( $name == 'columns'
				AND ( !$this->getPermissionObject()->Check('accrual', 'enabled')
					OR !( $this->getPermissionObject()->Check('accrual', 'view') OR $this->getPermissionObject()->Check('accrual', 'view_child') ) ) ) {
			$name = 'list_columns';
		}

		return parent::getOptions( $name, $parent );
	}

	/**
	 * Get accrual balance data for one or more accrual balancees.
	 * @param array $data filter data
	 * @param bool $disable_paging when TRUE, return all rows without paging
	 * @return array|bool record list, TRUE when no records match, or a
	 *                    permission-denied payload
	 */
	function getAccrualBalance( $data = NULL, $disable_paging = FALSE ) {
		// Caller needs accrual access plus at least one view-level permission.
		if ( !$this->getPermissionObject()->Check('accrual', 'enabled')
				OR !( $this->getPermissionObject()->Check('accrual', 'view') OR $this->getPermissionObject()->Check('accrual', 'view_own') OR $this->getPermissionObject()->Check('accrual', 'view_child') ) ) {
			return $this->getPermissionObject()->PermissionDenied();
		}
		$data = $this->initializeFilterAndPager( $data, $disable_paging );

		// Restrict results to employees this user is allowed to see.
		$data['filter_data']['permission_children_ids'] = $this->getPermissionObject()->getPermissionChildren( 'accrual', 'view' );

		$blf = TTnew( 'AccrualBalanceListFactory' );
		$blf->getAPISearchByCompanyIdAndArrayCriteria( $this->getCurrentCompanyObject()->getId(), $data['filter_data'], $data['filter_items_per_page'], $data['filter_page'], NULL, $data['filter_sort'] );
		Debug::Text('Record Count: '. $blf->getRecordCount(), __FILE__, __LINE__, __METHOD__, 10);
		if ( $blf->getRecordCount() > 0 ) {
			$this->getProgressBarObject()->start( $this->getAMFMessageID(), $blf->getRecordCount() );

			$this->setPagerObject( $blf );

			// $retarr is always set here: the loop runs at least once
			// because getRecordCount() > 0.
			foreach( $blf as $b_obj ) {
				$retarr[] = $b_obj->getObjectAsArray( $data['filter_columns'], $data['filter_data']['permission_children_ids'] );

				$this->getProgressBarObject()->set( $this->getAMFMessageID(), $blf->getCurrentRow() );
			}

			$this->getProgressBarObject()->stop( $this->getAMFMessageID() );

			return $this->returnHandler( $retarr );
		}

		return $this->returnHandler( TRUE ); //No records returned.
	}
}
?>
| BrunoChauvet/timetrex | classes/modules/api/accrual/APIAccrualBalance.class.php | PHP | agpl-3.0 | 4,549 |
Clazz.declarePackage ("org.jmol.shapespecial");
Clazz.load (["org.jmol.shape.AtomShape", "org.jmol.atomdata.RadiusData", "org.jmol.util.BitSet"], "org.jmol.shapespecial.Dots", ["java.util.Hashtable", "org.jmol.constant.EnumVdw", "org.jmol.geodesic.EnvelopeCalculation", "org.jmol.util.BitSetUtil", "$.Colix", "$.Escape", "$.Logger", "$.Matrix3f", "$.StringXBuilder"], function () {
c$ = Clazz.decorateAsClass (function () {
this.ec = null;
this.isSurface = false;
this.bsOn = null;
this.bsSelected = null;
this.bsIgnore = null;
this.thisAtom = 0;
this.thisRadius = 0;
this.thisArgb = 0;
this.rdLast = null;
Clazz.instantialize (this, arguments);
}, org.jmol.shapespecial, "Dots", org.jmol.shape.AtomShape);
Clazz.prepareFields (c$, function () {
this.bsOn = new org.jmol.util.BitSet ();
this.rdLast = new org.jmol.atomdata.RadiusData (null, 0, null, null);
});
Clazz.defineMethod (c$, "initShape",
function () {
Clazz.superCall (this, org.jmol.shapespecial.Dots, "initShape", []);
this.translucentAllowed = false;
this.ec = new org.jmol.geodesic.EnvelopeCalculation (this.viewer, this.atomCount, this.mads);
});
Clazz.defineMethod (c$, "getSize",
function (atomIndex) {
return (this.mads == null ? Clazz.doubleToInt (Math.floor (this.ec.getRadius (atomIndex) * 2000)) : this.mads[atomIndex] * 2);
}, "~N");
Clazz.defineMethod (c$, "setProperty",
function (propertyName, value, bs) {
if ("init" === propertyName) {
this.initialize ();
return;
}if ("translucency" === propertyName) {
if (!this.translucentAllowed) return;
}if ("ignore" === propertyName) {
this.bsIgnore = value;
return;
}if ("select" === propertyName) {
this.bsSelected = value;
return;
}if ("radius" === propertyName) {
this.thisRadius = (value).floatValue ();
if (this.thisRadius > 16) this.thisRadius = 16;
return;
}if ("colorRGB" === propertyName) {
this.thisArgb = (value).intValue ();
return;
}if ("atom" === propertyName) {
this.thisAtom = (value).intValue ();
if (this.thisAtom >= this.atoms.length) return;
this.atoms[this.thisAtom].setShapeVisibility (this.myVisibilityFlag, true);
this.ec.allocDotsConvexMaps (this.atomCount);
return;
}if ("dots" === propertyName) {
if (this.thisAtom >= this.atoms.length) return;
this.isActive = true;
this.ec.setFromBits (this.thisAtom, value);
this.atoms[this.thisAtom].setShapeVisibility (this.myVisibilityFlag, true);
if (this.mads == null) {
this.ec.setMads (null);
this.mads = Clazz.newShortArray (this.atomCount, 0);
for (var i = 0; i < this.atomCount; i++) if (this.atoms[i].isInFrame () && this.atoms[i].isShapeVisible (this.myVisibilityFlag)) try {
this.mads[i] = Clazz.floatToShort (this.ec.getAppropriateRadius (i) * 1000);
} catch (e) {
if (Clazz.exceptionOf (e, Exception)) {
} else {
throw e;
}
}
this.ec.setMads (this.mads);
}this.mads[this.thisAtom] = Clazz.floatToShort (this.thisRadius * 1000);
if (this.colixes == null) {
this.colixes = Clazz.newShortArray (this.atomCount, 0);
this.paletteIDs = Clazz.newByteArray (this.atomCount, 0);
}this.colixes[this.thisAtom] = org.jmol.util.Colix.getColix (this.thisArgb);
this.bsOn.set (this.thisAtom);
return;
}if ("refreshTrajectories" === propertyName) {
bs = (value)[1];
var m4 = (value)[2];
if (m4 == null) return;
var m = new org.jmol.util.Matrix3f ();
m4.getRotationScale (m);
this.ec.reCalculate (bs, m);
return;
}if (propertyName === "deleteModelAtoms") {
var firstAtomDeleted = ((value)[2])[1];
var nAtomsDeleted = ((value)[2])[2];
org.jmol.util.BitSetUtil.deleteBits (this.bsOn, bs);
this.ec.deleteAtoms (firstAtomDeleted, nAtomsDeleted);
}Clazz.superCall (this, org.jmol.shapespecial.Dots, "setProperty", [propertyName, value, bs]);
}, "~S,~O,org.jmol.util.BitSet");
Clazz.defineMethod (c$, "initialize",
function () {
this.bsSelected = null;
this.bsIgnore = null;
this.isActive = false;
if (this.ec == null) this.ec = new org.jmol.geodesic.EnvelopeCalculation (this.viewer, this.atomCount, this.mads);
});
Clazz.overrideMethod (c$, "setSizeRD",
function (rd, bsSelected) {
if (rd == null) rd = new org.jmol.atomdata.RadiusData (null, 0, org.jmol.atomdata.RadiusData.EnumType.ABSOLUTE, null);
if (this.bsSelected != null) bsSelected = this.bsSelected;
if (org.jmol.util.Logger.debugging) {
org.jmol.util.Logger.debug ("Dots.setSize " + rd.value);
}var isVisible = true;
var setRadius = 3.4028235E38;
this.isActive = true;
switch (rd.factorType) {
case org.jmol.atomdata.RadiusData.EnumType.OFFSET:
break;
case org.jmol.atomdata.RadiusData.EnumType.ABSOLUTE:
if (rd.value == 0) isVisible = false;
setRadius = rd.value;
default:
rd.valueExtended = this.viewer.getCurrentSolventProbeRadius ();
}
var maxRadius;
switch (rd.vdwType) {
case org.jmol.constant.EnumVdw.ADPMIN:
case org.jmol.constant.EnumVdw.ADPMAX:
case org.jmol.constant.EnumVdw.HYDRO:
case org.jmol.constant.EnumVdw.TEMP:
maxRadius = setRadius;
break;
case org.jmol.constant.EnumVdw.IONIC:
maxRadius = this.modelSet.getMaxVanderwaalsRadius () * 2;
break;
default:
maxRadius = this.modelSet.getMaxVanderwaalsRadius ();
}
var newSet = (this.rdLast.value != rd.value || this.rdLast.valueExtended != rd.valueExtended || this.rdLast.factorType !== rd.factorType || this.rdLast.vdwType !== rd.vdwType || this.ec.getDotsConvexMax () == 0);
if (isVisible) {
for (var i = bsSelected.nextSetBit (0); i >= 0; i = bsSelected.nextSetBit (i + 1)) if (!this.bsOn.get (i)) {
this.bsOn.set (i);
newSet = true;
}
} else {
var isAll = (bsSelected == null);
var i0 = (isAll ? this.atomCount - 1 : bsSelected.nextSetBit (0));
for (var i = i0; i >= 0; i = (isAll ? i - 1 : bsSelected.nextSetBit (i + 1))) this.bsOn.setBitTo (i, false);
}for (var i = this.atomCount; --i >= 0; ) {
this.atoms[i].setShapeVisibility (this.myVisibilityFlag, this.bsOn.get (i));
}
if (!isVisible) return;
if (newSet) {
this.mads = null;
this.ec.newSet ();
}var dotsConvexMaps = this.ec.getDotsConvexMaps ();
if (dotsConvexMaps != null) {
for (var i = this.atomCount; --i >= 0; ) if (this.bsOn.get (i)) {
dotsConvexMaps[i] = null;
}
}if (dotsConvexMaps == null) {
this.colixes = Clazz.newShortArray (this.atomCount, 0);
this.paletteIDs = Clazz.newByteArray (this.atomCount, 0);
}this.ec.calculate (rd, maxRadius, this.bsOn, this.bsIgnore, !this.viewer.getDotSurfaceFlag (), this.viewer.getDotsSelectedOnlyFlag (), this.isSurface, true);
this.rdLast = rd;
}, "org.jmol.atomdata.RadiusData,org.jmol.util.BitSet");
// NOTE: machine-transpiled (Java2Script) output -- fix the original Java source, not this file.
// Flags every atom that currently displays this dots/geo-surface shape (and is not
// hidden in the model) as clickable, so mouse picking hits the shape.
Clazz.overrideMethod (c$, "setModelClickability", 
function () {
for (var i = this.atomCount; --i >= 0; ) {
var atom = this.atoms[i];
// Skip atoms without this shape's visibility bit or hidden by the model set.
if ((atom.getShapeVisibilityFlags () & this.myVisibilityFlag) == 0 || this.modelSet.isAtomHidden (i)) continue;
atom.setClickable (this.myVisibilityFlag);
}
});
// NOTE: machine-transpiled (Java2Script) output -- fix the original Java source, not this file.
// Serializes the current dots/geoSurface state into script commands ("dots <i> radius ..."
// or "geoSurface <i> radius ...") plus per-atom color commands, for state save/restore.
Clazz.overrideMethod (c$, "getShapeState", 
function () {
var dotsConvexMaps = this.ec.getDotsConvexMaps ();
// Nothing calculated yet -> nothing to persist.
if (dotsConvexMaps == null || this.ec.getDotsConvexMax () == 0) return "";
var s = new org.jmol.util.StringXBuilder ();
var temp = new java.util.Hashtable ();
var atomCount = this.viewer.getAtomCount ();
var type = (this.isSurface ? "geoSurface " : "dots ");
for (var i = 0; i < atomCount; i++) {
if (!this.bsOn.get (i) || dotsConvexMaps[i] == null) continue;
// Record an explicit color command only for atoms whose colix was set.
if (this.bsColixSet != null && this.bsColixSet.get (i)) org.jmol.shape.Shape.setStateInfo (temp, i, this.getColorCommand (type, this.paletteIDs[i], this.colixes[i]));
var bs = dotsConvexMaps[i];
if (!bs.isEmpty ()) {
var r = this.ec.getAppropriateRadius (i);
org.jmol.shape.Shape.appendCmd (s, type + i + " radius " + r + " " + org.jmol.util.Escape.escape (bs));
}}
s.append (org.jmol.shape.Shape.getShapeCommands (temp, null));
return s.toString ();
});
// Class constants: surface-distance cutoff used for the calculation and the
// maximum subdivision level. (Transpiled from the Java static fields.)
Clazz.defineStatics (c$,
"SURFACE_DISTANCE_FOR_CALCULATION", 10,
"MAX_LEVEL", 3);
});
| ksripathi/edx-demo-course | static/jsmol/j2s/org/jmol/shapespecial/Dots.js | JavaScript | agpl-3.0 | 7,637 |
import re
import uuid
from xmodule.assetstore.assetmgr import AssetManager
XASSET_LOCATION_TAG = 'c4x'
XASSET_SRCREF_PREFIX = 'xasset:'
XASSET_THUMBNAIL_TAIL_NAME = '.jpg'
STREAM_DATA_CHUNK_SIZE = 1024
import os
import logging
import StringIO
from urlparse import urlparse, urlunparse, parse_qsl
from urllib import urlencode
from opaque_keys.edx.locator import AssetLocator
from opaque_keys.edx.keys import CourseKey, AssetKey
from opaque_keys import InvalidKeyError
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.exceptions import NotFoundError
from PIL import Image
class StaticContent(object):
    """A piece of static course content: raw asset bytes plus metadata.

    Bundles the asset's location key, display name, MIME type and data with
    bookkeeping fields (thumbnail link, lock state, import path). Also hosts
    the static helpers used throughout the platform for mapping between
    asset paths, asset keys and canonical URLs.
    """
    def __init__(self, loc, name, content_type, data, last_modified_at=None, thumbnail_location=None, import_path=None,
                 length=None, locked=False):
        # `locked` assets must not be served from a public CDN (see
        # get_canonicalized_asset_path below).
        self.location = loc
        self.name = name # a display string which can be edited, and thus not part of the location which needs to be fixed
        self.content_type = content_type
        self._data = data
        self.length = length
        self.last_modified_at = last_modified_at
        self.thumbnail_location = thumbnail_location
        # optional information about where this file was imported from. This is needed to support import/export
        # cycles
        self.import_path = import_path
        self.locked = locked
    @property
    def is_thumbnail(self):
        """True when this content's location is in the 'thumbnail' category."""
        return self.location.category == 'thumbnail'
    @staticmethod
    def generate_thumbnail_name(original_name, dimensions=None):
        """
        Return the conventional thumbnail filename for an asset.

        - original_name: Name of the asset (typically its location.name)
        - dimensions: `None` or a tuple of (width, height) in pixels

        Non-.jpg extensions are folded into the name (dots become dashes)
        so the result always ends with the thumbnail extension.
        """
        name_root, ext = os.path.splitext(original_name)
        if not ext == XASSET_THUMBNAIL_TAIL_NAME:
            name_root = name_root + ext.replace(u'.', u'-')
        if dimensions:
            width, height = dimensions # pylint: disable=unpacking-non-sequence
            name_root += "-{}x{}".format(width, height)
        return u"{name_root}{extension}".format(
            name_root=name_root,
            extension=XASSET_THUMBNAIL_TAIL_NAME,
        )
    @staticmethod
    def compute_location(course_key, path, revision=None, is_thumbnail=False):
        """
        Constructs a location object for static content.
        - course_key: the course that this asset belongs to
        - path: is the name of the static asset
        - revision: is the object's revision information
        - is_thumbnail: is whether or not we want the thumbnail version of this
        asset
        """
        # Slashes are not legal in asset names; normalize them to underscores.
        path = path.replace('/', '_')
        return course_key.make_asset_key(
            'asset' if not is_thumbnail else 'thumbnail',
            AssetLocator.clean_keeping_underscores(path)
        ).for_branch(None)
    def get_id(self):
        """Return the identifier of this content (its asset location key)."""
        return self.location
    @property
    def data(self):
        """The raw asset data as passed to the constructor."""
        return self._data
    # Matches legacy c4x asset URLs: /c4x/<org>/<course>/<category>/<name>
    ASSET_URL_RE = re.compile(r"""
        /?c4x/
        (?P<org>[^/]+)/
        (?P<course>[^/]+)/
        (?P<category>[^/]+)/
        (?P<name>[^/]+)
    """, re.VERBOSE | re.IGNORECASE)
    @staticmethod
    def is_c4x_path(path_string):
        """
        Returns a boolean if a path is believed to be a c4x link based on the leading element
        """
        return StaticContent.ASSET_URL_RE.match(path_string) is not None
    @staticmethod
    def get_static_path_from_location(location):
        """
        This utility static method will take a location identifier and create a 'durable' /static/.. URL representation of it.
        This link is 'durable' as it can maintain integrity across cloning of courseware across course-ids, e.g. reruns of
        courses.
        In the LMS/CMS, we have runtime link-rewriting, so at render time, this /static/... format will get translated into
        the actual /c4x/... path which the client needs to reference static content
        """
        if location is not None:
            return u"/static/{name}".format(name=location.name)
        else:
            return None
    @staticmethod
    def get_base_url_path_for_course_assets(course_key):
        """Return the URL path prefix under which all of a course's assets live."""
        if course_key is None:
            return None
        assert isinstance(course_key, CourseKey)
        placeholder_id = uuid.uuid4().hex
        # create a dummy asset location with a fake but unique name. strip off the name, and return it
        url_path = StaticContent.serialize_asset_key_with_slash(
            course_key.make_asset_key('asset', placeholder_id).for_branch(None)
        )
        return url_path.replace(placeholder_id, '')
    @staticmethod
    def get_location_from_path(path):
        """
        Generate an AssetKey for the given path (old c4x/org/course/asset/name syntax)
        """
        try:
            return AssetKey.from_string(path)
        except InvalidKeyError:
            # TODO - re-address this once LMS-11198 is tackled.
            if path.startswith('/'):
                # try stripping off the leading slash and try again
                return AssetKey.from_string(path[1:])
            # NOTE(review): falls through and implicitly returns None when the
            # path is invalid and has no leading slash -- callers must handle it.
    @staticmethod
    def get_asset_key_from_path(course_key, path):
        """
        Parses a path, extracting an asset key or creating one.
        Args:
            course_key: key to the course which owns this asset
            path: the path to said content
        Returns:
            AssetKey: the asset key that represents the path
        """
        # Clean up the path, removing any static prefix and any leading slash.
        if path.startswith('/static/'):
            path = path[len('/static/'):]
        path = path.lstrip('/')
        try:
            return AssetKey.from_string(path)
        except InvalidKeyError:
            # If we couldn't parse the path, just let compute_location figure it out.
            # It's most likely a path like /image.png or something.
            return StaticContent.compute_location(course_key, path)
    @staticmethod
    def get_canonicalized_asset_path(course_key, path, base_url):
        """
        Returns a fully-qualified path to a piece of static content.
        If a static asset CDN is configured, this path will include it.
        Otherwise, the path will simply be relative.
        Args:
            course_key: key to the course which owns this asset
            path: the path to said content
        Returns:
            string: fully-qualified path to asset
        """
        # Break down the input path.
        _, _, relative_path, params, query_string, fragment = urlparse(path)
        # Convert our path to an asset key if it isn't one already.
        asset_key = StaticContent.get_asset_key_from_path(course_key, relative_path)
        # Check the status of the asset to see if this can be served via CDN aka publicly.
        serve_from_cdn = False
        try:
            content = AssetManager.find(asset_key, as_stream=True)
            # Unknown lock state defaults to locked (i.e. not CDN-servable).
            is_locked = getattr(content, "locked", True)
            serve_from_cdn = not is_locked
        except (ItemNotFoundError, NotFoundError):
            # If we can't find the item, just treat it as if it's locked.
            serve_from_cdn = False
        # Update any query parameter values that have asset paths in them. This is for assets that
        # require their own after-the-fact values, like a Flash file that needs the path of a config
        # file passed to it e.g. /static/visualization.swf?configFile=/static/visualization.xml
        # (note: recursive call, one level per nested /static/ query value)
        query_params = parse_qsl(query_string)
        updated_query_params = []
        for query_name, query_value in query_params:
            if query_value.startswith("/static/"):
                new_query_value = StaticContent.get_canonicalized_asset_path(course_key, query_value, base_url)
                updated_query_params.append((query_name, new_query_value))
            else:
                updated_query_params.append((query_name, query_value))
        serialized_asset_key = StaticContent.serialize_asset_key_with_slash(asset_key)
        base_url = base_url if serve_from_cdn else ''
        return urlunparse((None, base_url, serialized_asset_key, params, urlencode(updated_query_params), fragment))
    def stream_data(self):
        """Yield the entire data payload as a single chunk (generator API)."""
        yield self._data
    @staticmethod
    def serialize_asset_key_with_slash(asset_key):
        """
        Legacy code expects the serialized asset key to start w/ a slash; so, do that in one place
        :param asset_key:
        """
        url = unicode(asset_key)
        if not url.startswith('/'):
            url = '/' + url # TODO - re-address this once LMS-11198 is tackled.
        return url
class StaticContentStream(StaticContent):
    """StaticContent variant whose payload is a file-like stream, not bytes.

    The superclass `data` slot is left as None; data is read on demand from
    the wrapped stream in STREAM_DATA_CHUNK_SIZE pieces.
    """
    def __init__(self, loc, name, content_type, stream, last_modified_at=None, thumbnail_location=None, import_path=None,
                 length=None, locked=False):
        super(StaticContentStream, self).__init__(loc, name, content_type, None, last_modified_at=last_modified_at,
                                                  thumbnail_location=thumbnail_location, import_path=import_path,
                                                  length=length, locked=locked)
        self._stream = stream
    def stream_data(self):
        """Yield successive chunks of the stream until it is exhausted."""
        while True:
            chunk = self._stream.read(STREAM_DATA_CHUNK_SIZE)
            if len(chunk) == 0:
                break
            yield chunk
    def stream_data_in_range(self, first_byte, last_byte):
        """
        Stream the data between first_byte and last_byte (included)
        """
        self._stream.seek(first_byte)
        position = first_byte
        while True:
            # Final (possibly partial) chunk: read exactly up to last_byte.
            if last_byte < position + STREAM_DATA_CHUNK_SIZE - 1:
                chunk = self._stream.read(last_byte - position + 1)
                yield chunk
                break
            chunk = self._stream.read(STREAM_DATA_CHUNK_SIZE)
            position += STREAM_DATA_CHUNK_SIZE
            yield chunk
    def close(self):
        """Close the underlying stream."""
        self._stream.close()
    def copy_to_in_mem(self):
        """Materialize the stream into a regular in-memory StaticContent."""
        self._stream.seek(0)
        content = StaticContent(self.location, self.name, self.content_type, self._stream.read(),
                                last_modified_at=self.last_modified_at, thumbnail_location=self.thumbnail_location,
                                import_path=self.import_path, length=self.length, locked=self.locked)
        return content
class ContentStore(object):
    '''
    Abstraction for all ContentStore providers (e.g. MongoDB)

    Concrete stores must implement save/find and the course-level bulk
    operations; generate_thumbnail is provided here because it only relies
    on save() plus PIL.
    '''
    def save(self, content):
        # Persist a StaticContent object; implemented by concrete stores.
        raise NotImplementedError
    def find(self, filename):
        # Look up a piece of content by its key; implemented by concrete stores.
        raise NotImplementedError
    def get_all_content_for_course(self, course_key, start=0, maxresults=-1, sort=None, filter_params=None):
        '''
        Returns a list of static assets for a course, followed by the total number of assets.
        By default all assets are returned, but start and maxresults can be provided to limit the query.
        The return format is a list of asset data dictionaries.
        The asset data dictionaries have the following keys:
            asset_key (:class:`opaque_keys.edx.AssetKey`): The key of the asset
            displayname: The human-readable name of the asset
            uploadDate (datetime.datetime): The date and time that the file was uploadDate
            contentType: The mimetype string of the asset
            md5: An md5 hash of the asset content
        '''
        raise NotImplementedError
    def delete_all_course_assets(self, course_key):
        """
        Delete all of the assets which use this course_key as an identifier
        :param course_key:
        """
        raise NotImplementedError
    def copy_all_course_assets(self, source_course_key, dest_course_key):
        """
        Copy all the course assets from source_course_key to dest_course_key
        """
        raise NotImplementedError
    def generate_thumbnail(self, content, tempfile_path=None, dimensions=None):
        """Create a thumbnail for a given image.
        Returns a tuple of (StaticContent, AssetKey)
        `content` is the StaticContent representing the image you want to make a
        thumbnail out of.
        `tempfile_path` is a string path to the location of a file to read from
        in order to grab the image data, instead of relying on `content.data`
        `dimensions` is an optional param that represents (width, height) in
        pixels. It defaults to None.

        Note: the location is always returned, even when thumbnail generation
        fails or is skipped (non-image content) -- in that case the first
        element of the tuple is None.
        """
        thumbnail_content = None
        # use a naming convention to associate originals with the thumbnail
        thumbnail_name = StaticContent.generate_thumbnail_name(
            content.location.name, dimensions=dimensions
        )
        thumbnail_file_location = StaticContent.compute_location(
            content.location.course_key, thumbnail_name, is_thumbnail=True
        )
        # if we're uploading an image, then let's generate a thumbnail so that we can
        # serve it up when needed without having to rescale on the fly
        if content.content_type is not None and content.content_type.split('/')[0] == 'image':
            try:
                # use PIL to do the thumbnail generation (http://www.pythonware.com/products/pil/)
                # My understanding is that PIL will maintain aspect ratios while restricting
                # the max-height/width to be whatever you pass in as 'size'
                # @todo: move the thumbnail size to a configuration setting?!?
                if tempfile_path is None:
                    im = Image.open(StringIO.StringIO(content.data))
                else:
                    im = Image.open(tempfile_path)
                # I've seen some exceptions from the PIL library when trying to save palletted
                # PNG files to JPEG. Per the google-universe, they suggest converting to RGB first.
                im = im.convert('RGB')
                if not dimensions:
                    dimensions = (128, 128)
                im.thumbnail(dimensions, Image.ANTIALIAS)
                thumbnail_file = StringIO.StringIO()
                im.save(thumbnail_file, 'JPEG')
                thumbnail_file.seek(0)
                # store this thumbnail as any other piece of content
                thumbnail_content = StaticContent(thumbnail_file_location, thumbnail_name,
                                                  'image/jpeg', thumbnail_file)
                self.save(thumbnail_content)
            except Exception, e:
                # log and continue as thumbnails are generally considered as optional
                logging.exception(u"Failed to generate thumbnail for {0}. Exception: {1}".format(content.location, str(e)))
        return thumbnail_content, thumbnail_file_location
    def ensure_indexes(self):
        """
        Ensure that all appropriate indexes are created that are needed by this modulestore, or raise
        an exception if unable to.
        """
        pass
| MakeHer/edx-platform | common/lib/xmodule/xmodule/contentstore/content.py | Python | agpl-3.0 | 14,964 |
<?php
/*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the MIT license. For more information, see
* <http://www.doctrine-project.org>.
*/
namespace Doctrine\ORM\Query\AST;
/**
* JoinVariableDeclaration ::= Join [IndexBy]
*
* @license http://www.opensource.org/licenses/mit-license.php MIT
* @link www.doctrine-project.org
* @since 2.5
* @author Guilherme Blanco <guilhermeblanco@hotmail.com>
*/
class JoinVariableDeclaration extends Node
{
    /**
     * The JOIN clause of this declaration.
     *
     * @var Join
     */
    public $join;

    /**
     * Optional INDEX BY clause attached to the join, if any.
     *
     * @var IndexBy|null
     */
    public $indexBy;

    /**
     * @param Join         $join    The join clause.
     * @param IndexBy|null $indexBy The optional INDEX BY clause.
     */
    public function __construct($join, $indexBy)
    {
        $this->join    = $join;
        $this->indexBy = $indexBy;
    }

    /**
     * {@inheritdoc}
     */
    public function dispatch($walker)
    {
        return $walker->walkJoinVariableDeclaration($this);
    }
}
| hannesk001/SPHERE-Framework | Library/MOC-V/Component/Database/Vendor/Doctrine2ORM/2.5.0/lib/Doctrine/ORM/Query/AST/JoinVariableDeclaration.php | PHP | agpl-3.0 | 1,810 |
/* *
*
* (c) 2010-2020 Torstein Honsi
*
* License: www.highcharts.com/license
*
* !!!!!!! SOURCE GETS TRANSPILED BY TYPESCRIPT. EDIT TS FILE ONLY. !!!!!!!
*
* */
'use strict';
import H from './Globals.js';
import Point from './Point.js';
import U from './Utilities.js';
var seriesType = U.seriesType;
var seriesTypes = H.seriesTypes;
/**
* The ohlc series type.
*
* @private
* @class
* @name Highcharts.seriesTypes.ohlc
*
* @augments Highcharts.Series
*/
seriesType('ohlc', 'column'
/**
 * An OHLC chart is a style of financial chart used to describe price
 * movements over time. It displays open, high, low and close values per
 * data point.
 *
 * @sample stock/demo/ohlc/
 *         OHLC chart
 *
 * @extends      plotOptions.column
 * @excluding    borderColor, borderRadius, borderWidth, crisp, stacking,
 *               stack
 * @product      highstock
 * @optionparent plotOptions.ohlc
 */
, {
    /**
     * The approximate pixel width of each group. If for example a series
     * with 30 points is displayed over a 600 pixel wide plot area, no
     * grouping is performed. If however the series contains so many points
     * that the spacing is less than the groupPixelWidth, Highcharts will
     * try to group it into appropriate groups so that each is more or less
     * two pixels wide. Defaults to `5`.
     *
     * @type      {number}
     * @default   5
     * @product   highstock
     * @apioption plotOptions.ohlc.dataGrouping.groupPixelWidth
     */
    /**
     * The pixel width of the line/border. Defaults to `1`.
     *
     * @sample {highstock} stock/plotoptions/ohlc-linewidth/
     *         A greater line width
     *
     * @type    {number}
     * @default 1
     * @product highstock
     *
     * @private
     */
    lineWidth: 1,
    tooltip: {
        pointFormat: '<span style="color:{point.color}">\u25CF</span> ' +
            '<b> {series.name}</b><br/>' +
            'Open: {point.open}<br/>' +
            'High: {point.high}<br/>' +
            'Low: {point.low}<br/>' +
            'Close: {point.close}<br/>'
    },
    threshold: null,
    states: {
        /**
         * @extends plotOptions.column.states.hover
         * @product highstock
         */
        hover: {
            /**
             * The pixel width of the line representing the OHLC point.
             *
             * @type    {number}
             * @default 3
             * @product highstock
             */
            lineWidth: 3
        }
    },
    /**
     * Determines which one of `open`, `high`, `low`, `close` values should
     * be represented as `point.y`, which is later used to set dataLabel
     * position and [compare](#plotOptions.series.compare).
     *
     * @sample {highstock} stock/plotoptions/ohlc-pointvalkey/
     *         Possible values
     *
     * @type       {string}
     * @default    close
     * @validvalue ["open", "high", "low", "close"]
     * @product    highstock
     * @apioption  plotOptions.ohlc.pointValKey
     */
    /**
     * @default   close
     * @apioption plotOptions.ohlc.colorKey
     */
    /**
     * Line color for up points.
     *
     * @type      {Highcharts.ColorString|Highcharts.GradientColorObject|Highcharts.PatternObject}
     * @product   highstock
     * @apioption plotOptions.ohlc.upColor
     */
    stickyTracking: true
},
/**
 * @lends Highcharts.seriesTypes.ohlc
 */
{
    /* eslint-disable valid-jsdoc */
    directTouch: false,
    pointArrayMap: ['open', 'high', 'low', 'close'],
    toYData: function (point) {
        // return a plain array for speedy calculation
        return [point.open, point.high, point.low, point.close];
    },
    pointValKey: 'close',
    pointAttrToOptions: {
        stroke: 'color',
        'stroke-width': 'lineWidth'
    },
    /**
     * @private
     * @function Highcarts.seriesTypes.ohlc#init
     * @return {void}
     */
    init: function () {
        seriesTypes.column.prototype.init.apply(this, arguments);
        this.options.stacking = void 0; // #8817
    },
    /**
     * Postprocess mapping between options and SVG attributes
     *
     * @private
     * @function Highcharts.seriesTypes.ohlc#pointAttribs
     * @param {Highcharts.OHLCPoint} point
     * @param {string} state
     * @return {Highcharts.SVGAttributes}
     */
    pointAttribs: function (point, state) {
        var attribs = seriesTypes.column.prototype.pointAttribs.call(this, point, state), options = this.options;
        // OHLC glyphs are stroked lines only -- never filled.
        delete attribs.fill;
        if (!point.options.color &&
            options.upColor &&
            point.open < point.close) {
            attribs.stroke = options.upColor;
        }
        return attribs;
    },
    /**
     * Translate data points from raw values x and y to plotX and plotY
     *
     * @private
     * @function Highcharts.seriesTypes.ohlc#translate
     * @return {void}
     */
    translate: function () {
        var series = this, yAxis = series.yAxis, hasModifyValue = !!series.modifyValue, translated = [
            'plotOpen',
            'plotHigh',
            'plotLow',
            'plotClose',
            'yBottom'
        ]; // point members receiving the translated pixel values (low maps twice: plotLow and yBottom)
        seriesTypes.column.prototype.translate.apply(series);
        // Do the translation
        series.points.forEach(function (point) {
            [point.open, point.high, point.low, point.close, point.low]
                .forEach(function (value, i) {
                if (value !== null) {
                    if (hasModifyValue) {
                        value = series.modifyValue(value);
                    }
                    point[translated[i]] =
                        yAxis.toPixels(value, true);
                }
            });
            // Align the tooltip to the high value to avoid covering the
            // point
            point.tooltipPos[1] =
                point.plotHigh + yAxis.pos - series.chart.plotTop;
        });
    },
    /**
     * Draw the data points
     *
     * @private
     * @function Highcharts.seriesTypes.ohlc#drawPoints
     * @return {void}
     */
    drawPoints: function () {
        var series = this, points = series.points, chart = series.chart,
        /**
         * Extend vertical stem to open and close values.
         */
        extendStem = function (path, halfStrokeWidth, openOrClose) {
            var start = path[0];
            var end = path[1];
            // We don't need to worry about crisp - openOrClose value
            // is already crisped and halfStrokeWidth should remove it.
            if (typeof start[2] === 'number') {
                start[2] = Math.max(openOrClose + halfStrokeWidth, start[2]);
            }
            if (typeof end[2] === 'number') {
                end[2] = Math.min(openOrClose - halfStrokeWidth, end[2]);
            }
        };
        points.forEach(function (point) {
            var plotOpen, plotClose, crispCorr, halfWidth, path, graphic = point.graphic, crispX, isNew = !graphic, strokeWidth;
            if (typeof point.plotY !== 'undefined') {
                // Create and/or update the graphic
                if (!graphic) {
                    point.graphic = graphic = chart.renderer.path()
                        .add(series.group);
                }
                if (!chart.styledMode) {
                    graphic.attr(series.pointAttribs(point, (point.selected && 'select'))); // #3897
                }
                // crisp vector coordinates
                strokeWidth = graphic.strokeWidth();
                crispCorr = (strokeWidth % 2) / 2;
                // #2596:
                crispX = Math.round(point.plotX) - crispCorr;
                halfWidth = Math.round(point.shapeArgs.width / 2);
                // the vertical stem
                path = [
                    ['M', crispX, Math.round(point.yBottom)],
                    ['L', crispX, Math.round(point.plotHigh)]
                ];
                // open tick: a short horizontal line extending to the left
                if (point.open !== null) {
                    plotOpen = Math.round(point.plotOpen) + crispCorr;
                    path.push(['M', crispX, plotOpen], ['L', crispX - halfWidth, plotOpen]);
                    extendStem(path, strokeWidth / 2, plotOpen);
                }
                // close tick: a short horizontal line extending to the right
                if (point.close !== null) {
                    plotClose = Math.round(point.plotClose) + crispCorr;
                    path.push(['M', crispX, plotClose], ['L', crispX + halfWidth, plotClose]);
                    extendStem(path, strokeWidth / 2, plotClose);
                }
                graphic[isNew ? 'attr' : 'animate']({ d: path })
                    .addClass(point.getClassName(), true);
            }
        });
    },
    animate: null // Disable animation
    /* eslint-enable valid-jsdoc */
},
/**
 * @lends Highcharts.seriesTypes.ohlc.prototype.pointClass.prototype
 */
{
    /* eslint-disable valid-jsdoc */
    /**
     * Extend the parent method by adding up or down to the class name.
     * @private
     * @function Highcharts.seriesTypes.ohlc#getClassName
     * @return {string}
     */
    getClassName: function () {
        return Point.prototype.getClassName.call(this) +
            (this.open < this.close ?
                ' highcharts-point-up' :
                ' highcharts-point-down');
    }
    /* eslint-enable valid-jsdoc */
});
/**
* A `ohlc` series. If the [type](#series.ohlc.type) option is not
* specified, it is inherited from [chart.type](#chart.type).
*
* @extends series,plotOptions.ohlc
* @excluding dataParser, dataURL
* @product highstock
* @apioption series.ohlc
*/
/**
* An array of data points for the series. For the `ohlc` series type,
* points can be given in the following ways:
*
* 1. An array of arrays with 5 or 4 values. In this case, the values correspond
* to `x,open,high,low,close`. If the first value is a string, it is applied
* as the name of the point, and the `x` value is inferred. The `x` value can
* also be omitted, in which case the inner arrays should be of length 4\.
* Then the `x` value is automatically calculated, either starting at 0 and
* incremented by 1, or from `pointStart` and `pointInterval` given in the
* series options.
* ```js
* data: [
* [0, 6, 5, 6, 7],
* [1, 9, 4, 8, 2],
* [2, 6, 3, 4, 10]
* ]
* ```
*
* 2. An array of objects with named values. The following snippet shows only a
* few settings, see the complete options set below. If the total number of
* data points exceeds the series'
* [turboThreshold](#series.ohlc.turboThreshold), this option is not
* available.
* ```js
* data: [{
* x: 1,
* open: 3,
* high: 4,
* low: 5,
* close: 2,
* name: "Point2",
* color: "#00FF00"
* }, {
* x: 1,
* open: 4,
* high: 3,
* low: 6,
* close: 7,
* name: "Point1",
* color: "#FF00FF"
* }]
* ```
*
* @type {Array<Array<(number|string),number,number,number>|Array<(number|string),number,number,number,number>|*>}
* @extends series.arearange.data
* @excluding y, marker
* @product highstock
* @apioption series.ohlc.data
*/
/**
* The closing value of each data point.
*
* @type {number}
* @product highstock
* @apioption series.ohlc.data.close
*/
/**
* The opening value of each data point.
*
* @type {number}
* @product highstock
* @apioption series.ohlc.data.open
*/
''; // adds doclets above to the transpiled output
| burki/jewish-history-online | web/vendor/highcharts/es-modules/parts/OHLCSeries.js | JavaScript | agpl-3.0 | 11,563 |
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2015 Julien Veyssier, Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import random
import urllib
from urlparse import urlsplit
from weboob.deprecated.browser import Browser, BrowserHTTPNotFound
from .pages.index import IndexPage
from .pages.torrents import TorrentPage, TorrentsPage
__all__ = ['PiratebayBrowser']
class PiratebayBrowser(Browser):
    """Scraping browser for The Pirate Bay.

    When no explicit URL is configured, a random known mirror is picked so
    the load (and the risk of a dead mirror) is spread across domains.
    """
    ENCODING = 'utf-8'
    DOMAINS = ['thepiratebay.vg',
               'thepiratebay.la',
               'thepiratebay.mn',
               'thepiratebay.gd']

    def __init__(self, url, *args, **kwargs):
        # Fall back to a randomly chosen mirror when no URL was supplied.
        if not url:
            url = 'https://%s/' % random.choice(self.DOMAINS)
        parts = urlsplit(url)
        self.PROTOCOL = parts.scheme
        self.DOMAIN = parts.netloc
        # Map URL patterns to their page handlers for this mirror.
        base = (self.PROTOCOL, self.DOMAIN)
        self.PAGES = {
            '%s://%s/' % base: IndexPage,
            '%s://%s/search/.*/0/7/0' % base: TorrentsPage,
            '%s://%s/torrent/.*' % base: TorrentPage,
        }
        Browser.__init__(self, *args, **kwargs)

    def iter_torrents(self, pattern):
        """Search for *pattern* and iterate over the matching torrents."""
        quoted = urllib.quote_plus(pattern.encode('utf-8'))
        self.location('%s://%s/search/%s/0/7/0' % (self.PROTOCOL, self.DOMAIN, quoted))
        assert self.is_on_page(TorrentsPage)
        return self.page.iter_torrents()

    def get_torrent(self, _id):
        """Fetch one torrent by id; return None when the page does not exist."""
        try:
            self.location('%s://%s/torrent/%s/' % (self.PROTOCOL, self.DOMAIN, _id))
        except BrowserHTTPNotFound:
            return
        if self.is_on_page(TorrentPage):
            return self.page.get_torrent(_id)
| sputnick-dev/weboob | modules/piratebay/browser.py | Python | agpl-3.0 | 2,463 |
/*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2007-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.core.tasks;
import org.springframework.util.Assert;
/**
* <p>AsyncTask class.</p>
*
* @author ranger
* @version $Id: $
*/
public class AsyncTask<T> extends AbstractTask {

    /** The asynchronous operation this task wraps. */
    private final Async<T> m_async;

    /** Optional user callback invoked with the operation's result, may be null. */
    private final Callback<T> m_callback;

    /**
     * Creates an AsyncTask with no completion callback.
     *
     * @param coordinator a {@link org.opennms.core.tasks.TaskCoordinator} object.
     * @param parent a {@link org.opennms.core.tasks.ContainerTask} object.
     * @param async a {@link org.opennms.core.tasks.Async} object.
     * @param <T> a T object.
     */
    public AsyncTask(TaskCoordinator coordinator, ContainerTask<?> parent, Async<T> async) {
        this(coordinator, parent, async, null);
    }

    /**
     * Creates an AsyncTask that forwards the result (or failure) to the given callback.
     *
     * @param coordinator a {@link org.opennms.core.tasks.TaskCoordinator} object.
     * @param parent a {@link org.opennms.core.tasks.ContainerTask} object.
     * @param async a {@link org.opennms.core.tasks.Async} object.
     * @param callback a {@link org.opennms.core.tasks.Callback} object, may be null.
     */
    public AsyncTask(TaskCoordinator coordinator, ContainerTask<?> parent, Async<T> async, Callback<T> callback) {
        super(coordinator, parent);
        Assert.notNull(async, "async parameter must not be null");
        m_async = async;
        m_callback = callback;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return String.valueOf(m_async);
    }

    /** {@inheritDoc} */
    @Override
    protected void doSubmit() {
        final Callback<T> completion = callback();
        try {
            m_async.supplyAsyncThenAccept(completion);
        } catch (Throwable t) {
            // Launching the async work itself failed; route the failure
            // through the same completion path so the task still finishes.
            completion.handleException(t);
        }
    }

    /** Notify the coordinator that this task has finished. */
    private final void markTaskAsCompleted() {
        getCoordinator().markTaskAsCompleted(this);
    }

    /**
     * Builds the internal completion adapter: it forwards results/failures to
     * the user callback (when present) and always marks the task completed,
     * even if the user callback throws.
     */
    private Callback<T> callback() {
        return new Callback<T>() {
            @Override
            public void accept(final T result) {
                try {
                    if (m_callback != null) {
                        m_callback.accept(result);
                    }
                } finally {
                    markTaskAsCompleted();
                }
            }

            @Override
            public T apply(final Throwable cause) {
                try {
                    if (m_callback != null) {
                        m_callback.handleException(cause);
                    }
                } finally {
                    markTaskAsCompleted();
                }
                return null;
            }
        };
    }
}
| aihua/opennms | core/tasks/src/main/java/org/opennms/core/tasks/AsyncTask.java | Java | agpl-3.0 | 3,889 |
# frozen_string_literal: true
class Settings::PreferencesController < ApplicationController
  layout 'admin'

  before_action :authenticate_user!

  # Renders the preferences form.
  def show; end

  # Persists both the user's settings hash and the user record itself.
  # On success the UI locale is switched immediately so the redirect
  # already renders in the newly chosen language.
  def update
    user_settings.update(user_settings_params.to_h)

    unless current_user.update(user_params)
      render :show
      return
    end

    I18n.locale = current_user.locale
    redirect_to settings_preferences_path, notice: I18n.t('generic.changes_saved_msg')
  end

  private

  # Wraps the current user in the settings decorator used for persistence.
  def user_settings
    UserSettingsDecorator.new(current_user)
  end

  # Strong parameters for attributes stored directly on the user record.
  def user_params
    params.require(:user).permit(
      :locale,
      filtered_languages: []
    )
  end

  # Strong parameters for the decorated settings store.
  def user_settings_params
    params.require(:user).permit(
      :setting_default_privacy,
      :setting_boost_modal,
      :setting_auto_play_gif,
      notification_emails: %i(follow follow_request reblog favourite mention digest),
      interactions: %i(must_be_follower must_be_following)
    )
  end
end
| TootCat/mastodon | app/controllers/settings/preferences_controller.rb | Ruby | agpl-3.0 | 946 |
<?php
// created: 2015-12-11 14:01:20
// Studio/Module Builder generated vardef overrides for the Contact
// "salutation" field. Do not hand-edit: this file is rewritten on save.
$dictionary['Contact']['fields']['salutation']['len']=100;
$dictionary['Contact']['fields']['salutation']['inline_edit']=true;
$dictionary['Contact']['fields']['salutation']['comments']='Contact salutation (e.g., Mr, Ms)';
$dictionary['Contact']['fields']['salutation']['merge_filter']='disabled';
?>
"""This module implement decorators for wrapping data sources so as to
simplify their construction and attribution of properties.
"""
import functools
def data_source_generator(name=None, **properties):
    """Decorate a no-argument callable so it acts as a simple data source.

    The wrapped callable must take no arguments and directly return an
    iterable/generator yielding the metrics for each sample. Calling the
    decorated object with the data-source settings produces a properties
    dictionary whose 'factory' ignores the environment and always hands
    back the wrapped callable itself.
    """
    def _decorator(source):
        @functools.wraps(source)
        def _properties(settings):
            # A plain generator source has no per-environment state, so the
            # factory simply returns the wrapped callable unchanged.
            def _factory(environ):
                return source
            details = dict(properties)
            details['name'] = name
            details['factory'] = _factory
            return details
        return _properties
    return _decorator
def data_source_factory(name=None, **properties):
    """Decorate a data source defined as a factory (class or function).

    The wrapped callable is invoked as ``factory(settings, environ)`` —
    *settings* being the data source configuration and *environ* the usage
    context — and must return a callable that directly yields the metrics
    for each sample. The decorated object becomes a "properties" callable
    which, given the settings, returns a dict of *properties* augmented
    with *name* and a ``factory`` bound to those settings.
    """
    def decorator(factory_callable):
        @functools.wraps(factory_callable)
        def describe(settings):
            def build(environ):
                return factory_callable(settings, environ)
            return dict(properties, name=name, factory=build)
        return describe
    return decorator
| GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/newrelic-2.46.0.37/newrelic/samplers/decorators.py | Python | agpl-3.0 | 1,626 |
<?php
if (!defined('sugarEntry') || !sugarEntry) {
die('Not A Valid Entry Point');
}
/**
*
* SugarCRM Community Edition is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2013 SugarCRM Inc.
*
* SuiteCRM is an extension to SugarCRM Community Edition developed by SalesAgility Ltd.
* Copyright (C) 2011 - 2018 SalesAgility Ltd.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo and "Supercharged by SuiteCRM" logo. If the display of the logos is not
* reasonably feasible for technical reasons, the Appropriate Legal Notices must
* display the words "Powered by SugarCRM" and "Supercharged by SuiteCRM".
*/
/**
* Predefined logic hooks
* after_ui_frame
* after_ui_footer
* after_save
* before_save
* before_retrieve
* after_retrieve
* process_record
* before_delete
* after_delete
* before_restore
* after_restore
* server_roundtrip
* before_logout
* after_logout
* before_login
* after_login
* login_failed
* after_session_start
* after_entry_point
*
* @api
*/
class LogicHook
{
    // Bean the hooks operate on; when set, it is passed as the first
    // argument to every hook method (see process_hooks()).
    public $bean = null;

    public function __construct()
    {
    }

    /**
     * Static Function which returns an instance of LogicHook
     * (a process-wide singleton stored in $GLOBALS['logic_hook']).
     *
     * @return LogicHook
     */
    public static function initialize()
    {
        if (empty($GLOBALS['logic_hook'])) {
            $GLOBALS['logic_hook'] = new LogicHook();
        }
        return $GLOBALS['logic_hook'];
    }

    /**
     * Sets the bean to pass to hook callbacks. Returns $this for chaining.
     */
    public function setBean($bean)
    {
        $this->bean = $bean;
        return $this;
    }

    // Maps event type => hook index => originating file/index, filled by scanHooksDir().
    protected $hook_map = array();
    // Raw hook definitions collected by scanHooksDir(), keyed by event type.
    protected $hookscan = array();

    public function getHooksMap()
    {
        return $this->hook_map;
    }

    public function getHooksList()
    {
        return $this->hookscan;
    }

    /**
     * Scans a directory of hook extension files; each included file is
     * expected to define $hook_array (event type => list of hook entries).
     */
    public function scanHooksDir($extpath)
    {
        if (is_dir($extpath)) {
            $dir = dir($extpath);
            while ($entry = $dir->read()) {
                if ($entry != '.' && $entry != '..' && strtolower(substr($entry, -4)) == ".php" && is_file($extpath.'/'.$entry)) {
                    // Clear any $hook_array left over from the previous file.
                    unset($hook_array);
                    include($extpath.'/'.$entry);
                    if (!empty($hook_array)) {
                        foreach ($hook_array as $type => $hookg) {
                            foreach ($hookg as $index => $hook) {
                                $this->hookscan[$type][] = $hook;
                                $idx = count($this->hookscan[$type])-1;
                                // Remember where each hook came from for tooling/debugging.
                                $this->hook_map[$type][$idx] = array("file" => $extpath.'/'.$entry, "index" => $index);
                            }
                        }
                    }
                }
            }
        }
    }

    // Per-module cache of loaded hook definitions (module_dir => $hook_array).
    protected static $hooks = array();

    public static function refreshHooks()
    {
        self::$hooks = array();
    }

    /**
     * Loads hook definitions for a module (or the application when
     * $module_dir is empty) from custom logic_hooks.php and the compiled
     * LogicHooks extension file. Included files populate $hook_array.
     *
     * @param string $module_dir
     * @return array
     */
    public function loadHooks($module_dir)
    {
        $hook_array = array();

        if (!empty($module_dir)) {
            $custom = "custom/modules/$module_dir";
        } else {
            $custom = "custom/modules";
        }

        if (file_exists("$custom/logic_hooks.php")) {
            if (isset($GLOBALS['log'])) {
                $GLOBALS['log']->debug('Including module specific hook file for '.$custom);
            }
            include("$custom/logic_hooks.php");
        }

        if (empty($module_dir)) {
            $custom = "custom/application";
        }
        if (file_exists("$custom/Ext/LogicHooks/logichooks.ext.php")) {
            if (isset($GLOBALS['log'])) {
                $GLOBALS['log']->debug('Including Ext hook file for '.$custom);
            }
            include("$custom/Ext/LogicHooks/logichooks.ext.php");
        }

        return $hook_array;
    }

    /**
     * Returns the (cached) hook definitions for a module.
     *
     * @param string $module_dir
     * @param bool $refresh force a reload, bypassing the static cache
     * @return array
     */
    public function getHooks($module_dir, $refresh = false)
    {
        if ($refresh || !isset(self::$hooks[$module_dir])) {
            self::$hooks[$module_dir] = $this->loadHooks($module_dir);
        }
        return self::$hooks[$module_dir];
    }

    /**
     * Provide a means for developers to create upgrade safe business logic hooks.
     * If the bean is null, then we assume this call was not made from a SugarBean Object and
     * therefore we do not pass it to the method call.
     *
     * Module-level hooks run first, then application-level ('') hooks.
     *
     * @param string $module_dir
     * @param string $event
     * @param array $arguments
     */
    public function call_custom_logic($module_dir, $event, $arguments = null)
    {
        // declare the hook array variable, it will be defined in the included file.
        $hook_array = null;

        if (isset($GLOBALS['log'])) {
            $GLOBALS['log']->debug("Hook called: $module_dir::$event");
        }
        if (!empty($module_dir)) {
            // This will load an array of the hooks to process
            $hooks = $this->getHooks($module_dir);
            if (!empty($hooks)) {
                $this->process_hooks($hooks, $event, $arguments);
            }
        }
        $hooks = $this->getHooks('');
        if (!empty($hooks)) {
            $this->process_hooks($hooks, $event, $arguments);
        }
    }

    /**
     * This is called from call_custom_logic and actually performs the action as defined in the
     * logic hook. If the bean is null, then we assume this call was not made from a SugarBean Object and
     * therefore we do not pass it to the method call.
     *
     * Hook entry layout: [0] sort order, [1] label, [2] file, [3] class, [4] method.
     *
     * @param array $hook_array
     * @param string $event
     * @param array $arguments
     */
    public function process_hooks($hook_array, $event, $arguments)
    {
        // Now iterate through the array for the appropriate hook
        if (!empty($hook_array[$event])) {
            // Apply sorting to the hooks using the sort index.
            // Hooks with matching sort indexes will be processed in no particular order.
            $sorted_indexes = array();
            foreach ($hook_array[$event] as $idx => $hook_details) {
                $order_idx = $hook_details[0];
                $sorted_indexes[$idx] = $order_idx;
            }
            asort($sorted_indexes);

            $process_order = array_keys($sorted_indexes);

            foreach ($process_order as $hook_index) {
                $hook_details = $hook_array[$event][$hook_index];
                if (!file_exists($hook_details[2])) {
                    if (isset($GLOBALS['log'])) {
                        $GLOBALS['log']->error('Unable to load custom logic file: '.$hook_details[2]);
                    }
                    // Skip this hook but keep processing the rest.
                    continue;
                }
                include_once($hook_details[2]);

                $hook_class = $hook_details[3];
                $hook_function = $hook_details[4];

                // Make a static call to the function of the specified class
                //TODO Make a factory for these classes. Cache instances accross uses
                if ($hook_class == $hook_function) {
                    // Class name == method name: the constructor itself is the hook.
                    if (isset($GLOBALS['log'])) {
                        $GLOBALS['log']->debug('Creating new instance of hook class '.$hook_class.' with parameters');
                    }
                    if (!is_null($this->bean)) {
                        $class = new $hook_class($this->bean, $event, $arguments);
                    } else {
                        $class = new $hook_class($event, $arguments);
                    }
                } else {
                    if (isset($GLOBALS['log'])) {
                        $GLOBALS['log']->debug('Creating new instance of hook class '.$hook_class.' without parameters');
                    }
                    $class = new $hook_class();
                    if (!is_null($this->bean)) {
                        $class->$hook_function($this->bean, $event, $arguments);
                    } else {
                        $class->$hook_function($event, $arguments);
                    }
                }
            }
        }
    }
}
| JimMackin/SuiteCRM | include/utils/LogicHook.php | PHP | agpl-3.0 | 9,433 |
package tc.oc.api.ocn;
import java.util.Collection;
import javax.inject.Singleton;
import com.google.common.util.concurrent.ListenableFuture;
import tc.oc.api.docs.MapRating;
import tc.oc.api.docs.virtual.MapDoc;
import tc.oc.api.docs.virtual.UserDoc;
import tc.oc.api.exceptions.NotFound;
import tc.oc.api.http.HttpOption;
import tc.oc.api.maps.MapRatingsRequest;
import tc.oc.api.maps.MapRatingsResponse;
import tc.oc.api.maps.MapService;
import tc.oc.api.maps.UpdateMapsResponse;
import tc.oc.api.model.HttpModelService;
import tc.oc.commons.core.concurrent.FutureUtils;
import tc.oc.commons.core.stream.Collectors;
/**
 * HTTP-backed {@link MapService} that talks to the OCN models API.
 * NOTE(review): rate/getRatings use {@code HttpOption.INFINITE_RETRY}, so
 * their futures may take arbitrarily long to settle — confirm callers
 * tolerate this.
 */
@Singleton
class OCNMapService extends HttpModelService<MapDoc, MapDoc> implements MapService {

    // POST <maps>/<map_id>/rate — records a single map rating.
    public ListenableFuture<Object> rate(MapRating rating) {
        return this.client().post(memberUri(rating.map_id, "rate"), rating, Object.class, HttpOption.INFINITE_RETRY);
    }

    // POST <maps>/<map_id>/get_ratings — fetches ratings matching the request.
    public ListenableFuture<MapRatingsResponse> getRatings(MapRatingsRequest request) {
        return this.client().post(memberUri(request.map_id, "get_ratings"), request, MapRatingsResponse.class, HttpOption.INFINITE_RETRY);
    }

    /**
     * Bulk-updates map documents and exposes, for each distinct author or
     * contributor UUID, a future resolving to that user's identity; a UUID
     * missing from the response fails its future with {@link NotFound}.
     */
    public UpdateMapsResponse updateMaps(Collection<? extends MapDoc> maps) {
        final ListenableFuture<MapUpdateMultiResponse> future = updateMulti(maps, MapUpdateMultiResponse.class);
        return new UpdateMapsResponse(
            (ListenableFuture) future,
            maps.stream()
                .flatMap(MapDoc::authorAndContributorUuids)
                .distinct()
                .collect(Collectors.mappingTo(uuid -> FutureUtils.mapSync(
                    future,
                    response -> {
                        final UserDoc.Identity user = response.users_by_uuid.get(uuid);
                        if(user != null) return user;
                        throw new NotFound();
                    }
                )))
        );
    }
}
| OvercastNetwork/ProjectAres | API/ocn/src/main/java/tc/oc/api/ocn/OCNMapService.java | Java | agpl-3.0 | 1,908 |
/*--------------------------------------------------------------------
Copyright (c) 2011 Local Projects. All rights reserved.
Licensed under the Affero GNU GPL v3, see LICENSE for more details.
--------------------------------------------------------------------*/
// Drives project-widget visibility from the URL hash. Hashes have the form
// "action,widget[,id]" (e.g. "show,home"); each change is re-broadcast as an
// "<action>-project-widget" event on the tc namespace.
tc.gam.widgetVisibilityHandler = function(options) {
    var self = {
        currentHash: window.location.hash,
        previousHash: null
    };

    // Navigate to the given hash. If we are already on it, the browser will
    // not fire 'hashchange', so trigger it manually to re-run listeners.
    self._setHash = function(hash) {
        if (hash === self.currentHash) {
            tc.jQ(window).trigger('hashchange');
        } else {
            //This will trigger the 'hashchange' event because the hash is different
            window.location.hash = hash;
        }
    };

    // Returns the current hash without its leading '#'.
    self._getHash = function() {
        return window.location.hash.substring(1, window.location.hash.length);
    };

    self._goHome = function() {
        self._setHash('show,home');
    };

    self._triggerWidgetVisibilityEvent = function(action, widget, id) {
        tc.jQ(tc).trigger(action + '-project-widget', [widget, id]);
    };

    self._onHashChange = function(event) {
        // FIX: 'id' was assigned without being declared, leaking an implicit
        // global (and carrying a stale value between navigations).
        var action, widget, id;
        self.previousHash = self.currentHash;
        self.currentHash = self._getHash();

        // For project-home hash, fire goHome.
        if (!self.currentHash || self.currentHash === 'project-home') {
            self._goHome();
        } else {
            // Split once instead of three times.
            var parts = self.currentHash.split(',');
            action = parts[0];
            widget = parts[1];
            id = parts[2];
        }

        tc.util.log('&&& hashchange: ' + action + ', ' + widget);
        self._triggerWidgetVisibilityEvent(action, widget, id);
    };

    var bindEvents = function() {
        tc.jQ(window).bind('hashchange', self._onHashChange);
    };

    var init = function() {
        bindEvents();
        if (self.currentHash) {
            // Re-apply the hash present at load so deep links render.
            self._setHash(self.currentHash);
        } else {
            self._goHome();
        }
    };

    init();
    return self;
};
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# LICENSE
#
# Copyright (c) 2010-2017, GEM Foundation, G. Weatherill, M. Pagani,
# D. Monelli.
#
# The Hazard Modeller's Toolkit is free software: you can redistribute
# it and/or modify it under the terms of the GNU Affero General Public
#License as published by the Free Software Foundation, either version
#3 of the License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>
#
#DISCLAIMER
#
# The software Hazard Modeller's Toolkit (openquake.hmtk) provided herein
#is released as a prototype implementation on behalf of
# scientists and engineers working within the GEM Foundation (Global
#Earthquake Model).
#
# It is distributed for the purpose of open collaboration and in the
# hope that it will be useful to the scientific, engineering, disaster
# risk and software design communities.
#
# The software is NOT distributed as part of GEM's OpenQuake suite
# (http://www.globalquakemodel.org/openquake) and must be considered as a
# separate entity. The software provided herein is designed and implemented
# by scientific staff. It is not developed to the design standards, nor
# subject to same level of critical review by professional software
# developers, as GEM's OpenQuake software suite.
#
# Feedback and contribution to the software is welcome, and can be
# directed to the hazard scientific staff of the GEM Model Facility
# (hazard@globalquakemodel.org).
#
# The Hazard Modeller's Toolkit (openquake.hmtk) is therefore distributed WITHOUT
#ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
#FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
#for more details.
#
# The GEM Foundation, and the authors of the software, assume no
# liability for use of the software.
| gem/oq-hazardlib | openquake/hmtk/strain/regionalisation/__init__.py | Python | agpl-3.0 | 1,925 |
/*****************************************************************************
*
* This file is part of Mapnik (c++ mapping toolkit)
*
* Copyright (C) 2006 Artem Pavlenko
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
*****************************************************************************/
//$Id: style.hpp 39 2005-04-10 20:39:53Z pavlenko $
#ifndef STYLE_HPP
#define STYLE_HPP

// mapnik
#include <mapnik/color.hpp>
#include <mapnik/symbolizer.hpp>
// boost
#include <boost/shared_ptr.hpp>
// stl
#include <vector>
#include <algorithm>
#include <functional>

// NOTE(review): this namespace is intentionally empty here — the header
// appears to exist only for its includes; confirm nothing relies on it
// before removing.
namespace mapnik { }

#endif //STYLE_HPP
| carlos-lopez-garces/mapnik-trunk | include/mapnik/style.hpp | C++ | lgpl-2.1 | 1,310 |
/*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.workplace.tools.content;
import org.opencms.file.CmsProperty;
import org.opencms.file.CmsPropertyDefinition;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsVfsException;
import org.opencms.i18n.CmsMessages;
import org.opencms.jsp.CmsJspActionElement;
import org.opencms.lock.CmsLock;
import org.opencms.main.CmsException;
import org.opencms.main.OpenCms;
import org.opencms.workplace.CmsDialog;
import org.opencms.workplace.CmsWorkplace;
import org.opencms.workplace.CmsWorkplaceSettings;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.PageContext;
/**
* Provides methods for the delete property definition dialog.<p>
*
* @since 6.0.0
*/
public class CmsPropertyDelete extends CmsDialog {

    /** Value for the action: delete cascade. */
    public static final int ACTION_DELETE_CASCADE = 100;

    /** Request parameter value for the action: delete cascade. */
    public static final String DIALOG_DELETE_CASCADE = "deletecascade";

    /** The dialog type. */
    public static final String DIALOG_TYPE = "propertydelete";

    /** Request parameter name for the property name. */
    public static final String PARAM_PROPERTYNAME = "propertyname";

    // Name of the property definition this dialog operates on (from request).
    private String m_paramPropertyName;

    /**
     * Public constructor with JSP action element.<p>
     *
     * @param jsp an initialized JSP action element
     */
    public CmsPropertyDelete(CmsJspActionElement jsp) {

        super(jsp);
    }

    /**
     * Public constructor with JSP variables.<p>
     *
     * @param context the JSP page context
     * @param req the JSP request
     * @param res the JSP response
     */
    public CmsPropertyDelete(PageContext context, HttpServletRequest req, HttpServletResponse res) {

        this(new CmsJspActionElement(context, req, res));
    }

    /**
     * Deletes the property definition.<p>
     *
     * @throws JspException if problems including sub-elements occur
     */
    public void actionDelete() throws JspException {

        // save initialized instance of this class in request attribute for included sub-elements
        getJsp().getRequest().setAttribute(SESSION_WORKPLACE_CLASS, this);
        try {
            getCms().deletePropertyDefinition(getParamPropertyName());
            // close the dialog
            actionCloseDialog();
        } catch (Throwable e) {
            // error while deleting property definition, show error dialog
            includeErrorpage(this, e);
        }
    }

    /**
     * Deletes the property definition by cascading the properties on resources.<p>
     *
     * First removes the property value from every resource carrying it
     * (refusing to proceed if any such resource is locked by another user),
     * then deletes the definition itself.<p>
     *
     * @throws JspException if problems including sub-elements occur
     */
    public void actionDeleteCascade() throws JspException {

        // save initialized instance of this class in request attribute for included sub-elements
        getJsp().getRequest().setAttribute(SESSION_WORKPLACE_CLASS, this);

        try {
            // list of all resources containing this propertydefinition
            List resourcesWithProperty = getCms().readResourcesWithProperty(getParamPropertyName());
            // list of all resources locked by another user, containing this propertydefinition
            List resourcesLockedByOtherUser = getResourcesLockedByOtherUser(resourcesWithProperty);
            // do the following operations only if all of the resources are not locked by another user
            if (resourcesLockedByOtherUser.isEmpty()) {
                // save the site root
                String storedSiteRoot = getCms().getRequestContext().getSiteRoot();
                try {
                    // change to the root site so root paths resolve everywhere
                    getCms().getRequestContext().setSiteRoot("/");
                    Iterator i = resourcesWithProperty.iterator();
                    while (i.hasNext()) {
                        CmsResource resource = (CmsResource)i.next();
                        // read the property object
                        CmsProperty property = getCms().readPropertyObject(
                            resource.getRootPath(),
                            getParamPropertyName(),
                            false);
                        // try to delete the property if it is not the NULL PROPERTY
                        // if the property is the NULL PROPERTY, it only had a shared
                        // value which was deleted at a sibling which was already processed
                        if (!property.isNullProperty()) {
                            CmsLock lock = getCms().getLock(resource);
                            if (lock.isUnlocked()) {
                                // lock the resource for the current (Admin) user
                                getCms().lockResource(resource.getRootPath());
                            }
                            property.setStructureValue(CmsProperty.DELETE_VALUE);
                            property.setResourceValue(CmsProperty.DELETE_VALUE);
                            // write the property with the null value to the resource and cascade it from the definition
                            getCms().writePropertyObject(resource.getRootPath(), property);
                            // unlock the resource
                            getCms().unlockResource(resource.getRootPath());
                        }
                    }
                    // delete the property definition at last
                    getCms().deletePropertyDefinition(getParamPropertyName());
                } finally {
                    // restore the siteroot
                    getCms().getRequestContext().setSiteRoot(storedSiteRoot);
                    // close the dialog
                    actionCloseDialog();
                }
            } else {
                // abort: report the resources we cannot modify
                StringBuffer reason = new StringBuffer();
                reason.append(dialogWhiteBoxStart());
                reason.append(buildResourceList(resourcesLockedByOtherUser, true));
                reason.append(dialogWhiteBoxEnd());
                throw new CmsVfsException(Messages.get().container(
                    Messages.ERR_DEL_PROP_RESOURCES_LOCKED_1,
                    reason.toString()));
            }
        } catch (Throwable e) {
            // error while deleting property definition, show error dialog
            includeErrorpage(this, e);
        }
    }

    /**
     * Builds a HTML list of Resources that use the specified property.<p>
     *
     * @throws CmsException if operation was not successful
     *
     * @return the HTML String for the Resource list
     */
    public String buildResourceList() throws CmsException {

        List resourcesWithProperty = getCms().readResourcesWithProperty(getParamPropertyName());
        return buildResourceList(resourcesWithProperty, false);
    }

    /**
     * Builds a HTML list of Resources.<p>
     *
     * Columns: Type, Name, Uri, Value of the property, locked by(optional).<p>
     *
     * @param resourceList a list of resources
     * @param lockInfo a boolean to decide if the locked info should be shown or not
     * @throws CmsException if operation was not successful
     *
     * @return the HTML String for the Resource list
     */
    public String buildResourceList(List resourceList, boolean lockInfo) throws CmsException {

        // reverse the resource list
        Collections.reverse(resourceList);
        CmsMessages messages = Messages.get().getBundle(getLocale());
        StringBuffer result = new StringBuffer();
        result.append("<table border=\"0\" width=\"100%\" cellpadding=\"1\" cellspacing=\"1\">\n");
        result.append("<tr>\n");
        // Type
        result.append("\t<td style=\"width:5%;\" class=\"textbold\">");
        result.append(messages.key(Messages.GUI_INPUT_TYPE_0));
        result.append("</td>\n");
        // Uri
        result.append("\t<td style=\"width:40%;\" class=\"textbold\">");
        result.append(messages.key(Messages.GUI_INPUT_ADRESS_0));
        result.append("</td>\n");
        // Name
        result.append("\t<td style=\"width:25%;\" class=\"textbold\">");
        result.append(messages.key(Messages.GUI_INPUT_TITLE_0));
        result.append("</td>\n");
        if (!lockInfo) {
            // Property value
            result.append("\t<td style=\"width:30%;\" class=\"textbold\">");
            result.append(messages.key(Messages.GUI_INPUT_PROPERTYVALUE_0));
            result.append("</td>\n");
        }
        if (lockInfo) {
            // Property value
            result.append("\t<td style=\"width:30%;\" class=\"textbold\">");
            result.append(messages.key(Messages.GUI_EXPLORER_LOCKEDBY_0));
            result.append("</td>\n");
            result.append("</tr>\n");
        }
        result.append("</tr>\n");
        result.append("<tr><td colspan=\"4\"><span style=\"height: 6px;\"> </span></td></tr>\n");
        String storedSiteRoot = getCms().getRequestContext().getSiteRoot();
        try {
            // resources are addressed by root path, so switch to the root site
            getCms().getRequestContext().setSiteRoot("/");
            Iterator i = resourceList.iterator();
            while (i.hasNext()) {
                CmsResource resource = (CmsResource)i.next();
                String filetype = OpenCms.getResourceManager().getResourceType(resource.getTypeId()).getTypeName();
                result.append("<tr>\n");
                // file type
                result.append("\t<td>");
                result.append("<img src=\"");
                result.append(getSkinUri());
                result.append(CmsWorkplace.RES_PATH_FILETYPES);
                result.append(filetype);
                result.append(".gif\">");
                result.append("</td>\n");
                // file address
                result.append("\t<td>");
                result.append(resource.getRootPath());
                result.append("</td>\n");
                // title
                result.append("\t<td>");
                result.append(getJsp().property(CmsPropertyDefinition.PROPERTY_TITLE, resource.getRootPath(), ""));
                result.append("</td>\n");
                // current value of the property
                if (!lockInfo) {
                    result.append("\t<td>");
                    result.append(getJsp().property(getParamPropertyName(), resource.getRootPath()));
                    result.append("</td>\n");
                }
                // locked by user
                if (lockInfo) {
                    CmsLock lock = getCms().getLock(resource);
                    result.append("\t<td>");
                    result.append(getCms().readUser(lock.getUserId()).getName());
                    result.append("</td>\n");
                }
                result.append("</tr>\n");
            }
            result.append("</table>\n");
        } finally {
            getCms().getRequestContext().setSiteRoot(storedSiteRoot);
        }
        return result.toString();
    }

    /**
     * Builds the html for the property definition select box.<p>
     *
     * @param attributes optional attributes for the &lt;select&gt; tag
     * @return the html for the property definition select box
     */
    public String buildSelectProperty(String attributes) {

        return CmsPropertyChange.buildSelectProperty(getCms(), Messages.get().getBundle(getLocale()).key(
            Messages.GUI_PLEASE_SELECT_0), attributes, "");
    }

    /**
     * Returns the value of the propertyname parameter.<p>
     *
     * @return the value of the propertyname parameter
     */
    public String getParamPropertyName() {

        return m_paramPropertyName;
    }

    /**
     * Sets the value of the propertyname parameter.<p>
     *
     * @param paramPropertyName the value of the propertyname parameter
     */
    public void setParamPropertyName(String paramPropertyName) {

        m_paramPropertyName = paramPropertyName;
    }

    /**
     * @see org.opencms.workplace.CmsWorkplace#initWorkplaceRequestValues(org.opencms.workplace.CmsWorkplaceSettings, javax.servlet.http.HttpServletRequest)
     */
    protected void initWorkplaceRequestValues(CmsWorkplaceSettings settings, HttpServletRequest request) {

        // fill the parameter values in the get/set methods
        fillParamValues(request);
        // set the dialog type
        setParamDialogtype(DIALOG_TYPE);
        // set the action for the JSP switch
        if (DIALOG_OK.equals(getParamAction())) {
            setAction(ACTION_OK);
            setParamTitle(Messages.get().getBundle(getLocale()).key(Messages.GUI_TITLE_PROPERTYDELETE_0)
                + ": "
                + getParamPropertyName());
        } else if (DIALOG_CANCEL.equals(getParamAction())) {
            setAction(ACTION_CANCEL);
        } else if (DIALOG_DELETE_CASCADE.equals(getParamAction())) {
            setAction(ACTION_DELETE_CASCADE);
        } else {
            setAction(ACTION_DEFAULT);
            // build title for change property value dialog
            setParamTitle(Messages.get().getBundle(getLocale()).key(Messages.GUI_TITLE_PROPERTYDELETE_0));
        }
    }

    /**
     * Returns a list of resources that are locked by another user as the current user.<p>
     *
     * @param resourceList the list of all (mixed) resources
     *
     * @return a list of resources that are locked by another user as the current user
     * @throws CmsException if the getLock operation fails
     */
    private List getResourcesLockedByOtherUser(List resourceList) throws CmsException {

        List lockedResourcesByOtherUser = new ArrayList();
        Iterator i = resourceList.iterator();
        while (i.hasNext()) {
            CmsResource resource = (CmsResource)i.next();
            // get the lock state for the resource
            CmsLock lock = getCms().getLock(resource);
            // add this resource to the list if this is locked by another user
            if (!lock.isUnlocked() && !lock.isOwnedBy(getCms().getRequestContext().getCurrentUser())) {
                lockedResourcesByOtherUser.add(resource);
            }
        }
        return lockedResourcesByOtherUser;
    }
}
| serrapos/opencms-core | src-modules/org/opencms/workplace/tools/content/CmsPropertyDelete.java | Java | lgpl-2.1 | 15,467 |
#!/usr/bin/python
import math
import Sofa
def tostr(L):
    """Flatten a (possibly nested) list into a space-separated scalar string."""
    return str(L).translate(str.maketrans({'[': None, ']': None, ',': ' '}))
def transform(T, p):
    """Apply the affine transform T = [R, t] (R row-major 3x3, t length-3) to point p."""
    R, t = T
    return [
        R[3 * i] * p[0] + R[3 * i + 1] * p[1] + R[3 * i + 2] * p[2] + t[i]
        for i in range(3)
    ]
def transformF(T, F):
    """Left-multiply the row-major 3x3 deformation gradient F by T's linear part."""
    R = T[0]
    return [
        sum(R[3 * i + k] * F[3 * k + j] for k in range(3))
        for i in range(3)
        for j in range(3)
    ]
def compare(p1, p2):
    """Return the summed squared element-wise difference between two point lists."""
    total = 0
    for i, row in enumerate(p1):
        for j, value in enumerate(row):
            delta = value - p2[i][j]
            total += delta * delta
    return total
# Tolerance for the sum-of-squared-differences checks in the Controller below.
ERRORTOL = 1e-5

# Test transform [R, t]: currently a pure uniform scaling by 2 (row-major 3x3
# linear part, zero translation); the commented alternative is a general affine.
T = [[2,0,0,0,2,0,0,0,2],[0,0,0]]
#T = [[0.8,1.2,0.3,0,1.9,0.45,0.5,2.8,0.2],[5,2,8]]

# Material-coordinate sample points inside the unit cube.
samples= [[0.5,0.5,0.5], [0.23,0.5,0.8], [0,0.12,0], [0.8,0,0.58]]
# scene creation method
def createScene(rootNode):
    """Build three patch-test scenes (hexa/tetra barycentric, hexa Shepard).

    Each scene maps the same unit cube (rest) to its T-transformed image and
    attaches a Controller that verifies mapped points and deformation
    gradients against the analytic transform.
    """
    rootNode.createObject('RequiredPlugin', pluginName="Flexible")
    rootNode.createObject('VisualStyle', displayFlags="showBehaviorModels")

    # Rest shape: unit cube corners; current shape: the same corners under T.
    restpos = [[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0], [0, 0, 1], [1, 0, 1], [0, 1, 1], [1, 1, 1]]
    pos = [transform(T,item) for item in restpos]

    ###########################################################
    # Variant 1: one hexahedron with barycentric shape functions.
    simNode = rootNode.createChild('Hexa_barycentric')
    simNode.createObject('MeshTopology', name="mesh", position=tostr(restpos), hexahedra="0 1 3 2 4 5 7 6")
    simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) )
    simNode.createObject('BarycentricShapeFunction', position="@parent.rest_position", nbRef="8")

    childNode = simNode.createChild('childP')
    childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1")
    childNode.createObject('LinearMapping', template="Vec3d,Vec3d")

    childNode = simNode.createChild('childF')
    childNode.createObject('GaussPointContainer', position=tostr(samples))
    childNode.createObject('MechanicalObject', template="F331", name="child")
    childNode.createObject('LinearMapping', template="Vec3d,F331", showDeformationGradientScale="1")

    childNode = simNode.createChild('Visu')
    childNode.createObject('VisualModel', color="8e-1 8e-1 1 1e-1")
    childNode.createObject('IdentityMapping')

    childNode = simNode.createChild('Visu2')
    childNode.createObject('VisualStyle', displayFlags="showWireframe")
    childNode.createObject('VisualModel', color="8e-1 8e-1 1 1")
    childNode.createObject('IdentityMapping')
    simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller")

    ###########################################################
    # Variant 2: same cube split into tetrahedra, barycentric shape functions.
    simNode = rootNode.createChild('Tetra_barycentric')
    simNode.createObject('MeshTopology', name="mesh", position=tostr(restpos), tetrahedra="0 5 1 7 0 1 2 7 1 2 7 3 7 2 0 6 7 6 0 5 6 5 4 0")
    simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) )
    simNode.createObject('BarycentricShapeFunction', position="@parent.rest_position", nbRef="4")

    childNode = simNode.createChild('childP')
    childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1")
    childNode.createObject('LinearMapping', template="Vec3d,Vec3d")

    childNode = simNode.createChild('childF')
    childNode.createObject('GaussPointContainer', position=tostr(samples))
    childNode.createObject('MechanicalObject', template="F331", name="child")
    childNode.createObject('LinearMapping', template="Vec3d,F331")
    simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller")

    ###########################################################
    # Variant 3: hexahedron with Shepard (inverse-distance) shape functions.
    simNode = rootNode.createChild('Hexa_shepard')
    simNode.createObject('MeshTopology', name="mesh", position=tostr(restpos), hexahedra="0 1 3 2 4 5 7 6")
    simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) )
    simNode.createObject('ShepardShapeFunction', position="@parent.rest_position", power="2")

    childNode = simNode.createChild('childP')
    childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1")
    childNode.createObject('LinearMapping', template="Vec3d,Vec3d")

    childNode = simNode.createChild('childF')
    childNode.createObject('GaussPointContainer', position=tostr(samples))
    childNode.createObject('MechanicalObject', template="F331", name="child")
    childNode.createObject('LinearMapping', template="Vec3d,F331")
    simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller")

    ###########################################################
    rootNode.animate=1

    return rootNode
class Controller(Sofa.PythonScriptController):
    """Scene controller that validates each patch-test node after one animation step.

    After the first step it compares the mapped child positions and deformation
    gradients against reference values built from the transform ``T``.  The
    helpers ``transform``/``transformF``/``compare`` and the tolerance
    ``ERRORTOL`` are module-level names defined elsewhere in this file.
    """

    def createGraph(self,node):
        # Remember the node this controller is attached to and arm the
        # one-shot flag so the check below runs only once.
        self.node=node
        self.done=0
        return 0

    def onEndAnimationStep(self,dt):
        # Runs after every simulation step; the actual comparison happens
        # only on the first call (guarded by self.done).
        if self.done==0:
            print "TEST "+self.node.name+":"
            # test points: mapped child positions must match the rest
            # positions pushed through the reference transform T
            restpos = self.node.getObject('childP/child').findData('rest_position').value
            refpos = [transform(T,item) for item in restpos]
            pos = self.node.getObject('childP/child').findData('position').value
            error = compare(refpos,pos)
            if error>ERRORTOL :
                print "\t"+"\033[91m"+"[FAILED]"+"\033[0m"+" error on P= "+str(error)
            else :
                print "\t"+"\033[92m"+"[OK]"+"\033[0m"+" error on P= "+str(error)
            # test defo gradients: every mapped F should equal T applied to
            # the identity matrix (row-major 3x3 below); the loop variable is
            # deliberately unused -- one reference value per sample in pos
            restpos = [1,0,0,0,1,0,0,0,1]
            pos = self.node.getObject('childF/child').findData('position').value
            refpos = [transformF(T,restpos) for item in pos]
            error = compare(refpos,pos)
            if error>ERRORTOL :
                print "\t"+"\033[91m"+"[FAILED]"+"\033[0m"+" error on F= "+str(error)
            else :
                print "\t"+"\033[92m"+"[OK]"+"\033[0m"+" error on F= "+str(error)
            self.done=1
        return 0
| FabienPean/sofa | applications/plugins/Flexible/examples/patch_test/FEM.py | Python | lgpl-2.1 | 6,005 |
/****************************************************************************
**
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
****************************************************************************/
/*
* Expected: 'i_first' 'c_first'
* Not expected: 'i_second' 'c_second' 'f_second'
*/
// Candidate struct for the completion test: its members ('i_first',
// 'c_first') are the ones expected in the proposals for 's.' below.
typedef struct {
    int i_first;
    char c_first;
} S1;
// Decoy struct: its members ('i_second', 'c_second', 'f_second') must NOT
// appear among the completion proposals (see the header comment).
typedef struct {
    int i_second;
    char c_second;
    float f_second;
} S2;
// Test body: completion is requested at the '<<<<' marker, which the test
// driver substitutes for the cursor position -- it is intentionally not
// valid C++, so this file is never meant to compile.
void foo()
{
    S1 s;
    s.<<<<;
}
| maui-packages/qt-creator | src/plugins/clangcodemodel/test/cxx_regression_2.cpp | C++ | lgpl-2.1 | 1,722 |
/****************************************************************************
**
** Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://www.qt.io/licensing. For further information
** use the contact form at http://www.qt.io/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 or version 3 as published by the Free
** Software Foundation and appearing in the file LICENSE.LGPLv21 and
** LICENSE.LGPLv3 included in the packaging of this file. Please review the
** following information to ensure the GNU Lesser General Public License
** requirements will be met: https://www.gnu.org/licenses/lgpl.html and
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
****************************************************************************/
#include "qmlconsoleview.h"
#include "qmlconsoleitemdelegate.h"
#include "qmlconsoleitemmodel.h"
#include <coreplugin/editormanager/editormanager.h>
#include <coreplugin/manhattanstyle.h>
#include <utils/hostosinfo.h>
#include <QMouseEvent>
#include <QPainter>
#include <QApplication>
#include <QClipboard>
#include <QAbstractProxyModel>
#include <QFileInfo>
#include <QScrollBar>
#include <QStyleFactory>
#include <QString>
#include <QUrl>
using namespace QmlJS;
namespace QmlJSTools {
namespace Internal {
// Style wrapper that disables the item-view row panel and the
// decoration-selected look, deferring everything else to ManhattanStyle.
class QmlConsoleViewStyle : public ManhattanStyle
{
public:
    QmlConsoleViewStyle(const QString &baseStyleName) : ManhattanStyle(baseStyleName) {}

    // Skip the per-row panel primitive entirely; forward all other primitives.
    void drawPrimitive(PrimitiveElement element, const QStyleOption *option, QPainter *painter,
                       const QWidget *widget = 0) const
    {
        if (element == QStyle::PE_PanelItemViewRow)
            return;
        ManhattanStyle::drawPrimitive(element, option, painter, widget);
    }

    // Answer 0 for SH_ItemView_ShowDecorationSelected, delegate the rest.
    int styleHint(StyleHint hint, const QStyleOption *option = 0, const QWidget *widget = 0,
                  QStyleHintReturn *returnData = 0) const {
        return (hint == SH_ItemView_ShowDecorationSelected)
                ? 0
                : ManhattanStyle::styleHint(hint, option, widget, returnData);
    }
};
///////////////////////////////////////////////////////////////////////
//
// QmlConsoleView
//
///////////////////////////////////////////////////////////////////////
// Constructs the console tree view: frameless, flat rows with uniform
// height, always-editable cells, custom branch-less styling, and per-pixel
// vertical scrolling. Activation opens the item's source location.
QmlConsoleView::QmlConsoleView(QWidget *parent) :
    Utils::TreeView(parent)
{
    setFrameStyle(QFrame::NoFrame);
    setHeaderHidden(true);
    setRootIsDecorated(false);
    setUniformRowHeights(true);
    setEditTriggers(QAbstractItemView::AllEditTriggers);
    // Suppress every built-in branch decoration; rows draw their own
    // expand/collapse icon (see mousePressEvent).
    setStyleSheet(QLatin1String("QTreeView::branch:has-siblings:!adjoins-item {"
                                "border-image: none;"
                                "image: none; }"
                                "QTreeView::branch:has-siblings:adjoins-item {"
                                "border-image: none;"
                                "image: none; }"
                                "QTreeView::branch:!has-children:!has-siblings:adjoins-item {"
                                "border-image: none;"
                                "image: none; }"
                                "QTreeView::branch:has-children:!has-siblings:closed,"
                                "QTreeView::branch:closed:has-children:has-siblings {"
                                "border-image: none;"
                                "image: none; }"
                                "QTreeView::branch:open:has-children:!has-siblings,"
                                "QTreeView::branch:open:has-children:has-siblings {"
                                "border-image: none;"
                                "image: none; }"));
    // Pick a sane base style name for the ManhattanStyle wrapper.
    QString baseName = QApplication::style()->objectName();
    if (Utils::HostOsInfo::isAnyUnixHost() && !Utils::HostOsInfo::isMacHost()
            && baseName == QLatin1String("windows")) {
        // Sometimes we get the standard windows 95 style as a fallback
        if (QStyleFactory::keys().contains(QLatin1String("Fusion"))) {
            baseName = QLatin1String("fusion"); // Qt5
        } else { // Qt4
            // e.g. if we are running on a KDE4 desktop
            QByteArray desktopEnvironment = qgetenv("DESKTOP_SESSION");
            if (desktopEnvironment == "kde")
                baseName = QLatin1String("plastique");
            else
                baseName = QLatin1String("cleanlooks");
        }
    }
    QmlConsoleViewStyle *style = new QmlConsoleViewStyle(baseName);
    setStyle(style);
    // Parent the style to the view so it is deleted along with it.
    style->setParent(this);
    setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
    setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOn);
    setVerticalScrollMode(QAbstractItemView::ScrollPerPixel);
    horizontalScrollBar()->setSingleStep(20);
    verticalScrollBar()->setSingleStep(20);
    connect(this, SIGNAL(activated(QModelIndex)), SLOT(onRowActivated(QModelIndex)));
}
void QmlConsoleView::onScrollToBottom()
{
// Keep scrolling to bottom if scroll bar is not at maximum()
if (verticalScrollBar()->value() != verticalScrollBar()->maximum())
scrollToBottom();
}
void QmlConsoleView::mousePressEvent(QMouseEvent *event)
{
    const QPoint clickPos = event->pos();
    const QModelIndex clickedIndex = indexAt(clickPos);

    if (!clickedIndex.isValid()) {
        // Clicking empty space moves the selection to the last (prompt) row.
        selectionModel()->setCurrentIndex(model()->index(model()->rowCount() - 1, 0),
                                          QItemSelectionModel::ClearAndSelect);
        return;
    }

    const ConsoleItem::ItemType itemType = (ConsoleItem::ItemType)clickedIndex.data(
                QmlConsoleItemModel::TypeRole).toInt();
    bool toggledExpansion = false;
    if (itemType == ConsoleItem::UndefinedType) {
        // Only top-level rows show a type icon; the layout depends on it.
        const bool showTypeIcon = clickedIndex.parent() == QModelIndex();
        ConsoleItemPositions positions(visualRect(clickedIndex), viewOptions().font,
                                       showTypeIcon, true);
        if (positions.expandCollapseIcon().contains(clickPos)) {
            // A hit on the expand/collapse icon flips the expansion state.
            setExpanded(clickedIndex, !isExpanded(clickedIndex));
            toggledExpansion = true;
        }
    }
    if (!toggledExpansion)
        Utils::TreeView::mousePressEvent(event);
}
void QmlConsoleView::resizeEvent(QResizeEvent *e)
{
    // Row heights depend on the viewport width, so tell the delegate the
    // size hint of the current row changed before the base class relayouts.
    QmlConsoleItemDelegate *delegate =
            static_cast<QmlConsoleItemDelegate *>(itemDelegate());
    delegate->emitSizeHintChanged(selectionModel()->currentIndex());
    Utils::TreeView::resizeEvent(e);
}
void QmlConsoleView::drawBranches(QPainter *painter, const QRect &rect,
                                  const QModelIndex &index) const
{
    // Paint the row background underneath the branch area as well, then let
    // the base implementation draw whatever branch content remains.
    QmlConsoleItemDelegate *delegate =
            static_cast<QmlConsoleItemDelegate *>(itemDelegate());
    delegate->drawBackground(painter, rect, index, false);
    Utils::TreeView::drawBranches(painter, rect, index);
}
// Context menu offering Copy, Show-in-Editor and Clear for the clicked row.
void QmlConsoleView::contextMenuEvent(QContextMenuEvent *event)
{
    QModelIndex itemIndex = indexAt(event->pos());
    QMenu menu;

    QAction *copy = new QAction(tr("&Copy"), this);
    copy->setEnabled(itemIndex.isValid());
    menu.addAction(copy);
    QAction *show = new QAction(tr("&Show in Editor"), this);
    show->setEnabled(canShowItemInTextEditor(itemIndex));
    menu.addAction(show);
    menu.addSeparator();
    QAction *clear = new QAction(tr("C&lear"), this);
    menu.addAction(clear);

    QAction *a = menu.exec(event->globalPos());
    if (a == 0)
        return;

    if (a == copy) {
        copyToClipboard(itemIndex);
    } else if (a == show) {
        onRowActivated(itemIndex);
    } else if (a == clear) {
        // Bug fix: the original dereferenced both qobject_cast results
        // without null checks. The view is expected to be wired to a proxy
        // over QmlConsoleItemModel; if it is not, degrade to a no-op instead
        // of crashing on a null pointer.
        QAbstractProxyModel *proxyModel = qobject_cast<QAbstractProxyModel *>(model());
        if (!proxyModel)
            return;
        QmlConsoleItemModel *handler = qobject_cast<QmlConsoleItemModel *>(
                    proxyModel->sourceModel());
        if (handler)
            handler->clear();
    }
}
void QmlConsoleView::onRowActivated(const QModelIndex &index)
{
if (!index.isValid())
return;
// See if we have file and line Info
QString filePath = model()->data(index, QmlConsoleItemModel::FileRole).toString();
const QUrl fileUrl = QUrl(filePath);
if (fileUrl.isLocalFile())
filePath = fileUrl.toLocalFile();
if (!filePath.isEmpty()) {
QFileInfo fi(filePath);
if (fi.exists() && fi.isFile() && fi.isReadable()) {
int line = model()->data(index, QmlConsoleItemModel::LineRole).toInt();
Core::EditorManager::openEditorAt(fi.canonicalFilePath(), line);
}
}
}
void QmlConsoleView::copyToClipboard(const QModelIndex &index)
{
if (!index.isValid())
return;
QString contents = model()->data(index, QmlConsoleItemModel::ExpressionRole).toString();
// See if we have file and line Info
QString filePath = model()->data(index, QmlConsoleItemModel::FileRole).toString();
const QUrl fileUrl = QUrl(filePath);
if (fileUrl.isLocalFile())
filePath = fileUrl.toLocalFile();
if (!filePath.isEmpty()) {
contents = QString::fromLatin1("%1 %2: %3").arg(contents).arg(filePath).arg(
model()->data(index, QmlConsoleItemModel::LineRole).toString());
}
QClipboard *cb = QApplication::clipboard();
cb->setText(contents);
}
bool QmlConsoleView::canShowItemInTextEditor(const QModelIndex &index)
{
if (!index.isValid())
return false;
// See if we have file and line Info
QString filePath = model()->data(index, QmlConsoleItemModel::FileRole).toString();
const QUrl fileUrl = QUrl(filePath);
if (fileUrl.isLocalFile())
filePath = fileUrl.toLocalFile();
if (!filePath.isEmpty()) {
QFileInfo fi(filePath);
if (fi.exists() && fi.isFile() && fi.isReadable())
return true;
}
return false;
}
} // Internal
} // QmlJSTools
| AltarBeastiful/qt-creator | src/plugins/qmljstools/qmlconsoleview.cpp | C++ | lgpl-2.1 | 10,425 |
/********************************************************************************/
/* Projeto: Biblioteca ZeusDFe */
/* Biblioteca C# para auxiliar no desenvolvimento das demais bibliotecas DFe */
/* */
/* */
/* Direitos Autorais Reservados (c) 2014 Adenilton Batista da Silva */
/* Zeusdev Tecnologia LTDA ME */
/* */
/* Você pode obter a última versão desse arquivo no GitHub */
/* localizado em https://github.com/adeniltonbs/Zeus.Net.NFe.NFCe */
/* */
/* */
/* Esta biblioteca é software livre; você pode redistribuí-la e/ou modificá-la */
/* sob os termos da Licença Pública Geral Menor do GNU conforme publicada pela */
/* Free Software Foundation; tanto a versão 2.1 da Licença, ou (a seu critério) */
/* qualquer versão posterior. */
/* */
/* Esta biblioteca é distribuída na expectativa de que seja útil, porém, SEM */
/* NENHUMA GARANTIA; nem mesmo a garantia implícita de COMERCIABILIDADE OU */
/* ADEQUAÇÃO A UMA FINALIDADE ESPECÍFICA. Consulte a Licença Pública Geral Menor*/
/* do GNU para mais detalhes. (Arquivo LICENÇA.TXT ou LICENSE.TXT) */
/* */
/* Você deve ter recebido uma cópia da Licença Pública Geral Menor do GNU junto*/
/* com esta biblioteca; se não, escreva para a Free Software Foundation, Inc., */
/* no endereço 59 Temple Street, Suite 330, Boston, MA 02111-1307 USA. */
/* Você também pode obter uma copia da licença em: */
/* http://www.opensource.org/licenses/lgpl-license.php */
/* */
/* Zeusdev Tecnologia LTDA ME - adenilton@zeusautomacao.com.br */
/* http://www.zeusautomacao.com.br/ */
/* Rua Comendador Francisco josé da Cunha, 111 - Itabaiana - SE - 49500-000 */
/********************************************************************************/
using System.Xml.Serialization;
namespace DFe.Classes.Assinatura
{
/// <summary>
/// Represents the ds:Transform element of an XML digital signature, i.e. a
/// transformation applied to the content before it is digested.
/// </summary>
public class Transform
{
    /// <summary>
    /// XS13 - Valid Algorithm attribute values for Transform:
    /// <para>http://www.w3.org/TR/2001/REC-xml-c14n-20010315</para>
    /// <para>http://www.w3.org/2000/09/xmldsig#enveloped-signature</para>
    /// </summary>
    [XmlAttribute]
    public string Algorithm { get; set; }
}
} | adrbarros/Zeus.Net.NFe.NFCe | Shared.DFe.Classes/Assinatura/Transform.cs | C# | lgpl-2.1 | 3,142 |
/****************************************************************************
**
** Copyright (C) 2012 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the QtXmlPatterns module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include "qnamepool_p.h"
#include "qdelegatingnamespaceresolver_p.h"
QT_BEGIN_NAMESPACE
using namespace QPatternist;
/* Wraps an existing resolver; until overrides are added every lookup falls
 * through to it. The resolver must be non-null. */
DelegatingNamespaceResolver::DelegatingNamespaceResolver(const NamespaceResolver::Ptr &resolver) : m_nsResolver(resolver)
{
    Q_ASSERT(m_nsResolver);
}
/* Wraps @p ns and seeds the local override table with @p overrides; entries
 * in @p overrides shadow the wrapped resolver's bindings. */
DelegatingNamespaceResolver::DelegatingNamespaceResolver(const NamespaceResolver::Ptr &ns,
                                                         const Bindings &overrides) : m_nsResolver(ns)
                                                                                    , m_bindings(overrides)
{
    Q_ASSERT(m_nsResolver);
}
QXmlName::NamespaceCode DelegatingNamespaceResolver::lookupNamespaceURI(const QXmlName::PrefixCode prefix) const
{
    /* Local overrides win; only when the prefix has no override do we fall
     * back to the wrapped resolver. */
    const QXmlName::NamespaceCode overridden(m_bindings.value(prefix, NoBinding));

    return overridden == NoBinding ? m_nsResolver->lookupNamespaceURI(prefix)
                                   : overridden;
}
NamespaceResolver::Bindings DelegatingNamespaceResolver::bindings() const
{
    /* Start from the delegate's bindings, then overlay the local overrides
     * so they take precedence. */
    Bindings merged(m_nsResolver->bindings());

    for (Bindings::const_iterator it = m_bindings.constBegin(),
                                  stop = m_bindings.constEnd(); it != stop; ++it)
        merged.insert(it.key(), it.value());

    return merged;
}
void DelegatingNamespaceResolver::addBinding(const QXmlName nb)
{
    /* Undeclaring a prefix drops its local override; any other binding is
     * recorded (replacing a previous override for the same prefix). */
    const bool undeclares = (nb.namespaceURI() == StandardNamespaces::UndeclarePrefix);

    if (undeclares)
        m_bindings.remove(nb.prefix());
    else
        m_bindings.insert(nb.prefix(), nb.namespaceURI());
}
QT_END_NAMESPACE
| RLovelett/qt | src/xmlpatterns/utils/qdelegatingnamespaceresolver.cpp | C++ | lgpl-2.1 | 3,480 |
#ifndef BOOST_CORE_LIGHTWEIGHT_TEST_TRAIT_HPP
#define BOOST_CORE_LIGHTWEIGHT_TEST_TRAIT_HPP
// MS compatible compilers support #pragma once
#if defined(_MSC_VER)
# pragma once
#endif
// boost/core/lightweight_test_trait.hpp
//
// BOOST_TEST_TRAIT_TRUE, BOOST_TEST_TRAIT_FALSE, BOOST_TEST_TRAIT_SAME
//
// Copyright 2014, 2021 Peter Dimov
//
// Copyright 2019 Glen Joseph Fernandes
// (glenjofe@gmail.com)
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
#include <boost/core/lightweight_test.hpp>
#include <boost/core/type_name.hpp>
#include <boost/core/is_same.hpp>
#include <boost/config.hpp>
namespace boost
{
namespace detail
{
// Records a pass or a failure for BOOST_TEST_TRAIT_TRUE/FALSE: T::value is
// compared against the expected boolean; mismatches print a diagnostic and
// bump the error counter.
template< class T > inline void test_trait_impl( char const * trait, void (*)( T ),
    bool expected, char const * file, int line, char const * function )
{
    bool const matched = ( T::value == expected );

    if( !matched )
    {
        BOOST_LIGHTWEIGHT_TEST_OSTREAM
            << file << "(" << line << "): predicate '" << trait << "' ["
            << boost::core::type_name<T>() << "]"
            << " test failed in function '" << function
            << "' (should have been " << ( expected? "true": "false" ) << ")"
            << std::endl;

        ++test_results().errors();
    }
    else
    {
        // Touch the results object so a passing check still counts as a run.
        test_results();
    }
}
// Helper that takes an is_same<T1, T2> by value (forcing instantiation) and
// returns its boolean value.
template<class T> inline bool test_trait_same_impl_( T )
{
    return T::value;
}
// Backs BOOST_TEST_TRAIT_SAME: when T1 and T2 differ, prints both type names
// and increments the error counter; otherwise records a passing run.
template<class T1, class T2> inline void test_trait_same_impl( char const * types,
    boost::core::is_same<T1, T2> same, char const * file, int line, char const * function )
{
    if( !test_trait_same_impl_( same ) )
    {
        BOOST_LIGHTWEIGHT_TEST_OSTREAM
            << file << "(" << line << "): test 'is_same<" << types << ">'"
            << " failed in function '" << function
            << "' ('" << boost::core::type_name<T1>()
            << "' != '" << boost::core::type_name<T2>() << "')"
            << std::endl;

        ++test_results().errors();
    }
    else
    {
        test_results();
    }
}
} // namespace detail
} // namespace boost
#define BOOST_TEST_TRAIT_TRUE(type) ( ::boost::detail::test_trait_impl(#type, (void(*)type)0, true, __FILE__, __LINE__, BOOST_CURRENT_FUNCTION) )
#define BOOST_TEST_TRAIT_FALSE(type) ( ::boost::detail::test_trait_impl(#type, (void(*)type)0, false, __FILE__, __LINE__, BOOST_CURRENT_FUNCTION) )
#if defined(__GNUC__)
// ignoring -Wvariadic-macros with #pragma doesn't work under GCC
# pragma GCC system_header
#endif
#define BOOST_TEST_TRAIT_SAME(...) ( ::boost::detail::test_trait_same_impl(#__VA_ARGS__, ::boost::core::is_same<__VA_ARGS__>(), __FILE__, __LINE__, BOOST_CURRENT_FUNCTION) )
#endif // #ifndef BOOST_CORE_LIGHTWEIGHT_TEST_TRAIT_HPP
| qianqians/abelkhan | cpp_component/3rdparty/boost/boost/core/lightweight_test_trait.hpp | C++ | lgpl-2.1 | 2,747 |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Utilities;
namespace Newtonsoft.Json.Schema
{
internal class JsonSchemaWriter
{
    // Destination for the generated schema JSON.
    private readonly JsonWriter _writer;
    // Tracks known schemas so repeated schemas are emitted as references by id.
    private readonly JsonSchemaResolver _resolver;

    /// <summary>
    /// Initializes a new instance of the <see cref="JsonSchemaWriter"/> class.
    /// </summary>
    /// <param name="writer">The <see cref="JsonWriter"/> the schema is written to. Must not be null.</param>
    /// <param name="resolver">Resolver used to reference already-loaded schemas by id. Must not be null.</param>
    public JsonSchemaWriter(JsonWriter writer, JsonSchemaResolver resolver)
    {
        ValidationUtils.ArgumentNotNull(writer, "writer");
        // Robustness: the resolver is dereferenced unconditionally later, so
        // validate it up front instead of failing with a NullReferenceException.
        ValidationUtils.ArgumentNotNull(resolver, "resolver");
        _writer = writer;
        _resolver = resolver;
    }

    // Emits {"$ref": <id>} when the schema is resolvable by id, otherwise
    // writes the full schema inline.
    private void ReferenceOrWriteSchema(JsonSchema schema)
    {
        if (schema.Id != null && _resolver.GetSchema(schema.Id) != null)
        {
            _writer.WriteStartObject();
            _writer.WritePropertyName(JsonSchemaConstants.ReferencePropertyName);
            _writer.WriteValue(schema.Id);
            _writer.WriteEndObject();
        }
        else
        {
            WriteSchema(schema);
        }
    }

    /// <summary>
    /// Writes the given schema as a JSON object to the underlying writer.
    /// </summary>
    /// <param name="schema">The <see cref="JsonSchema"/> to write. Must not be null.</param>
    public void WriteSchema(JsonSchema schema)
    {
        ValidationUtils.ArgumentNotNull(schema, "schema");

        // Register the schema so nested occurrences can be written as references.
        if (!_resolver.LoadedSchemas.Contains(schema))
            _resolver.LoadedSchemas.Add(schema);

        _writer.WriteStartObject();
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.IdPropertyName, schema.Id);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.TitlePropertyName, schema.Title);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.DescriptionPropertyName, schema.Description);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.OptionalPropertyName, schema.Optional);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.ReadOnlyPropertyName, schema.ReadOnly);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.HiddenPropertyName, schema.Hidden);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.TransientPropertyName, schema.Transient);

        if (schema.Type != null)
            WriteType(JsonSchemaConstants.TypePropertyName, _writer, schema.Type.Value);

        // The additional-properties member is either the boolean false or a
        // nested schema describing the allowed additional properties.
        if (!schema.AllowAdditionalProperties)
        {
            _writer.WritePropertyName(JsonSchemaConstants.AdditionalPropertiesPropertyName);
            _writer.WriteValue(schema.AllowAdditionalProperties);
        }
        else
        {
            if (schema.AdditionalProperties != null)
            {
                _writer.WritePropertyName(JsonSchemaConstants.AdditionalPropertiesPropertyName);
                ReferenceOrWriteSchema(schema.AdditionalProperties);
            }
        }

        if (schema.Properties != null)
        {
            _writer.WritePropertyName(JsonSchemaConstants.PropertiesPropertyName);
            _writer.WriteStartObject();
            foreach (KeyValuePair<string, JsonSchema> property in schema.Properties)
            {
                _writer.WritePropertyName(property.Key);
                ReferenceOrWriteSchema(property.Value);
            }
            _writer.WriteEndObject();
        }

        WriteItems(schema);

        WritePropertyIfNotNull(_writer, JsonSchemaConstants.MinimumPropertyName, schema.Minimum);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.MaximumPropertyName, schema.Maximum);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.MinimumLengthPropertyName, schema.MinimumLength);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.MaximumLengthPropertyName, schema.MaximumLength);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.MinimumItemsPropertyName, schema.MinimumItems);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.MaximumItemsPropertyName, schema.MaximumItems);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.MaximumDecimalsPropertyName, schema.MaximumDecimals);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.FormatPropertyName, schema.Format);
        WritePropertyIfNotNull(_writer, JsonSchemaConstants.PatternPropertyName, schema.Pattern);

        if (schema.Enum != null)
        {
            _writer.WritePropertyName(JsonSchemaConstants.EnumPropertyName);
            _writer.WriteStartArray();
            foreach (JToken token in schema.Enum)
            {
                token.WriteTo(_writer);
            }
            _writer.WriteEndArray();
        }

        if (schema.Default != null)
        {
            _writer.WritePropertyName(JsonSchemaConstants.DefaultPropertyName);
            schema.Default.WriteTo(_writer);
        }

        if (schema.Options != null)
        {
            _writer.WritePropertyName(JsonSchemaConstants.OptionsPropertyName);
            _writer.WriteStartArray();
            foreach (KeyValuePair<JToken, string> option in schema.Options)
            {
                _writer.WriteStartObject();
                _writer.WritePropertyName(JsonSchemaConstants.OptionValuePropertyName);
                option.Key.WriteTo(_writer);
                if (option.Value != null)
                {
                    // Bug fix: the label used to be written under the *value*
                    // property name, producing a duplicate "value" key inside
                    // the option object. The label belongs under the label
                    // property name.
                    _writer.WritePropertyName(JsonSchemaConstants.OptionLabelPropertyName);
                    _writer.WriteValue(option.Value);
                }
                _writer.WriteEndObject();
            }
            _writer.WriteEndArray();
        }

        if (schema.Disallow != null)
            WriteType(JsonSchemaConstants.DisallowPropertyName, _writer, schema.Disallow.Value);

        if (schema.Extends != null)
        {
            _writer.WritePropertyName(JsonSchemaConstants.ExtendsPropertyName);
            ReferenceOrWriteSchema(schema.Extends);
        }

        _writer.WriteEndObject();
    }

    // Writes the items member: a single schema when there is exactly one item
    // schema, otherwise an array of schemas (tuple form).
    private void WriteItems(JsonSchema schema)
    {
        if (CollectionUtils.IsNullOrEmpty(schema.Items))
            return;

        _writer.WritePropertyName(JsonSchemaConstants.ItemsPropertyName);

        if (schema.Items.Count == 1)
        {
            ReferenceOrWriteSchema(schema.Items[0]);
            return;
        }

        _writer.WriteStartArray();
        foreach (JsonSchema itemSchema in schema.Items)
        {
            ReferenceOrWriteSchema(itemSchema);
        }
        _writer.WriteEndArray();
    }

    // Writes a schema type property as a single type name, or as an array of
    // type names when the value is a flags combination.
    private void WriteType(string propertyName, JsonWriter writer, JsonSchemaType type)
    {
        IList<JsonSchemaType> types;
        if (System.Enum.IsDefined(typeof(JsonSchemaType), type))
            types = new List<JsonSchemaType> { type };
        else
            types = EnumUtils.GetFlagsValues(type).Where(v => v != JsonSchemaType.None).ToList();

        if (types.Count == 0)
            return;

        writer.WritePropertyName(propertyName);

        if (types.Count == 1)
        {
            writer.WriteValue(JsonSchemaBuilder.MapType(types[0]));
            return;
        }

        writer.WriteStartArray();
        foreach (JsonSchemaType jsonSchemaType in types)
        {
            writer.WriteValue(JsonSchemaBuilder.MapType(jsonSchemaType));
        }
        writer.WriteEndArray();
    }

    // Writes "propertyName: value" only when the value is present.
    private void WritePropertyIfNotNull(JsonWriter writer, string propertyName, object value)
    {
        if (value != null)
        {
            writer.WritePropertyName(propertyName);
            writer.WriteValue(value);
        }
    }
}
| consumentor/Server | trunk/tools/Json.Net/Source/Src/Newtonsoft.Json/Schema/JsonSchemaWriter.cs | C# | lgpl-3.0 | 8,106 |
/**
* This file is part of FNLP (formerly FudanNLP).
*
* FNLP is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FNLP is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with FudanNLP. If not, see <http://www.gnu.org/licenses/>.
*
* Copyright 2009-2014 www.fnlp.org. All rights reserved.
*/
package org.fnlp.util.exception;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
 * Exception raised when loading a serialized FNLP model file fails.
 *
 * Wraps the underlying I/O / deserialization failure and prints a
 * human-readable (Chinese) diagnostic describing the failure category.
 */
public class LoadModelException extends Exception {
    private static final long serialVersionUID = -3933859344026018386L;

    /**
     * Wraps a lower-level failure encountered while reading a model file.
     *
     * @param e    the underlying cause; the diagnostic printed depends on its
     *             type (file missing / class-version mismatch / read error)
     * @param file path of the model file whose loading failed
     */
    public LoadModelException(Exception e, String file) {
        super(e);
        if( e instanceof FileNotFoundException) {
            System.out.println("模型文件不存在: "+ file);
        } else if (e instanceof ClassNotFoundException) {
            System.out.println("模型文件版本错误。");
        } else if (e instanceof IOException) {
            System.out.println("模型文件读入错误: "+file);
        }
        e.printStackTrace();
    }

    /**
     * Creates the exception with a plain message and immediately prints its
     * own stack trace.
     *
     * @param msg description of the load failure
     */
    public LoadModelException(String msg) {
        super(msg);
        printStackTrace();
    }
}
//
// Copyright (C) 2004 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
// System.Runtime.InteropServices/DESCKIND.cs
//
// Paolo Molaro (lupus@ximian.com)
//
// (C) 2002 Ximian, Inc.
using System;
namespace System.Runtime.InteropServices
{
/// <summary>
/// Managed mirror of the COM <c>DESCKIND</c> enumeration, which identifies
/// the kind of type description bound by <c>ITypeComp::Bind</c>.
/// </summary>
[Obsolete]
[Serializable]
public enum DESCKIND {
    DESCKIND_NONE = 0,           // no match found
    DESCKIND_FUNCDESC = 1,       // a function description was bound
    DESCKIND_VARDESC = 2,        // a variable description was bound
    DESCKIND_TYPECOMP = 3,       // a type-comparison interface was bound
    DESCKIND_IMPLICITAPPOBJ = 4, // an implicit application object was bound
    DESCKIND_MAX = 5             // end-of-enumeration marker
}
}
| edwinspire/VSharp | v#/corlib/System.Runtime.InteropServices/DESCKIND.cs | C# | lgpl-3.0 | 1,520 |
namespace StockSharp.Algo.Export
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml;
using Ecng.Common;
using StockSharp.BusinessEntities;
using StockSharp.Messages;
/// <summary>
/// Export to xml.
/// </summary>
public class XmlExporter : BaseExporter
{
private const string _timeFormat = "yyyy-MM-dd HH:mm:ss.fff zzz";
/// <summary>
/// Create <see cref="XmlExporter"/>.
/// </summary>
/// <param name="security">Security (instrument) whose data is exported.</param>
/// <param name="arg">Data parameter (for example the execution type).</param>
/// <param name="isCancelled">Handler returning whether the export has been interrupted.</param>
/// <param name="fileName">Path to the output file.</param>
public XmlExporter(Security security, object arg, Func<int, bool> isCancelled, string fileName)
    : base(security, arg, isCancelled, fileName)
{
}
/// <summary>
/// Export <see cref="ExecutionMessage"/>.
/// </summary>
/// <param name="messages">Messages.</param>
protected override void Export(IEnumerable<ExecutionMessage> messages)
{
switch ((ExecutionTypes)Arg)
{
case ExecutionTypes.Tick:
{
Do(messages, "trades", (writer, trade) =>
{
writer.WriteStartElement("trade");
writer.WriteAttribute("id", trade.TradeId == null ? trade.TradeStringId : trade.TradeId.To<string>());
writer.WriteAttribute("serverTime", trade.ServerTime.ToString(_timeFormat));
writer.WriteAttribute("localTime", trade.LocalTime.ToString(_timeFormat));
writer.WriteAttribute("price", trade.TradePrice);
writer.WriteAttribute("volume", trade.Volume);
if (trade.OriginSide != null)
writer.WriteAttribute("originSide", trade.OriginSide.Value);
if (trade.OpenInterest != null)
writer.WriteAttribute("openInterest", trade.OpenInterest.Value);
if (trade.IsUpTick != null)
writer.WriteAttribute("isUpTick", trade.IsUpTick.Value);
writer.WriteEndElement();
});
break;
}
case ExecutionTypes.OrderLog:
{
Do(messages, "orderLog", (writer, item) =>
{
writer.WriteStartElement("item");
writer.WriteAttribute("id", item.OrderId == null ? item.OrderStringId : item.OrderId.To<string>());
writer.WriteAttribute("serverTime", item.ServerTime.ToString(_timeFormat));
writer.WriteAttribute("localTime", item.LocalTime.ToString(_timeFormat));
writer.WriteAttribute("price", item.Price);
writer.WriteAttribute("volume", item.Volume);
writer.WriteAttribute("side", item.Side);
writer.WriteAttribute("state", item.OrderState);
writer.WriteAttribute("timeInForce", item.TimeInForce);
writer.WriteAttribute("isSystem", item.IsSystem);
if (item.TradePrice != null)
{
writer.WriteAttribute("tradeId", item.TradeId == null ? item.TradeStringId : item.TradeId.To<string>());
writer.WriteAttribute("tradePrice", item.TradePrice);
if (item.OpenInterest != null)
writer.WriteAttribute("openInterest", item.OpenInterest.Value);
}
writer.WriteEndElement();
});
break;
}
case ExecutionTypes.Order:
case ExecutionTypes.Trade:
{
Do(messages, "executions", (writer, item) =>
{
writer.WriteStartElement("item");
writer.WriteAttribute("serverTime", item.ServerTime.ToString(_timeFormat));
writer.WriteAttribute("localTime", item.LocalTime.ToString(_timeFormat));
writer.WriteAttribute("portfolio", item.PortfolioName);
writer.WriteAttribute("transactionId", item.TransactionId);
writer.WriteAttribute("id", item.OrderId == null ? item.OrderStringId : item.OrderId.To<string>());
writer.WriteAttribute("price", item.Price);
writer.WriteAttribute("volume", item.Volume);
writer.WriteAttribute("balance", item.Balance);
writer.WriteAttribute("side", item.Side);
writer.WriteAttribute("type", item.OrderType);
writer.WriteAttribute("state", item.OrderState);
writer.WriteAttribute("tradeId", item.TradeId == null ? item.TradeStringId : item.TradeId.To<string>());
writer.WriteAttribute("tradePrice", item.TradePrice);
writer.WriteEndElement();
});
break;
}
default:
throw new ArgumentOutOfRangeException();
}
}
/// <summary>
/// Ýêñïîðòèðîâàòü <see cref="QuoteChangeMessage"/>.
/// </summary>
/// <param name="messages">Ñîîáùåíèÿ.</param>
protected override void Export(IEnumerable<QuoteChangeMessage> messages)
{
Do(messages, "depths", (writer, depth) =>
{
writer.WriteStartElement("depth");
writer.WriteAttribute("serverTime", depth.ServerTime.ToString(_timeFormat));
writer.WriteAttribute("localTime", depth.LocalTime.ToString(_timeFormat));
foreach (var quote in depth.Bids.Concat(depth.Asks).OrderByDescending(q => q.Price))
{
writer.WriteStartElement("quote");
writer.WriteAttribute("price", quote.Price);
writer.WriteAttribute("volume", quote.Volume);
writer.WriteAttribute("side", quote.Side);
writer.WriteEndElement();
}
writer.WriteEndElement();
});
}
/// <summary>
/// Ýêñïîðòèðîâàòü <see cref="Level1ChangeMessage"/>.
/// </summary>
/// <param name="messages">Ñîîáùåíèÿ.</param>
protected override void Export(IEnumerable<Level1ChangeMessage> messages)
{
Do(messages, "messages", (writer, message) =>
{
writer.WriteStartElement("message");
writer.WriteAttribute("serverTime", message.ServerTime.ToString(_timeFormat));
writer.WriteAttribute("localTime", message.LocalTime.ToString(_timeFormat));
foreach (var pair in message.Changes)
writer.WriteAttribute(pair.Key.ToString(), pair.Value is DateTime ? ((DateTime)pair.Value).ToString(_timeFormat) : pair.Value);
writer.WriteEndElement();
});
}
/// <summary>
/// Ýêñïîðòèðîâàòü <see cref="CandleMessage"/>.
/// </summary>
/// <param name="messages">Ñîîáùåíèÿ.</param>
protected override void Export(IEnumerable<CandleMessage> messages)
{
Do(messages, "candles", (writer, candle) =>
{
writer.WriteStartElement("candle");
writer.WriteAttribute("openTime", candle.OpenTime.ToString(_timeFormat));
writer.WriteAttribute("closeTime", candle.CloseTime.ToString(_timeFormat));
writer.WriteAttribute("O", candle.OpenPrice);
writer.WriteAttribute("H", candle.HighPrice);
writer.WriteAttribute("L", candle.LowPrice);
writer.WriteAttribute("C", candle.ClosePrice);
writer.WriteAttribute("V", candle.TotalVolume);
if (candle.OpenInterest != null)
writer.WriteAttribute("openInterest", candle.OpenInterest.Value);
writer.WriteEndElement();
});
}
/// <summary>
/// Ýêñïîðòèðîâàòü <see cref="NewsMessage"/>.
/// </summary>
/// <param name="messages">Ñîîáùåíèÿ.</param>
protected override void Export(IEnumerable<NewsMessage> messages)
{
Do(messages, "news", (writer, n) =>
{
writer.WriteStartElement("item");
if (!n.Id.IsEmpty())
writer.WriteAttribute("id", n.Id);
writer.WriteAttribute("serverTime", n.ServerTime.ToString(_timeFormat));
writer.WriteAttribute("localTime", n.LocalTime.ToString(_timeFormat));
if (n.SecurityId != null)
writer.WriteAttribute("securityCode", n.SecurityId.Value.SecurityCode);
if (!n.BoardCode.IsEmpty())
writer.WriteAttribute("boardCode", n.BoardCode);
writer.WriteAttribute("headline", n.Headline);
if (!n.Source.IsEmpty())
writer.WriteAttribute("source", n.Source);
if (n.Url != null)
writer.WriteAttribute("board", n.Url);
if (!n.Story.IsEmpty())
writer.WriteCData(n.Story);
writer.WriteEndElement();
});
}
/// <summary>
/// Ýêñïîðòèðîâàòü <see cref="SecurityMessage"/>.
/// </summary>
/// <param name="messages">Ñîîáùåíèÿ.</param>
protected override void Export(IEnumerable<SecurityMessage> messages)
{
Do(messages, "securities", (writer, security) =>
{
writer.WriteStartElement("security");
writer.WriteAttribute("code", security.SecurityId.SecurityCode);
writer.WriteAttribute("board", security.SecurityId.BoardCode);
if (!security.Name.IsEmpty())
writer.WriteAttribute("name", security.Name);
if (!security.ShortName.IsEmpty())
writer.WriteAttribute("shortName", security.ShortName);
if (security.PriceStep != null)
writer.WriteAttribute("priceStep", security.PriceStep.Value);
if (security.VolumeStep != null)
writer.WriteAttribute("volumeStep", security.VolumeStep.Value);
if (security.Multiplier != null)
writer.WriteAttribute("multiplier", security.Multiplier.Value);
if (security.Decimals != null)
writer.WriteAttribute("decimals", security.Decimals.Value);
if (security.Currency != null)
writer.WriteAttribute("currency", security.Currency.Value);
if (security.SecurityType != null)
writer.WriteAttribute("type", security.SecurityType.Value);
if (security.OptionType != null)
writer.WriteAttribute("optionType", security.OptionType.Value);
if (security.Strike != null)
writer.WriteAttribute("strike", security.Strike.Value);
if (!security.BinaryOptionType.IsEmpty())
writer.WriteAttribute("binaryOptionType", security.BinaryOptionType);
if (!security.UnderlyingSecurityCode.IsEmpty())
writer.WriteAttribute("underlyingSecurityCode", security.UnderlyingSecurityCode);
if (security.ExpiryDate != null)
writer.WriteAttribute("expiryDate", security.ExpiryDate.Value.ToString("yyyy-MM-dd"));
if (security.SettlementDate != null)
writer.WriteAttribute("settlementDate", security.SettlementDate.Value.ToString("yyyy-MM-dd"));
if (!security.SecurityId.Bloomberg.IsEmpty())
writer.WriteAttribute("bloomberg", security.SecurityId.Bloomberg);
if (!security.SecurityId.Cusip.IsEmpty())
writer.WriteAttribute("cusip", security.SecurityId.Cusip);
if (!security.SecurityId.IQFeed.IsEmpty())
writer.WriteAttribute("iqfeed", security.SecurityId.IQFeed);
if (security.SecurityId.InteractiveBrokers != null)
writer.WriteAttribute("ib", security.SecurityId.InteractiveBrokers);
if (!security.SecurityId.Isin.IsEmpty())
writer.WriteAttribute("isin", security.SecurityId.Isin);
if (!security.SecurityId.Plaza.IsEmpty())
writer.WriteAttribute("plaza", security.SecurityId.Plaza);
if (!security.SecurityId.Ric.IsEmpty())
writer.WriteAttribute("ric", security.SecurityId.Ric);
if (!security.SecurityId.Sedol.IsEmpty())
writer.WriteAttribute("sedol", security.SecurityId.Sedol);
writer.WriteEndElement();
});
}
private void Do<TValue>(IEnumerable<TValue> values, string rootElem, Action<XmlWriter, TValue> action)
{
using (var writer = XmlWriter.Create(Path, new XmlWriterSettings { Indent = true }))
{
writer.WriteStartElement(rootElem);
foreach (var value in values)
{
if (!CanProcess())
break;
action(writer, value);
}
writer.WriteEndElement();
}
}
}
} | donaldlee2008/StockSharp | Algo/Export/XmlExporter.cs | C# | lgpl-3.0 | 11,023 |
{
"translatorID": "cd587058-6125-4b33-a876-8c6aae48b5e8",
"label": "WHO",
"creator": "Mario Trojan, Philipp Zumstein",
"target": "^http://apps\\.who\\.int/iris/",
"minVersion": "3.0",
"maxVersion": "",
"priority": 100,
"inRepository": true,
"translatorType": 4,
"browserSupport": "gcsibv",
"lastUpdated": "2018-09-02 14:34:27"
}
/*
***** BEGIN LICENSE BLOCK *****
Copyright © 2018 Mario Trojan
This file is part of Zotero.
Zotero is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Zotero is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Zotero. If not, see <http://www.gnu.org/licenses/>.
***** END LICENSE BLOCK *****
*/
// attr()/text() v2
// Query helpers: return the requested attribute / text content of the matched
// node, or null when the selector matches nothing. A truthy `index` selects
// the n-th match instead of the first.
function attr(docOrElem, selector, attrName, index) {
	var node = index
		? docOrElem.querySelectorAll(selector).item(index)
		: docOrElem.querySelector(selector);
	return node ? node.getAttribute(attrName) : null;
}

function text(docOrElem, selector, index) {
	var node = index
		? docOrElem.querySelectorAll(selector).item(index)
		: docOrElem.querySelector(selector);
	return node ? node.textContent : null;
}
// Decide what kind of page we are on: an individual IRIS record (typed by the
// DC.type meta tag) or a listing/search page with multiple results.
function detectWeb(doc, url) {
	if (url.includes("/handle/") && text(doc, 'div.item-summary-view-metadata')) {
		var dcType = attr(doc, 'meta[name="DC.type"]', 'content');
		//Z.debug(dcType);
		if (dcType) {
			if (dcType.includes("articles")) return "journalArticle";
			if (dcType.includes("Book") || dcType.includes("Publications")) return "book";
		}
		// Records without a recognized DC.type default to reports.
		return "report";
	}
	if (getSearchResults(doc, true)) {
		return "multiple";
	}
}
// Collect result links from a listing page as a {url: title} map.
// With checkOnly set, bail out as soon as one usable result is seen.
function getSearchResults(doc, checkOnly) {
	var results = {};
	var anyFound = false;
	var links = doc.querySelectorAll('h4.artifact-title>a');
	for (var j = 0; j < links.length; j++) {
		var url = links[j].href;
		var label = links[j].textContent;
		// Skip entries missing either a link target or visible text.
		if (!url || !label) continue;
		if (checkOnly) return true;
		anyFound = true;
		results[url] = label;
	}
	return anyFound ? results : false;
}
// Entry point: scrape a single record directly, or present a selection
// dialog for listing pages and scrape each chosen record.
function doWeb(doc, url) {
	if (detectWeb(doc, url) != "multiple") {
		scrape(doc, url);
		return;
	}
	Zotero.selectItems(getSearchResults(doc, false), function (selection) {
		if (!selection) {
			return true;
		}
		var chosenUrls = Object.keys(selection);
		ZU.processDocuments(chosenUrls, scrape);
	});
}
// Scrape one record page by delegating to the Embedded Metadata translator,
// then patch up the item it produces (publisher/place split, corporate
// author, page count extracted from DC.description).
function scrape(doc, url) {
	// DSpace emits some meta tags inside <body>; move them into <head> so
	// the Embedded Metadata translator can see them.
	var head = doc.getElementsByTagName('head');
	var strayMetas = ZU.xpath(doc, '//body/meta');
	for (var i = 0; i < strayMetas.length; i++) {
		head[0].append(strayMetas[i]);
	}

	var itemType = detectWeb(doc, url);

	var translator = Zotero.loadTranslator('web');
	// Embedded Metadata
	translator.setTranslator('951c027d-74ac-47d4-a107-9c3069ab7b48');
	translator.setHandler('itemDone', function (obj, item) {
		// Split combined "Place : Publisher" strings into their two fields.
		if (item.publisher && !item.place && item.publisher.includes(' : ')) {
			var placeAndPublisher = item.publisher.split(' : ');
			item.place = placeAndPublisher[0];
			item.publisher = placeAndPublisher[1];
		}

		// A creator without a comma is a corporate author (e.g. the WHO
		// itself); store it as a single-field creator.
		var firstAuthor = attr(doc, 'meta[name="DC.creator"]', 'content');
		if (firstAuthor && !firstAuthor.includes(',')) {
			item.creators[0] = {
				"lastName": firstAuthor,
				"creatorType": "author",
				"fieldMode": true
			};
		}

		// DC.description doesn't actually contain other useful content,
		// except possibly the number of pages.
		var descriptions = doc.querySelectorAll('meta[name="DC.description"]');
		for (var d = 0; d < descriptions.length; d++) {
			var numPages = descriptions[d].content.match(/(([lxiv]+,\s*)?\d+)\s*p/);
			if (!numPages) continue;
			if (ZU.fieldIsValidForType("numPages", item.itemType)) {
				item.numPages = numPages[1];
			}
			else if (item.extra) {
				item.extra += "\nnumber-of-pages: " + numPages[1];
			}
			else {
				item.extra = "number-of-pages: " + numPages[1];
			}
			// The description was only the page count, not a real abstract.
			delete item.abstractNote;
		}

		item.complete();
	});
	translator.getTranslatorObject(function (trans) {
		trans.itemType = itemType;
		trans.doWeb(doc, url);
	});
}
/** BEGIN TEST CASES **/
// Automated fixtures consumed by the Zotero translator test framework:
// each entry pairs a live WHO IRIS URL with the exact item(s) the
// translator is expected to produce ("multiple" for listing/search pages).
var testCases = [
	{
		"type": "web",
		"url": "http://apps.who.int/iris/handle/10665/70863?locale=ar",
		"items": [
			{
				"itemType": "report",
				"title": "Consensus document on the epidemiology of severe acute respiratory syndrome (SARS)",
				"creators": [
					{
						"lastName": "World Health Organization",
						"creatorType": "author",
						"fieldMode": true
					}
				],
				"date": "2003",
				"extra": "number-of-pages: 46",
				"institution": "World Health Organization",
				"language": "en",
				"libraryCatalog": "apps.who.int",
				"place": "Geneva",
				"reportNumber": "WHO/CDS/CSR/GAR/2003.11",
				"url": "http://apps.who.int/iris/handle/10665/70863",
				"attachments": [
					{
						"title": "Full Text PDF",
						"mimeType": "application/pdf"
					},
					{
						"title": "Snapshot"
					}
				],
				"tags": [
					{
						"tag": "Communicable Diseases and their Control"
					},
					{
						"tag": "Disease outbreaks"
					},
					{
						"tag": "Epidemiologic surveillance"
					},
					{
						"tag": "Severe acute respiratory syndrome"
					}
				],
				"notes": [],
				"seeAlso": []
			}
		]
	},
	{
		"type": "web",
		"url": "http://apps.who.int/iris/handle/10665/272081",
		"items": [
			{
				"itemType": "journalArticle",
				"title": "Providing oxygen to children in hospitals: a realist review",
				"creators": [
					{
						"firstName": "Hamish",
						"lastName": "Graham",
						"creatorType": "author"
					},
					{
						"firstName": "Shidan",
						"lastName": "Tosif",
						"creatorType": "author"
					},
					{
						"firstName": "Amy",
						"lastName": "Gray",
						"creatorType": "author"
					},
					{
						"firstName": "Shamim",
						"lastName": "Qazi",
						"creatorType": "author"
					},
					{
						"firstName": "Harry",
						"lastName": "Campbell",
						"creatorType": "author"
					},
					{
						"firstName": "David",
						"lastName": "Peel",
						"creatorType": "author"
					},
					{
						"firstName": "Barbara",
						"lastName": "McPake",
						"creatorType": "author"
					},
					{
						"firstName": "Trevor",
						"lastName": "Duke",
						"creatorType": "author"
					}
				],
				"date": "2017-4-01",
				"DOI": "10.2471/BLT.16.186676",
				"ISSN": "0042-9686",
				"abstractNote": "288",
				"extra": "PMID: 28479624",
				"issue": "4",
				"language": "en",
				"libraryCatalog": "apps.who.int",
				"pages": "288-302",
				"publicationTitle": "Bulletin of the World Health Organization",
				"rights": "http://creativecommons.org/licenses/by/3.0/igo/legalcode",
				"shortTitle": "Providing oxygen to children in hospitals",
				"url": "http://apps.who.int/iris/handle/10665/272081",
				"volume": "95",
				"attachments": [
					{
						"title": "Full Text PDF",
						"mimeType": "application/pdf"
					},
					{
						"title": "Snapshot"
					},
					{
						"title": "PubMed entry",
						"mimeType": "text/html",
						"snapshot": false
					}
				],
				"tags": [
					{
						"tag": "Systematic Reviews"
					}
				],
				"notes": [],
				"seeAlso": []
			}
		]
	},
	{
		"type": "web",
		"url": "http://apps.who.int/iris/handle/10665/273678",
		"items": [
			{
				"itemType": "book",
				"title": "Сборник руководящих принципов и стандартов ВОЗ: обеспечение оптимального оказания медицинских услуг пациентам с туберкулезом",
				"creators": [
					{
						"lastName": "Всемирная организация здравоохранения",
						"creatorType": "author",
						"fieldMode": true
					}
				],
				"date": "2018",
				"ISBN": "9789244514108",
				"language": "ru",
				"libraryCatalog": "apps.who.int",
				"numPages": "47",
				"publisher": "Всемирная организация здравоохранения",
				"rights": "CC BY-NC-SA 3.0 IGO",
				"shortTitle": "Сборник руководящих принципов и стандартов ВОЗ",
				"url": "http://apps.who.int/iris/handle/10665/273678",
				"attachments": [
					{
						"title": "Full Text PDF",
						"mimeType": "application/pdf"
					},
					{
						"title": "Snapshot"
					}
				],
				"tags": [
					{
						"tag": "Delivery of Health Care"
					},
					{
						"tag": "Disease Management"
					},
					{
						"tag": "Guideline"
					},
					{
						"tag": "Infection Control"
					},
					{
						"tag": "Multidrug-Resistant"
					},
					{
						"tag": "Patient Care"
					},
					{
						"tag": "Reference Standards"
					},
					{
						"tag": "Tuberculosis"
					}
				],
				"notes": [],
				"seeAlso": []
			}
		]
	},
	{
		"type": "web",
		"url": "http://apps.who.int/iris/handle/10665/165097",
		"items": "multiple"
	},
	{
		"type": "web",
		"url": "http://apps.who.int/iris/discover?query=acupuncture",
		"items": "multiple"
	}
];
/** END TEST CASES **/
| ZotPlus/zotero-better-bibtex | test/fixtures/profile/zotero/zotero/translators/WHO.js | JavaScript | unlicense | 9,219 |
'use strict';
angular.module('mgcrea.ngStrap.typeahead', ['mgcrea.ngStrap.tooltip', 'mgcrea.ngStrap.helpers.parseOptions'])
.provider('$typeahead', function() {

  // Default options; overridable per instance through the factory's
  // `config` argument, and globally at config time via this provider.
  var defaults = this.defaults = {
    animation: 'am-fade',
    prefixClass: 'typeahead',
    prefixEvent: '$typeahead',
    placement: 'bottom-left',
    template: 'typeahead/typeahead.tpl.html',
    trigger: 'focus',
    container: false,
    keyboard: true,
    html: false,
    delay: 0,
    minLength: 1,
    filter: 'filter',
    limit: 6
  };

  this.$get = function($window, $rootScope, $tooltip) {

    // NOTE(review): bodyEl is not referenced anywhere in this factory —
    // presumably kept for parity with sibling ngStrap modules; confirm.
    var bodyEl = angular.element($window.document.body);

    // A typeahead is a $tooltip instance decorated with match management
    // and keyboard navigation. `controller` is the input's ngModelController.
    function TypeaheadFactory(element, controller, config) {

      var $typeahead = {};

      // Common vars
      var options = angular.extend({}, defaults, config);

      $typeahead = $tooltip(element, options);
      var parentScope = config.scope;
      var scope = $typeahead.$scope;

      // Clears the match list and resets the highlighted row.
      scope.$resetMatches = function(){
        scope.$matches = [];
        scope.$activeIndex = 0;
      };
      scope.$resetMatches();

      // Template-facing handlers; deferred to post-digest so the click/hover
      // is processed after the current digest has settled.
      scope.$activate = function(index) {
        scope.$$postDigest(function() {
          $typeahead.activate(index);
        });
      };

      scope.$select = function(index, evt) {
        scope.$$postDigest(function() {
          $typeahead.select(index);
        });
      };

      scope.$isVisible = function() {
        return $typeahead.$isVisible();
      };

      // Public methods

      // Replaces the match list, clamping the highlight if it falls off the end.
      $typeahead.update = function(matches) {
        scope.$matches = matches;
        if(scope.$activeIndex >= matches.length) {
          scope.$activeIndex = 0;
        }
      };

      $typeahead.activate = function(index) {
        scope.$activeIndex = index;
      };

      // Commits the chosen match into the ngModel, re-renders the input,
      // clears the matches, and notifies listeners.
      $typeahead.select = function(index) {
        var value = scope.$matches[index].value;
        controller.$setViewValue(value);
        controller.$render();
        scope.$resetMatches();
        if(parentScope) parentScope.$digest();
        // Emit event
        scope.$emit(options.prefixEvent + '.select', value, index);
      };

      // Protected methods

      // Visible when there are matches and (if minLength is set) the typed
      // view value is long enough.
      $typeahead.$isVisible = function() {
        if(!options.minLength || !controller) {
          return !!scope.$matches.length;
        }
        // minLength support
        return scope.$matches.length && angular.isString(controller.$viewValue) && controller.$viewValue.length >= options.minLength;
      };

      // Linear search for `value` in the current matches; undefined if absent.
      $typeahead.$getIndex = function(value) {
        var l = scope.$matches.length, i = l;
        if(!l) return;
        for(i = l; i--;) {
          if(scope.$matches[i].value === value) break;
        }
        if(i < 0) return;
        return i;
      };

      $typeahead.$onMouseDown = function(evt) {
        // Prevent blur on mousedown
        evt.preventDefault();
        evt.stopPropagation();
      };

      // Keyboard navigation: up (38) / down (40) move the highlight,
      // enter (13) selects; all three are swallowed while the tip is open.
      $typeahead.$onKeyDown = function(evt) {
        if(!/(38|40|13)/.test(evt.keyCode)) return;

        // Let ngSubmit pass if the typeahead tip is hidden
        if($typeahead.$isVisible()) {
          evt.preventDefault();
          evt.stopPropagation();
        }

        // Select with enter
        if(evt.keyCode === 13 && scope.$matches.length) {
          $typeahead.select(scope.$activeIndex);
        }

        // Navigate with keyboard
        else if(evt.keyCode === 38 && scope.$activeIndex > 0) scope.$activeIndex--;
        else if(evt.keyCode === 40 && scope.$activeIndex < scope.$matches.length - 1) scope.$activeIndex++;
        else if(angular.isUndefined(scope.$activeIndex)) scope.$activeIndex = 0;
        scope.$digest();
      };

      // Overrides

      // show/hide are wrapped to (de)register the mouse/keyboard handlers;
      // the setTimeout lets the tip element exist before binding to it.
      var show = $typeahead.show;
      $typeahead.show = function() {
        show();
        setTimeout(function() {
          $typeahead.$element.on('mousedown', $typeahead.$onMouseDown);
          if(options.keyboard) {
            element.on('keydown', $typeahead.$onKeyDown);
          }
        });
      };

      var hide = $typeahead.hide;
      $typeahead.hide = function() {
        $typeahead.$element.off('mousedown', $typeahead.$onMouseDown);
        if(options.keyboard) {
          element.off('keydown', $typeahead.$onKeyDown);
        }
        hide();
      };

      return $typeahead;

    }

    TypeaheadFactory.defaults = defaults;
    return TypeaheadFactory;

  };

})
.directive('bsTypeahead', function($window, $parse, $q, $typeahead, $parseOptions) {

  var defaults = $typeahead.defaults;

  return {
    restrict: 'EAC',
    require: 'ngModel',
    link: function postLink(scope, element, attr, controller) {

      // Directive options — collected from the element's attributes.
      var options = {scope: scope};
      angular.forEach(['placement', 'container', 'delay', 'trigger', 'keyboard', 'html', 'animation', 'template', 'filter', 'limit', 'minLength', 'watchOptions', 'selectMode'], function(key) {
        if(angular.isDefined(attr[key])) options[key] = attr[key];
      });

      // Build proper ngOptions — append the filter (keyed on $viewValue)
      // and the limitTo cap to the user-supplied expression.
      var filter = options.filter || defaults.filter;
      var limit = options.limit || defaults.limit;
      var ngOptions = attr.ngOptions;
      if(filter) ngOptions += ' | ' + filter + ':$viewValue';
      if(limit) ngOptions += ' | limitTo:' + limit;
      var parsedOptions = $parseOptions(ngOptions);

      // Initialize typeahead
      var typeahead = $typeahead(element, controller, options);

      // Watch options on demand
      if(options.watchOptions) {
        // Watch ngOptions values before filtering for changes, drop function calls
        var watchedOptions = parsedOptions.$match[7].replace(/\|.+/, '').replace(/\(.*\)/g, '').trim();
        scope.$watch(watchedOptions, function (newValue, oldValue) {
          // console.warn('scope.$watch(%s)', watchedOptions, newValue, oldValue);
          parsedOptions.valuesFn(scope, controller).then(function (values) {
            typeahead.update(values);
            controller.$render();
          });
        }, true);
      }

      // Watch model for changes
      scope.$watch(attr.ngModel, function(newValue, oldValue) {
        // console.warn('$watch', element.attr('ng-model'), newValue);
        scope.$modelValue = newValue; // Publish modelValue on scope for custom templates
        parsedOptions.valuesFn(scope, controller)
        .then(function(values) {
          // Prevent input with no future prospect if selectMode is truthy
          // @TODO test selectMode
          if(options.selectMode && !values.length && newValue.length > 0) {
            controller.$setViewValue(controller.$viewValue.substring(0, controller.$viewValue.length - 1));
            return;
          }
          if(values.length > limit) values = values.slice(0, limit);
          // Update while visible happens before the exact-match early return;
          // the hidden case updates afterwards so the tip opens on new input.
          var isVisible = typeahead.$isVisible();
          isVisible && typeahead.update(values);
          // Do not re-queue an update if a correct value has been selected
          if(values.length === 1 && values[0].value === newValue) return;
          !isVisible && typeahead.update(values);
          // Queue a new rendering that will leverage collection loading
          controller.$render();
        });
      });

      // Model rendering in view — shows the matched label (HTML stripped),
      // falling back to the raw view value when no match is known.
      controller.$render = function () {
        // console.warn('$render', element.attr('ng-model'), 'controller.$modelValue', typeof controller.$modelValue, controller.$modelValue, 'controller.$viewValue', typeof controller.$viewValue, controller.$viewValue);
        if(controller.$isEmpty(controller.$viewValue)) return element.val('');
        var index = typeahead.$getIndex(controller.$modelValue);
        var selected = angular.isDefined(index) ? typeahead.$scope.$matches[index].label : controller.$viewValue;
        selected = angular.isObject(selected) ? selected.label : selected;
        element.val(selected.replace(/<(?:.|\n)*?>/gm, '').trim());
      };

      // Garbage collection
      scope.$on('$destroy', function() {
        if (typeahead) typeahead.destroy();
        options = null;
        typeahead = null;
      });

    }
  };

});
| anirvann/testApp | vendor/angular-strap/src/typeahead/typeahead.js | JavaScript | unlicense | 8,390 |
namespace TinderApp.Library.Controls
{
	/// <summary>
	/// Application-level services that library controls need from the host app.
	/// </summary>
	public interface IApp
	{
		/// <summary>The application's root navigation frame.</summary>
		CustomPhoneApplicationFrame RootFrameInstance { get; }
		/// <summary>Logs the current user out of the application.</summary>
		void Logout();
	}
} | brianhama/tinder | TinderApp.Library/Controls/IApp.cs | C# | unlicense | 168 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.common;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.api.core.client.ActiveMQClient;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.ClientRequestor;
import org.apache.activemq.artemis.api.core.client.ClientSession;
import org.apache.activemq.artemis.api.core.client.ClientSessionFactory;
import org.apache.activemq.artemis.api.core.client.ServerLocator;
import org.apache.activemq.artemis.api.core.management.ManagementHelper;
import org.apache.activemq.artemis.api.core.management.ResourceNames;
import org.apache.activemq.artemis.core.remoting.impl.netty.NettyConnectorFactory;
import org.apache.activemq.artemis.tests.util.SpawnedVMSupport;
import org.junit.Assert;
import org.objectweb.jtests.jms.admin.Admin;
/**
* AbstractAdmin.
*/
/**
 * AbstractAdmin.
 * <p>
 * Joram JMS conformance-test {@link Admin} implementation backed by the
 * ActiveMQ Artemis core management API: destinations are created and
 * destroyed by sending synchronous management messages to the broker's
 * management address. The broker itself can run in-process or, when
 * {@link #spawnServer} is enabled, in a separate spawned JVM controlled
 * over its stdin/stdout.
 */
public class AbstractAdmin implements Admin {

   protected ClientSession clientSession;

   protected ClientRequestor requestor;

   protected boolean serverLifeCycleActive;

   protected Process serverProcess;

   protected ServerLocator serverLocator;

   protected ClientSessionFactory sf;

   // this is a constant to control if we should use a separate VM for the server.
   public static final boolean spawnServer = false;

   /**
    * Determines whether to act or 'no-op' on serverStart() and
    * serverStop(). This is used when testing combinations of client and
    * servers with different versions.
    */
   private static final String SERVER_LIVE_CYCLE_PROPERTY = "org.apache.activemq.artemis.jms.ActiveMQAMQPAdmin.serverLifeCycle";

   public AbstractAdmin() {
      // Defaults to true so the admin manages the broker unless told otherwise.
      serverLifeCycleActive = Boolean.valueOf(System.getProperty(SERVER_LIVE_CYCLE_PROPERTY, "true"));
   }

   @Override
   public String getName() {
      return getClass().getName();
   }

   /**
    * Opens a Netty connection to the broker, creates the management session
    * and requestor, and starts the session.
    */
   @Override
   public void start() throws Exception {
      serverLocator = ActiveMQClient.createServerLocatorWithoutHA(new TransportConfiguration(NettyConnectorFactory.class.getName()));
      sf = serverLocator.createSessionFactory();
      clientSession = sf.createSession(ActiveMQDefaultConfiguration.getDefaultClusterUser(), ActiveMQDefaultConfiguration.getDefaultClusterPassword(), false, true, true, false, 1);
      requestor = new ClientRequestor(clientSession, ActiveMQDefaultConfiguration.getDefaultManagementAddress());
      clientSession.start();
   }

   /**
    * Closes the management requestor and connection resources.
    * Closing the session factory also closes sessions created from it.
    */
   @Override
   public void stop() throws Exception {
      requestor.close();

      if (sf != null) {
         sf.close();
      }

      if (serverLocator != null) {
         serverLocator.close();
      }

      sf = null;
      serverLocator = null;
   }

   @Override
   public Context createContext() throws NamingException {
      return new InitialContext();
   }

   @Override
   public void createConnectionFactory(final String name) {
      throw new RuntimeException("FIXME NYI createConnectionFactory");
   }

   @Override
   public void deleteConnectionFactory(final String name) {
      throw new RuntimeException("FIXME NYI deleteConnectionFactory");
   }

   @Override
   public void createQueue(final String name) {
      // The JMS queue name doubles as the core queue name.
      invokeBooleanOperation("createQueue", name, name);
   }

   @Override
   public void deleteQueue(final String name) {
      invokeBooleanOperation("destroyQueue", name);
   }

   @Override
   public void createQueueConnectionFactory(final String name) {
      createConnectionFactory(name);
   }

   @Override
   public void deleteQueueConnectionFactory(final String name) {
      deleteConnectionFactory(name);
   }

   @Override
   public void createTopic(final String name) {
      // The JMS topic name doubles as the core address name.
      invokeBooleanOperation("createTopic", name, name);
   }

   @Override
   public void deleteTopic(final String name) {
      invokeBooleanOperation("destroyTopic", name);
   }

   @Override
   public void createTopicConnectionFactory(final String name) {
      createConnectionFactory(name);
   }

   @Override
   public void deleteTopicConnectionFactory(final String name) {
      deleteConnectionFactory(name);
   }

   /**
    * Starts the broker, either in-process or (when {@link #spawnServer} is
    * set) in a separate JVM. The spawned process signals readiness by
    * printing "OK" and a startup failure by printing "KO"; after the
    * handshake its remaining output is pumped to stdout on a daemon of its
    * own so the pipe never fills up.
    */
   @Override
   public void startServer() throws Exception {
      if (!serverLifeCycleActive) {
         return;
      }

      if (spawnServer) {
         String[] vmArgs = new String[]{};
         serverProcess = SpawnedVMSupport.spawnVM(SpawnedJMSServer.class.getName(), vmArgs, false);
         InputStreamReader isr = new InputStreamReader(serverProcess.getInputStream());

         final BufferedReader br = new BufferedReader(isr);
         String line = null;
         while ((line = br.readLine()) != null) {
            System.out.println("SERVER: " + line);
            if ("OK".equals(line.trim())) {
               // Server is up: keep echoing its output in the background.
               new Thread() {
                  @Override
                  public void run() {
                     try {
                        String line1 = null;
                        while ((line1 = br.readLine()) != null) {
                           System.out.println("SERVER: " + line1);
                        }
                     } catch (Exception e) {
                        e.printStackTrace();
                     }
                  }
               }.start();
               return;
            } else if ("KO".equals(line.trim())) {
               // something went wrong with the server, destroy it:
               serverProcess.destroy();
               throw new IllegalStateException("Unable to start the spawned server :" + line);
            }
         }
      } else {
         SpawnedJMSServer.startServer();
      }
   }

   /**
    * Stops the broker started by {@link #startServer()}. A spawned server is
    * asked to exit by writing "STOP" to its stdin and is destroyed if it
    * exits abnormally.
    */
   @Override
   public void stopServer() throws Exception {
      if (!serverLifeCycleActive) {
         return;
      }
      if (spawnServer) {
         OutputStreamWriter osw = new OutputStreamWriter(serverProcess.getOutputStream());
         osw.write("STOP\n");
         osw.flush();
         int exitValue = serverProcess.waitFor();
         if (exitValue != 0) {
            serverProcess.destroy();
         }
      } else {
         SpawnedJMSServer.stopServer();
      }
   }

   /**
    * Invokes a JMS-server management operation that reports success with a
    * boolean result and asserts that it succeeded. Shared by the
    * create/destroy queue and topic methods.
    */
   private void invokeBooleanOperation(final String operationName, final Object... parameters) {
      Boolean result;
      try {
         result = (Boolean) invokeSyncOperation(ResourceNames.JMS_SERVER, operationName, parameters);
         Assert.assertEquals(true, result.booleanValue());
      } catch (Exception e) {
         throw new IllegalStateException(e);
      }
   }

   /**
    * Sends a synchronous management request to the broker and returns the
    * operation result.
    *
    * @param resourceName  management resource to address (see {@link ResourceNames})
    * @param operationName operation to invoke on the resource
    * @param parameters    operation arguments
    * @return the operation result extracted from the reply
    * @throws IllegalStateException if the request fails, times out (3s) or
    *                               the broker reports a failure
    */
   protected Object invokeSyncOperation(final String resourceName,
                                        final String operationName,
                                        final Object... parameters) throws Exception {
      ClientMessage message = clientSession.createMessage(false);
      ManagementHelper.putOperationInvocation(message, resourceName, operationName, parameters);
      ClientMessage reply;
      try {
         reply = requestor.request(message, 3000);
      } catch (Exception e) {
         throw new IllegalStateException("Exception while invoking " + operationName + " on " + resourceName, e);
      }
      if (reply == null) {
         throw new IllegalStateException("no reply received when invoking " + operationName + " on " + resourceName);
      }
      if (!ManagementHelper.hasOperationSucceeded(reply)) {
         throw new IllegalStateException("operation failed when invoking " + operationName +
                                            " on " +
                                            resourceName +
                                            ": " +
                                            ManagementHelper.getResult(reply));
      }
      return ManagementHelper.getResult(reply);
   }
}
| okalmanRH/jboss-activemq-artemis | tests/joram-tests/src/test/java/org/apache/activemq/artemis/common/AbstractAdmin.java | Java | apache-2.0 | 9,253 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.qpid.jms.integration;
import static org.apache.qpid.jms.provider.amqp.AmqpSupport.ANONYMOUS_RELAY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.util.UUID;
import javax.jms.JMSContext;
import javax.jms.JMSProducer;
import org.apache.qpid.jms.test.QpidJmsTestCase;
import org.apache.qpid.jms.test.testpeer.TestAmqpPeer;
import org.apache.qpid.proton.amqp.Binary;
import org.apache.qpid.proton.amqp.Symbol;
import org.junit.Test;
/**
 * Integration tests for the JMS 2.0 {@code JMSContext} API driven against a scripted
 * AMQP test peer. Each test declares the exact frames it expects (begin, attach, end,
 * close, ...) before performing the context operations that trigger them, so the
 * position of every expect* call relative to the context calls is significant.
 */
public class JMSContextIntegrationTest extends QpidJmsTestCase {

    private final IntegrationTestFixture testFixture = new IntegrationTestFixture();

    // Connection capability offered by the peer: the anonymous relay lets one
    // sender link carry messages for any destination.
    private Symbol[] SERVER_ANONYMOUS_RELAY = new Symbol[]{ANONYMOUS_RELAY};

    @Test(timeout = 20000)
    public void testCreateAndCloseContext() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateContextWithClientId() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            // Final fixture argument presumably requests a client-id on connect —
            // confirm against the IntegrationTestFixture signature.
            JMSContext context = testFixture.createJMSContext(testPeer, false, null, null, null, true);
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateContextAndSetClientID() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, false, null, null, null, false);
            // Setting the client-id after creation must still be allowed before
            // the connection is used.
            context.setClientID(UUID.randomUUID().toString());
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateAutoAckSessionByDefault() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            assertEquals(JMSContext.AUTO_ACKNOWLEDGE, context.getSessionMode());
            // The session (AMQP begin) is only created on first use, here via createTopic.
            testPeer.expectBegin();
            context.createTopic("TopicName");
            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateContextWithTransactedSessionMode() throws Exception {
        Binary txnId = new Binary(new byte[]{ (byte) 5, (byte) 6, (byte) 7, (byte) 8});

        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, JMSContext.SESSION_TRANSACTED);
            assertEquals(JMSContext.SESSION_TRANSACTED, context.getSessionMode());

            // Session should be created and a coordinator should be attached since this
            // should be a TX session, then a new TX is declared, once closed the TX should
            // be discharged as a roll back.
            testPeer.expectBegin();
            testPeer.expectCoordinatorAttach();
            testPeer.expectDeclare(txnId);
            testPeer.expectDischarge(txnId, true);
            testPeer.expectEnd();
            testPeer.expectClose();

            context.createTopic("TopicName");
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testCreateContextFromContextWithSessionsActive() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer);
            assertEquals(JMSContext.AUTO_ACKNOWLEDGE, context.getSessionMode());
            testPeer.expectBegin();
            context.createTopic("TopicName");

            // A second context created from the first shares the connection but gets
            // its own session on demand; closing that second context should therefore
            // only end its session, not the connection.
            JMSContext other = context.createContext(JMSContext.CLIENT_ACKNOWLEDGE);
            assertEquals(JMSContext.CLIENT_ACKNOWLEDGE, other.getSessionMode());
            testPeer.expectBegin();
            testPeer.expectEnd();
            other.createTopic("TopicName");
            other.close();
            testPeer.waitForAllHandlersToComplete(1000);

            // Now the connection should close down.
            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testOnlyOneProducerCreatedInSingleContext() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, SERVER_ANONYMOUS_RELAY);
            assertEquals(JMSContext.AUTO_ACKNOWLEDGE, context.getSessionMode());
            testPeer.expectBegin();
            testPeer.expectSenderAttach();

            // One producer created should send an attach.
            JMSProducer producer1 = context.createProducer();
            assertNotNull(producer1);

            // An additional one should not result in an attach
            JMSProducer producer2 = context.createProducer();
            assertNotNull(producer2);

            testPeer.expectEnd();
            testPeer.expectClose();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }

    @Test(timeout = 20000)
    public void testEachContextGetsItsOwnProducer() throws Exception {
        try (TestAmqpPeer testPeer = new TestAmqpPeer();) {
            JMSContext context = testFixture.createJMSContext(testPeer, SERVER_ANONYMOUS_RELAY);
            assertEquals(JMSContext.AUTO_ACKNOWLEDGE, context.getSessionMode());
            testPeer.expectBegin();
            testPeer.expectSenderAttach();
            testPeer.expectBegin();
            testPeer.expectSenderAttach();

            // One producer created should send an attach.
            JMSProducer producer1 = context.createProducer();
            assertNotNull(producer1);

            // A second context gets its own session and its own anonymous producer,
            // so a second begin and a second attach are expected here.
            JMSContext other = context.createContext(JMSContext.AUTO_ACKNOWLEDGE);
            JMSProducer producer2 = other.createProducer();
            assertNotNull(producer2);

            testPeer.expectEnd();
            testPeer.expectEnd();
            testPeer.expectClose();

            other.close();
            context.close();
            testPeer.waitForAllHandlersToComplete(1000);
        }
    }
}
| apache/qpid-jms | qpid-jms-client/src/test/java/org/apache/qpid/jms/integration/JMSContextIntegrationTest.java | Java | apache-2.0 | 7,660 |
package org.apereo.cas.authentication;
import com.google.common.base.Splitter;
import org.apereo.cas.authentication.principal.Principal;
import org.apereo.cas.services.MultifactorAuthenticationProvider;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.services.RegisteredServiceMultifactorPolicy;
import org.apereo.cas.util.CollectionUtils;
import org.springframework.util.StringUtils;
import javax.servlet.http.HttpServletRequest;
import java.util.Collection;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* Default MFA Trigger selection strategy. This strategy looks for valid triggers in the following order: request
* parameter, RegisteredService policy, principal attribute.
*
* @author Daniel Frett
* @since 5.0.0
*/
public class DefaultMultifactorTriggerSelectionStrategy implements MultifactorTriggerSelectionStrategy {
    /** Splits comma-separated attribute name lists, trimming blanks. */
    private static final Splitter ATTR_NAMES = Splitter.on(',').trimResults().omitEmptyStrings();

    private final String requestParameter;
    private final String globalPrincipalAttributeNameTriggers;

    /**
     * @param attributeNameTriggers comma-separated principal attribute names whose values may name a provider
     * @param requestParameter      request parameter that may carry an opt-in provider id
     */
    public DefaultMultifactorTriggerSelectionStrategy(final String attributeNameTriggers, final String requestParameter) {
        this.globalPrincipalAttributeNameTriggers = attributeNameTriggers;
        this.requestParameter = requestParameter;
    }

    /**
     * Resolves the MFA provider id to use, trying triggers in priority order:
     * request parameter, then registered-service policy, then principal attribute.
     * Returns an empty Optional when no trigger matches an available provider.
     */
    @Override
    public Optional<String> resolve(final Collection<MultifactorAuthenticationProvider> providers,
                                    final HttpServletRequest request, final RegisteredService service, final Principal principal) {
        Optional<String> provider = Optional.empty();

        // short-circuit if we don't have any available MFA providers
        if (providers == null || providers.isEmpty()) {
            return provider;
        }
        final Set<String> validProviderIds = providers.stream()
                .map(MultifactorAuthenticationProvider::getId)
                .collect(Collectors.toSet());

        // check for an opt-in provider id parameter trigger, we only care about the first value
        if (!provider.isPresent() && request != null) {
            provider = Optional.ofNullable(request.getParameter(requestParameter))
                    .filter(validProviderIds::contains);
        }

        // check for a RegisteredService configured trigger
        if (!provider.isPresent() && service != null) {
            final RegisteredServiceMultifactorPolicy policy = service.getMultifactorPolicy();
            if (shouldApplyRegisteredServiceMultifactorPolicy(policy, principal)) {
                provider = policy.getMultifactorAuthenticationProviders().stream()
                        .filter(validProviderIds::contains)
                        .findFirst();
            }
        }

        // check for principal attribute trigger
        if (!provider.isPresent() && principal != null
                && StringUtils.hasText(globalPrincipalAttributeNameTriggers)) {
            provider = StreamSupport.stream(ATTR_NAMES.split(globalPrincipalAttributeNameTriggers).spliterator(), false)
                    // principal.getAttribute(name).values
                    .map(principal.getAttributes()::get).filter(Objects::nonNull)
                    .map(CollectionUtils::toCollection).flatMap(Set::stream)
                    // validProviderIds.contains((String) value)
                    .filter(String.class::isInstance).map(String.class::cast).filter(validProviderIds::contains)
                    .findFirst();
        }

        // return the resolved trigger
        return provider;
    }

    /**
     * Decides whether the registered service's multifactor policy applies to the given principal.
     * Returns false when there is no policy at all; true when the policy has no attribute
     * name/value restriction or no principal is available to evaluate it against; otherwise
     * true only when one of the named attributes has a value matching the configured pattern.
     */
    private static boolean shouldApplyRegisteredServiceMultifactorPolicy(final RegisteredServiceMultifactorPolicy policy, final Principal principal) {
        // BUG FIX: services without a multifactor policy previously caused an NPE here;
        // with no policy there is nothing to apply, so the service trigger is skipped.
        if (policy == null) {
            return false;
        }
        final String attrName = policy.getPrincipalAttributeNameTrigger();
        final String attrValue = policy.getPrincipalAttributeValueToMatch();

        // Principal attribute name and/or value is not defined
        if (!StringUtils.hasText(attrName) || !StringUtils.hasText(attrValue)) {
            return true;
        }

        // no Principal, we should enforce policy
        if (principal == null) {
            return true;
        }

        // check to see if any of the specified attributes match the attrValue pattern
        final Predicate<String> attrValuePredicate = Pattern.compile(attrValue).asPredicate();
        return StreamSupport.stream(ATTR_NAMES.split(attrName).spliterator(), false)
                .map(principal.getAttributes()::get)
                .filter(Objects::nonNull)
                .map(CollectionUtils::toCollection)
                .flatMap(Set::stream)
                .filter(String.class::isInstance)
                .map(String.class::cast)
                .anyMatch(attrValuePredicate);
    }
}
| creamer/cas | core/cas-server-core-services/src/main/java/org/apereo/cas/authentication/DefaultMultifactorTriggerSelectionStrategy.java | Java | apache-2.0 | 4,960 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2017 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'
}
DOCUMENTATION = '''
module: iworkflow_license_pool
short_description: Manage license pools in iWorkflow.
description:
- Manage license pools in iWorkflow.
version_added: 2.4
options:
name:
description:
- Name of the license pool to create.
required: True
state:
description:
- Whether the license pool should exist, or not. A state of C(present)
will attempt to activate the license pool if C(accept_eula) is set
to C(yes).
required: False
default: present
choices:
- present
- absent
base_key:
description:
- Key that the license server uses to verify the functionality that
you are entitled to license. This option is required if you are
creating a new license.
required: False
default: None
accept_eula:
description:
- Specifies that you accept the EULA that is part of iWorkflow. Note
that this is required to activate the license pool. If this is not
specified, or it is set to C(no), then the pool will remain in a state
of limbo until you choose to accept the EULA. This option is required
when updating a license. It is also suggested that you provide it when
creating a license, but if you do not, the license will remain
inactive and you will have to run this module again with this option
set to C(yes) to activate it.
required: False
default: 'no'
choices:
- yes
- no
notes:
- Requires the f5-sdk Python package on the host. This is as easy as pip
install f5-sdk.
extends_documentation_fragment: f5
requirements:
- f5-sdk >= 2.3.0
- iWorkflow >= 2.1.0
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Create license pool
iworkflow_license_pool:
accept_eula: "yes"
name: "my-lic-pool"
base_key: "XXXXX-XXXXX-XXXXX-XXXXX-XXXXXXX"
state: "present"
server: "iwf.mydomain.com"
password: "secret"
user: "admin"
validate_certs: "no"
delegate_to: localhost
'''
RETURN = '''
'''
import time
from ansible.module_utils.basic import BOOLEANS
from ansible.module_utils.f5_utils import (
AnsibleF5Client,
AnsibleF5Parameters,
F5ModuleError,
HAS_F5SDK,
iControlUnexpectedHTTPError
)
class Parameters(AnsibleF5Parameters):
    """Maps module parameters to/from the iWorkflow REST API representation."""

    # REST attribute name -> module parameter name
    api_map = {
        'baseRegKey': 'base_key'
    }

    # Values reported back to the user in the module result.
    returnables = []

    # Attributes sent to the device API.
    api_attributes = [
        'baseRegKey', 'state'
    ]

    # Attributes compared when deciding whether an update is needed.
    updatables = []

    def to_return(self):
        """Build the dict of returnable values, dropping unset entries."""
        gathered = {key: getattr(self, key) for key in self.returnables}
        return self._filter_params(gathered)

    def api_params(self):
        """Build the dict of API attributes, translating names via api_map."""
        params = {}
        for attr in self.api_attributes:
            if self.api_map is not None and attr in self.api_map:
                params[attr] = getattr(self, self.api_map[attr])
            else:
                params[attr] = getattr(self, attr)
        return self._filter_params(params)

    @property
    def name(self):
        # A missing name is allowed (None), but a blank one is a user error.
        if self._values['name'] is None:
            return None
        stripped = str(self._values['name']).strip()
        if not stripped:
            raise F5ModuleError(
                "You must specify a name for this module"
            )
        return stripped
class ModuleManager(object):
    """Drives the iWorkflow license pool lifecycle (create, activate, delete).

    Compares desired state (``want``) against device state (``have``) and
    issues the minimal REST calls needed to converge, honoring check mode.
    """

    def __init__(self, client):
        self.client = client
        self.have = None  # device-side state; populated by update()
        self.want = Parameters(self.client.module.params)
        self.changes = Parameters()

    def _set_changed_options(self):
        # Record every user-supplied returnable value as a pending change.
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = Parameters(changed)

    def _update_changed_options(self):
        # Diff want vs have on updatable keys; True if anything differs.
        changed = {}
        for key in Parameters.updatables:
            if getattr(self.want, key) is not None:
                attr1 = getattr(self.want, key)
                attr2 = getattr(self.have, key)
                if attr1 != attr2:
                    changed[key] = attr1
        if changed:
            self.changes = Parameters(changed)
            return True
        return False

    def _pool_is_licensed(self):
        # Requires self.have to be populated (see update()).
        return self.have.state == 'LICENSED'

    def _pool_is_unlicensed_eula_unaccepted(self, current):
        # An unlicensed pool cannot be activated unless the user accepted the EULA.
        return current.state != 'LICENSED' and not self.want.accept_eula

    def _find_pools(self):
        # Server-side filter by pool name; factored out because four different
        # operations issue exactly the same query.
        return self.client.api.cm.shared.licensing.pools_s.get_collection(
            requests_params=dict(
                params="$filter=name+eq+'{0}'".format(self.want.name)
            )
        )

    def exec_module(self):
        """Entry point: apply the desired state and return the Ansible result."""
        changed = False
        result = dict()
        state = self.want.state

        try:
            if state == "present":
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))

        result.update(**self.changes.to_return())
        result.update(dict(changed=changed))
        return result

    def exists(self):
        """Return True if exactly one pool with the wanted name exists.

        Raises:
            F5ModuleError: if more than one pool shares the name (ambiguous).
        """
        collection = self._find_pools()
        if len(collection) == 1:
            return True
        elif len(collection) == 0:
            return False
        else:
            raise F5ModuleError(
                "Multiple license pools with the provided name were found!"
            )

    def present(self):
        if self.exists():
            return self.update()
        else:
            return self.create()

    def should_update(self):
        # Nothing to do if already licensed, or if it is unlicensed but the
        # user has not accepted the EULA (activation would stall anyway).
        if self._pool_is_licensed():
            return False
        # BUG FIX: this was previously called without its required ``current``
        # argument, raising TypeError whenever an existing pool was updated.
        if self._pool_is_unlicensed_eula_unaccepted(self.have):
            return False
        return True

    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        # BUG FIX: previously read self.module.check_mode, but this class has
        # no ``module`` attribute; the check-mode flag lives on the client.
        if self.client.check_mode:
            return True
        self.update_on_device()
        return True

    def update_on_device(self):
        # Trigger automatic re-licensing of the existing pool.
        resource = self._find_pools().pop()
        resource.modify(
            state='RELICENSE',
            method='AUTOMATIC'
        )
        return self._wait_for_license_pool_state_to_activate(resource)

    def create(self):
        self._set_changed_options()
        if self.client.check_mode:
            return True
        if self.want.base_key is None:
            raise F5ModuleError(
                "You must specify a 'base_key' when creating a license pool"
            )
        self.create_on_device()
        return True

    def read_current_from_device(self):
        resource = self._find_pools().pop()
        return Parameters(resource.attrs)

    def create_on_device(self):
        resource = self.client.api.cm.shared.licensing.pools_s.pool.create(
            name=self.want.name,
            baseRegKey=self.want.base_key,
            method="AUTOMATIC"
        )
        return self._wait_for_license_pool_state_to_activate(resource)

    def _wait_for_license_pool_state_to_activate(self, pool):
        """Poll until the pool is LICENSED, accepting the EULA when asked.

        Polls every 10s for ~5 minutes. Returns True once licensed; raises
        F5ModuleError if the pool reports EXPIRED/FAILED; returns None if the
        pool is still pending when the polling window runs out.
        """
        error_values = ['EXPIRED', 'FAILED']
        # Wait no more than 5 minutes
        for x in range(1, 30):
            pool.refresh()
            if pool.state == 'LICENSED':
                return True
            elif pool.state == 'WAITING_FOR_EULA_ACCEPTANCE':
                # Echo the EULA text back to signal acceptance.
                pool.modify(
                    eulaText=pool.eulaText,
                    state='ACCEPTED_EULA'
                )
            elif pool.state in error_values:
                raise F5ModuleError(pool.errorText)
            time.sleep(10)

    def absent(self):
        if self.exists():
            return self.remove()
        return False

    def remove(self):
        if self.client.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the license pool")
        return True

    def remove_from_device(self):
        resource = self._find_pools().pop()
        if resource:
            resource.delete()
class ArgumentSpec(object):
    """Holds the Ansible argument specification for the license pool module."""

    def __init__(self):
        self.supports_check_mode = True
        self.f5_product_name = 'iworkflow'
        self.argument_spec = {
            # EULA must be accepted before the pool can be activated.
            'accept_eula': dict(type='bool', default='no', choices=BOOLEANS),
            # Registration key; kept out of logs.
            'base_key': dict(required=False, no_log=True),
            'name': dict(required=True),
            'state': dict(required=False, default='present', choices=['absent', 'present']),
        }
def main():
    """Module entry point: build the F5 client, run the manager, report back."""
    if not HAS_F5SDK:
        raise F5ModuleError("The python f5-sdk module is required")

    spec = ArgumentSpec()
    client = AnsibleF5Client(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        f5_product_name=spec.f5_product_name
    )

    try:
        manager = ModuleManager(client)
        client.module.exit_json(**manager.exec_module())
    except F5ModuleError as ex:
        client.module.fail_json(msg=str(ex))

if __name__ == '__main__':
    main()
| mcgonagle/ansible_f5 | library_old/iworkflow_license_pool.py | Python | apache-2.0 | 10,879 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.gui;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.util.HashSet;
import java.util.Set;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.MenuElement;
import org.apache.jmeter.exceptions.IllegalUserActionException;
import org.apache.jmeter.gui.action.AbstractAction;
import org.apache.jmeter.gui.action.ActionNames;
import org.apache.jmeter.gui.action.ActionRouter;
import org.apache.jmeter.gui.plugin.MenuCreator;
import org.apache.jmeter.util.JMeterUtils;
/**
 * JMeter GUI action that opens the HTML report generation dialog, exposed as an
 * entry in the Tools menu via the {@link MenuCreator} plugin mechanism.
 */
public class HtmlReportAction extends AbstractAction implements MenuCreator {

    /** Action names this action handles; populated once at class load. */
    private static Set<String> commands = new HashSet<>();

    static {
        commands.add(ActionNames.HTML_REPORT);
    }

    private HtmlReportUI htmlReportPanel;

    public HtmlReportAction() {
        super();
    }

    @Override
    public void doAction(ActionEvent e) throws IllegalUserActionException {
        // Build a fresh dialog each time the action is fired.
        htmlReportPanel = new HtmlReportUI();
        htmlReportPanel.showInputDialog(getParentFrame(e));
    }

    @Override
    public Set<String> getActionNames() {
        return commands;
    }

    @Override
    public JMenuItem[] getMenuItemsAtLocation(MENU_LOCATION location) {
        if (location == MENU_LOCATION.TOOLS) {
            return new JMenuItem[] { buildMenuItem() };
        }
        return new JMenuItem[0];
    }

    // Use the action name as resource key because the action name is used by
    // JMeterMenuBar too when changing languages.
    private JMenuItem buildMenuItem() {
        JMenuItem item = new JMenuItem(JMeterUtils.getResString(ActionNames.HTML_REPORT), KeyEvent.VK_UNDEFINED);
        item.setName(ActionNames.HTML_REPORT);
        item.setActionCommand(ActionNames.HTML_REPORT);
        item.setAccelerator(null);
        item.addActionListener(ActionRouter.getInstance());
        return item;
    }

    @Override
    public JMenu[] getTopLevelMenus() {
        // This plugin contributes no top-level menus of its own.
        return new JMenu[0];
    }

    @Override
    public boolean localeChanged(MenuElement menu) {
        return false;
    }

    @Override
    public void localeChanged() {
        // NOOP
    }

    public HtmlReportUI getHtmlReportPanel() {
        return htmlReportPanel;
    }
}
| apache/jmeter | src/core/src/main/java/org/apache/jmeter/gui/HtmlReportAction.java | Java | apache-2.0 | 2,972 |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.model.bpmn.impl.instance.camunda;
import static org.camunda.bpm.model.bpmn.impl.BpmnModelConstants.CAMUNDA_ELEMENT_CONNECTOR;
import static org.camunda.bpm.model.bpmn.impl.BpmnModelConstants.CAMUNDA_NS;
import org.camunda.bpm.model.bpmn.impl.instance.BpmnModelElementInstanceImpl;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaConnector;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaConnectorId;
import org.camunda.bpm.model.bpmn.instance.camunda.CamundaInputOutput;
import org.camunda.bpm.model.xml.ModelBuilder;
import org.camunda.bpm.model.xml.impl.instance.ModelTypeInstanceContext;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder.ModelTypeInstanceProvider;
import org.camunda.bpm.model.xml.type.child.ChildElement;
import org.camunda.bpm.model.xml.type.child.SequenceBuilder;
/**
* The BPMN connector camunda extension element
*
* @author Sebastian Menski
*/
public class CamundaConnectorImpl extends BpmnModelElementInstanceImpl implements CamundaConnector {

    // Child element accessors are static because they are shared metadata built
    // once in registerType(); they must be assigned before any instance is used.
    protected static ChildElement<CamundaConnectorId> camundaConnectorIdChild;
    protected static ChildElement<CamundaInputOutput> camundaInputOutputChild;

    /**
     * Registers the camunda:connector element type with the model builder.
     * The sequence order below (connectorId before inputOutput) mirrors the
     * element order in the camunda extension schema, so do not reorder the
     * builder calls.
     */
    public static void registerType(ModelBuilder modelBuilder) {
        ModelElementTypeBuilder typeBuilder = modelBuilder.defineType(CamundaConnector.class, CAMUNDA_ELEMENT_CONNECTOR)
            .namespaceUri(CAMUNDA_NS)
            .instanceProvider(new ModelTypeInstanceProvider<CamundaConnector>() {
                public CamundaConnector newInstance(ModelTypeInstanceContext instanceContext) {
                    return new CamundaConnectorImpl(instanceContext);
                }
            });

        SequenceBuilder sequenceBuilder = typeBuilder.sequence();

        // <camunda:connectorId> is mandatory for a connector definition.
        camundaConnectorIdChild = sequenceBuilder.element(CamundaConnectorId.class)
            .required()
            .build();

        // <camunda:inputOutput> mapping is optional.
        camundaInputOutputChild = sequenceBuilder.element(CamundaInputOutput.class)
            .build();

        typeBuilder.build();
    }

    public CamundaConnectorImpl(ModelTypeInstanceContext instanceContext) {
        super(instanceContext);
    }

    /** @return the connector id child element, or null if not present */
    public CamundaConnectorId getCamundaConnectorId() {
        return camundaConnectorIdChild.getChild(this);
    }

    public void setCamundaConnectorId(CamundaConnectorId camundaConnectorId) {
        camundaConnectorIdChild.setChild(this, camundaConnectorId);
    }

    /** @return the input/output mapping child element, or null if not present */
    public CamundaInputOutput getCamundaInputOutput() {
        return camundaInputOutputChild.getChild(this);
    }

    public void setCamundaInputOutput(CamundaInputOutput camundaInputOutput) {
        camundaInputOutputChild.setChild(this, camundaInputOutput);
    }
}
| camunda/camunda-bpmn-model | src/main/java/org/camunda/bpm/model/bpmn/impl/instance/camunda/CamundaConnectorImpl.java | Java | apache-2.0 | 3,445 |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.edu.learning.stepic;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupActivity;
import com.jetbrains.edu.learning.courseGeneration.StudyProjectGenerator;
import org.jetbrains.annotations.NotNull;
import java.util.List;
/**
 * Post-startup activity that refreshes the cached course list from Stepik
 * when the local cache is empty. Skipped entirely in unit-test mode.
 */
public class StudyCoursesUpdater implements StartupActivity {

    /** Locates this activity instance among the registered post-startup extensions. */
    public static StudyCoursesUpdater getInstance() {
        for (StartupActivity activity : Extensions.getExtensions(StartupActivity.POST_STARTUP_ACTIVITY)) {
            if (activity instanceof StudyCoursesUpdater) {
                return (StudyCoursesUpdater) activity;
            }
        }
        throw new UnsupportedOperationException("could not find self");
    }

    @Override
    public void runActivity(@NotNull final Project project) {
        final Application application = ApplicationManager.getApplication();
        // Skip in tests, and skip when the cache already has courses.
        if (application.isUnitTestMode() || !checkNeeded()) {
            return;
        }
        // Fetching courses hits the network, so do it off the EDT.
        application.executeOnPooledThread(new Runnable() {
            @Override
            public void run() {
                final List<CourseInfo> fetched = EduStepicConnector.getCourses();
                StudyProjectGenerator.flushCache(fetched);
            }
        });
    }

    /** @return true when the course cache is empty and needs refreshing */
    public static boolean checkNeeded() {
        return StudyProjectGenerator.getCoursesFromCache().isEmpty();
    }
}
| Soya93/Extract-Refactoring | python/educational-core/student/src/com/jetbrains/edu/learning/stepic/StudyCoursesUpdater.java | Java | apache-2.0 | 2,161 |
///<reference path="app.js" />
define(['Dexie', 'Dexie.Observable', './console'], function (Dexie, DexieObservable, console) {
    // Create the database handle with the Observable addon applied explicitly.
    var db = new Dexie("appdb2", { addons: [DexieObservable] });

    // Schema: a single 'contacts' table with an auto-incremented primary key
    // and secondary indexes on first and last name.
    db.version(1).stores({ contacts: '++id,first,last' });

    // Seed initial data the first time the database is created.
    db.on('populate', function () {
        console.log("Populating data first time");
        db.contacts.add({ first: 'Arnold', last: 'Fitzgerald' });
    });

    db.open();

    return db;
});
| YuriSolovyov/Dexie.js | samples/requirejs-with-addons/scripts/db.js | JavaScript | apache-2.0 | 643 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.runtime.util;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.ExecutionListener;
/**
* @author: Johannes Heinemann
*/
public class IncrementCounterListener implements ExecutionListener {

  // Shared test counter; tests read and reset this field directly.
  // NOTE(review): the ++ below is not atomic, so increments could be lost under
  // concurrent executions — presumably fine for single-threaded test scenarios.
  public static int counter = 0;

  @Override
  public void notify(DelegateExecution execution) throws Exception {
    counter++;
  }
}
| AlexMinsk/camunda-bpm-platform | engine/src/test/java/org/camunda/bpm/engine/test/api/runtime/util/IncrementCounterListener.java | Java | apache-2.0 | 977 |
package foo;
// Appears to be test data for class hot-reload scenarios (note the "running
// again!" message) — confirm against the harness before modifying.
public class ControllerB2 extends grails.TopB {
  public void foo() {
    super.foo();
    // NOTE(review): the printed text is likely asserted by the test harness;
    // do not change it without checking.
    System.out.println("ControllerB.foo() running again!");
  }
}
| spring-projects/spring-loaded | testdata/src/main/java/foo/ControllerB2.java | Java | apache-2.0 | 162 |
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/rtp_rtcp/source/rtp_format_h264.h"
namespace webrtc {
void FuzzOneInput(const uint8_t* data, size_t size) {
RtpDepacketizerH264 depacketizer;
RtpDepacketizer::ParsedPayload parsed_payload;
depacketizer.Parse(&parsed_payload, data, size);
}
} // namespace webrtc
| wangcy6/storm_app | frame/c++/webrtc-master/test/fuzzers/h264_depacketizer_fuzzer.cc | C++ | apache-2.0 | 701 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.conduits;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.concurrent.TimeUnit;
import io.undertow.UndertowLogger;
import io.undertow.UndertowMessages;
import io.undertow.UndertowOptions;
import io.undertow.server.OpenListener;
import io.undertow.util.WorkerUtils;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.IoUtils;
import org.xnio.Options;
import org.xnio.StreamConnection;
import org.xnio.XnioExecutor;
import org.xnio.channels.ReadTimeoutException;
import org.xnio.channels.StreamSinkChannel;
import org.xnio.conduits.AbstractStreamSourceConduit;
import org.xnio.conduits.ConduitStreamSourceChannel;
import org.xnio.conduits.ReadReadyHandler;
import org.xnio.conduits.StreamSourceConduit;
/**
* Wrapper for read timeout. This should always be the first wrapper applied to the underlying channel.
*
* @author Stuart Douglas
* @see org.xnio.Options#READ_TIMEOUT
*/
public final class ReadTimeoutStreamSourceConduit extends AbstractStreamSourceConduit<StreamSourceConduit> {
private XnioExecutor.Key handle;
private final StreamConnection connection;
private volatile long expireTime = -1;
private final OpenListener openListener;
private static final int FUZZ_FACTOR = 50; //we add 50ms to the timeout to make sure the underlying channel has actually timed out
private volatile boolean expired;
// Scheduled on the connection's IO thread when a read timeout is armed. If the
// deadline has been pushed forward since scheduling, it re-schedules itself for
// the remaining time (plus FUZZ_FACTOR); otherwise it marks the conduit expired,
// fires the read/write listeners so they can observe the timeout, and then
// closes the connection.
private final Runnable timeoutCommand = new Runnable() {
    @Override
    public void run() {
        handle = null;
        if (expireTime == -1) {
            // Timeout was disarmed after this task was scheduled; nothing to do.
            return;
        }
        long current = System.currentTimeMillis();
        if (current < expireTime) {
            //timeout has been bumped, re-schedule
            handle = WorkerUtils.executeAfter(connection.getIoThread(), timeoutCommand, (expireTime - current) + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
            return;
        }
        UndertowLogger.REQUEST_LOGGER.tracef("Timing out channel %s due to inactivity", connection.getSourceChannel());
        // Set the expired flag under the conduit lock so readers observe it
        // consistently (presumably checked by the read path — see handleReadTimeout).
        synchronized (ReadTimeoutStreamSourceConduit.this) {
            expired = true;
        }
        boolean readResumed = connection.getSourceChannel().isReadResumed();
        ChannelListener<? super ConduitStreamSourceChannel> readListener = connection.getSourceChannel().getReadListener();
        if (readResumed) {
            ChannelListeners.invokeChannelListener(connection.getSourceChannel(), readListener);
        }
        if (connection.getSinkChannel().isWriteResumed()) {
            ChannelListeners.invokeChannelListener(connection.getSinkChannel(), connection.getSinkChannel().getWriteListener());
        }
        // close only after invoking listeners, to allow space for listener getting ReadTimeoutException
        IoUtils.safeClose(connection);
    }
};
public ReadTimeoutStreamSourceConduit(final StreamSourceConduit delegate, StreamConnection connection, OpenListener openListener) {
super(delegate);
this.connection = connection;
this.openListener = openListener;
final ReadReadyHandler handler = new ReadReadyHandler.ChannelListenerHandler<>(connection.getSourceChannel());
delegate.setReadReadyHandler(new ReadReadyHandler() {
@Override
public void readReady() {
handler.readReady();
}
@Override
public void forceTermination() {
cleanup();
handler.forceTermination();
}
@Override
public void terminated() {
cleanup();
handler.terminated();
}
});
}
private void handleReadTimeout(final long ret) throws IOException {
if (!connection.isOpen()) {
cleanup();
return;
}
if (ret == -1) {
cleanup();
return;
}
Integer timeout = getTimeout();
if (timeout == null || timeout <= 0) {
return;
}
final long currentTime = System.currentTimeMillis();
if (ret == 0) {
final long expireTimeVar = expireTime;
if (expireTimeVar != -1 && currentTime > expireTimeVar) {
IoUtils.safeClose(connection);
throw UndertowMessages.MESSAGES.readTimedOut(this.getTimeout());
}
}
expireTime = currentTime + timeout;
if (handle == null) {
handle = connection.getIoThread().executeAfter(timeoutCommand, timeout, TimeUnit.MILLISECONDS);
}
}
@Override
public long transferTo(final long position, final long count, final FileChannel target) throws IOException {
checkExpired();
long ret = super.transferTo(position, count, target);
handleReadTimeout(ret);
return ret;
}
@Override
public long transferTo(final long count, final ByteBuffer throughBuffer, final StreamSinkChannel target) throws IOException {
checkExpired();
long ret = super.transferTo(count, throughBuffer, target);
handleReadTimeout(ret);
return ret;
}
@Override
public long read(final ByteBuffer[] dsts, final int offset, final int length) throws IOException {
checkExpired();
long ret = super.read(dsts, offset, length);
handleReadTimeout(ret);
return ret;
}
@Override
public int read(final ByteBuffer dst) throws IOException {
checkExpired();
int ret = super.read(dst);
handleReadTimeout(ret);
return ret;
}
@Override
public void awaitReadable() throws IOException {
checkExpired();
Integer timeout = getTimeout();
if (timeout != null && timeout > 0) {
super.awaitReadable(timeout + FUZZ_FACTOR, TimeUnit.MILLISECONDS);
} else {
super.awaitReadable();
}
}
@Override
public void awaitReadable(long time, TimeUnit timeUnit) throws IOException {
checkExpired();
Integer timeout = getTimeout();
if (timeout != null && timeout > 0) {
long millis = timeUnit.toMillis(time);
super.awaitReadable(Math.min(millis, timeout + FUZZ_FACTOR), TimeUnit.MILLISECONDS);
} else {
super.awaitReadable(time, timeUnit);
}
}
private Integer getTimeout() {
Integer timeout = 0;
try {
timeout = connection.getSourceChannel().getOption(Options.READ_TIMEOUT);
} catch (IOException ignore) {
// should never happen
}
Integer idleTimeout = openListener.getUndertowOptions().get(UndertowOptions.IDLE_TIMEOUT);
if ((timeout == null || timeout <= 0) && idleTimeout != null) {
timeout = idleTimeout;
} else if (timeout != null && idleTimeout != null && idleTimeout > 0) {
timeout = Math.min(timeout, idleTimeout);
}
return timeout;
}
@Override
public void terminateReads() throws IOException {
checkExpired();
super.terminateReads();
cleanup();
}
private void cleanup() {
if (handle != null) {
handle.remove();
handle = null;
expireTime = -1;
}
}
@Override
public void suspendReads() {
super.suspendReads();
cleanup();
}
private void checkExpired() throws ReadTimeoutException {
synchronized (this) {
if (expired) {
throw UndertowMessages.MESSAGES.readTimedOut(System.currentTimeMillis());
}
}
}
public String toString() {
return super.toString() + " (next: " + next + ")";
}
}
| rhusar/undertow | core/src/main/java/io/undertow/conduits/ReadTimeoutStreamSourceConduit.java | Java | apache-2.0 | 8,603 |
// Copyright (c) 2019 The Jaeger Authors.
// Copyright (c) 2017 Uber Technologies, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cache
import (
"container/list"
"sync"
"time"
)
// LRU is a concurrent fixed size cache that evicts elements in LRU order as well as by TTL.
type LRU struct {
	mux      sync.Mutex               // guards all fields below
	byAccess *list.List               // entries ordered most-recently-used first
	byKey    map[string]*list.Element // key -> element in byAccess
	maxSize  int                      // capacity; exceeding it evicts from the back
	ttl      time.Duration            // per-entry time-to-live; 0 disables expiry
	TimeNow  func() time.Time         // clock source, overridable for tests
	onEvict  EvictCallback            // optional hook invoked when an entry is removed
}
// NewLRU creates a new LRU cache with default options.
// Equivalent to NewLRUWithOptions(maxSize, nil).
func NewLRU(maxSize int) *LRU {
	return NewLRUWithOptions(maxSize, nil)
}
// NewLRUWithOptions creates a new LRU cache with the given options.
// A nil opts is treated as the zero Options value. If opts.TimeNow is nil it
// is filled in with time.Now (note: this mutates the caller's Options, which
// matches the original behavior).
func NewLRUWithOptions(maxSize int, opts *Options) *LRU {
	if opts == nil {
		opts = &Options{}
	}
	if opts.TimeNow == nil {
		// Fall back to the real clock when no test clock is supplied.
		opts.TimeNow = time.Now
	}
	cache := &LRU{
		maxSize:  maxSize,
		ttl:      opts.TTL,
		TimeNow:  opts.TimeNow,
		onEvict:  opts.OnEvict,
		byAccess: list.New(),
		byKey:    make(map[string]*list.Element, opts.InitialCapacity),
	}
	return cache
}
// Get retrieves the value stored under the given key.
// A hit refreshes the entry's recency; an expired entry is evicted and nil is
// returned, as for a miss.
func (c *LRU) Get(key string) interface{} {
	c.mux.Lock()
	defer c.mux.Unlock()

	elt := c.byKey[key]
	if elt == nil {
		return nil
	}
	entry := elt.Value.(*cacheEntry)
	expired := !entry.expiration.IsZero() && c.TimeNow().After(entry.expiration)
	if expired {
		// Entry has outlived its TTL: evict it and report a miss.
		if c.onEvict != nil {
			c.onEvict(entry.key, entry.value)
		}
		c.byAccess.Remove(elt)
		delete(c.byKey, entry.key)
		return nil
	}
	// Refresh recency on a successful lookup.
	c.byAccess.MoveToFront(elt)
	return entry.value
}
// Put puts a new value associated with a given key, returning the existing
// value (if present).
func (c *LRU) Put(key string, value interface{}) interface{} {
	c.mux.Lock()
	defer c.mux.Unlock()
	return c.putWithMutexHold(key, value, c.byKey[key])
}
// CompareAndSwap puts a new value associated with a given key if the existing
// value matches oldValue. It returns itemInCache as the element in cache after
// the function is executed, and replaced as true if the value was replaced,
// false otherwise. A missing entry only matches an expected oldValue of nil.
func (c *LRU) CompareAndSwap(key string, oldValue, newValue interface{}) (itemInCache interface{}, replaced bool) {
	c.mux.Lock()
	defer c.mux.Unlock()

	existing := c.byKey[key]
	if existing == nil {
		// No entry: the swap only proceeds when the caller expected absence.
		if oldValue != nil {
			return nil, false
		}
	} else {
		current := existing.Value.(*cacheEntry)
		if current.value != oldValue {
			// Mismatch: report the value actually in the cache.
			return current.value, false
		}
	}
	c.putWithMutexHold(key, newValue, existing)
	return newValue, true
}
// putWithMutexHold populates the cache and returns the previously stored
// value, if any (nil for a fresh insert). The caller must hold the c.mux
// mutex before calling.
func (c *LRU) putWithMutexHold(key string, value interface{}, elt *list.Element) interface{} {
	// Update in place when the key is already present.
	if elt != nil {
		existing := elt.Value.(*cacheEntry)
		previous := existing.value
		existing.value = value
		if c.ttl != 0 {
			existing.expiration = c.TimeNow().Add(c.ttl)
		}
		c.byAccess.MoveToFront(elt)
		return previous
	}

	// Fresh insert at the front (most recently used position).
	newEntry := &cacheEntry{
		key:   key,
		value: value,
	}
	if c.ttl != 0 {
		newEntry.expiration = c.TimeNow().Add(c.ttl)
	}
	c.byKey[key] = c.byAccess.PushFront(newEntry)

	// Evict least-recently-used entries until back within capacity.
	for len(c.byKey) > c.maxSize {
		victim := c.byAccess.Remove(c.byAccess.Back()).(*cacheEntry)
		if c.onEvict != nil {
			c.onEvict(victim.key, victim.value)
		}
		delete(c.byKey, victim.key)
	}
	return nil
}
// Delete deletes a key, value pair associated with a key.
// Deleting a missing key is a no-op; a successful delete fires onEvict.
func (c *LRU) Delete(key string) {
	c.mux.Lock()
	defer c.mux.Unlock()

	elt, ok := c.byKey[key]
	if !ok {
		return
	}
	removed := c.byAccess.Remove(elt).(*cacheEntry)
	if c.onEvict != nil {
		c.onEvict(removed.key, removed.value)
	}
	delete(c.byKey, key)
}
// Size returns the number of entries currently in the lru, useful if cache is
// not full.
func (c *LRU) Size() int {
	c.mux.Lock()
	n := len(c.byKey)
	c.mux.Unlock()
	return n
}
// cacheEntry is the payload stored in each list element: the key is kept so
// map cleanup is possible when evicting from the access list.
type cacheEntry struct {
	key        string
	expiration time.Time // zero value means the entry never expires
	value      interface{}
}
| uber/jaeger | pkg/cache/lru.go | GO | apache-2.0 | 4,507 |
require_relative '../../puppet_x/puppetlabs/property/tag.rb'
require_relative '../../puppet_x/puppetlabs/property/region.rb'
require_relative '../../puppet_x/puppetlabs/aws_ingress_rules_parser'
# Puppet resource type modelling an AWS EC2 security group. Declares the
# namevar, region, ingress rules, tags, description, VPC association and the
# read-only group id.
Puppet::Type.newtype(:ec2_securitygroup) do
  @doc = 'type representing an EC2 security group'

  ensurable

  # Name of the security group; this is the namevar.
  newparam(:name, namevar: true) do
    desc 'the name of the security group'
    validate do |value|
      fail 'security groups must have a name' if value == ''
      fail 'name should be a String' unless value.is_a?(String)
    end
  end

  # AWS region; shared validation lives in the AwsRegion property class.
  newproperty(:region, :parent => PuppetX::Property::AwsRegion) do
    desc 'the region in which to launch the security group'
  end

  # Ingress rules; array_matching :all so the whole rule set is compared at once.
  newproperty(:ingress, :array_matching => :all) do
    desc 'rules for ingress traffic'
    # In-sync when the rules parser finds nothing to create and nothing to
    # delete relative to the current state. `should` is deep-copied via
    # Marshal so the parser can mutate its working copy safely.
    def insync?(is)
      for_comparison = Marshal.load(Marshal.dump(should))
      parser = PuppetX::Puppetlabs::AwsIngressRulesParser.new(for_comparison)
      to_create = parser.rules_to_create(is)
      to_delete = parser.rules_to_delete(is)
      to_create.empty? && to_delete.empty?
    end
    # Validation runs per element, so each individual rule must be a Hash.
    validate do |value|
      fail 'ingress should be a Hash' unless value.is_a?(Hash)
    end
  end

  # Resource tags; shared validation/munging lives in the AwsTag property class.
  newproperty(:tags, :parent => PuppetX::Property::AwsTag) do
    desc 'the tags for the security group'
  end

  newproperty(:description) do
    desc 'a short description of the group'
    validate do |value|
      fail 'description cannot be blank' if value == ''
      fail 'description should be a String' unless value.is_a?(String)
    end
  end

  newproperty(:vpc) do
    desc 'A VPC to which the group should be associated'
    validate do |value|
      fail 'vpc should be a String' unless value.is_a?(String)
    end
  end

  # Read-only identifier assigned by AWS.
  newproperty(:id) do
    desc 'The unique identifier for the security group'
  end

  # Ensure any referenced VPC resource is managed before this group.
  autorequire(:ec2_vpc) do
    self[:vpc]
  end
end
| gregohardy/puppetlabs-aws | lib/puppet/type/ec2_securitygroup.rb | Ruby | apache-2.0 | 1,842 |
/*!
* Copyright 2014 Apereo Foundation (AF) Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
define(['jquery', 'oae.core'], function ($, oae) {

    // Widget entry point. `uid` is the DOM id of this widget instance's container.
    return function (uid) {

        // The widget container
        var $rootel = $('#' + uid);

        // Holds the current state of the user profile as it is updated
        var profile = _.extend({}, oae.data.me);

        // Holds an email address that is pending verification from the user, if
        // applicable
        var unverifiedEmail = null;

        /**
         * Determine if the current profile has a valid display name
         *
         * @return {Boolean}    Whether or not the profile has a valid display name
         */
        var isValidDisplayName = function() {
            return oae.api.util.validation().isValidDisplayName(profile.displayName);
        };

        /**
         * Determine if the current profile has a valid email
         *
         * @return {Boolean}    Whether or not the profile has a valid email
         */
        var isValidEmail = function() {
            return oae.api.util.validation().isValidEmail(profile.email);
        };

        /**
         * Determine if the current profile is valid, such that it would allow a user to dismiss the
         * user profile modal
         *
         * @return {Boolean}    Whether or not the profile is valid in its current state
         */
        var isValidProfile = function() {
            return (isValidEmail() && isValidDisplayName());
        };

        /**
         * Show the main panel
         */
        var showMainPanel = function() {
            $('#editprofile-panel-email-container').hide();
            $('#editprofile-panel-main-container').show();
        };

        /**
         * Show the email panel
         */
        var showEmailPanel = function() {
            $('#editprofile-panel-main-container').hide();
            $('#editprofile-panel-email-container').show();
        };

        /**
         * Show the appropriate panel based on the user's profile state
         */
        var showDefaultPanel = function() {
            // Initialize the email verification status
            oae.api.user.getEmailVerificationStatus(oae.data.me.id, function(err, email) {
                // Ignore issues checking for a pending email verification, as there being no
                // pending verification is the 99.999% use-case and we wouldn't want to annoy
                // uninterested users with an error notification or anything
                unverifiedEmail = email;

                if (isValidDisplayName() && !isValidEmail() && unverifiedEmail) {
                    // If the user profile is awaiting a verified email, but all
                    // the other information is accurate, we take them directly
                    // to the panel that indicates they need to verify their
                    // email
                    renderEditProfileEmailPanel();
                } else {
                    renderEditProfileMainPanel();
                }
            });
        };

        /**
         * Render the edit profile "main" panel with validation and switch the current modal view to
         * the "main" panel
         */
        var renderEditProfileMainPanel = function() {
            // If the display name is not valid, clear it to inform the template that the user
            // has no real display name
            if (!isValidDisplayName()) {
                profile.displayName = null;

                // Profiles with invalid display names will have had visibility set to private, so we
                // reset it to the tenant's default visibility
                // @see https://github.com/oaeproject/3akai-ux/pull/4100
                var tenantVisibility = oae.api.config.getValue('oae-principals', 'user', 'visibility');
                profile.visibility = tenantVisibility;
            }

            // Render the form elements
            oae.api.util.template().render($('#editprofile-panel-main-template', $rootel), {
                'isValidProfile': isValidProfile(),
                'profile': profile,
                'unverifiedEmail': unverifiedEmail
            }, $('#editprofile-panel-main-container', $rootel));

            // Detect changes in the form and enable the submit button
            $('#editprofile-form', $rootel).on(oae.api.util.getFormChangeEventNames(), function() {
                $('#editprofile-panel-main-container button[type="submit"]', $rootel).prop('disabled', false);
            });

            // Initialize jQuery validate on the form
            var validateOpts = {
                'submitHandler': editProfile,
                'methods': {
                    'displayname': {
                        'method': oae.api.util.validation().isValidDisplayName,
                        'text': oae.api.i18n.translate('__MSG__PLEASE_ENTER_A_VALID_NAME__')
                    }
                }
            };
            oae.api.util.validation().validate($('#editprofile-form', $rootel), validateOpts);

            // Switch the view to the main panel
            showMainPanel();
        };

        /**
         * Render the edit profile "email" panel that instructs the user how to proceed with
         * verifying their email. It will also switch the view to the "email" panel.
         */
        var renderEditProfileEmailPanel = function() {
            // Render the email verification instruction template
            oae.api.util.template().render($('#editprofile-panel-email-template', $rootel), {
                'isValidProfile': isValidProfile(),
                'profile': profile,
                'unverifiedEmail': unverifiedEmail
            }, $('#editprofile-panel-email-container', $rootel));

            // Switch the view to the email panel
            showEmailPanel();
        };

        /**
         * Perform the edit profile action
         */
        var editProfile = function() {
            // Disable the form while the update request is in flight
            $('#editprofile-form *', $rootel).prop('disabled', true);

            var newDisplayName = $.trim($('#editprofile-name', $rootel).val());
            var newEmail = $.trim($('#editprofile-email', $rootel).val()).toLowerCase();
            var newVisibility = $('.oae-large-options-container input[type="radio"]:checked', $rootel).val();

            var params = {
                'displayName': newDisplayName,
                'email': newEmail,
                'visibility': newVisibility
            };

            // Determine if this update constitutes a change in email. If so we will need to notify
            // the user that the new email is pending verification
            var isEmailChange = (newEmail !== oae.data.me.email);

            oae.api.user.updateUser(params, function (err, data) {
                if (!err) {
                    // Update the user profile in state
                    profile = data;

                    // Notify the rest of the UI widgets that the profile has been updated
                    $(document).trigger('oae.editprofile.done', data);

                    if (!isEmailChange) {
                        // If the update succeeded and didn't have an email change, close the modal
                        // while showing a notification
                        closeModal();
                        oae.api.util.notification(
                            oae.api.i18n.translate('__MSG__PROFILE_EDITED__'),
                            oae.api.i18n.translate('__MSG__PROFILE_DETAILS_EDIT_SUCCESS__', 'editprofile'));
                    } else {
                        // Since the email is updated, a verification email will be sent. We should
                        // tell the user that they must validate their email address from their
                        // email inbox
                        unverifiedEmail = newEmail;
                        renderEditProfileEmailPanel();
                    }
                } else {
                    // If the update failed, enable the form and show an error notification
                    oae.api.util.notification(
                        oae.api.i18n.translate('__MSG__PROFILE_NOT_EDITED__'),
                        oae.api.i18n.translate('__MSG__PROFILE_DETAILS_EDIT_FAIL__', 'editprofile'),
                        'error');

                    // Enable the form
                    $('#editprofile-form *', $rootel).prop('disabled', false);
                }
            });

            // Avoid default form submit behavior
            return false;
        };

        /**
         * Reset the widget to its original state when the modal dialog is opened and closed.
         * Ideally this would only be necessary when the modal is hidden, but IE10+ fires `input`
         * events while Bootstrap is rendering the modal, and those events can "undo" parts of the
         * reset. Hooking into the `shown` event provides the chance to compensate.
         */
        var setUpReset = function() {
            $('#editprofile-modal', $rootel).on('shown.bs.modal', showDefaultPanel);
            $('#editprofile-modal', $rootel).on('hidden.bs.modal', function (evt) {
                // Reset the form
                var $form = $('#editprofile-form', $rootel);
                $form[0].reset();
                oae.api.util.validation().clear($form);

                // Enable the form
                $('#editprofile-form *', $rootel).prop('disabled', false);
                $('#editprofile-form button[type="submit"]', $rootel).prop('disabled', true);

                showMainPanel();
            });
        };

        /**
         * Apply the listeners to the document that will launch the editprofile modal
         */
        var setUpModalListeners = function() {
            $(document).on('click', '.oae-trigger-editprofile', showModal);
            $(document).on('oae.trigger.editprofile', showModal);
        };

        /**
         * Show the edit profile modal and render the appropriate panel
         */
        var showModal = function() {
            // Static backdrop: the modal cannot be dismissed by clicking outside it
            $('#editprofile-modal', $rootel).modal({
                'backdrop': 'static'
            });
            showDefaultPanel();
        };

        /**
         * Close the edit profile modal
         */
        var closeModal = function() {
            $('#editprofile-modal', $rootel).modal('hide');
            if (oae.data.me.needsToAcceptTC) {
                // It is possible that we entered the edit profile modal to
                // clean up our user profile before accepting the terms and
                // conditions (see `oae.api.js` function `setupPreUseActions`).
                // Therefore we need to ensure we segue to the terms and
                // conditions widget after we close the editprofile modal
                oae.api.widget.insertWidget('termsandconditions', null, null, true);
            }
        };

        /**
         * Bind all the action listeners needed for the user to interact with the "main" panel in
         * the edit profile modal
         */
        var bindEditProfileMainPanelListeners = function() {
            $('#editprofile-modal', $rootel).on('shown.bs.modal', function() {
                // Set focus to the display name field
                $('#editprofile-name', $rootel).focus();
            });

            // Catch changes in the visibility radio group
            $rootel.on('change', '#editprofile-panel-main-container .oae-large-options-container input[type="radio"]', function() {
                $('.oae-large-options-container label', $rootel).removeClass('checked');
                $(this).parents('label').addClass('checked');
            });

            // When the "Resend Verification" button is clicked, resend the email verification
            $rootel.on('click', '#editprofile-email-verification .editprofile-email-verification-action button', function() {
                // Disable all actions in the modal
                $('#editprofile-form *', $rootel).prop('disabled', true);

                oae.api.user.resendEmailToken(oae.data.me.id, function(err) {
                    if (!err) {
                        // If the token resent successfully show a notification
                        oae.api.util.notification(
                            oae.api.i18n.translate('__MSG__VERIFICATION_EMAIL_SENT__', 'editprofile'),
                            oae.api.i18n.translate('__MSG__A_VERIFICATION_EMAIL_HAS_BEEN_SENT_TO_UNVERIFIED_EMAIL__', 'editprofile', {
                                'unverifiedEmail': unverifiedEmail
                            }));
                    } else {
                        // If the token failed to resend, show a notification
                        oae.api.util.notification(
                            oae.api.i18n.translate('__MSG__VERIFICATION_EMAIL_FAILED__', 'editprofile'),
                            oae.api.i18n.translate('__MSG__A_VERIFICATION_EMAIL_FAILED_TO_BE_SENT_TO_UNVERIFIED_EMAIL__', 'editprofile', {
                                'unverifiedEmail': unverifiedEmail
                            }),
                            'error');
                    }

                    // Re-enable the form
                    $('#editprofile-form *', $rootel).prop('disabled', false);
                });
            });

            // When the "Cancel Verification" button is clicked, delete the pending email verification
            // and close the container that indicates there is a pending verification
            $rootel.on('click', '#editprofile-email-verification .editprofile-email-verification-cancel button', function(evt) {
                // Allow the modal to be saved now
                $('#editprofile-panel-main-container button[type="submit"]', $rootel).prop('disabled', false);

                oae.api.user.deletePendingEmailVerification(function(err) {
                    if (!err) {
                        unverifiedEmail = null;

                        // If cancelling succeeded, simply remove the email verification panel
                        $('#editprofile-email-verification', $rootel).slideUp();
                    } else {
                        // If the token failed to resend, show a notification
                        oae.api.util.notification(
                            oae.api.i18n.translate('__MSG__CANCEL_EMAIL_VERIFICATION_FAILED__', 'editprofile'),
                            oae.api.i18n.translate('__MSG__AN_ERROR_OCCURRED_WHILE_CANCELLING_THE_EMAIL_VERIFICATION__', 'editprofile'),
                            'error');
                    }
                });

                evt.preventDefault();
            });
        };

        /**
         * Bind all the action listeners needed for the user to interact with the "email" panel in
         * the edit profile modal
         */
        var bindEditProfileEmailPanelListeners = function() {
            // When "Done" is clicked, close the modal
            $rootel.on('click', '#editprofile-panel-email-container .modal-footer button.btn-primary', function() {
                closeModal();
            });

            // When the user chooses to go back, re-render and enable the main panel
            $rootel.on('click', '#editprofile-panel-email-container .modal-footer button.btn-link', function() {
                renderEditProfileMainPanel();
            });
        };

        // Widget initialization: wire up reset handling, modal triggers and panel listeners
        setUpReset();
        setUpModalListeners();
        bindEditProfileMainPanelListeners();
        bindEditProfileEmailPanelListeners();
    };
});
| nicolaasmatthijs/3akai-ux | node_modules/oae-core/editprofile/js/editprofile.js | JavaScript | apache-2.0 | 16,154 |
# ./darwinpush/xb/raw/sm.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:8eb48f8f0e727f488907a816c69d6ed98ba221c7
# Generated 2015-04-23 16:42:14.513978 by PyXB version 1.2.4 using Python 3.4.1.final.0
# Namespace http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1 [xmlns:sm]
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:5049f1de-e9cf-11e4-bb50-a0481ca50ab0')

# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)

# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import darwinpush.xb.ct as _ImportedBinding_darwinpush_xb_ct

# NOTE: All namespace declarations are reserved within the binding
# Register the target namespace for this schema's bindings, creating it if needed.
Namespace = pyxb.namespace.NamespaceForURI('http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.

    @param xml_text An XML document. This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.

    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.

    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser. You might pass the URI from which
    the document was obtained.
    """
    # When the SAX style is not active, fall back to the DOM-based parser.
    if pyxb._XMLStyle != pyxb.XMLStyle_saxer:
        dom = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
    if default_namespace is None:
        default_namespace = Namespace.fallbackNamespace()
    parser = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
    handler = parser.getContentHandler()
    encoded = xml_text
    if isinstance(encoded, _six.text_type):
        # SAX consumes bytes; encode text input using the configured encoding.
        encoded = encoded.encode(pyxb._InputEncoding)
    parser.parse(io.BytesIO(encoded))
    return handler.rootObject()
def CreateFromDOM (node, default_namespace=None):
    """Create a Python instance from the given DOM node.
    The node tag must correspond to an element declaration in this module.

    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    # Resolve the namespace to this module's fallback when none was supplied.
    ns = Namespace.fallbackNamespace() if default_namespace is None else default_namespace
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, ns)
# Atomic simple type: {http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1}MsgCategoryType
class MsgCategoryType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):

    """The category of operator message"""

    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'MsgCategoryType')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 15, 1)
    _Documentation = 'The category of operator message'
# Register the permitted enumeration values on the type's constraining facet.
MsgCategoryType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=MsgCategoryType, enum_prefix=None)
MsgCategoryType.Train = MsgCategoryType._CF_enumeration.addEnumeration(unicode_value='Train', tag='Train')
MsgCategoryType.Station = MsgCategoryType._CF_enumeration.addEnumeration(unicode_value='Station', tag='Station')
MsgCategoryType.Connections = MsgCategoryType._CF_enumeration.addEnumeration(unicode_value='Connections', tag='Connections')
MsgCategoryType.System = MsgCategoryType._CF_enumeration.addEnumeration(unicode_value='System', tag='System')
MsgCategoryType.Misc = MsgCategoryType._CF_enumeration.addEnumeration(unicode_value='Misc', tag='Misc')
MsgCategoryType.PriorTrains = MsgCategoryType._CF_enumeration.addEnumeration(unicode_value='PriorTrains', tag='PriorTrains')
MsgCategoryType.PriorOther = MsgCategoryType._CF_enumeration.addEnumeration(unicode_value='PriorOther', tag='PriorOther')
MsgCategoryType._InitializeFacetMap(MsgCategoryType._CF_enumeration)
# Make the type discoverable through the namespace's category registry.
Namespace.addCategoryObject('typeBinding', 'MsgCategoryType', MsgCategoryType)
# Atomic simple type: {http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1}MsgSeverityType
class MsgSeverityType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):

    """The severity of operator message"""

    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'MsgSeverityType')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 29, 1)
    _Documentation = 'The severity of operator message'
# Register the permitted severity levels ('0'..'3') on the constraining facet.
MsgSeverityType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=MsgSeverityType, enum_prefix=None)
MsgSeverityType.n0 = MsgSeverityType._CF_enumeration.addEnumeration(unicode_value='0', tag='n0')
MsgSeverityType.n1 = MsgSeverityType._CF_enumeration.addEnumeration(unicode_value='1', tag='n1')
MsgSeverityType.n2 = MsgSeverityType._CF_enumeration.addEnumeration(unicode_value='2', tag='n2')
MsgSeverityType.n3 = MsgSeverityType._CF_enumeration.addEnumeration(unicode_value='3', tag='n3')
MsgSeverityType._InitializeFacetMap(MsgSeverityType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'MsgSeverityType', MsgSeverityType)
# Complex type [anonymous] with content type MIXED
class CTD_ANON (pyxb.binding.basis.complexTypeDefinition):
    """The content of the message"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_MIXED
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 58, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # Element {http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1}p uses Python identifier p
    __p = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'p'), 'p', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_CTD_ANON_httpwww_thalesgroup_comrttiPushPortStationMessagesv1p', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 84, 1), )

    p = property(__p.value, __p.set, None, 'Defines an HTML paragraph')

    # Element {http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1}a uses Python identifier a
    __a = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'a'), 'a', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_CTD_ANON_httpwww_thalesgroup_comrttiPushPortStationMessagesv1a', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 94, 1), )

    a = property(__a.value, __a.set, None, 'Defines an HTML anchor')

    # Wire the element declarations into PyXB's content model for this type.
    _ElementMap.update({
        __p.name() : __p,
        __a.name() : __a
    })
    _AttributeMap.update({

    })
# Complex type [anonymous] with content type MIXED
class CTD_ANON_ (pyxb.binding.basis.complexTypeDefinition):
    """Defines an HTML paragraph"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_MIXED
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 88, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType

    # Element {http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1}a uses Python identifier a
    __a = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'a'), 'a', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_CTD_ANON__httpwww_thalesgroup_comrttiPushPortStationMessagesv1a', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 94, 1), )

    a = property(__a.value, __a.set, None, 'Defines an HTML anchor')

    # Wire the element declaration into PyXB's content model for this type.
    _ElementMap.update({
        __a.name() : __a
    })
    _AttributeMap.update({

    })
# Complex type [anonymous] with content type SIMPLE
class CTD_ANON_2 (pyxb.binding.basis.complexTypeDefinition):
    """Defines an HTML anchor"""
    _TypeDefinition = pyxb.binding.datatypes.string
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 98, 2)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.string

    # Attribute href uses Python identifier href (required by the schema)
    __href = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'href'), 'href', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_CTD_ANON_2_href', pyxb.binding.datatypes.string, required=True)
    __href._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 101, 5)
    __href._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 101, 5)

    href = property(__href.value, __href.set, None, None)

    _ElementMap.update({

    })
    # Wire the attribute declaration into PyXB's content model for this type.
    _AttributeMap.update({
        __href.name() : __href
    })
# Complex type {http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1}StationMessage with content type ELEMENT_ONLY
# NOTE: pyxb-generated binding (do not hand-edit logic; regenerate from the XSD).
# A station message: the set of stations it applies to (<Station>, repeatable),
# the message body (<Msg>), and id/category/severity/suppress attributes.
class StationMessage (pyxb.binding.basis.complexTypeDefinition):
    """Darwin Workstation Station Message"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'StationMessage')
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 41, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element {http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1}Station uses Python identifier Station
    # (declared with multiple-occurrence flag True: a message may list many stations)
    __Station = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Station'), 'Station', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_StationMessage_httpwww_thalesgroup_comrttiPushPortStationMessagesv1Station', True, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 46, 3), )
    Station = property(__Station.value, __Station.set, None, 'The Stations the message is being applied to')
    # Element {http://www.thalesgroup.com/rtti/PushPort/StationMessages/v1}Msg uses Python identifier Msg
    __Msg = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'Msg'), 'Msg', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_StationMessage_httpwww_thalesgroup_comrttiPushPortStationMessagesv1Msg', False, pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 54, 3), )
    Msg = property(__Msg.value, __Msg.set, None, 'The content of the message')
    # Attribute id uses Python identifier id (shadows the builtin; generated name kept for API stability)
    __id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_StationMessage_id', pyxb.binding.datatypes.int, required=True)
    __id._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 66, 2)
    __id._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 66, 2)
    id = property(__id.value, __id.set, None, None)
    # Attribute cat uses Python identifier cat
    __cat = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'cat'), 'cat', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_StationMessage_cat', MsgCategoryType, required=True)
    __cat._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 67, 2)
    __cat._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 67, 2)
    cat = property(__cat.value, __cat.set, None, 'The category of message')
    # Attribute sev uses Python identifier sev
    __sev = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'sev'), 'sev', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_StationMessage_sev', MsgSeverityType, required=True)
    __sev._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 72, 2)
    __sev._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 72, 2)
    sev = property(__sev.value, __sev.set, None, 'The severity of the message')
    # Attribute suppress uses Python identifier suppress (optional, defaults to 'false')
    __suppress = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'suppress'), 'suppress', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_StationMessage_suppress', pyxb.binding.datatypes.boolean, unicode_default='false')
    __suppress._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 77, 2)
    __suppress._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 77, 2)
    suppress = property(__suppress.value, __suppress.set, None, 'Whether the train running information is suppressed to the public')
    _ElementMap.update({
        __Station.name() : __Station,
        __Msg.name() : __Msg
    })
    _AttributeMap.update({
        __id.name() : __id,
        __cat.name() : __cat,
        __sev.name() : __sev,
        __suppress.name() : __suppress
    })
# Register the named type so it can be resolved through the namespace.
Namespace.addCategoryObject('typeBinding', 'StationMessage', StationMessage)
# Complex type [anonymous] with content type EMPTY
# NOTE: pyxb-generated binding (do not hand-edit logic; regenerate from the XSD).
# Backs the <Station> element inside StationMessage: no content, one required
# 'crs' attribute typed by the imported common-types CrsType.
class CTD_ANON_3 (pyxb.binding.basis.complexTypeDefinition):
    """The Stations the message is being applied to"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    # Anonymous type: it has no expanded name of its own.
    _ExpandedName = None
    _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 50, 4)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute crs uses Python identifier crs
    __crs = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'crs'), 'crs', '__httpwww_thalesgroup_comrttiPushPortStationMessagesv1_CTD_ANON_3_crs', _ImportedBinding_darwinpush_xb_ct.CrsType, required=True)
    __crs._DeclarationLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 51, 5)
    __crs._UseLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 51, 5)
    # Public accessor for the required 'crs' (station code) attribute.
    crs = property(__crs.value, __crs.set, None, None)
    _ElementMap.update({
        
    })
    _AttributeMap.update({
        __crs.name() : __crs
    })
# Top-level element bindings for <p> and <a>, registered with the namespace,
# then re-declared as local elements within the scopes that contain them.
# (pyxb-generated; regenerate from the XSD rather than editing by hand.)
p = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'p'), CTD_ANON_, documentation='Defines an HTML paragraph', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 84, 1))
Namespace.addCategoryObject('elementBinding', p.name().localName(), p)
a = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'a'), CTD_ANON_2, documentation='Defines an HTML anchor', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 94, 1))
Namespace.addCategoryObject('elementBinding', a.name().localName(), a)
# Declare <p> and <a> as elements usable inside the CTD_ANON content model.
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'p'), CTD_ANON_, scope=CTD_ANON, documentation='Defines an HTML paragraph', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 84, 1)))
CTD_ANON._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'a'), CTD_ANON_2, scope=CTD_ANON, documentation='Defines an HTML anchor', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 94, 1)))
# Build the finite-automaton content model for CTD_ANON: two independently
# repeatable (0..*) element uses, <p> and <a>. (pyxb-generated.)
def _BuildAutomaton ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac
    counters = set()
    # Counter conditions model the 0..unbounded occurrence of each element.
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 60, 6))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 61, 6))
    counters.add(cc_1)
    states = []
    # State 0: occurrences of <p>.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'p')), pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 60, 6))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # State 1: occurrences of <a>.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'a')), pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 61, 6))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # Each state loops on itself, incrementing its occurrence counter.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton and discard the one-shot builder (deleted inside).
CTD_ANON._Automaton = _BuildAutomaton()
# Declare <a> as an element usable inside the CTD_ANON_ content model, then
# build its content-model automaton: a single repeatable (0..*) <a>. (pyxb-generated.)
CTD_ANON_._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'a'), CTD_ANON_2, scope=CTD_ANON_, documentation='Defines an HTML anchor', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 94, 1)))
def _BuildAutomaton_ ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac
    counters = set()
    # Counter condition for the 0..unbounded occurrence of <a>.
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 90, 4))
    counters.add(cc_0)
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(CTD_ANON_._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'a')), pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 90, 4))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # Self-loop: each repetition increments the occurrence counter.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    st_0._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton and discard the one-shot builder (deleted inside).
CTD_ANON_._Automaton = _BuildAutomaton_()
# Declare the <Station> (repeatable) and <Msg> (single) elements within the
# StationMessage scope, then build its content-model automaton:
# zero or more <Station> followed by exactly one <Msg>. (pyxb-generated.)
StationMessage._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Station'), CTD_ANON_3, scope=StationMessage, documentation='The Stations the message is being applied to', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 46, 3)))
StationMessage._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'Msg'), CTD_ANON, scope=StationMessage, documentation='The content of the message', location=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 54, 3)))
def _BuildAutomaton_2 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_2
    del _BuildAutomaton_2
    import pyxb.utils.fac as fac
    counters = set()
    # Counter condition for the 0..unbounded occurrence of <Station>.
    cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 46, 3))
    counters.add(cc_0)
    states = []
    # State 0 (<Station>) is not accepting: final_update is None.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(StationMessage._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Station')), pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 46, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # State 1 (<Msg>) is accepting (empty final_update set).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(StationMessage._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'Msg')), pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTStationMessages_v1.xsd', 54, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # <Station> may repeat, then control moves to <Msg>; <Msg> is terminal.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the automaton and discard the one-shot builder (deleted inside).
StationMessage._Automaton = _BuildAutomaton_2()
| HackTrain/darwinpush | darwinpush/xb/raw/sm.py | Python | apache-2.0 | 22,743 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.waveprotocol.wave.client.editor.content.paragraph;
import static org.waveprotocol.wave.client.editor.Editor.ROOT_HANDLER_REGISTRY;
import com.google.gwt.dom.client.Element;
import junit.framework.TestCase;
import org.waveprotocol.wave.client.editor.Editor;
import org.waveprotocol.wave.client.editor.EditorTestingUtil;
import org.waveprotocol.wave.client.editor.content.CMutableDocument;
import org.waveprotocol.wave.client.editor.content.ContentDocElement;
import org.waveprotocol.wave.client.editor.content.ContentDocument;
import org.waveprotocol.wave.client.editor.content.ContentDocument.PermanentMutationHandler;
import org.waveprotocol.wave.client.editor.content.ContentElement;
import org.waveprotocol.wave.client.editor.content.ContentNode;
import org.waveprotocol.wave.client.editor.content.HasImplNodelets;
import org.waveprotocol.wave.client.editor.content.paragraph.OrderedListRenumberer.LevelNumbers;
import org.waveprotocol.wave.client.editor.content.paragraph.Paragraph.Alignment;
import org.waveprotocol.wave.client.editor.content.paragraph.Paragraph.Direction;
import org.waveprotocol.wave.client.scheduler.FinalTaskRunner;
import org.waveprotocol.wave.client.scheduler.Scheduler.Task;
import org.waveprotocol.wave.model.document.indexed.IndexedDocumentImpl;
import org.waveprotocol.wave.model.document.operation.Attributes;
import org.waveprotocol.wave.model.document.operation.impl.DocInitializationBuilder;
import org.waveprotocol.wave.model.document.util.Point;
import org.waveprotocol.wave.model.schema.conversation.ConversationSchemas;
import org.waveprotocol.wave.model.util.CollectionUtils;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
/**
 * Utilities for testing ordered list numbering.
 *
 * A bunch of methods refer to lines by "index". This is index into the
 * conceptual list of lines, so, 0 for the first line, 1 for the second line,
 * and so forth.
 *
 * @author danilatos@google.com (Daniel Danilatos)
 */
public abstract class RenumbererTestBase extends TestCase {

  /**
   * Simple enum for representing a style of line, that maps to the type and
   * li-style type attributes. Contains a representative sample of the types of
   * lines that could possibly have different effects on renumbering.
   */
  enum Type {
    /** No attributes */
    NONE,
    /** t=h1 */
    HEADING,
    /** t=li without listyle */
    LIST,
    /** t=li with listyle = decimal */
    DECIMAL // DECIMAL must come last
  }

  /**
   * Fake renderer that doesn't depend on any DOM stuff.
   */
  ParagraphHtmlRenderer renderer = new ParagraphHtmlRenderer() {
    @Override
    public Element createDomImpl(Renderable element) {
      return null;
    }

    @Override
    public void updateRendering(HasImplNodelets element, String type, String listStyle, int indent,
        Alignment alignment, Direction direction) {
    }

    @Override
    public void updateListValue(HasImplNodelets element, int value) {
      // The rendered value must always agree with the renumberer's cache.
      assertEquals(Line.fromParagraph(((ContentElement) element)).getCachedNumberValue(), value);
    }
  };

  /**
   * Renumberer being tested.
   */
  final OrderedListRenumberer renumberer = new OrderedListRenumberer(renderer);

  /**
   * Batch render task that will get scheduled.
   */
  Task scheduledTask;

  /**
   * Simple fake task runner that just sets {@link #scheduledTask}
   */
  final FinalTaskRunner runner = new FinalTaskRunner() {
    @Override public void scheduleFinally(Task task) {
      // Only a single distinct task may ever be pending at once.
      assertTrue(scheduledTask == null || scheduledTask == task);
      scheduledTask = task;
    }
  };

  /**
   * Same as a regular ParagraphRenderer but tagged with
   * {@link PermanentMutationHandler} so that it gets used even in POJO document mode.
   */
  static class Renderer extends ParagraphRenderer implements PermanentMutationHandler {
    Renderer(ParagraphHtmlRenderer htmlRenderer, OrderedListRenumberer renumberer,
        FinalTaskRunner finalTaskRunner) {
      super(htmlRenderer, renumberer, finalTaskRunner);
    }
  }

  /** First test document. */
  ContentDocument content1;
  /** Second test document, used to exercise renumberer sharing. */
  ContentDocument content2;
  /** Mutable view of {@link #content1}. */
  CMutableDocument doc1;
  /** Mutable view of {@link #content2}. */
  CMutableDocument doc2;

  /**
   * Current doc being used. For some tests we render more than one doc to test
   * the sharing of a single renumberer between multiple documents.
   */
  CMutableDocument doc;

  /** Number of lines in test documents */
  final int SIZE = 10;

  /**
   * Registers the paragraph/line handlers, builds two identical documents of
   * {@link #SIZE} empty lines each, and flushes the initial render task.
   */
  @Override
  protected void setUp() {
    EditorTestingUtil.setupTestEnvironment();

    ContentDocElement.register(ROOT_HANDLER_REGISTRY, ContentDocElement.DEFAULT_TAGNAME);
    Paragraph.register(ROOT_HANDLER_REGISTRY);
    LineRendering.registerLines(ROOT_HANDLER_REGISTRY);
    LineRendering.registerParagraphRenderer(Editor.ROOT_HANDLER_REGISTRY,
        new Renderer(renderer, renumberer, runner));
    renumberer.updateHtmlEvenWhenNullImplNodelet = true;

    DocInitializationBuilder builder = new DocInitializationBuilder();
    builder.elementStart("body", Attributes.EMPTY_MAP);
    for (int i = 0; i < SIZE; i++) {
      builder.elementStart("line", Attributes.EMPTY_MAP).elementEnd();
    }
    builder.elementEnd();

    content1 = new ContentDocument(ConversationSchemas.BLIP_SCHEMA_CONSTRAINTS);
    content1.setRegistries(Editor.ROOT_REGISTRIES);
    content1.consume(builder.build());
    doc1 = content1.getMutableDoc();

    content2 = new ContentDocument(ConversationSchemas.BLIP_SCHEMA_CONSTRAINTS);
    content2.setRegistries(Editor.ROOT_REGISTRIES);
    content2.consume(builder.build());
    doc2 = content2.getMutableDoc();

    doc = doc1;

    runTask();
  }

  /**
   * Performs a randomized test of renumbering logic.
   *
   * @param testIterations number of test iterations on the same document. Each
   *        iteration does a substantial amount of work (depending on document
   *        size).
   * @param seed initial random seed.
   */
  void doRandomTest(int testIterations, int seed) {
    // Disable expensive validation: the random test does a lot of mutations.
    ContentDocument.performExpensiveChecks = false;
    ContentDocument.validateLocalOps = false;
    IndexedDocumentImpl.performValidation = false;

    final int LEVELS = 4;
    final int MAX_RUN = 3;
    final int ITERS_PER_BATCH_RENDER = 6;
    final int DECIMALS_TO_OTHERS = 4; // ratio of decimal bullets to other stuff
    final int UPDATE_TO_ADD_REMOVE = 4; // ratio of updates to node adds/removals

    assertNull(scheduledTask);

    int maxRand = 5;
    Random r = new Random(seed);

    // For each iteration
    for (int iter = 0; iter < testIterations; iter++) {
      info("Iter: " + iter);

      // Repeat several times for a single batch render, to make sure we are
      // able to handle multiple overlapping, redundant updates.
      // Times two because we are alternating between two documents to test
      // the ability of the renumberer to handle more than one document
      // correctly.
      int innerIters = (r.nextInt(ITERS_PER_BATCH_RENDER) + 1) * 2;
      for (int inner = 0; inner < innerIters; inner++) {
        doc = doc1; // (inner % 2 == 0) ? doc1 : doc2;

        int totalLines = (doc.size() - 2) / 2;
        Line line = getFirstLine();

        // Pick a random section of the document to perform a bunch of random
        // changes to
        int i = 0;
        int a = r.nextInt(totalLines);
        int b = r.nextInt(totalLines);
        int startSection = Math.min(a, b);
        int endSection = Math.max(a, b);
        while (i < startSection) {
          i++;
          line = line.next();
        }

        while (i < endSection && line != null) {
          // Pick a random indentation to set
          int level = r.nextInt(LEVELS);

          // Length of run of elements to update
          int length;
          // Whether we are making them numbered items or doing something else
          boolean decimal;
          if (r.nextInt(DECIMALS_TO_OTHERS) == 0) {
            // No need making it a long run for non-numbered items.
            length = r.nextInt(2);
            decimal = false;
          } else {
            decimal = true;
            length = r.nextInt(MAX_RUN - 1) + 1;
          }

          while (length > 0 && i < endSection && line != null) {
            boolean fiftyFifty = i % 2 == 0;

            // If we're numbering these lines, then DECIMAL, otherwise choose a
            // random other type.
            Type type = decimal ? Type.DECIMAL : Type.values()[r.nextInt(Type.values().length - 1)];

            // Randomly decide to add/remove, or to update
            if (r.nextInt(UPDATE_TO_ADD_REMOVE) == 0) {
              int index = index(line);

              // Randomly decide to add or remove.
              // Include some constraints to ensure the document doesn't get too small or too large.
              // (Parenthesized to make the || / ?: grouping explicit.)
              boolean add = (index == 0 ||
                  totalLines < SIZE / 2) ? true : (totalLines > SIZE * 2 ? false : r.nextBoolean());

              if (add) {
                line = create(index, type, level, r.nextBoolean());
              } else {
                line = delete(index);
                if (line == null) {
                  // We just deleted the last line.
                  continue;
                }
              }

              assert line != null;
            } else {
              update(index(line), type, level, fiftyFifty);
            }

            length--;
            i++;
            line = line.next();
          }
        }
      }

      check(iter);
    }
  }

  /**
   * @return index for the given line object (0 for the first line, etc).
   */
  int index(Line line) {
    return (doc.getLocation(line.getLineElement()) - 1) / 2;
  }

  /**
   * @return the line element for the given index.
   */
  ContentElement getLineElement(int index) {
    return doc.locate(index * 2 + 1).getNodeAfter().asElement();
  }

  /**
   * @return the first line object
   */
  Line getFirstLine() {
    return Line.getFirstLineOfContainer(doc.getDocumentElement().getFirstChild().asElement());
  }

  /**
   * Creates and returns a new line.
   *
   * @param createAndUpdateSeparately if true, creates a line, then sets the
   *        attributes as a separate operation. Otherwise, sets them all at
   *        once. We want to test both scenarios.
   */
  Line create(int index, Type type, int indent, boolean createAndUpdateSeparately) {
    // info("Creating @" + index + " " +
    //     type + " " + indent + " " + createAndUpdateSeparately);

    Point<ContentNode> loc = doc.locate(index * 2 + 1);
    Line l;
    if (createAndUpdateSeparately) {
      l = Line.fromLineElement(
          doc.createElement(loc, "line", Attributes.EMPTY_MAP));
      update(index, type, indent);
    } else {
      l = Line.fromLineElement(
          doc.createElement(loc, "line", attributes(type, indent, false, true)));
    }
    assertNotNull(l);
    return l;
  }

  /**
   * Deletes the line at the specified index.
   */
  Line delete(int index) {
    // info("Deleting @" + index);

    assert index != 0 : "Code doesn't (yet) support killing the initial line";
    ContentElement e = getLineElement(index);
    Line line = Line.fromLineElement(e).next();
    doc.deleteNode(e);
    return line;
  }

  /**
   * Updates the attributes of the line at the specified index.
   */
  void update(int index, Type type, int indent) {
    update(index, type, indent, true);
  }

  /**
   * Updates the attributes of the line at the specified index.
   *
   * @param alwaysSetRedundant if true, always set the listyle attribute even if it
   *        is not necessary. For example, if the listyle attribute was
   *        "decimal", but the type is "HEADING", the listyle attribute should
   *        normally be ignored and has no meaning. It won't make a difference
   *        if it is set or not. We want to test both scenarios.
   */
  void update(int index, Type type, int indent, boolean alwaysSetRedundant) {
    ContentElement e = getLineElement(index);

    // info("Making @" + ((doc.getLocation(e) - 1)/2) + " " +
    //     type + " " + indent + " " + alwaysSetStyle);

    Map<String, String> updates = attributes(type, indent, alwaysSetRedundant, false);
    for (Map.Entry<String, String> pair : updates.entrySet()) {
      doc.setElementAttribute(e, pair.getKey(), pair.getValue());
    }
  }

  /**
   * Creates the map of element attributes for the given parameters.
   *
   * @param alwaysSetStyle see {@link #update(int, Type, int, boolean)}
   * @param noNulls eliminate keys that would have null values. We want nulls
   *        for updates, but no nulls for creates.
   */
  Map<String, String> attributes(Type type, int indent, boolean alwaysSetStyle, boolean noNulls) {
    Map<String, String> updates = new HashMap<String, String>();
    String levelStr = (indent > 0 ? "" + indent : null);
    maybePut(updates, Paragraph.INDENT_ATTR, levelStr, noNulls);

    String t = null;
    String lt = null;
    switch (type) {
    case HEADING: t = "h1"; break;
    case LIST: t = Paragraph.LIST_TYPE; break;
    case DECIMAL: t = Paragraph.LIST_TYPE; lt = Paragraph.LIST_STYLE_DECIMAL; break;
    }

    maybePut(updates, Paragraph.SUBTYPE_ATTR, t, noNulls);
    if (alwaysSetStyle || type == Type.LIST || type == Type.DECIMAL) {
      maybePut(updates, Paragraph.LIST_STYLE_ATTR, lt, noNulls);
    }
    return updates;
  }

  /** Puts (key, val) into the map unless val is null and nulls are disallowed. */
  void maybePut(Map<String, String> map, String key, String val, boolean noNull) {
    if (val != null || !noNull) {
      map.put(key, val);
    }
  }

  /**
   * Check the current line numbering is consistent with the document state.
   */
  void check() {
    check(-1);
  }

  /**
   * Check the current line numbering is consistent with the document state.
   *
   * @param iter current test iteration, for debugging/logging purposes.
   */
  void check(int iter) {
    runTask();

    // if (iter >= 1740) {
    //   info("\n\nCHECKING\n");
    //   printInfo(null, "XX");
    //   info("---");
    // }

    LevelNumbers numbers = new LevelNumbers(0, 1);
    Line line = getFirstLine();
    while (line != null) {
      int indent = line.getIndent();
      numbers.setLevel(indent);
      if (line.isDecimalListItem()) {
        int num = numbers.getNumberAndIncrement();
        assertFalse(line.getCachedNumberValue() == Line.DIRTY);
        if (num != line.getCachedNumberValue()) {
          String msg = "Expected: " + num + ", got: " + line.getCachedNumberValue();
          printInfo(line, msg);
          fail("Wrong number on iteration " + iter + ". " + msg +
              ". See stdout & stderr for debug details");
        }
      } else {
        numbers.setNumber(1);
      }

      line = line.next();
    }
    // info("^^^");
  }

  /** Executes and clears the pending batch render task, if any. */
  void runTask() {
    if (scheduledTask != null) {
      scheduledTask.execute();
    }
    scheduledTask = null;
  }

  /**
   * Dumps the document's lines to stdout, switching to stderr once the
   * offending line has been printed.
   *
   * @param badLine line where the check failed, or null to print everything
   * @param msg failure description to print alongside the bad line
   */
  void printInfo(Line badLine, String msg) {
    Line line = getFirstLine();
    PrintStream stream = System.out;
    int i = 0;
    while (line != null) {
      int indent = line.getIndent();
      stream.println(
          CollectionUtils.repeat('.', line.getIndent()) +
          line.toString() +
          " indent:" + indent +
          CollectionUtils.repeat(' ', 20) + line.getCachedNumberValue() + " (" + i + ")");
      if (line == badLine) {
        stream.println("\n\n\n");
        stream = System.err;
        stream.println(msg);
        stream.println(">>>>>>>>>>>>>>>>>>>>>>>>> DIED ON LINE ABOVE <<<<<<<<<<<<<<<<<<\n\n");
      }
      line = line.next();
      i++;
    }
  }

  /** Debug logging hook, disabled by default. */
  void info(Object msg) {
    // Uncomment for debugging
    // System.out.println(msg == null ? "null" : msg.toString());
  }
}
| vega113/incubator-wave | wave/src/test/java/org/waveprotocol/wave/client/editor/content/paragraph/RenumbererTestBase.java | Java | apache-2.0 | 16,472 |
from urlparse import urlparse
from api_tests.nodes.views.test_node_contributors_list import NodeCRUDTestCase
from nose.tools import * # flake8: noqa
from api.base.settings.defaults import API_BASE
from framework.auth.core import Auth
from tests.base import fake
from osf_tests.factories import (
ProjectFactory,
CommentFactory,
RegistrationFactory,
WithdrawnRegistrationFactory,
)
class TestWithdrawnRegistrations(NodeCRUDTestCase):
    """API tests covering which endpoints and fields remain visible on a
    withdrawn (retracted) registration."""

    def setUp(self):
        super(TestWithdrawnRegistrations, self).setUp()
        # A public registration, immediately withdrawn by its creator.
        self.registration = RegistrationFactory(creator=self.user, project=self.public_project)
        self.withdrawn_registration = WithdrawnRegistrationFactory(registration=self.registration, user=self.registration.creator)
        # A node link from the public project to another public project.
        self.public_pointer_project = ProjectFactory(is_public=True)
        self.public_pointer = self.public_project.add_pointer(self.public_pointer_project,
                                                              auth=Auth(self.user),
                                                              save=True)
        self.withdrawn_url = '/{}registrations/{}/?version=2.2'.format(API_BASE, self.registration._id)
        self.withdrawn_registration.justification = 'We made a major error.'
        self.withdrawn_registration.save()

    def _get_expecting_status(self, url, expected_status):
        """GET *url* as ``self.user`` and assert the response status code."""
        response = self.app.get(url, auth=self.user.auth, expect_errors=True)
        assert_equal(response.status_code, expected_status)
        return response

    def test_can_access_withdrawn_contributors(self):
        """Contributor list stays readable after withdrawal."""
        self._get_expecting_status(
            '/{}registrations/{}/contributors/'.format(API_BASE, self.registration._id), 200)

    def test_cannot_access_withdrawn_children(self):
        """Children endpoint is forbidden after withdrawal."""
        self._get_expecting_status(
            '/{}registrations/{}/children/'.format(API_BASE, self.registration._id), 403)

    def test_cannot_access_withdrawn_comments(self):
        """Comments endpoint is forbidden after withdrawal."""
        self.public_project = ProjectFactory(is_public=True, creator=self.user)
        self.public_comment = CommentFactory(node=self.public_project, user=self.user)
        self._get_expecting_status(
            '/{}registrations/{}/comments/'.format(API_BASE, self.registration._id), 403)

    def test_can_access_withdrawn_contributor_detail(self):
        """Individual contributor detail stays readable after withdrawal."""
        self._get_expecting_status(
            '/{}registrations/{}/contributors/{}/'.format(
                API_BASE, self.registration._id, self.user._id), 200)

    def test_cannot_return_a_withdrawn_registration_at_node_detail_endpoint(self):
        """A withdrawn registration is not exposed through the nodes endpoint."""
        self._get_expecting_status(
            '/{}nodes/{}/'.format(API_BASE, self.registration._id), 404)

    def test_cannot_delete_a_withdrawn_registration(self):
        """DELETE on a withdrawn registration is not allowed."""
        target_url = '/{}registrations/{}/'.format(API_BASE, self.registration._id)
        response = self.app.delete_json_api(target_url, auth=self.user.auth, expect_errors=True)
        self.registration.reload()
        assert_equal(response.status_code, 405)

    def test_cannot_access_withdrawn_files_list(self):
        """Files endpoint is forbidden after withdrawal."""
        self._get_expecting_status(
            '/{}registrations/{}/files/'.format(API_BASE, self.registration._id), 403)

    def test_cannot_access_withdrawn_node_links_detail(self):
        """Node-link detail endpoint is forbidden after withdrawal."""
        self._get_expecting_status(
            '/{}registrations/{}/node_links/{}/'.format(
                API_BASE, self.registration._id, self.public_pointer._id), 403)

    def test_cannot_access_withdrawn_node_links_list(self):
        """Node-link list endpoint is forbidden after withdrawal."""
        self._get_expecting_status(
            '/{}registrations/{}/node_links/'.format(API_BASE, self.registration._id), 403)

    def test_cannot_access_withdrawn_node_logs(self):
        """Logs endpoint is forbidden after withdrawal."""
        self.public_project = ProjectFactory(is_public=True, creator=self.user)
        self._get_expecting_status(
            '/{}registrations/{}/logs/'.format(API_BASE, self.registration._id), 403)

    def test_cannot_access_withdrawn_registrations_list(self):
        """Child-registrations endpoint is forbidden after withdrawal."""
        self.registration.save()
        self._get_expecting_status(
            '/{}registrations/{}/registrations/'.format(API_BASE, self.registration._id), 403)

    def test_withdrawn_registrations_display_limited_fields(self):
        """Withdrawn registrations expose only a restricted attribute set and
        only the contributors relationship."""
        registration = self.registration
        response = self.app.get(self.withdrawn_url, auth=self.user.auth)
        assert_equal(response.status_code, 200)
        attributes = response.json['data']['attributes']
        registration.reload()
        expected_attributes = {
            'title': registration.title,
            'description': registration.description,
            'date_created': registration.date_created.isoformat().replace('+00:00', 'Z'),
            'date_registered': registration.registered_date.isoformat().replace('+00:00', 'Z'),
            'date_modified': registration.date_modified.isoformat().replace('+00:00', 'Z'),
            'withdrawal_justification': registration.retraction.justification,
            'public': None,
            'category': None,
            'registration': True,
            'fork': None,
            'collection': None,
            'tags': None,
            'withdrawn': True,
            'pending_withdrawal': None,
            'pending_registration_approval': None,
            'pending_embargo_approval': None,
            'embargo_end_date': None,
            'registered_meta': None,
            'current_user_permissions': None,
            'registration_supplement': registration.registered_schema.first().name
        }

        for field_name, expected_value in expected_attributes.items():
            assert_equal(expected_value, attributes[field_name])

        contributors = urlparse(
            response.json['data']['relationships']['contributors']['links']['related']['href']).path
        assert_equal(contributors, '/{}registrations/{}/contributors/'.format(API_BASE, registration._id))

        # Every other relationship must be hidden on a withdrawn registration.
        hidden_relationships = (
            'children', 'comments', 'node_links', 'registrations', 'parent',
            'forked_from', 'files', 'logs', 'registered_by', 'registered_from',
            'root',
        )
        for relationship in hidden_relationships:
            assert_not_in(relationship, response.json['data']['relationships'])

    def test_field_specific_related_counts_ignored_if_hidden_field_on_withdrawn_registration(self):
        """related_counts for a hidden relationship is silently ignored."""
        target_url = '/{}registrations/{}/?related_counts=children'.format(API_BASE, self.registration._id)
        response = self.app.get(target_url, auth=self.user.auth)
        assert_equal(response.status_code, 200)
        assert_not_in('children', response.json['data']['relationships'])
        assert_in('contributors', response.json['data']['relationships'])

    def test_field_specific_related_counts_retrieved_if_visible_field_on_withdrawn_registration(self):
        """related_counts for a visible relationship is still computed."""
        target_url = '/{}registrations/{}/?related_counts=contributors'.format(API_BASE, self.registration._id)
        response = self.app.get(target_url, auth=self.user.auth)
        assert_equal(response.status_code, 200)
        assert_equal(response.json['data']['relationships']['contributors']['links']['related']['meta']['count'], 1)
| monikagrabowska/osf.io | api_tests/registrations/views/test_withdrawn_registrations.py | Python | apache-2.0 | 7,865 |
    //where
    /** Generate code to create an array with given element type and number
     *  of dimensions.
     *
     *  @param pos   source position, used for error reporting and constant
     *               pool references
     *  @param type  the array type to be created
     *  @param ndims the number of dimensions given explicitly in the
     *               {@code new} expression
     *  @return a stack item representing the freshly created array reference
     */
    Item makeNewArray(DiagnosticPosition pos, Type type, int ndims) {
	try {// added by me (debug tracing; translated from Chinese "我加上的")
		DEBUG.P(this,"makeNewArray(3)");
		DEBUG.P("type="+type);
		DEBUG.P("ndims="+ndims);
        Type elemtype = types.elemtype(type);
        // The class-file format caps array types at MAX_DIMENSIONS (255);
        // count the element type's own dimensions plus the new ones.
        if (types.dimensions(elemtype) + ndims > ClassFile.MAX_DIMENSIONS) {
            log.error(pos, "limit.dimensions");
            nerrs++;
        }
        int elemcode = Code.arraycode(elemtype);
        DEBUG.P("elemcode="+elemcode);
        // NOTE(review): apparently elemcode 0 means a reference element type
        // and 1 means the element is itself an array; other values are the
        // JVM newarray primitive type codes — confirm against Code.arraycode.
        if (elemcode == 0 || (elemcode == 1 && ndims == 1)) {
            // Single reference dimension: anewarray of the element class.
            code.emitAnewarray(makeRef(pos, elemtype), type);
        } else if (elemcode == 1) {
            // Multiple dimensions of reference/array elements: multianewarray.
            code.emitMultianewarray(ndims, makeRef(pos, type), type);
        } else {
            // One dimension of a primitive element type.
            code.emitNewarray(elemcode, type);
        }
        return items.makeStackItem(type);
	}finally{// added by me (debug tracing)
		DEBUG.P(0,this,"makeNewArray(3)");
	}
} | mashuai/Open-Source-Research | Javac2007/流程/jvm/16Gen/makeNewArray.java | Java | apache-2.0 | 914 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.util;
import org.apache.calcite.avatica.util.DateTimeUtils;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import java.util.Calendar;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
/**
* Time literal.
*
* <p>Immutable, internally represented as a string (in ISO format),
* and can support unlimited precision (milliseconds, nanoseconds).
*/
public class TimeString implements Comparable<TimeString> {
  // Canonical form: "HH:MM:SS" plus an optional fraction that must not end
  // in a zero (trailing zeros are stripped by withFraction/round).
  private static final Pattern PATTERN =
      Pattern.compile("[0-9][0-9]:[0-9][0-9]:[0-9][0-9](\\.[0-9]*[1-9])?");
  // The time as a canonical ISO-style string, e.g. "12:34:56.789".
  final String v;
  /** Internal constructor, no validation. */
  private TimeString(String v, @SuppressWarnings("unused") boolean ignore) {
    this.v = v;
  }
  /** Creates a TimeString from a canonical "HH:MM:SS[.fff]" string.
   *
   * <p>NOTE(review): the Guava message templates below contain no {@code %s}
   * placeholders, so the offending value is appended in square brackets
   * rather than interpolated — confirm this is the intended message style. */
  public TimeString(String v) {
    this(v, false);
    Preconditions.checkArgument(PATTERN.matcher(v).matches(),
        "Invalid time format:", v);
    Preconditions.checkArgument(getHour() >= 0 && getHour() < 24,
        "Hour out of range:", getHour());
    Preconditions.checkArgument(getMinute() >= 0 && getMinute() < 60,
        "Minute out of range:", getMinute());
    Preconditions.checkArgument(getSecond() >= 0 && getSecond() < 60,
        "Second out of range:", getSecond());
  }
  /** Creates a TimeString for hour, minute and second values. */
  public TimeString(int h, int m, int s) {
    this(hms(h, m, s), false);
  }
  /** Validates an hour-minute-second value and converts to a string. */
  private static String hms(int h, int m, int s) {
    Preconditions.checkArgument(h >= 0 && h < 24, "Hour out of range:", h);
    Preconditions.checkArgument(m >= 0 && m < 60, "Minute out of range:", m);
    Preconditions.checkArgument(s >= 0 && s < 60, "Second out of range:", s);
    final StringBuilder b = new StringBuilder();
    DateTimeStringUtils.hms(b, h, m, s);
    return b.toString();
  }
  /** Sets the fraction field of a {@code TimeString} to a given number
   * of milliseconds. Nukes the value set via {@link #withNanos}.
   *
   * <p>For example,
   * {@code new TimeString(2, 3, 4).withMillis(56)}
   * yields {@code TIME '02:03:04.056'}. */
  public TimeString withMillis(int millis) {
    Preconditions.checkArgument(millis >= 0 && millis < 1000);
    return withFraction(DateTimeStringUtils.pad(3, millis));
  }
  /** Sets the fraction field of a {@code TimeString} to a given number
   * of nanoseconds. Nukes the value set via {@link #withMillis(int)}.
   *
   * <p>For example,
   * {@code new TimeString(2, 3, 4).withNanos(56789)}
   * yields {@code TIME '02:03:04.000056789'}. */
  public TimeString withNanos(int nanos) {
    Preconditions.checkArgument(nanos >= 0 && nanos < 1000000000);
    return withFraction(DateTimeStringUtils.pad(9, nanos));
  }
  /** Sets the fraction field of a {@code TimeString}.
   * The precision is determined by the number of leading zeros.
   * Trailing zeros are stripped.
   *
   * <p>For example,
   * {@code new TimeString(2, 3, 4).withFraction("00506000")}
   * yields {@code TIME '02:03:04.00506'}. */
  public TimeString withFraction(String fraction) {
    String v = this.v;
    int i = v.indexOf('.');
    if (i >= 0) {
      // Drop any existing fraction before appending the new one.
      v = v.substring(0, i);
    }
    // Strip trailing zeros so the value stays in canonical form.
    while (fraction.endsWith("0")) {
      fraction = fraction.substring(0, fraction.length() - 1);
    }
    if (fraction.length() > 0) {
      v = v + "." + fraction;
    }
    return new TimeString(v);
  }
  @Override public String toString() {
    return v;
  }
  @Override public boolean equals(Object o) {
    // The value is in canonical form (no trailing zeros).
    return o == this
        || o instanceof TimeString
        && ((TimeString) o).v.equals(v);
  }
  @Override public int hashCode() {
    return v.hashCode();
  }
  @Override public int compareTo(@Nonnull TimeString o) {
    // Fixed-width canonical form makes string order match temporal order.
    return v.compareTo(o.v);
  }
  /** Creates a TimeString from a Calendar. */
  public static TimeString fromCalendarFields(Calendar calendar) {
    return new TimeString(
        calendar.get(Calendar.HOUR_OF_DAY),
        calendar.get(Calendar.MINUTE),
        calendar.get(Calendar.SECOND))
        .withMillis(calendar.get(Calendar.MILLISECOND));
  }
  /** Creates a TimeString from a count of milliseconds since midnight. */
  public static TimeString fromMillisOfDay(int i) {
    return new TimeString(DateTimeUtils.unixTimeToString(i))
        .withMillis((int) DateTimeUtils.floorMod(i, 1000));
  }
  /** Truncates to at most {@code precision} fractional digits, then strips
   * trailing zeros (and a bare '.') to restore canonical form. */
  public TimeString round(int precision) {
    Preconditions.checkArgument(precision >= 0);
    // 9 = length of "HH:MM:SS." — the prefix before the fractional digits.
    int targetLength = 9 + precision;
    if (v.length() <= targetLength) {
      return this;
    }
    String v = this.v.substring(0, targetLength);
    while (v.length() >= 9 && (v.endsWith("0") || v.endsWith("."))) {
      v = v.substring(0, v.length() - 1);
    }
    return new TimeString(v);
  }
  /** Returns the time as a count of milliseconds since midnight. */
  public int getMillisOfDay() {
    int h = Integer.valueOf(v.substring(0, 2));
    int m = Integer.valueOf(v.substring(3, 5));
    int s = Integer.valueOf(v.substring(6, 8));
    int ms = getMillisInSecond();
    // The MILLIS_PER_* constants are longs; the result fits in an int.
    return (int) (h * DateTimeUtils.MILLIS_PER_HOUR
        + m * DateTimeUtils.MILLIS_PER_MINUTE
        + s * DateTimeUtils.MILLIS_PER_SECOND
        + ms);
  }
  /** Returns the fractional part scaled to milliseconds, by string length. */
  private int getMillisInSecond() {
    switch (v.length()) {
    case 8: // "12:34:56"
      return 0;
    case 10: // "12:34:56.7"
      return Integer.valueOf(v.substring(9)) * 100;
    case 11: // "12:34:56.78"
      return Integer.valueOf(v.substring(9)) * 10;
    case 12: // "12:34:56.789"
    default: // "12:34:56.7890000012345" — extra digits beyond millis ignored
      return Integer.valueOf(v.substring(9, 12));
    }
  }
  private int getHour() {
    return Integer.parseInt(v.substring(0, 2));
  }
  private int getMinute() {
    return Integer.parseInt(this.v.substring(3, 5));
  }
  private int getSecond() {
    return Integer.parseInt(this.v.substring(6, 8));
  }
  /** Returns this time as a Calendar (date fields at the epoch). */
  public Calendar toCalendar() {
    return Util.calendar(getMillisOfDay());
  }
  /** Converts this TimeString to a string, truncated or padded with
   * zeroes to a given precision. */
  public String toString(int precision) {
    Preconditions.checkArgument(precision >= 0);
    final int p = precision();
    if (precision < p) {
      // Too many digits: truncate, then re-pad to exactly `precision`.
      return round(precision).toString(precision);
    }
    if (precision > p) {
      String s = v;
      if (p == 0) {
        s += ".";
      }
      return s + Strings.repeat("0", precision - p);
    }
    return v;
  }
  /** Number of fractional digits currently present (chars after "HH:MM:SS."). */
  private int precision() {
    return v.length() < 9 ? 0 : (v.length() - 9);
  }
}
// End TimeString.java
| xhoong/incubator-calcite | core/src/main/java/org/apache/calcite/util/TimeString.java | Java | apache-2.0 | 7,356 |
#
# Cookbook Name:: graphite
# Library:: ChefGraphite
#
# Copyright 2014, Heavy Water Ops, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module ChefGraphite
class << self
def ini_file(hash)
data = generate_conf_data(hash)
lines = Array.new
data.each do |section, config|
lines << "[#{section}]"
config.each { |key, value| lines << "#{key} = #{value}" }
lines << ""
end
lines.join("\n").concat("\n")
end
def generate_conf_data(data)
tuples = sort_tuples(section_tuples(data))
result = Hash.new
tuples.each { |tuple| result[tuple.first] = tuple.last }
result
end
def sort_tuples(tuples)
tuples.sort { |a, b| a.first <=> b.first }
end
def section_tuples(section_hashes)
section_hashes.map do |hash|
[
section_name(hash[:type], hash[:name]),
normalize(hash[:config])
]
end
end
def section_name(type, name)
if type.nil?
name
elsif name == "default"
type
else
"#{type}:#{name}"
end
end
def normalize(hash)
result = Hash.new
hash.each do |key, value|
result[key.to_s.upcase] = normalize_value(value)
end
result
end
def normalize_value(obj)
if obj.is_a? Array
obj.map { |o| normalize_value(o) }.join(", ")
else
value = obj.to_s
value.capitalize! if %w{true false}.include?(value)
value
end
end
end
end
| mbabic/graphite | libraries/chef_graphite.rb | Ruby | apache-2.0 | 2,028 |
/**
* <copyright>
* Copyright (c) 2008 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* </copyright>
*/
package org.eclipse.bpel.ui.adapters;
import java.util.List;
import org.eclipse.bpel.model.BPELPackage;
import org.eclipse.bpel.model.MessageExchanges;
import org.eclipse.bpel.ui.BPELUIPlugin;
import org.eclipse.bpel.ui.IBPELUIConstants;
import org.eclipse.bpel.ui.adapters.delegates.ReferenceContainer;
import org.eclipse.bpel.ui.editparts.MessageExchangesEditPart;
import org.eclipse.bpel.ui.editparts.OutlineTreeEditPart;
import org.eclipse.bpel.ui.properties.PropertiesLabelProvider;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPartFactory;
import org.eclipse.swt.graphics.Image;
import org.eclipse.bpel.ui.Messages;
/**
*
* @author Miriam Grundig (MGrundig@de.ibm.com)
*/
public class MessageExchangesAdapter extends ContainerAdapter implements EditPartFactory,
ILabeledElement, IOutlineEditPartFactory, ITrayEditPartFactory
{
	public MessageExchangesAdapter() {
		super();
	}

	/* IContainer delegate */
	// Children of a MessageExchanges element are managed through the
	// MessageExchanges_Children EMF reference.
	public IContainer createContainerDelegate() {
		return new ReferenceContainer(BPELPackage.eINSTANCE.getMessageExchanges_Children());
	}

	/* EditPartFactory */
	// Builds the graphical edit part for the designer canvas, wired to the
	// shared properties label provider.
	public EditPart createEditPart(EditPart context, Object model) {
		MessageExchangesEditPart result = new MessageExchangesEditPart();
		result.setLabelProvider(PropertiesLabelProvider.getInstance());
		result.setModel(model);
		return result;
	}

	/* ITrayEditPartFactory */
	// The tray view reuses the same edit part as the canvas.
	public EditPart createTrayEditPart(EditPart context, Object model) {
		return createEditPart(context, model);
	}

	/* ILabeledElement */
	public Image getSmallImage(Object object) {
		return BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_MESSAGEEXCHANGE_16);
	}

	public Image getLargeImage(Object object) {
		return BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_MESSAGEEXCHANGE_32);
	}

	public String getTypeLabel(Object object) {
		return Messages.MessageExchangesAdapter_TypeLabel;
	}

	public String getLabel(Object object) {
		return Messages.MessageExchangesAdapter_Label;
	}

	/* IOutlineEditPartFactory */
	// Outline tree part whose children are the contained message exchanges.
	public EditPart createOutlineEditPart(EditPart context, final Object model) {
		EditPart result = new OutlineTreeEditPart(){
			// Raw List to match the OutlineTreeEditPart signature.
			protected List getModelChildren() {
				MessageExchanges messageExchanges = (MessageExchanges) model;
				List list = messageExchanges.getChildren();
				return list;
			}
		};
		result.setModel(model);
		return result;
	}
}
| chanakaudaya/developer-studio | bps/org.eclipse.bpel.ui/src/org/eclipse/bpel/ui/adapters/MessageExchangesAdapter.java | Java | apache-2.0 | 2,792 |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.layout.dlm;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portal.PortalException;
import org.jasig.portal.layout.IUserLayoutStore;
import org.jasig.portal.security.IPerson;
import org.jasig.portal.spring.locator.UserLayoutStoreLocator;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* Looks for, applies against the ilf, and updates accordingly the delete
* set within a plf.
*
* @version $Revision$ $Date$
* @since uPortal 2.5
*/
public class DeleteManager
{
    private static final Log LOG = LogFactory.getLog(DeleteManager.class);
    // Lazily-resolved singleton layout store; see getDLS().
    private static IUserLayoutStore dls = null;

    /**
     * Hands back the single instance of RDBMDistributedLayoutStore. There is
     * already a method
     * for aquiring a single instance of the configured layout store so we
     * delegate over there so that all references refer to the same instance.
     * This method is solely for convenience so that we don't have to keep
     * calling UserLayoutStoreFactory and casting the resulting class.
     */
    private static IUserLayoutStore getDLS()
    {
        if ( dls == null )
        {
            dls = UserLayoutStoreLocator.getUserLayoutStore();
        }
        return dls;
    }

    /**
       Get the delete set if any from the plf and process each delete command
       removing any that fail from the delete set so that the delete set is
       self cleaning.

       @param plf    the user's personal layout fragment document
       @param ilf    the incorporated layout document the deletes apply to
       @param result records whether either document was changed
    */
    static void applyAndUpdateDeleteSet( Document plf,
                                         Document ilf,
                                         IntegrationResult result )
    {
        Element dSet = null;
        try
        {
            dSet = getDeleteSet( plf, null, false );
        }
        catch( Exception e )
        {
            LOG.error("Exception occurred while getting user's DLM delete-set.",
                    e);
        }

        if ( dSet == null )
            return;

        NodeList deletes = dSet.getChildNodes();

        // Iterate in reverse so removing a failed delete directive doesn't
        // shift the indices of the directives not yet visited.
        for( int i=deletes.getLength()-1; i>=0; i-- )
        {
            if ( applyDelete( (Element) deletes.item(i), ilf ) == false )
            {
                // Failed delete directives are pruned (self-cleaning set).
                dSet.removeChild( deletes.item(i) );
                result.setChangedPLF(true);
            }
            else
            {
                result.setChangedILF(true);
            }
        }

        // Drop the delete set entirely once it is empty.
        if ( dSet.getChildNodes().getLength() == 0 )
        {
            plf.getDocumentElement().removeChild( dSet );
            result.setChangedPLF(true);
        }
    }

    /**
       Attempt to apply a single delete command and return true if it succeeds
       or false otherwise. If the delete is disallowed or the target element
       no longer exists in the document the delete command fails and returns
       false.
    */
    private static boolean applyDelete( Element delete, Document ilf )
    {
        String nodeID = delete.getAttribute( Constants.ATT_NAME );

        Element e = ilf.getElementById( nodeID );

        if ( e == null )
            return false;

        String deleteAllowed = e.getAttribute( Constants.ATT_DELETE_ALLOWED );
        if ( deleteAllowed.equals( "false" ) )
            return false;

        Element p = (Element) e.getParentNode();
        // Deregister the ID before removal so getElementById can't resolve
        // the stale node afterwards.
        e.setIdAttribute(Constants.ATT_ID, false);
        p.removeChild( e );
        return true;
    }

    /**
       Get the delete set if any stored in the root of the document or create
       it is passed in create flag is true.

       @param plf    the personal layout fragment to search
       @param person used only to generate a new directive id when creating
       @param create whether to create the delete set if absent
       @return the delete-set element, or null if absent and create is false
    */
    private static Element getDeleteSet( Document plf,
                                         IPerson person,
                                         boolean create )
        throws PortalException
    {
        Node root = plf.getDocumentElement();
        Node child = root.getFirstChild();

        // Linear scan of the root's direct children for an existing set.
        while( child != null )
        {
            if ( child.getNodeName().equals( Constants.ELM_DELETE_SET ) )
                return (Element) child;
            child = child.getNextSibling();
        }

        if ( create == false )
            return null;

        String ID = null;

        try
        {
            ID = getDLS().getNextStructDirectiveId( person );
        }
        catch (Exception e)
        {
            throw new PortalException( "Exception encountered while " +
                                       "generating new delete set node " +
                                       "Id for userId=" + person.getID(), e );
        }
        Element delSet = plf.createElement( Constants.ELM_DELETE_SET );
        delSet.setAttribute( Constants.ATT_TYPE,
                             Constants.ELM_DELETE_SET );
        delSet.setAttribute( Constants.ATT_ID, ID );
        root.appendChild( delSet );
        return delSet;
    }

    /**
       Create and append a delete directive to delete the node identified by
       the passed in element id. If this node contains any incorporated
       elements then they must also have a delete directive added in here to
       prevent incorporated channels originating in another column from
       reappearing in that column because the position set entry that pulled
       them into this column was now removed. (ie: the user moved an inc'd
       channel to this column and then deleted the column means that the inc'd
       channel should be deleted also.) This was designed to add a delete
       directive for each nested element having an ID so as to work for the
       future case of a tree view.
    */
    public static void addDeleteDirective( Element compViewNode,
                                           String elementID,
                                           IPerson person )
        throws PortalException
    {
        Document plf = (Document) person.getAttribute( Constants.PLF );
        Element delSet = getDeleteSet( plf, person, true );
        addDeleteDirective( compViewNode, elementID, person, plf, delSet );
    }

    /**
       This method does the actual work of adding a delete directive and then
       recursively calling itself for any incoporated children that need to be
       deleted as well.
    */
    private static void addDeleteDirective( Element compViewNode,
                                           String elementID,
                                           IPerson person,
                                           Document plf,
                                           Element delSet )
        throws PortalException
    {
        String ID = null;

        try
        {
            ID = getDLS().getNextStructDirectiveId( person );
        }
        catch (Exception e)
        {
            throw new PortalException( "Exception encountered while " +
                                       "generating new delete node " +
                                       "Id for userId=" + person.getID(), e );
        }
        Element delete = plf.createElement( Constants.ELM_DELETE );
        delete.setAttribute( Constants.ATT_TYPE, Constants.ELM_DELETE );
        delete.setAttribute( Constants.ATT_ID, ID );
        delete.setAttributeNS( Constants.NS_URI,
                               Constants.ATT_NAME, elementID );
        delSet.appendChild( delete );

        // now pass through children and add delete directives for those with
        // IDs indicating that they were incorporated
        // NOTE(review): the (Element) casts assume every child node is an
        // Element (no text/comment nodes) — confirm the layout DOM guarantees
        // this, otherwise a ClassCastException is possible here.
        Element child = (Element) compViewNode.getFirstChild();

        while( child != null )
        {
            String childID = child.getAttribute( "ID" );
            if ( childID.startsWith( Constants.FRAGMENT_ID_USER_PREFIX ) )
                addDeleteDirective( child, childID, person, plf, delSet );
            child = (Element) child.getNextSibling();
        }
    }
}
| mgillian/uPortal | uportal-war/src/main/java/org/jasig/portal/layout/dlm/DeleteManager.java | Java | apache-2.0 | 8,567 |
/*
* Copyright (c) 2013-2016 Cinchapi Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cinchapi.concourse.server.model;
import java.nio.ByteBuffer;
import java.util.Objects;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import com.cinchapi.concourse.server.io.Byteable;
import com.cinchapi.concourse.util.ByteBuffers;
import com.google.common.base.Preconditions;
/**
* A Position is an abstraction for the association between a
* relative location and a {@link PrimaryKey} that is used in a
* {@link SearchRecord} to specify the location of a term in a record.
*
* @author Jeff Nelson
*/
@Immutable
@Immutable
public final class Position implements Byteable, Comparable<Position> {

    /**
     * Return the Position encoded in {@code bytes} so long as those bytes
     * adhere to the format specified by the {@link #getBytes()} method. This
     * method assumes that all the bytes in the {@code bytes} belong to the
     * Position. In general, it is necessary to get the appropriate Position
     * slice from the parent ByteBuffer using
     * {@link ByteBuffers#slice(ByteBuffer, int, int)}.
     * 
     * @param bytes
     * @return the Position
     */
    public static Position fromByteBuffer(ByteBuffer bytes) {
        // Layout mirrors copyTo(): PrimaryKey first, then the int index.
        PrimaryKey primaryKey = PrimaryKey.fromByteBuffer(ByteBuffers.get(
                bytes, PrimaryKey.SIZE));
        int index = bytes.getInt();
        return new Position(primaryKey, index);
    }

    /**
     * Return a Position that is backed by {@code primaryKey} and {@code index}.
     * 
     * @param primaryKey
     * @param index
     * @return the Position
     */
    public static Position wrap(PrimaryKey primaryKey, int index) {
        return new Position(primaryKey, index);
    }

    /**
     * The total number of bytes used to store each Position
     */
    public static final int SIZE = PrimaryKey.SIZE + 4; // index

    /**
     * A cached copy of the binary representation that is returned from
     * {@link #getBytes()}.
     */
    // NOTE(review): lazily initialized without synchronization; presumably a
    // benign race (every thread would compute the same bytes) — confirm.
    private transient ByteBuffer bytes;

    /**
     * The index that this Position represents.
     */
    private final int index;

    /**
     * The PrimaryKey of the record that this Position represents.
     */
    private final PrimaryKey primaryKey;

    /**
     * Construct a new instance.
     * 
     * @param primaryKey
     * @param index
     */
    private Position(PrimaryKey primaryKey, int index) {
        this(primaryKey, index, null);
    }

    /**
     * Construct a new instance.
     * 
     * @param primaryKey
     * @param index
     * @param bytes;
     */
    private Position(PrimaryKey primaryKey, int index,
            @Nullable ByteBuffer bytes) {
        Preconditions
                .checkArgument(index >= 0, "Cannot have an negative index");
        this.primaryKey = primaryKey;
        this.index = index;
        this.bytes = bytes;
    }

    @Override
    public int compareTo(Position other) {
        // Order by record (primaryKey) first, then by index within the record.
        int comparison;
        return (comparison = primaryKey.compareTo(other.primaryKey)) != 0 ? comparison
                : Integer.compare(index, other.index);
    }

    @Override
    public boolean equals(Object obj) {
        if(obj instanceof Position) {
            Position other = (Position) obj;
            return primaryKey.equals(other.primaryKey) && index == other.index;
        }
        return false;
    }

    /**
     * Return a byte buffer that represents this Value with the following order:
     * <ol>
     * <li><strong>primaryKey</strong> - position 0</li>
     * <li><strong>index</strong> - position 8</li>
     * </ol>
     * 
     * @return the ByteBuffer representation
     */
    @Override
    public ByteBuffer getBytes() {
        if(bytes == null) {
            bytes = ByteBuffer.allocate(size());
            copyTo(bytes);
            bytes.rewind();
        }
        // Hand out a read-only view so callers can't mutate the cache.
        return ByteBuffers.asReadOnlyBuffer(bytes);
    }

    /**
     * Return the associated {@code index}.
     * 
     * @return the index
     */
    public int getIndex() {
        return index;
    }

    /**
     * Return the associated {@code primaryKey}.
     * 
     * @return the primaryKey
     */
    public PrimaryKey getPrimaryKey() {
        return primaryKey;
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): both derive from primaryKey and index.
        return Objects.hash(primaryKey, index);
    }

    @Override
    public int size() {
        return SIZE;
    }

    @Override
    public String toString() {
        return "Position " + index + " in Record " + primaryKey;
    }

    @Override
    public void copyTo(ByteBuffer buffer) {
        // NOTE: Storing the index as an int instead of some size aware
        // variable length is probably overkill since most indexes will be
        // smaller than Byte.MAX_SIZE or Short.MAX_SIZE, but having variable
        // size indexes means that the size of the entire Position (as an
        // int) must be stored before the Position for proper
        // deserialization. By storing the index as an int, the size of each
        // Position is constant so we won't need to store the overall size
        // prior to the Position to deserialize it, which is actually more
        // space efficient.
        primaryKey.copyTo(buffer);
        buffer.putInt(index);
    }

}
| remiemalik/concourse | concourse-server/src/main/java/com/cinchapi/concourse/server/model/Position.java | Java | apache-2.0 | 5,815 |
<?php
/**
* This example adds text ads to an ad group that uses upgraded URLs.
*
* To get ad groups, run GetAdGroups.php.
*
* Restriction: adwords-only
*
* Copyright 2014, Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @package GoogleApiAdsAdWords
* @subpackage v201506
* @category WebServices
* @copyright 2014, Google Inc. All Rights Reserved.
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache License,
* Version 2.0
*/
// Include the initialization file
require_once dirname(dirname(__FILE__)) . '/init.php';
require_once UTIL_PATH . '/MediaUtils.php';
// Enter parameters required by the code example.
$adGroupId = 'INSERT_AD_GROUP_ID_HERE';
/**
* Runs the example.
* @param AdWordsUser $user the user to run the example with
* @param string $adGroupId the id of the ad group to add the ads to
*/
/**
 * Runs the example.
 *
 * Creates five paused text ads in the given ad group, each configured with
 * upgraded-URL fields (tracking URL template, custom URL parameters, final
 * URLs and final mobile URLs), then prints the resulting ads.
 *
 * @param AdWordsUser $user the user to run the example with
 * @param string $adGroupId the id of the ad group to add the ads to
 */
function AddTextAdWithUpgradedUrlsExample(AdWordsUser $user, $adGroupId) {
  // Get the service, which loads the required classes.
  $adGroupAdService = $user->GetService('AdGroupAdService', ADWORDS_VERSION);

  $numAds = 5;
  $operations = array();
  for ($i = 0; $i < $numAds; $i++) {
    // Create text ad. uniqid() keeps each headline distinct so the ads are
    // not rejected as duplicates.
    $textAd = new TextAd();
    $textAd->headline = 'Cruise #' . uniqid();
    $textAd->description1 = 'Visit the Red Planet in style.';
    $textAd->description2 = 'Low-gravity fun for everyone!';
    $textAd->displayUrl = 'www.example.com';

    // Specify a tracking url for 3rd party tracking provider. You may
    // specify one at customer, campaign, ad group, ad, criterion or
    // feed item levels. {lpurl} expands to the landing-page (final) URL.
    $textAd->trackingUrlTemplate =
        'http://tracker.example.com/?season={_season}&promocode={_promocode}' .
        '&u={lpurl}';

    // Since your tracking url has two custom parameters, provide their
    // values too. This can be provided at campaign, ad group, ad, criterion
    // or feed item levels.
    $seasonParameter = new CustomParameter();
    $seasonParameter->key = 'season';
    $seasonParameter->value = 'christmas';

    $promoCodeParameter = new CustomParameter();
    $promoCodeParameter->key = 'promocode';
    $promoCodeParameter->value = 'NYC123';

    $textAd->urlCustomParameters = new CustomParameters();
    $textAd->urlCustomParameters->parameters = array($seasonParameter,
        $promoCodeParameter);

    // Specify a list of final urls. This field cannot be set if url field is
    // set. This may be specified at ad, criterion and feed item levels.
    $textAd->finalUrls = array('http://www.example.com/cruise/space/',
        'http://www.example.com/locations/mars/');

    // Specify a list of final mobile urls. This field cannot be set if url
    // field is set, or finalUrls is unset. This may be specified at ad,
    // criterion and feed item levels.
    $textAd->finalMobileUrls = array('http://mobile.example.com/cruise/space/',
        'http://mobile.example.com/locations/mars/');

    // Create ad group ad.
    $adGroupAd = new AdGroupAd();
    $adGroupAd->adGroupId = $adGroupId;
    $adGroupAd->ad = $textAd;

    // Set additional settings (optional). Ads are created PAUSED so they
    // don't serve before being reviewed.
    $adGroupAd->status = 'PAUSED';

    // Create operation.
    $operation = new AdGroupAdOperation();
    $operation->operand = $adGroupAd;
    $operation->operator = 'ADD';
    $operations[] = $operation;
  }

  // Make the mutate request (a single call for all five ADD operations).
  $result = $adGroupAdService->mutate($operations);

  // Display results.
  foreach ($result->value as $adGroupAd) {
    $ad = $adGroupAd->ad;
    printf("Text ad with headline '%s' and ID '%d' was added.\n",
        $ad->headline, $ad->id);
    printf("  displayUrl is '%s'\n",
        $ad->displayUrl);
    print("Upgraded URL properties:\n");
    printf("  Final URLs: %s\n",
        implode(', ', $ad->finalUrls));
    printf("  Final Mobile URLs: %s\n",
        implode(', ', $ad->finalMobileUrls));
    printf("  Tracking URL template: %s\n",
        $ad->trackingUrlTemplate);
    printf("  Custom parameters: %s\n",
        implode(', ',
            array_map(function($param) {
              return sprintf('%s=%s', $param->key, $param->value);
            },
            $ad->urlCustomParameters->parameters)));
  }
}
// Don't run the example if the file is being included (i.e. only run when
// this script is the entry point invoked directly from the command line).
if (__FILE__ != realpath($_SERVER['PHP_SELF'])) {
  return;
}

try {
  // Get AdWordsUser from credentials in "../auth.ini"
  // relative to the AdWordsUser.php file's directory.
  $user = new AdWordsUser();

  // Log every SOAP XML request and response.
  $user->LogAll();

  // Run the example.
  AddTextAdWithUpgradedUrlsExample($user, $adGroupId);
} catch (Exception $e) {
  printf("An error has occurred: %s\n", $e->getMessage());
}
| a1pro/adwordsAPI | examples/AdWords/v201506/AdvancedOperations/AddTextAdWithUpgradedUrls.php | PHP | apache-2.0 | 5,203 |
from collections import OrderedDict
from app.master.atom_grouper import AtomGrouper
class TimeBasedAtomGrouper(object):
"""
This class implements the algorithm to best split & group atoms based on historic time values. This algorithm is
somewhat complicated, so I'm going to give a summary here.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Let N be the number of concurrent executors allocated for this job.
Let T be the aggregate serial time to execute all atoms on a single executor.
Both N and T are known values at the beginning of this algorithm.
In the ideal subjob atom-grouping, we would have exactly N subjobs, each allocated with T/N amount of work that
would all end at the same time. However, in reality, there are a few factors that makes this solution unfeasible:
- There is a significant amount of variability in the times of running these atoms, so numbers are never exact.
- Certain builds will introduce new tests (for which we don't have historical time data for).
- Not all of the machines are exactly the same, so we can't expect identical performance.
We have two aims for this algorithm:
- Minimize the amount of framework overhead (time spent sending and retrieving subjobs) and maximize the amount of
time the slaves actually spend running the build.
- Don't overload any single executor with too much work--this will cause the whole build to wait on a single
executor. We want to try to get all of the executors to end as close to the same time as possible in order to
get rid of any inefficient use of slave machines.
In order to accomplish this, the algorithm implemented by this class tries to split up the majority of the atoms
into N buckets, and splits up the rest of the atoms into smaller buckets. Hopefully, the timeline graph of
executed subjobs for each of the executors would end up looking like this:
[========================================================================][===][==][==]
[===============================================================================][==]
[====================================================================][====][===][==][=]
[========================================================================][===][==][=]
[=====================================================================][====][==][==]
[==================================================================================][=]
[===================================================================][======][==][==]
The algorithm has two stages of subjob creation: the 'big chunk' stage and the 'small chunk' stage. The 'big chunk'
stage creates exactly N large subjob groupings that will consist of the majority of atoms (in terms of runtime).
The 'small chunk' stage creates ~2N short subjob groupings that will be used to fill in the gaps in order to aim for
having all of the executors end at similar times.
Notes:
- For new atoms that we don't have historic times for, we will assign it the highest atom time value in order to
avoid underestimating the length of unknown atoms.
- We will have to try tweaking the percentage of T that we want to be allocated for the initial large batch of
big subjobs. Same goes for the number and size of the smaller buckets.
"""
BIG_CHUNK_FRACTION = 0.8
def __init__(self, atoms, max_executors, atom_time_map, project_directory):
    """
    :param atoms: the atoms that make up this build
    :type atoms: list[app.master.atom.Atom]
    :param max_executors: upper bound on the number of executors this build may use
    :type max_executors: int
    :param atom_time_map: historic runtimes (seconds) keyed by atom command string for this job
    :type atom_time_map: dict[str, float]
    :param project_directory: the project directory for this build; passed through to the
        timing-setup step when grouping
    :type project_directory: str
    """
    self._project_directory = project_directory
    self._atom_time_map = atom_time_map
    self._max_executors = max_executors
    self._atoms = atoms
def groupings(self):
    """
    Partition this build's atoms into subjobs using historic timing data.

    Falls back to the naive AtomGrouper when no usable timing data exists.

    :return: a list of lists of atoms
    :rtype: list[list[app.master.atom.Atom]]
    """
    # Step 1: attach historic runtimes to each atom and compute the estimated
    # total serial runtime of the build.
    try:
        total_estimated_runtime = self._set_expected_atom_times(
            self._atoms, self._atom_time_map, self._project_directory)
    except _AtomTimingDataError:
        # No timing data at all -- fall back to simple count-based grouping.
        return AtomGrouper(self._atoms, self._max_executors).groupings()

    # Step 2: order atoms longest-first so the greedy packing fills big chunks first.
    sorted_atom_times_left = OrderedDict(
        (atom, atom.expected_time)
        for atom in sorted(self._atoms, key=lambda atom: atom.expected_time, reverse=True))

    # Step 3: pack exactly max_executors 'big' subjobs covering BIG_CHUNK_FRACTION of the
    # total runtime, then fill the remainder with ~2N smaller subjobs.
    big_subjob_time = (total_estimated_runtime * self.BIG_CHUNK_FRACTION) / self._max_executors
    small_subjob_time = (total_estimated_runtime * (1.0 - self.BIG_CHUNK_FRACTION)) / (2 * self._max_executors)

    # _group_atoms_into_sized_buckets() consumes entries from sorted_atom_times_left.
    subjobs = self._group_atoms_into_sized_buckets(
        sorted_atom_times_left, big_subjob_time, self._max_executors)
    subjobs.extend(
        self._group_atoms_into_sized_buckets(sorted_atom_times_left, small_subjob_time, None))
    return subjobs
def _set_expected_atom_times(self, new_atoms, old_atoms_with_times, project_directory):
    """
    Populate atom.expected_time on every atom in new_atoms from historic timing data and
    return the estimated total serial runtime for the build.  Returning the total here
    (rather than in a second pass) avoids iterating thousands of atoms twice.

    Atoms with no historic entry are assigned the largest known atom time so that the
    length of unknown atoms is never underestimated.

    :param new_atoms: the list of atoms that will be run in this build
    :type new_atoms: list[app.master.atom.Atom]
    :param old_atoms_with_times: historic runtimes keyed by atom command string for this job
    :type old_atoms_with_times: dict[str, float]
    :param project_directory: currently unused here; kept for interface compatibility
    :type project_directory: str
    :return: the total estimated runtime in seconds
    :rtype: float
    :raises _AtomTimingDataError: if none of the atoms have historic timing data
    """
    atoms_missing_times = []
    total_time = 0
    max_atom_time = 0

    for atom in new_atoms:
        if atom.command_string not in old_atoms_with_times:
            # No history for this atom -- estimate it after we know the max time.
            atoms_missing_times.append(atom)
            continue
        # Note: the atom keeps the project directory in its command string, since this
        # data is sent directly to the slave to be run.
        atom.expected_time = old_atoms_with_times[atom.command_string]
        max_atom_time = max(max_atom_time, atom.expected_time)
        total_time += atom.expected_time

    # Assign the most conservative known estimate to atoms without history.
    for atom in atoms_missing_times:
        atom.expected_time = max_atom_time

    if len(new_atoms) == len(atoms_missing_times):
        raise _AtomTimingDataError

    total_time += max_atom_time * len(atoms_missing_times)
    return total_time
def _group_atoms_into_sized_buckets(self, sorted_atom_time_dict, target_group_time, max_groups_to_create):
    """
    Greedily pack the [atom, time] pairs of sorted_atom_time_dict into groups that are each
    estimated to take roughly target_group_time seconds.  At most max_groups_to_create
    groups are produced; the method returns once that limit is reached or the dict is empty.

    This method mutates sorted_atom_time_dict by removing the atoms it groups (often from
    the middle of the collection).

    :param sorted_atom_time_dict: sorted (longest first) mapping of atom -> expected time.
        Entries are removed from this OrderedDict as they are grouped.
    :type sorted_atom_time_dict: OrderedDict[app.master.atom.Atom, float]
    :param target_group_time: how long each subjob should approximately take, in seconds
    :type target_group_time: float
    :param max_groups_to_create: the maximum number of subjobs to create, or None for no limit
    :type max_groups_to_create: int|None
    :return: the groups of grouped atoms, each group estimated at ~target_group_time
    :rtype: list[list[app.master.atom.Atom]]
    """
    subjobs = []
    subjob_time_so_far = 0
    subjob_atoms = []
    while (max_groups_to_create is None or len(subjobs) < max_groups_to_create) and len(sorted_atom_time_dict) > 0:
        # BUGFIX: iterate over a snapshot of the items.  The previous code iterated the
        # live dict view while popping entries from it, which raises
        # "RuntimeError: dictionary changed size during iteration" on Python 3
        # (it only worked on Python 2, where items() returned a list copy).
        for atom, atom_time in list(sorted_atom_time_dict.items()):
            if len(subjob_atoms) == 0 or (atom_time + subjob_time_so_far) <= target_group_time:
                subjob_time_so_far += atom_time
                subjob_atoms.append(atom)
                sorted_atom_time_dict.pop(atom)
            # If (number of subjobs created so far + atoms left) is less than or equal to the
            # total number of subjobs we need to create, have each remaining atom be its own
            # subjob and return.  The "+ 1" accounts for the in-progress subjob that hasn't
            # been appended to subjobs yet.
            if max_groups_to_create is not None and (len(subjobs) + len(sorted_atom_time_dict) + 1) <= max_groups_to_create:
                subjobs.append(subjob_atoms)
                for remaining_atom in list(sorted_atom_time_dict):
                    sorted_atom_time_dict.pop(remaining_atom)
                    subjobs.append([remaining_atom])
                return subjobs
        subjobs.append(subjob_atoms)
        subjob_atoms = []
        subjob_time_so_far = 0
    return subjobs
class _AtomTimingDataError(Exception):
    """
    Raised when the historic atom timing data for a build is missing or unusable.
    """
| nickzuber/ClusterRunner | app/master/time_based_atom_grouper.py | Python | apache-2.0 | 11,090 |
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.common.lib.soap;
/**
 * Container for the outcome of a SOAP invocation: the returned value, the
 * request/response metadata and, if the call failed, the thrown exception.
 * Instances are produced by the originating {@link SoapCall} via
 * {@link Builder}.
 *
 * @author Adam Rogal
 */
public class SoapCallReturn {

  private Object returnValue;
  private RequestInfo requestInfo;
  private ResponseInfo responseInfo;
  private Throwable exception;

  /**
   * Creates an empty SoapCallReturn with fresh request/response info.
   */
  public SoapCallReturn() {
    requestInfo = new RequestInfo();
    responseInfo = new ResponseInfo();
  }

  /**
   * Gets the return value from the SOAP call that was made.
   *
   * @return the return value from the SOAP call that was made or {@code null}
   *         if there was an exception
   */
  public Object getReturnValue() {
    return returnValue;
  }

  /**
   * Gets the request info from the SOAP call that was made.
   */
  public RequestInfo getRequestInfo() {
    return requestInfo;
  }

  /**
   * Gets the response info from the SOAP call that was made.
   */
  public ResponseInfo getResponseInfo() {
    return responseInfo;
  }

  /**
   * Gets the exception from the SOAP call that was made if one occurred.
   *
   * @return the exception from the SOAP call that was made or {@code null}
   *         if there was no exception
   */
  public Throwable getException() {
    return exception;
  }

  /**
   * Fluent builder for {@link SoapCallReturn} instances.
   *
   * @author Adam Rogal
   */
  public static class Builder {

    private final SoapCallReturn result;

    /**
     * Creates a builder wrapping a fresh {@link SoapCallReturn}.
     */
    public Builder() {
      this.result = new SoapCallReturn();
    }

    /**
     * Adds a return value to the SoapCallReturn under construction.
     *
     * @param returnValue the return value to add to the SoapCallReturn
     * @return this builder
     */
    public Builder withReturnValue(Object returnValue) {
      result.returnValue = returnValue;
      return this;
    }

    /**
     * Adds a response info to the SoapCallReturn under construction.
     *
     * @param responseInfo the response info to add to the SoapCallReturn
     * @return this builder
     */
    public Builder withResponseInfo(ResponseInfo responseInfo) {
      result.responseInfo = responseInfo;
      return this;
    }

    /**
     * Adds a request info to the SoapCallReturn under construction.
     *
     * @param requestInfo the request info to add to the SoapCallReturn
     * @return this builder
     */
    public Builder withRequestInfo(RequestInfo requestInfo) {
      result.requestInfo = requestInfo;
      return this;
    }

    /**
     * Adds an exception to the SoapCallReturn under construction.
     *
     * @param exception the exception to add to the SoapCallReturn
     * @return this builder
     */
    public Builder withException(Throwable exception) {
      result.exception = exception;
      return this;
    }

    /**
     * Returns the SoapCallReturn this Builder has been constructing.
     *
     * @return the built SoapCallReturn object
     */
    public SoapCallReturn build() {
      return result;
    }
  }
}
| andyj24/googleads-java-lib | modules/ads_lib/src/main/java/com/google/api/ads/common/lib/soap/SoapCallReturn.java | Java | apache-2.0 | 3,736 |
/**
* Copyright 2020 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Services} from '../../../src/services';
import {getMode} from '../../../src/mode';
import {includes} from '../../../src/string';
import {map} from '../../../src/utils/object';
import {parseExtensionUrl} from '../../../src/service/extension-script';
import {preloadFriendlyIframeEmbedExtensions} from '../../../src/friendly-iframe-embed';
import {removeElement, rootNodeFor} from '../../../src/dom';
import {urls} from '../../../src/config';
/**
 * Result shape of head validation: the sanitized <head> element plus the AMP
 * extensions it references.
 * @typedef {{
 *   extensions: !Array<{extensionId: (string|undefined), extensionVersion: (string|undefined)}>,
 *   head: !Element
 * }}
 */
export let ValidatedHeadDef;

// From validator/validator-main.protoascii
// Stylesheet URLs from allowlisted font providers (Material Design Icons,
// Cloud Typography, fonts.com, Google Fonts, Bootstrap CDN Font Awesome,
// fontawesome.com, Typekit). Any other stylesheet <link> is stripped.
const ALLOWED_FONT_REGEX = new RegExp(
  'https://cdn\\.materialdesignicons\\.com/' +
    '([0-9]+\\.?)+/css/materialdesignicons\\.min\\.css|' +
    'https://cloud\\.typography\\.com/' +
    '[0-9]*/[0-9]*/css/fonts\\.css|' +
    'https://fast\\.fonts\\.net/.*|' +
    'https://fonts\\.googleapis\\.com/css2?\\?.*|' +
    'https://fonts\\.googleapis\\.com/icon\\?.*|' +
    'https://fonts\\.googleapis\\.com/earlyaccess/.*\\.css|' +
    'https://maxcdn\\.bootstrapcdn\\.com/font-awesome/' +
    '([0-9]+\\.?)+/css/font-awesome\\.min\\.css(\\?.*)?|' +
    'https://(use|pro)\\.fontawesome\\.com/releases/v([0-9]+\\.?)+' +
    '/css/[0-9a-zA-Z-]+\\.css|' +
    'https://(use|pro)\\.fontawesome\\.com/[0-9a-zA-Z-]+\\.css|' +
    'https://use\\.typekit\\.net/[\\w\\p{L}\\p{N}_]+\\.css'
);

// If editing please also change:
// extensions/amp-a4a/amp-a4a-format.md#allowed-amp-extensions-and-builtins
// AMP extensions that may run inside an AMPHTML ad creative; scripts for any
// other extension are removed by handleScript().
const EXTENSION_ALLOWLIST = map({
  'amp-accordion': true,
  'amp-ad-exit': true,
  'amp-analytics': true,
  'amp-anim': true,
  'amp-animation': true,
  'amp-audio': true,
  'amp-bind': true,
  'amp-carousel': true,
  'amp-fit-text': true,
  'amp-font': true,
  'amp-form': true,
  'amp-img': true,
  'amp-layout': true,
  'amp-lightbox': true,
  'amp-mraid': true,
  'amp-mustache': true,
  'amp-pixel': true,
  'amp-position-observer': true,
  'amp-selector': true,
  'amp-social-share': true,
  'amp-video': true,
});

// Matches script src URLs served from the AMP CDN's /v0/ path (the CDN host
// is taken from config, with dots escaped for the regex).
const EXTENSION_URL_PREFIX = new RegExp(
  urls.cdn.replace(/\./g, '\\.') + '/v0/'
);
/**
 * Sanitizes AMPHTML Ad head element and extracts extensions to be installed.
 *
 * Walks the direct children of `head`, keeping only META/TITLE elements, JSON
 * or allowlisted extension scripts, allowlisted styles, and font/image preload
 * links; every other element is removed from the (detached) DOM in place.  As
 * a side effect, discovered extensions are preloaded and font/image URLs are
 * preconnected/preloaded.
 *
 * @param {!Window} win
 * @param {!Element} adElement
 * @param {?Element} head
 * @return {?ValidatedHeadDef} null when head is empty or the document is not
 *     marked as amp4ads.
 */
export function processHead(win, adElement, head) {
  if (!head || !head.firstChild) {
    return null;
  }
  const root = rootNodeFor(head);
  const htmlTag = root.documentElement;
  // Only process creatives explicitly marked with an amp4ads attribute
  // (including the emoji variants).
  if (
    !htmlTag ||
    (!htmlTag.hasAttribute('amp4ads') &&
      !htmlTag.hasAttribute('⚡️4ads') &&
      !htmlTag.hasAttribute('⚡4ads')) // Unicode weirdness.
  ) {
    return null;
  }
  const urlService = Services.urlForDoc(adElement);
  /** @type {!Array<{extensionId: string, extensionVersion: string}>} */
  const extensions = [];
  const fonts = [];
  const images = [];
  let element = head.firstElementChild;
  while (element) {
    // Store next element here as the following code will remove
    // certain elements from the detached DOM.
    const nextElement = element.nextElementSibling;
    switch (element.tagName.toUpperCase()) {
      case 'SCRIPT':
        handleScript(extensions, element);
        break;
      case 'STYLE':
        handleStyle(element);
        break;
      case 'LINK':
        handleLink(fonts, images, element);
        break;
      // Allow these without validation.
      case 'META':
      case 'TITLE':
        break;
      default:
        removeElement(element);
        break;
    }
    element = nextElement;
  }
  // Load any extensions; do not wait on their promises as this
  // is just to prefetch.
  preloadFriendlyIframeEmbedExtensions(win, extensions);
  // Preload any fonts.
  fonts.forEach((fontUrl) =>
    Services.preconnectFor(win).preload(adElement.getAmpDoc(), fontUrl)
  );
  // Preload any AMP images (only over secure URLs).
  images.forEach(
    (imageUrl) =>
      urlService.isSecure(imageUrl) &&
      Services.preconnectFor(win).preload(adElement.getAmpDoc(), imageUrl)
  );
  return {
    extensions,
    head,
  };
}
/**
 * Keeps inline JSON scripts, records allowlisted AMP extension scripts for
 * preloading, and strips every other <script> element.
 * @param {!Array<{extensionId: string, extensionVersion: string}>} extensions
 * @param {!Element} script
 */
function handleScript(extensions, script) {
  if (script.type === 'application/json') {
    // Inline JSON payloads (e.g. amp-analytics configs) are allowed as-is.
    return;
  }
  const {src} = script;
  const isTesting = getMode().test || getMode().localDev;
  const isExtensionScript =
    EXTENSION_URL_PREFIX.test(src) ||
    // Integration tests point to local files.
    (isTesting && includes(src, '/dist/'));
  if (isExtensionScript) {
    const extensionInfo = parseExtensionUrl(src);
    if (extensionInfo && EXTENSION_ALLOWLIST[extensionInfo.extensionId]) {
      extensions.push(extensionInfo);
    }
  }
  removeElement(script);
}
/**
 * Collects <link> elements that are either image preloads or stylesheets from
 * allowed font providers; removes every other <link>.
 * @param {!Array<string>} fonts collected font stylesheet URLs
 * @param {!Array<string>} images collected image preload URLs
 * @param {!Element} link
 */
function handleLink(fonts, images, link) {
  const {as, href, rel} = link;
  if (rel === 'preload' && as === 'image') {
    images.push(href);
  } else if (rel === 'stylesheet' && ALLOWED_FONT_REGEX.test(href)) {
    fonts.push(href);
  } else {
    removeElement(link);
  }
}
/**
 * Removes any <style> element that is not `amp-custom`, `amp-keyframes`, or
 * the amp4ads boilerplate.
 * @param {!Element} style
 */
function handleStyle(style) {
  const isAllowedStyle =
    style.hasAttribute('amp-custom') ||
    style.hasAttribute('amp-keyframes') ||
    style.hasAttribute('amp4ads-boilerplate');
  if (!isAllowedStyle) {
    removeElement(style);
  }
}
| lannka/amphtml | extensions/amp-a4a/0.1/head-validation.js | JavaScript | apache-2.0 | 6,308 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.vxquery.compiler.rewriter.rules;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.vxquery.functions.BuiltinFunctions;
import org.apache.vxquery.functions.BuiltinOperators;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
/**
* The rule searches for aggregate operators with an aggregate function
* expression that has not been initialized for two step aggregation.
*
* <pre>
* Before
*
* plan__parent
* AGGREGATE( $v : af1( $v1 ) )
* plan__child
*
* Where af1 is a VXquery aggregate function expression configured for single
* step processing and $v1 is defined in plan__child.
*
* After
*
* if (af1 == count) aggregate operating settings:
* Step 1: count
* Step 2: sum
* if (af1 == avg) aggregate operating settings:
* Step 1: avg-local
* Step 2: avg-global
* if (af1 in (max, min, sum)) aggregate operating settings:
* Step 1: af1
* Step 2: af1
* </pre>
*
* @author prestonc
*/
public class IntroduceTwoStepAggregateRule implements IAlgebraicRewriteRule {
final Map<FunctionIdentifier, Pair<IFunctionInfo, IFunctionInfo>> AGGREGATE_MAP = new HashMap<FunctionIdentifier, Pair<IFunctionInfo, IFunctionInfo>>();
public IntroduceTwoStepAggregateRule() {
AGGREGATE_MAP.put(BuiltinFunctions.FN_AVG_1.getFunctionIdentifier(),
new Pair<IFunctionInfo, IFunctionInfo>(BuiltinOperators.AVG_LOCAL, BuiltinOperators.AVG_GLOBAL));
AGGREGATE_MAP.put(BuiltinFunctions.FN_COUNT_1.getFunctionIdentifier(),
new Pair<IFunctionInfo, IFunctionInfo>(BuiltinFunctions.FN_COUNT_1, BuiltinFunctions.FN_SUM_1));
AGGREGATE_MAP.put(BuiltinFunctions.FN_MAX_1.getFunctionIdentifier(),
new Pair<IFunctionInfo, IFunctionInfo>(BuiltinFunctions.FN_MAX_1, BuiltinFunctions.FN_MAX_1));
AGGREGATE_MAP.put(BuiltinFunctions.FN_MIN_1.getFunctionIdentifier(),
new Pair<IFunctionInfo, IFunctionInfo>(BuiltinFunctions.FN_MIN_1, BuiltinFunctions.FN_MIN_1));
AGGREGATE_MAP.put(BuiltinFunctions.FN_SUM_1.getFunctionIdentifier(),
new Pair<IFunctionInfo, IFunctionInfo>(BuiltinFunctions.FN_SUM_1, BuiltinFunctions.FN_SUM_1));
}
@Override
public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
// Check if aggregate function.
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
if (op.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
return false;
}
AggregateOperator aggregate = (AggregateOperator) op;
if (aggregate.getExpressions().size() == 0) {
return false;
}
Mutable<ILogicalExpression> mutableLogicalExpression = aggregate.getExpressions().get(0);
ILogicalExpression logicalExpression = mutableLogicalExpression.getValue();
if (logicalExpression.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
return false;
}
AbstractFunctionCallExpression functionCall = (AbstractFunctionCallExpression) logicalExpression;
if (AGGREGATE_MAP.containsKey(functionCall.getFunctionIdentifier())) {
AggregateFunctionCallExpression aggregateFunctionCall = (AggregateFunctionCallExpression) functionCall;
if (aggregateFunctionCall.isTwoStep()) {
return false;
}
aggregateFunctionCall.setTwoStep(true);
aggregateFunctionCall.setStepOneAggregate(AGGREGATE_MAP.get(functionCall.getFunctionIdentifier()).first);
aggregateFunctionCall.setStepTwoAggregate(AGGREGATE_MAP.get(functionCall.getFunctionIdentifier()).second);
return true;
}
return false;
}
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) {
return false;
}
}
| prestoncarman/vxquery | vxquery-core/src/main/java/org/apache/vxquery/compiler/rewriter/rules/IntroduceTwoStepAggregateRule.java | Java | apache-2.0 | 5,872 |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.roots.ui.configuration;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkType;
import com.intellij.openapi.projectRoots.SdkTypeId;
import com.intellij.openapi.roots.ui.OrderEntryAppearanceService;
import com.intellij.openapi.roots.ui.configuration.projectRoot.JdkListConfigurable;
import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectSdksModel;
import com.intellij.openapi.ui.ComboBoxWithWidePopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.ColoredListCellRenderer;
import com.intellij.ui.ScreenUtil;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.EmptyIcon;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.Arrays;
import java.util.Collection;
/**
* @author Eugene Zhuravlev
* @since May 18, 2005
*/
public class JdkComboBox extends ComboBoxWithWidePopup<JdkComboBox.JdkComboBoxItem> {
private static final Icon EMPTY_ICON = JBUI.scale(EmptyIcon.create(1, 16));
@Nullable
private final Condition<Sdk> myFilter;
@Nullable
private final Condition<SdkTypeId> myCreationFilter;
private JButton mySetUpButton;
private final Condition<SdkTypeId> mySdkTypeFilter;
  /**
   * Creates a combo box over all SDKs in {@code jdkModel} with no filtering.
   */
  public JdkComboBox(@NotNull final ProjectSdksModel jdkModel) {
    this(jdkModel, null);
  }
  /**
   * Creates a combo box restricted by {@code filter}; the same condition is used
   * as the SDK-type filter, the per-SDK filter and the creation filter.
   */
  public JdkComboBox(@NotNull final ProjectSdksModel jdkModel,
                     @Nullable Condition<SdkTypeId> filter) {
    this(jdkModel, filter, getSdkFilter(filter), filter, false);
  }
  /**
   * Creates the combo box and installs a renderer for the different item kinds.
   *
   * @param jdkModel          model holding the configurable SDKs
   * @param sdkTypeFilter     restricts which SDK types are listed (may be null)
   * @param filter            restricts which concrete SDKs are listed (may be null)
   * @param creationFilter    restricts which SDK types may be created via the setup button (may be null)
   * @param addSuggestedItems whether to append detected-but-unregistered SDKs to the model
   */
  public JdkComboBox(@NotNull final ProjectSdksModel jdkModel,
                     @Nullable Condition<SdkTypeId> sdkTypeFilter,
                     @Nullable Condition<Sdk> filter,
                     @Nullable Condition<SdkTypeId> creationFilter,
                     boolean addSuggestedItems) {
    super(new JdkComboBoxModel(jdkModel, sdkTypeFilter, filter, addSuggestedItems));
    myFilter = filter;
    mySdkTypeFilter = sdkTypeFilter;
    myCreationFilter = creationFilter;
    setRenderer(new ColoredListCellRenderer<JdkComboBoxItem>() {
      @Override
      protected void customizeCellRenderer(@NotNull JList<? extends JdkComboBoxItem> list,
                                           JdkComboBoxItem value,
                                           int index,
                                           boolean selected,
                                           boolean hasFocus) {
        if (JdkComboBox.this.isEnabled()) {
          setIcon(EMPTY_ICON); // to fix vertical size
          if (value instanceof InvalidJdkComboBoxItem) {
            // Invalid SDK placeholder: render in error style.
            final String str = value.toString();
            append(str, SimpleTextAttributes.ERROR_ATTRIBUTES);
          }
          else if (value instanceof ProjectJdkComboBoxItem) {
            final Sdk jdk = jdkModel.getProjectSdk();
            if (jdk != null) {
              // "Inherited" entry: show the project SDK's icon and name.
              setIcon(((SdkType)jdk.getSdkType()).getIcon());
              append(ProjectBundle.message("project.roots.project.jdk.inherited"), SimpleTextAttributes.REGULAR_ATTRIBUTES);
              append(" (" + jdk.getName() + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
            }
            else {
              // No project SDK configured: render the item as an error.
              final String str = value.toString();
              append(str, SimpleTextAttributes.ERROR_ATTRIBUTES);
            }
          }
          else if (value instanceof SuggestedJdkItem) {
            // Detected-but-unregistered SDK: show its type icon, version (when
            // resolvable from the home path) and the path itself.
            SdkType type = ((SuggestedJdkItem)value).getSdkType();
            String home = ((SuggestedJdkItem)value).getPath();
            setIcon(type.getIconForAddAction());
            String version = type.getVersionString(home);
            append(version == null ? type.getPresentableName() : version);
            append(" (" + home + ")", SimpleTextAttributes.GRAYED_ATTRIBUTES);
          }
          else if (value != null) {
            OrderEntryAppearanceService.getInstance().forJdk(value.getJdk(), false, selected, true).customize(this);
          }
          else {
            // A null value renders exactly like the explicit "none" item.
            customizeCellRenderer(list, new NoneJdkComboBoxItem(), index, selected, hasFocus);
          }
        }
      }
    });
  }
@Override
public Dimension getPreferredSize() {
final Rectangle rec = ScreenUtil.getScreenRectangle(0, 0);
final Dimension size = super.getPreferredSize();
final int maxWidth = rec.width / 4;
if (size.width > maxWidth) {
size.width = maxWidth;
}
return size;
}
@Override
public Dimension getMinimumSize() {
final Dimension minSize = super.getMinimumSize();
final Dimension prefSize = getPreferredSize();
if (minSize.width > prefSize.width) {
minSize.width = prefSize.width;
}
return minSize;
}
  /**
   * Convenience overload that derives the popup title from whether this is a
   * module-level or project-level SDK setup.
   */
  public void setSetupButton(final JButton setUpButton,
                             @Nullable final Project project,
                             final ProjectSdksModel jdksModel,
                             final JdkComboBoxItem firstItem,
                             @Nullable final Condition<Sdk> additionalSetup,
                             final boolean moduleJdkSetup) {
    setSetupButton(setUpButton, project, jdksModel, firstItem, additionalSetup,
                   ProjectBundle.message("project.roots.set.up.jdk.title", moduleJdkSetup ? 1 : 2));
  }
  /**
   * Wires {@code setUpButton} to show the "add SDK" action-group popup and, once
   * an SDK is created, to register it (when a project is given), reload the model
   * and select the new SDK.
   *
   * @param additionalSetup invoked with the new SDK; returning {@code true}
   *                        reverts the selection to {@code firstItem}'s SDK
   */
  public void setSetupButton(final JButton setUpButton,
                             @Nullable final Project project,
                             final ProjectSdksModel jdksModel,
                             final JdkComboBoxItem firstItem,
                             @Nullable final Condition<Sdk> additionalSetup,
                             final String actionGroupTitle) {
    mySetUpButton = setUpButton;
    mySetUpButton.addActionListener(e -> {
      DefaultActionGroup group = new DefaultActionGroup();
      jdksModel.createAddActions(group, this, getSelectedJdk(), jdk -> {
        if (project != null) {
          // Register the freshly created SDK in the project's JDK tree.
          final JdkListConfigurable configurable = JdkListConfigurable.getInstance(project);
          configurable.addJdkNode(jdk, false);
        }
        reloadModel(new ActualJdkComboBoxItem(jdk), project);
        setSelectedJdk(jdk); //restore selection
        if (additionalSetup != null) {
          if (additionalSetup.value(jdk)) { //leave old selection
            setSelectedJdk(firstItem.getJdk());
          }
        }
      }, myCreationFilter);
      final DataContext dataContext = DataManager.getInstance().getDataContext(this);
      if (group.getChildrenCount() > 1) {
        // Several SDK types can be created: let the user pick from a popup.
        JBPopupFactory.getInstance()
          .createActionGroupPopup(actionGroupTitle, group, dataContext, JBPopupFactory.ActionSelectionAid.MNEMONICS, false)
          .showUnderneathOf(setUpButton);
      }
      else {
        // Only one action available: run it directly without a popup.
        final AnActionEvent event =
          new AnActionEvent(null, dataContext, ActionPlaces.UNKNOWN, new Presentation(""), ActionManager.getInstance(), 0);
        group.getChildren(event)[0].actionPerformed(event);
      }
    });
  }
  /**
   * Wires {@code editButton} to jump to the selected SDK in the project structure
   * dialog, and keeps the button's enabled state in sync with the selection.
   */
  public void setEditButton(final JButton editButton, final Project project, final Computable<Sdk> retrieveJDK){
    editButton.addActionListener(e -> {
      final Sdk projectJdk = retrieveJDK.compute();
      if (projectJdk != null) {
        ProjectStructureConfigurable.getInstance(project).select(projectJdk, true);
      }
    });
    addActionListener(e -> {
      final JdkComboBoxItem selectedItem = getSelectedItem();
      if (selectedItem instanceof ProjectJdkComboBoxItem) {
        // "Inherited" entry is editable only when a project SDK is actually set.
        editButton.setEnabled(ProjectStructureConfigurable.getInstance(project).getProjectJdksModel().getProjectSdk() != null);
      }
      else {
        // Concrete entries are editable unless invalid, absent, or SDK-less.
        editButton.setEnabled(!(selectedItem instanceof InvalidJdkComboBoxItem) && selectedItem != null && selectedItem.getJdk() != null);
      }
    });
  }
  /**
   * Returns the button registered via {@link #setSetupButton}, or {@code null} if none.
   */
  public JButton getSetUpButton() {
    return mySetUpButton;
  }
  /**
   * Narrows the return type of the selected element to {@link JdkComboBoxItem}.
   */
  @Override
  public JdkComboBoxItem getSelectedItem() {
    return (JdkComboBoxItem)super.getSelectedItem();
  }
@Nullable
public Sdk getSelectedJdk() {
final JdkComboBoxItem selectedItem = getSelectedItem();
return selectedItem != null? selectedItem.getJdk() : null;
}
public void setSelectedJdk(Sdk jdk) {
final int index = indexOf(jdk);
if (index >= 0) {
setSelectedIndex(index);
}
}
  /**
   * Replaces any existing "invalid SDK" placeholder with one for {@code name}
   * and selects it (it is appended as the last element).
   */
  public void setInvalidJdk(String name) {
    removeInvalidElement();
    addItem(new InvalidJdkComboBoxItem(name));
    setSelectedIndex(getModel().getSize() - 1);
  }
private int indexOf(Sdk jdk) {
final JdkComboBoxModel model = (JdkComboBoxModel)getModel();
final int count = model.getSize();
for (int idx = 0; idx < count; idx++) {
final JdkComboBoxItem elementAt = model.getElementAt(idx);
if (jdk == null) {
if (elementAt instanceof NoneJdkComboBoxItem || elementAt instanceof ProjectJdkComboBoxItem) {
return idx;
}
}
else {
Sdk elementAtJdk = elementAt.getJdk();
if (elementAtJdk != null && jdk.getName().equals(elementAtJdk.getName())) {
return idx;
}
}
}
return -1;
}
private void removeInvalidElement() {
final JdkComboBoxModel model = (JdkComboBoxModel)getModel();
final int count = model.getSize();
for (int idx = 0; idx < count; idx++) {
final JdkComboBoxItem elementAt = model.getElementAt(idx);
if (elementAt instanceof InvalidJdkComboBoxItem) {
removeItemAt(idx);
break;
}
}
}
  /**
   * Rebuilds the model with {@code firstItem} first. Without a project, only
   * {@code firstItem} is appended to the existing model; otherwise the SDK list
   * is reloaded from the project's JDK model using this box's filters.
   */
  public void reloadModel(JdkComboBoxItem firstItem, @Nullable Project project) {
    final JdkComboBoxModel model = (JdkComboBoxModel)getModel();
    if (project == null) {
      model.addElement(firstItem);
      return;
    }
    model.reload(firstItem, ProjectStructureConfigurable.getInstance(project).getProjectJdksModel(), mySdkTypeFilter, myFilter, false);
  }
  /**
   * Combo box model listing the configured SDKs (sorted by type, then by the
   * type's comparator), optionally prefixed with a placeholder item and
   * optionally followed by suggested (detected but unregistered) SDKs.
   */
  private static class JdkComboBoxModel extends DefaultComboBoxModel<JdkComboBoxItem> {
    JdkComboBoxModel(@NotNull final ProjectSdksModel jdksModel, @Nullable Condition<SdkTypeId> sdkTypeFilter,
                     @Nullable Condition<Sdk> sdkFilter, boolean addSuggested) {
      reload(null, jdksModel, sdkTypeFilter, sdkFilter, addSuggested);
    }

    /**
     * Clears and repopulates the model. {@code firstItem}, when non-null, is
     * inserted before the SDK entries.
     */
    void reload(@Nullable final JdkComboBoxItem firstItem,
                @NotNull final ProjectSdksModel jdksModel,
                @Nullable Condition<SdkTypeId> sdkTypeFilter,
                @Nullable Condition<Sdk> sdkFilter,
                boolean addSuggested) {
      removeAllElements();
      if (firstItem != null) addElement(firstItem);
      Sdk[] jdks = sortSdks(jdksModel.getSdks());
      for (Sdk jdk : jdks) {
        if (sdkFilter == null || sdkFilter.value(jdk)) {
          addElement(new ActualJdkComboBoxItem(jdk));
        }
      }
      if (addSuggested) {
        addSuggestedItems(sdkTypeFilter, jdks);
      }
    }

    /**
     * Returns a copy of {@code sdks} sorted first by SDK type presentable name
     * (when the types use different comparators), then by the type's own comparator.
     */
    @NotNull
    private static Sdk[] sortSdks(@NotNull final Sdk[] sdks) {
      Sdk[] clone = sdks.clone();
      Arrays.sort(clone, (sdk1, sdk2) -> {
        SdkType sdkType1 = (SdkType)sdk1.getSdkType();
        SdkType sdkType2 = (SdkType)sdk2.getSdkType();
        if (!sdkType1.getComparator().equals(sdkType2.getComparator())) return StringUtil.compare(sdkType1.getPresentableName(), sdkType2.getPresentableName(), true);
        return sdkType1.getComparator().compare(sdk1, sdk2);
      });
      return clone;
    }

    /**
     * Appends {@link SuggestedJdkItem}s for SDK types whose valid home paths were
     * detected but which have no registered SDK yet.
     */
    void addSuggestedItems(@Nullable Condition<SdkTypeId> sdkTypeFilter, Sdk[] jdks) {
      SdkType[] types = SdkType.getAllTypes();
      for (SdkType type : types) {
        // NOTE(review): '&&' binds tighter than '||' here, so when sdkTypeFilter is
        // null the "no registered SDK of this type" check is skipped entirely --
        // confirm whether that is intentional.
        if (sdkTypeFilter == null || sdkTypeFilter.value(type) && ContainerUtil.find(jdks, sdk -> sdk.getSdkType() == type) == null) {
          Collection<String> paths = type.suggestHomePaths();
          for (String path : paths) {
            if (path != null && type.isValidSdkHome(path)) {
              addElement(new SuggestedJdkItem(type, path));
            }
          }
        }
      }
    }
  }
/**
 * Adapts an SDK-type condition into an SDK condition; a null filter accepts every SDK.
 */
public static Condition<Sdk> getSdkFilter(@Nullable final Condition<SdkTypeId> filter) {
  if (filter == null) {
    return Conditions.alwaysTrue();
  }
  return sdk -> filter.value(sdk.getSdkType());
}
/**
 * Base class for entries shown in the JDK combo box. Subclasses override the
 * accessors below; the defaults return null, meaning "no concrete SDK attached".
 */
public abstract static class JdkComboBoxItem {
  // The configured SDK behind this item, if any.
  @Nullable
  public Sdk getJdk() {
    return null;
  }
  // Name of the SDK this item refers to, if any.
  @Nullable
  public String getSdkName() {
    return null;
  }
}
/**
 * Combo box entry backed by an actual configured {@link Sdk}; displays the SDK name.
 */
public static class ActualJdkComboBoxItem extends JdkComboBoxItem {
  private final Sdk mySdk;

  public ActualJdkComboBoxItem(@NotNull Sdk jdk) {
    mySdk = jdk;
  }

  @Nullable
  @Override
  public Sdk getJdk() {
    return mySdk;
  }

  @Nullable
  @Override
  public String getSdkName() {
    return mySdk.getName();
  }

  @Override
  public String toString() {
    return mySdk.getName();
  }
}
public static class ProjectJdkComboBoxItem extends JdkComboBoxItem {
public String toString() {
return ProjectBundle.message("jdk.combo.box.project.item");
}
}
public static class NoneJdkComboBoxItem extends JdkComboBoxItem {
public String toString() {
return ProjectBundle.message("jdk.combo.box.none.item");
}
}
/**
 * Entry for an SDK that is referenced by name but no longer present; rendered
 * via the "invalid item" message bundle entry.
 */
private static class InvalidJdkComboBoxItem extends JdkComboBoxItem {
  private final String myName;

  InvalidJdkComboBoxItem(String name) {
    myName = name;
  }

  @Override
  public String toString() {
    return ProjectBundle.message("jdk.combo.box.invalid.item", myName);
  }

  @Override
  public String getSdkName() {
    return myName;
  }
}
/**
 * Entry for a detected SDK home path whose SDK is not configured yet; displays
 * the home path itself.
 */
public static class SuggestedJdkItem extends JdkComboBoxItem {
  private final SdkType myType;
  private final String myHomePath;

  SuggestedJdkItem(@NotNull SdkType sdkType, @NotNull String path) {
    myType = sdkType;
    myHomePath = path;
  }

  @NotNull
  public SdkType getSdkType() {
    return myType;
  }

  @NotNull
  public String getPath() {
    return myHomePath;
  }

  @Override
  public String toString() {
    return myHomePath;
  }
}
}
| ThiagoGarciaAlves/intellij-community | java/idea-ui/src/com/intellij/openapi/roots/ui/configuration/JdkComboBox.java | Java | apache-2.0 | 15,058 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt;
import org.apache.hadoop.yarn.util.resource.Resources;
/**
 * A leaf queue in the fair scheduler hierarchy: holds application attempts
 * (wrapped as {@code AppSchedulable}s) rather than child queues, and
 * implements demand aggregation, container assignment and preemption for them.
 *
 * NOTE(review): the runnable/non-runnable lists are plain ArrayLists with no
 * synchronization in this class — presumably callers serialize access through
 * the scheduler; confirm before relying on concurrent use.
 */
@Private
@Unstable
public class FSLeafQueue extends FSQueue {
  private static final Log LOG = LogFactory.getLog(
      FSLeafQueue.class.getName());

  // Apps currently allowed to run in this queue.
  private final List<AppSchedulable> runnableAppScheds = // apps that are runnable
      new ArrayList<AppSchedulable>();
  // Apps admitted to the queue but not (yet) allowed to run.
  private final List<AppSchedulable> nonRunnableAppScheds =
      new ArrayList<AppSchedulable>();
  // Aggregate resource demand over all apps, capped at the queue's maxResources.
  private Resource demand = Resources.createResource(0);
  // Variables used for preemption
  private long lastTimeAtMinShare;
  private long lastTimeAtHalfFairShare;
  // Track the AM resource usage for this queue
  private Resource amResourceUsage;
  private final ActiveUsersManager activeUsersManager;

  public FSLeafQueue(String name, FairScheduler scheduler,
      FSParentQueue parent) {
    super(name, scheduler, parent);
    // Start the preemption timers at queue-creation time.
    this.lastTimeAtMinShare = scheduler.getClock().getTime();
    this.lastTimeAtHalfFairShare = scheduler.getClock().getTime();
    activeUsersManager = new ActiveUsersManager(getMetrics());
    amResourceUsage = Resource.newInstance(0, 0);
  }

  /**
   * Wraps the app in an AppSchedulable, links it back to the app, and files it
   * in the runnable or non-runnable list.
   */
  public void addApp(FSSchedulerApp app, boolean runnable) {
    AppSchedulable appSchedulable = new AppSchedulable(scheduler, app, this);
    app.setAppSchedulable(appSchedulable);
    if (runnable) {
      runnableAppScheds.add(appSchedulable);
    } else {
      nonRunnableAppScheds.add(appSchedulable);
    }
  }

  // for testing
  void addAppSchedulable(AppSchedulable appSched) {
    runnableAppScheds.add(appSched);
  }

  /**
   * Removes the given app from this queue.
   * @return whether or not the app was runnable
   * @throws IllegalStateException if the app is in neither list
   */
  public boolean removeApp(FSSchedulerApp app) {
    if (runnableAppScheds.remove(app.getAppSchedulable())) {
      // Update AM resource usage
      if (app.isAmRunning() && app.getAMResource() != null) {
        Resources.subtractFrom(amResourceUsage, app.getAMResource());
      }
      return true;
    } else if (nonRunnableAppScheds.remove(app.getAppSchedulable())) {
      return false;
    } else {
      throw new IllegalStateException("Given app to remove " + app +
          " does not exist in queue " + this);
    }
  }

  // Returns the live (not a copy) list of runnable app schedulables.
  public Collection<AppSchedulable> getRunnableAppSchedulables() {
    return runnableAppScheds;
  }

  // Returns the live (not a copy) list of non-runnable app schedulables.
  public List<AppSchedulable> getNonRunnableAppSchedulables() {
    return nonRunnableAppScheds;
  }

  /** Adds the attempt ids of every app (runnable or not) to {@code apps}. */
  @Override
  public void collectSchedulerApplications(
      Collection<ApplicationAttemptId> apps) {
    for (AppSchedulable appSched : runnableAppScheds) {
      apps.add(appSched.getApp().getApplicationAttemptId());
    }
    for (AppSchedulable appSched : nonRunnableAppScheds) {
      apps.add(appSched.getApp().getApplicationAttemptId());
    }
  }

  /** Installs a scheduling policy after checking it applies to leaf queues. */
  @Override
  public void setPolicy(SchedulingPolicy policy)
      throws AllocationConfigurationException {
    if (!SchedulingPolicy.isApplicableTo(policy, SchedulingPolicy.DEPTH_LEAF)) {
      throwPolicyDoesnotApplyException(policy);
    }
    super.policy = policy;
  }

  // Redistributes this queue's fair share among its runnable apps.
  @Override
  public void recomputeShares() {
    policy.computeShares(getRunnableAppSchedulables(), getFairShare());
  }

  @Override
  public Resource getDemand() {
    return demand;
  }

  // Sums current usage over every app; recomputed on each call.
  @Override
  public Resource getResourceUsage() {
    Resource usage = Resources.createResource(0);
    for (AppSchedulable app : runnableAppScheds) {
      Resources.addTo(usage, app.getResourceUsage());
    }
    for (AppSchedulable app : nonRunnableAppScheds) {
      Resources.addTo(usage, app.getResourceUsage());
    }
    return usage;
  }

  public Resource getAmResourceUsage() {
    return amResourceUsage;
  }

  @Override
  public void updateDemand() {
    // Compute demand by iterating through apps in the queue
    // Limit demand to maxResources
    Resource maxRes = scheduler.getAllocationConfiguration()
        .getMaxResources(getName());
    demand = Resources.createResource(0);
    for (AppSchedulable sched : runnableAppScheds) {
      // Stop early once the cap is reached; further apps cannot raise demand.
      if (Resources.equals(demand, maxRes)) {
        break;
      }
      updateDemandForApp(sched, maxRes);
    }
    for (AppSchedulable sched : nonRunnableAppScheds) {
      if (Resources.equals(demand, maxRes)) {
        break;
      }
      updateDemandForApp(sched, maxRes);
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("The updated demand for " + getName() + " is " + demand
          + "; the max is " + maxRes);
    }
  }

  // Refreshes one app's demand and folds it into the queue demand, clamped to maxRes.
  private void updateDemandForApp(AppSchedulable sched, Resource maxRes) {
    sched.updateDemand();
    Resource toAdd = sched.getDemand();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Counting resource from " + sched.getName() + " " + toAdd
          + "; Total resource consumption for " + getName() + " now "
          + demand);
    }
    demand = Resources.add(demand, toAdd);
    demand = Resources.componentwiseMin(demand, maxRes);
  }

  /**
   * Offers the node to apps in policy order; the first non-blacklisted app
   * that accepts a container wins. Returns the assigned resource, or none.
   */
  @Override
  public Resource assignContainer(FSSchedulerNode node) {
    Resource assigned = Resources.none();
    if (LOG.isDebugEnabled()) {
      LOG.debug("Node " + node.getNodeName() + " offered to queue: " + getName());
    }
    if (!assignContainerPreCheck(node)) {
      return assigned;
    }
    // Sort so the most deserving app (per the policy) is tried first.
    Comparator<Schedulable> comparator = policy.getComparator();
    Collections.sort(runnableAppScheds, comparator);
    for (AppSchedulable sched : runnableAppScheds) {
      if (SchedulerAppUtils.isBlacklisted(sched.getApp(), node, LOG)) {
        continue;
      }
      assigned = sched.assignContainer(node);
      if (!assigned.equals(Resources.none())) {
        break;
      }
    }
    return assigned;
  }

  /**
   * Picks a container to preempt from the app that is most over its fair
   * share (per the policy comparator). Returns null when nothing is eligible.
   */
  @Override
  public RMContainer preemptContainer() {
    RMContainer toBePreempted = null;
    if (LOG.isDebugEnabled()) {
      LOG.debug("Queue " + getName() + " is going to preempt a container " +
          "from its applications.");
    }
    // If this queue is not over its fair share, reject
    if (!preemptContainerPreCheck()) {
      return toBePreempted;
    }
    // Choose the app that is most over fair share
    Comparator<Schedulable> comparator = policy.getComparator();
    AppSchedulable candidateSched = null;
    for (AppSchedulable sched : runnableAppScheds) {
      if (candidateSched == null ||
          comparator.compare(sched, candidateSched) > 0) {
        candidateSched = sched;
      }
    }
    // Preempt from the selected app
    if (candidateSched != null) {
      toBePreempted = candidateSched.preemptContainer();
    }
    return toBePreempted;
  }

  // Leaf queues have no children; a fresh empty list is returned each call.
  @Override
  public List<FSQueue> getChildQueues() {
    return new ArrayList<FSQueue>(1);
  }

  /** Returns the ACL operations the given user may perform on this queue. */
  @Override
  public List<QueueUserACLInfo> getQueueUserAclInfo(UserGroupInformation user) {
    QueueUserACLInfo userAclInfo =
        recordFactory.newRecordInstance(QueueUserACLInfo.class);
    List<QueueACL> operations = new ArrayList<QueueACL>();
    for (QueueACL operation : QueueACL.values()) {
      if (hasAccess(operation, user)) {
        operations.add(operation);
      }
    }
    userAclInfo.setQueueName(getQueueName());
    userAclInfo.setUserAcls(operations);
    return Collections.singletonList(userAclInfo);
  }

  public long getLastTimeAtMinShare() {
    return lastTimeAtMinShare;
  }

  public void setLastTimeAtMinShare(long lastTimeAtMinShare) {
    this.lastTimeAtMinShare = lastTimeAtMinShare;
  }

  public long getLastTimeAtHalfFairShare() {
    return lastTimeAtHalfFairShare;
  }

  public void setLastTimeAtHalfFairShare(long lastTimeAtHalfFairShare) {
    this.lastTimeAtHalfFairShare = lastTimeAtHalfFairShare;
  }

  @Override
  public int getNumRunnableApps() {
    return runnableAppScheds.size();
  }

  @Override
  public ActiveUsersManager getActiveUsersManager() {
    return activeUsersManager;
  }

  /**
   * Check whether this queue can run this application master under the
   * maxAMShare limit
   *
   * @param amResource
   * @return true if this queue can run
   */
  public boolean canRunAppAM(Resource amResource) {
    float maxAMShare =
        scheduler.getAllocationConfiguration().getQueueMaxAMShare(getName());
    // A configured share of -1 disables the limit entirely.
    if (Math.abs(maxAMShare - -1.0f) < 0.0001) {
      return true;
    }
    Resource maxAMResource = Resources.multiply(getFairShare(), maxAMShare);
    Resource ifRunAMResource = Resources.add(amResourceUsage, amResource);
    return !policy
        .checkIfAMResourceUsageOverLimit(ifRunAMResource, maxAMResource);
  }

  // Accumulates a newly-started AM's resource into the queue-wide AM usage.
  public void addAMResourceUsage(Resource amResource) {
    if (amResource != null) {
      Resources.addTo(amResourceUsage, amResource);
    }
  }

  @Override
  public void recoverContainer(Resource clusterResource,
      SchedulerApplicationAttempt schedulerAttempt, RMContainer rmContainer) {
    // TODO Auto-generated method stub
  }
}
| tseen/Federated-HDFS | tseenliu/FedHDFS-hadoop-src/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/FSLeafQueue.java | Java | apache-2.0 | 10,600 |
/*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.governator.lifecycle.warmup;
import com.google.inject.Singleton;
import com.netflix.governator.annotations.WarmUp;
/**
 * Warm-up test fixture: two independent singletons (a "flat" dependency graph
 * with no edges between them), each recording its own warm-up invocation.
 */
public class Flat
{
    /*
        Root classes without dependencies
    */
    @Singleton
    public static class A
    {
        // NOTE(review): presumably injected by the Governator/Guice container
        // before the lifecycle triggers warm-up — confirm against the test setup.
        public volatile Recorder recorder;

        // Invoked by the lifecycle manager via @WarmUp; records this class's tag.
        @WarmUp
        public void warmUp() throws InterruptedException
        {
            recorder.record("A");
        }
    }

    @Singleton
    public static class B
    {
        // NOTE(review): presumably injected by the Governator/Guice container
        // before the lifecycle triggers warm-up — confirm against the test setup.
        public volatile Recorder recorder;

        @WarmUp
        public void warmUp() throws InterruptedException
        {
            recorder.record("B");
        }
    }
}
| skinzer/governator | governator-legacy/src/test/java/com/netflix/governator/lifecycle/warmup/Flat.java | Java | apache-2.0 | 1,291 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.idm.engine.impl.cmd;
import java.io.Serializable;
import org.flowable.engine.common.api.FlowableIllegalArgumentException;
import org.flowable.engine.common.api.FlowableObjectNotFoundException;
import org.flowable.engine.common.impl.interceptor.Command;
import org.flowable.engine.common.impl.interceptor.CommandContext;
import org.flowable.idm.api.Picture;
import org.flowable.idm.api.User;
import org.flowable.idm.engine.impl.util.CommandContextUtil;
/**
 * Command that stores a new {@link Picture} for an existing user identified
 * by id. Fails fast when the id is null or no such user exists.
 *
 * @author Tom Baeyens
 */
public class SetUserPictureCmd implements Command<Object>, Serializable {

    private static final long serialVersionUID = 1L;

    protected String userId;
    protected Picture picture;

    public SetUserPictureCmd(String userId, Picture picture) {
        this.userId = userId;
        this.picture = picture;
    }

    @Override
    public Object execute(CommandContext commandContext) {
        requireUserId();
        User user = findExistingUser();
        CommandContextUtil.getUserEntityManager(commandContext).setUserPicture(user, picture);
        return null;
    }

    // Rejects a null user id before any lookup happens.
    private void requireUserId() {
        if (userId == null) {
            throw new FlowableIllegalArgumentException("userId is null");
        }
    }

    // Looks the user up through the IDM identity service; fails if absent.
    private User findExistingUser() {
        User user = CommandContextUtil.getIdmEngineConfiguration().getIdmIdentityService()
                .createUserQuery().userId(userId)
                .singleResult();
        if (user == null) {
            throw new FlowableObjectNotFoundException("user " + userId + " doesn't exist", User.class);
        }
        return user;
    }
}
| zwets/flowable-engine | modules/flowable-idm-engine/src/main/java/org/flowable/idm/engine/impl/cmd/SetUserPictureCmd.java | Java | apache-2.0 | 2,022 |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.test.tax;
import com.gs.fw.finder.Operation;
import java.util.*;
/**
 * Concrete list type for FormRole objects. All behavior lives in the
 * generated {@code FormRoleListAbstract} base; these constructors only delegate.
 */
public class FormRoleList extends FormRoleListAbstract
{
    /** Creates an empty list. */
    public FormRoleList()
    {
        super();
    }
    /** Creates an empty list with the given initial capacity. */
    public FormRoleList(int initialSize)
    {
        super(initialSize);
    }
    /** Creates a list pre-populated with the elements of {@code c}. */
    public FormRoleList(Collection c)
    {
        super(c);
    }
    /** Creates a list defined by the given finder {@link Operation}. */
    public FormRoleList(Operation operation)
    {
        super(operation);
    }
}
| goldmansachs/reladomo | reladomo/src/test/java/com/gs/fw/common/mithra/test/tax/FormRoleList.java | Java | apache-2.0 | 1,001 |
/**
 * Utilities for finding the largest of two or three numbers.
 * @since 1.0
 * @author skuznetsov
 * @version 2.0
 */
package ru.skuznetsov;
| kuznetsovsergeyymailcom/homework | chapter_001/maximumFromTwoNumbers/src/test/java/ru/skuznetsov/package-info.java | Java | apache-2.0 | 145 |
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package framework
import (
"fmt"
"path/filepath"
"strconv"
"strings"
"time"
. "github.com/onsi/gomega"
apierrs "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/wait"
utilyaml "k8s.io/apimachinery/pkg/util/yaml"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/v1"
podutil "k8s.io/kubernetes/pkg/api/v1/pod"
apps "k8s.io/kubernetes/pkg/apis/apps/v1beta1"
"k8s.io/kubernetes/pkg/client/clientset_generated/clientset"
"k8s.io/kubernetes/test/e2e/generated"
)
const (
	// StatefulSetPoll is the poll interval for StatefulSet tests
	StatefulSetPoll = 10 * time.Second
	// StatefulSetTimeout is the timeout used for StatefulSet operations
	StatefulSetTimeout = 10 * time.Minute
	// StatefulPodTimeout is the timeout for stateful pods to change state
	StatefulPodTimeout = 5 * time.Minute
)
// CreateStatefulSetService builds (without creating on the server) a headless
// Service named name whose selector matches labels, exposing port 80/TCP.
func CreateStatefulSetService(name string, labels map[string]string) *v1.Service {
	svc := &v1.Service{}
	svc.ObjectMeta = metav1.ObjectMeta{Name: name}
	svc.Spec = v1.ServiceSpec{
		Selector: labels,
		Ports: []v1.ServicePort{
			{Port: 80, Name: "http", Protocol: "TCP"},
		},
		// Headless: no cluster VIP; DNS resolves directly to pod records.
		ClusterIP: "None",
	}
	return svc
}
// StatefulSetFromManifest returns a StatefulSet from a manifest stored in fileName in the Namespace indicated by ns.
func StatefulSetFromManifest(fileName, ns string) *apps.StatefulSet {
	var ss apps.StatefulSet
	Logf("Parsing statefulset from %v", fileName)
	// Manifests ship as YAML; convert to JSON so the universal decoder can parse them.
	data := generated.ReadOrDie(fileName)
	json, err := utilyaml.ToJSON(data)
	Expect(err).NotTo(HaveOccurred())
	Expect(runtime.DecodeInto(api.Codecs.UniversalDecoder(), json, &ss)).NotTo(HaveOccurred())
	ss.Namespace = ns
	// Manifests may omit a selector; default it to the pod template's labels.
	if ss.Spec.Selector == nil {
		ss.Spec.Selector = &metav1.LabelSelector{
			MatchLabels: ss.Spec.Template.Labels,
		}
	}
	return &ss
}
// StatefulSetTester is a struct that contains utility methods for testing StatefulSet related functionality. It uses a
// clientset.Interface to communicate with the API server.
type StatefulSetTester struct {
	c clientset.Interface // API-server client used by every helper method
}
// NewStatefulSetTester returns a StatefulSetTester that talks to the API
// server through the supplied client.
func NewStatefulSetTester(c clientset.Interface) *StatefulSetTester {
	return &StatefulSetTester{c: c}
}
// CreateStatefulSet creates a StatefulSet from the manifest at manifestPath in the Namespace ns using kubectl create.
// It expects manifestPath to contain service.yaml and statefulset.yaml, creates
// the governing service first, then the set, and blocks until all replicas are
// Running and Ready.
func (s *StatefulSetTester) CreateStatefulSet(manifestPath, ns string) *apps.StatefulSet {
	mkpath := func(file string) string {
		return filepath.Join(manifestPath, file)
	}
	ss := StatefulSetFromManifest(mkpath("statefulset.yaml"), ns)
	svcYaml := generated.ReadOrDie(mkpath("service.yaml"))
	ssYaml := generated.ReadOrDie(mkpath("statefulset.yaml"))
	Logf(fmt.Sprintf("creating " + ss.Name + " service"))
	RunKubectlOrDieInput(string(svcYaml[:]), "create", "-f", "-", fmt.Sprintf("--namespace=%v", ns))
	Logf(fmt.Sprintf("creating statefulset %v/%v with %d replicas and selector %+v", ss.Namespace, ss.Name, *(ss.Spec.Replicas), ss.Spec.Selector))
	RunKubectlOrDieInput(string(ssYaml[:]), "create", "-f", "-", fmt.Sprintf("--namespace=%v", ns))
	s.WaitForRunningAndReady(*ss.Spec.Replicas, ss)
	return ss
}
// CheckMount checks that the mount at mountPath is valid for all Pods in ss.
// It lists, walks and writes under mountPath in every pod; the first failing
// command aborts the check and is named in the returned error.
func (s *StatefulSetTester) CheckMount(ss *apps.StatefulSet, mountPath string) error {
	for _, cmd := range []string{
		// Print inode, size etc
		fmt.Sprintf("ls -idlh %v", mountPath),
		// Print subdirs
		fmt.Sprintf("find %v", mountPath),
		// Try writing
		fmt.Sprintf("touch %v", filepath.Join(mountPath, fmt.Sprintf("%v", time.Now().UnixNano()))),
	} {
		if err := s.ExecInStatefulPods(ss, cmd); err != nil {
			return fmt.Errorf("failed to execute %v, error: %v", cmd, err)
		}
	}
	return nil
}
// ExecInStatefulPods runs cmd in every Pod of ss, in list order, stopping at
// and returning the first error; pods after a failure are not touched.
func (s *StatefulSetTester) ExecInStatefulPods(ss *apps.StatefulSet, cmd string) error {
	pods := s.GetPodList(ss)
	for i := range pods.Items {
		pod := &pods.Items[i]
		stdout, err := RunHostCmd(pod.Namespace, pod.Name, cmd)
		Logf("stdout of %v on %v: %v", cmd, pod.Name, stdout)
		if err != nil {
			return err
		}
	}
	return nil
}
// CheckHostname verifies that each Pod in ss reports a hostname equal to its
// pod name; a non-nil return means an exec failure or a mismatch.
func (s *StatefulSetTester) CheckHostname(ss *apps.StatefulSet) error {
	const cmd = "printf $(hostname)"
	pods := s.GetPodList(ss)
	for i := range pods.Items {
		pod := &pods.Items[i]
		hostname, err := RunHostCmd(pod.Namespace, pod.Name, cmd)
		if err != nil {
			return err
		}
		if hostname != pod.Name {
			return fmt.Errorf("unexpected hostname (%s) and stateful pod name (%s) not equal", hostname, pod.Name)
		}
	}
	return nil
}
// Saturate brings ss up to its full replica count, waiting for each ordinal in
// turn to become Running and Ready and marking it healthy before moving on.
func (s *StatefulSetTester) Saturate(ss *apps.StatefulSet) {
	for i := int32(0); i < *(ss.Spec.Replicas); i++ {
		Logf("Waiting for stateful pod at index %v to enter Running", i+1)
		s.WaitForRunningAndReady(i+1, ss)
		Logf("Marking stateful pod at index %v healthy", i)
		s.SetHealthy(ss)
	}
}
// DeleteStatefulPodAtIndex deletes the Pod with ordinal index in ss.
// The delete uses a zero grace period, i.e. the pod is killed immediately;
// a failure to delete fails the test.
func (s *StatefulSetTester) DeleteStatefulPodAtIndex(index int, ss *apps.StatefulSet) {
	name := getStatefulSetPodNameAtIndex(index, ss)
	noGrace := int64(0)
	if err := s.c.Core().Pods(ss.Namespace).Delete(name, &metav1.DeleteOptions{GracePeriodSeconds: &noGrace}); err != nil {
		Failf("Failed to delete stateful pod %v for StatefulSet %v/%v: %v", name, ss.Namespace, ss.Name, err)
	}
}
// VerifyStatefulPodFunc is a func that examines a StatefulSet Pod; it is the
// visitor callback type used by VerifyPodAtIndex.
type VerifyStatefulPodFunc func(*v1.Pod)
// VerifyPodAtIndex applies a visitor pattern to the Pod at index in ss; verify
// is applied to the Pod to "visit" it. Fetching the pod must succeed or the
// test fails.
func (s *StatefulSetTester) VerifyPodAtIndex(index int, ss *apps.StatefulSet, verify VerifyStatefulPodFunc) {
	name := getStatefulSetPodNameAtIndex(index, ss)
	pod, err := s.c.Core().Pods(ss.Namespace).Get(name, metav1.GetOptions{})
	Expect(err).NotTo(HaveOccurred(), fmt.Sprintf("Failed to get stateful pod %s for StatefulSet %s/%s", name, ss.Namespace, ss.Name))
	verify(pod)
}
// getStatefulSetPodNameAtIndex returns the name of the Pod with the given
// ordinal in ss, following the "<set name>-<ordinal>" convention.
func getStatefulSetPodNameAtIndex(index int, ss *apps.StatefulSet) string {
	// TODO: we won't use "-index" as the name strategy forever,
	// pull the name out from an identity mapper.
	return ss.Name + "-" + strconv.Itoa(index)
}
// Scale scales ss to count replicas. It patches the spec, then polls until the
// number of pods matching the selector equals count; on timeout it returns an
// error listing the pods that were still unhealthy.
func (s *StatefulSetTester) Scale(ss *apps.StatefulSet, count int32) error {
	name := ss.Name
	ns := ss.Namespace
	s.update(ns, name, func(ss *apps.StatefulSet) { *(ss.Spec.Replicas) = count })
	var statefulPodList *v1.PodList
	// Poll until the observed pod count matches the requested scale.
	pollErr := wait.PollImmediate(StatefulSetPoll, StatefulSetTimeout, func() (bool, error) {
		statefulPodList = s.GetPodList(ss)
		if int32(len(statefulPodList.Items)) == count {
			return true, nil
		}
		return false, nil
	})
	if pollErr != nil {
		// On timeout, name the pods that are deleting, not Running, or not Ready.
		unhealthy := []string{}
		for _, statefulPod := range statefulPodList.Items {
			delTs, phase, readiness := statefulPod.DeletionTimestamp, statefulPod.Status.Phase, podutil.IsPodReady(&statefulPod)
			if delTs != nil || phase != v1.PodRunning || !readiness {
				unhealthy = append(unhealthy, fmt.Sprintf("%v: deletion %v, phase %v, readiness %v", statefulPod.Name, delTs, phase, readiness))
			}
		}
		return fmt.Errorf("Failed to scale statefulset to %d in %v. Remaining pods:\n%v", count, StatefulSetTimeout, unhealthy)
	}
	return nil
}
// UpdateReplicas updates the replicas of ss to count.
// Unlike Scale, this only patches the spec and does not wait for pods.
func (s *StatefulSetTester) UpdateReplicas(ss *apps.StatefulSet, count int32) {
	s.update(ss.Namespace, ss.Name, func(ss *apps.StatefulSet) { ss.Spec.Replicas = &count })
}
// Restart scales ss to 0 and then back to its previous number of replicas.
// Note the scale back up is a bare spec update — it does not wait for the
// pods to come back.
func (s *StatefulSetTester) Restart(ss *apps.StatefulSet) {
	oldReplicas := *(ss.Spec.Replicas)
	ExpectNoError(s.Scale(ss, 0))
	s.update(ss.Namespace, ss.Name, func(ss *apps.StatefulSet) { *(ss.Spec.Replicas) = oldReplicas })
}
// update fetches the named StatefulSet, applies the mutation, and pushes the
// result to the API server, retrying up to 3 times on conflict or server
// timeout. Any other error, or exhausting the retries, fails the test.
func (s *StatefulSetTester) update(ns, name string, update func(ss *apps.StatefulSet)) {
	for i := 0; i < 3; i++ {
		ss, err := s.c.Apps().StatefulSets(ns).Get(name, metav1.GetOptions{})
		if err != nil {
			Failf("failed to get statefulset %q: %v", name, err)
		}
		update(ss)
		ss, err = s.c.Apps().StatefulSets(ns).Update(ss)
		if err == nil {
			return
		}
		// Conflicts and server timeouts are transient: re-fetch and retry.
		if !apierrs.IsConflict(err) && !apierrs.IsServerTimeout(err) {
			Failf("failed to update statefulset %q: %v", name, err)
		}
	}
	Failf("too many retries draining statefulset %q", name)
}
// GetPodList gets the current Pods in ss, selected by the set's label selector.
// Any API error fails the test.
func (s *StatefulSetTester) GetPodList(ss *apps.StatefulSet) *v1.PodList {
	selector, err := metav1.LabelSelectorAsSelector(ss.Spec.Selector)
	ExpectNoError(err)
	podList, err := s.c.Core().Pods(ss.Namespace).List(metav1.ListOptions{LabelSelector: selector.String()})
	ExpectNoError(err)
	return podList
}
// ConfirmStatefulPodCount asserts that the current number of Pods in ss is count waiting up to timeout for ss to
// to scale to count. When hard is true any deviation fails the test
// immediately; otherwise deviations are only logged and polling continues.
func (s *StatefulSetTester) ConfirmStatefulPodCount(count int, ss *apps.StatefulSet, timeout time.Duration, hard bool) {
	start := time.Now()
	deadline := start.Add(timeout)
	for t := time.Now(); t.Before(deadline); t = time.Now() {
		podList := s.GetPodList(ss)
		statefulPodCount := len(podList.Items)
		if statefulPodCount != count {
			logPodStates(podList.Items)
			if hard {
				Failf("StatefulSet %v scaled unexpectedly scaled to %d -> %d replicas", ss.Name, count, len(podList.Items))
			} else {
				Logf("StatefulSet %v has not reached scale %d, at %d", ss.Name, count, statefulPodCount)
			}
			time.Sleep(1 * time.Second)
			continue
		}
		Logf("Verifying statefulset %v doesn't scale past %d for another %+v", ss.Name, count, deadline.Sub(t))
		time.Sleep(1 * time.Second)
	}
}
// waitForRunning polls until at least numStatefulPods pods of ss are Running
// and their readiness matches shouldBeReady. Seeing more than numStatefulPods
// pods is treated as a hard error; a timeout fails the test.
func (s *StatefulSetTester) waitForRunning(numStatefulPods int32, ss *apps.StatefulSet, shouldBeReady bool) {
	pollErr := wait.PollImmediate(StatefulSetPoll, StatefulSetTimeout,
		func() (bool, error) {
			podList := s.GetPodList(ss)
			if int32(len(podList.Items)) < numStatefulPods {
				Logf("Found %d stateful pods, waiting for %d", len(podList.Items), numStatefulPods)
				return false, nil
			}
			if int32(len(podList.Items)) > numStatefulPods {
				return false, fmt.Errorf("Too many pods scheduled, expected %d got %d", numStatefulPods, len(podList.Items))
			}
			for _, p := range podList.Items {
				isReady := podutil.IsPodReady(&p)
				// The pod qualifies only when its readiness equals the requested state.
				desiredReadiness := shouldBeReady == isReady
				Logf("Waiting for pod %v to enter %v - Ready=%v, currently %v - Ready=%v", p.Name, v1.PodRunning, shouldBeReady, p.Status.Phase, isReady)
				if p.Status.Phase != v1.PodRunning || !desiredReadiness {
					return false, nil
				}
			}
			return true, nil
		})
	if pollErr != nil {
		Failf("Failed waiting for pods to enter running: %v", pollErr)
	}
}
// WaitForState periodically polls for the ss and its pods until the until function returns either true or an error.
// Each iteration re-fetches the StatefulSet and its pod list before invoking
// until; a timeout or an error from until fails the test.
func (s *StatefulSetTester) WaitForState(ss *apps.StatefulSet, until func(*apps.StatefulSet, *v1.PodList) (bool, error)) {
	pollErr := wait.PollImmediate(StatefulSetPoll, StatefulSetTimeout,
		func() (bool, error) {
			ssGet, err := s.c.Apps().StatefulSets(ss.Namespace).Get(ss.Name, metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			podList := s.GetPodList(ssGet)
			return until(ssGet, podList)
		})
	if pollErr != nil {
		Failf("Failed waiting for pods to enter running: %v", pollErr)
	}
}
// WaitForRunningAndReady waits for numStatefulPods in ss to be Running and Ready.
func (s *StatefulSetTester) WaitForRunningAndReady(numStatefulPods int32, ss *apps.StatefulSet) {
	s.waitForRunning(numStatefulPods, ss, true)
}
// WaitForRunningAndNotReady waits for numStatefulPods in ss to be Running and not Ready.
// (Doc comment previously named WaitForRunningAndReady — copy-paste error.)
func (s *StatefulSetTester) WaitForRunningAndNotReady(numStatefulPods int32, ss *apps.StatefulSet) {
	s.waitForRunning(numStatefulPods, ss, false)
}
// BreakProbe breaks the readiness probe for Nginx StatefulSet containers by
// moving the probed file out of the web root in every pod of ss.
// NOTE(review): assumes the probe is an HTTPGet served from
// /usr/share/nginx/html — specific to the nginx test images.
func (s *StatefulSetTester) BreakProbe(ss *apps.StatefulSet, probe *v1.Probe) error {
	path := probe.HTTPGet.Path
	if path == "" {
		return fmt.Errorf("Path expected to be not empty: %v", path)
	}
	cmd := fmt.Sprintf("mv -v /usr/share/nginx/html%v /tmp/", path)
	return s.ExecInStatefulPods(ss, cmd)
}
// RestoreProbe restores the readiness probe for Nginx StatefulSet containers,
// undoing BreakProbe by moving the probed file back into the web root.
func (s *StatefulSetTester) RestoreProbe(ss *apps.StatefulSet, probe *v1.Probe) error {
	path := probe.HTTPGet.Path
	if path == "" {
		return fmt.Errorf("Path expected to be not empty: %v", path)
	}
	cmd := fmt.Sprintf("mv -v /tmp%v /usr/share/nginx/html/", path)
	return s.ExecInStatefulPods(ss, cmd)
}
// SetHealthy updates the StatefulSet InitAnnotation to true in order to set a StatefulSet Pod to be Running and Ready.
// It expects at most one uninitialized pod at a time: any non-Running pod or
// a second uninitialized pod fails the test.
func (s *StatefulSetTester) SetHealthy(ss *apps.StatefulSet) {
	podList := s.GetPodList(ss)
	markedHealthyPod := ""
	for _, pod := range podList.Items {
		if pod.Status.Phase != v1.PodRunning {
			Failf("Found pod in %v cannot set health", pod.Status.Phase)
		}
		// Already initialized pods need no annotation change.
		if IsStatefulSetPodInitialized(pod) {
			continue
		}
		if markedHealthyPod != "" {
			Failf("Found multiple non-healthy stateful pods: %v and %v", pod.Name, markedHealthyPod)
		}
		p, err := UpdatePodWithRetries(s.c, pod.Namespace, pod.Name, func(update *v1.Pod) {
			update.Annotations[apps.StatefulSetInitAnnotation] = "true"
		})
		ExpectNoError(err)
		Logf("Set annotation %v to %v on pod %v", apps.StatefulSetInitAnnotation, p.Annotations[apps.StatefulSetInitAnnotation], pod.Name)
		markedHealthyPod = pod.Name
	}
}
// WaitForStatusReadyReplicas waits until ss.Status.ReadyReplicas equals
// expectedReplicas, polling every StatefulSetPoll up to StatefulSetTimeout.
// A timeout fails the test.
func (s *StatefulSetTester) WaitForStatusReadyReplicas(ss *apps.StatefulSet, expectedReplicas int32) {
	Logf("Waiting for statefulset status.readyReplicas updated to %d", expectedReplicas)
	ns, name := ss.Namespace, ss.Name
	pollErr := wait.PollImmediate(StatefulSetPoll, StatefulSetTimeout,
		func() (bool, error) {
			ssGet, err := s.c.Apps().StatefulSets(ns).Get(name, metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			// Ignore status from before the controller observed this generation.
			if *ssGet.Status.ObservedGeneration < ss.Generation {
				return false, nil
			}
			if ssGet.Status.ReadyReplicas != expectedReplicas {
				// BUG FIX: this log previously printed Status.Replicas even though
				// the condition checks Status.ReadyReplicas, making the diagnostic
				// misleading; it now reports the value actually being waited on.
				Logf("Waiting for stateful set status.readyReplicas to become %d, currently %d", expectedReplicas, ssGet.Status.ReadyReplicas)
				return false, nil
			}
			return true, nil
		})
	if pollErr != nil {
		Failf("Failed waiting for stateful set status.readyReplicas updated to %d: %v", expectedReplicas, pollErr)
	}
}
// WaitForStatusReplicas waits for the ss.Status.Replicas to be equal to expectedReplicas.
// Polls every StatefulSetPoll up to StatefulSetTimeout; a timeout fails the test.
func (s *StatefulSetTester) WaitForStatusReplicas(ss *apps.StatefulSet, expectedReplicas int32) {
	Logf("Waiting for statefulset status.replicas updated to %d", expectedReplicas)
	ns, name := ss.Namespace, ss.Name
	pollErr := wait.PollImmediate(StatefulSetPoll, StatefulSetTimeout,
		func() (bool, error) {
			ssGet, err := s.c.Apps().StatefulSets(ns).Get(name, metav1.GetOptions{})
			if err != nil {
				return false, err
			}
			// Ignore status from before the controller observed this generation.
			if *ssGet.Status.ObservedGeneration < ss.Generation {
				return false, nil
			}
			if ssGet.Status.Replicas != expectedReplicas {
				Logf("Waiting for stateful set status to become %d, currently %d", expectedReplicas, ssGet.Status.Replicas)
				return false, nil
			}
			return true, nil
		})
	if pollErr != nil {
		Failf("Failed waiting for stateful set status.replicas updated to %d: %v", expectedReplicas, pollErr)
	}
}
// CheckServiceName asserts that the ServiceName for ss is equivalent to expectedServiceName.
// Returns an error (rather than failing the test) so callers can decide how to react.
// Fixed: receiver renamed from `p` to `s` for consistency with every other
// StatefulSetTester method in this file.
func (s *StatefulSetTester) CheckServiceName(ss *apps.StatefulSet, expectedServiceName string) error {
	Logf("Checking if statefulset spec.serviceName is %s", expectedServiceName)

	if expectedServiceName != ss.Spec.ServiceName {
		return fmt.Errorf("Wrong service name governing statefulset. Expected %s got %s",
			expectedServiceName, ss.Spec.ServiceName)
	}
	return nil
}
// DeleteAllStatefulSets deletes all StatefulSet API Objects in Namespace ns.
// It scales each set to 0 first, then deletes the set, then deletes every PVC
// in the namespace and waits for the backing PVs to disappear.
func DeleteAllStatefulSets(c clientset.Interface, ns string) {
	sst := &StatefulSetTester{c: c}
	ssList, err := c.Apps().StatefulSets(ns).List(metav1.ListOptions{LabelSelector: labels.Everything().String()})
	ExpectNoError(err)

	// Scale down each statefulset, then delete it completely.
	// Deleting a pvc without doing this will leak volumes, #25101.
	// Errors are collected instead of failing fast so every statefulset gets a
	// deletion attempt; they are reported together at the end.
	errList := []string{}
	for _, ss := range ssList.Items {
		Logf("Scaling statefulset %v to 0", ss.Name)
		if err := sst.Scale(&ss, 0); err != nil {
			errList = append(errList, fmt.Sprintf("%v", err))
		}
		sst.WaitForStatusReplicas(&ss, 0)
		Logf("Deleting statefulset %v", ss.Name)
		// Use OrphanDependents=false so it's deleted synchronously.
		// We already made sure the Pods are gone inside Scale().
		if err := c.Apps().StatefulSets(ss.Namespace).Delete(ss.Name, &metav1.DeleteOptions{OrphanDependents: new(bool)}); err != nil {
			errList = append(errList, fmt.Sprintf("%v", err))
		}
	}

	// pvs are global, so we need to wait for the exact ones bound to the statefulset pvcs.
	pvNames := sets.NewString()
	// TODO: Don't assume all pvcs in the ns belong to a statefulset
	pvcPollErr := wait.PollImmediate(StatefulSetPoll, StatefulSetTimeout, func() (bool, error) {
		pvcList, err := c.Core().PersistentVolumeClaims(ns).List(metav1.ListOptions{LabelSelector: labels.Everything().String()})
		if err != nil {
			Logf("WARNING: Failed to list pvcs, retrying %v", err)
			return false, nil
		}
		// Record the PV backing each PVC so the second poll below can wait for
		// those specific PVs to be reclaimed.
		for _, pvc := range pvcList.Items {
			pvNames.Insert(pvc.Spec.VolumeName)
			// TODO: Double check that there are no pods referencing the pvc
			Logf("Deleting pvc: %v with volume %v", pvc.Name, pvc.Spec.VolumeName)
			if err := c.Core().PersistentVolumeClaims(ns).Delete(pvc.Name, nil); err != nil {
				return false, nil
			}
		}
		return true, nil
	})
	if pvcPollErr != nil {
		errList = append(errList, fmt.Sprintf("Timeout waiting for pvc deletion."))
	}

	// Wait for the provisioner to actually reclaim the PVs recorded above.
	pollErr := wait.PollImmediate(StatefulSetPoll, StatefulSetTimeout, func() (bool, error) {
		pvList, err := c.Core().PersistentVolumes().List(metav1.ListOptions{LabelSelector: labels.Everything().String()})
		if err != nil {
			Logf("WARNING: Failed to list pvs, retrying %v", err)
			return false, nil
		}
		waitingFor := []string{}
		for _, pv := range pvList.Items {
			if pvNames.Has(pv.Name) {
				waitingFor = append(waitingFor, fmt.Sprintf("%v: %+v", pv.Name, pv.Status))
			}
		}
		if len(waitingFor) == 0 {
			return true, nil
		}
		Logf("Still waiting for pvs of statefulset to disappear:\n%v", strings.Join(waitingFor, "\n"))
		return false, nil
	})
	if pollErr != nil {
		errList = append(errList, fmt.Sprintf("Timeout waiting for pv provisioner to delete pvs, this might mean the test leaked pvs."))
	}
	if len(errList) != 0 {
		ExpectNoError(fmt.Errorf("%v", strings.Join(errList, "\n")))
	}
}
// IsStatefulSetPodInitialized returns true if pod's StatefulSetInitAnnotation exists and is set to true.
func IsStatefulSetPodInitialized(pod v1.Pod) bool {
	annotation, present := pod.Annotations[apps.StatefulSetInitAnnotation]
	if !present {
		// No annotation at all counts as "not initialized".
		return false
	}
	parsed, err := strconv.ParseBool(annotation)
	if err != nil {
		// A malformed annotation value is a test failure, not a soft false.
		Failf("Couldn't parse statefulset init annotations %v", annotation)
	}
	return parsed
}
// NewStatefulSetPVC returns a PersistentVolumeClaim named name, for testing StatefulSets.
// The claim is ReadWriteOnce and requests a minimal quantity (1, BinarySI);
// NOTE(review): presumably the actual size is irrelevant for these tests — confirm.
func NewStatefulSetPVC(name string) v1.PersistentVolumeClaim {
	return v1.PersistentVolumeClaim{
		ObjectMeta: metav1.ObjectMeta{
			Name: name,
			Annotations: map[string]string{
				// Alpha storage-class annotation; requests dynamic provisioning
				// regardless of the (arbitrary) value.
				"volume.alpha.kubernetes.io/storage-class": "anything",
			},
		},
		Spec: v1.PersistentVolumeClaimSpec{
			AccessModes: []v1.PersistentVolumeAccessMode{
				v1.ReadWriteOnce,
			},
			Resources: v1.ResourceRequirements{
				Requests: v1.ResourceList{
					v1.ResourceStorage: *resource.NewQuantity(1, resource.BinarySI),
				},
			},
		},
	}
}
// NewStatefulSet creates a new NGINX StatefulSet for testing. The StatefulSet is named name, is in namespace ns,
// statefulPodMounts are the mounts that will be backed by PVs. podMounts are the mounts that are mounted directly
// to the Pod. labels are the labels that will be used for the StatefulSet selector.
func NewStatefulSet(name, ns, governingSvcName string, replicas int32, statefulPodMounts []v1.VolumeMount, podMounts []v1.VolumeMount, labels map[string]string) *apps.StatefulSet {
	mounts := append(statefulPodMounts, podMounts...)
	// One PVC template per PV-backed mount, named after the mount.
	claims := []v1.PersistentVolumeClaim{}
	for _, m := range statefulPodMounts {
		claims = append(claims, NewStatefulSetPVC(m.Name))
	}

	// Direct pod mounts are backed by host paths under /tmp/<mount name>.
	vols := []v1.Volume{}
	for _, m := range podMounts {
		vols = append(vols, v1.Volume{
			Name: m.Name,
			VolumeSource: v1.VolumeSource{
				HostPath: &v1.HostPathVolumeSource{
					Path: fmt.Sprintf("/tmp/%v", m.Name),
				},
			},
		})
	}

	return &apps.StatefulSet{
		TypeMeta: metav1.TypeMeta{
			Kind:       "StatefulSet",
			APIVersion: "apps/v1beta1",
		},
		ObjectMeta: metav1.ObjectMeta{
			Name:      name,
			Namespace: ns,
		},
		Spec: apps.StatefulSetSpec{
			Selector: &metav1.LabelSelector{
				MatchLabels: labels,
			},
			// Inline func converts the int32 value to the required *int32.
			Replicas: func(i int32) *int32 { return &i }(replicas),
			Template: v1.PodTemplateSpec{
				ObjectMeta: metav1.ObjectMeta{
					Labels:      labels,
					Annotations: map[string]string{},
				},
				Spec: v1.PodSpec{
					Containers: []v1.Container{
						{
							Name:         "nginx",
							Image:        "gcr.io/google_containers/nginx-slim:0.7",
							VolumeMounts: mounts,
						},
					},
					Volumes: vols,
				},
			},
			UpdateStrategy:       apps.StatefulSetUpdateStrategy{Type: apps.RollingUpdateStatefulSetStrategyType},
			VolumeClaimTemplates: claims,
			ServiceName:          governingSvcName,
		},
	}
}
// SetStatefulSetInitializedAnnotation sets the StatefulSetInitAnnotation to value.
// NOTE(review): the annotation key is hard-coded here; presumably it matches
// apps.StatefulSetInitAnnotation used elsewhere in this file — confirm.
func SetStatefulSetInitializedAnnotation(ss *apps.StatefulSet, value string) {
	ss.Spec.Template.ObjectMeta.Annotations["pod.alpha.kubernetes.io/initialized"] = value
}
| caesarxuchao/kubernetes | test/e2e/framework/statefulset_utils.go | GO | apache-2.0 | 23,045 |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser;
import gw.lang.reflect.IMethodInfo;
/**
 * Reduced (parse-tree-free) form of a {@link DelegateFunctionSymbol}.
 * Captures the delegate's target method (taken from {@code dfs.getMi()}) at
 * construction so it remains queryable after reduction.
 */
public class ReducedDelegateFunctionSymbol extends ReducedDynamicFunctionSymbol implements IReducedDelegateFunctionSymbol {
  // Method info the delegate resolves to; fixed at construction time.
  private IMethodInfo _targetMethodInfo;

  ReducedDelegateFunctionSymbol(DelegateFunctionSymbol dfs) {
    super( dfs );
    _targetMethodInfo = dfs.getMi();
  }

  @Override
  public IMethodInfo getTargetMethodInfo() {
    return _targetMethodInfo;
  }
}
| dumitru-petrusca/gosu-lang | gosu-core/src/main/java/gw/internal/gosu/parser/ReducedDelegateFunctionSymbol.java | Java | apache-2.0 | 511 |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/compiler/xla/service/cpu/layout_assignment.h"
#include <numeric>
#include "tensorflow/compiler/xla/map_util.h"
#include "tensorflow/compiler/xla/service/cpu/dot_op_emitter.h"
#include "tensorflow/compiler/xla/service/cpu/ir_emission_utils.h"
#include "tensorflow/core/lib/core/errors.h"
namespace xla {
namespace cpu {
// Adds CPU-backend layout constraints for one computation:
//   - convolutions implementable by Eigen get row-major input/filter/output;
//   - constant dot RHS operands that profit from flipping get column-major;
//   - dots implementable by Eigen get row-major lhs/rhs/output;
//   - all remaining operands default to row-major.
Status CpuLayoutAssignment::AddBackendConstraints(
    LayoutConstraints* constraints) {
  // Returns a copy of `old_shape` with a row-major layout
  // (minor-to-major = {rank-1, ..., 1, 0}).
  auto row_major_shape = [](const Shape& old_shape) {
    Shape new_shape(old_shape);
    std::vector<int64> dimension_order(new_shape.dimensions_size());
    std::iota(dimension_order.rbegin(), dimension_order.rend(), 0);
    *new_shape.mutable_layout() = LayoutUtil::MakeLayout(dimension_order);
    return new_shape;
  };

  // Returns a copy of `old_shape` with a column-major layout
  // (minor-to-major = {0, 1, ..., rank-1}).
  auto col_major_shape = [](const Shape& old_shape) {
    Shape new_shape(old_shape);
    std::vector<int64> dimension_order(new_shape.dimensions_size());
    std::iota(dimension_order.begin(), dimension_order.end(), 0);
    *new_shape.mutable_layout() = LayoutUtil::MakeLayout(dimension_order);
    return new_shape;
  };

  // We want to change the layout of constant arrays to be column major when all
  // of their users are dot operations that can be made faster with the flipped
  // layout.  To avoid going quadratic over the # of instructions, we cache
  // this property in should_make_rhs_col_major -- it maps a constant to true if
  // all of the users of said constant are dot operations that can be sped up.
  // This cache is populated lazily as we encounter dot operations traversing
  // the instruction stream.
  tensorflow::gtl::FlatMap<const HloInstruction*, bool>
      should_make_rhs_col_major_cache;
  auto should_make_rhs_col_major = [&](const HloInstruction& instruction) {
    if (ProfitableToImplementDotInUntiledLlvmIr(instruction) !=
        DotInLlvmIrProfitable::kWithColumnMajorRhs) {
      return false;
    }

    const auto* rhs = instruction.operand(1);
    if (rhs->opcode() != HloOpcode::kConstant) {
      return false;
    }

    auto it = should_make_rhs_col_major_cache.find(rhs);
    if (it != should_make_rhs_col_major_cache.end()) {
      return it->second;
    }

    // The flip must be profitable for *every* user of the constant, and the
    // constant must not also appear as the LHS of a user (the X @ X case).
    bool result = std::all_of(
        rhs->users().begin(), rhs->users().end(), [&](HloInstruction* user) {
          return ProfitableToImplementDotInUntiledLlvmIr(*user) ==
                     DotInLlvmIrProfitable::kWithColumnMajorRhs &&
                 user->operand(0) != rhs;
        });

    InsertOrDie(&should_make_rhs_col_major_cache, rhs, result);
    return result;
  };

  const HloComputation* computation = constraints->computation();
  for (auto* instruction : computation->instructions()) {
    if (instruction->opcode() == HloOpcode::kConvolution &&
        PotentiallyImplementedAsEigenConvolution(*instruction)) {
      const HloInstruction* convolution = instruction;
      const HloInstruction* lhs_instruction = convolution->operand(0);
      const HloInstruction* rhs_instruction = convolution->operand(1);

      // In order to implement `convolution` with Eigen convolution, the layouts
      // of the input, filter, and output need to be row-major.
      //
      // These constraints are not hard constraints. Ideally, we should decide
      // which layouts to choose according to some cost model.
      Shape output_shape(row_major_shape(convolution->shape()));
      Shape input_shape(row_major_shape(lhs_instruction->shape()));
      Shape filter_shape(row_major_shape(rhs_instruction->shape()));

      // Set layouts of the instructions' shapes.
      TF_RETURN_IF_ERROR(
          constraints->SetOperandLayout(input_shape, convolution, 0));
      TF_RETURN_IF_ERROR(
          constraints->SetOperandLayout(filter_shape, convolution, 1));
      TF_RETURN_IF_ERROR(
          constraints->SetInstructionLayout(output_shape, convolution));
    } else if (should_make_rhs_col_major(*instruction)) {
      auto* dot = instruction;
      const auto& rhs_shape = dot->operand(1)->shape();
      TF_RETURN_IF_ERROR(
          constraints->SetOperandLayout(col_major_shape(rhs_shape), dot, 1));
    } else if (PotentiallyImplementedAsEigenDot(*instruction)) {
      const HloInstruction* dot = instruction;
      // In order to implement `dot` with Eigen dot, the layouts of the lhs,
      // rhs, and output need to be row-major.
      //
      // These constraints are not hard constraints. Ideally, we should decide
      // which layouts to choose according to some cost model.
      Shape output_shape(row_major_shape(dot->shape()));

      const HloInstruction* lhs_instruction = dot->operand(0);
      Shape lhs_shape(row_major_shape(lhs_instruction->shape()));
      TF_RETURN_IF_ERROR(constraints->SetOperandLayout(lhs_shape, dot, 0));

      // dot is a kDot or a kTransposeDot fusion node. In the latter case, if
      // it represents X @ X, it may have just one operand.
      if (dot->operand_count() > 1) {
        const HloInstruction* rhs_instruction = dot->operand(1);
        Shape rhs_shape(row_major_shape(rhs_instruction->shape()));
        TF_RETURN_IF_ERROR(constraints->SetOperandLayout(rhs_shape, dot, 1));
      }

      // Set layouts of the instructions' shapes.
      TF_RETURN_IF_ERROR(constraints->SetInstructionLayout(output_shape, dot));
    } else {
      // Default path: constrain every unconstrained, non-forwarded operand to
      // row-major.
      for (int64 operand_no = 0; operand_no < instruction->operand_count();
           ++operand_no) {
        // Skip operands which already have a constraint.
        if (constraints->OperandLayout(instruction, operand_no) != nullptr) {
          continue;
        }
        // Skip over forwarded operands.
        if (constraints->OperandBufferForwarded(instruction, operand_no)) {
          continue;
        }
        Shape operand_shape(
            row_major_shape(instruction->operand(operand_no)->shape()));
        TF_RETURN_IF_ERROR(constraints->SetOperandLayout(
            operand_shape, instruction, operand_no));
      }
      // Skip over the root instruction for the top-level computation.
      if (computation->parent()->entry_computation() == computation &&
          computation->root_instruction() == instruction) {
        continue;
      }
      // Skip instructions which don't produce array shapes (tuples, opaque,
      // etc.).
      if (!ShapeUtil::IsArray(instruction->shape())) {
        continue;
      }
    }
  }
  return tensorflow::Status::OK();
}
} // namespace cpu
} // namespace xla
| horance-liu/tensorflow | tensorflow/compiler/xla/service/cpu/layout_assignment.cc | C++ | apache-2.0 | 7,084 |
/* xlsx.js (C) 2013-present SheetJS -- http://sheetjs.com */
/* eslint-env node */
/* vim: set ts=2 ft=javascript: */
/// <reference types="../node_modules/@types/node/" />
const n = "xlsx";
import X = require("xlsx");
import 'exit-on-epipe';
import * as fs from 'fs';
import program = require('commander');
// Build the CLI surface. Each output format gets a dedicated flag; the
// remaining flags tune parsing/writing behavior.
program
	.version(X.version)
	.usage('[options] <file> [sheetname]')
	.option('-f, --file <file>', 'use specified workbook')
	.option('-s, --sheet <sheet>', 'print specified sheet (default first sheet)')
	.option('-N, --sheet-index <idx>', 'use specified sheet index (0-based)')
	.option('-p, --password <pw>', 'if file is encrypted, try with specified pw')
	.option('-l, --list-sheets', 'list sheet names and exit')
	.option('-o, --output <file>', 'output to specified file')
	.option('-B, --xlsb', 'emit XLSB to <sheetname> or <file>.xlsb')
	.option('-M, --xlsm', 'emit XLSM to <sheetname> or <file>.xlsm')
	.option('-X, --xlsx', 'emit XLSX to <sheetname> or <file>.xlsx')
	.option('-I, --xlam', 'emit XLAM to <sheetname> or <file>.xlam')
	.option('-Y, --ods', 'emit ODS to <sheetname> or <file>.ods')
	.option('-8, --xls', 'emit XLS to <sheetname> or <file>.xls (BIFF8)')
	.option('-5, --biff5','emit XLS to <sheetname> or <file>.xls (BIFF5)')
	.option('-2, --biff2','emit XLS to <sheetname> or <file>.xls (BIFF2)')
	.option('-i, --xla', 'emit XLA to <sheetname> or <file>.xla')
	.option('-6, --xlml', 'emit SSML to <sheetname> or <file>.xls (2003 XML)')
	.option('-T, --fods', 'emit FODS to <sheetname> or <file>.fods (Flat ODS)')
	.option('-S, --formulae', 'emit list of values and formulae')
	.option('-j, --json', 'emit formatted JSON (all fields text)')
	.option('-J, --raw-js', 'emit raw JS object (raw numbers)')
	.option('-A, --arrays', 'emit rows as JS objects (raw numbers)')
	.option('-H, --html', 'emit HTML to <sheetname> or <file>.html')
	.option('-D, --dif', 'emit DIF to <sheetname> or <file>.dif (Lotus DIF)')
	.option('-U, --dbf', 'emit DBF to <sheetname> or <file>.dbf (MSVFP DBF)')
	.option('-K, --sylk', 'emit SYLK to <sheetname> or <file>.slk (Excel SYLK)')
	.option('-P, --prn', 'emit PRN to <sheetname> or <file>.prn (Lotus PRN)')
	.option('-E, --eth', 'emit ETH to <sheetname> or <file>.eth (Ethercalc)')
	.option('-t, --txt', 'emit TXT to <sheetname> or <file>.txt (UTF-8 TSV)')
	.option('-r, --rtf', 'emit RTF to <sheetname> or <file>.txt (Table RTF)')
	.option('-z, --dump', 'dump internal representation as JSON')
	.option('--props', 'dump workbook properties as CSV')
	.option('-F, --field-sep <sep>', 'CSV field separator', ",")
	.option('-R, --row-sep <sep>', 'CSV row separator', "\n")
	.option('-n, --sheet-rows <num>', 'Number of rows to process (0=all rows)')
	.option('--codepage <cp>', 'default to specified codepage when ambiguous')
	.option('--req <module>', 'require module before processing')
	.option('--sst', 'generate shared string table for XLS* formats')
	.option('--compress', 'use compression when writing XLSX/M/B and ODS')
	.option('--read', 'read but do not generate output')
	.option('--book', 'for single-sheet formats, emit a file per worksheet')
	.option('--all', 'parse everything; write as much as possible')
	.option('--dev', 'development mode')
	.option('--sparse', 'sparse mode')
	.option('-q, --quiet', 'quiet mode');

// Extra help text appended after commander's generated usage.
program.on('--help', function() {
	console.log(' Default output format is CSV');
	console.log(' Support email: dev@sheetjs.com');
	console.log(' Web Demo: http://oss.sheetjs.com/js-'+n+'/');
});
/* flag, bookType, default ext */
// Whole-workbook output formats: [CLI flag name, bookType value, default extension].
const workbook_formats = [
	['xlsx', 'xlsx', 'xlsx'],
	['xlsm', 'xlsm', 'xlsm'],
	['xlam', 'xlam', 'xlam'],
	['xlsb', 'xlsb', 'xlsb'],
	['xls', 'xls', 'xls'],
	['xla', 'xla', 'xla'],
	['biff5', 'biff5', 'xls'],
	['ods', 'ods', 'ods'],
	['fods', 'fods', 'fods']
];
// Additional workbook formats handled after the table above.
const wb_formats_2 = [
	['xlml', 'xlml', 'xls']
];
program.parse(process.argv);

// Resolve target file and sheet: positional args first, then let the explicit
// -f/-s flags override them.
let filename = '', sheetname = '';
if(program.args[0]) {
	filename = program.args[0];
	if(program.args[1]) sheetname = program.args[1];
}
if(program.sheet) sheetname = program.sheet;
if(program.file) filename = program.file;

if(!filename) {
	console.error(n + ": must specify a filename");
	process.exit(1);
}
if(!fs.existsSync(filename)) {
	console.error(n + ": " + filename + ": No such file or directory");
	process.exit(2);
}

// Parsing options assembled from CLI flags.
const opts: X.ParsingOptions = {};
let wb: X.WorkBook;
if(program.listSheets) opts.bookSheets = true;
if(program.sheetRows) opts.sheetRows = program.sheetRows;
if(program.password) opts.password = program.password;
// `seen` flips to true once any whole-workbook format flag (or matching -o
// extension) is detected.
let seen = false;
// Enable workbook-format parsing options (keep formulae and number formats);
// for workbook formats, -o doubles as the output name.
function wb_fmt() {
	seen = true;
	opts.cellFormula = true;
	opts.cellNF = true;
	if(program.output) sheetname = program.output;
}
// True when the -o target's extension matches format name m.
function isfmt(m: string): boolean {
	if(!program.output) return false;
	const t = m.charAt(0) === "." ? m : "." + m;
	return program.output.slice(-t.length) === t;
}

workbook_formats.forEach(function(m) { if(program[m[0]] || isfmt(m[0])) { wb_fmt(); } });
wb_formats_2.forEach(function(m) { if(program[m[0]] || isfmt(m[0])) { wb_fmt(); } });
// Outside workbook formats, formulae are only parsed when -S was requested.
if(seen) {
} else if(program.formulae) opts.cellFormula = true;
else opts.cellFormula = false;
// Writing options; WTF propagates strict-error mode from parsing.
const wopts: X.WritingOptions = ({WTF:opts.WTF, bookSST:program.sst}/*:any*/);
if(program.compress) wopts.compression = true;

// --all: parse and emit as much metadata as the formats allow.
if(program.all) {
	opts.cellFormula = true;
	opts.bookVBA = true;
	opts.cellNF = true;
	opts.cellHTML = true;
	opts.cellStyles = true;
	opts.sheetStubs = true;
	opts.cellDates = true;
	wopts.cellStyles = true;
	wopts.bookVBA = true;
}
if(program.sparse) opts.dense = false; else opts.dense = true;
if(program.codepage) opts.codepage = +program.codepage;

// --dev skips the try/catch so parse errors surface with full stack traces.
if(program.dev) {
	opts.WTF = true;
	wb = X.readFile(filename, opts);
} else try {
	wb = X.readFile(filename, opts);
} catch(e) {
	let msg = (program.quiet) ? "" : n + ": error parsing ";
	msg += filename + ": " + e;
	console.error(msg);
	process.exit(3);
}
if(program.read) process.exit(0);
if(!wb) { console.error(n + ": error parsing " + filename + ": empty workbook"); process.exit(0); }
/*:: if(!wb) throw new Error("unreachable"); */

// Early-exit informational modes.
if(program.listSheets) {
	console.log((wb.SheetNames||[]).join("\n"));
	process.exit(0);
}
if(program.dump) {
	console.log(JSON.stringify(wb));
	process.exit(0);
}
if(program.props) {
	dump_props(wb);
	process.exit(0);
}

/* full workbook formats */
workbook_formats.forEach(function(m) { if(program[m[0]] || isfmt(m[0])) {
	wopts.bookType = <X.BookType>(m[1]);
	X.writeFile(wb, program.output || sheetname || ((filename || "") + "." + m[2]), wopts);
	process.exit(0);
} });
wb_formats_2.forEach(function(m) { if(program[m[0]] || isfmt(m[0])) {
	wopts.bookType = <X.BookType>(m[1]);
	X.writeFile(wb, program.output || sheetname || ((filename || "") + "." + m[2]), wopts);
	process.exit(0);
} });

// Pick the target sheet: explicit name, then -N index, then the first sheet.
let target_sheet = sheetname || '';
if(target_sheet === '') {
	if(program.sheetIndex < (wb.SheetNames||[]).length) target_sheet = wb.SheetNames[program.sheetIndex];
	else target_sheet = (wb.SheetNames||[""])[0];
}
let ws: X.WorkSheet;
try {
	ws = wb.Sheets[target_sheet];
	if(!ws) {
		console.error("Sheet " + target_sheet + " cannot be found");
		process.exit(3);
	}
} catch(e) {
	console.error(n + ": error parsing "+filename+" "+target_sheet+": " + e);
	process.exit(4);
}
if(!program.quiet && !program.book) console.error(target_sheet);

/* single worksheet file formats */
[
	['biff2', '.xls'],
	['biff3', '.xls'],
	['biff4', '.xls'],
	['sylk', '.slk'],
	['html', '.html'],
	['prn', '.prn'],
	['eth', '.eth'],
	['rtf', '.rtf'],
	['txt', '.txt'],
	['dbf', '.dbf'],
	['dif', '.dif']
].forEach(function(m) { if(program[m[0]] || isfmt(m[1])) {
	wopts.bookType = <X.BookType>(m[0]);
	X.writeFile(wb, program.output || sheetname || ((filename || "") + m[1]), wopts);
	process.exit(0);
} });

let oo = "", strm = false;
// NOTE(review): the sheet name was already echoed above when !quiet && !book,
// so in that case it prints twice here — confirm whether that is intended.
if(!program.quiet) console.error(target_sheet);
// Text output modes; default (no flag) streams CSV.
if(program.formulae) oo = X.utils.sheet_to_formulae(ws).join("\n");
else if(program.json) oo = JSON.stringify(X.utils.sheet_to_json(ws));
else if(program.rawJs) oo = JSON.stringify(X.utils.sheet_to_json(ws,{raw:true}));
else if(program.arrays) oo = JSON.stringify(X.utils.sheet_to_json(ws,{raw:true, header:1}));
else {
	strm = true;
	const stream: NodeJS.ReadableStream = X.stream.to_csv(ws, {FS:program.fieldSep, RS:program.rowSep});
	if(program.output) stream.pipe(fs.createWriteStream(program.output));
	else stream.pipe(process.stdout);
}
// Non-streaming modes buffer the whole output string first.
if(!strm) {
	if(program.output) fs.writeFileSync(program.output, oo);
	else console.log(oo);
}
/*:: } */
/*:: } */
// Render standard + custom workbook properties as CSV via a temporary sheet.
function dump_props(wb: X.WorkBook) {
	const merged = {...wb.Props, ...wb.Custprops};
	const rows: any[][] = (<any>Object).entries(merged);
	console.log(X.utils.sheet_to_csv(X.utils.aoa_to_sheet(rows)));
}
| SheetJS/js-xlsx | types/bin_xlsx.ts | TypeScript | apache-2.0 | 8,728 |
//------------------------------------------------------------------------------
// <自动生成>
// 此代码由工具生成。
//
// 对此文件的更改可能会导致不正确的行为,并且如果
// 重新生成代码,这些更改将会丢失。
// </自动生成>
//------------------------------------------------------------------------------
namespace DTcms.Web.admin.manager {

    public partial class role_edit {

        /// <summary>
        /// form1 control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify, move the field declaration from the designer file into the code-behind file.
        /// </remarks>
        protected global::System.Web.UI.HtmlControls.HtmlForm form1;

        /// <summary>
        /// ddlRoleType control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify, move the field declaration from the designer file into the code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.DropDownList ddlRoleType;

        /// <summary>
        /// txtRoleName control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify, move the field declaration from the designer file into the code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.TextBox txtRoleName;

        /// <summary>
        /// rptList control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify, move the field declaration from the designer file into the code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Repeater rptList;

        /// <summary>
        /// btnSubmit control.
        /// </summary>
        /// <remarks>
        /// Auto-generated field.
        /// To modify, move the field declaration from the designer file into the code-behind file.
        /// </remarks>
        protected global::System.Web.UI.WebControls.Button btnSubmit;
    }
}
| LutherW/MTMS | Source/DTcms.Web/admin/manager/role_edit.aspx.designer.cs | C# | apache-2.0 | 2,149 |
package org.apereo.cas.ticket.registry;
import org.apereo.cas.ticket.Ticket;
import org.infinispan.Cache;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
/**
* This is {@link InfinispanTicketRegistry}. Infinispan is a distributed in-memory
* key/value data store with optional schema.
* It offers advanced functionality such as transactions, events, querying and distributed processing.
* See <a href="http://infinispan.org/features/">http://infinispan.org/features/</a> for more info.
*
* @author Misagh Moayyed
* @since 4.2.0
*/
public class InfinispanTicketRegistry extends AbstractTicketRegistry {
    private static final Logger LOGGER = LoggerFactory.getLogger(InfinispanTicketRegistry.class);

    /** Backing Infinispan cache mapping ticket id to ticket. */
    private final Cache<String, Ticket> cache;

    /**
     * Instantiates a new Infinispan ticket registry.
     *
     * @param cache the cache
     */
    public InfinispanTicketRegistry(final Cache<String, Ticket> cache) {
        this.cache = cache;
        LOGGER.info("Setting up Infinispan Ticket Registry...");
    }

    @Override
    public Ticket updateTicket(final Ticket ticket) {
        // NOTE(review): unlike addTicket, updates are stored without encoding
        // and without lifespan/idle metadata — confirm this asymmetry is intended.
        this.cache.put(ticket.getId(), ticket);
        return ticket;
    }

    @Override
    public void addTicket(final Ticket ticketToAdd) {
        final Ticket ticket = encodeTicket(ticketToAdd);
        // Fall back to the ticket's time-to-live when no positive
        // time-to-idle is configured.
        final long idleTime = ticket.getExpirationPolicy().getTimeToIdle() <= 0
                ? ticket.getExpirationPolicy().getTimeToLive()
                : ticket.getExpirationPolicy().getTimeToIdle();

        LOGGER.debug("Adding ticket [{}] to cache store to live [{}] seconds and stay idle for [{}]",
                ticket.getId(), ticket.getExpirationPolicy().getTimeToLive(), idleTime);

        // Store with explicit lifespan and max-idle so Infinispan evicts the
        // entry in line with the ticket's expiration policy.
        this.cache.put(ticket.getId(), ticket,
                ticket.getExpirationPolicy().getTimeToLive(), TimeUnit.SECONDS,
                idleTime, TimeUnit.SECONDS);
    }

    @Override
    public Ticket getTicket(final String ticketId) {
        // Fixed: the null guard must run before encodeTicketId; previously the
        // id was encoded first, so the guard could never protect that call.
        if (ticketId == null) {
            return null;
        }
        final String encTicketId = encodeTicketId(ticketId);
        return Ticket.class.cast(this.cache.get(encTicketId));
    }

    @Override
    public boolean deleteSingleTicket(final String ticketId) {
        // NOTE(review): the raw id is removed here while getTicket looks up the
        // encoded id — presumably callers pass an already-encoded id; confirm.
        this.cache.remove(ticketId);
        return getTicket(ticketId) == null;
    }

    @Override
    public long deleteAll() {
        // Capture the size before clearing; clear() empties the cache.
        final int size = this.cache.size();
        this.cache.clear();
        return size;
    }

    /**
     * Retrieve all tickets from the registry.
     * <p>
     * Note! Usage of this method can be computational and I/O intensive and should not be used for other than
     * debugging.
     *
     * @return collection of tickets currently stored in the registry. Tickets
     * might or might not be valid i.e. expired.
     */
    @Override
    public Collection<Ticket> getTickets() {
        return decodeTickets(this.cache.values());
    }
}
| petracvv/cas | support/cas-server-support-infinispan-ticket-registry/src/main/java/org/apereo/cas/ticket/registry/InfinispanTicketRegistry.java | Java | apache-2.0 | 2,994 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''Unit tests for the Dataset.py module'''
import unittest
from ocw.dataset import Dataset, Bounds
import numpy as np
import datetime as dt
class TestDatasetAttributes(unittest.TestCase):
    """Verify that Dataset stores its constructor arguments unmodified."""

    def setUp(self):
        self.lat = np.array([10, 12, 14, 16, 18])
        self.lon = np.array([100, 102, 104, 106, 108])
        self.time = np.array([dt.datetime(2000, x, 1) for x in range(1, 13)])
        flat_array = np.array(range(300))
        self.value = flat_array.reshape(12, 5, 5)
        self.variable = 'prec'
        self.name = 'foo'
        self.origin = {'path': '/a/fake/file/path'}
        self.test_dataset = Dataset(self.lat,
                                    self.lon,
                                    self.time,
                                    self.value,
                                    variable=self.variable,
                                    name=self.name,
                                    origin=self.origin)

    # Fixed: assertItemsEqual is Python-2-only (renamed assertCountEqual in
    # Python 3); np.testing.assert_array_equal works on both and compares
    # element-wise.
    def test_lats(self):
        np.testing.assert_array_equal(self.test_dataset.lats, self.lat)

    def test_lons(self):
        np.testing.assert_array_equal(self.test_dataset.lons, self.lon)

    def test_times(self):
        np.testing.assert_array_equal(self.test_dataset.times, self.time)

    def test_values(self):
        # Fixed: the old check compared value.all() to values.all(), i.e. two
        # booleans, which passes for many unequal arrays. Compare the arrays
        # element-wise instead.
        np.testing.assert_array_equal(self.test_dataset.values, self.value)

    def test_variable(self):
        self.assertEqual(self.test_dataset.variable, self.variable)

    def test_name(self):
        self.assertEqual(self.test_dataset.name, self.name)

    def test_origin(self):
        self.assertEqual(self.test_dataset.origin, self.origin)
class TestInvalidDatasetInit(unittest.TestCase):
    """Verify that Dataset rejects malformed constructor arguments."""

    def setUp(self):
        self.lat = np.array([10, 12, 14, 16, 18])
        self.lon = np.array([100, 102, 104, 106, 108])
        self.time = np.array([dt.datetime(2000, x, 1) for x in range(1, 13)])
        flat_array = np.array(range(300))
        self.value = flat_array.reshape(12, 5, 5)
        # Same data but shaped (lat, lon, time) instead of (time, lat, lon).
        self.values_in_wrong_order = flat_array.reshape(5, 5, 12)

    def test_bad_lat_shape(self):
        # Latitudes must be one-dimensional.
        self.lat = np.array([[1, 2], [3, 4]])
        with self.assertRaises(ValueError):
            Dataset(self.lat, self.lon, self.time, self.value, 'prec')

    def test_bad_lon_shape(self):
        # Longitudes must be one-dimensional.
        self.lon = np.array([[1, 2], [3, 4]])
        with self.assertRaises(ValueError):
            Dataset(self.lat, self.lon, self.time, self.value, 'prec')

    def test_bad_times_shape(self):
        # Times must be one-dimensional.
        self.time = np.array([[1, 2], [3, 4]])
        with self.assertRaises(ValueError):
            Dataset(self.lat, self.lon, self.time, self.value, 'prec')

    def test_bad_values_shape(self):
        # Values must be three-dimensional (time, lat, lon).
        self.value = np.array([1, 2, 3, 4, 5])
        with self.assertRaises(ValueError):
            Dataset(self.lat, self.lon, self.time, self.value, 'prec')

    def test_values_shape_mismatch(self):
        # If we change lats to this the shape of value will not match
        # up with the length of the lats array.
        self.lat = self.lat[:-2]
        with self.assertRaises(ValueError):
            Dataset(self.lat, self.lon, self.time, self.value, 'prec')

    def test_values_given_in_wrong_order(self):
        # Correct sizes but axes in the wrong order must be rejected.
        with self.assertRaises(ValueError):
            Dataset(self.lat, self.lon, self.time, self.values_in_wrong_order)

    def test_lons_values_incorrectly_gridded(self):
        # Longitudes given as 0..359 should be normalized to -180..179.
        times = np.array([dt.datetime(2000, x, 1) for x in range(1, 13)])
        lats = np.arange(-30, 30)
        bad_lons = np.arange(360)
        flat_array = np.arange(len(times) * len(lats) * len(bad_lons))
        values = flat_array.reshape(len(times), len(lats), len(bad_lons))

        ds = Dataset(lats, bad_lons, times, values)
        np.testing.assert_array_equal(ds.lons, np.arange(-180, 180))

    def test_reversed_lats(self):
        # Descending latitudes should be flipped into ascending order.
        ds = Dataset(self.lat[::-1], self.lon, self.time, self.value)
        np.testing.assert_array_equal(ds.lats, self.lat)
class TestDatasetFunctions(unittest.TestCase):
    """Tests for Dataset's metadata query helpers."""

    def setUp(self):
        self.lat = np.array([10, 12, 14, 16, 18])
        self.lon = np.array([100, 102, 104, 106, 108])
        self.time = np.array([dt.datetime(2000, x, 1) for x in range(1, 13)])
        flat_array = np.array(range(300))
        self.value = flat_array.reshape(12, 5, 5)
        self.variable = 'prec'
        self.test_dataset = Dataset(self.lat, self.lon, self.time,
                                    self.value, self.variable)

    def test_spatial_boundaries(self):
        # Expected order: (lat_min, lat_max, lon_min, lon_max).
        self.assertEqual(
            self.test_dataset.spatial_boundaries(),
            (min(self.lat), max(self.lat), min(self.lon), max(self.lon)))

    def test_time_range(self):
        self.assertEqual(
            self.test_dataset.time_range(),
            (dt.datetime(2000, 1, 1), dt.datetime(2000, 12, 1)))

    def test_spatial_resolution(self):
        # The lats/lons in setUp are spaced 2 degrees apart.
        self.assertEqual(self.test_dataset.spatial_resolution(), (2, 2))

    def test_temporal_resolution(self):
        # setUp builds one timestamp per month.
        self.assertEqual(self.test_dataset.temporal_resolution(), 'monthly')
class TestBounds(unittest.TestCase):
    """Verify Bounds setter validation for lat/lon ranges and time ordering."""

    def setUp(self):
        self.bounds = Bounds(-80, 80,                 # Lats
                             -160, 160,               # Lons
                             dt.datetime(2000, 1, 1),  # Start time
                             dt.datetime(2002, 1, 1))  # End time

    # Latitude tests
    def test_inverted_min_max_lat(self):
        # min must stay below max and vice versa.
        with self.assertRaises(ValueError):
            self.bounds.lat_min = 81
        with self.assertRaises(ValueError):
            self.bounds.lat_max = -81

    # Lat Min
    def test_out_of_bounds_lat_min(self):
        # Valid latitudes are within [-90, 90].
        with self.assertRaises(ValueError):
            self.bounds.lat_min = -91
        with self.assertRaises(ValueError):
            self.bounds.lat_min = 91

    # Lat Max
    def test_out_of_bounds_lat_max(self):
        with self.assertRaises(ValueError):
            self.bounds.lat_max = -91
        with self.assertRaises(ValueError):
            self.bounds.lat_max = 91

    # Longitude tests
    def test_inverted_max_max_lon(self):
        with self.assertRaises(ValueError):
            self.bounds.lon_min = 161
        with self.assertRaises(ValueError):
            self.bounds.lon_max = -161

    # Lon Min
    def test_out_of_bounds_lon_min(self):
        # Valid longitudes are within [-180, 180].
        with self.assertRaises(ValueError):
            self.bounds.lon_min = -181
        with self.assertRaises(ValueError):
            self.bounds.lon_min = 181

    # Lon Max
    def test_out_of_bounds_lon_max(self):
        with self.assertRaises(ValueError):
            self.bounds.lon_max = -181
        with self.assertRaises(ValueError):
            self.bounds.lon_max = 181

    # Temporal tests
    def test_inverted_start_end_times(self):
        # Start must precede end and vice versa.
        with self.assertRaises(ValueError):
            self.bounds.start = dt.datetime(2003, 1, 1)
        with self.assertRaises(ValueError):
            self.bounds.end = dt.datetime(1999, 1, 1)

    # Start tests
    def test_invalid_start(self):
        with self.assertRaises(ValueError):
            self.bounds.start = "This is not a date time object"

    # End tests
    def test_invalid_end(self):
        with self.assertRaises(ValueError):
            self.bounds.end = "This is not a date time object"
# Allow the suite to be run directly: `python test_dataset.py`.
if __name__ == '__main__':
    unittest.main()
| MJJoyce/climate | ocw/tests/test_dataset.py | Python | apache-2.0 | 8,091 |
'use strict';
module.exports = function (math) {
var util = require('../../util/index'),
BigNumber = math.type.BigNumber,
collection = require('../../type/collection'),
isNumber = util.number.isNumber,
isBoolean = util['boolean'].isBoolean,
isInteger = util.number.isInteger,
isCollection = collection.isCollection;
/**
* Compute the factorial of a value
*
* Factorial only supports an integer value as argument.
* For matrices, the function is evaluated element wise.
*
* Syntax:
*
* math.factorial(n)
*
* Examples:
*
* math.factorial(5); // returns 120
* math.factorial(3); // returns 6
*
* See also:
*
* combinations, permutations
*
* @param {Number | BigNumber | Array | Matrix} n An integer number
* @return {Number | BigNumber | Array | Matrix} The factorial of `n`
*/
math.factorial = function factorial (n) {
var value, res;
if (arguments.length != 1) {
throw new math.error.ArgumentsError('factorial', arguments.length, 1);
}
if (isNumber(n)) {
if (!isInteger(n) || n < 0) {
throw new TypeError('Positive integer value expected in function factorial');
}
value = n - 1;
res = n;
while (value > 1) {
res *= value;
value--;
}
if (res == 0) {
res = 1; // 0! is per definition 1
}
return res;
}
if (n instanceof BigNumber) {
if (!(isPositiveInteger(n))) {
throw new TypeError('Positive integer value expected in function factorial');
}
var one = new BigNumber(1);
value = n.minus(one);
res = n;
while (value.gt(one)) {
res = res.times(value);
value = value.minus(one);
}
if (res.equals(0)) {
res = one; // 0! is per definition 1
}
return res;
}
if (isBoolean(n)) {
return 1; // factorial(1) = 1, factorial(0) = 1
}
if (isCollection(n)) {
return collection.deepMap(n, factorial);
}
throw new math.error.UnsupportedTypeError('factorial', math['typeof'](n));
};
/**
* Test whether BigNumber n is a positive integer
* @param {BigNumber} n
* @returns {boolean} isPositiveInteger
*/
var isPositiveInteger = function(n) {
return n.isInteger() && n.gte(0);
};
};
| wyom/mathjs | lib/function/probability/factorial.js | JavaScript | apache-2.0 | 2,388 |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* Describe Tags Result StAX Unmarshaller
*/
/**
 * StAX unmarshaller for DescribeTagsResult: consumes events from the
 * context until the result element has been fully read.
 */
public class DescribeTagsResultStaxUnmarshaller implements Unmarshaller<DescribeTagsResult, StaxUnmarshallerContext> {

    public DescribeTagsResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        DescribeTagsResult result = new DescribeTagsResult();
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;

        // At the document root the payload sits one level deeper.
        if (context.isStartOfDocument()) {
            targetDepth += 1;
        }

        for (;;) {
            XMLEvent event = context.nextEvent();
            if (event.isEndDocument()) {
                return result;
            }

            if (event.isAttribute() || event.isStartElement()) {
                // Each tagSet/item element is delegated to the TagDescription unmarshaller.
                if (context.testExpression("tagSet/item", targetDepth)) {
                    result.getTags().add(TagDescriptionStaxUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (event.isEndElement() && context.getCurrentDepth() < originalDepth) {
                // Left the element we started in; unmarshalling is complete.
                return result;
            }
        }
    }

    private static DescribeTagsResultStaxUnmarshaller instance;

    /** Lazily created shared instance (unsynchronized accessor). */
    public static DescribeTagsResultStaxUnmarshaller getInstance() {
        if (instance == null) {
            instance = new DescribeTagsResultStaxUnmarshaller();
        }
        return instance;
    }
}
| XidongHuang/aws-sdk-for-java | src/main/java/com/amazonaws/services/ec2/model/transform/DescribeTagsResultStaxUnmarshaller.java | Java | apache-2.0 | 2,423 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.facets.properties.property.disabled;
import org.apache.isis.applib.annotation.Editing;
import org.apache.isis.applib.annotation.Property;
import org.apache.isis.applib.annotation.When;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.core.metamodel.facetapi.FacetHolder;
import org.apache.isis.core.metamodel.facets.members.disabled.DisabledFacet;
import org.apache.isis.core.metamodel.facets.members.disabled.DisabledFacetAbstractImpl;
public class DisabledFacetForPropertyAnnotation extends DisabledFacetAbstractImpl {

    /**
     * Factory: installs a disabled facet only when the property is explicitly
     * annotated with {@link Editing#DISABLED}; in every other case returns
     * {@code null}.
     */
    public static DisabledFacet create(final Property property, final FacetHolder holder) {
        if (property == null) {
            return null;
        }

        if (property.editing() == Editing.DISABLED) {
            return new DisabledFacetForPropertyAnnotation(property.editingDisabledReason(), holder);
        }

        // AS_CONFIGURED: nothing needs to be done here; the DomainObjectFactory
        // (processing @DomainObject annotation) will install an
        // ImmutableFacetForDomainObjectAnnotation on the domain object and then a
        // DisabledFacetOnPropertyDerivedFromImmutable facet will be installed.
        // ENABLED: no facet required.
        return null;
    }

    private DisabledFacetForPropertyAnnotation(final String reason, final FacetHolder holder) {
        super(When.ALWAYS, Where.EVERYWHERE, reason, holder);
    }
}
| howepeng/isis | core/metamodel/src/main/java/org/apache/isis/core/metamodel/facets/properties/property/disabled/DisabledFacetForPropertyAnnotation.java | Java | apache-2.0 | 2,470 |
module ActionMailer
  # Returns the version of the currently loaded ActionMailer as a <tt>Gem::Version</tt>
  def self.gem_version
    Gem::Version.new VERSION::STRING
  end

  # Individual version components, assembled into +STRING+ (e.g. "4.1.6").
  module VERSION
    MAJOR = 4
    MINOR = 1
    TINY = 6
    PRE = nil # pre-release tag (e.g. "rc1"); nil for final releases

    # +compact+ drops PRE when it is nil, so no trailing dot appears.
    STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
  end
end
| WilliamFdosS/os-management | vendor/bundle/ruby/2.1.0/gems/actionmailer-4.1.6/lib/action_mailer/gem_version.rb | Ruby | apache-2.0 | 318 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authc.support.mapper;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ContextPreservingActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.xpack.core.security.ScrollHelper;
import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction;
import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequest;
import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest;
import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest;
import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping;
import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel;
import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames;
import org.elasticsearch.xpack.security.authc.support.CachingRealm;
import org.elasticsearch.xpack.security.authc.support.UserRoleMapper;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import static org.elasticsearch.action.DocWriteResponse.Result.CREATED;
import static org.elasticsearch.action.DocWriteResponse.Result.DELETED;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.mapper.MapperService.SINGLE_MAPPING_NAME;
import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING;
import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN;
import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin;
import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS;
import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isIndexDeleted;
import static org.elasticsearch.xpack.security.support.SecurityIndexManager.isMoveFromRedToNonRed;
/**
* This store reads + writes {@link ExpressionRoleMapping role mappings} in an Elasticsearch
* {@link RestrictedIndicesNames#SECURITY_MAIN_ALIAS index}.
* <br>
* The store is responsible for all read and write operations as well as
* {@link #resolveRoles(UserData, ActionListener) resolving roles}.
* <p>
* No caching is done by this class, it is handled at a higher level and no polling for changes
* is done by this class. Modification operations make a best effort attempt to clear the cache
* on all nodes for the user that was modified.
*/
public class NativeRoleMappingStore implements UserRoleMapper {

    private static final Logger logger = LogManager.getLogger(NativeRoleMappingStore.class);

    // Discriminator field/value distinguishing role-mapping documents from the
    // other document types stored in the shared security index.
    static final String DOC_TYPE_FIELD = "doc_type";
    static final String DOC_TYPE_ROLE_MAPPING = "role-mapping";

    // Document ids are prefixed so mapping names cannot collide with other ids.
    private static final String ID_PREFIX = DOC_TYPE_ROLE_MAPPING + "_";

    // Used when a realm-cache refresh is triggered internally and no caller is
    // waiting on the outcome.
    private static final ActionListener<Object> NO_OP_ACTION_LISTENER = new ActionListener<Object>() {
        @Override
        public void onResponse(Object o) {
            // nothing
        }

        @Override
        public void onFailure(Exception e) {
            // nothing
        }
    };

    private final Settings settings;
    private final Client client;
    private final SecurityIndexManager securityIndex;
    private final ScriptService scriptService;
    // Names of realms registered via refreshRealmOnChange; their caches are
    // cleared whenever a mapping is added, changed or removed.
    private final List<String> realmsToRefresh = new CopyOnWriteArrayList<>();

    public NativeRoleMappingStore(Settings settings, Client client, SecurityIndexManager securityIndex, ScriptService scriptService) {
        this.settings = settings;
        this.client = client;
        this.securityIndex = securityIndex;
        this.scriptService = scriptService;
    }

    // Recovers the mapping name from its document id by stripping ID_PREFIX.
    private String getNameFromId(String id) {
        assert id.startsWith(ID_PREFIX);
        return id.substring(ID_PREFIX.length());
    }

    // Builds the document id under which the named mapping is stored.
    private String getIdForName(String name) {
        return ID_PREFIX + name;
    }

    /**
     * Loads all mappings from the index.
     * <em>package private</em> for unit testing
     */
    protected void loadMappings(ActionListener<List<ExpressionRoleMapping>> listener) {
        if (securityIndex.isIndexUpToDate() == false) {
            listener.onFailure(new IllegalStateException(
                    "Security index is not on the current version - the native realm will not be operational until " +
                    "the upgrade API is run on the security index"));
            return;
        }
        final QueryBuilder query = QueryBuilders.termQuery(DOC_TYPE_FIELD, DOC_TYPE_ROLE_MAPPING);
        final Supplier<ThreadContext.StoredContext> supplier = client.threadPool().getThreadContext().newRestorableContext(false);
        // Run the scroll search under the security origin, restoring the
        // caller's thread context before delivering results.
        try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(SECURITY_ORIGIN)) {
            SearchRequest request = client.prepareSearch(SECURITY_MAIN_ALIAS)
                    .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings))
                    .setQuery(query)
                    .setSize(1000)
                    .setFetchSource(true)
                    .request();
            request.indicesOptions().ignoreUnavailable();
            ScrollHelper.fetchAllByEntity(client, request,
                    new ContextPreservingActionListener<>(supplier, ActionListener.wrap((Collection<ExpressionRoleMapping> mappings) ->
                            // buildMapping returns null for unparseable documents; drop them.
                            listener.onResponse(mappings.stream().filter(Objects::nonNull).collect(Collectors.toList())),
                            ex -> {
                                logger.error(new ParameterizedMessage("failed to load role mappings from index [{}] skipping all mappings.",
                                        SECURITY_MAIN_ALIAS), ex);
                                listener.onResponse(Collections.emptyList());
                            })),
                    doc -> buildMapping(getNameFromId(doc.getId()), doc.getSourceRef()));
        }
    }

    // Parses one source document into an ExpressionRoleMapping. Returns null
    // (after logging) on failure so a single bad document does not abort loading.
    protected ExpressionRoleMapping buildMapping(String id, BytesReference source) {
        try (InputStream stream = source.streamInput();
             XContentParser parser = XContentType.JSON.xContent()
                     .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) {
            return ExpressionRoleMapping.parse(id, parser);
        } catch (Exception e) {
            logger.warn(new ParameterizedMessage("Role mapping [{}] cannot be parsed and will be skipped", id), e);
            return null;
        }
    }

    /**
     * Stores (create or update) a single mapping in the index
     */
    public void putRoleMapping(PutRoleMappingRequest request, ActionListener<Boolean> listener) {
        modifyMapping(request.getName(), this::innerPutMapping, request, listener);
    }

    /**
     * Deletes a named mapping from the index
     */
    public void deleteRoleMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) {
        modifyMapping(request.getName(), this::innerDeleteMapping, request, listener);
    }

    // Shared wrapper for write operations: checks the index version, delegates
    // to the given operation, and triggers a realm-cache refresh on success.
    private <Request, Result> void modifyMapping(String name, CheckedBiConsumer<Request, ActionListener<Result>, Exception> inner,
                                                 Request request, ActionListener<Result> listener) {
        if (securityIndex.isIndexUpToDate() == false) {
            listener.onFailure(new IllegalStateException(
                    "Security index is not on the current version - the native realm will not be operational until " +
                    "the upgrade API is run on the security index"));
        } else {
            try {
                inner.accept(request, ActionListener.wrap(r -> refreshRealms(listener, r), listener::onFailure));
            } catch (Exception e) {
                logger.error(new ParameterizedMessage("failed to modify role-mapping [{}]", name), e);
                listener.onFailure(e);
            }
        }
    }

    // Serializes the mapping to JSON and indexes it under its prefixed id.
    // Responds true when a new document was created, false when one was updated.
    private void innerPutMapping(PutRoleMappingRequest request, ActionListener<Boolean> listener) {
        final ExpressionRoleMapping mapping = request.getMapping();
        securityIndex.prepareIndexIfNeededThenExecute(listener::onFailure, () -> {
            final XContentBuilder xContentBuilder;
            try {
                xContentBuilder = mapping.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS, true);
            } catch (IOException e) {
                listener.onFailure(e);
                return;
            }
            executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                    client.prepareIndex(SECURITY_MAIN_ALIAS, SINGLE_MAPPING_NAME, getIdForName(mapping.getName()))
                            .setSource(xContentBuilder)
                            .setRefreshPolicy(request.getRefreshPolicy())
                            .request(),
                    new ActionListener<IndexResponse>() {
                        @Override
                        public void onResponse(IndexResponse indexResponse) {
                            boolean created = indexResponse.getResult() == CREATED;
                            listener.onResponse(created);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            logger.error(new ParameterizedMessage("failed to put role-mapping [{}]", mapping.getName()), e);
                            listener.onFailure(e);
                        }
                    }, client::index);
        });
    }

    // Deletes the named mapping. Responds false (not a failure) when the
    // security index does not exist, since there is nothing to delete.
    private void innerDeleteMapping(DeleteRoleMappingRequest request, ActionListener<Boolean> listener) {
        final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze();
        if (frozenSecurityIndex.indexExists() == false) {
            listener.onResponse(false);
        } else if (securityIndex.isAvailable() == false) {
            listener.onFailure(frozenSecurityIndex.getUnavailableReason());
        } else {
            securityIndex.checkIndexVersionThenExecute(listener::onFailure, () -> {
                executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN,
                        client.prepareDelete(SECURITY_MAIN_ALIAS, SINGLE_MAPPING_NAME, getIdForName(request.getName()))
                                .setRefreshPolicy(request.getRefreshPolicy())
                                .request(),
                        new ActionListener<DeleteResponse>() {
                            @Override
                            public void onResponse(DeleteResponse deleteResponse) {
                                boolean deleted = deleteResponse.getResult() == DELETED;
                                listener.onResponse(deleted);
                            }

                            @Override
                            public void onFailure(Exception e) {
                                logger.error(new ParameterizedMessage("failed to delete role-mapping [{}]", request.getName()), e);
                                listener.onFailure(e);
                            }
                        }, client::delete);
            });
        }
    }

    /**
     * Retrieves one or more mappings from the index.
     * If <code>names</code> is <code>null</code> or {@link Set#isEmpty empty}, then this retrieves all mappings.
     * Otherwise it retrieves the specified mappings by name.
     */
    public void getRoleMappings(Set<String> names, ActionListener<List<ExpressionRoleMapping>> listener) {
        if (names == null || names.isEmpty()) {
            getMappings(listener);
        } else {
            // Filtering is done client-side after loading all mappings.
            getMappings(new ActionListener<List<ExpressionRoleMapping>>() {
                @Override
                public void onResponse(List<ExpressionRoleMapping> mappings) {
                    final List<ExpressionRoleMapping> filtered = mappings.stream()
                            .filter(m -> names.contains(m.getName()))
                            .collect(Collectors.toList());
                    listener.onResponse(filtered);
                }

                @Override
                public void onFailure(Exception e) {
                    listener.onFailure(e);
                }
            });
        }
    }

    // Loads all mappings if the security index is available; otherwise responds
    // with an empty list (the index may simply not have been created yet).
    private void getMappings(ActionListener<List<ExpressionRoleMapping>> listener) {
        if (securityIndex.isAvailable()) {
            loadMappings(listener);
        } else {
            logger.info("The security index is not yet available - no role mappings can be loaded");
            if (logger.isDebugEnabled()) {
                logger.debug("Security Index [{}] [exists: {}] [available: {}] [mapping up to date: {}]",
                        SECURITY_MAIN_ALIAS,
                        securityIndex.indexExists(),
                        securityIndex.isAvailable(),
                        securityIndex.isMappingUpToDate()
                );
            }
            listener.onResponse(Collections.emptyList());
        }
    }

    /**
     * Provides usage statistics for this store.
     * The resulting map contains the keys
     * <ul>
     * <li><code>size</code> - The total number of mappings stored in the index</li>
     * <li><code>enabled</code> - The number of mappings that are
     * {@link ExpressionRoleMapping#isEnabled() enabled}</li>
     * </ul>
     */
    public void usageStats(ActionListener<Map<String, Object>> listener) {
        if (securityIndex.isAvailable() == false) {
            reportStats(listener, Collections.emptyList());
        } else {
            getMappings(ActionListener.wrap(mappings -> reportStats(listener, mappings), listener::onFailure));
        }
    }

    // Builds the usage-stats map described on usageStats.
    private void reportStats(ActionListener<Map<String, Object>> listener, List<ExpressionRoleMapping> mappings) {
        Map<String, Object> usageStats = new HashMap<>();
        usageStats.put("size", mappings.size());
        usageStats.put("enabled", mappings.stream().filter(ExpressionRoleMapping::isEnabled).count());
        listener.onResponse(usageStats);
    }

    // Invalidates realm caches when the security index changes in a way that
    // could alter which mappings are visible (recovery, deletion, or upgrade).
    public void onSecurityIndexStateChange(SecurityIndexManager.State previousState, SecurityIndexManager.State currentState) {
        if (isMoveFromRedToNonRed(previousState, currentState) || isIndexDeleted(previousState, currentState) ||
            previousState.isIndexUpToDate != currentState.isIndexUpToDate) {
            refreshRealms(NO_OP_ACTION_LISTENER, null);
        }
    }

    // Clears the caches of all registered realms, then forwards the original
    // result (or the failure) to the listener.
    private <Result> void refreshRealms(ActionListener<Result> listener, Result result) {
        if (realmsToRefresh.isEmpty()) {
            listener.onResponse(result);
            return;
        }
        final String[] realmNames = this.realmsToRefresh.toArray(Strings.EMPTY_ARRAY);
        executeAsyncWithOrigin(client, SECURITY_ORIGIN, ClearRealmCacheAction.INSTANCE, new ClearRealmCacheRequest().realms(realmNames),
                ActionListener.wrap(
                        response -> {
                            logger.debug((org.apache.logging.log4j.util.Supplier<?>) () -> new ParameterizedMessage(
                                    "Cleared cached in realms [{}] due to role mapping change", Arrays.toString(realmNames)));
                            listener.onResponse(result);
                        },
                        ex -> {
                            logger.warn(new ParameterizedMessage("Failed to clear cache for realms [{}]", Arrays.toString(realmNames)), ex);
                            listener.onFailure(ex);
                        }));
    }

    @Override
    public void resolveRoles(UserData user, ActionListener<Set<String>> listener) {
        // Evaluate every enabled mapping against the user model and collect the
        // union of role names produced by the matching mappings.
        getRoleMappings(null, ActionListener.wrap(
                mappings -> {
                    final ExpressionModel model = user.asModel();
                    final Set<String> roles = mappings.stream()
                            .filter(ExpressionRoleMapping::isEnabled)
                            .filter(m -> m.getExpression().match(model))
                            .flatMap(m -> {
                                final Set<String> roleNames = m.getRoleNames(scriptService, model);
                                logger.trace("Applying role-mapping [{}] to user-model [{}] produced role-names [{}]",
                                        m.getName(), model, roleNames);
                                return roleNames.stream();
                            })
                            .collect(Collectors.toSet());
                    logger.debug("Mapping user [{}] to roles [{}]", user, roles);
                    listener.onResponse(roles);
                }, listener::onFailure
        ));
    }

    /**
     * Indicates that the provided realm should have its cache cleared if this store is updated
     * (that is, {@link #putRoleMapping(PutRoleMappingRequest, ActionListener)} or
     * {@link #deleteRoleMapping(DeleteRoleMappingRequest, ActionListener)} are called).
     * @see ClearRealmCacheAction
     */
    @Override
    public void refreshRealmOnChange(CachingRealm realm) {
        realmsToRefresh.add(realm.name());
    }
}
| coding0011/elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java | Java | apache-2.0 | 18,616 |
# This is the Ruby 2.0-specific kernel file.
# Currently, all 1.9 features are in 2.0. We will need to
# differentiate when there are features from 1.9 removed
# in 2.0.

# These are loads so they don't pollute LOADED_FEATURES.
# The 1.9 kernel is loaded first; 2.0-specific additions follow.
load 'jruby/kernel19.rb'
load 'jruby/kernel20/enumerable.rb'
load 'jruby/kernel20/range.rb'
load 'jruby/kernel20/load_error.rb'
| evandor/skysail-framework | skysail.server.text.asciidoc/resources/jruby/kernel20.rb | Ruby | apache-2.0 | 355 |
/*
* Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 2004,2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.impl.dv.xs;
import java.math.BigInteger;
import javax.xml.datatype.DatatypeConstants;
import javax.xml.datatype.Duration;
import com.sun.org.apache.xerces.internal.impl.dv.InvalidDatatypeValueException;
import com.sun.org.apache.xerces.internal.impl.dv.ValidationContext;
/**
* Used to validate the <yearMonthDuration> type
*
* @xerces.internal
*
* @author Ankit Pasricha, IBM
*
* @version $Id: YearMonthDurationDV.java,v 1.6 2010-11-01 04:39:47 joehw Exp $
*/
class YearMonthDurationDV extends DurationDV {
public Object getActualValue(String content, ValidationContext context)
throws InvalidDatatypeValueException {
try {
return parse(content, DurationDV.YEARMONTHDURATION_TYPE);
}
catch (Exception ex) {
throw new InvalidDatatypeValueException("cvc-datatype-valid.1.2.1", new Object[]{content, "yearMonthDuration"});
}
}
protected Duration getDuration(DateTimeData date) {
int sign = 1;
if ( date.year<0 || date.month<0) {
sign = -1;
}
return datatypeFactory.newDuration(sign == 1,
date.year != DatatypeConstants.FIELD_UNDEFINED?BigInteger.valueOf(sign*date.year):null,
date.month != DatatypeConstants.FIELD_UNDEFINED?BigInteger.valueOf(sign*date.month):null,
null,
null,
null,
null);
}
}
| shun634501730/java_source_cn | src_en/com/sun/org/apache/xerces/internal/impl/dv/xs/YearMonthDurationDV.java | Java | apache-2.0 | 2,247 |
package depends
import (
"fmt"
"sync"
logutil "github.com/docker/infrakit/pkg/log"
"github.com/docker/infrakit/pkg/types"
)
// Package-scoped logger for this module.
var log = logutil.New("module", "run/depends")

// ParseDependsFunc returns a list of dependencies of this spec.
type ParseDependsFunc func(types.Spec) (Runnables, error)

var (
	// parsers maps a kind key (e.g. 'group') to the dependency parsers
	// registered per interface spec (e.g. Group/1.0).
	parsers = map[string]map[types.InterfaceSpec]ParseDependsFunc{}
	// lock guards all access to parsers.
	lock = sync.RWMutex{}
)
// Register registers a helper for parsing for dependencies based on a key (e.g. 'group')
// and interface spec (Group/1.0). Registering the same (key, interfaceSpec) pair twice panics.
func Register(key string, interfaceSpec types.InterfaceSpec, f ParseDependsFunc) {
	lock.Lock()
	defer lock.Unlock()

	byInterface, exists := parsers[key]
	if !exists {
		byInterface = map[types.InterfaceSpec]ParseDependsFunc{}
		parsers[key] = byInterface
	}
	if _, dup := byInterface[interfaceSpec]; dup {
		panic(fmt.Errorf("duplicate depdency parser for %v / %v", key, interfaceSpec))
	}
	byInterface[interfaceSpec] = f
}
// Resolve returns the dependencies listed in the spec as well as inside the properties.
// InterfaceSpec is optional. If nil, the first match by key (kind) is used. If nothing is registered, returns nil
// and no error. Error is returned for exceptions (eg. parsing, etc.)
func Resolve(spec types.Spec, key string, interfaceSpec *types.InterfaceSpec) (Runnables, error) {
	lock.RLock()
	defer lock.RUnlock()

	m, has := parsers[key]
	if !has {
		return nil, nil
	}

	if interfaceSpec == nil {
		// Take the first registered parser for this key, whichever it is.
		for _, parse := range m {
			return parse(spec)
		}
		// No parsers registered under this key: nothing to resolve.
		// Previously control fell through to the lookup below and
		// dereferenced the nil interfaceSpec pointer.
		return nil, nil
	}

	parse, has := m[*interfaceSpec]
	if !has {
		return nil, nil
	}
	return parse(spec)
}
| kaufers/infrakit | pkg/run/depends/depends.go | GO | apache-2.0 | 1,571 |
/*
* Copyright (c) 2010-2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved
* http://www.griddynamics.com
*
* This library is free software; you can redistribute it and/or modify it under the terms of
* the Apache License; either
* version 2.0 of the License, or any later version.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.griddynamics.jagger.dbapi.parameter;
import com.google.common.base.Objects;
/**
 * Value object pairing an "upper" display name with a "left" display name.
 * Instances are equal when both names are equal (null-safe).
 */
public class GroupKey {

    private String upperName;
    private String leftName;

    /** Uses the same name for both positions. */
    public GroupKey(String upperName) {
        this.upperName = upperName;
        this.leftName = upperName;
    }

    public GroupKey(String upperName, String leftName) {
        this.upperName = upperName;
        this.leftName = leftName;
    }

    public String getUpperName() {
        return upperName;
    }

    public String getLeftName() {
        return leftName;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        GroupKey groupKey = (GroupKey) o;

        // Null-safe field comparison; equivalent to the previous hand-rolled
        // null checks. Fully qualified to avoid clashing with the imported
        // com.google.common.base.Objects.
        return java.util.Objects.equals(upperName, groupKey.upperName)
                && java.util.Objects.equals(leftName, groupKey.leftName);
    }

    @Override
    public int hashCode() {
        // Same 31-based formula as before: Objects.hashCode(x) is
        // x == null ? 0 : x.hashCode(), so hash values are unchanged.
        int result = java.util.Objects.hashCode(upperName);
        return 31 * result + java.util.Objects.hashCode(leftName);
    }

    @Override
    public String toString() {
        return Objects.toStringHelper(this)
                .add("upperName", upperName)
                .add("leftName", leftName)
                .toString();
    }
}
| Nmishin/jagger | dbapi/src/main/java/com/griddynamics/jagger/dbapi/parameter/GroupKey.java | Java | apache-2.0 | 2,536 |
using Newtonsoft.Json.Linq;
using System;
namespace Kudu.Core.Functions
{
    /// <summary>
    /// Operations for reading and generating function-key JSON. Implementations
    /// handle the format differences noted in the member comments below.
    /// </summary>
    /// <typeparam name="T">Concrete key object produced by <see cref="GenerateKeyObject"/>.</typeparam>
    public interface IKeyJsonOps<T>
    {
        /// <summary>Number of keys expected when the key file is in the default format.</summary>
        int NumberOfKeysInDefaultFormat
        {
            get;
        }

        // key generation is based on run time
        /// <summary>Builds the key JSON for the given runtime; also returns the unencrypted key.</summary>
        string GenerateKeyJson(Tuple<string,string>[] keyPairs, string functionRt, out string unencryptedKey);

        // read existing key file based on the content format, not the run time version
        /// <summary>Extracts the key value from existing JSON and reports whether it is encrypted.</summary>
        string GetKeyValueFromJson(string json, out bool isEncrypted);

        /// <summary>Creates the typed key object for the given key and function name.</summary>
        T GenerateKeyObject(string functionKey, string functionName);
    }
}
| EricSten-MSFT/kudu | Kudu.Contracts/Functions/IKeyJsonOps.cs | C# | apache-2.0 | 611 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
import sys
import os
from datetime import date
# eventlet/gevent should not monkey patch anything.
os.environ["GEVENT_NOPATCH"] = "yes"
os.environ["EVENTLET_NOPATCH"] = "yes"
#os.environ["CELERY_LOADER"] = "default"
# Absolute directory containing this conf.py (the Sphinx source dir).
this = os.path.dirname(os.path.abspath(__file__))
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
# NOTE(review): this first entry is relative to the process CWD, not to the
# conf dir — `this` (computed above) is not used here, unlike the "_ext"
# entry below. Confirm whether os.path.join(this, os.pardir, "tests") was
# intended.
sys.path.append(os.path.join(os.pardir, "tests"))
sys.path.append(os.path.join(this, "_ext"))
#import celery
# General configuration
# ---------------------
# Sphinx extension modules loaded for this build.
# NOTE(review): sphinx.ext.pngmath was removed in Sphinx 1.8 in favor of
# sphinx.ext.imgmath — relevant only if the pinned Sphinx version is upgraded.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.pngmath',
    'sphinx.ext.intersphinx',
]
# Hide the "Created using Sphinx" footer line.
html_show_sphinx = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Apache Flume'
# Copyright year range tracks the build date automatically.
copyright = '2009-%s The Apache Software Foundation' % date.today().year
# Re-emit warnings on incremental builds instead of only the first time.
keep_warnings = True
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
#version = ".".join(map(str, celery.VERSION[0:2]))
# The full version, including alpha/beta/rc tags.
#release = celery.__version__
# Directories ignored when looking for source files.
exclude_trees = ['.build']
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
#intersphinx_mapping = {
#        "http://docs.python.org/dev": None,
#        "http://kombu.readthedocs.org/en/latest/": None,
#        "http://django-celery.readthedocs.org/en/latest": None,
#}
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# Default language for literal blocks with no explicit highlighting.
highlight_language = 'none'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['../resources/images']
html_logo = 'images/flume-logo.png'
# Convert quotes and dashes to typographically correct entities.
html_use_smartypants = True
# If false, no module index is generated.
# NOTE(review): html_use_modindex was deprecated in later Sphinx releases in
# favor of html_domain_indices — keep as-is for the pinned Sphinx version.
html_use_modindex = True
# If false, no index is generated.
html_use_index = True
#html_theme = 'default'
# Sidebar templates applied to every page ('**' glob).
html_sidebars = {
    '**': ['localtoc.html', 'relations.html', 'sourcelink.html'],
}
| tmgstevens/flume | flume-ng-doc/sphinx/conf.py | Python | apache-2.0 | 3,274 |