code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not
* distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
#include "CarbonEngine/Common.h"
#include "CarbonEngine/Core/CoreEvents.h"
#include "CarbonEngine/Core/EventManager.h"
#include "CarbonEngine/Globals.h"
#include "CarbonEngine/Render/EffectManager.h"
#include "CarbonEngine/Render/EffectQueue.h"
#include "CarbonEngine/Render/EffectQueueArray.h"
#include "CarbonEngine/Render/Font.h"
#include "CarbonEngine/Render/GeometryChunk.h"
#include "CarbonEngine/Scene/GeometryGather.h"
#include "CarbonEngine/Scene/Material.h"
#include "CarbonEngine/Scene/MaterialManager.h"
namespace Carbon
{
// Immediate triangles that get gathered are rendered by putting their geometry into the following geometry chunk and
// appending a draw item to it. This geometry chunk has its draw items cleared at the start of every frame which means
// it will grow in size as needed and never shrink back down, thus avoiding unnecessary allocations.
// Shared storage for all immediate triangles gathered in a frame. See the comment above for the reuse strategy.
static auto immediateTriangleGeometry = GeometryChunk();

// Number of immediate triangles written into immediateTriangleGeometry so far this frame, reset each frame below.
static auto immediateTriangleCount = 0U;

// Frame-begin handler that resets the per-frame immediate triangle state. Only the draw items and the counter are
// cleared, the vertex storage itself is retained so it can be reused without reallocating.
static bool onFrameBeginEvent(const FrameBeginEvent& e)
{
    immediateTriangleGeometry.clearDrawItems();
    immediateTriangleCount = 0;
    return true;
}
CARBON_REGISTER_EVENT_HANDLER_FUNCTION(FrameBeginEvent, onFrameBeginEvent)

// Shutdown handler that releases the immediate triangle geometry storage.
static void clearImmediateTriangleGeometry()
{
    immediateTriangleGeometry.clear();
}
CARBON_REGISTER_SHUTDOWN_FUNCTION(clearImmediateTriangleGeometry, 0)
// Constructs a gather for the given camera position and view frustum. Queues created during the gather are stored
// in the passed EffectQueueArray, which must outlive this object.
GeometryGather::GeometryGather(const Vec3& cameraPosition, const ConvexHull& frustum, bool isShadowGeometryGather,
                               EffectQueueArray& queues)
    : cameraPosition_(cameraPosition),
      frustum_(frustum),
      isShadowGeometryGather_(isShadowGeometryGather),
      scale_(Vec3::One),
      queues_(queues)
{
    // Reserve up front to avoid repeated reallocation as material queues are added during the gather
    materialQueueInfos_.reserve(1024);
}
GeometryGather::~GeometryGather()
{
    // Now that the gather has completed it is important to unlock the vertex data so it has an opportunity to be
    // uploaded to the graphics interface
    immediateTriangleGeometry.unlockVertexData();
}
// Makes the given material the active one for subsequent gathered geometry. An existing queue is reused when
// possible, otherwise a new queue is created via newMaterial().
void GeometryGather::changeMaterial(const String& material, const ParameterArray& materialOverrideParameters)
{
    currentQueue_ = nullptr;

    if (materialOverrideParameters.empty())
    {
        // Look for a reusable queue: same material, same priority, and free of both custom and internal parameters
        for (auto& candidate : materialQueueInfos_)
        {
            auto isReusable = candidate.queue->getPriority() == currentPriority_ &&
                candidate.material == material && !candidate.queue->hasCustomParams() &&
                !candidate.queue->getInternalParams().size();

            if (isReusable)
            {
                currentQueue_ = &candidate;
                currentQueue_->isTransformCurrent = false;
                return;
            }
        }
    }

    // No existing material queue can be used, so create a new one
    newMaterial(&materials().getMaterial(material), materialOverrideParameters);
}
// Creates a new effect queue for the given material at the current priority and makes it the current queue. Any
// override parameters are added to the queue as custom parameters, and internal parameters are passed through to
// queue creation.
void GeometryGather::newMaterial(Material* material, const ParameterArray& materialOverrideParameters,
                                 const ParameterArray& internalParams)
{
    // Create new material queue entry
    auto queue = queues_.create(currentPriority_, material->getEffect(), internalParams);
    materialQueueInfos_.emplace(material->getName(), queue);
    currentQueue_ = &materialQueueInfos_.back();

    material->update();
    material->setupEffectQueue(currentQueue_->queue);

    // If override parameters are specified then add them as custom parameters to this queue. Iterate by const
    // reference to avoid copying each parameter.
    if (!materialOverrideParameters.empty())
    {
        for (const auto& parameter : materialOverrideParameters)
            currentQueue_->queue->setCustomParameter(parameter.getLookup(), parameter.getValue());
    }
}
// Pushes a change-transform item onto the current queue if the gather's transform has not yet been applied to it.
// This lazily avoids redundant transform items when several draw items share one transform.
void GeometryGather::ensureTransformIsCurrent()
{
    if (!currentQueue_->isTransformCurrent)
    {
        currentQueue_->queue->getItems().addChangeTransformItem(transform_, scale_);
        currentQueue_->isTransformCurrent = true;
    }
}
// Queues one draw item of the given geometry chunk for rendering with the current material and transform.
void GeometryGather::addGeometryChunk(const GeometryChunk& geometryChunk, int drawItemIndex)
{
    ensureTransformIsCurrent();
    currentQueue_->queue->getItems().addDrawGeometryChunkItem(geometryChunk, drawItemIndex);
}
// Queues a rectangle of the given dimensions for rendering with the current material and transform.
void GeometryGather::addRectangle(float width, float height)
{
    ensureTransformIsCurrent();
    currentQueue_->queue->getItems().addDrawRectangleItem(width, height);
}
// Queues a text string for rendering with the built-in font material at the current transform. Does nothing for
// fonts that are not ready for use or for empty strings.
void GeometryGather::addText(const Font* font, float fontSize, const UnicodeString& text, const Color& color)
{
    if (font->isReadyForUse() && text.length())
    {
        changeMaterial("Font");
        ensureTransformIsCurrent();

        currentQueue_->queue->getItems().addDrawTextItem(font, fontSize, text, color);
    }
}
// Vertex layout for the immediate triangle geometry: position, diffuse texture coordinate, and a color packed into
// 32 bits. This layout must match the three vertex streams registered in GeometryGather::addImmediateTriangles().
struct ImmediateVertex
{
    Vec3 p;
    Vec2 st;
    unsigned int color = 0;
};
// Reserves space for the given number of immediate triangles in the shared geometry chunk and queues a draw item
// covering them. The triangle vertices themselves are filled in by subsequent addImmediateTriangle() calls.
void GeometryGather::addImmediateTriangles(unsigned int triangleCount)
{
    changeMaterial("ImmediateGeometry");

    // Expand the immediate triangles chunk if needed
    if ((immediateTriangleCount + triangleCount) * 3 > immediateTriangleGeometry.getVertexCount())
    {
        // The chunk must be unlocked and unregistered before its vertex layout or size can be altered
        immediateTriangleGeometry.unlockVertexData();
        immediateTriangleGeometry.unregisterWithRenderer();

        // First-time setup: dynamic chunk with position, texture coordinate and RGBA8 color streams
        if (immediateTriangleGeometry.getVertexCount() == 0)
        {
            immediateTriangleGeometry.setDynamic(true);
            immediateTriangleGeometry.addVertexStream({VertexStream::Position, 3});
            immediateTriangleGeometry.addVertexStream({VertexStream::DiffuseTextureCoordinate, 2});
            immediateTriangleGeometry.addVertexStream({VertexStream::Color, 4, TypeUInt8});
        }

        // Grow geometrically (at least doubling) so repeated gathers don't reallocate every frame
        auto initialVertexCount = immediateTriangleGeometry.getVertexCount();
        immediateTriangleGeometry.setVertexCount(
            std::max(immediateTriangleGeometry.getVertexCount() * 2,
                     immediateTriangleGeometry.getVertexCount() + triangleCount * 3));

        // Reset the new vertex data
        auto vertices = immediateTriangleGeometry.lockVertexData<ImmediateVertex>();
        for (auto i = initialVertexCount; i < immediateTriangleGeometry.getVertexCount(); i++)
            vertices[i] = ImmediateVertex();
        immediateTriangleGeometry.unlockVertexData();

        // Setup indices while preserving draw items. Vertices are drawn in order, so the index buffer is the
        // identity mapping.
        auto indices = Vector<unsigned int>(immediateTriangleGeometry.getVertexCount());
        for (auto i = 0U; i < indices.size(); i++)
            indices[i] = i;
        immediateTriangleGeometry.setupIndexData(immediateTriangleGeometry.getDrawItems(), indices);

        immediateTriangleGeometry.registerWithRenderer();
    }

    // Add a drawitem for these immediate triangles and queue it for rendering
    immediateTriangleGeometry.appendDrawItem(
        {GraphicsInterface::TriangleList, triangleCount * 3, immediateTriangleCount * 3});
    addGeometryChunk(immediateTriangleGeometry, immediateTriangleGeometry.getDrawItems().size() - 1);
}
void GeometryGather::addImmediateTriangle(const Vec3& v0, const Vec3& v1, const Vec3& v2, const Color& color)
{
if (!immediateTriangleGeometry.isVertexDataLocked())
immediateTriangleGeometry.lockVertexData();
assert(immediateTriangleGeometry.isVertexDataLocked());
auto vertices =
immediateTriangleGeometry.getLockedVertexDataPointer<ImmediateVertex>() + immediateTriangleCount * 3;
vertices[0].p = v0;
vertices[0].color = color.toRGBA8();
vertices[1].p = v1;
vertices[1].color = vertices[0].color;
vertices[2].p = v2;
vertices[2].color = vertices[0].color;
immediateTriangleCount++;
}
}
| savant-nz/carbon | Source/CarbonEngine/Scene/GeometryGather.cpp | C++ | mpl-2.0 | 7,690 |
/**
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* Copyright (C) 2010, 2011, 2012, 2013 Pyravlos Team
*
* http://www.strabon.di.uoa.gr/
*/
package org.openrdf.sail.postgis.evaluation;
import java.sql.Types;
import org.openrdf.sail.generaldb.evaluation.GeneralDBQueryBuilderFactory;
import org.openrdf.sail.generaldb.evaluation.GeneralDBSqlCastBuilder;
import org.openrdf.sail.generaldb.evaluation.GeneralDBSqlExprBuilder;
/**
 * Builds a SQL {@code CAST(expression AS type)} fragment for PostGIS. The constructor opens the
 * {@code CAST(} prefix; the expression is then appended via the inherited builder methods, and
 * {@link #close()} emits the target type, closes the parenthesis, and flushes the result into the
 * enclosing expression builder.
 */
public class PostGISSqlCastBuilder extends PostGISSqlExprBuilder implements GeneralDBSqlCastBuilder {
// Enclosing builder that receives the completed CAST fragment on close()
protected GeneralDBSqlExprBuilder where;
// Target SQL type as a java.sql.Types constant
protected int jdbcType;
public PostGISSqlCastBuilder(GeneralDBSqlExprBuilder where, GeneralDBQueryBuilderFactory factory, int jdbcType) {
super(factory);
this.where = where;
this.jdbcType = jdbcType;
append(" CAST(");
}
/**
 * Completes the CAST expression and appends it, along with any accumulated parameters, to the
 * enclosing builder.
 *
 * @return the enclosing expression builder, for chaining
 */
public GeneralDBSqlExprBuilder close() {
append(" AS ");
append(getSqlType(jdbcType));
append(")");
where.append(toSql());
where.addParameters(getParameters());
return where;
}
/**
 * Maps a java.sql.Types constant to its SQL type name. Only VARCHAR is supported; any other
 * type is a programming error and triggers an AssertionError.
 */
protected CharSequence getSqlType(int type) {
switch (type) {
case Types.VARCHAR:
return "VARCHAR";
default:
throw new AssertionError(type);
}
}
}
| wx1988/strabon | postgis/src/main/java/org/openrdf/sail/postgis/evaluation/PostGISSqlCastBuilder.java | Java | mpl-2.0 | 1,330 |
package terraform
import (
"bytes"
"errors"
"fmt"
"os"
"reflect"
"sort"
"strings"
"sync"
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/google/go-cmp/cmp"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/configs/configschema"
"github.com/hashicorp/terraform/internal/configs/hcl2shim"
"github.com/hashicorp/terraform/internal/lang/marks"
"github.com/hashicorp/terraform/internal/plans"
"github.com/hashicorp/terraform/internal/providers"
"github.com/hashicorp/terraform/internal/provisioners"
"github.com/hashicorp/terraform/internal/states"
"github.com/hashicorp/terraform/internal/tfdiags"
)
// TestContext2Plan_basic plans a simple two-resource configuration and verifies the planned
// attribute values, that ProviderSHA256s are carried through to the plan, that no state is
// created, and that provider config validation ran.
func TestContext2Plan_basic(t *testing.T) {
	m := testModule(t, "plan-good")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		ProviderSHA256s: map[string][]byte{
			"aws": []byte("placeholder"),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	// The condition fails when there are fewer than two planned resources, so the message must
	// ask for "at least two" (the original message said "fewer than two", contradicting the check).
	if l := len(plan.Changes.Resources); l < 2 {
		t.Fatalf("wrong number of resources %d; want at least two\n%s", l, spew.Sdump(plan.Changes.Resources))
	}

	if !reflect.DeepEqual(plan.ProviderSHA256s, ctx.providerSHA256s) {
		t.Errorf("wrong ProviderSHA256s %#v; want %#v", plan.ProviderSHA256s, ctx.providerSHA256s)
	}

	if !ctx.State().Empty() {
		t.Fatalf("expected empty state, got %#v\n", ctx.State())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	for _, r := range plan.Changes.Resources {
		ric, err := r.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			foo := ric.After.GetAttr("foo").AsString()
			if foo != "2" {
				t.Fatalf("incorrect plan for 'bar': %#v", ric.After)
			}
		case "aws_instance.foo":
			num, _ := ric.After.GetAttr("num").AsBigFloat().Int64()
			if num != 2 {
				t.Fatalf("incorrect plan for 'foo': %#v", ric.After)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}

	if !p.ValidateProviderConfigCalled {
		t.Fatal("provider config was not checked before Configure")
	}
}
// TestContext2Plan_createBefore_deposed plans against state containing both a current and a
// deposed object for aws_instance.foo and verifies that the plan keeps the current object
// (NoOp) while scheduling the deposed object for deletion.
// NOTE(review): the indentation inside the expectedState raw string below was reconstructed
// from Terraform's state String() format — confirm against the upstream file.
func TestContext2Plan_createBefore_deposed(t *testing.T) {
	m := testModule(t, "plan-cbd")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn

	// Seed state: one ready current object plus one deposed object for the same address
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"baz","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceDeposed(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		states.DeposedKey("00000001"),
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"foo"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	// the state should still show one deposed
	expectedState := strings.TrimSpace(`
aws_instance.foo: (1 deposed)
  ID = baz
  provider = provider["registry.terraform.io/hashicorp/aws"]
  type = aws_instance
  Deposed ID 1 = foo`)

	if ctx.State().String() != expectedState {
		t.Fatalf("\nexpected: %q\ngot:      %q\n", expectedState, ctx.State().String())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	// Key type that distinguishes the current object (empty DeposedKey) from the deposed one
	type InstanceGen struct {
		Addr       string
		DeposedKey states.DeposedKey
	}
	want := map[InstanceGen]bool{
		{
			Addr: "aws_instance.foo",
		}: true,
		{
			Addr:       "aws_instance.foo",
			DeposedKey: states.DeposedKey("00000001"),
		}: true,
	}
	got := make(map[InstanceGen]bool)
	changes := make(map[InstanceGen]*plans.ResourceInstanceChangeSrc)

	for _, change := range plan.Changes.Resources {
		k := InstanceGen{
			Addr:       change.Addr.String(),
			DeposedKey: change.DeposedKey,
		}
		got[k] = true
		changes[k] = change
	}
	if !reflect.DeepEqual(got, want) {
		t.Fatalf("wrong resource instance object changes in plan\ngot: %s\nwant: %s", spew.Sdump(got), spew.Sdump(want))
	}

	{
		ric, err := changes[InstanceGen{Addr: "aws_instance.foo"}].Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		if got, want := ric.Action, plans.NoOp; got != want {
			t.Errorf("current object change action is %s; want %s", got, want)
		}

		// the existing instance should only have an unchanged id
		expected, err := schema.CoerceValue(cty.ObjectVal(map[string]cty.Value{
			"id":   cty.StringVal("baz"),
			"type": cty.StringVal("aws_instance"),
		}))
		if err != nil {
			t.Fatal(err)
		}

		checkVals(t, expected, ric.After)
	}

	{
		ric, err := changes[InstanceGen{Addr: "aws_instance.foo", DeposedKey: states.DeposedKey("00000001")}].Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		if got, want := ric.Action, plans.Delete; got != want {
			t.Errorf("deposed object change action is %s; want %s", got, want)
		}
	}
}
// TestContext2Plan_createBefore_maintainRoot plans a create_before_destroy configuration from
// empty state and verifies all four planned resources are plain Create actions.
func TestContext2Plan_createBefore_maintainRoot(t *testing.T) {
	m := testModule(t, "plan-cbd-maintain-root")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	if !ctx.State().Empty() {
		t.Fatal("expected empty state, got:", ctx.State())
	}

	if len(plan.Changes.Resources) != 4 {
		t.Error("expected 4 resource in plan, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		// these should all be creates
		if res.Action != plans.Create {
			t.Fatalf("unexpected action %s for %s", res.Action, res.Addr.String())
		}
	}
}
// TestContext2Plan_emptyDiff plans two resources with a pass-through PlanResourceChangeFn and
// verifies both are planned as Create actions with no state produced.
func TestContext2Plan_emptyDiff(t *testing.T) {
	m := testModule(t, "plan-empty")
	p := testProvider("aws")
	// Provider echoes the proposed new state back unchanged
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
		resp.PlannedState = req.ProposedNewState
		return resp
	}

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	if !ctx.State().Empty() {
		t.Fatal("expected empty state, got:", ctx.State())
	}

	if len(plan.Changes.Resources) != 2 {
		t.Error("expected 2 resource in plan, got", len(plan.Changes.Resources))
	}

	actions := map[string]plans.Action{}

	for _, res := range plan.Changes.Resources {
		actions[res.Addr.String()] = res.Action
	}

	expected := map[string]plans.Action{
		"aws_instance.foo": plans.Create,
		"aws_instance.bar": plans.Create,
	}
	if !cmp.Equal(expected, actions) {
		t.Fatal(cmp.Diff(expected, actions))
	}
}
// TestContext2Plan_escapedVar verifies that an escaped interpolation sequence in configuration
// ("$${baz}") is planned as the literal string "bar-${baz}" rather than being evaluated.
func TestContext2Plan_escapedVar(t *testing.T) {
	m := testModule(t, "plan-escaped-var")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	if len(plan.Changes.Resources) != 1 {
		t.Error("expected 1 resource in plan, got", len(plan.Changes.Resources))
	}

	res := plan.Changes.Resources[0]
	if res.Action != plans.Create {
		t.Fatalf("expected resource creation, got %s", res.Action)
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	ric, err := res.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}

	expected := objectVal(t, schema, map[string]cty.Value{
		"id":   cty.UnknownVal(cty.String),
		"foo":  cty.StringVal("bar-${baz}"),
		"type": cty.UnknownVal(cty.String),
	})

	checkVals(t, expected, ric.After)
}
// TestContext2Plan_minimal plans the plan-empty fixture with a default provider (no custom plan
// function) and verifies both resources are planned as Create actions with no state produced.
func TestContext2Plan_minimal(t *testing.T) {
	m := testModule(t, "plan-empty")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	if !ctx.State().Empty() {
		t.Fatal("expected empty state, got:", ctx.State())
	}

	if len(plan.Changes.Resources) != 2 {
		t.Error("expected 2 resource in plan, got", len(plan.Changes.Resources))
	}

	actions := map[string]plans.Action{}

	for _, res := range plan.Changes.Resources {
		actions[res.Addr.String()] = res.Action
	}

	expected := map[string]plans.Action{
		"aws_instance.foo": plans.Create,
		"aws_instance.bar": plans.Create,
	}
	if !cmp.Equal(expected, actions) {
		t.Fatal(cmp.Diff(expected, actions))
	}
}
// TestContext2Plan_modules plans a configuration with a child module and verifies the planned
// values for the two root resources and the child module resource.
func TestContext2Plan_modules(t *testing.T) {
	m := testModule(t, "plan-modules")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	if len(plan.Changes.Resources) != 3 {
		t.Error("expected 3 resource in plan, got", len(plan.Changes.Resources))
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	expectFoo := objectVal(t, schema, map[string]cty.Value{
		"id":   cty.UnknownVal(cty.String),
		"foo":  cty.StringVal("2"),
		"type": cty.UnknownVal(cty.String),
	})

	expectNum := objectVal(t, schema, map[string]cty.Value{
		"id":   cty.UnknownVal(cty.String),
		"num":  cty.NumberIntVal(2),
		"type": cty.UnknownVal(cty.String),
	})

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		var expected cty.Value
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			expected = expectFoo
		case "aws_instance.foo":
			expected = expectNum
		case "module.child.aws_instance.foo":
			expected = expectNum
		default:
			t.Fatal("unknown instance:", i)
		}

		checkVals(t, expected, ric.After)
	}
}
// TestContext2Plan_moduleExpand verifies plan-time expansion: count and for_each on modules and
// resources produce exactly the expected set of instance addresses, all as Create actions.
func TestContext2Plan_moduleExpand(t *testing.T) {
	// Test a smattering of plan expansion behavior
	m := testModule(t, "plan-modules-expand")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	// Every expected instance address; entries are removed as they are seen so leftovers are missing
	expected := map[string]struct{}{
		`aws_instance.foo["a"]`:                          struct{}{},
		`module.count_child[1].aws_instance.foo[0]`:      struct{}{},
		`module.count_child[1].aws_instance.foo[1]`:      struct{}{},
		`module.count_child[0].aws_instance.foo[0]`:      struct{}{},
		`module.count_child[0].aws_instance.foo[1]`:      struct{}{},
		`module.for_each_child["a"].aws_instance.foo[1]`: struct{}{},
		`module.for_each_child["a"].aws_instance.foo[0]`: struct{}{},
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		_, ok := expected[ric.Addr.String()]
		if !ok {
			t.Fatal("unexpected resource:", ric.Addr.String())
		}
		delete(expected, ric.Addr.String())
	}

	for addr := range expected {
		t.Error("missing resource", addr)
	}
}
// GH-1475
// TestContext2Plan_moduleCycle plans the plan-module-cycle fixture with a custom aws_instance
// schema and verifies the two planned resources (aws_instance.b and aws_instance.c) are Creates
// with the expected unknown-valued attributes.
func TestContext2Plan_moduleCycle(t *testing.T) {
	m := testModule(t, "plan-module-cycle")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"id":         {Type: cty.String, Computed: true},
					"some_input": {Type: cty.String, Optional: true},
					"type":       {Type: cty.String, Computed: true},
				},
			},
		},
	})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		var expected cty.Value
		switch i := ric.Addr.String(); i {
		case "aws_instance.b":
			expected = objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			})
		case "aws_instance.c":
			expected = objectVal(t, schema, map[string]cty.Value{
				"id":         cty.UnknownVal(cty.String),
				"some_input": cty.UnknownVal(cty.String),
				"type":       cty.UnknownVal(cty.String),
			})
		default:
			t.Fatal("unknown instance:", i)
		}

		checkVals(t, expected, ric.After)
	}
}
// TestContext2Plan_moduleDeadlock runs a plan of a counted child-module configuration under
// testCheckDeadlock and verifies the three expected module instances are created.
func TestContext2Plan_moduleDeadlock(t *testing.T) {
	testCheckDeadlock(t, func() {
		m := testModule(t, "plan-module-deadlock")
		p := testProvider("aws")
		p.PlanResourceChangeFn = testDiffFn

		ctx := testContext2(t, &ContextOpts{
			Config: m,
			Providers: map[addrs.Provider]providers.Factory{
				addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
			},
		})

		// ctx.Plan returns tfdiags.Diagnostics, not an error; comparing it against nil (as the
		// original code did) would fail on warning-only diagnostics. Use HasErrors, matching
		// every other test in this file.
		plan, diags := ctx.Plan()
		if diags.HasErrors() {
			t.Fatalf("unexpected errors: %s", diags.Err())
		}

		schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
		ty := schema.ImpliedType()

		for _, res := range plan.Changes.Resources {
			if res.Action != plans.Create {
				t.Fatalf("expected resource creation, got %s", res.Action)
			}
			ric, err := res.Decode(ty)
			if err != nil {
				t.Fatal(err)
			}

			expected := objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			})

			switch i := ric.Addr.String(); i {
			case "module.child.aws_instance.foo[0]":
			case "module.child.aws_instance.foo[1]":
			case "module.child.aws_instance.foo[2]":
			default:
				t.Fatal("unknown instance:", i)
			}

			checkVals(t, expected, ric.After)
		}
	})
}
// TestContext2Plan_moduleInput verifies that a literal value passed into a child module's input
// variable shows up in the child resource's planned attributes ("42"), alongside the root
// resource's own value ("2").
func TestContext2Plan_moduleInput(t *testing.T) {
	m := testModule(t, "plan-module-input")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		var expected cty.Value
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			expected = objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("2"),
				"type": cty.UnknownVal(cty.String),
			})
		case "module.child.aws_instance.foo":
			expected = objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("42"),
				"type": cty.UnknownVal(cty.String),
			})
		default:
			t.Fatal("unknown instance:", i)
		}

		checkVals(t, expected, ric.After)
	}
}
// TestContext2Plan_moduleInputComputed verifies that a computed (not-yet-known) value passed
// into a child module input is planned as an unknown value on both sides of the module boundary.
func TestContext2Plan_moduleInputComputed(t *testing.T) {
	m := testModule(t, "plan-module-input-computed")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":      cty.UnknownVal(cty.String),
				"foo":     cty.UnknownVal(cty.String),
				"type":    cty.UnknownVal(cty.String),
				"compute": cty.StringVal("foo"),
			}), ric.After)
		case "module.child.aws_instance.foo":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_moduleInputFromVar verifies that a root input variable supplied via
// ContextOpts.Variables ("52") flows through to a child module resource's planned attributes.
func TestContext2Plan_moduleInputFromVar(t *testing.T) {
	m := testModule(t, "plan-module-input-var")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Variables: InputValues{
			"foo": &InputValue{
				Value:      cty.StringVal("52"),
				SourceType: ValueFromCaller,
			},
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("2"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.child.aws_instance.foo":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("52"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_moduleMultiVar plans a configuration where a child module consumes a
// multi-valued (splat) variable, and verifies all five planned instances, including the joined
// "baz,baz" value on module.child.aws_instance.foo.
func TestContext2Plan_moduleMultiVar(t *testing.T) {
	m := testModule(t, "plan-module-multi-var")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"id":  {Type: cty.String, Computed: true},
					"foo": {Type: cty.String, Optional: true},
					"baz": {Type: cty.String, Optional: true},
				},
			},
		},
	})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 5 {
		t.Fatal("expected 5 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		switch i := ric.Addr.String(); i {
		case "aws_instance.parent[0]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.parent[1]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.child.aws_instance.bar[0]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":  cty.UnknownVal(cty.String),
				"baz": cty.StringVal("baz"),
			}), ric.After)
		case "module.child.aws_instance.bar[1]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":  cty.UnknownVal(cty.String),
				"baz": cty.StringVal("baz"),
			}), ric.After)
		case "module.child.aws_instance.foo":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":  cty.UnknownVal(cty.String),
				"foo": cty.StringVal("baz,baz"),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_moduleOrphans verifies that a resource living in a module which has been
// removed from configuration is planned for deletion, while the root resource is planned as a
// Create, and that the prior state is not modified by planning.
// NOTE(review): the indentation inside the expectedState raw string below was reconstructed
// from Terraform's state String() format — confirm against the upstream file.
func TestContext2Plan_moduleOrphans(t *testing.T) {
	m := testModule(t, "plan-modules-remove")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn

	// Seed state with a resource in module.child, which the config no longer declares
	state := states.NewState()
	child := state.EnsureModule(addrs.RootModuleInstance.Child("child", addrs.NoKey))
	child.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"baz"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}

		switch i := ric.Addr.String(); i {
		case "aws_instance.foo":
			if res.Action != plans.Create {
				t.Fatalf("expected resource creation, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"num":  cty.NumberIntVal(2),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.child.aws_instance.foo":
			if res.Action != plans.Delete {
				t.Fatalf("expected resource delete, got %s", res.Action)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}

	// Planning must not have altered the prior state
	expectedState := `<no state>
module.child:
  aws_instance.foo:
    ID = baz
    provider = provider["registry.terraform.io/hashicorp/aws"]`

	if ctx.State().String() != expectedState {
		t.Fatalf("\nexpected state: %q\n\ngot: %q", expectedState, ctx.State().String())
	}
}
// https://github.com/hashicorp/terraform/issues/3114
// TestContext2Plan_moduleOrphansWithProvisioner verifies that orphaned
// resources in nested modules are planned for deletion even when they have
// provisioners attached, while the still-configured root resource is a NoOp.
func TestContext2Plan_moduleOrphansWithProvisioner(t *testing.T) {
	m := testModule(t, "plan-modules-remove-provisioners")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	pr := testProvisioner()
	// State: one root instance still in config, plus two orphaned instances
	// in the nested modules module.parent.child1 and module.parent.child2.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.top").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"top","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	child1 := state.EnsureModule(addrs.RootModuleInstance.Child("parent", addrs.NoKey).Child("child1", addrs.NoKey))
	child1.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"baz","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	child2 := state.EnsureModule(addrs.RootModuleInstance.Child("parent", addrs.NoKey).Child("child2", addrs.NoKey))
	child2.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"baz","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Provisioners: map[string]provisioners.Factory{
			"shell": testProvisionerFuncFixed(pr),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	// Three planned changes expected: two deletes (orphans) plus the NoOp.
	if len(plan.Changes.Resources) != 3 {
		t.Error("expected 3 planned resources, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "module.parent.module.child1.aws_instance.foo":
			if res.Action != plans.Delete {
				t.Fatalf("expected resource Delete, got %s", res.Action)
			}
		case "module.parent.module.child2.aws_instance.foo":
			if res.Action != plans.Delete {
				t.Fatalf("expected resource Delete, got %s", res.Action)
			}
		case "aws_instance.top":
			if res.Action != plans.NoOp {
				t.Fatalf("expected no changes, got %s", res.Action)
			}
		default:
			t.Fatalf("unknown instance: %s\nafter: %#v", i, hcl2shim.ConfigValueFromHCL2(ric.After))
		}
	}
	// Planning must leave the prior state intact.
	expectedState := `aws_instance.top:
  ID = top
  provider = provider["registry.terraform.io/hashicorp/aws"]
  type = aws_instance

module.parent.child1:
  aws_instance.foo:
    ID = baz
    provider = provider["registry.terraform.io/hashicorp/aws"]
    type = aws_instance
module.parent.child2:
  aws_instance.foo:
    ID = baz
    provider = provider["registry.terraform.io/hashicorp/aws"]
    type = aws_instance`
	if expectedState != ctx.State().String() {
		t.Fatalf("\nexpect state:\n%s\n\ngot state:\n%s\n", expectedState, ctx.State().String())
	}
}
// TestContext2Plan_moduleProviderInherit verifies that a child module with no
// explicit provider block inherits the root module's "aws" provider
// configuration: ConfigureProvider asserts it always receives from = "root",
// and PlanResourceChange records the "from" value it sees per resource so the
// test can confirm both the root and child resources were planned.
func TestContext2Plan_moduleProviderInherit(t *testing.T) {
	var l sync.Mutex
	var calls []string
	m := testModule(t, "plan-module-provider-inherit")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			// The factory is invoked once per provider configuration; the
			// mutex guards `calls`, which is shared across instances.
			addrs.NewDefaultProvider("aws"): func() (providers.Interface, error) {
				l.Lock()
				defer l.Unlock()
				p := testProvider("aws")
				p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
					Provider: &configschema.Block{
						Attributes: map[string]*configschema.Attribute{
							"from": {Type: cty.String, Optional: true},
						},
					},
					ResourceTypes: map[string]*configschema.Block{
						"aws_instance": {
							Attributes: map[string]*configschema.Attribute{
								"from": {Type: cty.String, Optional: true},
							},
						},
					},
				})
				p.ConfigureProviderFn = func(req providers.ConfigureProviderRequest) (resp providers.ConfigureProviderResponse) {
					// The inherited configuration must always come from root.
					from := req.Config.GetAttr("from")
					if from.IsNull() || from.AsString() != "root" {
						resp.Diagnostics = resp.Diagnostics.Append(fmt.Errorf("not root"))
					}
					return
				}
				p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
					from := req.Config.GetAttr("from").AsString()
					l.Lock()
					defer l.Unlock()
					calls = append(calls, from)
					return testDiffFn(req)
				}
				return p, nil
			},
		},
	})
	// Plan returns tfdiags.Diagnostics, not error. Checking `err != nil`
	// against a Diagnostics value would also trip on warning-only
	// diagnostics, so check HasErrors() like the rest of this file.
	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	// Both the root and the child resource must have been planned.
	actual := calls
	sort.Strings(actual)
	expected := []string{"child", "root"}
	if !reflect.DeepEqual(actual, expected) {
		t.Fatalf("bad: %#v", actual)
	}
}
// This tests (for GH-11282) that deeply nested modules properly inherit
// configuration.
// TestContext2Plan_moduleProviderInheritDeep tests (for GH-11282) that deeply
// nested modules properly inherit the root provider configuration:
// ConfigureProvider asserts it always receives from = "root", and
// PlanResourceChange fails if the captured value was anything else.
func TestContext2Plan_moduleProviderInheritDeep(t *testing.T) {
	var l sync.Mutex
	m := testModule(t, "plan-module-provider-inherit-deep")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): func() (providers.Interface, error) {
				l.Lock()
				defer l.Unlock()
				// `from` is captured per provider instance: set during
				// configure, checked during plan.
				var from string
				p := testProvider("aws")
				p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
					Provider: &configschema.Block{
						Attributes: map[string]*configschema.Attribute{
							"from": {Type: cty.String, Optional: true},
						},
					},
					ResourceTypes: map[string]*configschema.Block{
						"aws_instance": {
							Attributes: map[string]*configschema.Attribute{},
						},
					},
				})
				p.ConfigureProviderFn = func(req providers.ConfigureProviderRequest) (resp providers.ConfigureProviderResponse) {
					v := req.Config.GetAttr("from")
					if v.IsNull() || v.AsString() != "root" {
						resp.Diagnostics = resp.Diagnostics.Append(fmt.Errorf("not root"))
					}
					from = v.AsString()
					return
				}
				p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
					if from != "root" {
						resp.Diagnostics = resp.Diagnostics.Append(fmt.Errorf("bad resource"))
						return
					}
					return testDiffFn(req)
				}
				return p, nil
			},
		},
	})
	// Plan returns tfdiags.Diagnostics, not error; use HasErrors() so
	// warning-only diagnostics don't fail the test spuriously.
	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
}
// TestContext2Plan_moduleProviderDefaultsVar checks that provider
// configuration values sourced from variables flow correctly to both the root
// provider configuration and a child module's override: ConfigureProvider
// records the "from"/"to" attributes it receives, and the test asserts the
// root ("root\n") and child ("child\nchild\n") configurations both occurred.
func TestContext2Plan_moduleProviderDefaultsVar(t *testing.T) {
	var l sync.Mutex
	var calls []string
	m := testModule(t, "plan-module-provider-defaults-var")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): func() (providers.Interface, error) {
				l.Lock()
				defer l.Unlock()
				p := testProvider("aws")
				p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
					Provider: &configschema.Block{
						Attributes: map[string]*configschema.Attribute{
							"to":   {Type: cty.String, Optional: true},
							"from": {Type: cty.String, Optional: true},
						},
					},
					ResourceTypes: map[string]*configschema.Block{
						"aws_instance": {
							Attributes: map[string]*configschema.Attribute{
								"from": {Type: cty.String, Optional: true},
							},
						},
					},
				})
				p.ConfigureProviderFn = func(req providers.ConfigureProviderRequest) (resp providers.ConfigureProviderResponse) {
					// Record the non-null attributes seen by this configure
					// call, one line per attribute.
					var buf bytes.Buffer
					from := req.Config.GetAttr("from")
					if !from.IsNull() {
						buf.WriteString(from.AsString() + "\n")
					}
					to := req.Config.GetAttr("to")
					if !to.IsNull() {
						buf.WriteString(to.AsString() + "\n")
					}
					l.Lock()
					defer l.Unlock()
					calls = append(calls, buf.String())
					return
				}
				return p, nil
			},
		},
		Variables: InputValues{
			"foo": &InputValue{
				Value:      cty.StringVal("root"),
				SourceType: ValueFromCaller,
			},
		},
	})
	// Plan returns tfdiags.Diagnostics, not error; use HasErrors() so
	// warning-only diagnostics don't fail the test spuriously.
	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	expected := []string{
		"child\nchild\n",
		"root\n",
	}
	// Configure order is nondeterministic; sort before comparing.
	sort.Strings(calls)
	if !reflect.DeepEqual(calls, expected) {
		t.Fatalf("expected:\n%#v\ngot:\n%#v\n", expected, calls)
	}
}
func TestContext2Plan_moduleProviderVar(t *testing.T) {
m := testModule(t, "plan-module-provider-var")
p := testProvider("aws")
p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
Provider: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true},
},
},
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
ty := schema.ImpliedType()
if len(plan.Changes.Resources) != 1 {
t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
}
for _, res := range plan.Changes.Resources {
if res.Action != plans.Create {
t.Fatalf("expected resource creation, got %s", res.Action)
}
ric, err := res.Decode(ty)
if err != nil {
t.Fatal(err)
}
switch i := ric.Addr.String(); i {
case "module.child.aws_instance.test":
checkVals(t, objectVal(t, schema, map[string]cty.Value{
"value": cty.StringVal("hello"),
}), ric.After)
default:
t.Fatal("unknown instance:", i)
}
}
}
func TestContext2Plan_moduleVar(t *testing.T) {
m := testModule(t, "plan-module-var")
p := testProvider("aws")
p.PlanResourceChangeFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
ty := schema.ImpliedType()
if len(plan.Changes.Resources) != 2 {
t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
}
for _, res := range plan.Changes.Resources {
if res.Action != plans.Create {
t.Fatalf("expected resource creation, got %s", res.Action)
}
ric, err := res.Decode(ty)
if err != nil {
t.Fatal(err)
}
var expected cty.Value
switch i := ric.Addr.String(); i {
case "aws_instance.bar":
expected = objectVal(t, schema, map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"foo": cty.StringVal("2"),
"type": cty.UnknownVal(cty.String),
})
case "module.child.aws_instance.foo":
expected = objectVal(t, schema, map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"num": cty.NumberIntVal(2),
"type": cty.UnknownVal(cty.String),
})
default:
t.Fatal("unknown instance:", i)
}
checkVals(t, expected, ric.After)
}
}
func TestContext2Plan_moduleVarWrongTypeBasic(t *testing.T) {
m := testModule(t, "plan-module-wrong-var-type")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
})
_, diags := ctx.Plan()
if !diags.HasErrors() {
t.Fatalf("succeeded; want errors")
}
}
func TestContext2Plan_moduleVarWrongTypeNested(t *testing.T) {
m := testModule(t, "plan-module-wrong-var-type-nested")
p := testProvider("null")
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("null"): testProviderFuncFixed(p),
},
})
_, diags := ctx.Plan()
if !diags.HasErrors() {
t.Fatalf("succeeded; want errors")
}
}
func TestContext2Plan_moduleVarWithDefaultValue(t *testing.T) {
m := testModule(t, "plan-module-var-with-default-value")
p := testProvider("null")
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("null"): testProviderFuncFixed(p),
},
})
_, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
}
// TestContext2Plan_moduleVarComputed ensures that a computed (unknown) value
// can flow through a module boundary: both the root and child instances plan
// as creations, with unknown values propagated into their attributes.
func TestContext2Plan_moduleVarComputed(t *testing.T) {
	m := testModule(t, "plan-module-var-computed")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.child.aws_instance.foo":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":      cty.UnknownVal(cty.String),
				"foo":     cty.UnknownVal(cty.String),
				"type":    cty.UnknownVal(cty.String),
				"compute": cty.StringVal("foo"),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_preventDestroy_bad verifies that planning a change that
// would destroy a resource marked lifecycle.prevent_destroy fails with an
// error naming the protected instance.
func TestContext2Plan_preventDestroy_bad(t *testing.T) {
	m := testModule(t, "plan-prevent-destroy-bad")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-abc123"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})

	// Plan returns tfdiags.Diagnostics; format the combined error the same
	// way as the destroy-plan variant of this test does.
	plan, diags := ctx.Plan()

	expectedErr := "aws_instance.foo has lifecycle.prevent_destroy"
	if !strings.Contains(fmt.Sprintf("%s", diags.Err()), expectedErr) {
		if plan != nil {
			// t.Log, not t.Logf: the diff string is data, not a format
			// string, and may contain '%' characters (vet: printf check).
			t.Log(legacyDiffComparisonString(plan.Changes))
		}
		t.Fatalf("expected err would contain %q\nerr: %s", expectedErr, diags.Err())
	}
}
func TestContext2Plan_preventDestroy_good(t *testing.T) {
m := testModule(t, "plan-prevent-destroy-good")
p := testProvider("aws")
p.PlanResourceChangeFn = testDiffFn
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
mustResourceInstanceAddr("aws_instance.foo").Resource,
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"i-abc123","type":"aws_instance"}`),
},
mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
)
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
State: state,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
if !plan.Changes.Empty() {
t.Fatalf("expected no changes, got %#v\n", plan.Changes)
}
}
// TestContext2Plan_preventDestroy_countBad verifies that prevent_destroy is
// enforced per-instance for counted resources: shrinking the count so that
// instance [1] would be destroyed must fail with an error naming it.
func TestContext2Plan_preventDestroy_countBad(t *testing.T) {
	m := testModule(t, "plan-prevent-destroy-count-bad")
	p := testProvider("aws")
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-abc123"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-abc345"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})

	// Plan returns tfdiags.Diagnostics; format the combined error the same
	// way as the destroy-plan variant of this test does.
	plan, diags := ctx.Plan()

	expectedErr := "aws_instance.foo[1] has lifecycle.prevent_destroy"
	if !strings.Contains(fmt.Sprintf("%s", diags.Err()), expectedErr) {
		if plan != nil {
			// t.Log, not t.Logf: the diff string is data, not a format
			// string, and may contain '%' characters (vet: printf check).
			t.Log(legacyDiffComparisonString(plan.Changes))
		}
		t.Fatalf("expected err would contain %q\nerr: %s", expectedErr, diags.Err())
	}
}
func TestContext2Plan_preventDestroy_countGood(t *testing.T) {
m := testModule(t, "plan-prevent-destroy-count-good")
p := testProvider("aws")
p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"current": {Type: cty.String, Optional: true},
"id": {Type: cty.String, Computed: true},
},
},
},
})
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"i-abc123"}`),
},
mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
)
root.SetResourceInstanceCurrent(
mustResourceInstanceAddr("aws_instance.foo[1]").Resource,
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"i-abc345"}`),
},
mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
)
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
State: state,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
if plan.Changes.Empty() {
t.Fatalf("Expected non-empty plan, got %s", legacyDiffComparisonString(plan.Changes))
}
}
// TestContext2Plan_preventDestroy_countGoodNoChange verifies that when a
// counted resource with prevent_destroy already matches its configuration,
// the plan is empty and no prevent_destroy error is raised.
func TestContext2Plan_preventDestroy_countGoodNoChange(t *testing.T) {
	m := testModule(t, "plan-prevent-destroy-count-good")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"current": {Type: cty.String, Optional: true},
					"type":    {Type: cty.String, Optional: true, Computed: true},
					"id":      {Type: cty.String, Computed: true},
				},
			},
		},
	})
	// Instance [0] exists with attributes already matching the config, so
	// planning should produce no changes for it.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-abc123","current":"0","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	if !plan.Changes.Empty() {
		t.Fatalf("Expected empty plan, got %s", legacyDiffComparisonString(plan.Changes))
	}
}
// TestContext2Plan_preventDestroy_destroyPlan verifies that a destroy-mode
// plan refuses to destroy a resource marked lifecycle.prevent_destroy, and
// that the error names the protected instance.
func TestContext2Plan_preventDestroy_destroyPlan(t *testing.T) {
	m := testModule(t, "plan-prevent-destroy-good")
	p := testProvider("aws")
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-abc123"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
		// Destroy mode makes every resource a destroy candidate, so the
		// prevent_destroy check must reject the whole plan.
		PlanMode: plans.DestroyMode,
	})
	plan, diags := ctx.Plan()

	expectedErr := "aws_instance.foo has lifecycle.prevent_destroy"
	if !strings.Contains(fmt.Sprintf("%s", diags.Err()), expectedErr) {
		if plan != nil {
			// t.Log, not t.Logf: the diff string is data, not a format
			// string, and may contain '%' characters (vet: printf check).
			t.Log(legacyDiffComparisonString(plan.Changes))
		}
		t.Fatalf("expected err would contain %q\nerr: %s", expectedErr, diags.Err())
	}
}
func TestContext2Plan_provisionerCycle(t *testing.T) {
m := testModule(t, "plan-provisioner-cycle")
p := testProvider("aws")
pr := testProvisioner()
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
Provisioners: map[string]provisioners.Factory{
"local-exec": testProvisionerFuncFixed(pr),
},
})
_, diags := ctx.Plan()
if !diags.HasErrors() {
t.Fatalf("succeeded; want errors")
}
}
// TestContext2Plan_computed verifies planning with computed attributes: both
// instances plan as creations, with unknown values for computed attributes
// and for attributes derived from them.
func TestContext2Plan_computed(t *testing.T) {
	m := testModule(t, "plan-computed")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			// "foo" references aws_instance.foo's computed value, so it is
			// unknown at plan time.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":      cty.UnknownVal(cty.String),
				"foo":     cty.UnknownVal(cty.String),
				"num":     cty.NumberIntVal(2),
				"type":    cty.UnknownVal(cty.String),
				"compute": cty.StringVal("foo"),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_blockNestingGroup verifies schema NestingGroup semantics:
// even when the "blah" block is absent from configuration, the provider must
// receive a non-null object with null attributes for it.
func TestContext2Plan_blockNestingGroup(t *testing.T) {
	m := testModule(t, "plan-block-nesting-group")
	p := testProvider("test")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"test": {
				BlockTypes: map[string]*configschema.NestedBlock{
					"blah": {
						Nesting: configschema.NestingGroup,
						Block: configschema.Block{
							Attributes: map[string]*configschema.Attribute{
								"baz": {Type: cty.String, Required: true},
							},
						},
					},
				},
			},
		},
	})
	// Echo the proposed state back unchanged so the request contents are
	// exactly what the test inspects below.
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
		return providers.PlanResourceChangeResponse{
			PlannedState: req.ProposedNewState,
		}
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	if got, want := 1, len(plan.Changes.Resources); got != want {
		t.Fatalf("wrong number of planned resource changes %d; want %d\n%s", got, want, spew.Sdump(plan.Changes.Resources))
	}
	if !p.PlanResourceChangeCalled {
		t.Fatalf("PlanResourceChange was not called at all")
	}
	got := p.PlanResourceChangeRequest
	want := providers.PlanResourceChangeRequest{
		TypeName: "test",
		// Because block type "blah" is defined as NestingGroup, we get a non-null
		// value for it with null nested attributes, rather than the "blah" object
		// itself being null, when there's no "blah" block in the config at all.
		//
		// This represents the situation where the remote service _always_ creates
		// a single "blah", regardless of whether the block is present, but when
		// the block _is_ present the user can override some aspects of it. The
		// absence of the block means "use the defaults", in that case.
		Config: cty.ObjectVal(map[string]cty.Value{
			"blah": cty.ObjectVal(map[string]cty.Value{
				"baz": cty.NullVal(cty.String),
			}),
		}),
		ProposedNewState: cty.ObjectVal(map[string]cty.Value{
			"blah": cty.ObjectVal(map[string]cty.Value{
				"baz": cty.NullVal(cty.String),
			}),
		}),
	}
	if !cmp.Equal(got, want, valueTrans) {
		t.Errorf("wrong PlanResourceChange request\n%s", cmp.Diff(got, want, valueTrans))
	}
}
// TestContext2Plan_computedDataResource verifies that a data resource whose
// configuration depends on a managed resource's computed attribute is
// deferred: its planned "after" value is wholly unknown.
func TestContext2Plan_computedDataResource(t *testing.T) {
	m := testModule(t, "plan-computed-data-resource")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"num":     {Type: cty.String, Optional: true},
					"compute": {Type: cty.String, Optional: true},
					"foo":     {Type: cty.String, Computed: true},
				},
			},
		},
		DataSources: map[string]*configschema.Block{
			"aws_vpc": {
				Attributes: map[string]*configschema.Attribute{
					"foo": {Type: cty.String, Optional: true},
				},
			},
		},
	})
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.DataSources["aws_vpc"].Block
	ty := schema.ImpliedType()
	// Both the managed resource and the data resource must appear in the plan.
	if rc := plan.Changes.ResourceInstance(addrs.Resource{Mode: addrs.ManagedResourceMode, Type: "aws_instance", Name: "foo"}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance)); rc == nil {
		t.Fatalf("missing diff for aws_instance.foo")
	}
	rcs := plan.Changes.ResourceInstance(addrs.Resource{
		Mode: addrs.DataResourceMode,
		Type: "aws_vpc",
		Name: "bar",
	}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance))
	if rcs == nil {
		t.Fatalf("missing diff for data.aws_vpc.bar")
	}
	rc, err := rcs.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}
	// The data source read is deferred to apply, so "foo" is unknown.
	checkVals(t,
		cty.ObjectVal(map[string]cty.Value{
			"foo": cty.UnknownVal(cty.String),
		}),
		rc.After,
	)
}
func TestContext2Plan_computedInFunction(t *testing.T) {
m := testModule(t, "plan-computed-in-function")
p := testProvider("aws")
p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"attr": {Type: cty.Number, Optional: true},
},
},
},
DataSources: map[string]*configschema.Block{
"aws_data_source": {
Attributes: map[string]*configschema.Attribute{
"computed": {Type: cty.List(cty.String), Computed: true},
},
},
},
})
p.ReadDataSourceResponse = &providers.ReadDataSourceResponse{
State: cty.ObjectVal(map[string]cty.Value{
"computed": cty.ListVal([]cty.Value{
cty.StringVal("foo"),
}),
}),
}
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
})
diags := ctx.Validate()
assertNoErrors(t, diags)
_, diags = ctx.Plan()
assertNoErrors(t, diags)
if !p.ReadDataSourceCalled {
t.Fatalf("ReadDataSource was not called on provider during plan; should've been called")
}
}
func TestContext2Plan_computedDataCountResource(t *testing.T) {
m := testModule(t, "plan-computed-data-count")
p := testProvider("aws")
p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"num": {Type: cty.String, Optional: true},
"compute": {Type: cty.String, Optional: true},
"foo": {Type: cty.String, Computed: true},
},
},
},
DataSources: map[string]*configschema.Block{
"aws_vpc": {
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
// make sure we created 3 "bar"s
for i := 0; i < 3; i++ {
addr := addrs.Resource{
Mode: addrs.DataResourceMode,
Type: "aws_vpc",
Name: "bar",
}.Instance(addrs.IntKey(i)).Absolute(addrs.RootModuleInstance)
if rcs := plan.Changes.ResourceInstance(addr); rcs == nil {
t.Fatalf("missing changes for %s", addr)
}
}
}
func TestContext2Plan_localValueCount(t *testing.T) {
m := testModule(t, "plan-local-value-count")
p := testProvider("test")
ctx := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
},
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
// make sure we created 3 "foo"s
for i := 0; i < 3; i++ {
addr := addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_resource",
Name: "foo",
}.Instance(addrs.IntKey(i)).Absolute(addrs.RootModuleInstance)
if rcs := plan.Changes.ResourceInstance(addr); rcs == nil {
t.Fatalf("missing changes for %s", addr)
}
}
}
// TestContext2Plan_dataResourceBecomesComputed verifies that a data resource
// that previously had a known value in state becomes unknown again when its
// configuration now depends on an unknown managed-resource attribute: the
// read must be deferred (ReadDataSource not called) and the planned "foo"
// must be unknown.
func TestContext2Plan_dataResourceBecomesComputed(t *testing.T) {
	m := testModule(t, "plan-data-resource-becomes-computed")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"foo":      {Type: cty.String, Optional: true},
					"computed": {Type: cty.String, Computed: true},
				},
			},
		},
		DataSources: map[string]*configschema.Block{
			"aws_data_source": {
				Attributes: map[string]*configschema.Attribute{
					"id":  {Type: cty.String, Computed: true},
					"foo": {Type: cty.String, Optional: true},
				},
			},
		},
	})
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
		fooVal := req.ProposedNewState.GetAttr("foo")
		return providers.PlanResourceChangeResponse{
			PlannedState: cty.ObjectVal(map[string]cty.Value{
				"foo":      fooVal,
				"computed": cty.UnknownVal(cty.String),
			}),
			PlannedPrivate: req.PriorPrivate,
		}
	}
	schema := p.GetProviderSchemaResponse.DataSources["aws_data_source"].Block
	ty := schema.ImpliedType()
	p.ReadDataSourceResponse = &providers.ReadDataSourceResponse{
		// This should not be called, because the configuration for the
		// data resource contains an unknown value for "foo".
		Diagnostics: tfdiags.Diagnostics(nil).Append(fmt.Errorf("ReadDataSource called, but should not have been")),
	}
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("data.aws_data_source.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-abc123","foo":"baz"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	_, diags := ctx.Refresh()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors during refresh: %s", diags.Err())
	}
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors during plan: %s", diags.Err())
	}
	rcs := plan.Changes.ResourceInstance(addrs.Resource{
		Mode: addrs.DataResourceMode,
		Type: "aws_data_source",
		Name: "foo",
	}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance))
	if rcs == nil {
		t.Logf("full changeset: %s", spew.Sdump(plan.Changes))
		// Fixed error message: the resource under test is
		// data.aws_data_source.foo, not "aws_data_resource".
		t.Fatalf("missing diff for data.aws_data_source.foo")
	}
	rc, err := rcs.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}
	// foo should now be unknown
	foo := rc.After.GetAttr("foo")
	if foo.IsKnown() {
		t.Fatalf("foo should be unknown, got %#v", foo)
	}
}
// TestContext2Plan_computedList verifies that an attribute declared Computed
// in the provider schema ("list") is planned as an unknown value, and that a
// reference to it from another resource causes that resource's "foo" to be
// unknown in the plan as well.
func TestContext2Plan_computedList(t *testing.T) {
	m := testModule(t, "plan-computed-list")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Custom schema: "list" is computed, everything else optional.
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"compute": {Type: cty.String, Optional: true},
					"foo":     {Type: cty.String, Optional: true},
					"num":     {Type: cty.String, Optional: true},
					"list":    {Type: cty.List(cty.String), Computed: true},
				},
			},
		},
	})
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			// "foo" references the computed list, so it must be unknown.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"foo": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"list":    cty.UnknownVal(cty.List(cty.String)),
				"num":     cty.NumberIntVal(2),
				"compute": cty.StringVal("list.#"),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// GH-8695. This tests that you can index into a computed list on a
// splatted resource.
// TestContext2Plan_computedMultiIndex verifies (per GH-8695, see comment
// above) that indexing into a computed list on a splatted resource plans
// cleanly: both foo[*] instances have an unknown computed "ip" list, and
// bar[0], which indexes into that splat, gets an unknown "foo".
func TestContext2Plan_computedMultiIndex(t *testing.T) {
	m := testModule(t, "plan-computed-multi-index")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Custom schema: "ip" is a computed list that the config splats into.
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"compute": {Type: cty.String, Optional: true},
					"foo":     {Type: cty.List(cty.String), Optional: true},
					"ip":      {Type: cty.List(cty.String), Computed: true},
				},
			},
		},
	})
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 3 {
		t.Fatal("expected 3 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.foo[0]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"ip":      cty.UnknownVal(cty.List(cty.String)),
				"foo":     cty.NullVal(cty.List(cty.String)),
				"compute": cty.StringVal("ip.#"),
			}), ric.After)
		case "aws_instance.foo[1]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"ip":      cty.UnknownVal(cty.List(cty.String)),
				"foo":     cty.NullVal(cty.List(cty.String)),
				"compute": cty.StringVal("ip.#"),
			}), ric.After)
		case "aws_instance.bar[0]":
			// Indexes into the unknown splat, so "foo" is unknown here.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"foo": cty.UnknownVal(cty.List(cty.String)),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_count verifies basic count expansion: five
// aws_instance.foo instances are planned for creation, plus an
// aws_instance.bar whose "foo" joins the splatted values of all five.
func TestContext2Plan_count(t *testing.T) {
	m := testModule(t, "plan-count")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	// 5 counted foo instances + 1 bar.
	if len(plan.Changes.Resources) != 6 {
		t.Fatal("expected 6 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			// One "foo" per counted instance, joined with commas.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo,foo,foo,foo,foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[0]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[1]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[2]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[3]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[4]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_countComputed verifies that a "count" expression that
// depends on a computed (not-yet-known) resource attribute is rejected at
// plan time: count must be resolvable before planning can proceed.
func TestContext2Plan_countComputed(t *testing.T) {
	m := testModule(t, "plan-count-computed")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	// Plan returns tfdiags.Diagnostics, not a plain error; a bare nil
	// comparison would also be satisfied by warning-only diagnostics.
	// Check HasErrors(), consistent with the other tests in this file.
	_, diags := ctx.Plan()
	if !diags.HasErrors() {
		t.Fatal("should error")
	}
}
// TestContext2Plan_countComputedModule verifies that a computed "count"
// inside a child module fails the plan with the specific "count value
// depends on resource attributes" error message.
func TestContext2Plan_countComputedModule(t *testing.T) {
	m := testModule(t, "plan-count-computed-module")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	_, diags := ctx.Plan()
	expectedErr := `The "count" value depends on resource attributes`
	// Format the consolidated diags.Err() rather than the raw Diagnostics
	// slice: %s on the slice does not render the error text, so the
	// Contains check would not be inspecting the real message.
	if !strings.Contains(fmt.Sprintf("%s", diags.Err()), expectedErr) {
		t.Fatalf("expected err would contain %q\nerr: %s\n",
			expectedErr, diags.Err())
	}
}
// TestContext2Plan_countModuleStatic verifies that a statically-known count
// inside a child module expands correctly: three
// module.child.aws_instance.foo instances are planned for creation.
func TestContext2Plan_countModuleStatic(t *testing.T) {
	m := testModule(t, "plan-count-module-static")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 3 {
		t.Fatal("expected 3 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "module.child.aws_instance.foo[0]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.child.aws_instance.foo[1]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.child.aws_instance.foo[2]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_countModuleStaticGrandchild is the same scenario as
// TestContext2Plan_countModuleStatic but two module levels deep: a static
// count in a grandchild module expands to three planned instances.
func TestContext2Plan_countModuleStaticGrandchild(t *testing.T) {
	m := testModule(t, "plan-count-module-static-grandchild")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 3 {
		t.Fatal("expected 3 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "module.child.module.child.aws_instance.foo[0]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.child.module.child.aws_instance.foo[1]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.child.module.child.aws_instance.foo[2]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_countIndex verifies that count.index is evaluated
// per instance: each planned aws_instance.foo[N] carries its own index
// as the value of "foo".
func TestContext2Plan_countIndex(t *testing.T) {
	m := testModule(t, "plan-count-index")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	// Expected "after" values keyed by instance address; the two instances
	// differ only in the count.index-derived "foo" value.
	wantAfter := map[string]map[string]cty.Value{
		"aws_instance.foo[0]": {
			"id":   cty.UnknownVal(cty.String),
			"foo":  cty.StringVal("0"),
			"type": cty.UnknownVal(cty.String),
		},
		"aws_instance.foo[1]": {
			"id":   cty.UnknownVal(cty.String),
			"foo":  cty.StringVal("1"),
			"type": cty.UnknownVal(cty.String),
		},
	}
	for _, change := range plan.Changes.Resources {
		if change.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", change.Action)
		}
		instPlan, err := change.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		addr := instPlan.Addr.String()
		want, ok := wantAfter[addr]
		if !ok {
			t.Fatal("unknown instance:", addr)
		}
		checkVals(t, objectVal(t, schema, want), instPlan.After)
	}
}
// TestContext2Plan_countVar verifies that count can come from an input
// variable: with instance_count = "3", three aws_instance.foo instances are
// planned, plus an aws_instance.bar that joins their splatted "foo" values.
func TestContext2Plan_countVar(t *testing.T) {
	m := testModule(t, "plan-count-var")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		// count is supplied as a string; conversion to number is exercised.
		Variables: InputValues{
			"instance_count": &InputValue{
				Value:      cty.StringVal("3"),
				SourceType: ValueFromCaller,
			},
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 4 {
		t.Fatal("expected 4 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo,foo,foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[0]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[1]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[2]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_countZero verifies planning against a resource whose
// count is zero: the splat of the zero-count resource produces an empty
// tuple for the referencing resource's "foo" attribute.
func TestContext2Plan_countZero(t *testing.T) {
	m := testModule(t, "plan-count-zero")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					// DynamicPseudoType lets "foo" carry the raw tuple.
					"foo": {Type: cty.DynamicPseudoType, Optional: true},
				},
			},
		},
	})
	// This schema contains a DynamicPseudoType, and therefore can't go through any shim functions
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
		resp.PlannedState = req.ProposedNewState
		resp.PlannedPrivate = req.PriorPrivate
		return resp
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	res := plan.Changes.Resources[0]
	if res.Action != plans.Create {
		t.Fatalf("expected resource creation, got %s", res.Action)
	}
	ric, err := res.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}
	// A zero-count splat yields an empty tuple, not null/unknown.
	expected := cty.TupleVal(nil)
	foo := ric.After.GetAttr("foo")
	if !cmp.Equal(expected, foo, valueComparer) {
		t.Fatal(cmp.Diff(expected, foo, valueComparer))
	}
}
// TestContext2Plan_countOneIndex verifies indexing [0] into a resource
// with count = 1: both the counted instance and the resource referencing
// it are planned for creation with the expected values.
func TestContext2Plan_countOneIndex(t *testing.T) {
	m := testModule(t, "plan-count-one-index")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	// Expected "after" values keyed by instance address.
	wantAfter := map[string]map[string]cty.Value{
		"aws_instance.bar": {
			"id":   cty.UnknownVal(cty.String),
			"foo":  cty.StringVal("foo"),
			"type": cty.UnknownVal(cty.String),
		},
		"aws_instance.foo[0]": {
			"id":   cty.UnknownVal(cty.String),
			"foo":  cty.StringVal("foo"),
			"type": cty.UnknownVal(cty.String),
		},
	}
	for _, change := range plan.Changes.Resources {
		if change.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", change.Action)
		}
		instPlan, err := change.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		addr := instPlan.Addr.String()
		want, ok := wantAfter[addr]
		if !ok {
			t.Fatal("unknown instance:", addr)
		}
		checkVals(t, objectVal(t, schema, want), instPlan.After)
	}
}
// TestContext2Plan_countDecreaseToOne verifies decreasing count from 3 to 1:
// the surviving instance becomes a NoOp (and is re-addressed without an
// index), the two extra instances are planned for deletion, and the prior
// state string is unchanged by planning.
func TestContext2Plan_countDecreaseToOne(t *testing.T) {
	m := testModule(t, "plan-count-dec")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Seed state with three counted instances; only foo[0] has full attrs.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","foo":"foo","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[2]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 4 {
		t.Fatal("expected 4 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("bar"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo":
			// foo[0] survives the count decrease as the un-indexed address.
			if res.Action != plans.NoOp {
				t.Fatalf("resource %s should be unchanged", i)
			}
		case "aws_instance.foo[1]":
			if res.Action != plans.Delete {
				t.Fatalf("expected resource delete, got %s", res.Action)
			}
		case "aws_instance.foo[2]":
			if res.Action != plans.Delete {
				t.Fatalf("expected resource delete, got %s", res.Action)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
	// Planning must not mutate the prior state.
	expectedState := `aws_instance.foo:
  ID = bar
  provider = provider["registry.terraform.io/hashicorp/aws"]
  foo = foo
  type = aws_instance
aws_instance.foo.1:
  ID = bar
  provider = provider["registry.terraform.io/hashicorp/aws"]
aws_instance.foo.2:
  ID = bar
  provider = provider["registry.terraform.io/hashicorp/aws"]`
	if ctx.State().String() != expectedState {
		// Fixed typo in the failure message ("epected" -> "expected").
		t.Fatalf("expected state:\n%q\n\ngot state:\n%q\n", expectedState, ctx.State().String())
	}
}
// TestContext2Plan_countIncreaseFromNotSet verifies adding count to a
// previously uncounted resource: the existing un-indexed instance is adopted
// as foo[0] (NoOp) and the two new indices are planned for creation.
func TestContext2Plan_countIncreaseFromNotSet(t *testing.T) {
	m := testModule(t, "plan-count-inc")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Prior state holds a single instance at the un-indexed address.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","type":"aws_instance","foo":"foo"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 4 {
		t.Fatal("expected 4 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("bar"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[0]":
			// The un-indexed instance moves to index 0 without changes.
			if res.Action != plans.NoOp {
				t.Fatalf("resource %s should be unchanged", i)
			}
		case "aws_instance.foo[1]":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[2]":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_countIncreaseFromOne verifies increasing count from 1 to
// 3 when the existing instance is already stored at index [0]: foo[0] is a
// NoOp and foo[1]/foo[2] are planned for creation.
func TestContext2Plan_countIncreaseFromOne(t *testing.T) {
	m := testModule(t, "plan-count-inc")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Prior state holds exactly one instance, already indexed as foo[0].
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","foo":"foo","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 4 {
		t.Fatal("expected 4 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("bar"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[0]":
			if res.Action != plans.NoOp {
				t.Fatalf("resource %s should be unchanged", i)
			}
		case "aws_instance.foo[1]":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[2]":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// https://github.com/PeoplePerHour/terraform/pull/11
//
// This tests a case where both a "resource" and "resource.0" are in
// the state file, which apparently is a reasonable backwards compatibility
// concern found in the above 3rd party repo.
// TestContext2Plan_countIncreaseFromOneCorrupted covers the legacy-state
// corruption described in the comment above (both "aws_instance.foo" and
// "aws_instance.foo[0]" present): the duplicate un-indexed entry is planned
// for deletion, foo[0] is kept as a NoOp, and the new indices are created.
func TestContext2Plan_countIncreaseFromOneCorrupted(t *testing.T) {
	m := testModule(t, "plan-count-inc")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Deliberately corrupted prior state: the same instance exists both
	// un-indexed and at index [0].
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","foo":"foo","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","foo":"foo","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 5 {
		t.Fatal("expected 5 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("bar"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo":
			// The orphaned un-indexed duplicate must be removed.
			if res.Action != plans.Delete {
				t.Fatalf("resource %s should be removed", i)
			}
		case "aws_instance.foo[0]":
			if res.Action != plans.NoOp {
				t.Fatalf("resource %s should be unchanged", i)
			}
		case "aws_instance.foo[1]":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo[2]":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// A common pattern in TF configs is to have a set of resources with the same
// count and to use count.index to create correspondences between them:
//
// foo_id = "${foo.bar.*.id[count.index]}"
//
// This test is for the situation where some instances already exist and the
// count is increased. In that case, we should see only the create diffs
// for the new instances and not any update diffs for the existing ones.
// TestContext2Plan_countIncreaseWithSplatReference covers the scenario in
// the comment above: resources correlated via count.index into a splat.
// When count grows from 2 to 3, the existing instances must stay NoOp and
// only the new index produces Create changes.
func TestContext2Plan_countIncreaseWithSplatReference(t *testing.T) {
	m := testModule(t, "plan-count-splat-reference")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"name":     {Type: cty.String, Optional: true},
					"foo_name": {Type: cty.String, Optional: true},
					"id":       {Type: cty.String, Computed: true},
				},
			},
		},
	})
	// Prior state: two foo instances and two bar instances whose foo_name
	// already matches the corresponding foo's name.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","name":"foo 0"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","name":"foo 1"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.bar[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","foo_name":"foo 0"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.bar[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","foo_name":"foo 1"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 6 {
		t.Fatal("expected 6 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar[0]", "aws_instance.bar[1]", "aws_instance.foo[0]", "aws_instance.foo[1]":
			if res.Action != plans.NoOp {
				t.Fatalf("resource %s should be unchanged", i)
			}
		case "aws_instance.bar[2]":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			// The instance ID changed, so just check that the name updated
			// NOTE(review): direct != on cty.Value relies on Go struct
			// comparability; cty's RawEquals is the documented comparison.
			// This works for simple known strings — confirm if the attr
			// type ever changes.
			if ric.After.GetAttr("foo_name") != cty.StringVal("foo 2") {
				t.Fatalf("resource %s attr \"foo_name\" should be changed", i)
			}
		case "aws_instance.foo[2]":
			if res.Action != plans.Create {
				t.Fatalf("expected resource create, got %s", res.Action)
			}
			// The instance ID changed, so just check that the name updated
			if ric.After.GetAttr("name") != cty.StringVal("foo 2") {
				t.Fatalf("resource %s attr \"name\" should be changed", i)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_forEach verifies basic for_each expansion: all eight
// planned instances are Create actions and each change decodes cleanly
// against the schema. Per-instance values are not asserted here.
func TestContext2Plan_forEach(t *testing.T) {
	m := testModule(t, "plan-for-each")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 8 {
		t.Fatal("expected 8 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		// Decode only to confirm the change is well-formed.
		_, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
	}
}
// TestContext2Plan_forEachUnknownValue verifies that an unknown value fed
// into for_each produces a clean "Invalid for_each argument" error rather
// than a panic.
func TestContext2Plan_forEachUnknownValue(t *testing.T) {
	// This module has a variable defined, but its value is unknown. We
	// expect this to produce an error, but not to panic.
	m := testModule(t, "plan-for-each-unknown-value")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Variables: InputValues{
			"foo": {
				Value:      cty.UnknownVal(cty.String),
				SourceType: ValueFromCLIArg,
			},
		},
	})
	_, diags := ctx.Plan()
	if !diags.HasErrors() {
		// Should get this error:
		// Invalid for_each argument: The "for_each" value depends on resource attributes that cannot be determined until apply...
		t.Fatal("succeeded; want errors")
	}
	gotErrStr := diags.Err().Error()
	wantErrStr := "Invalid for_each argument"
	if !strings.Contains(gotErrStr, wantErrStr) {
		t.Fatalf("missing expected error\ngot: %s\n\nwant: error containing %q", gotErrStr, wantErrStr)
	}
}
// TestContext2Plan_destroy verifies a destroy-mode plan: both instances in
// the prior state are planned for deletion.
func TestContext2Plan_destroy(t *testing.T) {
	m := testModule(t, "plan-destroy")
	p := testProvider("aws")
	// Seed state with two ready instances in the root module.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.one").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.two").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"baz"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State:    state,
		PlanMode: plans.DestroyMode,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.one", "aws_instance.two":
			if res.Action != plans.Delete {
				t.Fatalf("resource %s should be removed", i)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_moduleDestroy verifies that a destroy-mode plan covers
// resource instances in both the root module and a child module.
func TestContext2Plan_moduleDestroy(t *testing.T) {
	m := testModule(t, "plan-module-destroy")
	p := testProvider("aws")
	state := states.NewState()
	// One instance in the root module...
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	// ...and one with the same local name inside module.child.
	child := state.EnsureModule(addrs.RootModuleInstance.Child("child", addrs.NoKey))
	child.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State:    state,
		PlanMode: plans.DestroyMode,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.foo", "module.child.aws_instance.foo":
			if res.Action != plans.Delete {
				t.Fatalf("resource %s should be removed", i)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// GH-1835
// TestContext2Plan_moduleDestroyCycle is a regression test: destroying two
// sibling modules must not produce a dependency cycle in the graph.
func TestContext2Plan_moduleDestroyCycle(t *testing.T) {
	m := testModule(t, "plan-module-destroy-gh-1835")
	p := testProvider("aws")
	state := states.NewState()
	// Two sibling modules, each with one instance, mirroring the original
	// issue's reproduction case.
	aModule := state.EnsureModule(addrs.RootModuleInstance.Child("a_module", addrs.NoKey))
	aModule.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.a").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"a"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	bModule := state.EnsureModule(addrs.RootModuleInstance.Child("b_module", addrs.NoKey))
	bModule.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.b").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"b"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State:    state,
		PlanMode: plans.DestroyMode,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "module.a_module.aws_instance.a", "module.b_module.aws_instance.b":
			if res.Action != plans.Delete {
				t.Fatalf("resource %s should be removed", i)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_moduleDestroyMultivar verifies that a destroy-mode plan
// removes every instance of a counted (multi-instance) resource in a child
// module.
func TestContext2Plan_moduleDestroyMultivar(t *testing.T) {
	m := testModule(t, "plan-module-destroy-multivar")
	p := testProvider("aws")
	state := states.NewState()
	// Two indexed instances of the same resource inside module.child.
	child := state.EnsureModule(addrs.RootModuleInstance.Child("child", addrs.NoKey))
	child.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar0"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	child.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar1"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State:    state,
		PlanMode: plans.DestroyMode,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "module.child.aws_instance.foo[0]", "module.child.aws_instance.foo[1]":
			if res.Action != plans.Delete {
				t.Fatalf("resource %s should be removed", i)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_pathVar verifies that the path.cwd, path.module and
// path.root values interpolate to the expected directories in a plan.
func TestContext2Plan_pathVar(t *testing.T) {
	cwd, err := os.Getwd()
	if err != nil {
		t.Fatalf("err: %s", err)
	}
	m := testModule(t, "plan-path-var")
	p := testProvider("aws")
	// Custom schema: one string attribute per path variable under test.
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"cwd":    {Type: cty.String, Optional: true},
					"module": {Type: cty.String, Optional: true},
					"root":   {Type: cty.String, Optional: true},
				},
			},
		},
	})
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("err: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.foo":
			if res.Action != plans.Create {
				t.Fatalf("resource %s should be created", i)
			}
			// The fixture appends "/barpath" / "/foopath" suffixes so we can
			// tell the three path variables apart in the planned values.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"cwd":    cty.StringVal(cwd + "/barpath"),
				"module": cty.StringVal(m.Module.SourceDir + "/foopath"),
				"root":   cty.StringVal(m.Module.SourceDir + "/barpath"),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_diffVar verifies that a value interpolated from another
// resource's planned diff propagates correctly: foo's updated "num" feeds
// bar's configuration.
func TestContext2Plan_diffVar(t *testing.T) {
	m := testModule(t, "plan-diffvar")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Prior state has foo with num=2; the config bumps it to 3.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","num":"2","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			// bar is new and should pick up foo's planned num (3).
			if res.Action != plans.Create {
				t.Fatalf("resource %s should be created", i)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"num":  cty.NumberIntVal(3),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo":
			// foo updates in place: num 2 -> 3.
			if res.Action != plans.Update {
				t.Fatalf("resource %s should be updated", i)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.StringVal("bar"),
				"num":  cty.NumberIntVal(2),
				"type": cty.StringVal("aws_instance"),
			}), ric.Before)
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.StringVal("bar"),
				"num":  cty.NumberIntVal(3),
				"type": cty.StringVal("aws_instance"),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_hook confirms that a plan operation invokes both the
// PreDiff and PostDiff hooks on a registered Hook implementation.
func TestContext2Plan_hook(t *testing.T) {
	module := testModule(t, "plan-good")
	hook := new(MockHook)
	provider := testProvider("aws")
	planCtx := testContext2(t, &ContextOpts{
		Config: module,
		Hooks:  []Hook{hook},
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(provider),
		},
	})
	if _, diags := planCtx.Plan(); diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	// Both hook callbacks must have fired during planning.
	for _, called := range []bool{hook.PreDiffCalled, hook.PostDiffCalled} {
		if !called {
			t.Fatal("should be called")
		}
	}
}
// TestContext2Plan_closeProvider verifies that providers are closed after a
// plan completes. This fixture only has an aliased provider located in the
// module, to make sure that the provider name contains a path more complex
// than "provider.aws".
func TestContext2Plan_closeProvider(t *testing.T) {
	cfg := testModule(t, "plan-close-module-provider")
	awsProvider := testProvider("aws")
	planCtx := testContext2(t, &ContextOpts{
		Config: cfg,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(awsProvider),
		},
	})
	if _, diags := planCtx.Plan(); diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	if !awsProvider.CloseCalled {
		t.Fatal("provider not closed")
	}
}
// TestContext2Plan_orphan verifies that a resource present in state but
// absent from configuration (an "orphan") is planned for deletion, while the
// configured resource is planned for creation.
func TestContext2Plan_orphan(t *testing.T) {
	m := testModule(t, "plan-orphan")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// aws_instance.baz exists only in state, not in the fixture config.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.baz").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.baz":
			// The orphan: delete, with no special action reason.
			if res.Action != plans.Delete {
				t.Fatalf("resource %s should be removed", i)
			}
			if got, want := ric.ActionReason, plans.ResourceInstanceChangeNoReason; got != want {
				t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
			}
		case "aws_instance.foo":
			// The configured resource: create.
			if res.Action != plans.Create {
				t.Fatalf("resource %s should be created", i)
			}
			if got, want := ric.ActionReason, plans.ResourceInstanceChangeNoReason; got != want {
				t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"num":  cty.NumberIntVal(2),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// This tests that configurations with UUIDs don't produce errors.
// For shadows, this would produce errors since a UUID changes every time.
func TestContext2Plan_shadowUuid(t *testing.T) {
	cfg := testModule(t, "plan-shadow-uuid")
	awsProvider := testProvider("aws")
	planCtx := testContext2(t, &ContextOpts{
		Config: cfg,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(awsProvider),
		},
	})
	// A successful plan (no error diagnostics) is the entire assertion here.
	if _, diags := planCtx.Plan(); diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
}
// TestContext2Plan_state verifies planning against an existing state: the
// resource already recorded in state gets an in-place Update while the newly
// configured resource gets a Create.
func TestContext2Plan_state(t *testing.T) {
	m := testModule(t, "plan-good")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Prior state contains only aws_instance.foo.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	// NOTE: a previous `len(...) < 2` pre-check here was redundant with this
	// exact-count assertion and has been removed.
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.bar":
			// Not in state: plain create.
			if res.Action != plans.Create {
				t.Fatalf("resource %s should be created", i)
			}
			if got, want := ric.ActionReason, plans.ResourceInstanceChangeNoReason; got != want {
				t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("2"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "aws_instance.foo":
			// In state: update in place; prior state lacked num/type so they
			// decode as null in the Before value.
			if res.Action != plans.Update {
				t.Fatalf("resource %s should be updated", i)
			}
			if got, want := ric.ActionReason, plans.ResourceInstanceChangeNoReason; got != want {
				t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.StringVal("bar"),
				"num":  cty.NullVal(cty.Number),
				"type": cty.NullVal(cty.String),
			}), ric.Before)
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.StringVal("bar"),
				"num":  cty.NumberIntVal(2),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_requiresReplace verifies that when a provider reports an
// attribute in RequiresReplace, a change to that attribute yields a
// DeleteThenCreate action with reason ReplaceBecauseCannotUpdate.
func TestContext2Plan_requiresReplace(t *testing.T) {
	m := testModule(t, "plan-requires-replace")
	p := testProvider("test")
	p.GetProviderSchemaResponse = &providers.GetProviderSchemaResponse{
		Provider: providers.Schema{
			Block: &configschema.Block{},
		},
		ResourceTypes: map[string]providers.Schema{
			"test_thing": providers.Schema{
				Block: &configschema.Block{
					Attributes: map[string]*configschema.Attribute{
						"v": {
							Type:     cty.String,
							Required: true,
						},
					},
				},
			},
		},
	}
	// The plan function always flags "v" as requiring replacement.
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
		return providers.PlanResourceChangeResponse{
			PlannedState: req.ProposedNewState,
			RequiresReplace: []cty.Path{
				cty.GetAttrPath("v"),
			},
		}
	}
	// Prior state has v="hello"; the fixture config changes it.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("test_thing.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"v":"hello"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["test_thing"].Block
	ty := schema.ImpliedType()
	if got, want := len(plan.Changes.Resources), 1; got != want {
		t.Fatalf("got %d changes; want %d", got, want)
	}
	for _, res := range plan.Changes.Resources {
		t.Run(res.Addr.String(), func(t *testing.T) {
			ric, err := res.Decode(ty)
			if err != nil {
				t.Fatal(err)
			}
			switch i := ric.Addr.String(); i {
			case "test_thing.foo":
				if got, want := ric.Action, plans.DeleteThenCreate; got != want {
					t.Errorf("wrong action\ngot:  %s\nwant: %s", got, want)
				}
				if got, want := ric.ActionReason, plans.ResourceInstanceReplaceBecauseCannotUpdate; got != want {
					t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
				}
				checkVals(t, objectVal(t, schema, map[string]cty.Value{
					"v": cty.StringVal("goodbye"),
				}), ric.After)
			default:
				t.Fatalf("unexpected resource instance %s", i)
			}
		})
	}
}
// TestContext2Plan_taint verifies that a tainted instance is planned as
// DeleteThenCreate (reason ReplaceBecauseTainted) while an untainted,
// unchanged instance is a NoOp.
func TestContext2Plan_taint(t *testing.T) {
	m := testModule(t, "plan-taint")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	// foo is healthy (ObjectReady)...
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","num":"2","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	// ...while bar is tainted and must be replaced.
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.bar").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectTainted,
			AttrsJSON: []byte(`{"id":"baz"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		t.Run(res.Addr.String(), func(t *testing.T) {
			ric, err := res.Decode(ty)
			if err != nil {
				t.Fatal(err)
			}
			switch i := ric.Addr.String(); i {
			case "aws_instance.bar":
				if got, want := res.Action, plans.DeleteThenCreate; got != want {
					t.Errorf("wrong action\ngot:  %s\nwant: %s", got, want)
				}
				if got, want := res.ActionReason, plans.ResourceInstanceReplaceBecauseTainted; got != want {
					t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
				}
				checkVals(t, objectVal(t, schema, map[string]cty.Value{
					"id":   cty.UnknownVal(cty.String),
					"foo":  cty.StringVal("2"),
					"type": cty.UnknownVal(cty.String),
				}), ric.After)
			case "aws_instance.foo":
				if got, want := res.Action, plans.NoOp; got != want {
					t.Errorf("wrong action\ngot:  %s\nwant: %s", got, want)
				}
				if got, want := res.ActionReason, plans.ResourceInstanceChangeNoReason; got != want {
					t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
				}
			default:
				t.Fatal("unknown instance:", i)
			}
		})
	}
}
// TestContext2Plan_taintIgnoreChanges verifies that a tainted instance is
// still replaced even when the changed attribute is covered by
// ignore_changes: the ignored attribute keeps its prior value in the
// replacement plan.
func TestContext2Plan_taintIgnoreChanges(t *testing.T) {
	m := testModule(t, "plan-taint-ignore-changes")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"id":   {Type: cty.String, Computed: true},
					"vars": {Type: cty.String, Optional: true},
					"type": {Type: cty.String, Computed: true},
				},
			},
		},
	})
	// The single instance in state is tainted, forcing replacement.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectTainted,
			AttrsJSON: []byte(`{"id":"foo","vars":"foo","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.foo":
			if got, want := res.Action, plans.DeleteThenCreate; got != want {
				t.Errorf("wrong action\ngot:  %s\nwant: %s", got, want)
			}
			if got, want := res.ActionReason, plans.ResourceInstanceReplaceBecauseTainted; got != want {
				t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.StringVal("foo"),
				"vars": cty.StringVal("foo"),
				"type": cty.StringVal("aws_instance"),
			}), ric.Before)
			// "vars" keeps its prior value ("foo") despite the config change,
			// because it's listed in ignore_changes.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"vars": cty.StringVal("foo"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// Fails about 50% of the time before the fix for GH-4982, covers the fix.
// Replans the same config/state 100 times to catch the nondeterministic race
// between tainted-instance replacement and an interpolated count.
func TestContext2Plan_taintDestroyInterpolatedCountRace(t *testing.T) {
	m := testModule(t, "plan-taint-interpolated-count")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	// Three counted instances: index 0 tainted, 1 and 2 healthy.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectTainted,
			AttrsJSON: []byte(`{"id":"bar","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[2]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	for i := 0; i < 100; i++ {
		// DeepCopy so each iteration plans against pristine state.
		ctx := testContext2(t, &ContextOpts{
			Config: m,
			Providers: map[addrs.Provider]providers.Factory{
				addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
			},
			State: state.DeepCopy(),
		})
		plan, diags := ctx.Plan()
		if diags.HasErrors() {
			t.Fatalf("unexpected errors: %s", diags.Err())
		}
		schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
		ty := schema.ImpliedType()
		if len(plan.Changes.Resources) != 3 {
			t.Fatal("expected 3 changes, got", len(plan.Changes.Resources))
		}
		for _, res := range plan.Changes.Resources {
			ric, err := res.Decode(ty)
			if err != nil {
				t.Fatal(err)
			}
			switch i := ric.Addr.String(); i {
			case "aws_instance.foo[0]":
				// The tainted instance must be replaced every time.
				if got, want := ric.Action, plans.DeleteThenCreate; got != want {
					t.Errorf("wrong action\ngot:  %s\nwant: %s", got, want)
				}
				if got, want := ric.ActionReason, plans.ResourceInstanceReplaceBecauseTainted; got != want {
					t.Errorf("wrong action reason\ngot:  %s\nwant: %s", got, want)
				}
				checkVals(t, objectVal(t, schema, map[string]cty.Value{
					"id":   cty.StringVal("bar"),
					"type": cty.StringVal("aws_instance"),
				}), ric.Before)
				checkVals(t, objectVal(t, schema, map[string]cty.Value{
					"id":   cty.UnknownVal(cty.String),
					"type": cty.UnknownVal(cty.String),
				}), ric.After)
			case "aws_instance.foo[1]", "aws_instance.foo[2]":
				// Healthy siblings must never be touched by the race.
				if res.Action != plans.NoOp {
					t.Fatalf("resource %s should not be changed", i)
				}
			default:
				t.Fatal("unknown instance:", i)
			}
		}
	}
}
// TestContext2Plan_targeted verifies that targeting a single resource limits
// the plan to just that resource's change.
func TestContext2Plan_targeted(t *testing.T) {
	m := testModule(t, "plan-targeted")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		// Only aws_instance.foo is targeted; other fixture resources must
		// be excluded from the plan.
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.Resource(
				addrs.ManagedResourceMode, "aws_instance", "foo",
			),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.foo":
			if res.Action != plans.Create {
				t.Fatalf("resource %s should be created", i)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"num":  cty.NumberIntVal(2),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// Test that targeting a module properly plans any inputs that depend
// on another module.
// Targeting module.B must transitively include module.A, whose output feeds
// module.B's input, so both modules' resources appear in the plan.
func TestContext2Plan_targetedCrossModule(t *testing.T) {
	m := testModule(t, "plan-targeted-cross-module")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.Child("B", addrs.NoKey),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		if res.Action != plans.Create {
			t.Fatalf("resource %s should be created", ric.Addr)
		}
		switch i := ric.Addr.String(); i {
		case "module.A.aws_instance.foo":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.StringVal("bar"),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.B.aws_instance.bar":
			// B's "foo" comes from A's (not yet known) output, hence unknown.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"foo":  cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_targetedModuleWithProvider verifies that targeting a
// module whose resources use a provider configuration still plans correctly,
// and that only the targeted module's resource appears in the plan.
func TestContext2Plan_targetedModuleWithProvider(t *testing.T) {
	m := testModule(t, "plan-targeted-module-with-provider")
	p := testProvider("null")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		Provider: &configschema.Block{
			Attributes: map[string]*configschema.Attribute{
				"key": {Type: cty.String, Optional: true},
			},
		},
		ResourceTypes: map[string]*configschema.Block{
			"null_resource": {
				Attributes: map[string]*configschema.Attribute{},
			},
		},
	})
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("null"): testProviderFuncFixed(p),
		},
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.Child("child2", addrs.NoKey),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["null_resource"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	res := plan.Changes.Resources[0]
	ric, err := res.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}
	// Fixed typo in the failure message ("unexpcetd" -> "unexpected").
	if ric.Addr.String() != "module.child2.null_resource.foo" {
		t.Fatalf("unexpected resource: %s", ric.Addr)
	}
}
// TestContext2Plan_targetedOrphan verifies that a targeted destroy-mode plan
// deletes only the targeted orphaned instance and leaves untargeted state
// alone.
func TestContext2Plan_targetedOrphan(t *testing.T) {
	m := testModule(t, "plan-targeted-orphan")
	p := testProvider("aws")
	// Both instances exist only in state (the fixture config is empty), but
	// only "orphan" is targeted below.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.orphan").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-789xyz"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.nottargeted").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-abc123"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State:    state,
		PlanMode: plans.DestroyMode,
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.Resource(
				addrs.ManagedResourceMode, "aws_instance", "orphan",
			),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.orphan":
			if res.Action != plans.Delete {
				t.Fatalf("resource %s should be destroyed", ric.Addr)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// https://github.com/hashicorp/terraform/issues/2538
// TestContext2Plan_targetedModuleOrphan verifies that targeting an orphaned
// instance inside a child module plans only that instance for deletion.
func TestContext2Plan_targetedModuleOrphan(t *testing.T) {
	m := testModule(t, "plan-targeted-module-orphan")
	p := testProvider("aws")
	// Both instances live in module.child's state; only "orphan" is targeted.
	state := states.NewState()
	child := state.EnsureModule(addrs.RootModuleInstance.Child("child", addrs.NoKey))
	child.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.orphan").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-789xyz"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	child.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.nottargeted").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"i-abc123"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State:    state,
		PlanMode: plans.DestroyMode,
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.Child("child", addrs.NoKey).Resource(
				addrs.ManagedResourceMode, "aws_instance", "orphan",
			),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	res := plan.Changes.Resources[0]
	ric, err := res.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}
	// Fixed malformed failure message ("resource :%s" -> "resource: %s").
	if ric.Addr.String() != "module.child.aws_instance.orphan" {
		t.Fatalf("unexpected resource: %s", ric.Addr)
	}
	if res.Action != plans.Delete {
		t.Fatalf("resource %s should be deleted", ric.Addr)
	}
}
// TestContext2Plan_targetedModuleUntargetedVariable verifies that targeting
// a resource plus a module still evaluates module input variables whose
// sources are outside the target set.
func TestContext2Plan_targetedModuleUntargetedVariable(t *testing.T) {
	m := testModule(t, "plan-targeted-module-untargeted-variable")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		// Target aws_instance.blue and the whole blue_mod module; the
		// fixture's other resources must stay out of the plan.
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.Resource(
				addrs.ManagedResourceMode, "aws_instance", "blue",
			),
			addrs.RootModuleInstance.Child("blue_mod", addrs.NoKey),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		if res.Action != plans.Create {
			t.Fatalf("resource %s should be created", ric.Addr)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.blue":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.blue_mod.aws_instance.mod":
			// "value" is fed by the untargeted blue resource, so it is
			// unknown at plan time.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":    cty.UnknownVal(cty.String),
				"value": cty.UnknownVal(cty.String),
				"type":  cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// ensure that outputs missing references due to targeting are removed from
// the graph.
func TestContext2Plan_outputContainsTargetedResource(t *testing.T) {
	// Targets a single resource inside a child module. Planning must succeed
	// and emit exactly one warning diagnostic about resource targeting.
	m := testModule(t, "plan-untargeted-resource-output")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.Child("mod", addrs.NoKey).Resource(
				addrs.ManagedResourceMode, "aws_instance", "a",
			),
		},
	})
	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("err: %s", diags)
	}
	if len(diags) != 1 {
		// Fix: %d needs the diagnostic count; the original passed the
		// diagnostics value itself, producing garbled failure output.
		t.Fatalf("got %d diagnostics; want 1", len(diags))
	}
	if got, want := diags[0].Severity(), tfdiags.Warning; got != want {
		t.Errorf("wrong diagnostic severity %#v; want %#v", got, want)
	}
	if got, want := diags[0].Description().Summary, "Resource targeting is in effect"; got != want {
		t.Errorf("wrong diagnostic summary %#v; want %#v", got, want)
	}
}
// https://github.com/hashicorp/terraform/issues/4515
func TestContext2Plan_targetedOverTen(t *testing.T) {
	// Seeds state with 13 instances of aws_instance.foo and targets only
	// index 1; with more than ten instances present, every change in the
	// resulting plan must still be a no-op (guards against the
	// address-matching regression tracked in the issue linked above).
	m := testModule(t, "plan-targeted-over-ten")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	// Populate state with instances foo[0] .. foo[12].
	for i := 0; i < 13; i++ {
		key := fmt.Sprintf("aws_instance.foo[%d]", i)
		id := fmt.Sprintf("i-abc%d", i)
		attrs := fmt.Sprintf(`{"id":"%s","type":"aws_instance"}`, id)
		root.SetResourceInstanceCurrent(
			mustResourceInstanceAddr(key).Resource,
			&states.ResourceInstanceObjectSrc{
				Status:    states.ObjectReady,
				AttrsJSON: []byte(attrs),
			},
			mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
		)
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.ResourceInstance(
				addrs.ManagedResourceMode, "aws_instance", "foo", addrs.IntKey(1),
			),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		if res.Action != plans.NoOp {
			t.Fatalf("unexpected action %s for %s", res.Action, ric.Addr)
		}
	}
}
func TestContext2Plan_provider(t *testing.T) {
	// Verifies that provider configuration is interpolated from input
	// variables before ConfigureProvider is invoked during plan.
	m := testModule(t, "plan-provider")
	p := testProvider("aws")
	var value interface{}
	p.ConfigureProviderFn = func(req providers.ConfigureProviderRequest) (resp providers.ConfigureProviderResponse) {
		// Capture the configured "foo" attribute for the assertion below.
		value = req.Config.GetAttr("foo").AsString()
		return
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Variables: InputValues{
			"foo": &InputValue{
				Value:      cty.StringVal("bar"),
				SourceType: ValueFromCaller,
			},
		},
	})
	// Check HasErrors rather than comparing the diagnostics slice to nil,
	// so warning-only diagnostics don't spuriously fail the test; this
	// matches the convention used by the other tests in this file.
	if _, diags := ctx.Plan(); diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	if value != "bar" {
		t.Fatalf("bad: %#v", value)
	}
}
func TestContext2Plan_varListErr(t *testing.T) {
	// The fixture is expected to produce an error during plan; assert that
	// error diagnostics (not merely warnings) are present.
	m := testModule(t, "plan-var-list-err")
	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	// Use HasErrors instead of a nil check: a non-nil diagnostics slice may
	// contain only warnings, which would not indicate the expected failure.
	_, diags := ctx.Plan()
	if !diags.HasErrors() {
		t.Fatal("should error")
	}
}
func TestContext2Plan_ignoreChanges(t *testing.T) {
	// State holds ami "ami-abcd1234" while var.foo supplies "ami-1234abcd";
	// the fixture presumably sets ignore_changes on ami, so the single
	// planned change must keep the prior ami value in its After state.
	m := testModule(t, "plan-ignore-changes")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","ami":"ami-abcd1234","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Variables: InputValues{
			"foo": &InputValue{
				Value:      cty.StringVal("ami-1234abcd"),
				SourceType: ValueFromCaller,
			},
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	res := plan.Changes.Resources[0]
	ric, err := res.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}
	if ric.Addr.String() != "aws_instance.foo" {
		t.Fatalf("unexpected resource: %s", ric.Addr)
	}
	// The ami from state must survive despite the differing variable value.
	checkVals(t, objectVal(t, schema, map[string]cty.Value{
		"id":   cty.StringVal("bar"),
		"ami":  cty.StringVal("ami-abcd1234"),
		"type": cty.StringVal("aws_instance"),
	}), ric.After)
}
func TestContext2Plan_ignoreChangesWildcard(t *testing.T) {
	// Variables supply new ami and instance-type values, but with wildcard
	// ignore_changes (per the fixture name) every planned change must end
	// up as a no-op.
	m := testModule(t, "plan-ignore-changes-wildcard")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","ami":"ami-abcd1234","instance":"t2.micro","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Variables: InputValues{
			// Both variables differ from the values recorded in state.
			"foo": &InputValue{
				Value:      cty.StringVal("ami-1234abcd"),
				SourceType: ValueFromCaller,
			},
			"bar": &InputValue{
				Value:      cty.StringVal("t2.small"),
				SourceType: ValueFromCaller,
			},
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.NoOp {
			t.Fatalf("unexpected resource diffs in root module: %s", spew.Sdump(plan.Changes.Resources))
		}
	}
}
func TestContext2Plan_ignoreChangesInMap(t *testing.T) {
	// Ignoring a single key inside a map attribute: the plan must still be
	// an Update, with tags["ignored"] preserved from state while the other
	// key is taken from config.
	p := testProvider("test")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"test_ignore_changes_map": {
				Attributes: map[string]*configschema.Attribute{
					"tags": {Type: cty.Map(cty.String), Optional: true},
				},
			},
		},
	})
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
		// Pass-through provider: plan exactly what core proposes.
		return providers.PlanResourceChangeResponse{
			PlannedState: req.ProposedNewState,
		}
	}
	s := states.BuildState(func(ss *states.SyncState) {
		ss.SetResourceInstanceCurrent(
			addrs.Resource{
				Mode: addrs.ManagedResourceMode,
				Type: "test_ignore_changes_map",
				Name: "foo",
			}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
			&states.ResourceInstanceObjectSrc{
				Status:    states.ObjectReady,
				AttrsJSON: []byte(`{"id":"foo","tags":{"ignored":"from state","other":"from state"},"type":"aws_instance"}`),
			},
			addrs.AbsProviderConfig{
				Provider: addrs.NewDefaultProvider("test"),
				Module:   addrs.RootModule,
			},
		)
	})
	m := testModule(t, "plan-ignore-changes-in-map")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
		State: s,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["test_ignore_changes_map"].Block
	ty := schema.ImpliedType()
	if got, want := len(plan.Changes.Resources), 1; got != want {
		t.Fatalf("wrong number of changes %d; want %d", got, want)
	}
	res := plan.Changes.Resources[0]
	ric, err := res.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}
	if res.Action != plans.Update {
		t.Fatalf("resource %s should be updated, got %s", ric.Addr, res.Action)
	}
	if got, want := ric.Addr.String(), "test_ignore_changes_map.foo"; got != want {
		t.Fatalf("unexpected resource address %s; want %s", got, want)
	}
	// "ignored" keeps its state value; "other" picks up the config value.
	checkVals(t, objectVal(t, schema, map[string]cty.Value{
		"tags": cty.MapVal(map[string]cty.Value{
			"ignored": cty.StringVal("from state"),
			"other":   cty.StringVal("from config"),
		}),
	}), ric.After)
}
func TestContext2Plan_ignoreChangesSensitive(t *testing.T) {
	// Same shape as the plain ignore-changes case, but (per the fixture
	// name) the ignored attribute is fed from a sensitive variable; the
	// prior ami must still survive into the planned After value.
	m := testModule(t, "plan-ignore-changes-sensitive")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"bar","ami":"ami-abcd1234","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Variables: InputValues{
			"foo": &InputValue{
				Value:      cty.StringVal("ami-1234abcd"),
				SourceType: ValueFromCaller,
			},
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	res := plan.Changes.Resources[0]
	ric, err := res.Decode(ty)
	if err != nil {
		t.Fatal(err)
	}
	if ric.Addr.String() != "aws_instance.foo" {
		t.Fatalf("unexpected resource: %s", ric.Addr)
	}
	// State's ami value wins over the differing variable input.
	checkVals(t, objectVal(t, schema, map[string]cty.Value{
		"id":   cty.StringVal("bar"),
		"ami":  cty.StringVal("ami-abcd1234"),
		"type": cty.StringVal("aws_instance"),
	}), ric.After)
}
func TestContext2Plan_moduleMapLiteral(t *testing.T) {
	// The fixture passes a literal map for "tags" and leaves "meta" unset;
	// the plan hook below verifies both arrive at the provider as expected.
	m := testModule(t, "plan-module-map-literal")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"tags": {Type: cty.Map(cty.String), Optional: true},
					"meta": {Type: cty.Map(cty.String), Optional: true},
				},
			},
		},
	})
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
		proposed := req.ProposedNewState.AsValueMap()
		tags := proposed["tags"].AsValueMap()
		if tags["foo"].AsString() != "bar" {
			t.Fatalf("Bad value in tags attr: %#v", tags)
		}
		meta := proposed["meta"].AsValueMap()
		if len(meta) != 0 {
			t.Fatalf("Meta attr not empty: %#v", meta)
		}
		return testDiffFn(req)
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	if _, diags := ctx.Plan(); diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
}
func TestContext2Plan_computedValueInMap(t *testing.T) {
	// An attribute populated from another resource's computed attribute must
	// plan as unknown in both the source and the consuming resource.
	m := testModule(t, "plan-computed-value-in-map")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"looked_up": {Type: cty.String, Optional: true},
				},
			},
			"aws_computed_source": {
				Attributes: map[string]*configschema.Attribute{
					"computed_read_only": {Type: cty.String, Computed: true},
				},
			},
		},
	})
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
		resp = testDiffFn(req)
		if req.TypeName != "aws_computed_source" {
			return
		}
		// Force the computed attribute to be unknown at plan time.
		planned := resp.PlannedState.AsValueMap()
		planned["computed_read_only"] = cty.UnknownVal(cty.String)
		resp.PlannedState = cty.ObjectVal(planned)
		return resp
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		schema := p.GetProviderSchemaResponse.ResourceTypes[res.Addr.Resource.Resource.Type].Block
		ric, err := res.Decode(schema.ImpliedType())
		if err != nil {
			t.Fatal(err)
		}
		if res.Action != plans.Create {
			t.Fatalf("resource %s should be created", ric.Addr)
		}
		switch i := ric.Addr.String(); i {
		case "aws_computed_source.intermediates":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"computed_read_only": cty.UnknownVal(cty.String),
			}), ric.After)
		case "module.test_mod.aws_instance.inner2":
			// The unknown value propagates into the module's resource.
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"looked_up": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
func TestContext2Plan_moduleVariableFromSplat(t *testing.T) {
	// Two modules each receive a variable built from a splat expression;
	// all four planned instances must be creates with "thing" = "doesnt".
	m := testModule(t, "plan-module-variable-from-splat")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"thing": {Type: cty.String, Optional: true},
				},
			},
		},
	})
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	if len(plan.Changes.Resources) != 4 {
		t.Fatal("expected 4 changes, got", len(plan.Changes.Resources))
	}
	for _, res := range plan.Changes.Resources {
		schema := p.GetProviderSchemaResponse.ResourceTypes[res.Addr.Resource.Resource.Type].Block
		ric, err := res.Decode(schema.ImpliedType())
		if err != nil {
			t.Fatal(err)
		}
		if res.Action != plans.Create {
			t.Fatalf("resource %s should be created", ric.Addr)
		}
		// Both indices in both modules resolve to the same value.
		switch i := ric.Addr.String(); i {
		case "module.mod1.aws_instance.test[0]",
			"module.mod1.aws_instance.test[1]",
			"module.mod2.aws_instance.test[0]",
			"module.mod2.aws_instance.test[1]":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"thing": cty.StringVal("doesnt"),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
func TestContext2Plan_createBeforeDestroy_depends_datasource(t *testing.T) {
	// Both aws_instance.foo instances depend on a data source; the planned
	// "computed" attribute must resolve to the data source's id, and the
	// changeset must contain exactly those two create actions.
	m := testModule(t, "plan-cbd-depends-datasource")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"num":      {Type: cty.String, Optional: true},
					"computed": {Type: cty.String, Optional: true, Computed: true},
				},
			},
		},
		DataSources: map[string]*configschema.Block{
			"aws_vpc": {
				Attributes: map[string]*configschema.Attribute{
					"id":  {Type: cty.String, Computed: true},
					"foo": {Type: cty.Number, Optional: true},
				},
			},
		},
	})
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
		// Mimic a provider that leaves "computed" unknown when unset.
		computedVal := req.ProposedNewState.GetAttr("computed")
		if computedVal.IsNull() {
			computedVal = cty.UnknownVal(cty.String)
		}
		return providers.PlanResourceChangeResponse{
			PlannedState: cty.ObjectVal(map[string]cty.Value{
				"num":      req.ProposedNewState.GetAttr("num"),
				"computed": computedVal,
			}),
		}
	}
	p.ReadDataSourceFn = func(req providers.ReadDataSourceRequest) providers.ReadDataSourceResponse {
		cfg := req.Config.AsValueMap()
		cfg["id"] = cty.StringVal("data_id")
		return providers.ReadDataSourceResponse{
			State: cty.ObjectVal(cfg),
		}
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	seenAddrs := make(map[string]struct{})
	for _, res := range plan.Changes.Resources {
		// Look up the schema in the right map depending on resource mode.
		var schema *configschema.Block
		switch res.Addr.Resource.Resource.Mode {
		case addrs.DataResourceMode:
			schema = p.GetProviderSchemaResponse.DataSources[res.Addr.Resource.Resource.Type].Block
		case addrs.ManagedResourceMode:
			schema = p.GetProviderSchemaResponse.ResourceTypes[res.Addr.Resource.Resource.Type].Block
		}
		ric, err := res.Decode(schema.ImpliedType())
		if err != nil {
			t.Fatal(err)
		}
		seenAddrs[ric.Addr.String()] = struct{}{}
		t.Run(ric.Addr.String(), func(t *testing.T) {
			switch i := ric.Addr.String(); i {
			// Both instances expect identical changes, so share one case
			// body rather than duplicating it per index.
			case "aws_instance.foo[0]", "aws_instance.foo[1]":
				if res.Action != plans.Create {
					// Report res.Action, the value actually tested above.
					t.Fatalf("resource %s should be created, got %s", ric.Addr, res.Action)
				}
				checkVals(t, objectVal(t, schema, map[string]cty.Value{
					"num":      cty.StringVal("2"),
					"computed": cty.StringVal("data_id"),
				}), ric.After)
			default:
				t.Fatal("unknown instance:", i)
			}
		})
	}
	wantAddrs := map[string]struct{}{
		"aws_instance.foo[0]": {},
		"aws_instance.foo[1]": {},
	}
	if !cmp.Equal(seenAddrs, wantAddrs) {
		t.Errorf("incorrect addresses in changeset:\n%s", cmp.Diff(wantAddrs, seenAddrs))
	}
}
// interpolated lists need to be stored in the original order.
func TestContext2Plan_listOrder(t *testing.T) {
	// Two resources configured with the same list (per the fixture) must
	// produce identical After values; any reordering would make them differ.
	m := testModule(t, "plan-list-order")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"foo": {Type: cty.List(cty.String), Optional: true},
				},
			},
		},
	})
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	changes := plan.Changes
	rDiffA := changes.ResourceInstance(addrs.Resource{
		Mode: addrs.ManagedResourceMode,
		Type: "aws_instance",
		Name: "a",
	}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance))
	rDiffB := changes.ResourceInstance(addrs.Resource{
		Mode: addrs.ManagedResourceMode,
		Type: "aws_instance",
		Name: "b",
	}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance))
	// The planned values of both resources must match exactly.
	if !cmp.Equal(rDiffA.After, rDiffB.After, valueComparer) {
		t.Fatal(cmp.Diff(rDiffA.After, rDiffB.After, valueComparer))
	}
}
// Make sure ignore-changes doesn't interfere with set/list/map diffs.
// If a resource was being replaced due to a RequiresNew attribute that gets
// ignored, we need to filter the diff so that the resource is updated in
// place rather than replaced.
func TestContext2Plan_ignoreChangesWithFlatmaps(t *testing.T) {
	// With the RequiresNew-style attribute ignored (per the fixture), the
	// plan must be a single in-place Update that merges new list/map
	// elements rather than a replacement.
	m := testModule(t, "plan-ignore-changes-with-flatmaps")
	p := testProvider("aws")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"aws_instance": {
				Attributes: map[string]*configschema.Attribute{
					"user_data":   {Type: cty.String, Optional: true},
					"require_new": {Type: cty.String, Optional: true},
					// This test predates the 0.12 work to integrate cty and
					// HCL, and so it was ported as-is where its expected
					// test output was clearly expecting a list of maps here
					// even though it is named "set".
					"set": {Type: cty.List(cty.Map(cty.String)), Optional: true},
					"lst": {Type: cty.List(cty.String), Optional: true},
				},
			},
		},
	})
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status: states.ObjectReady,
			AttrsJSON: []byte(`{
				"user_data":"x","require_new":"",
				"set":[{"a":"1"}],
				"lst":["j"]
			}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}
	res := plan.Changes.Resources[0]
	schema := p.GetProviderSchemaResponse.ResourceTypes[res.Addr.Resource.Resource.Type].Block
	ric, err := res.Decode(schema.ImpliedType())
	if err != nil {
		t.Fatal(err)
	}
	// Update, not Replace: the ignored attribute must not force recreation.
	if res.Action != plans.Update {
		t.Fatalf("resource %s should be updated, got %s", ric.Addr, ric.Action)
	}
	if ric.Addr.String() != "aws_instance.foo" {
		t.Fatalf("unknown resource: %s", ric.Addr)
	}
	// New elements ("k" in lst, "b" in set) are merged with prior state.
	checkVals(t, objectVal(t, schema, map[string]cty.Value{
		"lst": cty.ListVal([]cty.Value{
			cty.StringVal("j"),
			cty.StringVal("k"),
		}),
		"require_new": cty.StringVal(""),
		"user_data":   cty.StringVal("x"),
		"set": cty.ListVal([]cty.Value{cty.MapVal(map[string]cty.Value{
			"a": cty.StringVal("1"),
			"b": cty.StringVal("2"),
		})}),
	}), ric.After)
}
// TestContext2Plan_resourceNestedCount ensures resource sets that depend on
// the count of another resource set (ie: count of a data source that depends
// on another data source's instance count - data.x.foo.*.id) get properly
// normalized to the indexes they should be. This case comes up when there is
// an existing state (after an initial apply).
func TestContext2Plan_resourceNestedCount(t *testing.T) {
	// State models a three-level dependency chain (foo -> bar -> baz), each
	// with two instances; after refresh, planning must yield only no-ops.
	m := testModule(t, "nested-resource-count-plan")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	p.ReadResourceFn = func(req providers.ReadResourceRequest) providers.ReadResourceResponse {
		// Refresh returns the stored state unchanged.
		return providers.ReadResourceResponse{
			NewState: req.PriorState,
		}
	}
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	// Base instances: aws_instance.foo[0..1].
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"foo0","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"foo1","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	// aws_instance.bar[0..1] depend on foo.
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.bar[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"bar0","type":"aws_instance"}`),
			Dependencies: []addrs.ConfigResource{mustConfigResourceAddr("aws_instance.foo")},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.bar[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"bar1","type":"aws_instance"}`),
			Dependencies: []addrs.ConfigResource{mustConfigResourceAddr("aws_instance.foo")},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	// aws_instance.baz[0..1] depend on bar.
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.baz[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"baz0","type":"aws_instance"}`),
			Dependencies: []addrs.ConfigResource{mustConfigResourceAddr("aws_instance.bar")},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.baz[1]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"baz1","type":"aws_instance"}`),
			Dependencies: []addrs.ConfigResource{mustConfigResourceAddr("aws_instance.bar")},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})
	diags := ctx.Validate()
	if diags.HasErrors() {
		t.Fatalf("validate errors: %s", diags.Err())
	}
	_, diags = ctx.Refresh()
	if diags.HasErrors() {
		t.Fatalf("refresh errors: %s", diags.Err())
	}
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("plan errors: %s", diags.Err())
	}
	for _, res := range plan.Changes.Resources {
		if res.Action != plans.NoOp {
			t.Fatalf("resource %s should not change, plan returned %s", res.Addr, res.Action)
		}
	}
}
// Higher level test at TestResource_dataSourceListApplyPanic
func TestContext2Plan_computedAttrRefTypeMismatch(t *testing.T) {
	// Referencing a computed list where a string is expected must fail the
	// plan with an "Inappropriate value for attribute" error, caught by the
	// provider's validate hook below.
	m := testModule(t, "plan-computed-attr-ref-type-mismatch")
	p := testProvider("aws")
	p.ValidateResourceConfigFn = func(req providers.ValidateResourceConfigRequest) providers.ValidateResourceConfigResponse {
		var diags tfdiags.Diagnostics
		if req.TypeName == "aws_instance" {
			// "ami" must arrive as a string; anything else is reported.
			amiVal := req.Config.GetAttr("ami")
			if amiVal.Type() != cty.String {
				diags = diags.Append(fmt.Errorf("Expected ami to be cty.String, got %#v", amiVal))
			}
		}
		return providers.ValidateResourceConfigResponse{
			Diagnostics: diags,
		}
	}
	p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) (resp providers.ApplyResourceChangeResponse) {
		if req.TypeName != "aws_ami_list" {
			t.Fatalf("Reached apply for unexpected resource type! %s", req.TypeName)
		}
		// Pretend like we make a thing and the computed list "ids" is populated
		s := req.PlannedState.AsValueMap()
		s["id"] = cty.StringVal("someid")
		s["ids"] = cty.ListVal([]cty.Value{
			cty.StringVal("ami-abc123"),
			cty.StringVal("ami-bcd345"),
		})
		resp.NewState = cty.ObjectVal(s)
		return
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	_, diags := ctx.Plan()
	if !diags.HasErrors() {
		t.Fatalf("Succeeded; want type mismatch error for 'ami' argument")
	}
	expected := `Inappropriate value for attribute "ami"`
	if errStr := diags.Err().Error(); !strings.Contains(errStr, expected) {
		t.Fatalf("expected:\n\n%s\n\nto contain:\n\n%s", errStr, expected)
	}
}
func TestContext2Plan_selfRef(t *testing.T) {
p := testProvider("aws")
p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
},
})
m := testModule(t, "plan-self-ref")
c := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
})
diags := c.Validate()
if diags.HasErrors() {
t.Fatalf("unexpected validation failure: %s", diags.Err())
}
_, diags = c.Plan()
if !diags.HasErrors() {
t.Fatalf("plan succeeded; want error")
}
gotErrStr := diags.Err().Error()
wantErrStr := "Self-referential block"
if !strings.Contains(gotErrStr, wantErrStr) {
t.Fatalf("missing expected error\ngot: %s\n\nwant: error containing %q", gotErrStr, wantErrStr)
}
}
func TestContext2Plan_selfRefMulti(t *testing.T) {
p := testProvider("aws")
p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
},
})
m := testModule(t, "plan-self-ref-multi")
c := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
})
diags := c.Validate()
if diags.HasErrors() {
t.Fatalf("unexpected validation failure: %s", diags.Err())
}
_, diags = c.Plan()
if !diags.HasErrors() {
t.Fatalf("plan succeeded; want error")
}
gotErrStr := diags.Err().Error()
wantErrStr := "Self-referential block"
if !strings.Contains(gotErrStr, wantErrStr) {
t.Fatalf("missing expected error\ngot: %s\n\nwant: error containing %q", gotErrStr, wantErrStr)
}
}
func TestContext2Plan_selfRefMultiAll(t *testing.T) {
p := testProvider("aws")
p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.List(cty.String), Optional: true},
},
},
},
})
m := testModule(t, "plan-self-ref-multi-all")
c := testContext2(t, &ContextOpts{
Config: m,
Providers: map[addrs.Provider]providers.Factory{
addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
},
})
diags := c.Validate()
if diags.HasErrors() {
t.Fatalf("unexpected validation failure: %s", diags.Err())
}
_, diags = c.Plan()
if !diags.HasErrors() {
t.Fatalf("plan succeeded; want error")
}
gotErrStr := diags.Err().Error()
// The graph is checked for cycles before we can walk it, so we don't
// encounter the self-reference check.
//wantErrStr := "Self-referential block"
wantErrStr := "Cycle"
if !strings.Contains(gotErrStr, wantErrStr) {
t.Fatalf("missing expected error\ngot: %s\n\nwant: error containing %q", gotErrStr, wantErrStr)
}
}
func TestContext2Plan_invalidOutput(t *testing.T) {
	// A root output referencing a nonexistent data source attribute must
	// fail the plan with an "Unsupported attribute" error.
	m := testModuleInline(t, map[string]string{
		"main.tf": `
data "aws_data_source" "name" {}

output "out" {
  value = data.aws_data_source.name.missing
}`,
	})
	p := testProvider("aws")
	p.ReadDataSourceResponse = &providers.ReadDataSourceResponse{
		State: cty.ObjectVal(map[string]cty.Value{
			"id":  cty.StringVal("data_id"),
			"foo": cty.StringVal("foo"),
		}),
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	_, diags := ctx.Plan()
	if !diags.HasErrors() {
		// Should get this error:
		// Unsupported attribute: This object does not have an attribute named "missing"
		t.Fatal("succeeded; want errors")
	}
	gotErrStr := diags.Err().Error()
	wantErrStr := "Unsupported attribute"
	if !strings.Contains(gotErrStr, wantErrStr) {
		t.Fatalf("missing expected error\ngot: %s\n\nwant: error containing %q", gotErrStr, wantErrStr)
	}
}
func TestContext2Plan_invalidModuleOutput(t *testing.T) {
	// As TestContext2Plan_invalidOutput, but the bad reference lives in a
	// child module output consumed by a root resource; the plan must still
	// fail with "Unsupported attribute".
	m := testModuleInline(t, map[string]string{
		"child/main.tf": `
data "aws_data_source" "name" {}

output "out" {
  value = "${data.aws_data_source.name.missing}"
}`,
		"main.tf": `
module "child" {
  source = "./child"
}

resource "aws_instance" "foo" {
  foo = "${module.child.out}"
}`,
	})
	p := testProvider("aws")
	p.ReadDataSourceResponse = &providers.ReadDataSourceResponse{
		State: cty.ObjectVal(map[string]cty.Value{
			"id":  cty.StringVal("data_id"),
			"foo": cty.StringVal("foo"),
		}),
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	_, diags := ctx.Plan()
	if !diags.HasErrors() {
		// Should get this error:
		// Unsupported attribute: This object does not have an attribute named "missing"
		t.Fatal("succeeded; want errors")
	}
	gotErrStr := diags.Err().Error()
	wantErrStr := "Unsupported attribute"
	if !strings.Contains(gotErrStr, wantErrStr) {
		t.Fatalf("missing expected error\ngot: %s\n\nwant: error containing %q", gotErrStr, wantErrStr)
	}
}
func TestContext2Plan_variableValidation(t *testing.T) {
	// Provider-side config validation must be able to fail the plan when a
	// variable's default value is rejected.
	m := testModuleInline(t, map[string]string{
		"main.tf": `
variable "x" {
  default = "bar"
}

resource "aws_instance" "foo" {
  foo = var.x
}`,
	})
	p := testProvider("aws")
	p.ValidateResourceConfigFn = func(req providers.ValidateResourceConfigRequest) (resp providers.ValidateResourceConfigResponse) {
		// Reject the default value, forcing a validation error during plan.
		foo := req.Config.GetAttr("foo").AsString()
		if foo == "bar" {
			resp.Diagnostics = resp.Diagnostics.Append(errors.New("foo cannot be bar"))
		}
		return
	}
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
		resp.PlannedState = req.ProposedNewState
		return
	}
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})
	_, diags := ctx.Plan()
	if !diags.HasErrors() {
		// Should get the "foo cannot be bar" error appended by
		// ValidateResourceConfigFn above.
		t.Fatal("succeeded; want errors")
	}
}
// TestContext2Plan_variableSensitivity verifies that a value derived from a
// sensitive input variable is planned with a sensitive mark on the affected
// attribute path ("foo"), and that the change records that mark explicitly.
func TestContext2Plan_variableSensitivity(t *testing.T) {
	m := testModule(t, "plan-variable-sensitivity")

	p := testProvider("aws")
	// Plan passthrough: the provider proposes no changes of its own.
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
		resp.PlannedState = req.ProposedNewState
		return
	}

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "aws_instance.foo":
			// The decoded planned value carries the sensitive mark...
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"foo": cty.StringVal("foo").Mark(marks.Sensitive),
			}), ric.After)
			// ...and the serialized change records exactly one marked path.
			if len(res.ChangeSrc.BeforeValMarks) != 0 {
				t.Errorf("unexpected BeforeValMarks: %#v", res.ChangeSrc.BeforeValMarks)
			}
			if len(res.ChangeSrc.AfterValMarks) != 1 {
				t.Errorf("unexpected AfterValMarks: %#v", res.ChangeSrc.AfterValMarks)
				continue
			}
			pvm := res.ChangeSrc.AfterValMarks[0]
			if got, want := pvm.Path, cty.GetAttrPath("foo"); !got.Equals(want) {
				t.Errorf("unexpected path for mark\n got: %#v\nwant: %#v", got, want)
			}
			if got, want := pvm.Marks, cty.NewValueMarks(marks.Sensitive); !got.Equal(want) {
				t.Errorf("unexpected value for mark\n got: %#v\nwant: %#v", got, want)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_variableSensitivityModule verifies that sensitivity
// propagates through module input variables: both the statically sensitive
// value ("foo") and the caller-supplied one ("value") must carry sensitive
// marks in the planned change.
func TestContext2Plan_variableSensitivityModule(t *testing.T) {
	m := testModule(t, "plan-variable-sensitivity-module")

	p := testProvider("aws")
	// Plan passthrough: the provider proposes no changes of its own.
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
		resp.PlannedState = req.ProposedNewState
		return
	}

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Variables: InputValues{
			"another_var": &InputValue{
				Value:      cty.StringVal("boop"),
				SourceType: ValueFromCaller,
			},
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.Create {
			t.Fatalf("expected resource creation, got %s", res.Action)
		}
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "module.child.aws_instance.foo":
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"foo":   cty.StringVal("foo").Mark(marks.Sensitive),
				"value": cty.StringVal("boop").Mark(marks.Sensitive),
			}), ric.After)
			if len(res.ChangeSrc.BeforeValMarks) != 0 {
				t.Errorf("unexpected BeforeValMarks: %#v", res.ChangeSrc.BeforeValMarks)
			}
			if len(res.ChangeSrc.AfterValMarks) != 2 {
				t.Errorf("expected AfterValMarks to contain two elements: %#v", res.ChangeSrc.AfterValMarks)
				continue
			}
			// containsSensitiveMark reports whether the named attribute path
			// is recorded with exactly a sensitive mark.
			containsSensitiveMark := func(pvmSlice []cty.PathValueMarks, stepName string) bool {
				for _, pvm := range pvmSlice {
					if pvm.Path.Equals(cty.GetAttrPath(stepName)) {
						if pvm.Marks.Equal(cty.NewValueMarks(marks.Sensitive)) {
							return true
						}
					}
				}
				return false
			}
			// These messages previously read "unexpected ... to contain",
			// which inverted the meaning of the failure.
			if !containsSensitiveMark(res.ChangeSrc.AfterValMarks, "foo") {
				t.Error(`expected AfterValMarks to contain "foo" with sensitive mark`)
			}
			if !containsSensitiveMark(res.ChangeSrc.AfterValMarks, "value") {
				t.Error(`expected AfterValMarks to contain "value" with sensitive mark`)
			}
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// checkVals fails the test when the two cty.Values are not equal according
// to the shared comparers, printing a GoString-based diff on mismatch.
func checkVals(t *testing.T, expected, got cty.Value) {
	t.Helper()
	if cmp.Equal(expected, got, valueComparer, typeComparer, equateEmpty) {
		return
	}
	// The GoStringer format seems to result in the closest thing to a useful
	// diff for values with marks.
	// TODO: if we want to continue using cmp.Diff on cty.Values, we should
	// make a transformer that creates a more comparable structure.
	goStringTrans := cmp.Transformer("gostring", func(v cty.Value) string {
		return fmt.Sprintf("%#v\n", v)
	})
	t.Fatal(cmp.Diff(expected, got, goStringTrans, equateEmpty))
}
// objectVal coerces the given attribute map into an object value conforming
// to the supplied schema, failing the test on any coercion error.
func objectVal(t *testing.T, schema *configschema.Block, m map[string]cty.Value) cty.Value {
	t.Helper()
	coerced, err := schema.CoerceValue(cty.ObjectVal(m))
	if err != nil {
		t.Fatal(err)
	}
	return coerced
}
// TestContext2Plan_requiredModuleOutput verifies planning when a required
// resource attribute is fed from a module output: the module's own resource
// plans with its known value, while the root resource's attribute stays
// unknown until apply.
func TestContext2Plan_requiredModuleOutput(t *testing.T) {
	m := testModule(t, "plan-required-output")

	p := testProvider("test")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"test_resource": {
				Attributes: map[string]*configschema.Attribute{
					"id":       {Type: cty.String, Computed: true},
					"required": {Type: cty.String, Required: true},
				},
			},
		},
	})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["test_resource"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		t.Run(fmt.Sprintf("%s %s", res.Action, res.Addr), func(t *testing.T) {
			if res.Action != plans.Create {
				t.Fatalf("expected resource creation, got %s", res.Action)
			}
			ric, err := res.Decode(ty)
			if err != nil {
				t.Fatal(err)
			}

			var expected cty.Value
			switch i := ric.Addr.String(); i {
			case "test_resource.root":
				// Values not known until apply remain unknown in the plan.
				expected = objectVal(t, schema, map[string]cty.Value{
					"id":       cty.UnknownVal(cty.String),
					"required": cty.UnknownVal(cty.String),
				})
			case "module.mod.test_resource.for_output":
				expected = objectVal(t, schema, map[string]cty.Value{
					"id":       cty.UnknownVal(cty.String),
					"required": cty.StringVal("val"),
				})
			default:
				t.Fatal("unknown instance:", i)
			}

			checkVals(t, expected, ric.After)
		})
	}
}
// TestContext2Plan_requiredModuleObject is the whole-module-reference
// variant of TestContext2Plan_requiredModuleOutput: the fixture references
// the module as an object rather than a single output, but the expected plan
// results are the same.
func TestContext2Plan_requiredModuleObject(t *testing.T) {
	m := testModule(t, "plan-required-whole-mod")

	p := testProvider("test")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"test_resource": {
				Attributes: map[string]*configschema.Attribute{
					"id":       {Type: cty.String, Computed: true},
					"required": {Type: cty.String, Required: true},
				},
			},
		},
	})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}

	schema := p.GetProviderSchemaResponse.ResourceTypes["test_resource"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 2 {
		t.Fatal("expected 2 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		t.Run(fmt.Sprintf("%s %s", res.Action, res.Addr), func(t *testing.T) {
			if res.Action != plans.Create {
				t.Fatalf("expected resource creation, got %s", res.Action)
			}
			ric, err := res.Decode(ty)
			if err != nil {
				t.Fatal(err)
			}

			var expected cty.Value
			switch i := ric.Addr.String(); i {
			case "test_resource.root":
				// Values not known until apply remain unknown in the plan.
				expected = objectVal(t, schema, map[string]cty.Value{
					"id":       cty.UnknownVal(cty.String),
					"required": cty.UnknownVal(cty.String),
				})
			case "module.mod.test_resource.for_output":
				expected = objectVal(t, schema, map[string]cty.Value{
					"id":       cty.UnknownVal(cty.String),
					"required": cty.StringVal("val"),
				})
			default:
				t.Fatal("unknown instance:", i)
			}

			checkVals(t, expected, ric.After)
		})
	}
}
// TestContext2Plan_expandOrphan verifies that reducing a module's count
// orphans the instances beyond the new count: module.mod[1] must be planned
// for deletion while module.mod[0] is a no-op.
func TestContext2Plan_expandOrphan(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
module "mod" {
count = 1
source = "./mod"
}
`,
		"mod/main.tf": `
resource "aws_instance" "foo" {
}
`,
	})

	state := states.NewState()
	state.EnsureModule(addrs.RootModuleInstance.Child("mod", addrs.IntKey(0))).SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"child","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)
	// mod[1] exists in state but not in config (count = 1), so it is orphaned.
	state.EnsureModule(addrs.RootModuleInstance.Child("mod", addrs.IntKey(1))).SetResourceInstanceCurrent(
		mustResourceInstanceAddr("aws_instance.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"child","type":"aws_instance"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/aws"]`),
	)

	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		State: state,
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}

	expected := map[string]plans.Action{
		`module.mod[1].aws_instance.foo`: plans.Delete,
		`module.mod[0].aws_instance.foo`: plans.NoOp,
	}

	for _, res := range plan.Changes.Resources {
		// Use the comma-ok form so a change for an address we did not expect
		// is reported, instead of silently matching the zero Action (NoOp).
		want, ok := expected[res.Addr.String()]
		if !ok {
			t.Fatalf("unexpected resource change for %s: %s", res.Addr, res.Action)
		}
		if res.Action != want {
			t.Fatalf("expected %s action, got: %q %s", want, res.Addr, res.Action)
		}
		delete(expected, res.Addr.String())
	}

	for res, action := range expected {
		t.Errorf("missing %s change for %s", action, res)
	}
}
// TestContext2Plan_indexInVar verifies that a module input can be derived
// from another expanded module (here length(module.a)) and that the plan
// completes without errors.
func TestContext2Plan_indexInVar(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
module "a" {
count = 1
source = "./mod"
in = "test"
}
module "b" {
count = 1
source = "./mod"
in = length(module.a)
}
`,
		"mod/main.tf": `
resource "aws_instance" "foo" {
foo = var.in
}
variable "in" {
}
output"out" {
value = aws_instance.foo.id
}
`,
	})

	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	// Only the absence of diagnostics is asserted here; the plan contents
	// are not inspected.
	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}
}
// TestContext2Plan_targetExpandedAddress verifies targeting of expanded
// addresses: a single resource instance inside one module instance, and an
// entire module instance, can each be targeted independently.
func TestContext2Plan_targetExpandedAddress(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
module "mod" {
count = 3
source = "./mod"
}
`,
		"mod/main.tf": `
resource "aws_instance" "foo" {
count = 2
}
`,
	})

	p := testProvider("aws")

	targets := []addrs.Targetable{}
	target, diags := addrs.ParseTargetStr("module.mod[1].aws_instance.foo[0]")
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}
	targets = append(targets, target.Subject)

	target, diags = addrs.ParseTargetStr("module.mod[2]")
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}
	targets = append(targets, target.Subject)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Targets: targets,
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}

	expected := map[string]plans.Action{
		// the single targeted mod[1] instance
		`module.mod[1].aws_instance.foo[0]`: plans.Create,
		// the whole of mod[2]
		`module.mod[2].aws_instance.foo[0]`: plans.Create,
		`module.mod[2].aws_instance.foo[1]`: plans.Create,
	}

	for _, res := range plan.Changes.Resources {
		// Use the comma-ok form so a change for an address we did not expect
		// is reported, instead of silently matching the zero Action (NoOp).
		want, ok := expected[res.Addr.String()]
		if !ok {
			t.Fatalf("unexpected resource change for %s: %s", res.Addr, res.Action)
		}
		if res.Action != want {
			t.Fatalf("expected %s action, got: %q %s", want, res.Addr, res.Action)
		}
		delete(expected, res.Addr.String())
	}

	for res, action := range expected {
		t.Errorf("missing %s change for %s", action, res)
	}
}
// TestContext2Plan_targetResourceInModuleInstance verifies that targeting a
// resource within one specific module instance plans only that instance's
// resource, not the siblings created by count = 3.
func TestContext2Plan_targetResourceInModuleInstance(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
module "mod" {
count = 3
source = "./mod"
}
`,
		"mod/main.tf": `
resource "aws_instance" "foo" {
}
`,
	})

	p := testProvider("aws")

	target, diags := addrs.ParseTargetStr("module.mod[1].aws_instance.foo")
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}

	targets := []addrs.Targetable{target.Subject}

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Targets: targets,
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}

	expected := map[string]plans.Action{
		// the single targeted mod[1] instance
		`module.mod[1].aws_instance.foo`: plans.Create,
	}

	for _, res := range plan.Changes.Resources {
		want := expected[res.Addr.String()]
		if res.Action != want {
			t.Fatalf("expected %s action, got: %q %s", want, res.Addr, res.Action)
		}
		delete(expected, res.Addr.String())
	}

	// Anything left in the map was expected but never planned.
	for res, action := range expected {
		t.Errorf("missing %s change for %s", action, res)
	}
}
// TestContext2Plan_moduleRefIndex verifies that an entire for_each module
// instance (module.mod["a"]) can be referenced as an object value from
// another module call, and that the plan completes without errors.
func TestContext2Plan_moduleRefIndex(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
module "mod" {
for_each = {
a = "thing"
}
in = null
source = "./mod"
}
module "single" {
source = "./mod"
in = module.mod["a"]
}
`,
		"mod/main.tf": `
variable "in" {
}
output "out" {
value = "foo"
}
resource "aws_instance" "foo" {
}
`,
	})

	p := testProvider("aws")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
	})

	// Only the absence of diagnostics is asserted here.
	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}
}
// TestContext2Plan_noChangeDataPlan verifies that a data source whose prior
// state already matches the provider's read result plans as NoOp.
func TestContext2Plan_noChangeDataPlan(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
data "test_data_source" "foo" {}
`,
	})

	p := new(MockProvider)
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		DataSources: map[string]*configschema.Block{
			"test_data_source": {
				Attributes: map[string]*configschema.Attribute{
					"id": {
						Type:     cty.String,
						Computed: true,
					},
					"foo": {
						Type:     cty.String,
						Optional: true,
					},
				},
			},
		},
	})

	// The read result is identical to the values stored in state below.
	p.ReadDataSourceResponse = &providers.ReadDataSourceResponse{
		State: cty.ObjectVal(map[string]cty.Value{
			"id":  cty.StringVal("data_id"),
			"foo": cty.StringVal("foo"),
		}),
	}

	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("data.test_data_source.foo").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:    states.ObjectReady,
			AttrsJSON: []byte(`{"id":"data_id", "foo":"foo"}`),
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
	)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
		State: state,
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}

	for _, res := range plan.Changes.Resources {
		if res.Action != plans.NoOp {
			t.Fatalf("expected NoOp, got: %q %s", res.Addr, res.Action)
		}
	}
}
// TestContext2Plan_scaleInForEach verifies that a for_each expression can
// reference a resource that now has 0 instances (local.m is empty), even
// though state still contains instances from a previous run; the plan must
// succeed without errors.
func TestContext2Plan_scaleInForEach(t *testing.T) {
	p := testProvider("test")

	m := testModuleInline(t, map[string]string{
		"main.tf": `
locals {
m = {}
}
resource "test_instance" "a" {
for_each = local.m
}
resource "test_instance" "b" {
for_each = test_instance.a
}
`})

	// Prior state: one orphaned instance of "a" plus "b" which depended on it.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("test_instance.a[0]").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"a0"}`),
			Dependencies: []addrs.ConfigResource{},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
	)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("test_instance.b").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"b"}`),
			Dependencies: []addrs.ConfigResource{mustConfigResourceAddr("test_instance.a")},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
	)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
		State: state,
	})

	_, diags := ctx.Plan()
	assertNoErrors(t, diags)
}
// TestContext2Plan_targetedModuleInstance verifies that targeting a whole
// module instance (module.mod[0]) limits the plan to that instance's
// single resource.
func TestContext2Plan_targetedModuleInstance(t *testing.T) {
	m := testModule(t, "plan-targeted")
	p := testProvider("aws")
	p.PlanResourceChangeFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("aws"): testProviderFuncFixed(p),
		},
		Targets: []addrs.Targetable{
			addrs.RootModuleInstance.Child("mod", addrs.IntKey(0)),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("unexpected errors: %s", diags.Err())
	}
	schema := p.GetProviderSchemaResponse.ResourceTypes["aws_instance"].Block
	ty := schema.ImpliedType()

	if len(plan.Changes.Resources) != 1 {
		t.Fatal("expected 1 changes, got", len(plan.Changes.Resources))
	}

	for _, res := range plan.Changes.Resources {
		ric, err := res.Decode(ty)
		if err != nil {
			t.Fatal(err)
		}
		switch i := ric.Addr.String(); i {
		case "module.mod[0].aws_instance.foo":
			if res.Action != plans.Create {
				t.Fatalf("resource %s should be created", i)
			}
			checkVals(t, objectVal(t, schema, map[string]cty.Value{
				"id":   cty.UnknownVal(cty.String),
				"num":  cty.NumberIntVal(2),
				"type": cty.UnknownVal(cty.String),
			}), ric.After)
		default:
			t.Fatal("unknown instance:", i)
		}
	}
}
// TestContext2Plan_dataRefreshedInPlan verifies that a data source is read
// during the plan's refresh phase and stored as ObjectReady in the plan's
// prior state.
func TestContext2Plan_dataRefreshedInPlan(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
data "test_data_source" "d" {
}
`})

	p := testProvider("test")
	p.ReadDataSourceResponse = &providers.ReadDataSourceResponse{
		State: cty.ObjectVal(map[string]cty.Value{
			"id":  cty.StringVal("this"),
			"foo": cty.NullVal(cty.String),
		}),
	}

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.ErrWithWarnings())
	}

	d := plan.PriorState.ResourceInstance(mustResourceInstanceAddr("data.test_data_source.d"))
	if d == nil || d.Current == nil {
		t.Fatal("data.test_data_source.d not found in state:", plan.PriorState)
	}

	if d.Current.Status != states.ObjectReady {
		t.Fatal("expected data.test_data_source.d to be fully read in refreshed state, got status", d.Current.Status)
	}
}
// TestContext2Plan_dataReferencesResource verifies that a data source which
// references a managed resource not yet created defers its read until apply:
// ReadDataSource must never be called during this plan.
func TestContext2Plan_dataReferencesResource(t *testing.T) {
	p := testProvider("test")

	// Fail the test via diagnostics if the data source is read during plan.
	p.ReadDataSourceFn = func(req providers.ReadDataSourceRequest) (resp providers.ReadDataSourceResponse) {
		resp.Diagnostics = resp.Diagnostics.Append(fmt.Errorf("data source should not be read"))
		return resp
	}

	m := testModuleInline(t, map[string]string{
		"main.tf": `
locals {
x = "value"
}

resource "test_resource" "a" {
value = local.x
}

// test_resource.a.value can be resolved during plan, but the reference implies
// that the data source should wait until the resource is created.
data "test_data_source" "d" {
foo = test_resource.a.value
}

// ensure referencing an indexed instance that has not yet created will also
// delay reading the data source
resource "test_resource" "b" {
count = 2
value = local.x
}

data "test_data_source" "e" {
foo = test_resource.b[0].value
}
`})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})

	_, diags := ctx.Plan()
	assertNoErrors(t, diags)
}
// TestContext2Plan_skipRefresh verifies that with SkipRefresh set the
// provider's ReadResource is never called, and the plan is still a NoOp for
// an unchanged resource.
func TestContext2Plan_skipRefresh(t *testing.T) {
	p := testProvider("test")
	p.PlanResourceChangeFn = testDiffFn

	m := testModuleInline(t, map[string]string{
		"main.tf": `
resource "test_instance" "a" {
}
`})

	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("test_instance.a").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"a","type":"test_instance"}`),
			Dependencies: []addrs.ConfigResource{},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
	)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
		State:       state,
		SkipRefresh: true,
	})

	plan, diags := ctx.Plan()
	assertNoErrors(t, diags)

	if p.ReadResourceCalled {
		t.Fatal("Resource should not have been refreshed")
	}

	for _, c := range plan.Changes.Resources {
		if c.Action != plans.NoOp {
			t.Fatalf("expected no changes, got %s for %q", c.Action, c.Addr)
		}
	}
}
// TestContext2Plan_dataInModuleDependsOn verifies that a module-level
// depends_on does not prevent a data source in the dependent module from
// being read during plan when the dependency only involves other data
// source changes.
func TestContext2Plan_dataInModuleDependsOn(t *testing.T) {
	p := testProvider("test")

	// Track whether data source "b" (in module b) was actually read.
	readDataSourceB := false
	p.ReadDataSourceFn = func(req providers.ReadDataSourceRequest) (resp providers.ReadDataSourceResponse) {
		cfg := req.Config.AsValueMap()
		foo := cfg["foo"].AsString()

		// Returning "new" for foo ensures data source "a" produces a change.
		cfg["id"] = cty.StringVal("ID")
		cfg["foo"] = cty.StringVal("new")

		if foo == "b" {
			readDataSourceB = true
		}

		resp.State = cty.ObjectVal(cfg)
		return resp
	}

	m := testModuleInline(t, map[string]string{
		"main.tf": `
module "a" {
source = "./mod_a"
}

module "b" {
source = "./mod_b"
depends_on = [module.a]
}`,
		"mod_a/main.tf": `
data "test_data_source" "a" {
foo = "a"
}`,
		"mod_b/main.tf": `
data "test_data_source" "b" {
foo = "b"
}`,
	})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})

	_, diags := ctx.Plan()
	assertNoErrors(t, diags)

	// The change to data source a should not prevent data source b from being
	// read.
	if !readDataSourceB {
		t.Fatal("data source b was not read during plan")
	}
}
// TestContext2Plan_rpcDiagnostics verifies that warnings returned by the
// provider's PlanResourceChange are surfaced in the plan's diagnostics
// without being treated as errors.
func TestContext2Plan_rpcDiagnostics(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
resource "test_instance" "a" {
}
`,
	})

	p := testProvider("test")
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
		resp := testDiffFn(req)
		// Attach a warning (not an error) to every planned change.
		resp.Diagnostics = resp.Diagnostics.Append(tfdiags.SimpleWarning("don't frobble"))
		return resp
	}

	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"test_instance": {
				Attributes: map[string]*configschema.Attribute{
					"id": {Type: cty.String, Computed: true},
				},
			},
		},
	})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})

	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.Err())
	}

	if len(diags) == 0 {
		t.Fatal("expected warnings")
	}

	// Every diagnostic present must be our warning.
	for _, d := range diags {
		des := d.Description().Summary
		if !strings.Contains(des, "frobble") {
			t.Fatalf(`expected frobble, got %q`, des)
		}
	}
}
// TestContext2Plan_legacyProviderIgnoreChanges verifies that ignore_changes
// is re-applied to the planned value for providers using the
// LegacyTypeSystem: even though the provider rewrites "data" to "bar", the
// ignored attribute keeps its prior value and the plan is a NoOp.
func TestContext2Plan_legacyProviderIgnoreChanges(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
resource "test_instance" "a" {
lifecycle {
ignore_changes = [data]
}
}
`,
	})

	p := testProvider("test")
	p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) (resp providers.PlanResourceChangeResponse) {
		m := req.ProposedNewState.AsValueMap()
		// this provider "hashes" the data attribute as bar
		m["data"] = cty.StringVal("bar")

		resp.PlannedState = cty.ObjectVal(m)
		resp.LegacyTypeSystem = true
		return resp
	}

	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"test_instance": {
				Attributes: map[string]*configschema.Attribute{
					"id":   {Type: cty.String, Computed: true},
					"data": {Type: cty.String, Optional: true},
				},
			},
		},
	})

	// Prior state holds data = "foo", which ignore_changes must preserve.
	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("test_instance.a").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"a","data":"foo"}`),
			Dependencies: []addrs.ConfigResource{},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
	)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
		State: state,
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.Err())
	}

	for _, c := range plan.Changes.Resources {
		if c.Action != plans.NoOp {
			t.Fatalf("expected no changes, got %s for %q", c.Action, c.Addr)
		}
	}
}
// TestContext2Plan_validateIgnoreAll plans a resource with
// `ignore_changes = all` while the provider's validation rejects any config
// that sets "id"; the plan must succeed, i.e. validation sees only the
// written config. (NOTE(review): the fixture's intent appears to be that
// state values are not folded into the validated config — confirm against
// the ignore_changes implementation.)
func TestContext2Plan_validateIgnoreAll(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
resource "test_instance" "a" {
lifecycle {
ignore_changes = all
}
}
`,
	})

	p := testProvider("test")
	p.GetProviderSchemaResponse = getProviderSchemaResponseFromProviderSchema(&ProviderSchema{
		ResourceTypes: map[string]*configschema.Block{
			"test_instance": {
				Attributes: map[string]*configschema.Attribute{
					"id":   {Type: cty.String, Computed: true},
					"data": {Type: cty.String, Optional: true},
				},
			},
		},
	})
	// Reject any configuration where "id" is non-null.
	p.ValidateResourceConfigFn = func(req providers.ValidateResourceConfigRequest) providers.ValidateResourceConfigResponse {
		var diags tfdiags.Diagnostics
		if req.TypeName == "test_instance" {
			if !req.Config.GetAttr("id").IsNull() {
				diags = diags.Append(errors.New("id cannot be set in config"))
			}
		}
		return providers.ValidateResourceConfigResponse{
			Diagnostics: diags,
		}
	}

	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("test_instance.a").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"a","data":"foo"}`),
			Dependencies: []addrs.ConfigResource{},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
	)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
		State: state,
	})

	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.Err())
	}
}
// TestContext2Plan_dataRemovalNoProvider verifies that an orphaned data
// source whose provider is no longer configured can still be removed from
// state during plan, as long as the provider is available to decode state.
func TestContext2Plan_dataRemovalNoProvider(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
resource "test_instance" "a" {
}
`,
	})

	p := testProvider("test")

	state := states.NewState()
	root := state.EnsureModule(addrs.RootModuleInstance)
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("test_instance.a").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"a","data":"foo"}`),
			Dependencies: []addrs.ConfigResource{},
		},
		mustProviderConfig(`provider["registry.terraform.io/hashicorp/test"]`),
	)

	// the provider for this data source is no longer in the config, but that
	// should not matter for state removal.
	root.SetResourceInstanceCurrent(
		mustResourceInstanceAddr("data.test_data_source.d").Resource,
		&states.ResourceInstanceObjectSrc{
			Status:       states.ObjectReady,
			AttrsJSON:    []byte(`{"id":"d"}`),
			Dependencies: []addrs.ConfigResource{},
		},
		mustProviderConfig(`provider["registry.terraform.io/local/test"]`),
	)

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
			// We still need to be able to locate the provider to decode the
			// state, since we do not know during init that this provider is
			// only used for an orphaned data source.
			addrs.NewProvider("registry.terraform.io", "local", "test"): testProviderFuncFixed(p),
		},
		State: state,
	})

	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.Err())
	}
}
// TestContext2Plan_noSensitivityChange verifies that when values and their
// sensitivity marks are unchanged between state and config, the plan is a
// NoOp (no spurious update triggered by the marks themselves).
func TestContext2Plan_noSensitivityChange(t *testing.T) {
	m := testModuleInline(t, map[string]string{
		"main.tf": `
variable "sensitive_var" {
default = "hello"
sensitive = true
}

resource "test_resource" "foo" {
value = var.sensitive_var
sensitive_value = var.sensitive_var
}`,
	})

	p := testProvider("test")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
		// Prior state already holds the same values with the same sensitive
		// paths recorded.
		State: states.BuildState(func(s *states.SyncState) {
			s.SetResourceInstanceCurrent(
				addrs.Resource{
					Mode: addrs.ManagedResourceMode,
					Type: "test_resource",
					Name: "foo",
				}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
				&states.ResourceInstanceObjectSrc{
					Status:    states.ObjectReady,
					AttrsJSON: []byte(`{"id":"foo", "value":"hello", "sensitive_value":"hello"}`),
					AttrSensitivePaths: []cty.PathValueMarks{
						{Path: cty.Path{cty.GetAttrStep{Name: "value"}}, Marks: cty.NewValueMarks(marks.Sensitive)},
						{Path: cty.Path{cty.GetAttrStep{Name: "sensitive_value"}}, Marks: cty.NewValueMarks(marks.Sensitive)},
					},
				},
				addrs.AbsProviderConfig{
					Provider: addrs.NewDefaultProvider("test"),
					Module:   addrs.RootModule,
				},
			)
		}),
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.Err())
	}

	for _, c := range plan.Changes.Resources {
		if c.Action != plans.NoOp {
			t.Fatalf("expected no changes, got %s for %q", c.Action, c.Addr)
		}
	}
}
// TestContext2Plan_variableCustomValidationsSensitive verifies that a custom
// variable validation failing on a sensitive variable still produces the
// expected "Invalid value for variable" error during plan.
func TestContext2Plan_variableCustomValidationsSensitive(t *testing.T) {
	m := testModule(t, "validate-variable-custom-validations-child-sensitive")

	p := testProvider("test")
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		Providers: map[addrs.Provider]providers.Factory{
			addrs.NewDefaultProvider("test"): testProviderFuncFixed(p),
		},
	})

	_, diags := ctx.Plan()
	if !diags.HasErrors() {
		t.Fatal("succeeded; want errors")
	}
	if got, want := diags.Err().Error(), `Invalid value for variable: Value must not be "nope".`; !strings.Contains(got, want) {
		t.Fatalf("wrong error:\ngot: %s\nwant: message containing %q", got, want)
	}
}
// TestContext2Plan_nullOutputNoOp verifies that an output whose value stays
// null (the false branch of the conditional) plans as a NoOp change rather
// than an update against the null value already in state.
func TestContext2Plan_nullOutputNoOp(t *testing.T) {
	// this should always plan a NoOp change for the output
	m := testModuleInline(t, map[string]string{
		"main.tf": `
output "planned" {
value = false ? 1 : null
}
`,
	})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		State: states.BuildState(func(s *states.SyncState) {
			r := s.Module(addrs.RootModuleInstance)
			r.SetOutputValue("planned", cty.NullVal(cty.DynamicPseudoType), false)
		}),
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.Err())
	}

	for _, c := range plan.Changes.Outputs {
		if c.Action != plans.NoOp {
			t.Fatalf("expected no changes, got %s for %q", c.Action, c.Addr)
		}
	}
}
// TestContext2Plan_createOutput verifies that an output not present in the
// (empty) prior state is planned as a Create change.
func TestContext2Plan_createOutput(t *testing.T) {
	// this should always plan a Create change for the output, since the
	// starting state is empty (the previous comment here incorrectly said
	// NoOp).
	m := testModuleInline(t, map[string]string{
		"main.tf": `
output "planned" {
value = 1
}
`,
	})

	ctx := testContext2(t, &ContextOpts{
		Config: m,
		State:  states.NewState(),
	})

	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatal(diags.Err())
	}

	for _, c := range plan.Changes.Outputs {
		if c.Action != plans.Create {
			t.Fatalf("expected Create change, got %s for %q", c.Action, c.Addr)
		}
	}
}
////////////////////////////////////////////////////////////////////////////////
// NOTE: Due to the size of this file, new tests should be added to
// context_plan2_test.go.
////////////////////////////////////////////////////////////////////////////////
| ewbankkit/terraform | internal/terraform/context_plan_test.go | GO | mpl-2.0 | 194,480 |
// ======================================================================
using System;
using System.Collections.Generic;
using System.Reflection;
using CommandLine;
using CommandLine.Text;
namespace MangaRack.Internals {
/// <summary>
/// Represents a collection of options.
/// </summary>
class Options {
#region Constructor
/// <summary>
/// Initialize a new instance of the Options class, applying the defaults:
/// "cbz" archive extension, one worker thread per logical processor, and a
/// batch source file named after the executing assembly (e.g. "MangaRack.txt").
/// </summary>
public Options() {
	FileExtension = "cbz";
	MaximumParallelWorkerThreads = Environment.ProcessorCount;
	SourceFile = string.Format("{0}.txt", Assembly.GetExecutingAssembly().GetName().Name);
}
#endregion
#region Methods
/// <summary>
/// Builds the command-line help text for this option set, prefixed with a
/// usage line, and returns it as a string.
/// </summary>
[HelpOption]
public override string ToString() {
	var helpText = HelpText.AutoBuild(this);
	helpText.AddPreOptionsLine("\r\n Usage: mangarack [options] [location, ...]");
	return helpText.ToString();
}
#endregion
#region Properties
/// <summary>
/// Indicates whether animation framing is disabled (-a, --animation).
/// </summary>
[Option('a', "animation", HelpText = "Disable animation framing.")]
public bool DisableAnimationFraming { get; set; }

/// <summary>
/// Indicates whether duplication prevention is disabled (-d, --duplication).
/// </summary>
[Option('d', "duplication", HelpText = "Disable duplication prevention.")]
public bool DisableDuplicationPrevention { get; set; }

/// <summary>
/// Indicates whether footer incision is disabled (-f, --footer).
/// </summary>
[Option('f', "footer", HelpText = "Disable footer incision.")]
public bool DisableFooterIncision { get; set; }

/// <summary>
/// Indicates whether grayscale size comparison and save is disabled (-g, --grayscale).
/// </summary>
[Option('g', "grayscale", HelpText = "Disable grayscale size comparison and save.")]
public bool DisableGrayscaleSizeComparisonAndSave { get; set; }

/// <summary>
/// Indicates whether image processing is disabled (-i, --image).
/// </summary>
[Option('i', "image", HelpText = "Disable image processing.")]
public bool DisableImageProcessing { get; set; }

/// <summary>
/// Indicates whether keep-alive behavior is disabled (-k, --keep-alive).
/// </summary>
[Option('k', "keep-alive", HelpText = "Disable keep-alive behavior.")]
public bool DisableKeepAliveBehavior { get; set; }

/// <summary>
/// Indicates whether embedded meta-information is disabled (-m, --meta).
/// </summary>
[Option('m', "meta", HelpText = "Disable embedded meta information.")]
public bool DisableMetaInformation { get; set; }

/// <summary>
/// Indicates whether repair and error tracking is disabled (-r, --repair).
/// </summary>
[Option('r', "repair", HelpText = "Disable repair and error tracking.")]
public bool DisableRepairAndErrorTracking { get; set; }

/// <summary>
/// Indicates whether total elapsed time notification is disabled (-t, --total).
/// </summary>
[Option('t', "total", HelpText = "Disable total elapsed time notification.")]
public bool DisableTotalElapsedTime { get; set; }
/// <summary>
/// Indicates whether embedded meta-information is overwritten.
/// </summary>
[Option('o', "overwrite", HelpText = "Enable embedded meta information overwriting.")]
public bool EnableOverwriteMetaInformation { get; set; }
/// <summary>
/// Indicates whether persistent synchronization is enabled.
/// </summary>
[Option('p', "persistent", HelpText = "Enable persistent synchronization.")]
public bool EnablePersistentSynchronization { get; set; }
/// <summary>
/// Contains the file extension for each output file.
/// </summary>
[Option('e', "extension", HelpText = "The file extension for each output file. (Default: cbz)")]
public string FileExtension { get; set; }
/// <summary>
/// Contains the chapter filter.
/// </summary>
[Option('c', "chapter", HelpText = "The chapter filter.")]
public double FilterOnChapter { get; set; }
/// <summary>
/// Contains the volume filter.
/// </summary>
[Option('v', "volume", HelpText = "The volume filter.")]
public double FilterOnVolume { get; set; }
/// <summary>
/// Contains each location.
/// </summary>
[ValueList(typeof(List<string>))]
public IList<string> Locations { get; set; }
/// <summary>
/// Contains the maximum parallel worker threads.
/// </summary>
[Option('w', "worker", HelpText = "The maximum parallel worker threads. (Default: # cores)")]
public int MaximumParallelWorkerThreads { get; set; }
/// <summary>
/// Contains the batch-mode source file.
/// </summary>
[Option('s', "source", HelpText = "The batch-mode source file. (Default: MangaRack.txt)")]
public string SourceFile { get; set; }
#endregion
}
} | Deathspike/mangarack.cs | MangaRack/Internals/Options.cs | C# | mpl-2.0 | 4,812 |
/*
 Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved.
 For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Portuguese (pt) localization strings for the CKEditor "placeholder"
// plugin: dialog title, toolbar label, field name, validation message and
// element-path name. Minified; registered via CKEDITOR.plugins.setLang.
CKEDITOR.plugins.setLang("placeholder","pt",{title:"Propriedades dos marcadores",toolbar:"Símbolo",name:"Nome do marcador",invalidName:"O marcador não pode estar em branco e não pode conter qualquer dos seguintes carateres: [, ], <, >",pathName:"símbolo"}); | crunchmail/dentifrice | dist/ckeditor/plugins/placeholder/lang/pt.js | JavaScript | mpl-2.0 | 406 |
#include <minizinc/solvers/gecode_solverfactory.hh>
#include <minizinc/solvers/gecode_solverinstance.hh>
namespace MiniZinc {
namespace {
void get_wrapper() { static GecodeSolverFactory _gecode_solverfactory; }
} // namespace
GecodeSolverFactoryInitialiser::GecodeSolverFactoryInitialiser() { get_wrapper(); }
} // namespace MiniZinc
| MiniZinc/libminizinc | solvers/gecode/gecode_solverfactory.cpp | C++ | mpl-2.0 | 337 |
# -*- coding: utf-8 -*-
# © 2009 Pexego/Comunitea
# © 2011-2012 Iker Coranti (www.avanzosc.es)
# © 2014 Juanjo Algaz (gutierrezweb.es)
# © 2014-2016 Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl-3.0).
# OpenERP/Odoo addon manifest for the account balance reporting engine:
# declares module metadata, its dependencies, and the data/view/report
# files loaded at installation time.
{
    "name": "Account balance reporting engine",
    # Version scheme: <Odoo series>.<module major.minor.patch>
    "version": "8.0.1.2.0",
    "author": "Pexego, "
              "AvanzOSC, "
              "Tecnativa, "
              "Odoo Community Association (OCA)",
    "website": "http://www.pexego.es",
    "category": "Accounting & Finance",
    "contributors": [
        "Juanjo Algaz <juanjoa@malagatic.com>",
        "Joaquín Gutierrez <joaquing.pedrosa@gmail.com>",
        "Pedro M. Baeza <pedro.baeza@tecnativa.com>",
        "Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>",
    ],
    "license": 'AGPL-3',
    # Modules that must be installed before this one
    "depends": [
        "account",
    ],
    # XML/CSV files loaded on installation; order matters (security first,
    # then views, then reports and wizards)
    "data": [
        "security/ir.model.access.csv",
        "views/account_account_view.xml",
        "views/account_balance_reporting_template_view.xml",
        "views/account_balance_reporting_report_view.xml",
        "views/account_balance_reporting_menu.xml",
        "report/account_balance_reporting_reports.xml",
        "report/report_generic.xml",
        "wizard/wizard_print_view.xml",
    ],
    "installable": True,
}
| Endika/l10n-spain | account_balance_reporting/__openerp__.py | Python | agpl-3.0 | 1,278 |
/*
Copyright (C) 1999 Claude SIMON (http://q37.info/contact/).
This file is part of the Epeios framework.
The Epeios framework is free software: you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
The Epeios framework is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with the Epeios framework. If not, see <http://www.gnu.org/licenses/>
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "rgstry.h"
#include "err.h"
#include "cio.h"
using cio::CIn;
using cio::COut;
using cio::CErr;
// Placeholder test routine with an empty body. The q* macros lay out the
// Epeios error-handling scaffold: qRH (handler header), qRB (body), qRR
// (recovery), qRT (termination), qRE (end).
// NOTE(review): macro semantics assumed from conventional Epeios usage —
// confirm against "err.h".
void Generic( int argc, char *argv[] )
{
qRH
qRB
qRR
qRT
qRE
}
// Entry point: prints the library banner (name plus compile-time date/time,
// via adjacent-literal concatenation of __DATE__ and __TIME__) inside the
// Epeios qRF* scaffold, then returns the framework's exit value.
int main( int argc, char *argv[] )
{
qRFH
qRFB
	COut << "Test of library " << RGSTRY_NAME << ' ' << __DATE__" "__TIME__"\n";
qRFR
qRFT
qRFE
	return ERRExitValue;
}
| epeios-q37/epeios | devel/rgstry/rgstry_test.cpp | C++ | agpl-3.0 | 1,175 |
<?php
namespace UpgradeDigital;
/**
* Resource tag count model represents a count of resources grouped by a
* particular tag set e.g. room type.
* PHP version 5
*
* @package Client
* @author Damien Allison <damien@upgrade-digital.com>
* @copyright 2015 Upgrade Digital
* @license https://github.com/Upgrade-Digital/client/blob/master/LICENSE.md
* @link https://github.com/Upgrade-Digital/client
*/
class ResourceTagCount {

  /**
   * Unique resource name (URN) identifying this count entry.
   * @var string
   */
  public $urn;

  /**
   * The tag used to group the results, typically resourceCode.
   * @var Tag
   */
  public $tag;

  /**
   * Tag list used to pass information on the resource tag grouping.
   * FIX: this doc block previously had no backing property declaration,
   * leaving the documented field undefined on the class.
   * @var Tag[]
   */
  public $tags;

  /**
   * Number of resources in this grouping.
   * @var integer
   */
  public $count;

  /**
   * Rates applicable to the grouped resources.
   * @var ResourceRate[]
   */
  public $rates;

  /**
   * Product selectors associated with the grouped resources.
   * @var ProductSelector[]
   */
  public $products;
}
| Upgrade-Digital/client | php/UpgradeDigital/ResourceTagCount.php | PHP | agpl-3.0 | 864 |
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
    'name': 'Capture picture with webcam',
    'version': '1.0',
    'category': 'Generic Modules/Human Resources',
    'description': """
TApplicant WebCam
=========
Capture employee pictures with an attached web cam.
""",
    'author': "Michael Telahun Makonnen <mmakonnen@gmail.com>,"
              "Odoo Community Association (OCA)",
    'website': 'http://miketelahun.wordpress.com',
    'license': 'AGPL-3',
    # Modules required before this one can be installed
    'depends': [
        'hr',
        'web',
        'trip'
    ],
    # Client-side assets registered the pre-7.0 way ('js'/'css'/'qweb'
    # manifest keys); NOTE(review): newer OpenERP versions expect asset
    # bundles in XML instead — confirm target version before migrating.
    'js': [
        'static/src/js/jquery.webcam.js',
        'static/src/js/tapplicant_webcam.js',
    ],
    'css': [
        'static/src/css/tapplicant_webcam.css',
    ],
    'qweb': [
        'static/src/xml/tapplicant_webcam.xml',
    ],
    # Data/view files loaded on installation, in order
    'data': [
        'tapplicant_webcam_data.xml',
        'tapplicant_webcam_view.xml',
    ],
    'installable': True,
    # 'active' is the legacy auto-install flag
    'active': False,
}
| nishad-jobsglobal/odoo-marriot | openerp/addons/tapplicant_webcam/__openerp__.py | Python | agpl-3.0 | 1,685 |
/*
* Copyright (C) 2000 - 2021 Silverpeas
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* Open Source Software ("FLOSS") applications as described in Silverpeas's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* "https://www.silverpeas.org/legal/floss_exception.html"
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.silverpeas.core.mylinks.dao;
import org.silverpeas.core.persistence.jdbc.sql.JdbcSqlQuery;
import java.sql.SQLException;
import java.util.List;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
/**
* @author silveryocha
*/
public class MyLinksDAOITUtil {

  /** Utility class: not meant to be instantiated. */
  private MyLinksDAOITUtil() {
  }

  /**
   * Asserts that {@code actual} contains exactly the {@code expected}
   * elements, in order; an empty expectation asserts an empty list.
   * Factored out of the three assert helpers below, which previously
   * duplicated this logic verbatim.
   * @param actual the list fetched from the database
   * @param expected the expected elements in expected order
   */
  @SafeVarargs
  private static <T> void assertContainsExactly(final List<T> actual, final T... expected) {
    if (expected.length == 0) {
      assertThat(actual, empty());
    } else {
      assertThat(actual, contains(expected));
    }
  }

  /** Asserts the link ids fetched from SB_MyLinks_Link. */
  static void assertLinkIds(final List<Integer> actualIds, final Integer... expectedIds) {
    assertContainsExactly(actualIds, expectedIds);
  }

  /** Fetches all link identifiers, ordered by id. */
  static List<Integer> getAllLinkIds() throws SQLException {
    return JdbcSqlQuery.createSelect("linkid")
        .from("SB_MyLinks_Link")
        .orderBy("linkid")
        .execute(r -> r.getInt(1));
  }

  /** Asserts the category ids fetched from SB_MyLinks_Cat. */
  static void assertCategoryIds(final List<Integer> actualIds, final Integer... expectedIds) {
    assertContainsExactly(actualIds, expectedIds);
  }

  /** Fetches all category identifiers, ordered by id. */
  static List<Integer> getAllCategoryIds() throws SQLException {
    return JdbcSqlQuery.createSelect("catid")
        .from("SB_MyLinks_Cat")
        .orderBy("catid")
        .execute(r -> r.getInt(1));
  }

  /** Asserts the category/link couples fetched from SB_MyLinks_LinkCat. */
  static void assertOfCouples(final List<String> actualCouples, final String... expectedCouples) {
    assertContainsExactly(actualCouples, expectedCouples);
  }

  /** Fetches all category/link couples as "catid/linkid" strings. */
  static List<String> getAllOfCouples() throws SQLException {
    return JdbcSqlQuery.createSelect("*")
        .from("SB_MyLinks_LinkCat")
        .orderBy("catid, linkid")
        .execute(r -> r.getInt("catid") + "/" + r.getInt("linkid"));
  }
}
| SilverDav/Silverpeas-Core | core-services/mylinks/src/integration-test/java/org/silverpeas/core/mylinks/dao/MyLinksDAOITUtil.java | Java | agpl-3.0 | 2,953 |
/*
* Funambol is a mobile platform developed by Funambol, Inc.
* Copyright (C) 2008 Funambol, Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by
* the Free Software Foundation with the addition of the following permission
* added to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED
* WORK IN WHICH THE COPYRIGHT IS OWNED BY FUNAMBOL, FUNAMBOL DISCLAIMS THE
* WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA.
*
* You can contact Funambol, Inc. headquarters at 643 Bair Island Road, Suite
* 305, Redwood City, CA 94063, USA, or at email address info@funambol.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License
* version 3, these Appropriate Legal Notices must retain the display of the
* "Powered by Funambol" logo. If the display of the logo is not reasonably
* feasible for technical reasons, the Appropriate Legal Notices must display
* the words "Powered by Funambol".
*/
package com.funambol.syncml.client;
import java.util.Enumeration;
import java.util.Vector;
import java.util.Date;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import com.funambol.sync.SyncItem;
import com.funambol.sync.SourceConfig;
import com.funambol.sync.SyncException;
import com.funambol.sync.SyncAnchor;
import com.funambol.sync.client.RawFileSyncSource;
import com.funambol.sync.client.ChangesTracker;
import com.funambol.syncml.protocol.SyncMLStatus;
import com.funambol.platform.FileAdapter;
import com.funambol.util.Log;
import com.funambol.util.Base64;
/**
* An implementation of TrackableSyncSource, providing
* the ability to sync briefcases (files). The source can handle both raw files
* and OMA files (file objects). By default the source formats items according
* to the OMA file object spec, but it is capable of receiving also raw files,
* if their MIME type is not OMA file objects.
*/
public class FileSyncSource extends RawFileSyncSource {

    private static final String TAG_LOG = "FileSyncSource";

    /**
     * A sync item backed by a file on disk. When the source type is the OMA
     * file object type, the item wraps the raw content in the OMA File
     * Object XML envelope (prologue + Base64 encoded body + epilogue);
     * otherwise the raw bytes are exchanged as-is.
     */
    protected class FileSyncItem extends RawFileSyncItem {

        // XML prologue/epilogue of the OMA file object; only initialized
        // for FILE_OBJECT_TYPE items
        protected String prologue;
        protected String epilogue;

        /** Creates a new item in state NEW, with no type and no parent. */
        public FileSyncItem(String fileName, String key) throws IOException {
            super(fileName, key, null, SyncItem.STATE_NEW, null);
        }

        /**
         * Creates a new item and computes its object size: for OMA file
         * objects the size includes the XML envelope plus the Base64
         * expansion of the body; for raw files it is the plain file size.
         */
        public FileSyncItem(String fileName, String key, String type, char state,
                            String parent) throws IOException {
            super(fileName, key, type, state, parent);
            FileAdapter file = new FileAdapter(fileName);
            if (SourceConfig.FILE_OBJECT_TYPE.equals(getType())) {
                // Initialize the prologue from the file name and timestamp
                FileObject fo = new FileObject();
                fo.setName(file.getName());
                fo.setModified(new Date(file.lastModified()));
                prologue = fo.formatPrologue();
                // Initialize the epilogue
                epilogue = fo.formatEpilogue();
                // The body is transferred Base64 encoded: account for the
                // encoding expansion in the declared object size
                int bodySize = Base64.computeEncodedSize((int)file.getSize());
                setObjectSize(prologue.length() + bodySize + epilogue.length());
            } else {
                // The size is the raw file size
                setObjectSize(file.getSize());
            }
            // Release the file object
            file.close();
        }

        /**
         * Creates a new output stream to write to. If the item type is
         * FileDataObject, then the output stream takes care of parsing the
         * XML part of the object and it fills a FileObject that can be
         * retrieved later. @see FileObjectOutputStream for more details.
         * Note that the output stream is unique (cached in {@code os}), so
         * that it can be reused across different syncml messages.
         */
        public OutputStream getOutputStream() throws IOException {
            if (os == null) {
                os = super.getOutputStream();
                // If this item is a file object, we shall use the
                // FileObjectOutputStream
                if (SourceConfig.FILE_OBJECT_TYPE.equals(getType())) {
                    FileObject fo = new FileObject();
                    os = new FileObjectOutputStream(fo, os);
                }
            }
            return os;
        }

        /**
         * Creates a new input stream to read from. If the source is
         * configured to handle File Data Objects, the returned stream wraps
         * the raw content in the XML description of the file.
         * @see FileObjectInputStream for more details.
         */
        public InputStream getInputStream() throws IOException {
            InputStream is = super.getInputStream();
            // If this item is a file object, we shall use the
            // FileObjectInputStream
            if (SourceConfig.FILE_OBJECT_TYPE.equals(getType())) {
                // FIX: the FileAdapter used to query the file size was
                // previously opened on every call (even for raw items) and
                // never closed, leaking a file handle; it is now opened
                // only when needed and closed as soon as the size is read
                FileAdapter file = new FileAdapter(fileName);
                int size = (int)file.getSize();
                file.close();
                is = new FileObjectInputStream(prologue, is, epilogue, size);
            }
            return is;
        }

        // If we do not reimplement the getContent, it will return a null
        // content, but this is not used in the ss, so there's no need to
        // redefine it
    }

    // Directory whose files are synchronized
    protected String directory;
    // Allowed file extensions; empty array means no filtering here
    protected String extensions[] = {};

    //------------------------------------------------------------- Constructors

    /**
     * FileSyncSource constructor: initialize source config
     */
    public FileSyncSource(SourceConfig config, ChangesTracker tracker, String directory) {
        super(config, tracker, directory);
    }

    /**
     * Applies the properties carried by a received file object (if any) to
     * the file on disk, and updates the item key with the real file name.
     */
    protected void applyFileProperties(FileSyncItem fsi) throws IOException {
        OutputStream os = fsi.getOutputStream();
        if (os instanceof FileObjectOutputStream) {
            FileObjectOutputStream foos = (FileObjectOutputStream)os;
            applyFileObjectProperties(fsi, foos);
            // The key for this item must be updated with the real
            // file name
            FileObject fo = foos.getFileObject();
            String newName = fo.getName();
            // The name is mandatory, but we try to be more robust here
            // and deal with items with no name
            if (newName != null) {
                fsi.setKey(directory + newName);
            }
        }
    }

    /**
     * Renames the received file according to the parsed file object, and
     * applies its modification timestamp when the platform supports it.
     * NOTE(review): when the received object has no name, the code below
     * still builds "directory + null" for the timestamp lookup — confirm
     * whether FileAdapter tolerates that path.
     */
    protected void applyFileObjectProperties(FileSyncItem fsi, FileObjectOutputStream foos) throws IOException {
        FileObject fo = foos.getFileObject();
        String newName = fo.getName();
        FileAdapter file = new FileAdapter(fsi.getFileName());
        if (newName != null) {
            // Rename the file
            file.rename(directory + newName);
        } else {
            Log.error(TAG_LOG, "The received item does not have a valid name.");
        }
        file.close();
        // Apply the modified date if present
        FileAdapter newFile = new FileAdapter(directory + newName);
        if (newFile != null) {
            Date lastModified = fo.getModified();
            if (newFile.isSetLastModifiedSupported() && lastModified != null) {
                newFile.setLastModified(lastModified.getTime());
            }
            newFile.close();
        }
    }
}
| zjujunge/funambol | externals/java-sdk/syncml/src/main/java/com/funambol/syncml/client/FileSyncSource.java | Java | agpl-3.0 | 8,195 |
<?php
class order_CancelOrderAction extends f_action_BaseJSONAction
{
/**
* @param Context $context
* @param Request $request
*/
public function _execute($context, $request)
{
$labels = array("");
foreach ($this->getDocumentInstanceArrayFromRequest($request) as $order)
{
if ($order instanceof order_persistentdocument_order)
{
$order->getDocumentService()->cancelOrder($order);
$this->logAction($order);
$labels[] = $order->getOrderNumber();
}
}
return $this->sendJSON(array('message' =>
LocaleService::getInstance()->transBO('m.order.bo.actions.cancel-order-success',
array(), array('OrderNumbers' => implode("\n ", $labels)))));
}
} | RBSChange/modules.order | actions/CancelOrderAction.class.php | PHP | agpl-3.0 | 697 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-2014 OpenERP (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" High-level objects for fields. """
from collections import OrderedDict
from datetime import date, datetime
from functools import partial
from operator import attrgetter
from types import NoneType
import logging
import pytz
import xmlrpclib
from openerp.tools import float_round, frozendict, html_sanitize, ustr, OrderedSet
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT as DATE_FORMAT
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT as DATETIME_FORMAT
DATE_LENGTH = len(date.today().strftime(DATE_FORMAT))
DATETIME_LENGTH = len(datetime.now().strftime(DATETIME_FORMAT))
EMPTY_DICT = frozendict()
_logger = logging.getLogger(__name__)
class SpecialValue(object):
    """ Wrapper stored in the cache in place of a regular value. """

    def __init__(self, value):
        # the wrapped value, returned as-is by get()
        self.value = value

    def get(self):
        """ Return the wrapped value. """
        return self.value
class FailedValue(SpecialValue):
    """ Special value that encapsulates an exception instead of a value;
        reading it via :meth:`get` re-raises that exception (see
        :func:`_check_value`).
    """
    def __init__(self, exception):
        # the exception instance to propagate on access
        self.exception = exception
    def get(self):
        raise self.exception
def _check_value(value):
    """ Return ``value`` itself, unless it is a :class:`SpecialValue`, in
        which case its getter is invoked (which may raise, e.g. for a
        :class:`FailedValue`).
    """
    if isinstance(value, SpecialValue):
        return value.get()
    return value
def resolve_all_mro(cls, name, reverse=False):
    """ Yield the successively overridden values of attribute ``name`` found
        in the classes of ``cls``'s MRO, in mro order, or in inverse mro
        order if ``reverse`` is true.
    """
    order = cls.__mro__[::-1] if reverse else cls.__mro__
    for klass in order:
        klass_dict = klass.__dict__
        if name in klass_dict:
            yield klass_dict[name]
class MetaField(type):
    """ Metaclass for field classes: it merges the ``_slots`` dictionaries
        along the inheritance chain into ``__slots__``, and registers every
        concrete field class by its ``type`` in :attr:`by_type`.
    """
    by_type = {}                # maps a field type (string) to its class

    def __new__(meta, name, bases, attrs):
        """ Combine the ``_slots`` dict from parent classes, and determine
        ``__slots__`` for them on the new class.
        """
        # merge parents' _slots; later bases take precedence
        base_slots = {}
        for base in reversed(bases):
            base_slots.update(getattr(base, '_slots', ()))
        slots = dict(base_slots)
        slots.update(attrs.get('_slots', ()))
        # only slot names not already declared by a parent become __slots__
        # on this class (redeclaring a slot would shadow the parent's)
        attrs['__slots__'] = set(slots) - set(base_slots)
        attrs['_slots'] = slots
        return type.__new__(meta, name, bases, attrs)

    def __init__(cls, name, bases, attrs):
        super(MetaField, cls).__init__(name, bases, attrs)
        # register only the first class declaring a given type, so
        # subclasses do not override the canonical field class
        if cls.type and cls.type not in MetaField.by_type:
            MetaField.by_type[cls.type] = cls

        # compute class attributes to avoid calling dir() on fields:
        # index the _column_*, _related_* and _description_* attributes
        # once per class, keyed by their suffix
        cls.column_attrs = []
        cls.related_attrs = []
        cls.description_attrs = []
        for attr in dir(cls):
            if attr.startswith('_column_'):
                cls.column_attrs.append((attr[8:], attr))
            elif attr.startswith('_related_'):
                cls.related_attrs.append((attr[9:], attr))
            elif attr.startswith('_description_'):
                cls.description_attrs.append((attr[13:], attr))
class Field(object):
""" The field descriptor contains the field definition, and manages accesses
and assignments of the corresponding field on records. The following
attributes may be provided when instanciating a field:
:param string: the label of the field seen by users (string); if not
set, the ORM takes the field name in the class (capitalized).
:param help: the tooltip of the field seen by users (string)
:param readonly: whether the field is readonly (boolean, by default ``False``)
:param required: whether the value of the field is required (boolean, by
default ``False``)
:param index: whether the field is indexed in database (boolean, by
default ``False``)
:param default: the default value for the field; this is either a static
value, or a function taking a recordset and returning a value
:param states: a dictionary mapping state values to lists of UI attribute-value
pairs; possible attributes are: 'readonly', 'required', 'invisible'.
Note: Any state-based condition requires the ``state`` field value to be
available on the client-side UI. This is typically done by including it in
the relevant views, possibly made invisible if not relevant for the
end-user.
:param groups: comma-separated list of group xml ids (string); this
restricts the field access to the users of the given groups only
:param bool copy: whether the field value should be copied when the record
is duplicated (default: ``True`` for normal fields, ``False`` for
``one2many`` and computed fields, including property fields and
related fields)
:param string oldname: the previous name of this field, so that ORM can rename
it automatically at migration
.. _field-computed:
.. rubric:: Computed fields
One can define a field whose value is computed instead of simply being
read from the database. The attributes that are specific to computed
fields are given below. To define such a field, simply provide a value
for the attribute ``compute``.
:param compute: name of a method that computes the field
:param inverse: name of a method that inverses the field (optional)
:param search: name of a method that implement search on the field (optional)
:param store: whether the field is stored in database (boolean, by
default ``False`` on computed fields)
:param compute_sudo: whether the field should be recomputed as superuser
to bypass access rights (boolean, by default ``False``)
The methods given for ``compute``, ``inverse`` and ``search`` are model
methods. Their signature is shown in the following example::
upper = fields.Char(compute='_compute_upper',
inverse='_inverse_upper',
search='_search_upper')
@api.depends('name')
def _compute_upper(self):
for rec in self:
rec.upper = rec.name.upper() if rec.name else False
def _inverse_upper(self):
for rec in self:
rec.name = rec.upper.lower() if rec.upper else False
def _search_upper(self, operator, value):
if operator == 'like':
operator = 'ilike'
return [('name', operator, value)]
The compute method has to assign the field on all records of the invoked
recordset. The decorator :meth:`openerp.api.depends` must be applied on
the compute method to specify the field dependencies; those dependencies
are used to determine when to recompute the field; recomputation is
automatic and guarantees cache/database consistency. Note that the same
method can be used for several fields, you simply have to assign all the
given fields in the method; the method will be invoked once for all
those fields.
By default, a computed field is not stored to the database, and is
computed on-the-fly. Adding the attribute ``store=True`` will store the
field's values in the database. The advantage of a stored field is that
searching on that field is done by the database itself. The disadvantage
is that it requires database updates when the field must be recomputed.
The inverse method, as its name says, does the inverse of the compute
method: the invoked records have a value for the field, and you must
apply the necessary changes on the field dependencies such that the
computation gives the expected value. Note that a computed field without
an inverse method is readonly by default.
The search method is invoked when processing domains before doing an
actual search on the model. It must return a domain equivalent to the
condition: ``field operator value``.
.. _field-related:
.. rubric:: Related fields
The value of a related field is given by following a sequence of
relational fields and reading a field on the reached model. The complete
sequence of fields to traverse is specified by the attribute
:param related: sequence of field names
Some field attributes are automatically copied from the source field if
they are not redefined: ``string``, ``help``, ``readonly``, ``required`` (only
if all fields in the sequence are required), ``groups``, ``digits``, ``size``,
``translate``, ``sanitize``, ``selection``, ``comodel_name``, ``domain``,
``context``. All semantic-free attributes are copied from the source
field.
By default, the values of related fields are not stored to the database.
Add the attribute ``store=True`` to make it stored, just like computed
fields. Related fields are automatically recomputed when their
dependencies are modified.
.. _field-company-dependent:
.. rubric:: Company-dependent fields
Formerly known as 'property' fields, the value of those fields depends
on the company. In other words, users that belong to different companies
may see different values for the field on a given record.
:param company_dependent: whether the field is company-dependent (boolean)
.. _field-incremental-definition:
.. rubric:: Incremental definition
A field is defined as class attribute on a model class. If the model
is extended (see :class:`~openerp.models.Model`), one can also extend
the field definition by redefining a field with the same name and same
type on the subclass. In that case, the attributes of the field are
taken from the parent class and overridden by the ones given in
subclasses.
For instance, the second class below only adds a tooltip on the field
``state``::
class First(models.Model):
_name = 'foo'
state = fields.Selection([...], required=True)
class Second(models.Model):
_inherit = 'foo'
state = fields.Selection(help="Blah blah blah")
"""
__metaclass__ = MetaField
type = None # type of the field (string)
relational = False # whether the field is a relational one
_slots = {
'_attrs': EMPTY_DICT, # dictionary of field attributes; it contains:
# - all attributes after __init__()
# - free attributes only after set_class_name()
'automatic': False, # whether the field is automatically created ("magic" field)
'inherited': False, # whether the field is inherited (_inherits)
'column': None, # the column corresponding to the field
'setup_done': False, # whether the field has been set up
'name': None, # name of the field
'model_name': None, # name of the model of this field
'comodel_name': None, # name of the model of values (if relational)
'store': True, # whether the field is stored in database
'index': False, # whether the field is indexed in database
'manual': False, # whether the field is a custom field
'copy': True, # whether the field is copied over by BaseModel.copy()
'depends': (), # collection of field dependencies
'recursive': False, # whether self depends on itself
'compute': None, # compute(recs) computes field on recs
'compute_sudo': False, # whether field should be recomputed as admin
'inverse': None, # inverse(recs) inverses field on recs
'search': None, # search(recs, operator, value) searches on self
'related': None, # sequence of field names, for related fields
'related_sudo': True, # whether related fields should be read as admin
'company_dependent': False, # whether ``self`` is company-dependent (property field)
'default': None, # default(recs) returns the default value
'string': None, # field label
'help': None, # field tooltip
'readonly': False, # whether the field is readonly
'required': False, # whether the field is required
'states': None, # set readonly and required depending on state
'groups': None, # csv list of group xml ids
'change_default': False, # whether the field may trigger a "user-onchange"
'deprecated': None, # whether the field is deprecated
'inverse_fields': (), # collection of inverse fields (objects)
'computed_fields': (), # fields computed with the same method as self
'related_field': None, # corresponding related field
'_triggers': (), # invalidation and recomputation triggers
}
def __init__(self, string=None, **kwargs):
kwargs['string'] = string
attrs = {key: val for key, val in kwargs.iteritems() if val is not None}
self._attrs = attrs or EMPTY_DICT
def __getattr__(self, name):
""" Access non-slot field attribute. """
try:
return self._attrs[name]
except KeyError:
raise AttributeError(name)
    def __setattr__(self, name, value):
        """ Set slot or non-slot field attribute.

        Slot attributes are stored directly on the instance; anything else
        falls back to the ``_attrs`` dict.
        """
        try:
            # slot attribute: normal assignment
            object.__setattr__(self, name, value)
        except AttributeError:
            # not a slot: store in the extra attributes dict
            if self._attrs:
                self._attrs[name] = value
            else:
                # _attrs is the shared EMPTY_DICT; give self its own dict
                self._attrs = {name: value} # replace EMPTY_DICT
def __delattr__(self, name):
""" Remove non-slot field attribute. """
try:
del self._attrs[name]
except KeyError:
raise AttributeError(name)
def new(self, **kwargs):
""" Return a field of the same type as ``self``, with its own parameters. """
return type(self)(**kwargs)
    def set_class_name(self, cls, name):
        """ Assign the model class and field name of ``self``.

        Resets all slot attributes to their defaults, then merges the field
        attributes found along the MRO of ``cls`` (base definitions first),
        and finally applies the merged attributes on ``self``.
        """
        self_attrs = self._attrs
        # reset every slot to its default before applying merged attributes
        for attr, value in self._slots.iteritems():
            setattr(self, attr, value)
        self.model_name = cls._name
        self.name = name
        # determine all inherited field attributes
        attrs = {}
        for field in resolve_all_mro(cls, name, reverse=True):
            if isinstance(field, type(self)):
                attrs.update(field._attrs)
            else:
                # a different kind of definition shadows earlier attributes
                attrs.clear()
        attrs.update(self_attrs) # necessary in case self is not in cls
        # initialize ``self`` with ``attrs``
        if attrs.get('compute'):
            # by default, computed fields are not stored, not copied and readonly
            attrs['store'] = attrs.get('store', False)
            attrs['copy'] = attrs.get('copy', False)
            attrs['readonly'] = attrs.get('readonly', not attrs.get('inverse'))
        if attrs.get('related'):
            # by default, related fields are not stored and not copied
            attrs['store'] = attrs.get('store', False)
            attrs['copy'] = attrs.get('copy', False)
        # fix for function fields overridden by regular columns
        if not isinstance(attrs.get('column'), (NoneType, fields.function)):
            attrs.pop('store', None)
        for attr, value in attrs.iteritems():
            setattr(self, attr, value)
        if not self.string and not self.related:
            # related fields get their string from their parent field
            self.string = name.replace('_', ' ').capitalize()
        # determine self.default and cls._defaults in a consistent way
        self._determine_default(cls, name)
    def _determine_default(self, cls, name):
        """ Retrieve the default value for ``self`` in the hierarchy of ``cls``, and
        determine ``self.default`` and ``cls._defaults`` accordingly.

        Walks the MRO from most-derived to base and stops at the first class
        that provides either a new-API ``default`` attribute or an old-API
        ``_defaults`` entry for ``name``.
        """
        self.default = None
        # traverse the class hierarchy upwards, and take the first field
        # definition with a default or _defaults for self
        for klass in cls.__mro__:
            if name in klass.__dict__:
                field = klass.__dict__[name]
                if not isinstance(field, type(self)):
                    # klass contains another value overridden by self
                    return
                if 'default' in field._attrs:
                    # take the default in field, and adapt it for cls._defaults
                    value = field._attrs['default']
                    if callable(value):
                        from openerp import api
                        self.default = value
                        # the lambda closes over this iteration's ``value``;
                        # safe because we return immediately below
                        cls._defaults[name] = api.model(
                            lambda recs: self.convert_to_write(value(recs))
                        )
                    else:
                        self.default = lambda recs: value
                        cls._defaults[name] = value
                    return
            defaults = klass.__dict__.get('_defaults') or {}
            if name in defaults:
                # take the value from _defaults, and adapt it for self.default
                value = defaults[name]
                if callable(value):
                    # old-API callable: invoked with (model, cr, uid, context)
                    func = lambda recs: value(recs._model, recs._cr, recs._uid, recs._context)
                else:
                    func = lambda recs: value
                self.default = lambda recs: self.convert_to_cache(
                    func(recs), recs, validate=False,
                )
                cls._defaults[name] = value
                return
def __str__(self):
return "%s.%s" % (self.model_name, self.name)
def __repr__(self):
return "%s.%s" % (self.model_name, self.name)
############################################################################
#
# Field setup
#
def setup(self, env):
""" Make sure that ``self`` is set up, except for recomputation triggers. """
if not self.setup_done:
if self.related:
self._setup_related(env)
else:
self._setup_regular(env)
self.setup_done = True
#
# Setup of non-related fields
#
def _setup_regular(self, env):
""" Setup the attributes of a non-related field. """
recs = env[self.model_name]
def make_depends(deps):
return tuple(deps(recs) if callable(deps) else deps)
# convert compute into a callable and determine depends
if isinstance(self.compute, basestring):
# if the compute method has been overridden, concatenate all their _depends
self.depends = ()
for method in resolve_all_mro(type(recs), self.compute, reverse=True):
self.depends += make_depends(getattr(method, '_depends', ()))
self.compute = getattr(type(recs), self.compute)
else:
self.depends = make_depends(getattr(self.compute, '_depends', ()))
# convert inverse and search into callables
if isinstance(self.inverse, basestring):
self.inverse = getattr(type(recs), self.inverse)
if isinstance(self.search, basestring):
self.search = getattr(type(recs), self.search)
#
# Setup of related fields
#
    def _setup_related(self, env):
        """ Setup the attributes of a related field.

        Resolves the chain of fields named by ``self.related``, wires the
        generic compute/inverse/search implementations, and copies the
        missing attributes (string, help, ...) from the target field.
        """
        # fix the type of self.related if necessary
        if isinstance(self.related, basestring):
            self.related = tuple(self.related.split('.'))
        # determine the chain of fields, and make sure they are all set up
        recs = env[self.model_name]
        fields = []
        for name in self.related:
            field = recs._fields[name]
            field.setup(env)
            recs = recs[name]
            fields.append(field)
        # ``field`` is now the last field of the chain, i.e. the target field
        self.related_field = field
        # check type consistency
        if self.type != field.type:
            raise Warning("Type of related field %s is inconsistent with %s" % (self, field))
        # determine dependencies, compute, inverse, and search
        self.depends = ('.'.join(self.related),)
        self.compute = self._compute_related
        if not (self.readonly or field.readonly):
            self.inverse = self._inverse_related
        if field._description_searchable:
            # allow searching on self only if the related field is searchable
            self.search = self._search_related
        # copy attributes from field to self (string, help, etc.)
        for attr, prop in self.related_attrs:
            if not getattr(self, attr):
                setattr(self, attr, getattr(field, prop))
        for attr, value in field._attrs.iteritems():
            if attr not in self._attrs:
                setattr(self, attr, value)
        # special case for states: copy it only for inherited fields
        if not self.states and self.inherited:
            self.states = field.states
        # special case for required: check if all fields are required
        if not self.store and not self.required:
            self.required = all(field.required for field in fields)
def _compute_related(self, records):
""" Compute the related field ``self`` on ``records``. """
# when related_sudo, bypass access rights checks when reading values
others = records.sudo() if self.related_sudo else records
for record, other in zip(records, others):
if not record.id:
# draft record, do not switch to another environment
other = record
# traverse the intermediate fields; follow the first record at each step
for name in self.related[:-1]:
other = other[name][:1]
record[self.name] = other[self.related[-1]]
def _inverse_related(self, records):
""" Inverse the related field ``self`` on ``records``. """
# store record values, otherwise they may be lost by cache invalidation!
record_value = {record: record[self.name] for record in records}
for record in records:
other = record
# traverse the intermediate fields, and keep at most one record
for name in self.related[:-1]:
other = other[name][:1]
if other:
other[self.related[-1]] = record_value[record]
def _search_related(self, records, operator, value):
""" Determine the domain to search on field ``self``. """
return [('.'.join(self.related), operator, value)]
# properties used by _setup_related() to copy values from related field
_related_comodel_name = property(attrgetter('comodel_name'))
_related_string = property(attrgetter('string'))
_related_help = property(attrgetter('help'))
_related_readonly = property(attrgetter('readonly'))
_related_groups = property(attrgetter('groups'))
@property
def base_field(self):
""" Return the base field of an inherited field, or ``self``. """
return self.related_field.base_field if self.inherited else self
#
# Setup of field triggers
#
# The triggers is a collection of pairs (field, path) of computed fields
# that depend on ``self``. When ``self`` is modified, it invalidates the cache
# of each ``field``, and registers the records to recompute based on ``path``.
# See method ``modified`` below for details.
#
def add_trigger(self, trigger):
""" Add a recomputation trigger on ``self``. """
if trigger not in self._triggers:
self._triggers += (trigger,)
def setup_triggers(self, env):
""" Add the necessary triggers to invalidate/recompute ``self``. """
model = env[self.model_name]
for path in self.depends:
self._setup_dependency([], model, path.split('.'))
    def _setup_dependency(self, path0, model, path1):
        """ Make ``self`` depend on ``model``; `path0 + path1` is a dependency of
        ``self``, and ``path0`` is the sequence of field names from ``self.model``
        to ``model``.
        """
        env = model.env
        head, tail = path1[0], path1[1:]
        if head == '*':
            # special case: add triggers on all fields of model (except self)
            fields = set(model._fields.itervalues()) - set([self])
        else:
            fields = [model._fields[head]]
        for field in fields:
            if field == self:
                # the field depends on itself; mark it recursive instead of
                # registering a self-trigger
                _logger.debug("Field %s is recursively defined", self)
                self.recursive = True
                continue
            #_logger.debug("Add trigger on %s to recompute %s", field, self)
            # 'id' stands for the empty path (trigger on the records themselves)
            field.add_trigger((self, '.'.join(path0 or ['id'])))
            # add trigger on inverse fields, too
            for invf in field.inverse_fields:
                #_logger.debug("Add trigger on %s to recompute %s", invf, self)
                invf.add_trigger((self, '.'.join(path0 + [head])))
            # recursively traverse the dependency
            if tail:
                comodel = env[field.comodel_name]
                self._setup_dependency(path0 + [head], comodel, tail)
@property
def dependents(self):
""" Return the computed fields that depend on ``self``. """
return (field for field, path in self._triggers)
############################################################################
#
# Field description
#
def get_description(self, env):
""" Return a dictionary that describes the field ``self``. """
desc = {'type': self.type}
for attr, prop in self.description_attrs:
value = getattr(self, prop)
if callable(value):
value = value(env)
if value is not None:
desc[attr] = value
return desc
# properties used by get_description()
_description_store = property(attrgetter('store'))
_description_manual = property(attrgetter('manual'))
_description_depends = property(attrgetter('depends'))
_description_related = property(attrgetter('related'))
_description_company_dependent = property(attrgetter('company_dependent'))
_description_readonly = property(attrgetter('readonly'))
_description_required = property(attrgetter('required'))
_description_states = property(attrgetter('states'))
_description_groups = property(attrgetter('groups'))
_description_change_default = property(attrgetter('change_default'))
_description_deprecated = property(attrgetter('deprecated'))
@property
def _description_searchable(self):
return bool(self.store or self.search or (self.column and self.column._fnct_search))
@property
def _description_sortable(self):
return self.store or (self.inherited and self.related_field._description_sortable)
def _description_string(self, env):
if self.string and env.lang:
field = self.base_field
name = "%s,%s" % (field.model_name, field.name)
trans = env['ir.translation']._get_source(name, 'field', env.lang)
return trans or self.string
return self.string
def _description_help(self, env):
if self.help and env.lang:
name = "%s,%s" % (self.model_name, self.name)
trans = env['ir.translation']._get_source(name, 'help', env.lang)
return trans or self.help
return self.help
############################################################################
#
# Conversion to column instance
#
    def to_column(self):
        """ Return a column object corresponding to ``self``, or ``None``.

        The column parameters are gathered from the ``column_attrs``
        properties and from the explicit extra attributes.
        """
        if not self.store and self.compute:
            # non-stored computed fields do not have a corresponding column
            self.column = None
            return None
        # determine column parameters
        #_logger.debug("Create fields._column for Field %s", self)
        args = {}
        for attr, prop in self.column_attrs:
            args[attr] = getattr(self, prop)
        # explicit attributes override the property-derived ones
        for attr, value in self._attrs.iteritems():
            args[attr] = value
        if self.company_dependent:
            # company-dependent fields are mapped to former property fields
            args['type'] = self.type
            args['relation'] = self.comodel_name
            self.column = fields.property(**args)
        elif self.column:
            # let the column provide a valid column for the given parameters
            self.column = self.column.new(_computed_field=bool(self.compute), **args)
        else:
            # create a fresh new column of the right type
            self.column = getattr(fields, self.type)(**args)
        return self.column
# properties used by to_column() to create a column instance
_column_copy = property(attrgetter('copy'))
_column_select = property(attrgetter('index'))
_column_manual = property(attrgetter('manual'))
_column_string = property(attrgetter('string'))
_column_help = property(attrgetter('help'))
_column_readonly = property(attrgetter('readonly'))
_column_required = property(attrgetter('required'))
_column_states = property(attrgetter('states'))
_column_groups = property(attrgetter('groups'))
_column_change_default = property(attrgetter('change_default'))
_column_deprecated = property(attrgetter('deprecated'))
############################################################################
#
# Conversion of values
#
    def null(self, env):
        """ Return the null value for this field in the given environment.

        The generic null value is ``False``; subclasses may override.
        """
        return False
    def convert_to_cache(self, value, record, validate=True):
        """ convert ``value`` to the cache level in ``env``; ``value`` may come from
        an assignment, or have the format of methods :meth:`BaseModel.read`
        or :meth:`BaseModel.write`

        :param record: the target record for the assignment, or an empty recordset

        :param bool validate: when True, field-specific validation of
            ``value`` will be performed
        """
        # generic fields cache values as-is; subclasses override to coerce
        return value
def convert_to_read(self, value, use_name_get=True):
""" convert ``value`` from the cache to a value as returned by method
:meth:`BaseModel.read`
:param bool use_name_get: when True, value's diplay name will
be computed using :meth:`BaseModel.name_get`, if relevant
for the field
"""
return False if value is None else value
    def convert_to_write(self, value, target=None, fnames=None):
        """ convert ``value`` from the cache to a valid value for method
        :meth:`BaseModel.write`.

        :param target: optional, the record to be modified with this value
        :param fnames: for relational fields only, an optional collection of
            field names to convert
        """
        # by default the write format is the same as the read format
        return self.convert_to_read(value)
    def convert_to_onchange(self, value):
        """ convert ``value`` from the cache to a valid value for an onchange
        method v7.
        """
        # by default the onchange format is the same as the write format
        return self.convert_to_write(value)
def convert_to_export(self, value, env):
""" convert ``value`` from the cache to a valid value for export. The
parameter ``env`` is given for managing translations.
"""
if not value:
return ''
return value if env.context.get('export_raw_data') else ustr(value)
    def convert_to_display_name(self, value, record=None):
        """ convert ``value`` from the cache to a suitable display name.

        :param record: optional record, used by subclasses that need context
        """
        return ustr(value)
############################################################################
#
# Descriptor methods
#
    def __get__(self, record, owner):
        """ return the value of field ``self`` on ``record``

        Descriptor protocol: class access returns the field itself; record
        access returns the cached value, computing or reading it on a miss.
        """
        if record is None:
            return self # the field is accessed through the owner class
        if not record:
            # null record -> return the null value for this field
            return self.null(record.env)
        # only a single record may be accessed
        record.ensure_one()
        try:
            return record._cache[self]
        except KeyError:
            pass
        # cache miss, retrieve value
        if record.id:
            # normal record -> read or compute value for this field
            self.determine_value(record)
        else:
            # draft record -> compute the value or let it be null
            self.determine_draft_value(record)
        # the result should be in cache now
        return record._cache[self]
    def __set__(self, record, value):
        """ set the value of field ``self`` on ``record``

        In draft/onchange mode the value only goes to the cache (with
        dependency invalidation); otherwise it is written to the database.
        """
        env = record.env
        # only a single record may be updated
        record.ensure_one()
        # adapt value to the cache level
        value = self.convert_to_cache(value, record)
        if env.in_draft or not record.id:
            # determine dependent fields
            spec = self.modified_draft(record)
            # set value in cache, inverse field, and mark record as dirty
            record._cache[self] = value
            if env.in_onchange:
                for invf in self.inverse_fields:
                    invf._update(value, record)
                record._set_dirty(self.name)
            # determine more dependent fields, and invalidate them
            # (relational values may change which records match a path)
            if self.relational:
                spec += self.modified_draft(record)
            env.invalidate(spec)
        else:
            # simply write to the database, and update cache
            record.write({self.name: self.convert_to_write(value)})
            record._cache[self] = value
############################################################################
#
# Computation of field values
#
    def _compute_value(self, records):
        """ Invoke the compute method on ``records``.

        All fields sharing the compute method are pre-filled with their null
        value, and marked as "being computed" for the duration of the call.
        """
        # initialize the fields to their corresponding null value in cache
        for field in self.computed_fields:
            records._cache[field] = field.null(records.env)
            records.env.computed[field].update(records._ids)
        self.compute(records)
        # computation done: unmark the records
        for field in self.computed_fields:
            records.env.computed[field].difference_update(records._ids)
    def compute_value(self, records):
        """ Invoke the compute method on ``records``; the results are in cache.

        On access/missing errors the computation is retried record by record,
        storing a :class:`FailedValue` for the records that still fail.
        """
        with records.env.do_in_draft():
            try:
                self._compute_value(records)
            except (AccessError, MissingError):
                # some record is forbidden or missing, retry record by record
                for record in records:
                    try:
                        self._compute_value(record)
                    except Exception as exc:
                        # NOTE(review): the cache is indexed by field objects
                        # elsewhere (``record._cache[self]``); confirm that
                        # ``_cache`` also accepts field names as keys here
                        record._cache[self.name] = FailedValue(exc)
    def determine_value(self, record):
        """ Determine the value of ``self`` for ``record``.

        Dispatches between: stored/old-style column (read or recompute),
        non-stored computed field (compute), and plain non-stored field
        (null value).
        """
        env = record.env
        if self.column and not (self.depends and env.in_draft):
            # this is a stored field or an old-style function field
            if self.depends:
                # this is a stored computed field, check for recomputation
                recs = record._recompute_check(self)
                if recs:
                    # recompute the value (only in cache)
                    self.compute_value(recs)
                    # HACK: if result is in the wrong cache, copy values
                    if recs.env != env:
                        for source, target in zip(recs, recs.with_env(env)):
                            try:
                                values = target._convert_to_cache({
                                    f.name: source[f.name] for f in self.computed_fields
                                }, validate=False)
                            except MissingError as e:
                                values = FailedValue(e)
                            target._cache.update(values)
                    # the result is saved to database by BaseModel.recompute()
                    return
            # read the field from database
            record._prefetch_field(self)
        elif self.compute:
            # this is either a non-stored computed field, or a stored computed
            # field in draft mode
            if self.recursive:
                # recursive fields are computed record by record
                self.compute_value(record)
            else:
                recs = record._in_cache_without(self)
                self.compute_value(recs)
        else:
            # this is a non-stored non-computed field
            record._cache[self] = self.null(env)
def determine_draft_value(self, record):
""" Determine the value of ``self`` for the given draft ``record``. """
if self.compute:
self._compute_value(record)
else:
record._cache[self] = SpecialValue(self.null(record.env))
def determine_inverse(self, records):
""" Given the value of ``self`` on ``records``, inverse the computation. """
if self.inverse:
self.inverse(records)
def determine_domain(self, records, operator, value):
""" Return a domain representing a condition on ``self``. """
if self.search:
return self.search(records, operator, value)
else:
return [(self.name, operator, value)]
############################################################################
#
# Notification when fields are modified
#
    def modified(self, records):
        """ Notify that field ``self`` has been modified on ``records``: prepare the
        fields/records to recompute, and return a spec indicating what to
        invalidate.
        """
        # invalidate the fields that depend on self, and prepare recomputation
        spec = [(self, records._ids)]
        for field, path in self._triggers:
            if path and field.store:
                # don't move this line to function top, see log
                env = records.env(user=SUPERUSER_ID, context={'active_test': False})
                # find the records of the dependent model that reach
                # ``records`` through ``path``
                target = env[field.model_name].search([(path, 'in', records.ids)])
                if target:
                    spec.append((field, target._ids))
                    # recompute field on target in the environment of records,
                    # and as user admin if required
                    if field.compute_sudo:
                        target = target.with_env(records.env(user=SUPERUSER_ID))
                    else:
                        target = target.with_env(records.env)
                    target._recompute_todo(field)
            else:
                # non-stored dependent field: invalidate it for all records
                spec.append((field, None))
        return spec
    def modified_draft(self, records):
        """ Same as :meth:`modified`, but in draft mode.

        Only records already present in the cache are considered, and records
        whose field is currently being computed are excluded.
        """
        env = records.env
        # invalidate the fields on the records in cache that depend on
        # ``records``, except fields currently being computed
        spec = []
        for field, path in self._triggers:
            target = env[field.model_name]
            computed = target.browse(env.computed[field])
            if path == 'id':
                # direct dependency: the modified records themselves
                target = records - computed
            elif path:
                # keep cached records whose ``path`` reaches ``records``
                target = (target.browse(env.cache[field]) - computed).filtered(
                    lambda rec: rec._mapped_cache(path) & records
                )
            else:
                target = target.browse(env.cache[field]) - computed
            if target:
                spec.append((field, target._ids))
        return spec
class Boolean(Field):
    """ Boolean field; cached values are coerced with ``bool()``. """
    type = 'boolean'

    def convert_to_cache(self, value, record, validate=True):
        return bool(value)

    def convert_to_export(self, value, env):
        # raw export keeps the boolean, otherwise stringify it
        return value if env.context.get('export_raw_data') else ustr(value)
class Integer(Field):
    """ Integer field. """
    type = 'integer'
    _slots = {
        'group_operator': None,         # operator for aggregating values
        'group_expression': None,       # advance expression for aggregating values
    }

    _related_group_operator = property(attrgetter('group_operator'))
    _column_group_operator = property(attrgetter('group_operator'))
    _related_group_expression = property(attrgetter('group_expression'))
    _column_group_expression = property(attrgetter('group_expression'))

    def convert_to_cache(self, value, record, validate=True):
        if isinstance(value, dict):
            # special case: an integer field used as inverse of a one2many
            return value.get('id', False)
        return int(value or 0)

    def convert_to_read(self, value, use_name_get=True):
        # Integer values greater than 2^31-1 are not supported in pure XMLRPC,
        # so we have to pass them as floats :-(
        return float(value) if value and value > xmlrpclib.MAXINT else value

    def _update(self, records, value):
        # special case: an integer field used as inverse of a one2many
        records._cache[self] = value.id or 0

    def convert_to_export(self, value, env):
        if not value and value != 0:
            return ''
        return value if env.context.get('export_raw_data') else ustr(value)
class Float(Field):
    """ The precision digits are given by the attribute

    :param digits: a pair (total, decimal), or a function taking a database
        cursor and returning a pair (total, decimal)
    """
    type = 'float'
    _slots = {
        '_digits': None,                # digits argument passed to class initializer
        'group_operator': None,         # operator for aggregating values
        'group_expression': None,       # advance expression for aggregating values
    }

    def __init__(self, string=None, digits=None, **kwargs):
        # stash digits under the private slot name
        super(Float, self).__init__(string=string, _digits=digits, **kwargs)

    @property
    def digits(self):
        if not callable(self._digits):
            return self._digits
        # digits given as a function of the database cursor
        with fields._get_cursor() as cr:
            return self._digits(cr)

    def _setup_digits(self, env):
        """ Setup the digits for ``self`` and its corresponding column """
        pass

    def _setup_regular(self, env):
        super(Float, self)._setup_regular(env)
        self._setup_digits(env)

    _related__digits = property(attrgetter('_digits'))
    _related_group_operator = property(attrgetter('group_operator'))
    _related_group_expression = property(attrgetter('group_expression'))

    _description_digits = property(attrgetter('digits'))

    _column_digits = property(lambda self: not callable(self._digits) and self._digits)
    _column_digits_compute = property(lambda self: callable(self._digits) and self._digits)
    _column_group_operator = property(attrgetter('group_operator'))
    _column_group_expression = property(attrgetter('group_expression'))

    def convert_to_cache(self, value, record, validate=True):
        # apply rounding here, otherwise value in cache may be wrong!
        result = float(value or 0.0)
        digits = self.digits
        if digits:
            result = float_round(result, precision_digits=digits[1])
        return result

    def convert_to_export(self, value, env):
        if not value and value != 0.0:
            return ''
        return value if env.context.get('export_raw_data') else ustr(value)
class _String(Field):
    """ Abstract class for string fields. """
    _slots = {
        'translate': False,             # whether the field is translated
    }

    # expose ``translate`` to columns, related fields and descriptions
    _column_translate = property(attrgetter('translate'))
    _related_translate = property(attrgetter('translate'))
    _description_translate = property(attrgetter('translate'))
class Char(_String):
    """ Basic string field, can be length-limited, usually displayed as a
        single-line string in clients

        :param int size: the maximum size of values stored for that field
        :param bool translate: whether the values of this field can be translated
    """
    type = 'char'
    _slots = {
        'size': None,                   # maximum size of values (deprecated)
    }

    _column_size = property(attrgetter('size'))
    _related_size = property(attrgetter('size'))
    _description_size = property(attrgetter('size'))

    def _setup_regular(self, env):
        super(Char, self)._setup_regular(env)
        assert isinstance(self.size, (NoneType, int)), \
            "Char field %s with non-integer size %r" % (self, self.size)

    def convert_to_cache(self, value, record, validate=True):
        if value is False or value is None:
            return False
        # truncate to the declared size (a None size keeps everything)
        text = ustr(value)
        return text[:self.size]
class Text(_String):
    """ Very similar to :class:`~.Char` but used for longer contents, does not
        have a size and usually displayed as a multiline text box.

        :param translate: whether the value of this field can be translated
    """
    type = 'text'

    def convert_to_cache(self, value, record, validate=True):
        if value is False or value is None:
            return False
        return ustr(value)
class Html(_String):
    """ HTML string field; values may be sanitized when validated. """
    type = 'html'
    _slots = {
        'sanitize': True,               # whether value must be sanitized
        'strip_style': False,           # whether to strip style attributes
    }

    _column_sanitize = property(attrgetter('sanitize'))
    _related_sanitize = property(attrgetter('sanitize'))
    _description_sanitize = property(attrgetter('sanitize'))

    _column_strip_style = property(attrgetter('strip_style'))
    _related_strip_style = property(attrgetter('strip_style'))
    _description_strip_style = property(attrgetter('strip_style'))

    def convert_to_cache(self, value, record, validate=True):
        if value is False or value is None:
            return False
        if validate and self.sanitize:
            return html_sanitize(value, strip_style=self.strip_style)
        return value
class Date(Field):
    """ Date field; cached values use the ORM string format. """
    type = 'date'

    @staticmethod
    def today(*args):
        """ Return the current day in the format expected by the ORM.
            This function may be used to compute default values.
        """
        return date.today().strftime(DATE_FORMAT)

    @staticmethod
    def context_today(record, timestamp=None):
        """ Return the current date as seen in the client's timezone in a format
            fit for date fields. This method may be used to compute default
            values.

            :param datetime timestamp: optional datetime value to use instead of
                the current date and time (must be a datetime, regular dates
                can't be converted between timezones.)
            :rtype: str
        """
        reference = timestamp or datetime.now()
        localized = None
        tz_name = record._context.get('tz') or record.env.user.tz
        if tz_name:
            try:
                utc_value = pytz.timezone('UTC').localize(reference, is_dst=False)  # UTC = no DST
                localized = utc_value.astimezone(pytz.timezone(tz_name))
            except Exception:
                _logger.debug("failed to compute context/client-specific today date, using UTC value for `today`",
                              exc_info=True)
        return (localized or reference).strftime(DATE_FORMAT)

    @staticmethod
    def from_string(value):
        """ Convert an ORM ``value`` into a :class:`date` value. """
        if not value:
            return None
        return datetime.strptime(value[:DATE_LENGTH], DATE_FORMAT).date()

    @staticmethod
    def to_string(value):
        """ Convert a :class:`date` value into the format expected by the ORM. """
        if not value:
            return False
        return value.strftime(DATE_FORMAT)

    def convert_to_cache(self, value, record, validate=True):
        if not value:
            return False
        if isinstance(value, basestring):
            if validate:
                # force parsing for validation
                self.from_string(value)
            return value[:DATE_LENGTH]
        return self.to_string(value)

    def convert_to_export(self, value, env):
        if not value:
            return ''
        if env.context.get('export_raw_data'):
            return self.from_string(value)
        return ustr(value)
class Datetime(Field):
    """ Datetime field; cached values use the ORM string format (UTC). """
    type = 'datetime'

    @staticmethod
    def now(*args):
        """ Return the current day and time in the format expected by the ORM.
            This function may be used to compute default values.
        """
        return datetime.now().strftime(DATETIME_FORMAT)

    @staticmethod
    def context_timestamp(record, timestamp):
        """Returns the given timestamp converted to the client's timezone.
           This method is *not* meant for use as a _defaults initializer,
           because datetime fields are automatically converted upon
           display on client side. For _defaults you :meth:`fields.datetime.now`
           should be used instead.

           :param datetime timestamp: naive datetime value (expressed in UTC)
                                      to be converted to the client timezone
           :rtype: datetime
           :return: timestamp converted to timezone-aware datetime in context
                    timezone
        """
        assert isinstance(timestamp, datetime), 'Datetime instance expected'
        tz_name = record._context.get('tz') or record.env.user.tz
        utc_timestamp = pytz.utc.localize(timestamp, is_dst=False)  # UTC = no DST
        if not tz_name:
            return utc_timestamp
        try:
            context_tz = pytz.timezone(tz_name)
            return utc_timestamp.astimezone(context_tz)
        except Exception:
            _logger.debug("failed to compute context/client-specific timestamp, "
                          "using the UTC value",
                          exc_info=True)
            return utc_timestamp

    @staticmethod
    def from_string(value):
        """ Convert an ORM ``value`` into a :class:`datetime` value. """
        if not value:
            return None
        value = value[:DATETIME_LENGTH]
        if len(value) == DATE_LENGTH:
            # date-only string: complete with midnight
            value += " 00:00:00"
        return datetime.strptime(value, DATETIME_FORMAT)

    @staticmethod
    def to_string(value):
        """ Convert a :class:`datetime` value into the format expected by the ORM. """
        if not value:
            return False
        return value.strftime(DATETIME_FORMAT)

    def convert_to_cache(self, value, record, validate=True):
        if not value:
            return False
        if isinstance(value, basestring):
            if validate:
                # force parsing for validation
                self.from_string(value)
            value = value[:DATETIME_LENGTH]
            if len(value) == DATE_LENGTH:
                value += " 00:00:00"
            return value
        return self.to_string(value)

    def convert_to_export(self, value, env):
        if not value:
            return ''
        if env.context.get('export_raw_data'):
            return self.from_string(value)
        return ustr(value)

    def convert_to_display_name(self, value, record=None):
        assert record, 'Record expected'
        return Datetime.to_string(Datetime.context_timestamp(record, Datetime.from_string(value)))
class Binary(Field):
    """ Binary field; relies entirely on the conversions inherited from
        :class:`Field` (no value coercion of its own).
    """
    type = 'binary'
class Selection(Field):
    """
    :param selection: specifies the possible values for this field.
        It is given as either a list of pairs (``value``, ``string``), or a
        model method, or a method name.
    :param selection_add: provides an extension of the selection in the case
        of an overridden field. It is a list of pairs (``value``, ``string``).

    The attribute ``selection`` is mandatory except in the case of
    :ref:`related fields <field-related>` or :ref:`field extensions
    <field-incremental-definition>`.
    """
    type = 'selection'
    # the selection itself is the only extra slot of this field class
    _slots = {
        'selection': None,              # [(value, string), ...], function or method name
    }
    def __init__(self, selection=None, string=None, **kwargs):
        """ Initialize the selection field.

        :param selection: list of pairs, model method, or method name
        """
        if callable(selection):
            from openerp import api
            # mark the callable as an api.model method
            selection = api.expected(api.model, selection)
        super(Selection, self).__init__(selection=selection, string=string, **kwargs)
    def _setup_regular(self, env):
        """ Setup a non-related selection field; a selection is mandatory. """
        super(Selection, self)._setup_regular(env)
        assert self.selection is not None, "Field %s without selection" % self
    def _setup_related(self, env):
        """ Setup a related selection field.

        The selection is delegated to the related (target) field, so that
        translations and dynamic selections stay consistent.
        """
        super(Selection, self)._setup_related(env)
        # selection must be computed on related field
        field = self.related_field
        # the lambda closes over ``field``, resolved at call time
        self.selection = lambda model: field._description_selection(model.env)
def set_class_name(self, cls, name):
super(Selection, self).set_class_name(cls, name)
# determine selection (applying 'selection_add' extensions)
for field in resolve_all_mro(cls, name, reverse=True):
if isinstance(field, type(self)):
# We cannot use field.selection or field.selection_add here
# because those attributes are overridden by ``set_class_name``.
if 'selection' in field._attrs:
self.selection = field._attrs['selection']
if 'selection_add' in field._attrs:
# use an OrderedDict to update existing values
selection_add = field._attrs['selection_add']
self.selection = OrderedDict(self.selection + selection_add).items()
else:
self.selection = None
def _description_selection(self, env):
""" return the selection list (pairs (value, label)); labels are
translated according to context language
"""
selection = self.selection
if isinstance(selection, basestring):
return getattr(env[self.model_name], selection)()
if callable(selection):
return selection(env[self.model_name])
# translate selection labels
if env.lang:
name = "%s,%s" % (self.model_name, self.name)
translate = partial(
env['ir.translation']._get_source, name, 'selection', env.lang)
return [(value, translate(label) if label else label) for value, label in selection]
else:
return selection
@property
def _column_selection(self):
if isinstance(self.selection, basestring):
method = self.selection
return lambda self, *a, **kw: getattr(self, method)(*a, **kw)
else:
return self.selection
def get_values(self, env):
""" return a list of the possible values """
selection = self.selection
if isinstance(selection, basestring):
selection = getattr(env[self.model_name], selection)()
elif callable(selection):
selection = selection(env[self.model_name])
return [value for value, _ in selection]
def convert_to_cache(self, value, record, validate=True):
if not validate:
return value or False
if value in self.get_values(record.env):
return value
elif not value:
return False
raise ValueError("Wrong value for %s: %r" % (self, value))
def convert_to_export(self, value, env):
if not isinstance(self.selection, list):
# FIXME: this reproduces an existing buggy behavior!
return value if value else ''
for item in self._description_selection(env):
if item[0] == value:
return item[1]
return False
class Reference(Selection):
    """ Selection-like field whose value points to a record of an arbitrary
        model; read back as the string ``'model,id'`` (see convert_to_read). """
    type = 'reference'
    _slots = {
        'size': None,               # maximum size of values (deprecated)
    }

    _related_size = property(attrgetter('size'))

    _column_size = property(attrgetter('size'))

    def _setup_regular(self, env):
        super(Reference, self)._setup_regular(env)
        assert isinstance(self.size, (NoneType, int)), \
            "Reference field %s with non-integer size %r" % (self, self.size)

    def convert_to_cache(self, value, record, validate=True):
        # cache format: a recordset of size 0 or 1, or False
        if isinstance(value, BaseModel):
            # when validating, the target model must be among the selection
            if ((not validate or value._name in self.get_values(record.env))
                    and len(value) <= 1):
                return value.with_env(record.env) or False
        elif isinstance(value, basestring):
            # value has the form 'model,id'
            res_model, res_id = value.split(',')
            return record.env[res_model].browse(int(res_id))
        elif not value:
            return False
        raise ValueError("Wrong value for %s: %r" % (self, value))

    def convert_to_read(self, value, use_name_get=True):
        return "%s,%s" % (value._name, value.id) if value else False

    def convert_to_export(self, value, env):
        return value.name_get()[0][1] if value else ''

    def convert_to_display_name(self, value, record=None):
        return ustr(value and value.display_name)
class _Relational(Field):
    """ Abstract class for relational fields. """
    relational = True
    _slots = {
        'domain': [],               # domain for searching values
        'context': {},              # context for searching values
    }

    def _setup_regular(self, env):
        super(_Relational, self)._setup_regular(env)
        if self.comodel_name not in env.registry:
            # degrade gracefully to a placeholder model instead of crashing
            _logger.warning("Field %s with unknown comodel_name %r"
                            % (self, self.comodel_name))
            self.comodel_name = '_unknown'

    @property
    def _related_domain(self):
        if callable(self.domain):
            # will be called with another model than self's
            return lambda recs: self.domain(recs.env[self.model_name])
        else:
            # maybe not correct if domain is a string...
            return self.domain

    _related_context = property(attrgetter('context'))

    _description_relation = property(attrgetter('comodel_name'))
    _description_context = property(attrgetter('context'))

    def _description_domain(self, env):
        return self.domain(env[self.model_name]) if callable(self.domain) else self.domain

    _column_obj = property(attrgetter('comodel_name'))
    _column_domain = property(attrgetter('domain'))
    _column_context = property(attrgetter('context'))

    def null(self, env):
        # the null value of a relational field is an empty comodel recordset
        return env[self.comodel_name]

    def modified(self, records):
        # Invalidate cache for self.inverse_fields, too. Note that recomputation
        # of fields that depend on self.inverse_fields is already covered by the
        # triggers (see above).
        spec = super(_Relational, self).modified(records)
        for invf in self.inverse_fields:
            spec.append((invf, None))
        return spec
class Many2one(_Relational):
    """ The value of such a field is a recordset of size 0 (no
    record) or 1 (a single record).

    :param comodel_name: name of the target model (string)

    :param domain: an optional domain to set on candidate values on the
        client side (domain or string)

    :param context: an optional context to use on the client side when
        handling that field (dictionary)

    :param ondelete: what to do when the referred record is deleted;
        possible values are: ``'set null'``, ``'restrict'``, ``'cascade'``

    :param auto_join: whether JOINs are generated upon search through that
        field (boolean, by default ``False``)

    :param delegate: set it to ``True`` to make fields of the target model
        accessible from the current model (corresponds to ``_inherits``)

    The attribute ``comodel_name`` is mandatory except in the case of related
    fields or field extensions.
    """
    type = 'many2one'
    _slots = {
        'ondelete': 'set null',     # what to do when value is deleted
        'auto_join': False,         # whether joins are generated upon search
        'delegate': False,          # whether self implements delegation
    }

    def __init__(self, comodel_name=None, string=None, **kwargs):
        super(Many2one, self).__init__(comodel_name=comodel_name, string=string, **kwargs)

    def set_class_name(self, cls, name):
        super(Many2one, self).set_class_name(cls, name)
        # determine self.delegate
        if not self.delegate:
            self.delegate = name in cls._inherits.values()

    _column_ondelete = property(attrgetter('ondelete'))
    _column_auto_join = property(attrgetter('auto_join'))

    def _update(self, records, value):
        """ Update the cached value of ``self`` for ``records`` with ``value``. """
        records._cache[self] = value

    def convert_to_cache(self, value, record, validate=True):
        # cache format: a recordset of size 0 or 1
        if isinstance(value, (NoneType, int, long)):
            return record.env[self.comodel_name].browse(value)
        if isinstance(value, BaseModel):
            if value._name == self.comodel_name and len(value) <= 1:
                return value.with_env(record.env)
            raise ValueError("Wrong value for %s: %r" % (self, value))
        elif isinstance(value, tuple):
            # presumably a (id, name) pair; keep only the id -- TODO confirm
            return record.env[self.comodel_name].browse(value[0])
        elif isinstance(value, dict):
            # a dict of field values creates a new (draft) record
            return record.env[self.comodel_name].new(value)
        else:
            return self.null(record.env)

    def convert_to_read(self, value, use_name_get=True):
        if use_name_get and value:
            # evaluate name_get() as superuser, because the visibility of a
            # many2one field value (id and name) depends on the current record's
            # access rights, and not the value's access rights.
            try:
                value_sudo = value.sudo()
                # performance trick: make sure that all records of the same
                # model as value in value.env will be prefetched in value_sudo.env
                value_sudo.env.prefetch[value._name].update(value.env.prefetch[value._name])
                return value_sudo.name_get()[0]
            except MissingError:
                # Should not happen, unless the foreign key is missing.
                return False
        else:
            return value.id

    def convert_to_write(self, value, target=None, fnames=None):
        return value.id

    def convert_to_onchange(self, value):
        return value.id

    def convert_to_export(self, value, env):
        return value.name_get()[0][1] if value else ''

    def convert_to_display_name(self, value, record=None):
        return ustr(value.display_name)
class UnionUpdate(SpecialValue):
    """ Placeholder for a value update; when this value is taken from the cache,
        it returns ``record[field.name] | value`` and stores it in the cache.
    """
    def __init__(self, field, record, value):
        self.args = (field, record, value)

    def get(self):
        field, record, value = self.args
        # in order to read the current field's value, remove self from cache
        del record._cache[field]
        # read the current field's value, and update it in cache only
        record._cache[field] = new_value = record[field.name] | value
        return new_value
class _RelationalMulti(_Relational):
    """ Abstract class for relational fields *2many. """

    def _update(self, records, value):
        """ Update the cached value of ``self`` for ``records`` with ``value``. """
        for record in records:
            if self in record._cache:
                record._cache[self] = record[self.name] | value
            else:
                # defer the union until the cached value is actually read
                record._cache[self] = UnionUpdate(self, record, value)

    def convert_to_cache(self, value, record, validate=True):
        # cache format: a recordset of the comodel
        if isinstance(value, BaseModel):
            if value._name == self.comodel_name:
                return value.with_env(record.env)
        elif isinstance(value, list):
            # value is a list of record ids or commands
            comodel = record.env[self.comodel_name]
            ids = OrderedSet(record[self.name].ids)
            # modify ids with the commands
            for command in value:
                if isinstance(command, (tuple, list)):
                    if command[0] == 0:
                        # (0, _, values): create a new record
                        ids.add(comodel.new(command[2]).id)
                    elif command[0] == 1:
                        # (1, id, values): update an existing record
                        comodel.browse(command[1]).update(command[2])
                        ids.add(command[1])
                    elif command[0] == 2:
                        # (2, id): remove; the record is deleted later
                        # note: the record will be deleted by write()
                        ids.discard(command[1])
                    elif command[0] == 3:
                        # (3, id): remove from the relation only
                        ids.discard(command[1])
                    elif command[0] == 4:
                        # (4, id): add to the relation
                        ids.add(command[1])
                    elif command[0] == 5:
                        # (5,): clear the relation
                        ids.clear()
                    elif command[0] == 6:
                        # (6, _, ids): replace the relation entirely
                        ids = OrderedSet(command[2])
                elif isinstance(command, dict):
                    # bare dict of values: create a new record
                    ids.add(comodel.new(command).id)
                else:
                    # bare id: add to the relation
                    ids.add(command)
            # return result as a recordset
            return comodel.browse(list(ids))
        elif not value:
            return self.null(record.env)
        raise ValueError("Wrong value for %s: %s" % (self, value))

    def convert_to_read(self, value, use_name_get=True):
        return value.ids

    def convert_to_write(self, value, target=None, fnames=None):
        # Build the list of write commands describing ``value``.
        # remove/delete former records
        if target is None:
            set_ids = []
            result = [(6, 0, set_ids)]
            add_existing = lambda id: set_ids.append(id)
        else:
            # one2many deletes (tag 2) removed records, many2many unlinks (tag 3)
            tag = 2 if self.type == 'one2many' else 3
            result = [(tag, record.id) for record in target[self.name] - value]
            add_existing = lambda id: result.append((4, id))

        if fnames is None:
            # take all fields in cache, except the inverses of self
            fnames = set(value._fields) - set(MAGIC_COLUMNS)
            for invf in self.inverse_fields:
                fnames.discard(invf.name)

        # add new and existing records
        for record in value:
            if not record.id:
                # unsaved record: emit a create command
                values = {k: v for k, v in record._cache.iteritems() if k in fnames}
                values = record._convert_to_write(values)
                result.append((0, 0, values))
            elif record._is_dirty():
                # modified record: emit an update command with dirty fields only
                values = {k: record._cache[k] for k in record._get_dirty() if k in fnames}
                values = record._convert_to_write(values)
                result.append((1, record.id, values))
            else:
                add_existing(record.id)

        return result

    def convert_to_export(self, value, env):
        return ','.join(name for id, name in value.name_get()) if value else ''

    def convert_to_display_name(self, value, record=None):
        raise NotImplementedError()

    def _compute_related(self, records):
        """ Compute the related field ``self`` on ``records``. """
        for record in records:
            value = record
            # traverse the intermediate fields, and keep at most one record
            for name in self.related[:-1]:
                value = value[name][:1]
            record[self.name] = value[self.related[-1]]
class One2many(_RelationalMulti):
    """ One2many field; the value of such a field is the recordset of all the
    records in ``comodel_name`` such that the field ``inverse_name`` is equal to
    the current record.

    :param comodel_name: name of the target model (string)

    :param inverse_name: name of the inverse ``Many2one`` field in
        ``comodel_name`` (string)

    :param domain: an optional domain to set on candidate values on the
        client side (domain or string)

    :param context: an optional context to use on the client side when
        handling that field (dictionary)

    :param auto_join: whether JOINs are generated upon search through that
        field (boolean, by default ``False``)

    :param limit: optional limit to use upon read (integer)

    The attributes ``comodel_name`` and ``inverse_name`` are mandatory except in
    the case of related fields or field extensions.
    """
    type = 'one2many'
    _slots = {
        'inverse_name': None,       # name of the inverse field
        'auto_join': False,         # whether joins are generated upon search
        'limit': None,              # optional limit to use upon read
        'copy': False,              # o2m are not copied by default
    }

    def __init__(self, comodel_name=None, inverse_name=None, string=None, **kwargs):
        super(One2many, self).__init__(
            comodel_name=comodel_name,
            inverse_name=inverse_name,
            string=string,
            **kwargs
        )

    def _setup_regular(self, env):
        super(One2many, self)._setup_regular(env)

        if self.inverse_name:
            # link self to its inverse field and vice-versa
            comodel = env[self.comodel_name]
            invf = comodel._fields[self.inverse_name]
            # In some rare cases, a ``One2many`` field can link to ``Int`` field
            # (res_model/res_id pattern). Only inverse the field if this is
            # a ``Many2one`` field.
            if isinstance(invf, Many2one):
                self.inverse_fields += (invf,)
                invf.inverse_fields += (self,)

    _description_relation_field = property(attrgetter('inverse_name'))

    _column_fields_id = property(attrgetter('inverse_name'))
    _column_auto_join = property(attrgetter('auto_join'))
    _column_limit = property(attrgetter('limit'))
class Many2many(_RelationalMulti):
    """ Many2many field; the value of such a field is the recordset.

    :param comodel_name: name of the target model (string)

    The attribute ``comodel_name`` is mandatory except in the case of related
    fields or field extensions.

    :param relation: optional name of the table that stores the relation in
        the database (string)

    :param column1: optional name of the column referring to "these" records
        in the table ``relation`` (string)

    :param column2: optional name of the column referring to "those" records
        in the table ``relation`` (string)

    The attributes ``relation``, ``column1`` and ``column2`` are optional. If not
    given, names are automatically generated from model names, provided
    ``model_name`` and ``comodel_name`` are different!

    :param domain: an optional domain to set on candidate values on the
        client side (domain or string)

    :param context: an optional context to use on the client side when
        handling that field (dictionary)

    :param limit: optional limit to use upon read (integer)
    """
    type = 'many2many'
    _slots = {
        'relation': None,           # name of table
        'column1': None,            # column of table referring to model
        'column2': None,            # column of table referring to comodel
        'limit': None,              # optional limit to use upon read
    }

    def __init__(self, comodel_name=None, relation=None, column1=None, column2=None,
                 string=None, **kwargs):
        super(Many2many, self).__init__(
            comodel_name=comodel_name,
            relation=relation,
            column1=column1,
            column2=column2,
            string=string,
            **kwargs
        )

    def _setup_regular(self, env):
        super(Many2many, self)._setup_regular(env)

        if not self.relation and self.store:
            # retrieve self.relation from the corresponding column
            column = self.to_column()
            if isinstance(column, fields.many2many):
                self.relation, self.column1, self.column2 = \
                    column._sql_names(env[self.model_name])

        if self.relation:
            m2m = env.registry._m2m
            # if inverse field has already been setup, it is present in m2m
            invf = m2m.get((self.relation, self.column2, self.column1))
            if invf:
                self.inverse_fields += (invf,)
                invf.inverse_fields += (self,)
            else:
                # add self in m2m, so that its inverse field can find it
                m2m[(self.relation, self.column1, self.column2)] = self

    _column_rel = property(attrgetter('relation'))
    _column_id1 = property(attrgetter('column1'))
    _column_id2 = property(attrgetter('column2'))
    _column_limit = property(attrgetter('limit'))
class Serialized(Field):
    """ Minimal support for existing sparse and serialized fields. """
    type = 'serialized'

    def convert_to_cache(self, value, record, validate=True):
        # Any falsy value is cached as an empty mapping.
        if value:
            return value
        return {}
class Id(Field):
    """ Special case for field 'id'. """
    type = 'integer'
    _slots = {
        'string': 'ID',
        'store': True,
        'readonly': True,
    }

    def to_column(self):
        self.column = fields.integer(self.string)
        return self.column

    def __get__(self, record, owner):
        if record is None:
            return self         # the field is accessed through the class owner
        if not record:
            # empty recordset: there is no id to return
            return False
        # reading 'id' on a multi-record recordset is an error (ensure_one)
        return record.ensure_one()._ids[0]

    def __set__(self, record, value):
        raise TypeError("field 'id' cannot be assigned")
# imported here to avoid dependency cycle issues
from openerp import SUPERUSER_ID, registry
from .exceptions import Warning, AccessError, MissingError
from .models import BaseModel, MAGIC_COLUMNS
from .osv import fields
| Antiun/odoo | openerp/fields.py | Python | agpl-3.0 | 75,603 |
package io.fidelcoria.ayfmap.controller;
import java.util.HashMap;
import java.util.Map;
import org.springframework.stereotype.Component;
import javafx.fxml.FXML;
import javafx.scene.control.Label;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
@Component
public class MainController {

    @FXML
    Label actionHeaderBar;
    @FXML
    TabPane actionTabPane;

    @FXML
    private GenerateTabController generateTabController;
    @FXML
    private ImportTabController importTabController;
    @FXML
    private DataTabController dataTabController;

    /** Maps each tab's fx:id to the header title shown when it is selected. */
    private static final Map<String, String> tabTitles;
    static {
        tabTitles = new HashMap<>();
        tabTitles.put("generate-tab", "Generate Documents");
        tabTitles.put("import-tab", "Import Documents");
        tabTitles.put("edit-tab", "Edit Data");
    }

    /**
     * Update the actionHeaderBar to reflect the selected tab.
     */
    public void tabClicked() {
        // Ask the selection model directly instead of scanning every tab.
        Tab selected = actionTabPane.getSelectionModel().getSelectedItem();
        if (selected != null) {
            actionHeaderBar.setText(tabTitles.get(selected.getId()));
        }
    }
}
| fidelcoria/AYFM-Scheduling | AssignmentPlanner/src/main/java/io/fidelcoria/ayfmap/controller/MainController.java | Java | agpl-3.0 | 1,099 |
$(function() {
    // Knockout view model for the OctoPrint Firmware Updater plugin.
    function FirmwareUpdaterViewModel(parameters) {
        var self = this;

        // Injected OctoPrint view models (order matches the registration below).
        self.settingsViewModel = parameters[0];
        self.loginState = parameters[1];
        self.connection = parameters[2];
        self.printerState = parameters[3];

        // Configured path to the avrdude binary (edited in the config dialog).
        self.configPathAvrdude = ko.observable();

        // Firmware source: an uploaded hex file or a remote URL.
        self.hexFileName = ko.observable(undefined);
        self.hexFileURL = ko.observable(undefined);

        // Inline alert state (message, bootstrap alert class, visibility).
        self.alertMessage = ko.observable("");
        self.alertType = ko.observable("alert-warning");
        self.showAlert = ko.observable(false);
        self.missingParamToFlash = ko.observable(false);

        // Progress / busy indicators while flashing or checking for updates.
        self.progressBarText = ko.observable();
        self.isBusy = ko.observable(false);
        self.updateAvailable = ko.observable(false);

        // Result indicators for the avrdude path test in the settings dialog.
        self.pathBroken = ko.observable(false);
        self.pathOk = ko.observable(false);
        self.pathText = ko.observable();
        self.pathHelpVisible = ko.computed(function() {
            return self.pathBroken() || self.pathOk();
        });

        self.inSettingsDialog = false;

        self.selectHexPath = $("#settings_firmwareupdater_selectHexPath");
        self.configurationDialog = $("#settings_plugin_firmwareupdater_configurationdialog");

        // Configure the hex-file chooser; the upload itself is deferred until
        // the user actually starts a flash (autoUpload: false).
        self.selectHexPath.fileupload({
            dataType: "hex",
            maxNumberOfFiles: 1,
            autoUpload: false,
            add: function(e, data) {
                if (data.files.length == 0) {
                    return false;
                }
                self.hexData = data;
                self.hexFileName(data.files[0].name);
            }
        })
self.startFlashFromFile = function() {
if (!self.loginState.isAdmin()){
self.alertType("alert-warning")
self.alertMessage(gettext("Administrator privileges are needed to flash firmware."));
self.showAlert(true);
return false;
}
if (self.printerState.isPrinting() || self.printerState.isPaused()){
self.alertType("alert-warning")
self.alertMessage(gettext("Printer is printing. Please wait for the print to be finished."));
self.showAlert(true);
return false;
}
if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) {
self.alertType("alert-warning")
self.alertMessage(gettext("AVRDUDE path not configured"));
self.showAlert(true);
return false;
}
if (!self.hexFileName()) {
self.alertType("alert-warning")
self.alertMessage(gettext("Hex file path not specified"));
self.showAlert(true);
return false;
}
if (!self.connection.selectedPort()) {
self.alertType("alert-warning")
self.alertMessage(gettext("Port not selected"));
self.showAlert(true);
return false;
}
self.progressBarText("Flashing firmware...");
self.isBusy(true);
self.showAlert(false);
var form = {
selected_port: self.connection.selectedPort()
};
self.hexData.formData = form;
self.hexData.submit();
}
self.startFlashFromURL = function() {
if (!self.loginState.isAdmin()){
self.alertType("alert-warning")
self.alertMessage(gettext("Administrator privileges are needed to flash firmware."));
self.showAlert(true);
return false;
}
if (self.printerState.isPrinting() || self.printerState.isPaused()){
self.alertType("alert-warning")
self.alertMessage(gettext("Printer is printing. Please wait for the print to be finished."));
self.showAlert(true);
return false;
}
if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) {
self.alertType("alert-warning")
self.alertMessage(gettext("AVRDUDE path not configured"));
self.showAlert(true);
return false;
}
if (!self.hexFileURL()) {
self.alertType("alert-warning")
self.alertMessage(gettext("Hex file URL not specified"));
self.showAlert(true);
return false;
}
if (!self.connection.selectedPort()) {
self.alertType("alert-warning")
self.alertMessage(gettext("Port not selected"));
self.showAlert(true);
return false;
}
self.isBusy(true);
self.showAlert(false);
self.progressBarText("Flashing firmware...");
$.ajax({
url: PLUGIN_BASEURL + "firmwareupdater/flashFirmwareWithURL",
type: "POST",
dataType: "json",
data: JSON.stringify({
selected_port: self.connection.selectedPort(),
hex_url: self.hexFileURL()
}),
contentType: "application/json; charset=UTF-8"
})
}
self.checkForUpdates = function() {
if (self.printerState.isPrinting() || self.printerState.isPaused()){
self.alertType("alert-warning")
self.alertMessage(gettext("Printer is printing. Please wait for the print to be finished."));
self.showAlert(true);
return false;
}
if (!self.connection.selectedPort()) {
self.alertType("alert-warning")
self.alertMessage(gettext("Port not selected"));
self.showAlert(true);
return false;
}
self.isBusy(true);
self.showAlert(false);
$.ajax({
url: PLUGIN_BASEURL + "firmwareupdater/checkForUpdates",
type: "POST",
dataType: "json",
data: JSON.stringify({
selected_port: self.connection.selectedPort(),
}),
contentType: "application/json; charset=UTF-8"
});
}
self.flashUpdate = function() {
if (self.printerState.isPrinting() || self.printerState.isPaused()){
self.alertType("alert-warning")
self.alertMessage(gettext("Printer is printing. Please wait for the print to be finished."));
self.showAlert(true);
return false;
}
if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) {
self.alertType("alert-warning")
self.alertMessage(gettext("AVRDUDE path not configured"));
self.showAlert(true);
return false;
}
if (!self.connection.selectedPort()) {
self.alertType("alert-warning")
self.alertMessage(gettext("Port not selected"));
self.showAlert(true);
return false;
}
self.isBusy(true);
self.showAlert(false);
self.progressBarText("Flashing firmware...");
console.log(AJAX_BASEURL + "system");
$.ajax({
url: PLUGIN_BASEURL + "firmwareupdater/flashUpdate",
type: "POST",
dataType: "json",
data: JSON.stringify({
selected_port: self.connection.selectedPort()
}),
contentType: "application/json; charset=UTF-8"
});
}
self.onDataUpdaterPluginMessage = function(plugin, data) {
if (plugin != "firmwareupdater") {
return;
}
if (data.type == "status" && data.status_type == "check_update_status") {
if (data.status_value == "progress") {
self.progressBarText(data.status_description);
return;
}
if (data.status_value == "update_available") {
if (!self.inSettingsDialog) {
self.showUpdateAvailablePopup(data.status_description);
}
self.updateAvailable(true);
self.isBusy(false);
return;
}
if (data.status_value == "up_to_date") {
self.updateAvailable(false);
self.isBusy(false);
self.showAlert(false);
if (self.inSettingsDialog) {
self.alertType("alert-success");
self.alertMessage(data.status_description);
self.showAlert(true);
}
return;
}
if (data.status_value == "error") {
self.updateAvailable(false);
self.isBusy(false);
self.alertType("alert-danger");
self.alertMessage(data.status_description);
self.showAlert(true);
return;
}
}
if (data.type == "status" && data.status_type == "flashing_status") {
if (data.status_value == "starting_flash") {
self.isBusy(true);
} else if (data.status_value == "progress") {
self.progressBarText(data.status_description);
} else if (data.status_value == "info") {
self.alertType("alert-info");
self.alertMessage(data.status_description);
self.showAlert(true);
} else if (data.status_value == "successful") {
self.showPopup("success", "Flashing Successful", "");
self.isBusy(false);
self.showAlert(false);
self.hexFileName(undefined);
self.hexFileURL(undefined);
} else if (data.status_value == "error") {
self.showPopup("error", "Flashing Failed", data.status_description);
self.isBusy(false);
self.showAlert(false);
}
}
}
self.showPluginConfig = function() {
self.configPathAvrdude(self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path());
self.configurationDialog.modal();
}
self.onConfigClose = function() {
self._saveAvrdudePath();
self.configurationDialog.modal("hide");
self.onConfigHidden();
if (self.configPathAvrdude()) {
self.showAlert(false);
}
}
self._saveAvrdudePath = function() {
var data = {
plugins: {
firmwareupdater: {
avrdude_path: self.configPathAvrdude(),
}
}
}
self.settingsViewModel.saveData(data);
}
self.onConfigHidden = function() {
self.pathBroken(false);
self.pathOk(false);
self.pathText("");
}
self.testAvrdudePath = function() {
$.ajax({
url: API_BASEURL + "util/test",
type: "POST",
dataType: "json",
data: JSON.stringify({
command: "path",
path: self.configPathAvrdude(),
check_type: "file",
check_access: "x"
}),
contentType: "application/json; charset=UTF-8",
success: function(response) {
if (!response.result) {
if (!response.exists) {
self.pathText(gettext("The path doesn't exist"));
} else if (!response.typeok) {
self.pathText(gettext("The path is not a file"));
} else if (!response.access) {
self.pathText(gettext("The path is not an executable"));
}
} else {
self.pathText(gettext("The path is valid"));
}
self.pathOk(response.result);
self.pathBroken(!response.result);
}
})
}
self.isReadyToFlashFromFile = function() {
if (self.printerState.isPrinting() || self.printerState.isPaused()){
return false;
}
if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) {
return false;
}
if (!self.connection.selectedPort()) {
return false;
}
if (!self.hexFileName()) {
return false;
}
self.showAlert(false);
return true;
}
self.isReadyToFlashFromURL = function() {
if (self.printerState.isPrinting() || self.printerState.isPaused()){
return false;
}
if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) {
return false;
}
if (!self.connection.selectedPort()) {
return false;
}
if (!self.hexFileURL()) {
return false;
}
self.showAlert(false);
return true;
}
self.isReadyToCheck = function() {
if (self.printerState.isPrinting() || self.printerState.isPaused()){
return false;
}
if (!self.connection.selectedPort()) {
return false;
}
return true;
}
self.isReadyToUpdate = function() {
if (self.printerState.isPrinting() || self.printerState.isPaused()){
return false;
}
if (!self.settingsViewModel.settings.plugins.firmwareupdater.avrdude_path()) {
return false;
}
if (!self.connection.selectedPort() || self.connection.selectedPort() == "AUTO") {
return false;
}
return true;
}
self.onSettingsShown = function() {
self.inSettingsDialog = true;
}
self.onSettingsHidden = function() {
self.inSettingsDialog = false;
self.showAlert(false);
}
        // Popup Messages
        // Sticky PNotify notification announcing a newly available firmware
        // version.
        self.showUpdateAvailablePopup = function(new_fw_version) {
            self.updateAvailablePopup = new PNotify({
                title: gettext('Firmware Update Available'),
                text: gettext('Version ') + new_fw_version,
                icon: true,
                hide: false,
                type: 'success',
                buttons: {
                    closer: true,
                    sticker: false,
                },
                history: {
                    history: false
                }
            });
        };

        // Generic popup helper; any popup already showing is replaced.
        self.showPopup = function(message_type, title, text){
            if (self.popup !== undefined){
                self.closePopup();
            }
            self.popup = new PNotify({
                title: gettext(title),
                text: text,
                type: message_type,
                hide: false
            });
        }

        self.closePopup = function() {
            if (self.popup !== undefined) {
                self.popup.remove();
            }
        };
    }
    // Register the view model with OctoPrint: constructor, injected
    // dependencies (order matters), and the DOM element it binds to.
    OCTOPRINT_VIEWMODELS.push([
        FirmwareUpdaterViewModel,
        ["settingsViewModel", "loginStateViewModel", "connectionViewModel", "printerStateViewModel"],
        [document.getElementById("settings_plugin_firmwareupdater")]
    ]);
});
| mcecchi/SuperOcto | OctoPrint-FirmwareUpdater/octoprint_firmwareupdater/static/js/firmwareupdater.js | JavaScript | agpl-3.0 | 16,233 |
/*
************************************************************************
******************* CANADIAN ASTRONOMY DATA CENTRE *******************
************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES **************
*
* (c) 2009. (c) 2009.
* Government of Canada Gouvernement du Canada
* National Research Council Conseil national de recherches
* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6
* All rights reserved Tous droits réservés
*
* NRC disclaims any warranties, Le CNRC dénie toute garantie
* expressed, implied, or énoncée, implicite ou légale,
* statutory, of any kind with de quelque nature que ce
* respect to the software, soit, concernant le logiciel,
* including without limitation y compris sans restriction
* any warranty of merchantability toute garantie de valeur
* or fitness for a particular marchande ou de pertinence
* purpose. NRC shall not be pour un usage particulier.
* liable in any event for any Le CNRC ne pourra en aucun cas
* damages, whether direct or être tenu responsable de tout
* indirect, special or general, dommage, direct ou indirect,
* consequential or incidental, particulier ou général,
* arising from the use of the accessoire ou fortuit, résultant
* software. Neither the name de l'utilisation du logiciel. Ni
* of the National Research le nom du Conseil National de
* Council of Canada nor the Recherches du Canada ni les noms
* names of its contributors may de ses participants ne peuvent
* be used to endorse or promote être utilisés pour approuver ou
* products derived from this promouvoir les produits dérivés
* software without specific prior de ce logiciel sans autorisation
* written permission. préalable et particulière
* par écrit.
*
* This file is part of the Ce fichier fait partie du projet
* OpenCADC project. OpenCADC.
*
* OpenCADC is free software: OpenCADC est un logiciel libre ;
* you can redistribute it and/or vous pouvez le redistribuer ou le
* modify it under the terms of modifier suivant les termes de
* the GNU Affero General Public la “GNU Affero General Public
* License as published by the License” telle que publiée
* Free Software Foundation, par la Free Software Foundation
* either version 3 of the : soit la version 3 de cette
* License, or (at your option) licence, soit (à votre gré)
* any later version. toute version ultérieure.
*
* OpenCADC is distributed in the OpenCADC est distribué
* hope that it will be useful, dans l’espoir qu’il vous
* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE
* without even the implied GARANTIE : sans même la garantie
* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ
* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF
* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence
* General Public License for Générale Publique GNU Affero
* more details. pour plus de détails.
*
* You should have received Vous devriez avoir reçu une
* a copy of the GNU Affero copie de la Licence Générale
* General Public License along Publique GNU Affero avec
* with OpenCADC. If not, see OpenCADC ; si ce n’est
* <http://www.gnu.org/licenses/>. pas le cas, consultez :
* <http://www.gnu.org/licenses/>.
*
* $Revision: 4 $
*
************************************************************************
*/
package ca.nrc.cadc.vos.server;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import ca.nrc.cadc.io.ByteCountInputStream;
import ca.nrc.cadc.uws.JobInfo;
import ca.nrc.cadc.uws.Parameter;
import ca.nrc.cadc.uws.web.InlineContentException;
import ca.nrc.cadc.uws.web.InlineContentHandler;
import ca.nrc.cadc.uws.web.UWSInlineContentHandler;
import ca.nrc.cadc.vos.Transfer;
import ca.nrc.cadc.vos.TransferParsingException;
import ca.nrc.cadc.vos.TransferReader;
import ca.nrc.cadc.vos.TransferWriter;
import ca.nrc.cadc.vos.VOSURI;
/**
 * UWS inline-content handler that accepts a VOSpace Transfer XML document,
 * validates/normalizes it, and stores it as the job's JobInfo.
 */
public class TransferInlineContentHandler implements UWSInlineContentHandler
{
    private static Logger log = Logger.getLogger(TransferInlineContentHandler.class);

    // 6Kb XML Doc size limit: caps how much of the request body is read
    private static final long DOCUMENT_SIZE_MAX = 6144L;
    private static final String TEXT_XML = "text/xml";

    public TransferInlineContentHandler() { }

    /**
     * Parse the inline transfer document from the request body.
     *
     * @param name        name of the inline content part (unused)
     * @param contentType must be text/xml
     * @param inputStream request body; read up to DOCUMENT_SIZE_MAX bytes
     * @return Content whose value is a JobInfo holding the normalized XML
     * @throws InlineContentException if the document cannot be parsed
     * @throws IOException if the stream is null or unreadable
     */
    public Content accept(String name, String contentType, InputStream inputStream)
        throws InlineContentException, IOException
    {
        // constant-first comparison: avoids NullPointerException when the
        // client omits the Content-Type header entirely
        if (!TEXT_XML.equals(contentType))
            throw new IllegalArgumentException("Transfer document expected Content-Type is " + TEXT_XML + " not " + contentType);

        if (inputStream == null)
            throw new IOException("The InputStream is closed");

        // wrap the input stream in a byte counter to limit bytes read
        ByteCountInputStream sizeLimitInputStream =
            new ByteCountInputStream(inputStream, DOCUMENT_SIZE_MAX);

        try
        {
            // schema-validating reader
            TransferReader reader = new TransferReader(true);
            Transfer transfer = reader.read(sizeLimitInputStream, VOSURI.SCHEME);
            log.debug("Transfer: read " + sizeLimitInputStream.getByteCount() + " bytes.");

            // re-serialize so the stored JobInfo is normalized XML
            TransferWriter tw = new TransferWriter();
            StringWriter sw = new StringWriter();
            tw.write(transfer, sw);

            Content content = new Content();
            content.name = CONTENT_JOBINFO;
            content.value = new JobInfo(sw.toString(), contentType, true);
            return content;
        }
        catch (TransferParsingException e)
        {
            throw new InlineContentException("Unable to create JobInfo from Transfer Document", e);
        }
    }
}
| opencadc/vos | cadc-vos-server/src/main/java/ca/nrc/cadc/vos/server/TransferInlineContentHandler.java | Java | agpl-3.0 | 6,452 |
# frozen_string_literal: true
require 'spec_helper'
# Regression spec: a workbook whose shared-strings part is stored under a
# non-default internal file name must still be parsed correctly.
describe 'SharedStrings' do
  it 'custom_shared_strings_name.xlsx' do
    xlsx = OoxmlParser::Parser.parse('spec/workbook/shared_strings/custom_shared_strings_name.xlsx')
    # The fixture workbook contains exactly one shared string.
    expect(xlsx.shared_strings_table.count).to eq(1)
  end
end
| ONLYOFFICE/ooxml_parser | spec/workbook/shared_strings_spec.rb | Ruby | agpl-3.0 | 288 |
/*******************************************************************************
* This file is part of Termitaria, a project management tool
* Copyright (C) 2008-2013 CodeSphere S.R.L., www.codesphere.ro
*
* Termitaria is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Termitaria. If not, see <http://www.gnu.org/licenses/> .
******************************************************************************/
package ro.cs.logaudit.entity;
/**
* @author matti_joona
*
*/
/**
 * Plain bean describing a user role, optionally scoped to a {@link Module}.
 * Holds only identity and descriptive fields; no behavior or validation.
 */
public class Role {

    private int roleId;          // primary key
    private String name;
    private String description;
    private String observation;
    private Module module;       // module this role belongs to (may be null)

    /**
     * @return the module
     */
    public Module getModule() {
        return module;
    }

    /**
     * @param module the module to set
     */
    public void setModule(Module module) {
        this.module = module;
    }

    /**
     * @return the roleId
     */
    public int getRoleId() {
        return roleId;
    }

    /**
     * @param roleId the roleId to set
     */
    public void setRoleId(int roleId) {
        this.roleId = roleId;
    }

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the description
     */
    public String getDescription() {
        return description;
    }

    /**
     * @param description the description to set
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return the observation
     */
    public String getObservation() {
        return observation;
    }

    /**
     * @param observation the observation to set
     */
    public void setObservation(String observation) {
        this.observation = observation;
    }

    /**
     * Debug representation listing every field.
     * Uses StringBuilder instead of the legacy StringBuffer: identical
     * output, no unneeded per-call synchronization.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("[");
        sb.append(this.getClass().getSimpleName());
        sb.append(": ");
        sb.append("roleId = ")      .append(roleId)     .append(", ");
        sb.append("name = ")        .append(name)       .append(", ");
        sb.append("description = ") .append(description).append(", ");
        sb.append("observation = ") .append(observation).append(", ");
        sb.append("module = ")      .append(module)     .append("]");
        return sb.toString();
    }
}
| CodeSphere/termitaria | TermitariaAudit/JavaSource/ro/cs/logaudit/entity/Role.java | Java | agpl-3.0 | 2,881 |
<?php

// Renders the API documentation page for the place/point "delete" method.
// Page layout and helpers come from the shared doc framework below.
include_once '../../../../../../lib/defaults.php';
include_once '../fns/place_point_method_page.php';
include_once '../../../../../fns/ApiDoc/trueResult.php';

// Arguments: method name, parameter descriptions, result spec, error map.
place_point_method_page('delete', [
    [
        'name' => 'id',
        'description' => 'The ID of the point to delete.',
    ],
], ApiDoc\trueResult(), [
    // Raised when the given id does not match an existing point.
    'POINT_NOT_FOUND' => "A point with the ID doesn't exist.",
]);
package de.dvdb.domain.model.social;
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * JPA entity mapping one row of the dvdb2_fbsession table: a stored
 * Facebook session key associated with a local user id.
 */
@Entity
@Table(name = "dvdb2_fbsession")
public class FacebookSession implements Serializable {

    private static final long serialVersionUID = -8753714944734959457L;

    private Long id;          // surrogate primary key (auto-generated)
    private String sessionKey; // Facebook session key string
    private Long user;         // local user id (column user_id)

    /** @return the auto-generated primary key, or null if not yet persisted */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    /** @return id of the local user owning this session (user_id column) */
    @Column(name = "user_id")
    public Long getUser() {
        return user;
    }

    public void setUser(Long user) {
        this.user = user;
    }

    /** @return the Facebook session key (sessionkey column) */
    @Column(name = "sessionkey")
    public String getSessionKey() {
        return sessionKey;
    }

    public void setSessionKey(String sessionKey) {
        this.sessionKey = sessionKey;
    }
}
| chris-dvdb/dvdb.de | dvdb-ejb/src/main/java/de/dvdb/domain/model/social/FacebookSession.java | Java | agpl-3.0 | 961 |
/* Copyright (c) 2006-2012 by OpenLayers Contributors (see authors.txt for
* full list of contributors). Published under the 2-clause BSD license.
* See license.txt in the OpenLayers distribution or repository for the
* full text of the license. */
/**
* @requires OpenLayers/Format/SLD/v1.js
* @requires OpenLayers/Format/Filter/v1_0_0.js
*/
/**
* Class: OpenLayers.Format.SLD.v1_0_0
* Write SLD version 1.0.0.
*
* Inherits from:
* - <OpenLayers.Format.SLD.v1>
*/
/**
 * Class: OpenLayers.Format.SLD.v1_0_0
 * Read/Write SLD version 1.0.0. All parsing/serialization logic is
 * inherited from <OpenLayers.Format.SLD.v1>; this subclass only pins the
 * version string and schema location.
 *
 * Inherits from:
 *  - <OpenLayers.Format.SLD.v1>
 */
OpenLayers.Format.SLD.v1_0_0 = OpenLayers.Class(
    OpenLayers.Format.SLD.v1, {

    /**
     * Constant: VERSION
     * {String} 1.0.0
     */
    VERSION: "1.0.0",

    /**
     * Property: schemaLocation
     * {String} http://www.opengis.net/sld
     *   http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd
     */
    schemaLocation: "http://www.opengis.net/sld http://schemas.opengis.net/sld/1.0.0/StyledLayerDescriptor.xsd",

    /**
     * Constructor: OpenLayers.Format.SLD.v1_0_0
     * Instances of this class are not created directly. Use the
     * <OpenLayers.Format.SLD> constructor instead. (No constructor body is
     * defined here; the parent class constructor is used.)
     *
     * Parameters:
     * options - {Object} An optional object whose properties will be set on
     *     this instance.
     */
    CLASS_NAME: "OpenLayers.Format.SLD.v1_0_0"

});
| B3Partners/geo-ov | src/main/webapp/openlayers/lib/OpenLayers/Format/SLD/v1_0_0.js | JavaScript | agpl-3.0 | 1,351 |
// ===================================================================================================
// _ __ _ _
// | |/ /__ _| | |_ _ _ _ _ __ _
// | ' </ _` | | _| || | '_/ _` |
// |_|\_\__,_|_|\__|\_,_|_| \__,_|
//
// This file is part of the Kaltura Collaborative Media Suite which allows users
// to do with audio, video, and animation what Wiki platfroms allow them to do with
// text.
//
// Copyright (C) 2006-2015 Kaltura Inc.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
// @ignore
// ===================================================================================================
package com.kaltura.client.types;
import org.w3c.dom.Element;
import com.kaltura.client.KalturaParams;
import com.kaltura.client.KalturaApiException;
/**
* This class was generated using generate.php
* against an XML schema provided by Kaltura.
*
* MANUAL CHANGES TO THIS CLASS WILL BE OVERWRITTEN.
*/
/**
 * Generated filter class for admin uiConf listing. Adds no fields of its
 * own; it only tags serialized parameters with its concrete type name so
 * the server instantiates the right filter.
 */
@SuppressWarnings("serial")
public class KalturaUiConfAdminFilter extends KalturaUiConfAdminBaseFilter {

    /** Default constructor for building a filter client-side. */
    public KalturaUiConfAdminFilter() {
    }

    /** Deserializing constructor: populate fields from an API XML element. */
    public KalturaUiConfAdminFilter(Element node) throws KalturaApiException {
        super(node);
    }

    /** Serialize, adding the objectType discriminator for the server. */
    public KalturaParams toParams() throws KalturaApiException {
        KalturaParams kparams = super.toParams();
        kparams.add("objectType", "KalturaUiConfAdminFilter");
        return kparams;
    }
}
| moskiteau/KalturaGeneratedAPIClientsJava | src/main/java/com/kaltura/client/types/KalturaUiConfAdminFilter.java | Java | agpl-3.0 | 2,117 |
/*
* _Bullseye.cpp
*
* Created on: Aug 21, 2015
* Author: yankai
*/
#include "_Bullseye.h"
#ifdef USE_OPENCV
#ifdef USE_CUDA
namespace kai
{

// Default tuning constants for the red-hue bullseye detector.
_Bullseye::_Bullseye()
{
    m_abs = 90;        // hue value treated as the center of "red" (assumes OpenCV H in [0,180) -- TODO confirm)
    m_scale1 = 0.25;   // weight applied to the hue-distance image
    m_scale2 = 0.0625; // weight applied to the saturation image
    m_thr1 = 200;      // binarization threshold
    m_thr2 = 255;      // value assigned to pixels above the threshold
}

_Bullseye::~_Bullseye()
{
}

// Read tuning parameters from the Kiss config; missing keys keep the
// constructor defaults. IF_F returns false if the base init fails.
bool _Bullseye::init(void* pKiss)
{
    IF_F(!this->_DetectorBase::init(pKiss));
    Kiss* pK = (Kiss*)pKiss;

    pK->v("abs", &m_abs);
    pK->v("scale1", &m_scale1);
    pK->v("scale2", &m_scale2);
    pK->v("thr1", &m_thr1);
    pK->v("thr2", &m_thr2);

    // single detection class: "bullseye"
    m_nClass = 1;

    return true;
}

// Launch the worker thread; fails if no thread object was configured.
bool _Bullseye::start(void)
{
    NULL_F(m_pT);
    return m_pT->start(getUpdate, this);
}

// Worker loop: run detect() whenever a frame is available, pacing with
// the thread's auto-FPS throttle.
void _Bullseye::update(void)
{
    while(m_pT->bRun())
    {
        m_pT->autoFPSfrom();

        if(check() >= 0)
        {
            detect();

            // when going to sleep, drop any queued detections
            if(m_bGoSleep)
                m_pU->clear();
        }

        m_pT->autoFPSto();
    }
}

// Preconditions for a detection pass: universe and video source exist and
// the current BGR frame is non-empty. Returns a negative code otherwise.
int _Bullseye::check(void)
{
    NULL__(m_pU,-1);
    NULL__(m_pV,-1);
    IF__(m_pV->BGR()->bEmpty(),-1);

    return this->_DetectorBase::check();
}

// GPU pipeline: emphasize red, saturated pixels, binarize, then find
// contours on the CPU and publish one _Object per contour.
void _Bullseye::detect(void)
{
    GpuMat mBGR = *(m_pV->BGR()->gm());
    GpuMat mHSV;
    cuda::cvtColor(mBGR, mHSV, COLOR_BGR2HSV);

    vector<GpuMat> vmHSV(3);
    split(mHSV, vmHSV);
    GpuMat mH = vmHSV[0];
    GpuMat mS = vmHSV[1];
    GpuMat mV = vmHSV[2];

    GpuMat gHred;
    GpuMat gScaleHred;
    GpuMat gScaleS;
    GpuMat gBulleye;
    GpuMat gThr;

    // |H - m_abs|: distance from the target hue, scaled and multiplied by
    // scaled saturation so only strongly colored, near-target pixels score high
    cuda::absdiff(mH, Scalar(m_abs), gHred);
    cuda::multiply(gHred, Scalar(m_scale1), gScaleHred);
    cuda::multiply(mS, Scalar(m_scale2), gScaleS);
    cuda::multiply(gScaleHred, gScaleS, gBulleye);
    cuda::threshold(gBulleye, gThr, m_thr1, m_thr2, THRESH_BINARY); //THRESH_BINARY_INV);

    // contour extraction is CPU-only: download the binary mask first
    Mat mThr;
    gThr.download(mThr);
    vector< vector< Point > > vvContours;
    findContours(mThr, vvContours, RETR_EXTERNAL, CHAIN_APPROX_NONE);

    // normalization factors: bounding boxes are published in [0,1] coords
    float kx = 1.0/mBGR.cols;
    float ky = 1.0/mBGR.rows;

    _Object o;
    vector<Point> vPoly;
    for (unsigned int i=0; i<vvContours.size(); i++)
    {
        vPoly.clear();
        approxPolyDP( vvContours[i], vPoly, 3, true );
        Rect r = boundingRect(vPoly);

        o.init();
        o.setTstamp(m_pT->getTfrom());
        o.setBB2D(rect2BB<vFloat4>(r));
        o.scale(kx,ky);
        // class 0 with the box area as confidence score
        o.setTopClass(0, o.area());

        m_pU->add(o);
        LOG_I("ID: "+ i2str(o.getTopClass()));
    }

    // publish this frame's detections
    m_pU->swap();
}

}
#endif
#endif
| yankailab/OpenKAI | src/Detector/_Bullseye.cpp | C++ | agpl-3.0 | 2,195 |
<?php
/**
* SowerPHP
* Copyright (C) SowerPHP (http://sowerphp.org)
*
* Este programa es software libre: usted puede redistribuirlo y/o
* modificarlo bajo los términos de la Licencia Pública General Affero de GNU
* publicada por la Fundación para el Software Libre, ya sea la versión
* 3 de la Licencia, o (a su elección) cualquier versión posterior de la
* misma.
*
* Este programa se distribuye con la esperanza de que sea útil, pero
* SIN GARANTÍA ALGUNA; ni siquiera la garantía implícita
* MERCANTIL o de APTITUD PARA UN PROPÓSITO DETERMINADO.
* Consulte los detalles de la Licencia Pública General Affero de GNU para
* obtener una información más detallada.
*
* Debería haber recibido una copia de la Licencia Pública General Affero de GNU
* junto a este programa.
* En caso contrario, consulte <http://www.gnu.org/licenses/agpl.html>.
*/
// namespace del modelo
namespace website\Honorarios;
/**
* Clase para mapear la tabla boleta_tercero de la base de datos
* Comentario de la tabla:
* Esta clase permite trabajar sobre un conjunto de registros de la tabla boleta_tercero
* @author SowerPHP Code Generator
* @version 2019-08-09 15:59:48
*/
class Model_BoletaTerceros extends \Model_Plural_App
{

    // Database connection settings
    protected $_database = 'default'; ///< database used by this model
    protected $_table = 'boleta_tercero'; ///< table backing this model

    // Retention (tax withholding) rates, keyed by the first period (YYYYMM)
    // in which each rate applies.
    private $tasas_retencion = [
        201608 => 0.1000,
        202001 => 0.1075,
        202101 => 0.1150,
        202201 => 0.1225,
        202301 => 0.1300,
        202401 => 0.1375,
        202501 => 0.1450,
        202601 => 0.1525,
        202701 => 0.1600,
        202801 => 0.1700,
    ];

    /**
     * Synchronize the third-party fee vouchers (BTE) registered at the SII
     * for this taxpayer with the local records, covering the last $meses
     * months (current period included). Creates missing receptors and
     * upserts each voucher; throws if a voucher cannot be saved.
     * @author Esteban De La Fuente Rubio, DeLaF (esteban[at]sasco.cl)
     * @version 2021-06-29
     */
    public function sincronizar($meses)
    {
        // build the list of periods to process (oldest first)
        $periodo_actual = (int)date('Ym');
        $periodos = [$periodo_actual];
        for ($i = 0; $i < $meses-1; $i++) {
            $periodos[] = \sowerphp\general\Utility_Date::previousPeriod($periodos[$i]);
        }
        sort($periodos);
        // synchronize each period
        foreach ($periodos as $periodo) {
            $boletas = $this->getBoletas($periodo);
            foreach ($boletas as $boleta) {
                list($receptor_rut, $receptor_dv) = explode('-', $boleta['receptor_rut']);
                $Receptor = new \website\Dte\Model_Contribuyente($receptor_rut);
                // create the receptor contribuyente if it does not exist yet
                if (!$Receptor->razon_social) {
                    $Receptor->rut = $receptor_rut;
                    $Receptor->dv = $receptor_dv;
                    $Receptor->razon_social = mb_substr($boleta['receptor_nombre'], 0, 100);
                    $Receptor->save();
                }
                // upsert the voucher; 'ANUL' state marks it as voided
                $BoletaTercero = new Model_BoletaTercero($this->getContribuyente()->rut, $boleta['numero']);
                $BoletaTercero->receptor = $Receptor->rut;
                $BoletaTercero->anulada = (int)($boleta['estado'] == 'ANUL');
                $BoletaTercero->set($boleta);
                if (!$BoletaTercero->save()) {
                    throw new \Exception('No fue posible guardar la BTE #'.$BoletaTercero->numero.' de '.$Receptor->getRUT().' del día '.\sowerphp\general\Utility_Date::format($BoletaTercero->fecha));
                }
            }
        }
    }

    /**
     * Fetch the vouchers issued in a given period (YYYYMM) from the SII via
     * the LibreDTE API. A 404 is treated as "no vouchers"; any other
     * non-200 status raises an exception.
     * @author Esteban De La Fuente Rubio, DeLaF (esteban[at]sasco.cl)
     * @version 2020-01-26
     */
    public function getBoletas($periodo)
    {
        $r = libredte_api_consume('/sii/bte/emitidas/documentos/'.$this->getContribuyente()->getRUT().'/'.$periodo.'?formato=json', [
            'auth' => [
                'pass' => [
                    'rut' => $this->getContribuyente()->getRUT(),
                    'clave' => $this->getContribuyente()->config_sii_pass,
                ],
            ],
        ]);
        if ($r['status']['code']!=200) {
            if ($r['status']['code']==404) {
                return [];
            }
            throw new \Exception('Error al obtener boletas de terceros del período '.(int)$periodo.' desde el SII: '.$r['body'], $r['status']['code']);
        }
        return $r['body'];
    }

    /**
     * Summarize issued (non-voided) vouchers per period: count, date range
     * and totals. If $periodo is given, restrict to that period only.
     * @author Esteban De La Fuente Rubio, DeLaF (esteban[at]sasco.cl)
     * @version 2019-08-10
     */
    public function getPeriodos($periodo = null)
    {
        $periodo_col = $this->db->date('Ym', 'fecha');
        $where = ['emisor = :emisor', 'anulada = false'];
        $vars = [':emisor'=>$this->getContribuyente()->rut];
        if ($periodo) {
            $where[] = $periodo_col.' = :periodo';
            $vars[':periodo'] = $periodo;
        }
        return $this->db->getTable('
            SELECT
                '.$periodo_col.' AS periodo,
                COUNT(*) AS cantidad,
                MIN(fecha) AS fecha_inicial,
                MAX(fecha) AS fecha_final,
                SUM(total_honorarios) AS honorarios,
                SUM(total_liquido) AS liquido,
                SUM(total_retencion) AS retencion
            FROM boleta_tercero
            WHERE '.implode(' AND ', $where).'
            GROUP BY '.$periodo_col.'
            ORDER BY '.$periodo_col.' DESC
        ', $vars);
    }

    /**
     * Summary row of a single period, or an empty array if there is none.
     * @author Esteban De La Fuente Rubio, DeLaF (esteban[at]sasco.cl)
     * @version 2019-08-10
     */
    public function getPeriodo($periodo)
    {
        $datos = $this->getPeriodos($periodo);
        return !empty($datos) ? $datos[0] : [];
    }

    /**
     * Search vouchers using optional filters (period, receptor RUT, date
     * and amount ranges, voided flag, SII branch). All filter values are
     * bound as SQL parameters. Results include the resolved branch name.
     * @author Esteban De La Fuente Rubio, DeLaF (esteban[at]sasco.cl)
     * @version 2019-08-23
     */
    public function buscar(array $filtros = [], $order = 'ASC')
    {
        $where = ['b.emisor = :emisor'];
        $vars = [':emisor'=>$this->getContribuyente()->rut];
        if (!empty($filtros['periodo'])) {
            $periodo_col = $this->db->date('Ym', 'b.fecha');
            $where[] = $periodo_col.' = :periodo';
            $vars[':periodo'] = $filtros['periodo'];
        }
        if (!empty($filtros['receptor'])) {
            // accept "12345678-9" (with optional dots) or a bare numeric RUT
            if (strpos($filtros['receptor'], '-')) {
                list($rut, $dv) = explode('-', str_replace('.', '', $filtros['receptor']));
            } else {
                $rut = (int)$filtros['receptor'];
            }
            $where[] = 'b.receptor = :receptor';
            $vars[':receptor'] = $rut;
        }
        if (!empty($filtros['fecha_desde'])) {
            $where[] = 'b.fecha >= :fecha_desde';
            $vars[':fecha_desde'] = $filtros['fecha_desde'];
        }
        if (!empty($filtros['fecha_hasta'])) {
            $where[] = 'b.fecha <= :fecha_hasta';
            $vars[':fecha_hasta'] = $filtros['fecha_hasta'];
        }
        if (!empty($filtros['honorarios_desde'])) {
            $where[] = 'b.total_honorarios >= :honorarios_desde';
            $vars[':honorarios_desde'] = $filtros['honorarios_desde'];
        }
        if (!empty($filtros['honorarios_hasta'])) {
            $where[] = 'b.total_honorarios <= :honorarios_hasta';
            $vars[':honorarios_hasta'] = $filtros['honorarios_hasta'];
        }
        if (isset($filtros['anulada'])) {
            if ($filtros['anulada']) {
                $where[] = 'b.anulada = true';
            } else {
                $where[] = 'b.anulada = false';
            }
        }
        // branch filter: 0 means "no branch assigned" (NULL column)
        if (isset($filtros['sucursal_sii']) and is_numeric($filtros['sucursal_sii'])) {
            if ($filtros['sucursal_sii']) {
                $where[] = 'b.sucursal_sii = :sucursal_sii';
                $vars[':sucursal_sii'] = $filtros['sucursal_sii'];
            } else {
                $where[] = 'b.sucursal_sii IS NULL';
            }
        }
        $boletas = $this->db->getTable('
            SELECT
                b.codigo,
                b.receptor AS receptor_rut,
                c.dv AS receptor_dv,
                c.razon_social AS receptor_razon_social,
                b.numero,
                b.fecha,
                b.fecha_emision,
                b.total_honorarios AS honorarios,
                b.total_liquido AS liquido,
                b.total_retencion AS retencion,
                b.anulada,
                b.sucursal_sii
            FROM
                boleta_tercero AS b
                LEFT JOIN contribuyente AS c ON c.rut = b.receptor
            WHERE
                '.implode(' AND ', $where).'
            ORDER BY b.fecha '.$order.', b.numero '.$order.'
        ', $vars);
        // resolve the branch code to its display name
        foreach ($boletas as &$b) {
            $b['sucursal'] = $this->getContribuyente()->getSucursal($b['sucursal_sii'])->sucursal;
        }
        return $boletas;
    }

    /**
     * Issue a BTE at the SII through the LibreDTE API and persist it
     * locally; returns the saved local Model_BoletaTercero. Also creates
     * (or enriches) the receptor's contribuyente record when possible.
     * @author Esteban De La Fuente Rubio, DeLaF (esteban[at]sasco.cl)
     * @version 2021-07-29
     */
    public function emitir($boleta)
    {
        // create the receptor if it does not exist
        list($receptor_rut, $receptor_dv) = explode('-', $boleta['Encabezado']['Receptor']['RUTRecep']);
        $Receptor = new \website\Dte\Model_Contribuyente($receptor_rut);
        if (!$Receptor->razon_social) {
            $Receptor->rut = $receptor_rut;
            $Receptor->dv = $receptor_dv;
            $Receptor->razon_social = mb_substr($boleta['Encabezado']['Receptor']['RznSocRecep'], 0, 100);
            $Receptor->save();
        }
        // call the web service to actually issue the voucher
        $r = libredte_api_consume('/sii/bte/emitidas/emitir', [
            'auth' => [
                'pass' => [
                    'rut' => $this->getContribuyente()->getRUT(),
                    'clave' => $this->getContribuyente()->config_sii_pass,
                ],
            ],
            'boleta' => $boleta,
        ]);
        if ($r['status']['code']!=200) {
            throw new \Exception('Error al emitir boleta: '.$r['body'], $r['status']['code']);
        }
        $boleta = $r['body'];
        // the barcode is proof the SII actually accepted the voucher
        if (empty($boleta['Encabezado']['IdDoc']['CodigoBarras'])) {
            throw new \Exception('No fue posible emitir la boleta o bien no se pudo obtener el código de barras de la boleta emitida');
        }
        // persist the issued voucher locally
        $BoletaTercero = new Model_BoletaTercero();
        $BoletaTercero->emisor = $this->getContribuyente()->rut;
        $BoletaTercero->numero = $boleta['Encabezado']['IdDoc']['Folio'];
        $BoletaTercero->codigo = $boleta['Encabezado']['IdDoc']['CodigoBarras'];
        $BoletaTercero->receptor = $Receptor->rut;
        $BoletaTercero->fecha = $boleta['Encabezado']['IdDoc']['FchEmis'];
        $BoletaTercero->fecha_emision = date('Y-m-d');
        $BoletaTercero->total_honorarios = $boleta['Encabezado']['Totales']['MntBruto'];
        $BoletaTercero->total_retencion = $boleta['Encabezado']['Totales']['MntRetencion'];
        $BoletaTercero->total_liquido = $boleta['Encabezado']['Totales']['MntNeto'];
        $BoletaTercero->anulada = 0;
        if (!empty($boleta['Encabezado']['Emisor']['CdgSIISucur'])) {
            $BoletaTercero->sucursal_sii = $boleta['Encabezado']['Emisor']['CdgSIISucur'];
        }
        $BoletaTercero->save();
        // enrich the receptor's data when it has no registered user;
        // failures here are deliberately ignored (best effort only)
        $Receptor = $BoletaTercero->getReceptor();
        if (!$Receptor->usuario) {
            $Receptor->razon_social = mb_substr($boleta['Encabezado']['Receptor']['RznSocRecep'],0,100);
            $Receptor->direccion = mb_substr($boleta['Encabezado']['Receptor']['DirRecep'],0,70);
            $comuna = (new \sowerphp\app\Sistema\General\DivisionGeopolitica\Model_Comunas())->getComunaByName($boleta['Encabezado']['Receptor']['CmnaRecep']);
            if ($comuna) {
                $Receptor->comuna = $comuna;
            }
            try {
                $Receptor->save();
            } catch (\Exception $e) {
            }
        }
        // return the locally stored voucher
        return $BoletaTercero;
    }

    /**
     * Retention rates table, most recent period first.
     * @author Esteban De La Fuente Rubio, DeLaF (esteban[at]sasco.cl)
     * @version 2020-01-26
     */
    public function getTasasRetencion()
    {
        krsort($this->tasas_retencion);
        return $this->tasas_retencion;
    }

}
| LibreDTE/libredte-webapp | website/Module/Honorarios/Model/BoletaTerceros.php | PHP | agpl-3.0 | 12,733 |
#!/usr/bin/env python
from __future__ import print_function, division
import multiprocessing
import os
import csv
import datetime
import logging
from datetime import datetime
import argparse
import shutil
import math
from glob import glob
import gzip
from shi7 import __version__
from shi7.shi7 import TRUE_FALSE_DICT, read_fastq, axe_adaptors_single_end, axe_adaptors_paired_end, flash_part1, \
flash_part2, split_fwd_rev, match_pairs, link_manicured_names
def make_arg_parser():
    """Build the argparse parser for the shi7_learning CLI.

    Defaults: output directory is the current working directory; thread
    count is capped at min(cpu_count, 16).
    """
    parser = argparse.ArgumentParser(description='This is the commandline interface for shi7_learning',
                                     usage='shi7_learning v{version}\nshi7_learning.py -i <input> -o <output> ...'.format(version=__version__))
    parser.add_argument('-i', '--input', help='Set the directory path of the fastq directory OR oligos.txt if splitting', required=True)
    parser.add_argument('-o', '--output', help='Set the directory path of the output (default: cwd)', default=os.getcwd())
    parser.add_argument('--debug', help='Retain all intermediate files (default: Disabled)', dest='debug', action='store_true')
    parser.add_argument('-t', '--threads', help='Set the number of threads (default: %(default)s)',
                        default=min(multiprocessing.cpu_count(), 16))
    parser.add_argument('-v', '--version', action='version', version='%(prog)s ' + __version__)
    parser.set_defaults()
    return parser
def subsample_fastqs(path_fastqs, num_files=10, num_sequences=1000):
    """Yield a capped FASTQ-record generator for each of the first
    ``num_files`` files in ``path_fastqs``.

    NOTE(review): the file handle is closed when the outer loop advances,
    so each yielded generator must be fully consumed before requesting the
    next one -- confirm callers do this.
    """
    for i, path_fastq in enumerate(path_fastqs):
        if i >= num_files:
            return
        with open(path_fastq) as fastq_inf:
            fastq_gen = read_fastq(fastq_inf)
            yield limit_fastq(fastq_gen, num_sequences=num_sequences)
def limit_fastq(fastq_gen, num_sequences=1000):
    """Yield at most ``num_sequences`` records from ``fastq_gen``.

    Consumes exactly one record per item yielded and stops silently if the
    underlying generator runs out first.
    """
    yielded = 0
    while yielded < num_sequences:
        try:
            record = next(fastq_gen)
        except StopIteration:
            return
        yield record
        yielded += 1
def get_seq_length_qual_scores(path_fastqs, output_path, num_files=10, num_sequences=1000):
    """Write subsampled copies of the FASTQs into ``output_path`` and
    return (average sequence length, average per-base quality).

    The quality average is over raw ASCII code points (Phred+33 offset is
    NOT subtracted here).
    """
    subsampled_fastqs = subsample_fastqs(path_fastqs, num_files=num_files, num_sequences=num_sequences)
    sequence_len_sum = 0.
    quality_sum = 0
    # NOTE(review): the ``num_sequences`` parameter is re-purposed here as
    # the running counter, shadowing the cap passed above -- intentional?
    num_sequences = 0.
    for fastq_path, fastq_gen in zip(path_fastqs, subsampled_fastqs):
        with open(os.path.join(output_path, os.path.basename(fastq_path)), 'w') as outf:
            for header, sequence, quality in fastq_gen:
                outf.write("@%s\n%s\n+\n%s\n" % (header, sequence, quality))
                sequence_len_sum += len(sequence)
                quality_sum += sum([ord(i) for i in quality])
                num_sequences += 1.
    # Return (average length of sequences, average quality score)
    return sequence_len_sum/num_sequences, quality_sum/sequence_len_sum
def count_num_lines(path):
    """Return the number of lines in the text file at ``path``."""
    total = 0
    with open(path) as handle:
        for _line in handle:
            total += 1
    return total
def get_file_size(path):
    """Return the size of the file at ``path`` in bytes."""
    return os.stat(path).st_size
def check_sequence_name(path_R1, path_R2):
    """Return True if every R1/R2 header pair differs in exactly one
    character, and that character increases by exactly 1 (e.g. ``.../1``
    vs ``.../2``), indicating a conventional paired-end naming scheme.

    NOTE(review): raises ValueError if the differing character is not a
    digit -- confirm upstream headers guarantee this.
    """
    with open(path_R1) as path_inf_R1, open(path_R2) as path_inf_R2:
        fastq_gen_R1 = read_fastq(path_inf_R1)
        fastq_gen_R2 = read_fastq(path_inf_R2)
        for gen_R1, gen_R2 in zip(fastq_gen_R1,fastq_gen_R2):
            title_R1, title_R2 = gen_R1[0], gen_R2[0]
            # headers of unequal length cannot be a simple /1 vs /2 pair
            if len(title_R1) != len(title_R2):
                return False
            diff_idx = [i for i in range(len(title_R1)) if title_R1[i] != title_R2[i]]
            # exactly one position may differ
            if len(diff_idx) != 1:
                return False
            # and it must be R1's digit + 1
            if int(title_R2[diff_idx[0]]) - int(title_R1[diff_idx[0]]) != 1:
                return False
    return True
def detect_paired_end(path_fastqs):
    """Decide whether ``path_fastqs`` look like paired-end data.

    Returns (is_paired, pair_obj) where pair_obj is the structure produced
    by ``match_pairs`` (or a placeholder list when the count is odd).
    """
    path_fastqs = [f for f in path_fastqs if f.endswith('.fastq') or f.endswith('.fq') or f.endswith('.fastq.gz') or f.endswith('.fq.gz')]
    # an odd number of files can never be fully paired
    if len(path_fastqs) % 2 == 1: return False, [path_fastqs, None, None, None]
    pair_obj = match_pairs(path_fastqs, True)
    path_fastqs = pair_obj[0]
    # match_pairs signals "no pairing found" with a None second element
    if pair_obj[1]==None: return False, pair_obj
    return True, pair_obj
def get_directory_size(path):
    """Return the combined size in bytes of all entries directly in ``path``."""
    total = 0
    for entry in os.listdir(path):
        total += os.path.getsize(os.path.join(path, entry))
    return total
def remove_directory_contents(path):
    """Delete every file directly inside ``path``; the directory remains."""
    full_paths = [os.path.join(path, entry) for entry in os.listdir(path)]
    for full_path in full_paths:
        os.remove(full_path)
def choose_axe_adaptors(path_subsampled_fastqs, paired_end, output_path, threads):
    """Try each known adaptor set and keep the one that shrinks the
    subsampled FASTQs the most.

    Returns (best_adaptor_name_or_None, resulting_size_bytes, file_list).
    An adaptor is only accepted if trimming removed at least 0.5%% of the
    original bytes; otherwise the inputs are returned untouched.
    """
    adapters = ['TruSeq2', 'TruSeq3', 'TruSeq3-2', 'Nextera']
    threads = min(threads, multiprocessing.cpu_count(), 16)
    original_size = get_directory_size(os.path.dirname(path_subsampled_fastqs[0]))
    logging.info('Original size of the subsampled_fastqs = ' + str(original_size))
    best_size = original_size
    best_adap = None
    for adapter in adapters:
        if paired_end:
            axe_adaptors_paired_end(path_subsampled_fastqs, output_path, adapter, threads, shell=False)
        else:
            axe_adaptors_single_end(path_subsampled_fastqs, output_path, adapter, threads, shell=False)
        # smaller output => more adaptor sequence was present and removed
        fastqs_path_size = get_directory_size(output_path)
        logging.info("Adapters: {adapter}\tFile Size: {filesize}".format(adapter=adapter, filesize=fastqs_path_size))
        if fastqs_path_size <= best_size:
            best_size = fastqs_path_size
            best_adap = adapter
    if best_size < 0.995*original_size:
        # Actually write the best files again for use in later steps
        logging.info("Best Adapters: {adapter}\tFile Size: {filesize}".format(adapter=best_adap, filesize=best_size))
        if paired_end:
            files = axe_adaptors_paired_end(path_subsampled_fastqs, output_path, best_adap, threads, shell=False)
        else:
            files = axe_adaptors_single_end(path_subsampled_fastqs, output_path, best_adap, threads, shell=False)
        return best_adap, best_size, files
    else:
        return None, original_size, path_subsampled_fastqs
def flash_stitchable_and_check_outies(adapter_output_filenames, flash_output_path, threads):
    """Run FLASH on the adaptor-trimmed pairs and decide (a) whether the
    reads stitch well and (b) whether "outie" orientations are common.

    Returns (stitchable, allow_outies, stitched_file_paths):
      - stitchable: >= 75%% of files kept > 30%% of their reads after stitching
      - allow_outies: >= 75%% of files reported >= 15%% outies in FLASH's log
    """
    flash_output_str = flash_part1(adapter_output_filenames, flash_output_path, max_overlap=700, \
                                   min_overlap=10, allow_outies=True, threads=threads, shell=False)
    allow_outies_count = 0
    for flash_out in flash_output_str:
        flash_str_list = flash_out.strip().split('\n')
        # FLASH prints the outie percentage 8 lines from the end of its log
        outies_info = flash_str_list[-8]
        outies_percent = float(outies_info[outies_info.find('(')+1:outies_info.find('%')])
        if outies_percent >= 15:
            allow_outies_count += 1
    path_flash_fqs = flash_part2(flash_output_str, flash_output_path)
    path_R1_fastqs, _ = split_fwd_rev(adapter_output_filenames)
    matched_count = 0
    for original_fq, flash_fq in zip(path_R1_fastqs, path_flash_fqs):
        # a stitched file keeping >30% of the original reads counts as a match
        if count_num_lines(flash_fq) > count_num_lines(original_fq)*0.3:
            matched_count = matched_count + 1
    return matched_count/len(path_flash_fqs) >= 0.75, allow_outies_count/len(flash_output_str) >= 0.75, path_flash_fqs
def flash_check_cv(flash_output_path):
    """Summarize FLASH ``.hist`` files (tab-separated ``length<TAB>count``
    rows) in ``flash_output_path``.

    Returns (mean coefficient of variation, mean fragment length), each
    averaged across the histogram files found.

    Raises ZeroDivisionError if no ``.hist`` files are present (unchanged
    behavior), and ValueError on non-integer histogram cells.
    """
    hist_files = [os.path.join(flash_output_path, f) for f in os.listdir(flash_output_path) if f.endswith('.hist')]
    total_cv = total_mean = 0
    for hist_file in hist_files:
        # accumulators renamed from 'sum'/'cnt': 'sum' shadowed the builtin
        weighted_sum = 0   # sum of length * count
        weighted_sq = 0    # sum of length^2 * count
        n_fragments = 0    # total fragment count
        with open(hist_file) as inf:
            csv_inf = csv.reader(inf, delimiter="\t")
            for row in csv_inf:
                row = [int(r) for r in row]
                n_fragments = n_fragments + row[1]
                weighted_sum = weighted_sum + row[0] * row[1]
                weighted_sq = weighted_sq + row[0] * row[0] * row[1]
        mean = weighted_sum / n_fragments
        # sample standard deviation from the weighted sums
        std = math.sqrt((weighted_sq - weighted_sum * weighted_sum / n_fragments) / (n_fragments - 1))
        total_cv = total_cv + std / mean
        total_mean = total_mean + mean
    total_files = len(hist_files)
    return total_cv / total_files, total_mean / total_files
def trimmer_learning(flash_output_filenames):
    """Derive quality thresholds from the stitched FASTQs.

    Returns (filter_q, trim_q):
      - filter_q: floor of the mean per-base quality over all reads
      - trim_q: floor of the mean quality over the 10 leading + 10 trailing
        bases of reads >= 20bp, minus 1, clamped to >= filter_q - 3
    """
    filter_q_sum = 0
    trim_q_sum = 0
    totbases = 0
    tottrim = 0
    num = 0
    for fq_path in flash_output_filenames:
        with open(fq_path) as fq_inf:
            fq_gen = read_fastq(fq_inf)
            for gen in fq_gen:
                num = num + 1
                qualities = gen[2]
                totbases = totbases + len(qualities)
                # convert ASCII quality characters to Phred scores (offset 33)
                qualities = [ord(qual)-33 for qual in qualities]
                filter_q_sum = filter_q_sum + sum(qualities)
                # read ends are the usual low-quality regions: sample
                # 10 bases from each end of sufficiently long reads
                if (len(qualities) >= 20):
                    trim_q_sum = trim_q_sum + sum(qualities[:10]) + sum(qualities[-10:])
                    tottrim = tottrim + 20
    logging.info('num seqs: %d' % num)
    logging.info('filter_q_sum: %d' % filter_q_sum)
    logging.info('trim_q_sum: %d' % trim_q_sum)
    logging.info('total bases considered: %d (trim: %d)' % (totbases, tottrim))
    logging.info('filter_q: %d' % (filter_q_sum/totbases))
    logging.info('trim_q: %d' % (trim_q_sum/tottrim))
    filter_q = math.floor(filter_q_sum/totbases)
    trim_q = math.floor(trim_q_sum/tottrim)-1
    # never let the trim threshold fall more than 3 below the filter threshold
    trim_q = trim_q if trim_q > filter_q - 3 else filter_q - 3
    return filter_q, trim_q
def template_input(input):
    """Return the log line and CLI arguments recording the input path."""
    absolute = os.path.abspath(input)
    log_line = "input\t{}".format(absolute)
    return log_line, ["--input", absolute]
def template_paired_end(bool):
    """Return the log line and extra CLI flags for paired-endedness.

    Paired data needs no flag (None); single-end data adds '-SE'.
    """
    log_line = "paired_end\t{}".format(str(bool))
    extra_args = None if bool else ["-SE"]
    return log_line, extra_args
def template_trim(filt_q, trim_q):
    """Return the log line and CLI args for the quality thresholds."""
    log_line = "filt_q: %d, trim_q: %d" % (filt_q, trim_q)
    cli_args = ["--filter_qual", str(filt_q)]
    cli_args += ["--trim_qual", str(trim_q)]
    return log_line, cli_args
def template_cv(minstitch, maxstitch):
    """Return the log line and CLI args for the stitch overlap bounds."""
    log_line = "minstitch: %d, maxstitch: %d" % (minstitch, maxstitch)
    cli_args = ["--min_overlap", str(minstitch)]
    cli_args += ["--max_overlap", str(maxstitch)]
    return log_line, cli_args
def template_output(output):
    """Return the log line and CLI arguments recording the output path."""
    absolute = os.path.abspath(output)
    return "output\t{}".format(absolute), ["--output", absolute]
def template_choose_axe_adaptors(best_adapt, best_size):
    """Return the log line and CLI args for the chosen adaptor set.

    ``best_size`` is accepted for interface parity but not used here.
    A falsy ``best_adapt`` means adaptor trimming gave no benefit.
    """
    if not best_adapt:
        return "axe_adaptors\tNA", ["--adaptor", "None"]
    return "axe_adaptors\t" + best_adapt, ["--adaptor", best_adapt]
def template_flash(stitches, do_outies):
    """Return the log line and CLI args for the FLASH stitching decision."""
    log_line = "stitches: %s, outies: %s" % (stitches, do_outies)
    return log_line, ["--flash", str(stitches), "--allow_outies", str(do_outies)]
def main():
    """Learn optimal shi7 parameters from a subsample of the input FASTQs.

    Pipeline: subsample reads -> detect paired-endedness -> detect
    adaptors -> detect stitchability -> learn quality cutoffs -> optionally
    derive amplicon-mode stitch bounds. Writes the resulting shi7 command
    line to shi7_cmd.sh and a human-readable summary to
    learning_params.txt in the output folder.
    """
    start_time = datetime.now()
    parser = make_arg_parser()
    args = parser.parse_args()
    # learning_params accumulates the actual CLI tokens; learning_pretty
    # accumulates (label, value) pairs for the human-readable summary.
    learning_params = ["shi7.py"]
    learning_pretty = ["SHI7 version", __version__]
    input = os.path.abspath(args.input)
    output = os.path.abspath(args.output)
    # Make output folder
    if not os.path.exists(output):
        os.makedirs(output)
    # Put in the logging file
    logging.basicConfig(filename=os.path.join(output, 'shi7_learning.log'), filemode='w', level=logging.DEBUG, \
        format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
    # Make temp outfolder (recreated fresh on every run)
    if os.path.exists(os.path.join(args.output, 'temp')):
        shutil.rmtree(os.path.join(args.output, 'temp'))
        logging.info('Existing temp directory deleted.')
        os.makedirs(os.path.join(args.output, 'temp'))
    else:
        os.makedirs(os.path.join(args.output, 'temp'))
    path_fastqs = [os.path.join(input, f) for f in os.listdir(input) if f.endswith('fastq') or f.endswith('fq') or f.endswith('fq.gz') or f.endswith('fastq.gz')]
    if len(path_fastqs) == 0:
        msg = "No FASTQS found in input folder {}".format(input)
        logging.critical(msg)
        raise IOError(msg)
    # Record the input
    results, addon = template_input(input)
    logging.info(results)
    if addon:
        learning_params.extend(addon)
    # Write temp subsampled fastqs
    subsampled_fastq_path = os.path.join(output, 'temp', 'subsampled')
    os.makedirs(subsampled_fastq_path)
    totbases = totseqs = 0
    for file in path_fastqs:
        basename = os.path.basename(file)
        # Transparently handle gzipped and plain FASTQ files
        if(file.endswith('.fastq') or file.endswith('.fq')):
            fastq_inf = open(file)
        else:
            fastq_inf = gzip.open(file, 'rt')
        fastq_gen = read_fastq(fastq_inf)
        if(basename.endswith('.gz')):
            basename = basename[:-3]
        with open(os.path.join(subsampled_fastq_path, basename), 'w') as outf:
            for header, seq, quality in limit_fastq(fastq_gen):
                outf.write("@{header}\n{seq}\n+\n{quality}\n".format(header=header, seq=seq, quality=quality))
                totbases += len(seq)
                totseqs += 1
    # Average read length over the subsample
    avlen = totbases/totseqs
    path_fastqs = glob(os.path.join(subsampled_fastq_path , "*"))
    # Detect if paired end
    paired_end, pair_obj = detect_paired_end(path_fastqs)
    path_fastqs = pair_obj[0]
    link_outdir = os.path.join(output, 'temp', 'link')
    os.makedirs(link_outdir)
    snames = [os.path.basename(n) for n in path_fastqs]
    path_fastqs = link_manicured_names(path_fastqs, snames, link_outdir, not paired_end, pair_obj[1:])
    results, addon = template_paired_end(paired_end)
    logging.info(results)
    if addon: learning_params.extend(addon)
    learning_pretty += ["Paired end",paired_end]
    # Detect adapters
    axe_adaptors_path = os.path.join(output, 'temp', 'axe_adaptors')
    os.makedirs(axe_adaptors_path)
    best_adap, best_size, fastq_paths = choose_axe_adaptors(path_fastqs, paired_end, axe_adaptors_path, int(args.threads))
    results, addon = template_choose_axe_adaptors(best_adap, best_size)
    logging.info(results)
    if addon: learning_params.extend(addon)
    learning_pretty += ["Detected adaptors",best_adap]
    # Detect output folder
    results, addon = template_output(output)
    logging.info(results)
    if addon: learning_params.extend(addon)
    # Detect stitching (only meaningful for paired-end data)
    stitched_path = os.path.join(output, 'temp', 'flash')
    os.makedirs(stitched_path)
    if paired_end:
        stitches, do_outies, fastq_paths = flash_stitchable_and_check_outies(fastq_paths, stitched_path, int(args.threads))
    else: stitches, do_outies = False, False
    results, addon = template_flash(stitches, do_outies)
    logging.info(results)
    if addon: learning_params.extend(addon)
    if paired_end:
        learning_pretty += ["Stitching",stitches]
        if stitches: learning_pretty += ["Outies allowed",do_outies]
    filt_q, trim_q = trimmer_learning(fastq_paths)
    results, addon = template_trim(int(filt_q), int(trim_q))
    logging.info(results)
    if addon: learning_params.extend(addon)
    learning_pretty += ["Filter quality",filt_q,"Trimming quality",trim_q]
    # Check whether to implement stitching bounds
    if stitches:
        cv, mean = flash_check_cv(stitched_path)
        # Low coefficient of variation in stitch length suggests amplicon data
        if cv < 0.1:
            learning_pretty += ["Amplicon mode",True]
            logging.info("CV: %f, Mean: %f, Avlen: %f" % (cv, mean, avlen))
            if avlen > mean: avlen = mean
            mr = math.ceil(cv*mean)
            logging.info("SD was: %d" % mr)
            minstitch, maxstitch = int(2*avlen - mean-mr), int(2*avlen - mean+mr)
            if minstitch < 8: minstitch = 8
            logging.info("Amplicon mode: stitch range [%d, %d]" % (minstitch, maxstitch))
            results, addon = template_cv(minstitch, maxstitch)
            logging.info(results)
            if addon: learning_params.extend(addon)
            learning_pretty += ["Amplicon stitch minimum",minstitch]
            learning_pretty += ["Amplicon stitch maximum",maxstitch]
        else: learning_pretty += ["Amplicon mode",False]
    #print(str(learning_params))
    # NOTE(review): "output" is rebound here from the output-path string to a
    # file handle; the path is no longer needed below, but confirm before reuse.
    with open(os.path.join(args.output, "shi7_cmd.sh"), "w") as output:
        cmd = " ".join(learning_params)
        output.write(cmd)
        print(cmd)
    # Write the (label, value) pairs two at a time, tab separated
    with open(os.path.join(args.output, "learning_params.txt"),"w") as output:
        for ix in range(0,len(learning_pretty),2):
            output.write(str(learning_pretty[ix]) + "\t" + str(learning_pretty[ix+1]) + "\n")
    if not args.debug:
        shutil.rmtree(os.path.join(args.output, 'temp'))
    logging.info('Execution time: %s' % (datetime.now() - start_time))
# Allow the learning module to be executed directly as a script.
if __name__ == "__main__":
    main()
| knights-lab/shi7 | shi7/shi7_learning.py | Python | agpl-3.0 | 15,977 |
package cn.dlb.bim.ifc.engine.jvm;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import cn.dlb.bim.component.PlatformServer;
import cn.dlb.bim.component.PlatformServerConfig;
import cn.dlb.bim.ifc.emf.PackageMetaData;
import cn.dlb.bim.ifc.engine.IRenderEngine;
import cn.dlb.bim.ifc.engine.IRenderEngineFactory;
import cn.dlb.bim.ifc.engine.RenderEngineException;
import cn.dlb.bim.utils.PathUtils;
public class JvmRenderEngineFactory implements IRenderEngineFactory {
	private static final Logger LOGGER = LoggerFactory.getLogger(JvmRenderEngineFactory.class);

	/** Temp folder the native IFC engine library is extracted into. */
	private Path nativeFolder;
	/** Schema file of the last engine created (resolved per schema name). */
	private Path schemaFile;
	private PlatformServer server;

	public JvmRenderEngineFactory(PlatformServer server) {
		this.server = server;
		initialize();
	}

	/**
	 * Extracts the platform-specific native IFC engine library from the
	 * compiled-classes route into a fresh temp folder ("ifcenginedll").
	 * Any pre-existing folder is removed first so a stale library is never used.
	 */
	public void initialize() {
		try {
			String os = System.getProperty("os.name").toLowerCase();
			String libraryName = "";
			if (os.contains("windows")) {
				libraryName = "ifcengine.dll";
			} else if (os.contains("osx") || os.contains("os x") || os.contains("darwin")) {
				libraryName = "libIFCEngine.dylib";
			} else if (os.contains("linux")) {
				libraryName = "libifcengine.so";
			}
			// Files.newInputStream never returns null (it throws on failure), so the
			// old "!= null" check was dead code; try-with-resources replaces the
			// manual close.
			Path libraryPath = server.getPlatformServerConfig().getCompileClassRoute()
					.resolve("lib/" + System.getProperty("sun.arch.data.model") + "/" + libraryName);
			try (InputStream inputStream = Files.newInputStream(libraryPath)) {
				Path tmpFolder = server.getPlatformServerConfig().getTempDir();
				nativeFolder = tmpFolder.resolve("ifcenginedll");
				Path file = nativeFolder.resolve(libraryName);
				if (Files.exists(nativeFolder)) {
					try {
						PathUtils.removeDirectoryWithContent(nativeFolder);
					} catch (IOException e) {
						// Ignore: best-effort cleanup of a stale folder
					}
				}
				Files.createDirectories(nativeFolder);
				// Files.copy replaces the manual IOUtils.copy + stream bookkeeping.
				Files.copy(inputStream, file);
			}
		} catch (Exception e) {
			// Previously e.printStackTrace(); route through the class logger instead.
			LOGGER.error("Failed to extract native render engine library", e);
		}
	}

	/**
	 * Creates a JVM-hosted IFC render engine for the given schema.
	 *
	 * @param schema schema name resolved through the server's metadata manager
	 * @throws RenderEngineException if no schema file exists for the name
	 */
	@Override
	public IRenderEngine createRenderEngine(String schema) throws RenderEngineException {
		PackageMetaData packageMetaData = server.getMetaDataManager().getPackageMetaData(schema);
		schemaFile = packageMetaData.getSchemaPath();
		if (schemaFile == null) {
			throw new RenderEngineException("No schema file");
		}
		List<String> classPathEntries = new ArrayList<>();
		// for (Dependency dependency : pluginContext.getDependencies()) {
		// Path path = dependency.getPath();
		// classPathEntries.add(path.toAbsolutePath().toString());
		// }
		return new JvmIfcEngine(schemaFile, nativeFolder, server.getPlatformServerConfig().getTempDir(),
				server.getPlatformServerConfig().getClassPath(), classPathEntries);
	}
}
| shenan4321/BIMplatform | src/cn/dlb/bim/ifc/engine/jvm/JvmRenderEngineFactory.java | Java | agpl-3.0 | 3,114 |
'use strict';

// Model for a contact's permission entry (a group granted access to a contact).
angular.module('GO.Modules.GroupOffice.Contacts').factory('GO.Modules.GroupOffice.Contacts.Model.ContactGroup', [
	'GO.Core.Factories.Data.Model',
	function (Model) {

		var ContactGroup = GO.extend(Model, function () {
			//rename function because this record has a delete attribute on the server
			this.deleteRecord = this.delete;
			// NOTE(review): this passes the `arguments` object as a single argument;
			// presumably `.apply(this, arguments)` was intended — confirm against the
			// base Model constructor's expectations.
			this.$parent.constructor.call(this, arguments);
		});

		// Store route is per-contact: permissions live under the owning contact.
		ContactGroup.prototype.getStoreRoute = function () {
			return 'contacts/'+this.contactId+'/permissions';
		};

		// Primary key: the group being granted access.
		ContactGroup.prototype.$keys = ['groupId'];

		return ContactGroup;
	}]);
| Intermesh/groupoffice-webclient | app/modules/groupoffice/contacts/model/contact-group.js | JavaScript | agpl-3.0 | 610 |
<?php
namespace WEEEOpen\TaralloTest\Database;
use PHPUnit\Framework\TestCase;
use WEEEOpen\Tarallo\Database\Database;
use WEEEOpen\Tarallo\Item;
abstract class DatabaseTest extends TestCase {
protected $db = null;
// this cannot be done, PLAIN AND SIMPLE. Even though it comes straight from an example inside documentation.
// setUp() comes from a trait, so there's no way to override it AND call it. parent::setUp() calls a pointless empty function.
// Excellent documentation, very clear, would rate it 10/10.
//protected function setUp() {
// if(!extension_loaded('pdo_mysql')) {
// $this->markTestSkipped('The PDO MySQL extension is not available.');
// }
//}
	/**
	 * Open a fresh PDO connection using the credentials from config.php.
	 * Exceptions enabled, assoc fetch, native prepares (no emulation).
	 */
	protected static function getPdo(): \PDO {
		require_once __DIR__ . DIRECTORY_SEPARATOR . '..' . DIRECTORY_SEPARATOR . '..' . DIRECTORY_SEPARATOR . 'config' . DIRECTORY_SEPARATOR . 'config.php';
		return new \PDO(TARALLO_DB_DSN, TARALLO_DB_USERNAME, TARALLO_DB_PASSWORD, [
			\PDO::ATTR_ERRMODE => \PDO::ERRMODE_EXCEPTION,
			\PDO::ATTR_CASE => \PDO::CASE_NATURAL,
			\PDO::ATTR_DEFAULT_FETCH_MODE => \PDO::FETCH_ASSOC,
			// \PDO::ATTR_AUTOCOMMIT => false, // PHPUnit crashes and burns with autocommits disabled and, for some unfathomable reason, two SEPARATE, DISTINCT, UNIQUE PDO object will forcefully share the same connection to MySQL (apparently?), so there's no way to have a connection with autocommits and another one without.
			\PDO::ATTR_EMULATE_PREPARES => false,
		]);
	}
	/**
	 * Wait (up to ~20 s each) for the database to accept connections and for
	 * the schema's scheduled events to exist, so tests don't race container
	 * startup.
	 */
	public static function setUpBeforeClass(): void {
		$pdo = null;
		$retries = 0;
		$started = false;
		// Phase 1: retry until a connection succeeds
		while($retries <= 20) {
			try {
				$pdo = self::getPdo();
				$started = true;
				break;
			} catch(\PDOException $e) {
				$retries++;
				sleep(1);
			}
		}
		if(!$started) {
			throw new \RuntimeException("Database not up after $retries seconds");
		}
		$retries = 0;
		$found = false;
		// Phase 2: the event is created last by the schema scripts, so its
		// presence means the schema import has finished
		while($retries <= 20) {
			$result = $pdo->query("SHOW EVENTS LIKE 'DuplicateItemProductFeaturesCleanup'");
			if($result !== false) {
				$result->fetchAll(\PDO::FETCH_ASSOC);
				$found = true;
				break;
			}
			sleep(1);
			$retries++;
		}
		if(!$found) {
			throw new \RuntimeException("Database not ready after $retries seconds");
		}
	}
public function setUp(): void {
$pdo = self::getPdo();
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE Audit; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE AuditProduct; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE Item; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE ItemFeature; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE Product; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE ProductFeature; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE Tree; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE Prefixes; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE Product; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE SearchResult; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "SET FOREIGN_KEY_CHECKS = 0; TRUNCATE TABLE Search; SET FOREIGN_KEY_CHECKS = 1;");
$pdo->exec(/** @lang MariaDB */ "INSERT INTO Prefixes(Prefix, `Integer`) VALUES ('M', 10), ('T', 75), ('', 60);");
}
	/**
	 * Lazily build (and cache) the Database instance under test, connecting
	 * with the same credentials as getPdo().
	 *
	 * @return Database
	 */
	protected function getDb(): Database {
		if($this->db === null) {
			// Ensures config.php constants are loaded before Database is built
			$this->getPdo();
			$db = new Database(TARALLO_DB_USERNAME, TARALLO_DB_PASSWORD, TARALLO_DB_DSN);
			//$dbr = new \ReflectionObject($db);
			//$prop = $dbr->getProperty('pdo');
			//$prop->setAccessible(true);
			//$prop->setValue($db, $this->getPdo());
			$this->db = $db;
		}
		return $this->db;
	}
	/**
	 * Structural equality check for two Items: same code, same feature map,
	 * and recursively equal content (children matched by code).
	 * Returns false on the first mismatch.
	 */
	protected static function itemCompare(Item $a, Item $b): bool {
		if($a->getCode() !== $b->getCode()) {
			return false;
		}
		// TODO: compare recursively
		//if($a->getProductFromStrings() !== $b->getProductFromStrings()) {
		//	return false;
		//}
		if(count($a->getFeatures()) !== count($b->getFeatures())) {
			return false;
		}
		// Same count and no key/value differences => identical feature maps
		if(!empty(array_diff_assoc($a->getFeatures(), $b->getFeatures()))) {
			return false;
		}
		if(count($a->getContent()) !== count($b->getContent())) {
			return false;
		}
		$bContent = $b->getContent();
		// NOTE(review): children of $a with no code match in $b are silently
		// skipped; the count check above only partially guards this — confirm
		// codes are unique within content.
		foreach($a->getContent() as $item) {
			$code = $item->getCode();
			foreach($bContent as $item2) {
				if($code === $item2->getCode()) {
					if(!static::itemCompare($item, $item2)) {
						return false;
					}
				}
			}
		}
		return true;
	}
} | WEEE-Open/tarallo-backend | tests/Database/DatabaseTest.php | PHP | agpl-3.0 | 4,897 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: OpenDrive Ltda
# Copyright (c) 2013 Opendrive Ltda
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from openerp.osv import osv, fields
from openerp.tools.translate import _
class Partner(osv.osv):
    # Extends res.partner with the partner's legal representative name.
    _inherit = 'res.partner'

    _columns = {
        # Free-text name of the person legally representing this partner
        'legal_representative': fields.char(
            'Legal Representative',
        ),
    }
| kailIII/emaresa | rent.resp/partner.py | Python | agpl-3.0 | 1,548 |
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using SSCMS.Dto;
using SSCMS.Utils;
namespace SSCMS.Web.Controllers.Admin.Common.Form
{
    public partial class LayerImageUploadController
    {
        // GET: returns the image-upload options for the layer dialog.
        // With a valid SiteId, site-stored settings (keyed by this controller's
        // name) override the defaults below; otherwise the zero-value Options
        // object is returned.
        [HttpGet, Route(Route)]
        public async Task<ActionResult<Options>> Get([FromQuery] SiteRequest request)
        {
            var options = new Options();

            if (request.SiteId > 0)
            {
                var site = await _siteRepository.GetAsync(request.SiteId);
                if (site == null) return this.Error("无法确定内容对应的站点");

                // Deserialize saved settings; the literal acts as the default
                // when the site has none stored.
                options = TranslateUtils.JsonDeserialize(site.Get<string>(nameof(LayerImageUploadController)), new Options
                {
                    IsEditor = true,
                    IsMaterial = true,
                    IsThumb = false,
                    ThumbWidth = 1024,
                    ThumbHeight = 1024,
                    IsLinkToOriginal = true
                });
            }

            return options;
        }
    }
} | siteserver/cms | src/SSCMS.Web/Controllers/Admin/Common/Form/LayerImageUploadController.Get.cs | C# | agpl-3.0 | 1,054 |
(function() {
    'use strict';

    // Wraps each module name in the {name: ...} shape the r.js "modules"
    // option expects.
    var getModulesList = function(modules) {
        return modules.map(function(moduleName) {
            return {name: moduleName};
        });
    };

    var jsOptimize = process.env.REQUIRE_BUILD_PROFILE_OPTIMIZE !== undefined ?
        process.env.REQUIRE_BUILD_PROFILE_OPTIMIZE : 'uglify2';

    return {
        namespace: 'RequireJS',

        /**
         * List the modules that will be optimized. All their immediate and deep
         * dependencies will be included in the module's file when the build is
         * done.
         */
        modules: getModulesList([
            'course_bookmarks/js/course_bookmarks_factory',
            'course_search/js/course_search_factory',
            'course_search/js/dashboard_search_factory',
            'discussion/js/discussion_board_factory',
            'discussion/js/discussion_profile_page_factory',
            'js/api_admin/catalog_preview_factory',
            'js/courseware/courseware_factory',
            'js/discovery/discovery_factory',
            'js/edxnotes/views/notes_visibility_factory',
            'js/edxnotes/views/page_factory',
            'js/financial-assistance/financial_assistance_form_factory',
            'js/groups/views/cohorts_dashboard_factory',
            'js/discussions_management/views/discussions_dashboard_factory',
            'js/header_factory',
            'js/learner_dashboard/course_entitlement_factory',
            'js/learner_dashboard/unenrollment_factory',
            'js/learner_dashboard/entitlement_unenrollment_factory',
            'js/learner_dashboard/program_details_factory',
            'js/learner_dashboard/program_list_factory',
            'js/student_account/logistration_factory',
            'js/student_account/views/account_settings_factory',
            'js/student_account/views/finish_auth_factory',
            'js/views/message_banner',
            'learner_profile/js/learner_profile_factory',
            'lms/js/preview/preview_factory',
            'support/js/certificates_factory',
            'support/js/enrollment_factory',
            'support/js/manage_user_factory',
            'teams/js/teams_tab_factory',
            'js/dateutil_factory'
        ]),

        /**
         * By default all the configuration for optimization happens from the command
         * line or by properties in the config file, and configuration that was
         * passed to requirejs as part of the app's runtime "main" JS file is *not*
         * considered. However, if you prefer the "main" JS file configuration
         * to be read for the build so that you do not have to duplicate the values
         * in a separate configuration, set this property to the location of that
         * main JS file. The first requirejs({}), require({}), requirejs.config({}),
         * or require.config({}) call found in that file will be used.
         * As of 2.1.10, mainConfigFile can be an array of values, with the last
         * value's config take precedence over previous values in the array.
         */
        mainConfigFile: 'require-config.js',

        /**
         * Set paths for modules. If relative paths, set relative to baseUrl above.
         * If a special value of "empty:" is used for the path value, then that
         * acts like mapping the path to an empty file. It allows the optimizer to
         * resolve the dependency to path, but then does not include it in the output.
         * Useful to map module names that are to resources on a CDN or other
         * http: URL when running in the browser and during an optimization that
         * file should be skipped because it has no dependencies.
         */
        paths: {
            gettext: 'empty:',
            'coffee/src/ajax_prefix': 'empty:',
            jquery: 'empty:',
            'jquery-migrate': 'empty:',
            'jquery.cookie': 'empty:',
            'jquery.url': 'empty:',
            backbone: 'empty:',
            underscore: 'empty:',
            'underscore.string': 'empty:',
            logger: 'empty:',
            utility: 'empty:',
            URI: 'empty:',
            'common/js/discussion/views/discussion_inline_view': 'empty:',
            // Fixed: these two previously read 'empty' (no colon), which maps to
            // a literal path named "empty" rather than the optimizer's special
            // skip value documented above.
            modernizr: 'empty:',
            'which-country': 'empty:',
            // Don't bundle UI Toolkit helpers as they are loaded into the "edx" namespace
            'edx-ui-toolkit/js/utils/html-utils': 'empty:',
            'edx-ui-toolkit/js/utils/string-utils': 'empty:'
        },

        /**
         * Inline requireJS text templates.
         */
        inlineText: true,

        /**
         * Stub out requireJS text in the optimized file, but leave available for non-optimized development use.
         */
        stubModules: ['text'],

        /**
         * If shim config is used in the app during runtime, duplicate the config
         * here. Necessary if shim config is used, so that the shim's dependencies
         * are included in the build. Using "mainConfigFile" is a better way to
         * pass this information though, so that it is only listed in one place.
         * However, if mainConfigFile is not an option, the shim config can be
         * inlined in the build config.
         */
        shim: {},

        /**
         * Introduced in 2.1.2: If using "dir" for an output directory, normally the
         * optimize setting is used to optimize the build bundles (the "modules"
         * section of the config) and any other JS file in the directory. However, if
         * the non-build bundle JS files will not be loaded after a build, you can
         * skip the optimization of those files, to speed up builds. Set this value
         * to true if you want to skip optimizing those other non-build bundle JS
         * files.
         */
        skipDirOptimize: true,

        /**
         * When the optimizer copies files from the source location to the
         * destination directory, it will skip directories and files that start
         * with a ".". If you want to copy .directories or certain .files, for
         * instance if you keep some packages in a .packages directory, or copy
         * over .htaccess files, you can set this to null. If you want to change
         * the exclusion rules, change it to a different regexp. If the regexp
         * matches, it means the directory will be excluded. This used to be
         * called dirExclusionRegExp before the 1.0.2 release.
         * As of 1.0.3, this value can also be a string that is converted to a
         * RegExp via new RegExp().
         */
        fileExclusionRegExp: /^\.|spec|spec_helpers/,

        /**
         * Allow CSS optimizations. Allowed values:
         * - "standard": @import inlining and removal of comments, unnecessary
         * whitespace and line returns.
         * Removing line returns may have problems in IE, depending on the type
         * of CSS.
         * - "standard.keepLines": like "standard" but keeps line returns.
         * - "none": skip CSS optimizations.
         * - "standard.keepComments": keeps the file comments, but removes line
         * returns. (r.js 1.0.8+)
         * - "standard.keepComments.keepLines": keeps the file comments and line
         * returns. (r.js 1.0.8+)
         * - "standard.keepWhitespace": like "standard" but keeps unnecessary whitespace.
         */
        optimizeCss: 'none',

        /**
         * How to optimize all the JS files in the build output directory.
         * Right now only the following values are supported:
         * - "uglify": Uses UglifyJS to minify the code.
         * - "uglify2": Uses UglifyJS2.
         * - "closure": Uses Google's Closure Compiler in simple optimization
         * mode to minify the code. Only available if REQUIRE_ENVIRONMENT is "rhino" (the default).
         * - "none": No minification will be done.
         */
        optimize: jsOptimize,

        /**
         * Sets the logging level. It is a number:
         * TRACE: 0,
         * INFO: 1,
         * WARN: 2,
         * ERROR: 3,
         * SILENT: 4
         * Default is 0.
         */
        logLevel: 1
    };
}());
| TeachAtTUM/edx-platform | lms/static/lms/js/build.js | JavaScript | agpl-3.0 | 8,162 |
# -*- coding: utf-8 -*-
import time
from datetime import timedelta
class CookieJar:
    """Stores cookies as Netscape-format tab-separated lines, keyed by name.

    Line layout: domain, flag, path, secure, expiry, name, value
    (fields 5 and 6 are the cookie name and value).
    """

    def __init__(self, pluginname, account=None):
        # name -> full tab-separated cookie line
        self.cookies = {}
        self.plugin = pluginname
        self.account = account

    def add_cookies(self, clist):
        """Add raw tab-separated cookie lines; field 5 is the cookie name."""
        for c in clist:
            name = c.split("\t")[5]
            self.cookies[name] = c

    def get_cookies(self):
        """Return all stored cookie lines as a list."""
        return list(self.cookies.values())

    def parse_cookie(self, name):
        """Return the value (field 6) of the named cookie, or None if absent."""
        if name in self.cookies:
            return self.cookies[name].split("\t")[6]
        else:
            return None

    def get_cookie(self, name):
        """Alias of parse_cookie(), kept for API compatibility."""
        return self.parse_cookie(name)

    def set_cookie(self, domain, name, value, path="/", exp=None):
        """Store a cookie for `domain` with a 31-day default expiry.

        Fix: the old default (`exp=time.time() + ...` in the signature) was
        evaluated once at import time, freezing "31 days from now" at
        interpreter start; it is now computed per call. Passing an explicit
        `exp` behaves exactly as before.
        """
        if exp is None:
            exp = time.time() + timedelta(hours=744).total_seconds()  #: 31 days retention
        self.cookies[
            name
        ] = f".{domain}\tTRUE\t{path}\tFALSE\t{exp}\t{name}\t{value}"

    def clear(self):
        """Drop all stored cookies."""
        self.cookies = {}
| vuolter/pyload | src/pyload/core/network/cookie_jar.py | Python | agpl-3.0 | 1,007 |
/**
* vinimay
* Vinimay is a decentralised social network focused on giving back control of its data to the user
*
* OpenAPI spec version: 0.1.0
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
/**
 * Payload sent to the remote server when deleting a friendship.
 */
export interface DeletionServerInput {
    /**
     * The token identifying the relationship
     */
    token: string;
    /**
     * Request signature, required if the friendship was previously accepted
     */
    signature?: string;
}
| JosephCaillet/vinimay | client/src/providers/apiClient/model/deletionServerInput.ts | TypeScript | agpl-3.0 | 569 |
#
# Copyright (C) 2011 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
class ContextModulesController < ApplicationController
include Api::V1::ContextModule
include WebZipExportHelper
before_action :require_context
add_crumb(proc { t('#crumbs.modules', "Modules") }) { |c| c.send :named_context_url, c.instance_variable_get("@context"), :context_context_modules_url }
before_action { |c| c.active_tab = "modules" }
module ModuleIndexHelper
include ContextModulesHelper
def load_module_file_details
attachment_tags = @context.module_items_visible_to(@current_user).where(content_type: 'Attachment').preload(:content => :folder)
attachment_tags.inject({}) do |items, file_tag|
items[file_tag.id] = {
id: file_tag.id,
content_id: file_tag.content_id,
content_details: content_details(file_tag, @current_user, :for_admin => true)
}
items
end
end
def modules_cache_key
@modules_cache_key ||= begin
visible_assignments = @current_user.try(:assignment_and_quiz_visibilities, @context)
cache_key_items = [@context.cache_key, @can_edit, @is_student, @can_view_unpublished, 'all_context_modules_draft_10', collection_cache_key(@modules), Time.zone, Digest::MD5.hexdigest(visible_assignments.to_s)]
cache_key = cache_key_items.join('/')
cache_key = add_menu_tools_to_cache_key(cache_key)
cache_key = add_mastery_paths_to_cache_key(cache_key, @context, @modules, @current_user)
end
end
def load_modules
@modules = @context.modules_visible_to(@current_user)
@modules.each(&:check_for_stale_cache_after_unlocking!)
@collapsed_modules = ContextModuleProgression.for_user(@current_user).for_modules(@modules).pluck(:context_module_id, :collapsed).select{|cm_id, collapsed| !!collapsed }.map(&:first)
@can_edit = can_do(@context, @current_user, :manage_content)
@is_student = @context.grants_right?(@current_user, session, :participate_as_student)
@can_view_unpublished = @context.grants_right?(@current_user, session, :read_as_admin)
modules_cache_key
@is_cyoe_on = @current_user && ConditionalRelease::Service.enabled_in_context?(@context)
if allow_web_export_download?
@allow_web_export_download = true
@last_web_export = @context.web_zip_exports.visible_to(@current_user).order('epub_exports.created_at').last
end
@menu_tools = {}
placements = [:assignment_menu, :discussion_topic_menu, :file_menu, :module_menu, :quiz_menu, :wiki_page_menu]
tools = ContextExternalTool.all_tools_for(@context, placements: placements,
:root_account => @domain_root_account, :current_user => @current_user).to_a
placements.select { |p| @menu_tools[p] = tools.select{|t| t.has_placement? p} }
module_file_details = load_module_file_details if @context.grants_right?(@current_user, session, :manage_content)
js_env :course_id => @context.id,
:CONTEXT_URL_ROOT => polymorphic_path([@context]),
:FILES_CONTEXTS => [{asset_string: @context.asset_string}],
:MODULE_FILE_DETAILS => module_file_details,
:MODULE_FILE_PERMISSIONS => {
usage_rights_required: @context.feature_enabled?(:usage_rights_required),
manage_files: @context.grants_right?(@current_user, session, :manage_files)
}
if master_courses?
is_master_course = MasterCourses::MasterTemplate.is_master_course?(@context)
is_child_course = MasterCourses::ChildSubscription.is_child_course?(@context)
if is_master_course || is_child_course
js_env(:MASTER_COURSE_SETTINGS => {
:IS_MASTER_COURSE => is_master_course,
:IS_CHILD_COURSE => is_child_course,
:MASTER_COURSE_DATA_URL => context_url(@context, :context_context_modules_master_course_info_url)
})
end
end
conditional_release_js_env(includes: :active_rules)
end
end
include ModuleIndexHelper
def index
if authorized_action(@context, @current_user, :read)
log_asset_access([ "modules", @context ], "modules", "other")
load_modules
set_tutorial_js_env
if @is_student && tab_enabled?(@context.class::TAB_MODULES)
@modules.each{|m| m.evaluate_for(@current_user) }
session[:module_progressions_initialized] = true
end
end
end
  # Renders the "Choose Assignment Set" page for a mastery-path module item,
  # or redirects/404s when no choice is available to the student.
  def choose_mastery_path
    if authorized_action(@context, @current_user, :participate_as_student)
      id = params[:id]
      item = @context.context_module_tags.not_deleted.find(params[:id])

      if item.present? && item.published? && item.context_module.published?
        rules = ConditionalRelease::Service.rules_for(@context, @current_user, item, session)
        rule = conditional_release_rule_for_module_item(item, conditional_release_rules: rules)

        # locked assignments always have 0 sets, so this check makes it not return 404 if locked
        # but instead progress forward and return a warning message if is locked later on
        if rule.present? && (rule[:locked] || !rule[:selected_set_id] || rule[:assignment_sets].length > 1)
          if !rule[:locked]
            # Build one option per assignment set, each with its assignments
            # serialized for the client-side chooser
            options = rule[:assignment_sets].map { |set|
              option = {
                setId: set[:id]
              }

              option[:assignments] = set[:assignments].map { |a|
                assg = assignment_json(a[:model], @current_user, session)
                assg[:assignmentId] = a[:assignment_id]
                assg
              }

              option
            }

            js_env({
              CHOOSE_MASTERY_PATH_DATA: {
                options: options,
                selectedOption: rule[:selected_set_id],
                courseId: @context.id,
                moduleId: item.context_module.id,
                itemId: id
              }
            })

            css_bundle :choose_mastery_path
            js_bundle :choose_mastery_path

            @page_title = join_title(t('Choose Assignment Set'), @context.name)

            return render :html => '', :layout => true
          else
            flash[:warning] = t('Module Item is locked.')
            return redirect_to named_context_url(@context, :context_context_modules_url)
          end
        end
      end
      return render status: 404, template: 'shared/errors/404_message'
    end
  end
end
def item_redirect
if authorized_action(@context, @current_user, :read)
@tag = @context.context_module_tags.not_deleted.find(params[:id])
if !(@tag.unpublished? || @tag.context_module.unpublished?) || authorized_action(@tag.context_module, @current_user, :view_unpublished_items)
reevaluate_modules_if_locked(@tag)
@progression = @tag.context_module.evaluate_for(@current_user) if @tag.context_module
@progression.uncollapse! if @progression && @progression.collapsed?
content_tag_redirect(@context, @tag, :context_context_modules_url, :modules)
end
end
end
def item_redirect_mastery_paths
@tag = @context.context_module_tags.not_deleted.find(params[:id])
type_controllers = {
assignment: 'assignments',
quiz: 'quizzes/quizzes',
discussion_topic: 'discussion_topics'
}
if @tag
if authorized_action(@tag.content, @current_user, :update)
controller = type_controllers[@tag.content_type_class.to_sym]
if controller.present?
redirect_to url_for(
controller: controller,
action: 'edit',
id: @tag.content_id,
anchor: 'mastery-paths-editor',
return_to: params[:return_to]
)
else
render status: 404, template: 'shared/errors/404_message'
end
end
else
render status: 404, template: 'shared/errors/404_message'
end
end
def module_redirect
if authorized_action(@context, @current_user, :read)
@module = @context.context_modules.not_deleted.find(params[:context_module_id])
@tags = @module.content_tags_visible_to(@current_user)
if params[:last]
@tags.pop while @tags.last && @tags.last.content_type == 'ContextModuleSubHeader'
else
@tags.shift while @tags.first && @tags.first.content_type == 'ContextModuleSubHeader'
end
@tag = params[:last] ? @tags.last : @tags.first
if !@tag
flash[:notice] = t 'module_empty', %{There are no items in the module "%{module}"}, :module => @module.name
redirect_to named_context_url(@context, :context_context_modules_url, :anchor => "module_#{@module.id}")
return
end
reevaluate_modules_if_locked(@tag)
@progression = @tag.context_module.evaluate_for(@current_user) if @tag && @tag.context_module
@progression.uncollapse! if @progression && @progression.collapsed?
content_tag_redirect(@context, @tag, :context_context_modules_url)
end
end
def reevaluate_modules_if_locked(tag)
# if the object is locked for this user, reevaluate all the modules and clear the cache so it will be checked again when loaded
if tag.content && tag.content.respond_to?(:locked_for?)
locked = tag.content.locked_for?(@current_user, :context => @context)
if locked
@context.context_modules.active.each { |m| m.evaluate_for(@current_user) }
if tag.content.respond_to?(:clear_locked_cache)
tag.content.clear_locked_cache(@current_user)
end
end
end
end
def create
if authorized_action(@context.context_modules.temp_record, @current_user, :create)
@module = @context.context_modules.build
@module.workflow_state = 'unpublished'
@module.attributes = context_module_params
respond_to do |format|
if @module.save
format.html { redirect_to named_context_url(@context, :context_context_modules_url) }
format.json { render :json => @module.as_json(:include => :content_tags, :methods => :workflow_state, :permissions => {:user => @current_user, :session => session}) }
else
format.html
format.json { render :json => @module.errors, :status => :bad_request }
end
end
end
end
def reorder
if authorized_action(@context.context_modules.temp_record, @current_user, :update)
m = @context.context_modules.not_deleted.first
m.update_order(params[:order].split(","))
# Need to invalidate the ordering cache used by context_module.rb
@context.touch
# I'd like to get rid of this saving every module, but we have to
# update the list of prerequisites since a reorder can cause
# prerequisites to no longer be valid
@modules = @context.context_modules.not_deleted.to_a
@modules.each do |m|
m.updated_at = Time.now
m.save_without_touching_context
end
@context.touch
# # Background this, not essential that it happen right away
# ContextModule.send_later(:update_tag_order, @context)
render :json => @modules.map{ |m| m.as_json(include: :content_tags, methods: :workflow_state) }
end
end
def content_tag_assignment_data
if authorized_action(@context, @current_user, :read)
info = {}
now = Time.now.utc.iso8601
all_tags = @context.module_items_visible_to(@current_user)
user_is_admin = @context.grants_right?(@current_user, session, :read_as_admin)
preload_assignments_and_quizzes(all_tags, user_is_admin)
all_tags.each do |tag|
info[tag.id] = if tag.can_have_assignment? && tag.assignment
tag.assignment.context_module_tag_info(@current_user, @context, user_is_admin: user_is_admin)
elsif tag.content_type_quiz?
tag.content.context_module_tag_info(@current_user, @context, user_is_admin: user_is_admin)
else
{:points_possible => nil, :due_date => nil}
end
end
render :json => info
end
end
def content_tag_master_course_data
return not_found unless master_courses?
if authorized_action(@context, @current_user, :read_as_admin)
info = {}
is_child_course = MasterCourses::ChildSubscription.is_child_course?(@context)
is_master_course = MasterCourses::MasterTemplate.is_master_course?(@context)
if is_child_course || is_master_course
tag_scope = @context.module_items_visible_to(@current_user).where(:content_type => %w{Assignment Attachment DiscussionTopic Quizzes::Quiz WikiPage})
tag_scope = tag_scope.where(:id => params[:tag_id]) if params[:tag_id]
tag_ids = tag_scope.pluck(:id)
restriction_info = {}
if tag_ids.any?
restriction_info = is_child_course ?
MasterCourses::MasterContentTag.fetch_module_item_restrictions_for_child(tag_ids) :
MasterCourses::MasterContentTag.fetch_module_item_restrictions_for_master(tag_ids)
end
info[:tag_restrictions] = restriction_info
end
render :json => info
end
end
def prerequisites_needing_finishing_for(mod, progression, before_tag=nil)
tags = mod.content_tags_visible_to(@current_user)
pres = []
tags.each do |tag|
if req = (mod.completion_requirements || []).detect{|r| r[:id] == tag.id }
progression.requirements_met ||= []
if !progression.requirements_met.any?{|r| r[:id] == req[:id] && r[:type] == req[:type] }
if !before_tag || tag.position <= before_tag.position
pre = {
:url => named_context_url(@context, :context_context_modules_item_redirect_url, tag.id),
:id => tag.id,
:context_module_id => mod.id,
:title => tag.title
}
pre[:requirement] = req
pre[:requirement_description] = ContextModule.requirement_description(req)
pre[:available] = !progression.locked? && (!mod.require_sequential_progress || tag.position <= progression.current_position)
pres << pre
end
end
end
end
pres
end
protected :prerequisites_needing_finishing_for
def content_tag_prerequisites_needing_finishing
type, id = ActiveRecord::Base.parse_asset_string params[:code]
raise ActiveRecord::RecordNotFound if id == 0
if type == 'ContentTag'
@tag = @context.context_module_tags.active.where(id: id).first
else
@tag = @context.context_module_tags.active.where(context_module_id: params[:context_module_id], content_id: id, content_type: type).first
end
@module = @context.context_modules.active.find(params[:context_module_id])
@progression = @module.evaluate_for(@current_user)
@progression.current_position ||= 0 if @progression
res = {};
if !@progression
elsif @progression.locked?
res[:locked] = true
res[:modules] = []
previous_modules = @context.context_modules.active.where('position<?', @module.position).order(:position).to_a
previous_modules.reverse!
valid_previous_modules = []
prereq_ids = @module.prerequisites.select{|p| p[:type] == 'context_module' }.map{|p| p[:id] }
previous_modules.each do |mod|
if prereq_ids.include?(mod.id)
valid_previous_modules << mod
prereq_ids += mod.prerequisites.select{|p| p[:type] == 'context_module' }.map{|p| p[:id] }
end
end
valid_previous_modules.reverse!
valid_previous_modules.each do |mod|
prog = mod.evaluate_for(@current_user)
res[:modules] << {
:id => mod.id,
:name => mod.name,
:prerequisites => prerequisites_needing_finishing_for(mod, prog),
:locked => prog.locked?
} unless prog.completed?
end
elsif @module.require_sequential_progress && @progression.current_position && @tag && @tag.position && @progression.current_position < @tag.position
res[:locked] = true
pres = prerequisites_needing_finishing_for(@module, @progression, @tag)
res[:modules] = [{
:id => @module.id,
:name => @module.name,
:prerequisites => pres,
:locked => false
}]
else
res[:locked] = false
end
render :json => res
end
def toggle_collapse
if authorized_action(@context, @current_user, :read)
@module = @context.modules_visible_to(@current_user).find(params[:context_module_id])
@progression = @module.evaluate_for(@current_user) #context_module_progressions.find_by_user_id(@current_user)
@progression ||= ContextModuleProgression.new
if params[:collapse] == '1'
@progression.collapsed = true
elsif params[:collapse]
@progression.uncollapse!
else
@progression.collapsed = !@progression.collapsed
end
@progression.save unless @progression.new_record?
respond_to do |format|
format.html { redirect_to named_context_url(@context, :context_context_modules_url) }
format.json { render :json => (@progression.collapsed ? @progression : @module.content_tags_visible_to(@current_user) )}
end
end
end
def show
@module = @context.context_modules.not_deleted.find(params[:id])
if authorized_action @module, @current_user, :read
respond_to do |format|
format.html { redirect_to named_context_url(@context, :context_context_modules_url, :anchor => "module_#{params[:id]}") }
format.json { render :json => @module.content_tags_visible_to(@current_user) }
end
end
end
def reorder_items
@module = @context.context_modules.not_deleted.find(params[:context_module_id])
if authorized_action(@module, @current_user, :update)
order = params[:order].split(",").map{|id| id.to_i}
tags = @context.context_module_tags.not_deleted.where(id: order)
affected_module_ids = (tags.map(&:context_module_id) + [@module.id]).uniq.compact
affected_items = []
items = order.map{|id| tags.detect{|t| t.id == id.to_i } }.compact.uniq
items.each_with_index do |item, idx|
item.position = idx + 1
item.context_module_id = @module.id
if item.changed?
item.skip_touch = true
item.save
affected_items << item
end
end
ContentTag.touch_context_modules(affected_module_ids)
ContentTag.update_could_be_locked(affected_items)
@context.touch
@module.reload
render :json => @module.as_json(:include => :content_tags, :methods => :workflow_state, :permissions => {:user => @current_user, :session => session})
end
end
def item_details
if authorized_action(@context, @current_user, :read)
# namespaced models are separated by : in the url
code = params[:id].gsub(":", "/").split("_")
id = code.pop.to_i
type = code.join("_").classify
@modules = @context.modules_visible_to(@current_user)
@tags = @context.context_module_tags.active.sort_by{|t| t.position ||= 999}
result = {}
possible_tags = @tags.find_all {|t| t.content_type == type && t.content_id == id }
if possible_tags.size > 1
# if there's more than one tag for the item, but the caller didn't
# specify which one they want, we don't want to return any information.
# this way the module item prev/next links won't appear with misleading navigation info.
if params[:module_item_id]
result[:current_item] = possible_tags.detect { |t| t.id == params[:module_item_id].to_i }
end
else
result[:current_item] = possible_tags.first
if !result[:current_item]
obj = @context.find_asset(params[:id], [:attachment, :discussion_topic, :assignment, :quiz, :wiki_page, :content_tag])
if obj.is_a?(ContentTag)
result[:current_item] = @tags.detect{|t| t.id == obj.id }
elsif obj.is_a?(DiscussionTopic) && obj.assignment_id
result[:current_item] = @tags.detect{|t| t.content_type == 'Assignment' && t.content_id == obj.assignment_id }
elsif obj.is_a?(Quizzes::Quiz) && obj.assignment_id
result[:current_item] = @tags.detect{|t| t.content_type == 'Assignment' && t.content_id == obj.assignment_id }
end
end
end
result[:current_item].evaluate_for(@current_user) rescue nil
if result[:current_item] && result[:current_item].position
result[:previous_item] = @tags.reverse.detect{|t| t.id != result[:current_item].id && t.context_module_id == result[:current_item].context_module_id && t.position && t.position <= result[:current_item].position && t.content_type != "ContextModuleSubHeader" }
result[:next_item] = @tags.detect{|t| t.id != result[:current_item].id && t.context_module_id == result[:current_item].context_module_id && t.position && t.position >= result[:current_item].position && t.content_type != "ContextModuleSubHeader" }
current_module = @modules.detect{|m| m.id == result[:current_item].context_module_id}
if current_module
result[:previous_module] = @modules.reverse.detect{|m| (m.position || 0) < (current_module.position || 0) }
result[:next_module] = @modules.detect{|m| (m.position || 0) > (current_module.position || 0) }
end
end
render :json => result
end
end
include ContextModulesHelper
def add_item
@module = @context.context_modules.not_deleted.find(params[:context_module_id])
if authorized_action(@module, @current_user, :update)
@tag = @module.add_item(params[:item])
unless @tag.valid?
return render :json => @tag.errors, :status => :bad_request
end
json = @tag.as_json
json['content_tag'].merge!(
publishable: module_item_publishable?(@tag),
published: @tag.published?,
publishable_id: module_item_publishable_id(@tag),
unpublishable: module_item_unpublishable?(@tag),
graded: @tag.graded?,
content_details: content_details(@tag, @current_user),
assignment_id: @tag.assignment.try(:id),
is_cyoe_able: cyoe_able?(@tag),
is_duplicate_able: @tag.duplicate_able?,
)
render json: json
end
end
def remove_item
@tag = @context.context_module_tags.not_deleted.find(params[:id])
if authorized_action(@tag.context_module, @current_user, :update)
@module = @tag.context_module
@tag.destroy
render :json => @tag
end
end
def update_item
@tag = @context.context_module_tags.not_deleted.find(params[:id])
if authorized_action(@tag.context_module, @current_user, :update)
@tag.title = params[:content_tag][:title] if params[:content_tag] && params[:content_tag][:title]
@tag.url = params[:content_tag][:url] if %w(ExternalUrl ContextExternalTool).include?(@tag.content_type) && params[:content_tag] && params[:content_tag][:url]
@tag.indent = params[:content_tag][:indent] if params[:content_tag] && params[:content_tag][:indent]
@tag.new_tab = params[:content_tag][:new_tab] if params[:content_tag] && params[:content_tag][:new_tab]
unless @tag.save
return render :json => @tag.errors, :status => :bad_request
end
@tag.update_asset_name!(@current_user) if params[:content_tag][:title]
render :json => @tag
end
end
def progressions
if authorized_action(@context, @current_user, :read)
if request.format == :json
if @context.grants_right?(@current_user, session, :view_all_grades)
if params[:user_id] && @user = @context.students.find(params[:user_id])
@progressions = @context.context_modules.active.map{|m| m.evaluate_for(@user) }
else
if @context.large_roster
@progressions = []
else
context_module_ids = @context.context_modules.active.pluck(:id)
@progressions = ContextModuleProgression.where(:context_module_id => context_module_ids).each{|p| p.evaluate }
end
end
elsif @context.grants_right?(@current_user, session, :participate_as_student)
@progressions = @context.context_modules.active.order(:id).map{|m| m.evaluate_for(@current_user) }
else
# module progressions don't apply, but unlock_at still does
@progressions = @context.context_modules.active.order(:id).map do |m|
{ :context_module_progression =>
{ :context_module_id => m.id,
:workflow_state => (m.to_be_unlocked ? 'locked' : 'unlocked'),
:requirements_met => [],
:incomplete_requirements => [] } }
end
end
render :json => @progressions
elsif !@context.grants_right?(@current_user, session, :view_all_grades)
@restrict_student_list = true
student_ids = @context.observer_enrollments.for_user(@current_user).map(&:associated_user_id)
student_ids << @current_user.id if @context.user_is_student?(@current_user)
students = UserSearch.scope_for(@context, @current_user, {:enrollment_type => 'student'}).where(:id => student_ids)
@visible_students = students.map { |u| user_json(u, @current_user, session) }
end
end
end
def update
@module = @context.context_modules.not_deleted.find(params[:id])
if authorized_action(@module, @current_user, :update)
if params[:publish]
@module.publish
@module.publish_items!
elsif params[:unpublish]
@module.unpublish
end
if @module.update_attributes(context_module_params)
json = @module.as_json(:include => :content_tags, :methods => :workflow_state, :permissions => {:user => @current_user, :session => session})
json['context_module']['relock_warning'] = true if @module.relock_warning?
render :json => json
else
render :json => @module.errors, :status => :bad_request
end
end
end
def destroy
@module = @context.context_modules.not_deleted.find(params[:id])
if authorized_action(@module, @current_user, :delete)
@module.destroy
respond_to do |format|
format.html { redirect_to named_context_url(@context, :context_context_modules_url) }
format.json { render :json => @module.as_json(:methods => :workflow_state) }
end
end
end
private
def preload_assignments_and_quizzes(tags, user_is_admin)
assignment_tags = tags.select{|ct| ct.can_have_assignment?}
return unless assignment_tags.any?
ActiveRecord::Associations::Preloader.new.preload(assignment_tags, :content)
content_with_assignments = assignment_tags.
select{|ct| ct.content_type != "Assignment" && ct.content.assignment_id}.map(&:content)
ActiveRecord::Associations::Preloader.new.preload(content_with_assignments, :assignment) if content_with_assignments.any?
if user_is_admin && should_preload_override_data?
assignments = assignment_tags.map(&:assignment).compact
plain_quizzes = assignment_tags.select{|ct| ct.content.is_a?(Quizzes::Quiz) && !ct.content.assignment}.map(&:content)
preload_has_too_many_overrides(assignments, :assignment_id)
preload_has_too_many_overrides(plain_quizzes, :quiz_id)
overrideables = (assignments + plain_quizzes).select{|o| !o.has_too_many_overrides}
if overrideables.any?
ActiveRecord::Associations::Preloader.new.preload(overrideables, :assignment_overrides)
overrideables.each { |o| o.has_no_overrides = true if o.assignment_overrides.size == 0 }
end
end
end
def should_preload_override_data?
key = ['preloaded_module_override_data', @context.global_asset_string, @current_user].cache_key
# if the user has been touched we should preload all of the overrides because it's almost certain we'll need them all
if Rails.cache.read(key)
false
else
Rails.cache.write(key, true)
true
end
end
def preload_has_too_many_overrides(assignments_or_quizzes, override_column)
# find the assignments/quizzes with too many active overrides and mark them as such
if assignments_or_quizzes.any?
ids = AssignmentOverride.active.where(override_column => assignments_or_quizzes).
group(override_column).having("COUNT(*) > ?", Setting.get('assignment_all_dates_too_many_threshold', '25').to_i).
active.pluck(override_column)
if ids.any?
assignments_or_quizzes.each{|o| o.has_too_many_overrides = true if ids.include?(o.id) }
end
end
end
def context_module_params
params.require(:context_module).permit(:name, :unlock_at, :require_sequential_progress, :publish_final_grade, :requirement_count,
:completion_requirements => strong_anything, :prerequisites => strong_anything)
end
end
| venturehive/canvas-lms | app/controllers/context_modules_controller.rb | Ruby | agpl-3.0 | 29,429 |
<?php
/**
* Copyright (c) 2012, Agence Française Informatique (AFI). All rights reserved.
*
* AFI-OPAC 2.0 is free software; you can redistribute it and/or modify
* it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE as published by
* the Free Software Foundation.
*
* There are special exceptions to the terms and conditions of the AGPL as it
* is applied to this software (see README file).
*
* AFI-OPAC 2.0 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU AFFERO GENERAL PUBLIC LICENSE for more details.
*
* You should have received a copy of the GNU AFFERO GENERAL PUBLIC LICENSE
* along with AFI-OPAC 2.0; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
////////////////////////////////////////////////////////////////////////////////
// OPAC3 - Catalogues de notices
////////////////////////////////////////////////////////////////////////////////
/**
 * Loader for Class_Catalogue: turns a catalogue's facet/date/cote settings
 * into SQL WHERE fragments and runs the corresponding record (notice)
 * queries.
 */
class CatalogueLoader extends Storm_Model_Loader {
  const DEFAULT_ITEMS_BY_PAGE = 100;

  /**
   * Loads the notices matched by a catalogue, paginated.
   * @param Class_Catalogue $catalogue
   * @param int $itemsByPage
   * @param int $page
   * @param array $find_all_params extra findAllBy options (limitPage/where are filled in)
   * @return array empty when the catalogue is null or matches nothing
   */
  public function loadNoticesFor($catalogue, $itemsByPage = self::DEFAULT_ITEMS_BY_PAGE, $page = 1, $find_all_params = null) {
    if (!is_array($find_all_params))
      $find_all_params = array();
    if (!isset($find_all_params['limitPage']))
      $find_all_params['limitPage'] = array($page, $itemsByPage);
    if (null == $catalogue)
      return array();
    if ('' == ($where = $this->clausesFor($catalogue)))
      return array();
    $find_all_params['where'] = $where;
    return Class_Notice::getLoader()->findAllBy($find_all_params);
  }

  /**
   * Counts the notices matched by a catalogue.
   * @param Class_Catalogue $catalogue
   * @return int
   */
  public function countNoticesFor($catalogue) {
    if (!$catalogue)
      return 0;
    if ('' == ($where = $this->clausesFor($catalogue)))
      return 0;
    return Class_Notice::getLoader()->countBy(array('where' => $where));
  }

  /**
   * Builds the full WHERE clause for a catalogue by AND-ing the individual
   * facet/doc-type/year/cote/novelty/date clauses.
   * @param Class_Catalogue $catalogue
   * @return string empty string when no condition applies
   */
  public function clausesFor($catalogue) {
    $conditions = array();
    if ($fromUntil = $this->fromUntilClauseFor($catalogue))
      $conditions[] = $fromUntil;

    // A match-everything catalogue ignores all other restrictions.
    if ($catalogue->isMatchingAllNotices())
      return $fromUntil ? $fromUntil : '1=1';

    if ($facets = $this->facetsClauseFor($catalogue))
      $conditions[] = $facets;
    if ($docType = $this->docTypeClauseFor($catalogue))
      $conditions[] = $docType;
    if ($year = $this->yearClauseFor($catalogue))
      $conditions[] = $year;
    if ($cote = $this->coteClauseFor($catalogue))
      $conditions[] = $cote;
    if ($new = $this->nouveauteClauseFor($catalogue))
      $conditions[] = $new;
    if (0 == count($conditions))
      return '';
    return implode(' and ', $conditions);
  }

  /**
   * Builds the MATCH ... AGAINST clause over the notices' facet index.
   * Library/section/genre/... facets are mandatory (+); author/subject/...
   * facets are OR-ed together in a single +(...) group.
   * @param $catalogue Class_Catalogue
   * @return string
   */
  public function facetsClauseFor($catalogue, $against = '') {
    $against_ou = '';
    $facets = array('B' => $catalogue->getBibliotheque(),
                    'S' => $catalogue->getSection(),
                    'G' => $catalogue->getGenre(),
                    'L' => $catalogue->getLangue(),
                    'Y' => $catalogue->getAnnexe(),
                    'E' => $catalogue->getEmplacement());
    foreach ($facets as $k => $v)
      $against .= Class_Catalogue::getSelectionFacette($k, $v);

    $facets = array('A' => $catalogue->getAuteur(),
                    'M' => $catalogue->getMatiere(),
                    'D' => $catalogue->getDewey(),
                    'P' => $catalogue->getPcdm4(),
                    'T' => $catalogue->getTags(),
                    'F' => $catalogue->getInteret());
    // M/D/P facets also match their descendants (prefix search).
    foreach ($facets as $k => $v)
      $against_ou .= Class_Catalogue::getSelectionFacette($k, $v, in_array($k, array('M', 'D', 'P')), false);
    if ('' != $against_ou)
      $against .= ' +(' . $against_ou . ")";
    if ('' == $against)
      return '';
    return "MATCH(facettes) AGAINST('".$against."' IN BOOLEAN MODE)";
  }

  /**
   * @return string condition on type_doc; ';'-separated values become an IN list.
   */
  public function docTypeClauseFor($catalogue) {
    if (!$docType = $catalogue->getTypeDoc())
      return '';
    $parts = explode(';', $docType);
    if (1 == count($parts))
      return 'type_doc=' . $parts[0];
    return 'type_doc IN (' . implode(', ', $parts) . ')';
  }

  /**
   * @return string inclusive year-range condition (annee_debut/annee_fin).
   */
  public function yearClauseFor($catalogue) {
    $clauses = array();
    if ($start = $catalogue->getAnneeDebut())
      $clauses[] = "annee >= '" . $start . "'";
    if($end = $catalogue->getAnneeFin())
      $clauses[] = "annee <= '" . $end . "'";
    if (0 == count($clauses))
      return '';
    return implode(' and ', $clauses);
  }

  /**
   * @return string inclusive call-number (cote) range condition, uppercased.
   */
  public function coteClauseFor($catalogue) {
    $clauses = array();
    if ($start = $catalogue->getCoteDebut())
      $clauses[] = "cote >= '" . strtoupper($start) . "'";
    if ($end = $catalogue->getCoteFin())
      $clauses[] = "cote <= '". strtoupper($end) . "'";
    if (0 == count($clauses))
      return '';
    return implode(' and ', $clauses);
  }

  /**
   * @return string "new items" condition: created today or later.
   *         NOTE(review): ">= today" looks odd for a novelty filter —
   *         presumably relies on date_creation being set in the future or
   *         same-day imports; confirm against the import pipeline.
   */
  public function nouveauteClauseFor($catalogue) {
    if (1 != $catalogue->getNouveaute())
      return '';
    return 'date_creation >= \'' . date('Y-m-d') . '\'';
  }

  /**
   * @return string condition on the record update date window (from/until),
   *         used e.g. for incremental harvesting.
   */
  public function fromUntilClauseFor($catalogue) {
    $clauses = array();
    if ($start = $catalogue->getFrom())
      $clauses[] = "left(date_maj, 10) >= '" . $start . "'";
    if($end = $catalogue->getUntil())
      $clauses[] = "left(date_maj, 10) <= '" . $end . "'";
    if (0 == count($clauses))
      return '';
    return implode(' and ', $clauses);
  }
}
class Class_Catalogue extends Storm_Model_Abstract {
protected $_table_name = 'catalogue';
protected $_table_primary = 'ID_CATALOGUE';
protected $_loader_class = 'CatalogueLoader';
protected $_default_attribute_values = array('oai_spec' => '',
'description' => '',
'bibliotheque' => '',
'section' => '',
'genre' => '',
'langue' => '',
'annexe' => '',
'emplacement' => '',
'auteur' => '',
'matiere' => '',
'dewey' => '',
'pcdm4' => '',
'tags' => '',
'interet' => '',
'type_doc' => '',
'annee_debut' => '',
'annee_fin' => '',
'cote_debut' => '',
'cote_fin' => '',
'nouveaute' => '');
protected $_from;
protected $_until;
  /** @return CatalogueLoader the loader instance for this model class */
  public static function getLoader() {
    return self::getLoaderFor(__CLASS__);
  }
  /** @return AllNoticesCatalogue a catalogue that matches every notice */
  public static function newCatalogueForAll() {
    return new AllNoticesCatalogue();
  }
  /**
   * Visitor hook: exposes OAI spec, label and (optional) description,
   * in that order.
   */
  public function acceptVisitor($visitor) {
    $visitor->visitOaiSpec($this->getOaiSpec());
    $visitor->visitLibelle($this->getLibelle());
    if ($this->hasDescription())
      $visitor->visitDescription($this->getDescription());
  }
  /**
   * Paginated notices matched by this catalogue (delegates to the loader).
   * @return array
   */
  public function getNotices($page = 1, $itemsByPage = CatalogueLoader::DEFAULT_ITEMS_BY_PAGE, $params = null) {
    return self::getLoader()->loadNoticesFor($this, $itemsByPage, $page, $params);
  }
  /** @return int number of notices matched by this catalogue */
  public function getNoticesCount() {
    return self::getLoader()->countNoticesFor($this);
  }
  /** @return bool false here; overridden (e.g. AllNoticesCatalogue) to match everything */
  public function isMatchingAllNotices() {
    return false;
  }
  //------------------------------------------------------------------------------
  // Returns notices and stats for previewing/testing a catalogue definition
  //------------------------------------------------------------------------------
  /**
   * @return array with keys: requete (list SQL), notices (first 20),
   *         temps_execution (seconds), nb_notices (total count),
   *         avec_vignettes (count with a cached thumbnail)
   */
  public function getTestCatalogue() {
    // Notices and execution time
    $preferences = $this->toArray();
    $preferences['nb_notices'] = 20;
    $requetes=$this->getRequetes($preferences);
    $ret["requete"]=$requetes["req_liste"];
    $temps=time();
    $ret["notices"] = $this->getNotices(null, null,
                                        array('limitPage' => array(1, $preferences['nb_notices']),
                                              'order' => 'alpha_titre'));
    $ret["temps_execution"]=(time()-$temps);
    $ret["nb_notices"]=fetchOne($requetes["req_comptage"]);

    // Count of matches that have a cached thumbnail
    $req=$requetes["req_comptage"];
    if(strpos($req,"where") > 0) $req.=" and "; else $req.=" where ";
    $req.="url_vignette > '' and url_vignette != 'NO'";
    $ret["avec_vignettes"]=fetchOne($req);
    return $ret;
  }
public function shouldCacheContent() {
if (Class_Users::getLoader()->isCurrentUserAdmin())
return false;
return Class_AdminVar::isCacheEnabled();
}
  /**
   * Fetches the notice list for the given preferences, serving it from the
   * Zend cache when allowed, and (re)populating the cache otherwise.
   * NOTE(review): the result is saved to the cache even when
   * shouldCacheContent() is false — presumably to warm it for other users;
   * confirm this is intentional.
   * @return array
   */
  public function getNoticesFromCacheByPreferences($preferences, $cache_vignette) {
    $cache_key = md5(serialize($preferences).$cache_vignette);
    $cache = Zend_Registry::get('cache');
    if ($this->shouldCacheContent() && $cache->test($cache_key))
      return unserialize($cache->load($cache_key));
    $notices = $this->_fetchAllNoticesByPreferences($preferences, $cache_vignette);
    $cache->save(serialize($notices), $cache_key);
    return $notices;
  }
  //------------------------------------------------------------------------------
  // Returns the notices for a preference set (used by kiosk widgets)
  //------------------------------------------------------------------------------
  /**
   * @param array $preferences widget settings (aleatoire, nb_notices, ...)
   * @param mixed $cache_vignette thumbnail caching mode passed through
   * @return array full list, or a random slice when 'aleatoire' == 1
   */
  public function getNoticesByPreferences($preferences,$cache_vignette=false) {
    $notices = $this->getNoticesFromCacheByPreferences($preferences, $cache_vignette);
    if ((int)$preferences["aleatoire"] !== 1)
      return $notices;
    shuffle($notices);
    return array_slice ($notices, 0, $preferences["nb_notices"]);
  }
  /**
   * Runs the list query for the given preferences and formats each row for
   * display (title, author, optional thumbnail, ...).
   * When $cache_vignette is set, rows without a resolvable thumbnail are
   * dropped; "url" mode returns the thumbnail URL rather than caching it.
   * @return array of associative arrays, one per notice
   */
  public function _fetchAllNoticesByPreferences($preferences, $cache_vignette) {
    // Run the query
    $requetes=$this->getRequetes($preferences);
    if (!array_key_exists("req_liste", $requetes))
      return array();

    // Narrow the select to only the columns used below.
    $req_liste = str_replace('select *',
                             'select notices.id_notice, notices.editeur, notices.annee, notices.date_creation, notices.facettes, notices.clef_oeuvre',
                             $requetes["req_liste"]);

    $catalogue=fetchAll($req_liste);
    if (!$catalogue)
      return array();

    // Instantiations
    $class_notice = new Class_Notice();
    $class_img = new Class_WebService_Vignette();
    $notices = array();

    // Format the notices
    foreach($catalogue as $notice) {
      // 'TA' presumably selects the title/author fields — TODO confirm.
      $enreg=$class_notice->getNotice($notice["id_notice"],'TA');
      $vignette = '';
      if ($cache_vignette) {
        if($cache_vignette=="url") $mode=false; else $mode=true;
        $vignette=$class_img->getImage($enreg["id_notice"],$mode);
      }
      if (!$cache_vignette or $vignette) {
        $notices[]=array(
          "id_notice" => $enreg["id_notice"],
          "titre" => $enreg["T"],
          "auteur" => $enreg["A"],
          "vignette" => $vignette,
          "type_doc" => $enreg["type_doc"],
          "editeur" => $notice["editeur"],
          "annee" => $notice["annee"],
          "date_creation" => $notice["date_creation"],
          "facettes" => $notice["facettes"],
          "clef_oeuvre" => $notice["clef_oeuvre"]);
      }
    }
    return $notices;
  }
  //------------------------------------------------------------------------------
  // Builds the SQL queries (list / count / facets) for a preference set
  //------------------------------------------------------------------------------
  /**
   * @param array $preferences widget settings; an id_panier delegates to
   *        getRequetesPanier, an id_catalogue pulls in that catalogue's clauses
   * @param bool $no_limit when true, no LIMIT is appended
   * @return array keys: req_liste, req_comptage, req_facettes
   */
  public function getRequetes($preferences, $no_limit=false) {
    // A selection basket ("panier") is handled separately.
    if (isset($preferences["id_panier"]) && (0 !== (int)$preferences["id_panier"]))
      return $this->getRequetesPanier($preferences);

    // Read the catalogue's properties
    $against = $this->selectionFacettesForCatalogueRequestByPreferences($preferences);

    if ($catalogue = $this->getLoader()->find($preferences['id_catalogue'])) {
      $conditions = array($this->getLoader()->facetsClauseFor($catalogue, $against));
      $conditions []= $this->getLoader()->docTypeClauseFor($catalogue);
      $conditions []= $this->getLoader()->yearClauseFor($catalogue);
      $conditions []= $this->getLoader()->coteClauseFor($catalogue);
      $conditions []= $this->getLoader()->nouveauteClauseFor($catalogue);
    } else {
      $conditions = $against ? array("MATCH(facettes) AGAINST('".$against."' IN BOOLEAN MODE)") : array();
    }

    // Only notices that have a thumbnail
    if (isset($preferences['only_img']) && ($preferences["only_img"] == 1))
      $conditions[]="url_vignette > '' and url_vignette != 'NO'";

    // Only notices that have at least one review
    $join = (isset($preferences['avec_avis']) && ($preferences["avec_avis"] == 1))
      ? " INNER JOIN notices_avis ON notices.clef_oeuvre=notices_avis.clef_oeuvre "
      : '';

    // WHERE clause
    if ($where = implode(' and ', array_filter($conditions)))
      $where = ' where '.$where;

    // Assemble the three queries
    $order_by = $this->orderByForCatalogueRequestByPreferences($preferences);
    $limite = $this->limitForCatalogueRequestByPreferences($preferences, $no_limit);
    $ret["req_liste"]="select * from notices ".$join.$where.$order_by.$limite;
    $ret["req_comptage"]="select count(*) from notices ".$join.$where;
    $ret["req_facettes"]="select notices.id_notice,type_doc,facettes from notices ".$join.$where.$limite;
    return $ret;
  }
  /**
   * Converts the 'facettes' preference (';'-separated codes like "B12;S3",
   * first char = facet type, rest = value) into an AGAINST fragment.
   * @return string empty when no facet preference is set
   */
  public function selectionFacettesForCatalogueRequestByPreferences($preferences) {
    if (!isset($preferences["facettes"]))
      return '';
    $against = '';
    $facettes=explode(";", $preferences["facettes"]);
    foreach($facettes as $facette) {
      $facette=trim($facette);
      $against.=$this->getSelectionFacette(substr($facette,0,1),substr($facette,1));
    }
    return $against;
  }
public function orderByForCatalogueRequestByPreferences($preferences) {
if(!array_key_exists("tri", $preferences) || $preferences["tri"]==0)
return " order by alpha_titre ";
if ($preferences["tri"]==1)
return " order by date_creation DESC ";
if ($preferences["tri"]==2)
return " order by nb_visu DESC ";
}
  /**
   * Builds the LIMIT fragment. Random mode limits by nb_analyse (the pool to
   * shuffle from), otherwise by nb_notices; a hard cap applies regardless.
   * @return string
   */
  public function limitForCatalogueRequestByPreferences($preferences, $no_limit=false) {
    $limite = 0;
    if (isset($preferences["aleatoire"]) && (int)$preferences["aleatoire"]==1)
      $limite = (int)$preferences["nb_analyse"];
    else if (isset($preferences['nb_notices']))
      $limite = (int)$preferences["nb_notices"];
    if ($limite and !$no_limit)
      return " LIMIT 0,".$limite;
    return " LIMIT 5000"; //LL: added a hard cap because badly-defined catalogues were exhausting memory
  }
  //----------------------------------------------------------------------------
  // Builds the AGAINST fragment for one facet type
  //----------------------------------------------------------------------------
  /**
   * @param string $type single-letter facet code (B, S, G, ..., A, M, D, P, T, F)
   * @param string $valeurs ';'-separated facet values
   * @param bool $descendants also match child values ('*' suffix; subjects
   *        expand via their sub-headings instead)
   * @param bool $signe wrap the result in a mandatory '+(...)' group
   * @return string|false false when no values given
   */
  public static function getSelectionFacette($type, $valeurs, $descendants = false, $signe = true) {
    if (!$valeurs)
      return false;

    $valeurs = explode(';', $valeurs);
    $cond = '';
    foreach ($valeurs as $valeur) {
      if (!$valeur)
        continue;

      if (!$descendants) {
        $cond .= $type . $valeur . ' ';
        continue;
      }

      // Non-subject facets use a plain prefix match for descendants.
      if ('M' != $type) {
        $cond .= $type . $valeur . '* ';
        continue;
      }

      // Subjects (M) expand through their registered sub-headings.
      if (!$matiere = Class_Matiere::getLoader()->find($valeur))
        continue;

      if ('' != ($sous_vedettes = trim($matiere->getSousVedettes())))
        $valeur .= str_replace(' ', ' M', ' ' . $sous_vedettes);

      $cond .= $type . $valeur . ' ';
    }

    $cond = trim($cond);
    if ($signe)
      return ' +(' . $cond . ')';
    return ' ' . $cond;
  }
  //------------------------------------------------------------------------------
  // Builds the queries for a selection basket ("panier") per the preferences
  //------------------------------------------------------------------------------
  /**
   * @param array $preferences must contain id_panier; id_user narrows the lookup
   * @return array req_liste/req_comptage/req_facettes, or array('nombre' => 0)
   *         when the basket is missing or empty
   */
  public function getRequetesPanier($preferences)
  {
    if (array_key_exists('id_user', $preferences))
      $panier = Class_PanierNotice::getLoader()->findFirstBy(array('id_user' => $preferences['id_user'],
                                                                   'id_panier' => $preferences['id_panier']));
    else $panier = Class_PanierNotice::getLoader()->find($preferences['id_panier']);

    if (!$panier)
      return array("nombre" => 0);

    $cles_notices = $panier->getClesNotices();
    if (empty($cles_notices))
    {
      $ret["nombre"]=0;
      return $ret;
    }

    // Build the IN (...) list from the basket's notice keys.
    // NOTE(review): keys come from stored basket data, not user input, and
    // are interpolated unescaped — confirm they are safe.
    foreach($cles_notices as $notice) {
      if(!trim($notice)) continue;
      if(isset($in_sql)) $in_sql .=","; else $in_sql = '';
      $in_sql.="'".$notice."'";
    }

    // How many rows to fetch
    if($preferences["aleatoire"]==1) $limite=$preferences["nb_analyse"];
    else $limite=$preferences["nb_notices"];
    if($limite) $limite="LIMIT 0,".$limite; else $limite="";

    // Sort order
    $order_by ="";
    if($preferences["tri"]==0) $order_by=" order by alpha_titre ";
    if($preferences["tri"]==1) $order_by=" order by date_creation DESC ";
    if($preferences["tri"]==2) $order_by=" order by nb_visu DESC ";

    $condition = '';
    // Only notices that have a thumbnail
    if (array_isset("only_img", $preferences) && $preferences["only_img"] == 1)
      $condition=" and url_vignette > '' and url_vignette != 'NO' ";

    // Only notices that have at least one review
    $join = '';
    if (array_isset("avec_avis", $preferences) && $preferences["avec_avis"] == 1)
      $join = " INNER JOIN notices_avis ON notices.clef_oeuvre=notices_avis.clef_oeuvre ";

    // Result
    $ret["req_liste"]="select * from notices ".$join."where notices.clef_alpha in(".$in_sql.")".$condition.$order_by.$limite;
    $ret["req_comptage"]="select count(*) from notices ".$join."where notices.clef_alpha in(".$in_sql.")".$condition;
    $ret["req_facettes"]="select id_notice,type_doc,facettes from notices ".$join."where notices.clef_alpha in(".$in_sql.") ".$condition.$limite;
    return $ret;
  }
//-------------------------------------------------------------------------------
// liste des catalogues (structure complete)
//-------------------------------------------------------------------------------
/**
 * Fetches one catalogue row by id, or every catalogue ordered by label
 * when no id is given.
 * NOTE(review): $id_catalogue is interpolated into the SQL; callers are
 * expected to pass a numeric id.
 */
public function getCatalogue($id_catalogue)
{
	if (!$id_catalogue)
		return fetchAll("select * from catalogue order by LIBELLE");
	return fetchEnreg("select * from catalogue where ID_CATALOGUE=$id_catalogue");
}
//-------------------------------------------------------------------------------
// liste des catalogues pour une combo
//-------------------------------------------------------------------------------
/**
 * Returns id => label pairs for a catalogue select box, with a leading
 * blank entry; empty array when no catalogue exists.
 */
static function getCataloguesForCombo() {
	$combo = array();
	$rows = fetchAll("select * from catalogue order by libelle");
	if (!$rows)
		return $combo;
	$combo[""] = " ";
	foreach ($rows as $row) {
		$combo[$row["ID_CATALOGUE"]] = $row["LIBELLE"];
	}
	return $combo;
}
// Validating setter for the start year (see checkAndSetAnnee).
public function setAnneeDebut($value) {
	return $this->checkAndSetAnnee('annee_debut', $value);
}
// Validating setter for the end year (see checkAndSetAnnee).
public function setAnneeFin($value) {
	return $this->checkAndSetAnnee('annee_fin', $value);
}
/**
 * Stores a year attribute, replacing any value outside [1000, current year]
 * with an empty string.
 */
public function checkAndSetAnnee($attribute, $value) {
	$annee = (int)$value;
	$courante = date("Y");
	if ($annee < 1000 || $annee > $courante)
		$annee = '';
	return parent::_set($attribute, $annee);
}
// Validation rules: label is mandatory, year range must be ordered, and the
// OAI set spec is restricted to a safe character class.
public function validate() {
	$this->checkAttribute('libelle', $this->getLibelle(), 'Le libellé est requis');
	// Only enforce the ordering when both years are set.
	$this->checkAttribute('annee_fin',
		!($this->getAnneeDebut() and $this->getAnneeFin()) || $this->getAnneeDebut() <= $this->getAnneeFin(),
		"L'année de début doit être inférieure ou égale à l'année de fin");
	$this->checkAttribute('oai_spec',
		!$this->getOaiSpec() || preg_match('/^[a-zA-Z0-9_.-]+$/', $this->getOaiSpec()),
		"La spec OAI ne peut contenir que les caractères suivants: de a à z, 0 à 9, - _ .");
}
// Fluent accessors for the OAI harvesting window (_from / _until bounds).
public function setFrom($from) {
	$this->_from = $from;
	return $this;
}

public function getFrom() {
	return $this->_from;
}

public function setUntil($until) {
	$this->_until = $until;
	return $this;
}

public function getUntil() {
	return $this->_until;
}
}
// Null-object catalogue that matches every notice.
class AllNoticesCatalogue extends Class_Catalogue {
	public function isMatchingAllNotices() {
		return true;
	}
}
?> | lolgzs/opacce | library/Class/Catalogue.php | PHP | agpl-3.0 | 19,460 |
# Noosfero plugin adding a "Work Assignment" content type for organizations:
# members upload submissions under an assignment folder, downloads are
# restricted, and uploads can trigger an e-mail notification.
class WorkAssignmentPlugin < Noosfero::Plugin

  def self.plugin_name
    "Work Assignment"
  end

  def self.plugin_description
    _("New kind of content for organizations.")
  end

  # A submission may be downloaded when it is published, or when the user is
  # its author, may view private content of the profile, or the article is
  # explicitly displayed to that user. Returns nil when submission is nil.
  def self.can_download_submission?(user, submission)
    return unless submission
    submission.published? || (user && (submission.author == user || user.has_permission?('view_private_content', submission.profile) ||
    submission.display_unpublished_article_to?(user)))
  end

  # A "submission" is content nested two levels below a WorkAssignment
  # (assignment folder -> author folder -> uploaded file).
  def self.is_submission?(content)
    content && content.parent && content.parent.parent && content.parent.parent.kind_of?(WorkAssignmentPlugin::WorkAssignment)
  end

  # Offer the WorkAssignment article type only inside organization profiles.
  def content_types
    [WorkAssignmentPlugin::WorkAssignment] if context.respond_to?(:profile) && context.profile.organization?
  end

  def stylesheet?
    true
  end

  # Hide the generic "new content" button for work assignments.
  def content_remove_new(content)
    content.kind_of?(WorkAssignmentPlugin::WorkAssignment)
  end

  # Only organization members may upload into a work assignment.
  def content_remove_upload(content)
    if content.kind_of?(WorkAssignmentPlugin::WorkAssignment)
      !content.profile.members.include?(context.send(:user))
    end
  end

  # before_filter on content_viewer#view_page: deny access to submissions
  # the current user is not allowed to download.
  def content_viewer_controller_filters
    block = proc do
      path = get_path(params[:page], params[:format])
      content = profile.articles.find_by_path(path)
      if WorkAssignmentPlugin.is_submission?(content) && !WorkAssignmentPlugin.can_download_submission?(user, content)
        render_access_denied
      end
    end
    # NOTE(review): the method name below contains a typo ("assingment") but
    # it is a registered filter name, so it must stay as-is.
    { :type => 'before_filter',
      :method_name => 'work_assingment_only_admin_or_owner_download',
      :options => {:only => 'view_page'},
      :block => block }
  end

  # after_filter on cms#upload_files: when a notification address was given,
  # e-mail the uploaded files' details; outcome is reported via the flash.
  def cms_controller_filters
    block = proc do
      if request.post? && params[:uploaded_files]
        email_notification = params[:article_email_notification]
        unless !email_notification || email_notification.empty?
          email_contact = WorkAssignmentPlugin::EmailContact.new(:subject => @parent.name, :receiver => email_notification, :sender => user)
          WorkAssignmentPlugin::EmailContact::EmailSender.build_mail_message(email_contact, @uploaded_files)
          if email_contact.deliver
            session[:notice] = _('Notification successfully sent')
          else
            session[:notice] = _('Notification not sent')
          end
        end
      end
    end
    { :type => 'after_filter',
      :method_name => 'send_email_after_upload_file',
      :options => {:only => 'upload_files'},
      :block => block }
  end

  # Renders the extra "notify by e-mail" field on the upload form when
  # uploading into a work assignment.
  def upload_files_extra_fields(article)
    proc do
      @article = Article.find_by_id(article)
      if params[:parent_id] && !@article.nil? && @article.type == "WorkAssignmentPlugin::WorkAssignment"
        render :partial => 'notify_text_field', :locals => { :size => '45'}
      end
    end
  end
end
| EcoAlternative/noosfero-ecosol | plugins/work_assignment/lib/work_assignment_plugin.rb | Ruby | agpl-3.0 | 2,790 |
<?php namespace OniiChan\Domain;
/**
 * Base contract for domain entities: anything with a stable identity.
 */
interface Entity
{
  /**
   * Return an Entity identifier
   *
   * @return Identifier value object uniquely identifying this entity
   */
  public function id();
}
| flaxandteal/onii-chan | laravel/src/lib/OniiChan/Domain/Entity.php | PHP | agpl-3.0 | 153 |
/*
* Copyright 2015 Erwin Müller <erwin.mueller@deventm.org>
*
* This file is part of sscontrol-httpd-yourls.
*
* sscontrol-httpd-yourls is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* sscontrol-httpd-yourls is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
* for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with sscontrol-httpd-yourls. If not, see <http://www.gnu.org/licenses/>.
*/
package com.anrisoftware.sscontrol.httpd.yourls;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.ACCESS_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.API_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.BACKUP_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.CONVERT_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.DATABASE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.DEBUG_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.DRIVER_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.GMT_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.HOST_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.LANGUAGE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.MODE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.OFFSET_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.OVERRIDE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.PASSWORD_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.PORT_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.PREFIX_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.RESERVED_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.SITE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.STATS_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.TARGET_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.UNIQUE_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.URLS_KEY;
import static com.anrisoftware.sscontrol.httpd.yourls.YourlsServiceStatement.USER_KEY;
import java.net.URI;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import com.anrisoftware.sscontrol.core.api.ServiceException;
import com.anrisoftware.sscontrol.core.groovy.statementsmap.StatementsException;
import com.anrisoftware.sscontrol.core.groovy.statementsmap.StatementsMap;
import com.anrisoftware.sscontrol.core.groovy.statementstable.StatementsTable;
import com.anrisoftware.sscontrol.core.groovy.statementstable.StatementsTableFactory;
import com.anrisoftware.sscontrol.core.overridemode.OverrideMode;
import com.anrisoftware.sscontrol.core.yesno.YesNoFlag;
import com.anrisoftware.sscontrol.httpd.domain.Domain;
import com.anrisoftware.sscontrol.httpd.webserviceargs.DefaultWebService;
import com.anrisoftware.sscontrol.httpd.webserviceargs.DefaultWebServiceFactory;
import com.google.inject.assistedinject.Assisted;
/**
* <i>Yourls</i> service.
*
* @see <a href="http://yourls.org/>http://yourls.org/</a>
*
* @author Erwin Mueller, erwin.mueller@deventm.org
* @since 1.0
*/
class YourlsServiceImpl implements YourlsService {

    /**
     * The <i>Yourls</i> service name.
     */
    public static final String SERVICE_NAME = "yourls";

    // Delegate implementing the common web service statements (alias, id, ref, ...).
    private final DefaultWebService service;

    // Statements with a fixed set of allowed keys (database, override, backup, ...).
    private final StatementsMap statementsMap;

    // Tabular statements (debug logging, user accounts); injected via setter.
    private StatementsTable statementsTable;

    /**
     * @see YourlsServiceFactory#create(Map, Domain)
     */
    @Inject
    YourlsServiceImpl(DefaultWebServiceFactory webServiceFactory,
            @Assisted Map<String, Object> args, @Assisted Domain domain) {
        this.service = webServiceFactory.create(SERVICE_NAME, args, domain);
        this.statementsMap = service.getStatementsMap();
        setupStatements(statementsMap, args);
    }

    // Declares which statement keys the service script accepts, which of them
    // carry a direct value, and which map keys each statement allows.
    private void setupStatements(StatementsMap map, Map<String, Object> args) {
        map.addAllowed(DATABASE_KEY, OVERRIDE_KEY, BACKUP_KEY, ACCESS_KEY,
                USER_KEY, GMT_KEY, UNIQUE_KEY, CONVERT_KEY, RESERVED_KEY,
                SITE_KEY, LANGUAGE_KEY);
        map.setAllowValue(true, DATABASE_KEY, ACCESS_KEY, RESERVED_KEY,
                SITE_KEY, LANGUAGE_KEY);
        map.addAllowedKeys(DATABASE_KEY, USER_KEY, PASSWORD_KEY, HOST_KEY,
                PORT_KEY, PREFIX_KEY, DRIVER_KEY);
        map.addAllowedKeys(OVERRIDE_KEY, MODE_KEY);
        map.addAllowedKeys(BACKUP_KEY, TARGET_KEY);
        map.addAllowedKeys(ACCESS_KEY, STATS_KEY, API_KEY);
        map.addAllowedKeys(GMT_KEY, OFFSET_KEY);
        map.addAllowedKeys(UNIQUE_KEY, URLS_KEY);
        map.addAllowedKeys(CONVERT_KEY, MODE_KEY);
    }

    // Setter injection: configures the table statements (per-user passwords,
    // arbitrary debug keys).
    @Inject
    public final void setStatementsTable(StatementsTableFactory factory) {
        StatementsTable table = factory.create(this, SERVICE_NAME);
        table.addAllowed(DEBUG_KEY, USER_KEY);
        table.setAllowArbitraryKeys(true, DEBUG_KEY);
        table.addAllowedKeys(USER_KEY, PASSWORD_KEY);
        this.statementsTable = table;
    }

    @Override
    public Domain getDomain() {
        return service.getDomain();
    }

    @Override
    public String getName() {
        return SERVICE_NAME;
    }

    // The following accessors simply delegate to the default web service.

    public void setAlias(String alias) throws ServiceException {
        service.setAlias(alias);
    }

    @Override
    public String getAlias() {
        return service.getAlias();
    }

    public void setId(String id) throws ServiceException {
        service.setId(id);
    }

    @Override
    public String getId() {
        return service.getId();
    }

    public void setRef(String ref) throws ServiceException {
        service.setRef(ref);
    }

    @Override
    public String getRef() {
        return service.getRef();
    }

    public void setRefDomain(String ref) throws ServiceException {
        service.setRefDomain(ref);
    }

    @Override
    public String getRefDomain() {
        return service.getRefDomain();
    }

    public void setPrefix(String prefix) throws ServiceException {
        service.setPrefix(prefix);
    }

    @Override
    public String getPrefix() {
        return service.getPrefix();
    }

    @Override
    public Map<String, Object> debugLogging(String key) {
        return statementsTable.tableKeys(DEBUG_KEY, key);
    }

    @Override
    public Map<String, Object> getDatabase() {
        // HashMap subclass that silently drops null values, so only the
        // database settings actually configured end up in the result.
        @SuppressWarnings("serial")
        Map<String, Object> map = new HashMap<String, Object>() {

            @Override
            public Object put(String key, Object value) {
                if (value != null) {
                    return super.put(key, value);
                } else {
                    return null;
                }
            }
        };
        StatementsMap m = statementsMap;
        map.put(DATABASE_KEY.toString(), m.value(DATABASE_KEY));
        map.put(USER_KEY.toString(), m.mapValue(DATABASE_KEY, USER_KEY));
        map.put(PASSWORD_KEY.toString(), m.mapValue(DATABASE_KEY, PASSWORD_KEY));
        map.put(HOST_KEY.toString(), m.mapValue(DATABASE_KEY, HOST_KEY));
        map.put(PORT_KEY.toString(), m.mapValue(DATABASE_KEY, PORT_KEY));
        map.put(PREFIX_KEY.toString(), m.mapValue(DATABASE_KEY, PREFIX_KEY));
        map.put(DRIVER_KEY.toString(), m.mapValue(DATABASE_KEY, DRIVER_KEY));
        // Null means "no database configured at all".
        return map.size() == 0 ? null : map;
    }

    @Override
    public OverrideMode getOverrideMode() {
        return statementsMap.mapValue(OVERRIDE_KEY, MODE_KEY);
    }

    @Override
    public URI getBackupTarget() {
        return statementsMap.mapValueAsURI(BACKUP_KEY, TARGET_KEY);
    }

    @Override
    public Access getSiteAccess() {
        return statementsMap.value(ACCESS_KEY);
    }

    @Override
    public Access getStatsAccess() {
        return statementsMap.mapValue(ACCESS_KEY, STATS_KEY);
    }

    @Override
    public Access getApiAccess() {
        return statementsMap.mapValue(ACCESS_KEY, API_KEY);
    }

    @Override
    public Integer getGmtOffset() {
        return statementsMap.mapValue(GMT_KEY, OFFSET_KEY);
    }

    @Override
    public Boolean getUniqueUrls() {
        // The script may use either a yes/no flag or a plain boolean.
        Object value = statementsMap.mapValue(UNIQUE_KEY, URLS_KEY);
        if (value instanceof YesNoFlag) {
            return ((YesNoFlag) value).asBoolean();
        } else {
            return (Boolean) value;
        }
    }

    @Override
    public Convert getUrlConvertMode() {
        return statementsMap.mapValue(CONVERT_KEY, MODE_KEY);
    }

    @Override
    public List<String> getReserved() {
        return statementsMap.valueAsStringList(RESERVED_KEY);
    }

    @Override
    public String getLanguage() {
        return statementsMap.value(LANGUAGE_KEY);
    }

    @Override
    public Map<String, String> getUsers() {
        return statementsTable.tableKeys(USER_KEY, PASSWORD_KEY);
    }

    @Override
    public String getSite() {
        return statementsMap.value(SITE_KEY);
    }

    // Groovy dynamic dispatch: try the map statements first, fall back to the
    // table statements when the statement name is unknown to the map.
    public Object methodMissing(String name, Object args) {
        try {
            return service.methodMissing(name, args);
        } catch (StatementsException e) {
            return statementsTable.methodMissing(name, args);
        }
    }

    @Override
    public String toString() {
        return service.toString();
    }
}
| devent/sscontrol | sscontrol-httpd-yourls/src/main/java/com/anrisoftware/sscontrol/httpd/yourls/YourlsServiceImpl.java | Java | agpl-3.0 | 10,069 |
/*
* This file is part of ELKI:
* Environment for Developing KDD-Applications Supported by Index-Structures
*
* Copyright (C) 2019
* ELKI Development Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package elki.utilities.datastructures;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.Random;
import org.junit.Test;
/**
* Test the Kuhn-Munkres implementation.
*
* @author Erich Schubert
*/
public class KuhnMunkresWongTest {
  @Test
  public void test1() {
    // Solve the assignment problem on the shared TEST1 cost matrix and
    // verify every row is assigned and the total cost is optimal.
    int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.TEST1);
    double sum = 0.;
    for(int i = 0; i < assignment.length; i++) {
      assertTrue("Unassigned row " + i, assignment[i] >= 0);
      sum += KuhnMunkresTest.TEST1[i][assignment[i]];
    }
    assertEquals("Assignment not optimal", 55, sum, 0);
  }

  @Test
  public void test2() {
    int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.TEST2);
    double sum = 0.;
    for(int i = 0; i < assignment.length; i++) {
      assertTrue("Unassigned row " + i, assignment[i] >= 0);
      sum += KuhnMunkresTest.TEST2[i][assignment[i]];
    }
    assertEquals("Assignment not optimal", 4, sum, 0);
  }

  @Test
  public void testNonSq() {
    // Rectangular cost matrix: rows < columns.
    int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.NONSQUARE);
    double sum = 0.;
    for(int i = 0; i < assignment.length; i++) {
      assertTrue("Unassigned row " + i, assignment[i] >= 0);
      sum += KuhnMunkresTest.NONSQUARE[i][assignment[i]];
    }
    assertEquals("Assignment not optimal", 637518, sum, 0);
  }

  @Test
  public void testDifficult() {
    int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.DIFFICULT);
    double sum = 0.;
    for(int i = 0; i < assignment.length; i++) {
      assertTrue("Unassigned row " + i, assignment[i] >= 0);
      sum += KuhnMunkresTest.DIFFICULT[i][assignment[i]];
    }
    assertEquals("Assignment not optimal", 2.24, sum, 1e-4);
  }

  @Test
  public void testDifficult2() {
    int[] assignment = new KuhnMunkresWong().run(KuhnMunkresTest.DIFFICULT2);
    double sum = 0.;
    for(int i = 0; i < assignment.length; i++) {
      assertTrue("Unassigned row " + i, assignment[i] >= 0);
      sum += KuhnMunkresTest.DIFFICULT2[i][assignment[i]];
    }
    assertEquals("Assignment not optimal", 0.8802, sum, 1e-4);
  }

  @Test
  public void testLarge() {
    // Random (but seeded, hence reproducible) 100x100 cost matrix.
    long seed = 0L;
    Random rnd = new Random(seed);
    double[][] mat = new double[100][100];
    for(int i = 0; i < mat.length; i++) {
      double[] row = mat[i];
      for(int j = 0; j < row.length; j++) {
        row[j] = Math.abs(rnd.nextDouble());
      }
    }
    int[] assignment = new KuhnMunkresWong().run(mat);
    double sum = 0.;
    for(int i = 0; i < assignment.length; i++) {
      assertTrue("Unassigned row " + i, assignment[i] >= 0);
      sum += mat[i][assignment[i]];
    }
    // NOTE(review): mat is fixed at 100x100 above, so only the 100x100
    // branch below is live; the other branches record the expected sums
    // (and observed runtimes) for when the size is edited by hand.
    if(seed == 0) {
      if(mat.length == 10 && mat[0].length == 10) {
        assertEquals("sum", 1.467733381753002, sum, 1e-8);
        // Duration: 0.007970609
      }
      if(mat.length == 100 && mat[0].length == 100) {
        assertEquals("sum", 1.5583906418867581, sum, 1e-8);
        // Duration: 0.015696813
      }
      if(mat.length == 1000 && mat[0].length == 1000) {
        assertEquals("sum", 1.6527526146559663, sum, 1e-8);
        // Duration: 0.8892345580000001
      }
      if(mat.length == 10000 && mat[0].length == 10000) {
        assertEquals("sum", 1.669458072091596, sum, 1e-8);
        // Duration: 3035.95495334
      }
    }
  }
}
| elki-project/elki | elki-core-util/src/test/java/elki/utilities/datastructures/KuhnMunkresWongTest.java | Java | agpl-3.0 | 4,174 |
# Route table for the Brimir helpdesk. Order matters: the more specific
# tickets/ namespace routes must precede the plain tickets resources.
Brimir::Application.routes.draw do
  # Devise authentication; OmniAuth callbacks handled by a custom controller.
  devise_for :users, controllers: { omniauth_callbacks: 'omniauth' }
  resources :users

  # Bulk operations on the current ticket selection.
  namespace :tickets do
    resource :deleted, only: :destroy, controller: :deleted
    resource :selected, only: :update, controller: :selected
  end
  resources :tickets, only: [:index, :show, :update, :new, :create]
  resources :labelings, only: [:destroy, :create]
  resources :rules
  resources :labels, only: [:destroy, :update, :index]
  resources :replies, only: [:create, :new]

  # Download route with an explicit format segment, declared before the
  # generic attachments resources.
  get '/attachments/:id/:format' => 'attachments#show'
  resources :attachments, only: [:index, :new]
  resources :email_addresses, only: [:index, :create, :new, :destroy]

  root to: 'tickets#index'

  # Versioned JSON API.
  namespace :api do
    namespace :v1 do
      resources :tickets, only: [ :index, :show ]
      resources :sessions, only: [ :create ]
    end
  end
end
| vartana/brimir | config/routes.rb | Ruby | agpl-3.0 | 876 |
#include <stddef.h>
#include "logger/logger.h"
#include "wrappers.h"
using namespace drivers::i2c;
/// Writes to the device on the system bus; on any failure the transfer is
/// retried once on the payload bus and that result is returned.
I2CResult I2CFallbackBus::Write(const I2CAddress address, gsl::span<const uint8_t> inData)
{
    auto result = this->_innerBuses.Bus.Write(address, inData);

    if (result != I2CResult::OK)
    {
        LOGF(LOG_LEVEL_WARNING, "Fallbacking to payload bus. System bus error %d. Transfer to %X", num(result), address);
        result = this->_innerBuses.Payload.Write(address, inData);
    }

    return result;
}
/// Reads from the device on the system bus; on any failure the transfer is
/// retried once on the payload bus and that result is returned.
I2CResult I2CFallbackBus::Read(const I2CAddress address, gsl::span<uint8_t> outData)
{
    auto result = this->_innerBuses.Bus.Read(address, outData);

    if (result != I2CResult::OK)
    {
        LOGF(LOG_LEVEL_WARNING, "Fallbacking to payload bus. System bus error %d. Transfer to %X", num(result), address);
        result = this->_innerBuses.Payload.Read(address, outData);
    }

    return result;
}
/// Combined write-then-read on the system bus; on any failure the transfer is
/// retried once on the payload bus and that result is returned.
I2CResult I2CFallbackBus::WriteRead(const I2CAddress address, gsl::span<const uint8_t> inData, gsl::span<uint8_t> outData)
{
    auto result = this->_innerBuses.Bus.WriteRead(address, inData, outData);

    if (result != I2CResult::OK)
    {
        LOGF(LOG_LEVEL_WARNING, "Fallbacking to payload bus. System bus error %d. Transfer to %X", num(result), address);
        result = this->_innerBuses.Payload.WriteRead(address, inData, outData);
    }

    return result;
}
// Keeps a reference to the system/payload bus pair; the caller must keep the
// I2CInterface alive for the lifetime of this fallback bus.
I2CFallbackBus::I2CFallbackBus(I2CInterface& buses) : _innerBuses(buses)
{
}
| PW-Sat2/PWSat2OBC | libs/drivers/i2c/fallback.cpp | C++ | agpl-3.0 | 1,832 |
<?php
/**
* Soap Handler Class
*
*
* @package Np_Soap
* @subpackage Np_Soap
* @copyright Copyright (C) 2012-2013 S.D.O.C. LTD. All rights reserved.
* @license GNU Affero Public License version 3 or later; see LICENSE.txt
*/
/**
* Np_Db Class Definition
*
* @package Np_Soap
* @subpackage Np_Soap
*/
class Np_Soap_Handler {

	/**
	 * sendMessage is the function defined by our wsdl.
	 *
	 * It will be called by other providers in order to send transaction
	 * messages to internal.
	 * 1. the function receives the params
	 * 2. logs them to database (when request logging is enabled)
	 * 3. validates whether they are in correct format and not null
	 * 4. sends internal the message via http request
	 * 5. returns the resulting ack code from the params validation
	 *
	 * @param Array $params
	 * @return array "NP_ACK" or string
	 */
	public function sendMessage($params) {
		$data = $this->intoArray($params);
		$reqModel = new Application_Model_Request($data); //prepares data for sending internal the message
		$ack = $reqModel->Execute();
		// log all received calls if request log enabled
		if (Application_Model_General::getSettings('EnableRequestLog')) {
			Application_Model_General::logRequestResponse($params, $ack, $data['REQUEST_ID'], '[Input] ');
		}
		// Anything that is not a recognizable ack string degrades to Ack00.
		if ($ack === FALSE || (strpos(strtolower($ack), "ack") === FALSE)) {
			$ack = "Ack00";
		}
		return array('NP_ACK' => array('ACK_CODE' => $ack, //returns default value for testing need to fix
				'ACK_DATE' => Application_Model_General::getDateIso()));
	}

	/**
	 * turns the soap array into a simple array for sending to internal.
	 * sets soap "signature" so array may be validated and sent back
	 * through soap after it reaches internal's proxy
	 *
	 * @param Array $params
	 * @return Array $params associative array
	 */
	public function intoArray($params) {
		$data = (array) $params->NP_MESSAGE; //takes data out of np message array
		$xmlString = simplexml_load_string($data['BODY']); //loads xml string from xml object in body
		// Guard against an unparsable body: downstream code indexes [0].
		if ($xmlString == NULL) {
			$xmlString[0] = "NULL";
		}
		$header = (array) $data['HEADER'];
		$msgtype = $header['MSG_TYPE'];
		// The element name inside the body matches the MSG_TYPE header.
		$xmlArray = $xmlString[0]->$msgtype;
		$convertedData = $this->convertArray($msgtype, $xmlArray, $header);
		//sets soap "signature"
		$convertedData['SOAP'] = 1;
		return $convertedData; //returns simple array (1 level only)
	}

	/**
	 * convert Xml data to associative array
	 *
	 * @param string $msgType message type
	 * @param simple_xml $xmlArray simple xml object
	 * @param array $header the header data to join to the return data
	 *
	 * @return array converted data with header and the xml
	 * @todo refactoring to inner bridge classes
	 */
	function convertArray($msgType, $xmlArray, $header) {
		$data = $header;
		switch ($msgType) {
			case "Check":
				// The XML branch is named after the configured network type.
				$nType = Application_Model_General::getSettings("NetworkType");
				if ($nType === "M") {
					$networkType = "mobile";
					$data['NETWORK_TYPE'] = (string) $nType;
				} else {
					$networkType = "fixed";
					$data['NETWORK_TYPE'] = (string) $nType;
				}
				// Identified numbers carry owner identification values;
				// otherwise only the number itself is present.
				if (!empty($xmlArray->$networkType->mobileNumberIdentified) && $xmlArray->$networkType->mobileNumberIdentified !== NULL) {
					$data['IDENTIFICATION_VALUE'] = (string) $xmlArray->$networkType->mobileNumberIdentified->identificationValue;
					$data['IDENTIFICATION_VALUE_2ND'] = (string) $xmlArray->$networkType->mobileNumberIdentified->identificationValue2nd;
					$data['IDENTIFICATION_VALUE_3RD'] = (string) $xmlArray->$networkType->mobileNumberIdentified->identificationValue3rd;
					$data['NUMBER_TYPE'] = (string) $xmlArray->$networkType->mobileNumberIdentified->numberType;
					$data['NUMBER'] = (string) $xmlArray->$networkType->mobileNumberIdentified->number;
				} else {
					$data['NUMBER_TYPE'] = (string) $xmlArray->$networkType->mobileNumberUnidentified->numberType;
					$data['NUMBER'] = (string) $xmlArray->$networkType->mobileNumberUnidentified->number;
				}
				break;
			case "Request":
				$data['PORT_TIME'] = (string) $xmlArray->portingDateTime;
				break;
			case "Update":
				$data['PORT_TIME'] = (string) $xmlArray->portingDateTime;
				break;
			case "Cancel":
				break;
			case "KD_update":
				$data['KD_UPDATE_TYPE'] = (string) $xmlArray[0];
				$data['REMARK'] = (string) $xmlArray->remark;
				break;
			case "Execute":
				break;
			case "Publish":
				$data['DONOR'] = (string) $xmlArray->donor;
				$data['CONNECT_TIME'] = (string) $xmlArray->connectDateTime;
				$data['PUBLISH_TYPE'] = (string) $xmlArray->publishType;
				$data['DISCONNECT_TIME'] = (string) $xmlArray->disconnectDateTime;
				// Fixed networks may publish a single number or a range.
				if (isset($xmlArray->fixed)) {
					$data['NUMBER_TYPE'] = (string) $xmlArray->fixed->fixedNumberSingle->numberType;
					if (isset($xmlArray->fixed->fixedNumberRange)) {
						$data['NUMBER_TYPE'] = (string) $xmlArray->fixed->fixedNumberRange->numberType;
						$data['FROM_NUMBER'] = (string) $xmlArray->fixed->fixedNumberRange->fromNumber;
						$data['TO_NUMBER'] = (string) $xmlArray->fixed->fixedNumberRange->toNumber;
					} else {
						$data['NUMBER'] = (string) $xmlArray->fixed->fixedNumberSingle->number;
					}
				} else {
					$data['NUMBER_TYPE'] = (string) $xmlArray->mobile->numberType;
					$data['NUMBER'] = (string) $xmlArray->mobile->number;
				}
				break;
			case "Cancel_publish":
				$data['DONOR'] = (string) $xmlArray->donor;
				break;
			case "Return":
				$data['NUMBER'] = (string) $xmlArray->number;
				if (isset($xmlArray->mobile)) {
					$data['NETWORK_TYPE'] = (string) $xmlArray->mobile->networkType;
					$data['NUMBER_TYPE'] = (string) $xmlArray->mobile->numberType;
				} else {
					$data['NETWORK_TYPE'] = (string) $xmlArray->fixed->networkType;
					$data['NUMBER_TYPE'] = (string) $xmlArray->fixed->numberType;
				}
				break;
			case "Inquire_number":
				$data['NUMBER'] = (string) $xmlArray->number;
				break;
			case "Up_system":
				// NOTE(review): $res is currently unused; failures are ignored.
				$res = Application_Model_General::saveShutDownDetails($data['FROM'], "UP");
				break;
			case "Down_system":
				// NOTE(review): $res is currently unused; failures are ignored.
				$res = Application_Model_General::saveShutDownDetails($data['FROM'], "DOWN");
				break;
			case "Check_response":
				$data['ESSENTIAL_INFO_1'] = (string) $xmlArray->essentialInfo1;
				$data['ESSENTIAL_INFO_2'] = (string) $xmlArray->essentialInfo2;
				$data['ESSENTIAL_INFO_3'] = (string) $xmlArray->essentialInfo3;
				$data['ESSENTIAL_INFO_4'] = (string) $xmlArray->essentialInfo4;
				$data['ESSENTIAL_INFO_5'] = (string) $xmlArray->essentialInfo5;
				// intentional fall-through: response messages share the
				// approval/reject handling below
			case "Request_response":
				// this check because check_response go through this code (TODO: refactoring)
				if (isset($xmlArray->portingDateTime)) {
					$data['PORT_TIME'] = (string) $xmlArray->portingDateTime;
				}
				// intentional fall-through for all *_response messages
			case "Update_response":
			case "Cancel_response":
			case "Execute_response":
			case "Publish_response":
			case "KD_update_response":
			case "Inquire_number_response":
			case "Cancel_publish_response":
			case "Return_response":
				if (isset($xmlArray->positiveApproval)) {
					$data['APPROVAL_IND'] = "Y";
				} else {
					$data['APPROVAL_IND'] = "N";
					$data['REJECT_REASON_CODE'] = (string) $xmlArray->negativeApproval->rejectReasonCode;
				}
				$data['REQUEST_TRX_NO'] = (string) $xmlArray->requestTrxNo;
				$data['REQUEST_RETRY_DATE'] = (string) $xmlArray->requestRetryDate;
		}
		return $data;
	}

}
| mehulsbhatt/NPG | library/Np/Soap/Handler.php | PHP | agpl-3.0 | 7,386 |
# -*- coding: utf-8 -*-
# © 2014 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or later(http://www.gnu.org/licenses/agpl.html)
import time
from datetime import datetime
import openerp.addons.decimal_precision as dp
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DATETIME_FORMATS_MAP
from openerp.tools import float_compare
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp import netsvc
from openerp import tools
class mrp_production(osv.osv):
_inherit = 'mrp.production'
def _action_compute_lines(self, cr, uid, ids, properties=None, context=None):
""" Computes bills of material of a product.
@param properties: List containing dictionaries of properties.
@return: No. of products.
"""
if properties is None:
properties = []
results = []
bom_obj = self.pool.get('mrp.bom')
uom_obj = self.pool.get('product.uom')
prod_line_obj = self.pool.get('mrp.production.product.line')
workcenter_line_obj = self.pool.get('mrp.production.workcenter.line')
for production in self.browse(cr, uid, ids):
#unlink product_lines
prod_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.product_lines], context=context)
#unlink workcenter_lines
workcenter_line_obj.unlink(cr, SUPERUSER_ID, [line.id for line in production.workcenter_lines], context=context)
# search BoM structure and route
bom_point = production.bom_id
bom_id = production.bom_id.id
if not bom_point:
bom_id = bom_obj._bom_find(cr, uid, production.product_id.id, production.product_uom.id, properties)
if bom_id:
bom_point = bom_obj.browse(cr, uid, bom_id)
routing_id = bom_point.routing_id.id or False
self.write(cr, uid, [production.id], {'bom_id': bom_id, 'routing_id': routing_id})
if not bom_id:
continue
# get components and workcenter_lines from BoM structure
factor = uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, bom_point.product_uom.id)
res = bom_obj._bom_explode(cr, uid, bom_point, factor / bom_point.product_qty, properties, routing_id=production.routing_id.id)
results = res[0] # product_lines
results2 = res[1] # workcenter_lines
# reset product_lines in production order
for line in results:
line['production_id'] = production.id
prod_line_obj.create(cr, uid, line)
#reset workcenter_lines in production order
for line in results2:
line['production_id'] = production.id
workcenter_line_obj.create(cr, uid, line)
return results
def action_ready(self, cr, uid, ids, context=None):
""" Changes the production state to Ready and location id of stock move.
@return: True
"""
move_obj = self.pool.get('stock.move')
self.write(cr, uid, ids, {'state': 'ready'})
for production in self.browse(cr, uid, ids, context=context):
if not production.bom_id:
produce_move_id = self._make_production_produce_line(cr, uid, production, context=context)
for (production_id,name) in self.name_get(cr, uid, ids):
production = self.browse(cr, uid, production_id)
if production.move_prod_id and production.move_prod_id.location_id.id != production.location_dest_id.id:
move_obj.write(cr, uid, [production.move_prod_id.id],
{'location_id': production.location_dest_id.id})
return True
def action_produce(self, cr, uid, production_id, production_qty, production_mode, context=None):
    """Produce goods for a single manufacturing order.

    When the order has no bill of materials and is still in the ``ready``
    state, fire the ``button_produce`` workflow signal first so the order
    advances before the standard processing runs.

    @param production_id: id of the mrp.production to process (single id)
    @param production_qty: quantity to produce (passed through unchanged)
    @param production_mode: production mode (passed through unchanged)
    @return: result of the standard ``action_produce``
    """
    production = self.browse(cr, uid, production_id, context=context)
    if not production.bom_id and production.state == 'ready':
        wf_service = netsvc.LocalService("workflow")
        wf_service.trg_validate(uid, 'mrp.production', production_id, 'button_produce', cr)
    return super(mrp_production, self).action_produce(cr, uid, production_id, production_qty, production_mode, context=context)
# Instantiate the osv class so it registers itself in the ORM pool
# (standard OpenERP 6.1/7.0 module idiom).
mrp_production()
| Elico-Corp/openerp-7.0 | mrp_mo_nopicking/mrp.py | Python | agpl-3.0 | 4,435 |
/**
* @(#)MediaExportParameters.java
*
* This file is part of the Non-Linear Book project.
* Copyright (c) 2012-2016 Anton P. Kolosov
* Authors: Anton P. Kolosov, et al.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED BY
* ANTON P. KOLOSOV. ANTON P. KOLOSOV DISCLAIMS THE WARRANTY OF NON INFRINGEMENT
* OF THIRD PARTY RIGHTS
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the Non-Linear Book software without
* disclosing the source code of your own applications.
*
* For more information, please contact Anton P. Kolosov at this
* address: antokolos@gmail.com
*
* Copyright (c) 2012 Anton P. Kolosov All rights reserved.
*/
package com.nlbhub.nlb.domain;
import com.nlbhub.nlb.api.PropertyManager;
/**
 * Parameter object describing how media files are handled when a scheme is
 * exported to an end format (such as an INSTEAD game): whether PNG images
 * should be converted to JPG and which JPG quality should be used.
 *
 * @author Anton P. Kolosov
 * @version 1.0 8/9/12
 */
public class MediaExportParameters {
    /** Named configurations. CUSTOM is reserved for non-preset parameters. */
    public enum Preset {CUSTOM, DEFAULT, NOCHANGE, COMPRESSED};

    /** Leaves media files untouched (no conversion; quality unused). */
    private static final MediaExportParameters NOCHANGE =
            new MediaExportParameters(Preset.NOCHANGE, false, 0);

    /** Converts PNG to JPG with a fixed quality of 80. */
    private static final MediaExportParameters COMPRESSED =
            new MediaExportParameters(Preset.COMPRESSED, true, 80);

    /** Built from the application-wide export configuration at class load time. */
    private static final MediaExportParameters DEFAULT =
            new MediaExportParameters(
                    Preset.DEFAULT,
                    PropertyManager.getSettings().getDefaultConfig().getExport().isConvertpng2jpg(),
                    PropertyManager.getSettings().getDefaultConfig().getExport().getQuality()
            );

    /** Preset this instance corresponds to. */
    private Preset m_preset;

    /** Whether PNG files should be converted to JPG on export. */
    private boolean m_convertPNG2JPG;

    /** JPG quality to apply when conversion is enabled. */
    private int m_quality;

    /**
     * Returns the shared parameter instance for the given preset.
     *
     * @param preset requested preset
     * @return the NOCHANGE or COMPRESSED singleton for those presets; the
     *         configuration-backed DEFAULT instance for anything else
     */
    public static MediaExportParameters fromPreset(Preset preset) {
        switch (preset) {
            case NOCHANGE:
                return NOCHANGE;
            case COMPRESSED:
                return COMPRESSED;
            default:
                return DEFAULT;
        }
    }

    /** @return the instance derived from the application's default settings */
    public static MediaExportParameters getDefault() {
        return DEFAULT;
    }

    private MediaExportParameters(Preset preset, boolean convertPNG2JPG, int quality) {
        m_preset = preset;
        m_convertPNG2JPG = convertPNG2JPG;
        m_quality = quality;
    }

    /** @return the preset this instance was created from */
    public Preset getPreset() {
        return m_preset;
    }

    /** @return true when PNG images should be converted to JPG */
    public boolean isConvertPNG2JPG() {
        return m_convertPNG2JPG;
    }

    /** @return the JPG quality used for converted images */
    public int getQuality() {
        return m_quality;
    }
}
| Antokolos/NLB | NLBL/src/main/java/com/nlbhub/nlb/domain/MediaExportParameters.java | Java | agpl-3.0 | 3,959 |
<div align="center" class="heading_gray">
<h3>User Registration</h3>
</div>
<br/>
<?php
// Open the registration form (posts to user/user_registration/userinsert)
// and start the blue rounded box that wraps the table below.
echo form_open('user/user_registration/userinsert', array('id' => 'formUser'));
echo blue_box_top();
?>
<table width="100%" border="0" cellspacing="0" cellpadding="4" align="center" class="heading_tab" style="margin-top:15px;">
  <tr>
    <th colspan="4" align="left">User Creation</th>
  </tr>
  <tr>
    <td width="10%" class=""> </td>
    <td align="left" width="27%" class="table_row_first"> User Name : </td>
    <td align="left" width="55%" class="table_row_first"><?php echo form_input("txtNewUserName",@$selected_user[0]['user_name'], 'class="input_box" id="txtNewUserName"' );?></td>
    <td width="18%"> </td>
  </tr>
  <tr>
    <td> </td>
    <td align="left" width="27%" class="table_row_first"> Password : </td>
    <td align="left" width="55%" class="table_row_first"><?php echo form_password("txtNewPassword",'', 'class="input_box" id="txtNewPassword"' );?></td>
    <td width="18%"> </td>
  </tr>
  <!--<tr>
    <td> </td>
    <td align="left" width="27%" class="table_row_first"> User Type : </td>
    <td align="left" width="55%" class="table_row_first"><?php echo (@$school_show == 'show') ? @$school_details[0]['class_end'] : form_dropdown("userType", array(0=>'...........', 1=>'Admin', 2 => 'User'), 'id="userType"','id="userType"');?></td>
    <td width="18%"> </td>
  </tr>-->
<?php
// Render the user-rights checkbox matrix only when rights are configured.
if(count(@$user_rights) > 0 ){?>
  <tr>
    <td> </td>
    <td align="left" width="27%" class="table_row_first" valign="top"> User Rights : </td>
    <td align="left" width="55%" class="table_row_first">
      <?php
      // Group functionality checkboxes under their label; a new heading is
      // emitted whenever label_name changes between consecutive rows
      // (assumes $user_rights is ordered by label — TODO confirm in model).
      $functionality_label = '';
      for($i=0; $i<count($user_rights); $i++){
        if($functionality_label != $user_rights[$i]['label_name']){
          $functionality_label = $user_rights[$i]['label_name'];
          ?>
          <div class="clear_both"></div>
          <div class="functionality_label"><?php echo $user_rights[$i]['label_name']?></div>
          <?php
        }
        // Pre-check the box when editing a user that already owns this right.
        if(count(@$selected_user_rights) > 0 && @$selected_user_rights[0] != 0){
          $checked = (in_array($user_rights[$i]['rf_id'], @$selected_user_rights)) ? 'TRUE' : '';
        } else {
          $checked = '';
        }
        $data = array(
          'name' => 'chkRight_'.$user_rights[$i]['rf_id'],
          'id' => 'chkRight_'.$user_rights[$i]['rf_id'],
          'value' => $user_rights[$i]['rf_id'],
          'checked' => $checked,
          'style' => 'margin:5px',
        );
        ?>
        <div class="clear_both"></div>
        <div class="functionalities">
          <?php
          echo form_checkbox($data);
          echo form_label($user_rights[$i]['rf_functionality'], 'chkRight_'.$user_rights[$i]['rf_id']).'<br>';
          ?>
        </div>
        <?php
      }
      ?>
    </td>
    <td width="18%"> </td>
  </tr>
<?php }?>
  <tr>
    <td align="center" colspan="4">
      <?php echo (@$selected_user[0]['user_id'] != '') ? form_button('Update User', 'Update User', 'onClick="javascript: return fnsUserUpdate(\''.@$selected_user[0]['user_id'].'\')"').' '.form_button('Cancel', 'Cancel', 'onClick="javascript: return cancel()"'):form_submit('Add User', 'Add User', 'onClick="javascript: return fnsUserAdd()"');?> </td>
  </tr>
</table>
<input type="hidden" name="hidUserId" id="hidUserId" />
<?php
// Close the blue box and the form.
echo blue_box_bottom();
echo form_close();
?>
| itschool/kalolsavam-subdistrict | system/application/views/user/user_registration.php | PHP | agpl-3.0 | 3,619 |
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateOauthAuthCodesTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the `oauth_auth_codes` table storing short-lived OAuth2
     * authorization codes (schema appears to follow Laravel Passport's
     * standard layout — confirm against the installed Passport version).
     *
     * @return void
     */
    public function up()
    {
        Schema::create('oauth_auth_codes', function (Blueprint $table) {
            $table->string('id', 100)->primary();            // code identifier
            $table->unsignedBigInteger('user_id')->index();  // user the code was issued for
            $table->unsignedBigInteger('client_id');         // OAuth client that requested it
            $table->text('scopes')->nullable();              // granted scopes, if any
            $table->boolean('revoked');                      // whether the code has been revoked
            $table->dateTime('expires_at')->nullable();      // code expiry time
        });
    }

    /**
     * Reverse the migrations.
     *
     * Drops the `oauth_auth_codes` table if it exists.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('oauth_auth_codes');
    }
}
| monicahq/monica | database/migrations/2016_06_01_000001_create_oauth_auth_codes_table.php | PHP | agpl-3.0 | 852 |
require 'spec_helper'
require 'rollbar/middleware/sinatra'
require 'sinatra/base'
require 'rack/test'
# Minimal Sinatra application used as the system under test: one GET route
# that raises, one that succeeds, and one POST route that raises.
class SinatraDummy < Sinatra::Base
  class DummyError < StandardError; end

  use Rollbar::Middleware::Sinatra

  get '/foo' do
    # Raising the class lets Kernel#raise instantiate it with no arguments.
    raise DummyError
  end

  get '/bar' do
    'this will not crash'
  end

  post '/crash_post' do
    raise DummyError
  end
end
# Integration-style specs: drive SinatraDummy through Rack::Test and assert
# on what the Rollbar middleware reports.
describe Rollbar::Middleware::Sinatra, :reconfigure_notifier => true do
  include Rack::Test::Methods

  # Rack::Test resolves the app under test from this method.
  def app
    SinatraDummy
  end

  let(:logger_mock) { double('logger').as_null_object }

  before do
    Rollbar.configure do |config|
      config.logger = logger_mock
      config.framework = 'Sinatra'
    end
  end

  let(:uncaught_level) do
    Rollbar.configuration.uncaught_exception_level
  end

  # Arguments every example expects Rollbar.log to receive.
  let(:expected_report_args) do
    [uncaught_level, exception, { :use_exception_level_filters => true }]
  end

  describe '#call' do
    context 'for a crashing endpoint' do
      # this is the default for test mode in Sinatra
      context 'with raise_errors? == true' do
        let(:exception) { kind_of(SinatraDummy::DummyError) }

        before do
          allow(app.settings).to receive(:raise_errors?).and_return(true)
        end

        it 'reports the error to Rollbar API and raises error' do
          expect(Rollbar).to receive(:log).with(*expected_report_args)

          expect do
            get '/foo'
          end.to raise_error(SinatraDummy::DummyError)
        end
      end

      context 'with raise_errors? == false' do
        let(:exception) { kind_of(SinatraDummy::DummyError) }

        before do
          allow(app.settings).to receive(:raise_errors?).and_return(false)
        end

        it 'reports the error to Rollbar, but nothing is raised' do
          expect(Rollbar).to receive(:log).with(*expected_report_args)

          get '/foo'
        end
      end
    end

    context 'for a NOT crashing endpoint' do
      it 'doesnt report any error to Rollbar API' do
        expect(Rollbar).not_to receive(:log)
        get '/bar'
      end
    end

    # The middleware's own failure while building the report must not be
    # swallowed: the report is attempted, then the exception propagates.
    context 'if the middleware itself fails' do
      let(:exception) { Exception.new }

      before do
        allow_any_instance_of(described_class).to receive(:framework_error).and_raise(exception)
        allow(app.settings).to receive(:raise_errors?).and_return(false)
      end

      it 'reports the report error' do
        expect(Rollbar).to receive(:log).with(*expected_report_args)

        expect do
          get '/foo'
        end.to raise_error(exception)
      end
    end

    context 'with GET parameters' do
      let(:exception) { kind_of(SinatraDummy::DummyError) }
      let(:params) do
        {
          'key' => 'value'
        }
      end

      it 'appear in the sent payload' do
        expect do
          get '/foo', params
        end.to raise_error(exception)

        expect(Rollbar.last_report[:request][:GET]).to be_eql(params)
      end
    end

    context 'with POST parameters' do
      let(:exception) { kind_of(SinatraDummy::DummyError) }
      let(:params) do
        {
          'key' => 'value'
        }
      end

      it 'appear in the sent payload' do
        expect do
          post '/crash_post', params
        end.to raise_error(exception)

        expect(Rollbar.last_report[:request][:POST]).to be_eql(params)
      end
    end

    context 'with JSON POST parameters' do
      let(:exception) { kind_of(SinatraDummy::DummyError) }
      let(:params) do
        {
          'key' => 'value'
        }
      end

      it 'appears in the sent payload when application/json is the content type' do
        expect do
          post '/crash_post', params.to_json, { 'CONTENT_TYPE' => 'application/json' }
        end.to raise_error(exception)

        expect(Rollbar.last_report[:request][:body]).to be_eql(params.to_json)
      end

      it 'appears in the sent payload when the accepts header contains json' do
        expect do
          post '/crash_post', params, { 'ACCEPT' => 'application/vnd.github.v3+json' }
        end.to raise_error(exception)

        expect(Rollbar.last_report[:request][:POST]).to be_eql(params)
      end
    end

    # A fresh scope object per request prevents one request's data from
    # leaking into the next report.
    it 'resets the notifier scope in every request' do
      get '/bar'
      id1 = Rollbar.scope_object.object_id

      get '/bar'
      id2 = Rollbar.scope_object.object_id

      expect(id1).not_to be_eql(id2)
    end

    context 'with person data' do
      let(:exception) { kind_of(SinatraDummy::DummyError) }
      let(:person_data) do
        { 'email' => 'person@example.com' }
      end

      it 'includes person data from env' do
        expect do
          get '/foo', {}, 'rollbar.person_data' => person_data
        end.to raise_error(exception)

        expect(Rollbar.last_report[:person]).to be_eql(person_data)
      end

      it 'includes empty person data when not in env' do
        expect do
          get '/foo'
        end.to raise_error(exception)

        expect(Rollbar.last_report[:person]).to be_eql({})
      end
    end
  end
end
| NullVoxPopuli/aeonvera | vendor/bundle/ruby/2.4.0/gems/rollbar-2.15.2/spec/rollbar/middleware/sinatra_spec.rb | Ruby | agpl-3.0 | 5,036 |
/*
* Asqatasun - Automated webpage assessment
* Copyright (C) 2008-2020 Asqatasun.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contact us by mail: asqatasun AT asqatasun DOT org
*/
package org.asqatasun.rules.rgaa30;
import org.asqatasun.ruleimplementation.AbstractNotTestedRuleImplementation;
/**
 * Rule 1.10.1 of the Rgaa 3.0 referential.
 *
 * The class derives from {@link AbstractNotTestedRuleImplementation}, i.e.
 * this check is not automated and is reported as "not tested".
 *
 * For implementation details refer to
 * <a href="http://doc.asqatasun.org/en/90_Rules/rgaa3.0/01.Images/Rule-1-10-1.html">the rule 1.10.1 design page.</a>
 * @see <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#test-1-10-1"> 1.10.1 rule specification</a>
 */
public class Rgaa30Rule011001 extends AbstractNotTestedRuleImplementation {

    /** Creates the rule instance. */
    public Rgaa30Rule011001() {
        super();
    }

}
| Asqatasun/Asqatasun | rules/rules-rgaa3.0/src/main/java/org/asqatasun/rules/rgaa30/Rgaa30Rule011001.java | Java | agpl-3.0 | 1,487 |
'use strict';
var phonetic = require('phonetic');
var socketio = require('socket.io');
var _ = require('underscore');
var load = function(http) {
    var io = socketio(http);
    var ioNamespace = '/';

    // Generate a pronounceable room id that is not currently in use.
    var getEmptyRoomId = function() {
        var candidate;
        do {
            candidate = phonetic.generate().toLowerCase();
        } while (io.nsps[ioNamespace].adapter.rooms[candidate]);
        return candidate;
    };

    // Broadcast the current member count of the connection's room to
    // everyone in it.
    var sendRoomInfo = function(socket, info) {
        var roomId = info.roomId;
        if (!roomId) {
            return;
        }

        var members = io.nsps[ioNamespace].adapter.rooms[roomId];
        io.sockets.in(roomId).emit('room.info', {
            id: roomId,
            count: members ? Object.keys(members).length : 0
        });
    };

    // First 'join' for a connection: honour the client-supplied room id when
    // that room already exists, otherwise assign a fresh empty room.
    var onJoin = function(socket, info, data) {
        if (info.roomId) {
            return;
        }

        var requested = data && data.roomId ? data.roomId : null;
        if (requested && io.nsps[ioNamespace].adapter.rooms[requested]) {
            info.roomId = requested;
            console.log('[Socket] Assigning room id ' + info.roomId + ' to ip ' + socket.handshake.address + ' (from client)');
        } else {
            info.roomId = getEmptyRoomId();
            console.log('[Socket] Assigning room id ' + info.roomId + ' to ip ' + socket.handshake.address);
        }

        socket.join(info.roomId);
        socket.emit('join', {
            roomId: info.roomId
        });
        sendRoomInfo(socket, info);
    };

    // Relay an event to everyone else in the sender's room.
    var onEvent = function(socket, info, event, data) {
        if (!info.roomId) {
            return;
        }
        socket.broadcast.to(info.roomId).emit(event, data);
    };

    // Acknowledge a received file chunk to the sender, then relay it.
    var onChunk = function(socket, info, data) {
        socket.emit('file.ack', {
            guid: data.guid
        });
        onEvent(socket, info, 'file.chunk', data);
    };

    var onConnection = function(socket) {
        console.log('[Socket] New connection from ip ' + socket.handshake.address);

        // Per-connection state shared by the handlers below.
        var info = {
            roomId: null
        };

        socket.on('disconnect', function() {
            console.log('[Socket] Connection from ip ' + socket.handshake.address + ' disconnected');
            sendRoomInfo(socket, info);
        });
        socket.on('join', function(data) { onJoin(socket, info, data); });
        socket.on('file.start', function(data) { onEvent(socket, info, 'file.start', data); });
        socket.on('file.chunk', function(data) { onChunk(socket, info, data); });
    };

    io.on('connection', onConnection);
};
// Public API: load(http) attaches the socket.io handlers to the server.
module.exports = {
    load: load
};
/*
* Fluffy Meow - Torrent RSS generator for TV series
* Copyright (C) 2015 Victor Denisov
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.plukh.fluffymeow.aws;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2Client;
import com.amazonaws.services.ec2.model.DescribeTagsRequest;
import com.amazonaws.services.ec2.model.DescribeTagsResult;
import com.amazonaws.services.ec2.model.Filter;
import com.amazonaws.services.ec2.model.TagDescription;
import org.apache.http.client.fluent.Request;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
public class AWSInstanceInfoProviderImpl implements InstanceInfoProvider {
    private static final Logger log = LogManager.getLogger(AWSInstanceInfoProviderImpl.class);

    private static final String NAME_TAG = "Name";
    private static final String DEPLOYMENT_ID_TAG = "deploymentId";

    // Cached result of the first successful lookup; reused on later calls.
    private InstanceInfo instanceInfo;

    public AWSInstanceInfoProviderImpl() {
    }

    /**
     * Resolves information about the EC2 instance this process runs on.
     *
     * On first use the instance id is read from the EC2 metadata endpoint and
     * the "Name" and "deploymentId" tags are fetched through the EC2 API; the
     * assembled result is cached for subsequent calls.
     * NOTE(review): the lazy cache is not synchronized — concurrent first
     * calls may each perform the lookup; confirm single-threaded use.
     *
     * @return the cached or freshly resolved instance information
     * @throws AWSInstanceInfoException when the metadata service cannot be read
     */
    @Override
    public InstanceInfo getInstanceInfo() {
        if (instanceInfo != null) {
            return instanceInfo;
        }

        try {
            AmazonEC2 ec2 = new AmazonEC2Client();

            String instanceId = Request.Get("http://169.254.169.254/latest/meta-data/instance-id").execute().returnContent().asString();
            if (log.isDebugEnabled()) log.debug("Instance Id: " + instanceId);

            DescribeTagsRequest request = new DescribeTagsRequest().withFilters(
                    new Filter().withName("resource-id").withValues(instanceId),
                    new Filter().withName("key").withValues(NAME_TAG, DEPLOYMENT_ID_TAG));
            DescribeTagsResult tags = ec2.describeTags(request);

            String name = getTag(tags, NAME_TAG);
            if (log.isDebugEnabled()) log.debug("Instance name: " + name);

            String deploymentId = getTag(tags, DEPLOYMENT_ID_TAG);
            if (log.isDebugEnabled()) log.debug("Deployment: " + deploymentId);

            instanceInfo = new InstanceInfo()
                    .withInstanceId(instanceId)
                    .withName(name)
                    .withDeploymentId(deploymentId);
        } catch (IOException e) {
            throw new AWSInstanceInfoException("Error retrieving AWS instance info", e);
        }

        return instanceInfo;
    }

    /** Returns the value of the given tag, or null when it is not present. */
    private String getTag(DescribeTagsResult tagsResult, String tagName) {
        for (TagDescription tag : tagsResult.getTags()) {
            if (tag.getKey().equals(tagName)) {
                return tag.getValue();
            }
        }

        return null;
    }
}
| vdenisov/fluffy-meow | src/main/java/org/plukh/fluffymeow/aws/AWSInstanceInfoProviderImpl.java | Java | agpl-3.0 | 3,343 |
require_relative 'api_fixtures_helper'
# Test double standing in for the real API client: each accessor returns a
# response parsed from a recorded plist fixture (helpers supplied by
# ApiFixturesHelper), so specs never touch the network.
class FakeApiResponse
  include ApiFixturesHelper

  # Parsed app-initialisation payload.
  def app_init
    parse(app_init_plist)
  end

  # Parsed list of all cinemas.
  def all_cinemas
    parse(all_cinemas_plist)
  end

  # Parsed screening times for a film at a cinema.
  def film_times(cinema_id, film_id)
    parse(film_times_plist(cinema_id, film_id))
  end
end
/*
* Copyright (c) 2011-2015 libbitcoin developers (see AUTHORS)
* Copyright (c) 2016-2017 metaverse core developers (see MVS-AUTHORS)
*
* This file is part of metaverse.
*
* metaverse is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License with
* additional permissions to the one published by the Free Software
* Foundation, either version 3 of the License, or (at your option)
* any later version. For more information see LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <metaverse/bitcoin/message/alert.hpp>
#include <boost/iostreams/stream.hpp>
#include <metaverse/bitcoin/message/version.hpp>
#include <metaverse/bitcoin/utility/assert.hpp>
#include <metaverse/bitcoin/utility/container_sink.hpp>
#include <metaverse/bitcoin/utility/container_source.hpp>
#include <metaverse/bitcoin/utility/istream_reader.hpp>
#include <metaverse/bitcoin/utility/ostream_writer.hpp>
namespace libbitcoin {
namespace message {
// Wire-protocol identifiers: the command string and the protocol version
// range over which this message type is defined.
const std::string alert::command = "alert";
const uint32_t alert::version_minimum = version::level::minimum;
const uint32_t alert::version_maximum = version::level::maximum;
// The three factories below build an alert from, respectively, a byte
// container, a std::istream, and a reader; each delegates to from_data(),
// which resets the instance on malformed input.
alert alert::factory_from_data(uint32_t version, const data_chunk& data)
{
    alert message;
    message.from_data(version, data);
    return message;
}

alert alert::factory_from_data(uint32_t version, std::istream& stream)
{
    alert message;
    message.from_data(version, stream);
    return message;
}

alert alert::factory_from_data(uint32_t version, reader& source)
{
    alert message;
    message.from_data(version, source);
    return message;
}
// An alert is valid when it carries a payload or a signature.
bool alert::is_valid() const
{
    return !(payload.empty() && signature.empty());
}

// Restore the default (empty) state and release held capacity.
void alert::reset()
{
    payload.clear();
    payload.shrink_to_fit();
    signature.clear();
    signature.shrink_to_fit();
}
// Deserialize from a raw byte container by wrapping it in a stream.
bool alert::from_data(uint32_t version, const data_chunk& data)
{
    boost::iostreams::stream<byte_source<data_chunk>> input(data);
    return from_data(version, input);
}

// Deserialize from a std::istream by wrapping it in a reader adapter.
bool alert::from_data(uint32_t version, std::istream& stream)
{
    istream_reader wrapped(stream);
    return from_data(version, wrapped);
}
// Deserialize the alert from a reader: a varint payload length, the payload
// bytes, a varint signature length, then the signature bytes. On any short
// read or stream failure the instance is reset and false is returned.
bool alert::from_data(uint32_t version, reader& source)
{
    reset();

    // Payload length (variable-length integer).
    auto size = source.read_variable_uint_little_endian();
    BITCOIN_ASSERT(size <= bc::max_size_t);
    const auto payload_size = static_cast<size_t>(size);
    size_t signature_size = 0;

    auto result = static_cast<bool>(source);

    if (result)
    {
        payload = source.read_data(payload_size);
        // A short read means the input was truncated.
        result = source && (payload.size() == payload_size);
    }

    if (result)
    {
        // Signature length (variable-length integer).
        size = source.read_variable_uint_little_endian();
        BITCOIN_ASSERT(size <= bc::max_size_t);
        signature_size = static_cast<size_t>(size);
        result = source;
    }

    if (result)
    {
        signature = source.read_data(signature_size);
        result = source && (signature.size() == signature_size);
    }

    if (!result)
        reset();

    return result;
}
// Serialize into a fresh byte container; asserts the byte count matches
// serialized_size().
data_chunk alert::to_data(uint32_t version) const
{
    data_chunk result;
    boost::iostreams::stream<byte_sink<data_chunk>> output(result);
    to_data(version, output);
    output.flush();
    BITCOIN_ASSERT(result.size() == serialized_size(version));
    return result;
}

// Serialize to a std::ostream via a writer adapter.
void alert::to_data(uint32_t version, std::ostream& stream) const
{
    ostream_writer stream_sink(stream);
    to_data(version, stream_sink);
}

// Write the length-prefixed payload followed by the length-prefixed signature.
void alert::to_data(uint32_t version, writer& sink) const
{
    sink.write_variable_uint_little_endian(payload.size());
    sink.write_data(payload);
    sink.write_variable_uint_little_endian(signature.size());
    sink.write_data(signature);
}
// Size = varint(payload length) + payload + varint(signature length) + signature.
uint64_t alert::serialized_size(uint32_t version) const
{
    const auto payload_part = variable_uint_size(payload.size()) + payload.size();
    const auto signature_part = variable_uint_size(signature.size()) + signature.size();
    return payload_part + signature_part;
}
// Two alerts are equal when both payload and signature match byte-for-byte.
// data_chunk is a std::vector of bytes, so defer to the container's own
// operator== (size check then element-wise compare) instead of the previous
// hand-rolled size/element loops, which duplicated that exact semantics.
bool operator==(const alert& left, const alert& right)
{
    return (left.payload == right.payload) &&
        (left.signature == right.signature);
}

bool operator!=(const alert& left, const alert& right)
{
    return !(left == right);
}
} // end message
} // end libbitcoin
| sikamedia/metaverse | src/lib/bitcoin/message/alert.cpp | C++ | agpl-3.0 | 4,834 |
module BABYLON {
export class Animation {
// Keyframes: array of { frame, value } entries ordered by frame.
private _keys: Array<any>;
// Per-(from,to) caches used by relative loop mode; cleared by setKeys().
private _offsetsCache = {};
private _highLimitsCache = {};
// Set by animate() when the animation can no longer run.
private _stopped = false;
// Animated object — not assigned within this class's visible code;
// presumably set by the animation engine (confirm at call sites).
public _target;
// Optional easing applied to the interpolation gradient.
private _easingFunction: IEasingFunction;
// targetProperty split on "." (e.g. ["rotation", "x"]).
public targetPropertyPath: string[];
// Frame computed by the most recent _interpolate() call.
public currentFrame: number;
/**
 * Convenience helper: builds a two-key animation from `from` to `to` over
 * `totalFrame` frames, attaches it to the mesh and starts it immediately.
 *
 * @param name animation name
 * @param mesh mesh whose property is animated
 * @param tartgetProperty dotted path of the property to animate
 * @param framePerSecond playback rate
 * @param totalFrame frame of the final key
 * @param from start value (its type selects the ANIMATIONTYPE_* constant)
 * @param to end value
 * @param loopMode optional ANIMATIONLOOPMODE_* constant
 * @returns the started animatable, or null when the type of `from` is
 *          not supported
 */
public static CreateAndStartAnimation(name: string, mesh: AbstractMesh, tartgetProperty: string,
    framePerSecond: number, totalFrame: number,
    from: any, to: any, loopMode?: number) {

    // Infer the animation data type from the start value.
    var dataType = undefined;

    if (!isNaN(parseFloat(from)) && isFinite(from)) {
        dataType = Animation.ANIMATIONTYPE_FLOAT;
    } else if (from instanceof Quaternion) {
        dataType = Animation.ANIMATIONTYPE_QUATERNION;
    } else if (from instanceof Vector3) {
        dataType = Animation.ANIMATIONTYPE_VECTOR3;
    } else if (from instanceof Vector2) {
        dataType = Animation.ANIMATIONTYPE_VECTOR2;
    } else if (from instanceof Color3) {
        dataType = Animation.ANIMATIONTYPE_COLOR3;
    }

    // Strict comparison (was a loose `==`); dataType is either undefined
    // or one of the numeric constants above.
    if (dataType === undefined) {
        return null;
    }

    var animation = new Animation(name, tartgetProperty, framePerSecond, dataType, loopMode);

    var keys = [];
    keys.push({ frame: 0, value: from });
    keys.push({ frame: totalFrame, value: to });
    animation.setKeys(keys);

    mesh.animations.push(animation);

    // Loop playback only in CYCLE mode; the named constant replaces the
    // former magic number 1.
    return mesh.getScene().beginAnimation(mesh, 0, totalFrame,
        (animation.loopMode === Animation.ANIMATIONLOOPMODE_CYCLE));
}
/**
 * @param name animation name
 * @param targetProperty dotted path of the animated property (e.g. "rotation.x")
 * @param framePerSecond playback rate used to convert elapsed time to frames
 * @param dataType one of the Animation.ANIMATIONTYPE_* constants
 * @param loopMode one of the Animation.ANIMATIONLOOPMODE_* constants
 */
constructor(public name: string, public targetProperty: string, public framePerSecond: number, public dataType: number, public loopMode?: number) {
    this.targetPropertyPath = targetProperty.split(".");
    this.dataType = dataType;
    // Default to CYCLE looping unless the caller specified a mode.
    this.loopMode = loopMode === undefined ? Animation.ANIMATIONLOOPMODE_CYCLE : loopMode;
}
// Methods

/** Whether the animation has been flagged as stopped. */
public isStopped(): boolean {
    return this._stopped;
}

/** Current keyframe array (not a copy). */
public getKeys(): any[] {
    return this._keys;
}

/** Easing function applied to the gradient, if any. */
public getEasingFunction() {
    return this._easingFunction;
}

/** Installs the easing function used by _interpolate(). */
public setEasingFunction(easingFunction: EasingFunction) {
    this._easingFunction = easingFunction;
}
/** Linear interpolation between two numbers. */
public floatInterpolateFunction(startValue: number, endValue: number, gradient: number): number {
    return startValue + (endValue - startValue) * gradient;
}

/** Spherical linear interpolation between two quaternions. */
public quaternionInterpolateFunction(startValue: Quaternion, endValue: Quaternion, gradient: number): Quaternion {
    return Quaternion.Slerp(startValue, endValue, gradient);
}

/** Linear interpolation between two 3D vectors. */
public vector3InterpolateFunction(startValue: Vector3, endValue: Vector3, gradient: number): Vector3 {
    return Vector3.Lerp(startValue, endValue, gradient);
}

/** Linear interpolation between two 2D vectors. */
public vector2InterpolateFunction(startValue: Vector2, endValue: Vector2, gradient: number): Vector2 {
    return Vector2.Lerp(startValue, endValue, gradient);
}

/** Linear interpolation between two colors. */
public color3InterpolateFunction(startValue: Color3, endValue: Color3, gradient: number): Color3 {
    return Color3.Lerp(startValue, endValue, gradient);
}

/**
 * Interpolates two transformation matrices by decomposing each into
 * scale / rotation / translation, interpolating the components
 * separately, and recomposing the result.
 */
public matrixInterpolateFunction(startValue: Matrix, endValue: Matrix, gradient: number): Matrix {
    var startScale = new Vector3(0, 0, 0);
    var startRotation = new Quaternion();
    var startTranslation = new Vector3(0, 0, 0);
    startValue.decompose(startScale, startRotation, startTranslation);

    var endScale = new Vector3(0, 0, 0);
    var endRotation = new Quaternion();
    var endTranslation = new Vector3(0, 0, 0);
    endValue.decompose(endScale, endRotation, endTranslation);

    var resultScale = this.vector3InterpolateFunction(startScale, endScale, gradient);
    var resultRotation = this.quaternionInterpolateFunction(startRotation, endRotation, gradient);
    var resultTranslation = this.vector3InterpolateFunction(startTranslation, endTranslation, gradient);

    var result = Matrix.Compose(resultScale, resultRotation, resultTranslation);

    return result;
}
/**
 * Returns a new Animation with the same identity and keyframes.
 * Note: the easing function is not copied (matches existing behavior).
 */
public clone(): Animation {
    var copy = new Animation(this.name, this.targetPropertyPath.join("."), this.framePerSecond, this.dataType, this.loopMode);
    copy.setKeys(this._keys);
    return copy;
}

/** Replaces the keyframes (defensive copy) and invalidates the caches. */
public setKeys(values: Array<any>): void {
    this._keys = values.slice();
    this._offsetsCache = {};
    this._highLimitsCache = {};
}
// A key may store either a literal value or a zero-argument factory;
// resolve to the concrete value in both cases.
private _getKeyValue(value: any): any {
    return (typeof value === "function") ? value() : value;
}
/**
 * Returns the animated value at `currentFrame`.
 *
 * @param currentFrame frame to evaluate
 * @param repeatCount number of completed loops so far
 * @param loopMode ANIMATIONLOOPMODE_* constant governing repetition
 * @param offsetValue per-loop offset added in RELATIVE mode
 * @param highLimitValue value clamped to in CONSTANT mode after one pass
 */
private _interpolate(currentFrame: number, repeatCount: number, loopMode: number, offsetValue?, highLimitValue?) {
    // In CONSTANT mode, once past the first pass, clamp to the high limit.
    if (loopMode === Animation.ANIMATIONLOOPMODE_CONSTANT && repeatCount > 0) {
        return highLimitValue.clone ? highLimitValue.clone() : highLimitValue;
    }

    this.currentFrame = currentFrame;

    // Try to get a hash to find the right key
    // (estimate the starting key index assuming roughly uniform key spacing,
    // then walk backwards until the key frame precedes currentFrame).
    var startKey = Math.max(0, Math.min(this._keys.length - 1, Math.floor(this._keys.length * (currentFrame - this._keys[0].frame) / (this._keys[this._keys.length - 1].frame - this._keys[0].frame)) - 1));

    if (this._keys[startKey].frame >= currentFrame) {
        while (startKey - 1 >= 0 && this._keys[startKey].frame >= currentFrame) {
            startKey--;
        }
    }

    // Scan forward for the key pair bracketing currentFrame.
    for (var key = startKey; key < this._keys.length ; key++) {
        if (this._keys[key + 1].frame >= currentFrame) {

            var startValue = this._getKeyValue(this._keys[key].value);
            var endValue = this._getKeyValue(this._keys[key + 1].value);

            // gradient : percent of currentFrame between the frame inf and the frame sup
            var gradient = (currentFrame - this._keys[key].frame) / (this._keys[key + 1].frame - this._keys[key].frame);

            // check for easingFunction and correction of gradient
            if (this._easingFunction != null) {
                gradient = this._easingFunction.ease(gradient);
            }

            // NOTE(review): several cases below have no trailing break, so an
            // unrecognized loop mode falls through to the next data type's
            // case. The fall-through recomputes equivalent lerps, so it looks
            // benign, but confirm it is intentional.
            switch (this.dataType) {
                // Float
                case Animation.ANIMATIONTYPE_FLOAT:
                    switch (loopMode) {
                        case Animation.ANIMATIONLOOPMODE_CYCLE:
                        case Animation.ANIMATIONLOOPMODE_CONSTANT:
                            return this.floatInterpolateFunction(startValue, endValue, gradient);
                        case Animation.ANIMATIONLOOPMODE_RELATIVE:
                            // RELATIVE mode accumulates the per-loop offset.
                            return offsetValue * repeatCount + this.floatInterpolateFunction(startValue, endValue, gradient);
                    }
                    break;
                // Quaternion
                case Animation.ANIMATIONTYPE_QUATERNION:
                    var quaternion = null;
                    switch (loopMode) {
                        case Animation.ANIMATIONLOOPMODE_CYCLE:
                        case Animation.ANIMATIONLOOPMODE_CONSTANT:
                            quaternion = this.quaternionInterpolateFunction(startValue, endValue, gradient);
                            break;
                        case Animation.ANIMATIONLOOPMODE_RELATIVE:
                            quaternion = this.quaternionInterpolateFunction(startValue, endValue, gradient).add(offsetValue.scale(repeatCount));
                            break;
                    }

                    return quaternion;
                // Vector3
                case Animation.ANIMATIONTYPE_VECTOR3:
                    switch (loopMode) {
                        case Animation.ANIMATIONLOOPMODE_CYCLE:
                        case Animation.ANIMATIONLOOPMODE_CONSTANT:
                            return this.vector3InterpolateFunction(startValue, endValue, gradient);
                        case Animation.ANIMATIONLOOPMODE_RELATIVE:
                            return this.vector3InterpolateFunction(startValue, endValue, gradient).add(offsetValue.scale(repeatCount));
                    }
                // Vector2
                case Animation.ANIMATIONTYPE_VECTOR2:
                    switch (loopMode) {
                        case Animation.ANIMATIONLOOPMODE_CYCLE:
                        case Animation.ANIMATIONLOOPMODE_CONSTANT:
                            return this.vector2InterpolateFunction(startValue, endValue, gradient);
                        case Animation.ANIMATIONLOOPMODE_RELATIVE:
                            return this.vector2InterpolateFunction(startValue, endValue, gradient).add(offsetValue.scale(repeatCount));
                    }
                // Color3
                case Animation.ANIMATIONTYPE_COLOR3:
                    switch (loopMode) {
                        case Animation.ANIMATIONLOOPMODE_CYCLE:
                        case Animation.ANIMATIONLOOPMODE_CONSTANT:
                            return this.color3InterpolateFunction(startValue, endValue, gradient);
                        case Animation.ANIMATIONLOOPMODE_RELATIVE:
                            return this.color3InterpolateFunction(startValue, endValue, gradient).add(offsetValue.scale(repeatCount));
                    }
                // Matrix
                case Animation.ANIMATIONTYPE_MATRIX:
                    switch (loopMode) {
                        case Animation.ANIMATIONLOOPMODE_CYCLE:
                        case Animation.ANIMATIONLOOPMODE_CONSTANT:
                            // return this.matrixInterpolateFunction(startValue, endValue, gradient);
                        case Animation.ANIMATIONLOOPMODE_RELATIVE:
                            // Matrix interpolation is disabled: the start value is returned as-is.
                            return startValue;
                    }
                default:
                    break;
            }
            break;
        }
    }
    // Past the last key: return the final key's value.
    return this._getKeyValue(this._keys[this._keys.length - 1].value);
}
/**
 * Advances the animation by the given elapsed time and writes the interpolated
 * value onto the target property (following targetPropertyPath).
 *
 * @param delay elapsed time in milliseconds since the animation started.
 * @param from first frame of the playback range.
 * @param to last frame of the playback range.
 * @param loop true when playback wraps around at the end of the range.
 * @param speedRatio playback speed multiplier (1 = normal speed).
 * @returns true while the animation is still running, false once a
 *          non-looping animation has played past its range.
 */
public animate(delay: number, from: number, to: number, loop: boolean, speedRatio: number): boolean {
    if (!this.targetPropertyPath || this.targetPropertyPath.length < 1) {
        this._stopped = true;
        return false;
    }
    var returnValue = true;
    // Adding a start key at frame 0 if missing
    if (this._keys[0].frame !== 0) {
        var newKey = { frame: 0, value: this._keys[0].value };
        this._keys.splice(0, 0, newKey);
    }
    // Clamp the requested range to the frames actually covered by the keys
    if (from < this._keys[0].frame || from > this._keys[this._keys.length - 1].frame) {
        from = this._keys[0].frame;
    }
    if (to < this._keys[0].frame || to > this._keys[this._keys.length - 1].frame) {
        to = this._keys[this._keys.length - 1].frame;
    }
    // Compute ratio
    var range = to - from;
    var offsetValue;
    // ratio represents the frame delta between from and to
    var ratio = delay * (this.framePerSecond * speedRatio) / 1000.0;
    var highLimitValue = 0;
    if (ratio > range && !loop) { // If we are out of range and not looping get back to caller
        returnValue = false;
        highLimitValue = this._getKeyValue(this._keys[this._keys.length - 1].value);
    } else {
        // Get max value if required: relative loop mode needs the offset
        // between the two range boundaries, cached per (to, from) pair.
        if (this.loopMode !== Animation.ANIMATIONLOOPMODE_CYCLE) {
            var keyOffset = to.toString() + from.toString();
            if (!this._offsetsCache[keyOffset]) {
                var fromValue = this._interpolate(from, 0, Animation.ANIMATIONLOOPMODE_CYCLE);
                var toValue = this._interpolate(to, 0, Animation.ANIMATIONLOOPMODE_CYCLE);
                switch (this.dataType) {
                    // Float
                    case Animation.ANIMATIONTYPE_FLOAT:
                        this._offsetsCache[keyOffset] = toValue - fromValue;
                        break;
                    // Quaternion
                    case Animation.ANIMATIONTYPE_QUATERNION:
                        this._offsetsCache[keyOffset] = toValue.subtract(fromValue);
                        break;
                    // Vector3
                    case Animation.ANIMATIONTYPE_VECTOR3:
                        this._offsetsCache[keyOffset] = toValue.subtract(fromValue);
                        break; // FIX: was falling through to the Vector2/Color3 cases
                    // Vector2
                    case Animation.ANIMATIONTYPE_VECTOR2:
                        this._offsetsCache[keyOffset] = toValue.subtract(fromValue);
                        break; // FIX: was falling through to the Color3 case
                    // Color3
                    case Animation.ANIMATIONTYPE_COLOR3:
                        this._offsetsCache[keyOffset] = toValue.subtract(fromValue);
                        break; // FIX: was falling through to default
                    default:
                        break;
                }
                this._highLimitsCache[keyOffset] = toValue;
            }
            highLimitValue = this._highLimitsCache[keyOffset];
            offsetValue = this._offsetsCache[keyOffset];
        }
    }
    if (offsetValue === undefined) {
        // No cached offset (cycle mode): fall back to the neutral element
        // for the animated data type.
        switch (this.dataType) {
            // Float
            case Animation.ANIMATIONTYPE_FLOAT:
                offsetValue = 0;
                break;
            // Quaternion
            case Animation.ANIMATIONTYPE_QUATERNION:
                offsetValue = new Quaternion(0, 0, 0, 0);
                break;
            // Vector3
            case Animation.ANIMATIONTYPE_VECTOR3:
                offsetValue = Vector3.Zero();
                break;
            // Vector2
            case Animation.ANIMATIONTYPE_VECTOR2:
                offsetValue = Vector2.Zero();
                break;
            // Color3
            case Animation.ANIMATIONTYPE_COLOR3:
                offsetValue = Color3.Black();
        }
    }
    // Compute value
    var repeatCount = (ratio / range) >> 0;
    var currentFrame = returnValue ? from + ratio % range : to;
    var currentValue = this._interpolate(currentFrame, repeatCount, this.loopMode, offsetValue, highLimitValue);
    // Set value: walk the property path down to its last segment
    if (this.targetPropertyPath.length > 1) {
        var property = this._target[this.targetPropertyPath[0]];
        for (var index = 1; index < this.targetPropertyPath.length - 1; index++) {
            property = property[this.targetPropertyPath[index]];
        }
        property[this.targetPropertyPath[this.targetPropertyPath.length - 1]] = currentValue;
    } else {
        this._target[this.targetPropertyPath[0]] = currentValue;
    }
    if (this._target.markAsDirty) {
        this._target.markAsDirty(this.targetProperty);
    }
    if (!returnValue) {
        this._stopped = true;
    }
    return returnValue;
}
// Statics
// Backing fields for the public animation type / loop mode constants exposed
// through the read-only getters below.
private static _ANIMATIONTYPE_FLOAT = 0;
private static _ANIMATIONTYPE_VECTOR3 = 1;
private static _ANIMATIONTYPE_QUATERNION = 2;
private static _ANIMATIONTYPE_MATRIX = 3;
private static _ANIMATIONTYPE_COLOR3 = 4;
private static _ANIMATIONTYPE_VECTOR2 = 5;
private static _ANIMATIONLOOPMODE_RELATIVE = 0;
private static _ANIMATIONLOOPMODE_CYCLE = 1;
private static _ANIMATIONLOOPMODE_CONSTANT = 2;
// Getter-only accessors keep the constants immutable from the outside.
public static get ANIMATIONTYPE_FLOAT(): number {
    return Animation._ANIMATIONTYPE_FLOAT;
}
public static get ANIMATIONTYPE_VECTOR3(): number {
    return Animation._ANIMATIONTYPE_VECTOR3;
}
public static get ANIMATIONTYPE_VECTOR2(): number {
    return Animation._ANIMATIONTYPE_VECTOR2;
}
public static get ANIMATIONTYPE_QUATERNION(): number {
    return Animation._ANIMATIONTYPE_QUATERNION;
}
public static get ANIMATIONTYPE_MATRIX(): number {
    return Animation._ANIMATIONTYPE_MATRIX;
}
public static get ANIMATIONTYPE_COLOR3(): number {
    return Animation._ANIMATIONTYPE_COLOR3;
}
public static get ANIMATIONLOOPMODE_RELATIVE(): number {
    return Animation._ANIMATIONLOOPMODE_RELATIVE;
}
public static get ANIMATIONLOOPMODE_CYCLE(): number {
    return Animation._ANIMATIONLOOPMODE_CYCLE;
}
public static get ANIMATIONLOOPMODE_CONSTANT(): number {
    return Animation._ANIMATIONLOOPMODE_CONSTANT;
}
}
}
| brosnanyuen/Project-S | Babylon.js-master/Babylon/Animations/babylon.animation.ts | TypeScript | agpl-3.0 | 17,965 |
//
//{block name="backend/create_backend_order/view/toolbar"}
//
/**
 * Top toolbar of the backend-order window: customer search / creation buttons,
 * currency and language selection, and a label showing the shop of the
 * currently selected customer.
 */
Ext.define('Shopware.apps.SwagBackendOrder.view.main.Toolbar', {
    extend: 'Ext.toolbar.Toolbar',
    alternateClassName: 'SwagBackendOrder.view.main.Toolbar',
    alias: 'widget.createbackendorder-toolbar',
    dock: 'top',
    ui: 'shopware-ui',
    padding: '0 10 0 10',
    snippets: {
        buttons: {
            openCustomer: '{s namespace="backend/swag_backend_order/view/toolbar" name="swag_backend_order/toolbar/button/open_customer"}Open Customer{/s}',
            createCustomer: '{s namespace="backend/swag_backend_order/view/toolbar" name="swag_backend_order/toolbar/button/create_customer"}Create Customer{/s}',
            createGuest: '{s namespace="backend/swag_backend_order/view/toolbar" name="swag_backend_order/toolbar/button/create_guest"}Create Guest{/s}'
        },
        shop: {
            noCustomer: '{s namespace="backend/swag_backend_order/view/toolbar" name="swag_backend_order/toolbar/shop/label/no_costumer"}Shop: No customer selected.{/s}',
            default: '{s namespace="backend/swag_backend_order/view/toolbar" name="swag_backend_order/toolbar/shop/label/default"}Shop: {/s}'
        },
        currencyLabel: '{s namespace="backend/swag_backend_order/view/toolbar" name="swag_backend_order/toolbar/currency/label"}Choose currency{/s}',
        languageLabel: '{s namespace="backend/swag_backend_order/view/toolbar" name="swag_backend_order/toolbar/language/label"}Language{/s}'
    },
    /**
     * Builds the toolbar items and wires up the store listeners.
     */
    initComponent: function () {
        var me = this;
        me.items = me.createToolbarItems();
        me.languageStore = Ext.create('Ext.data.Store', {
            name: 'languageStore',
            fields: ['id', 'mainId', 'categoryId', 'name', 'title', 'default']
        });
        /**
         * automatically selects the standard currency
         */
        me.currencyStore = me.subApplication.getStore('Currency');
        me.currencyStore.on('load', function () {
            me.changeCurrencyComboBox.bindStore(me.currencyStore);
            var standardCurrency = me.currencyStore.findExact('default', 1);
            if (standardCurrency > -1) {
                me.currencyModel = me.currencyStore.getAt(standardCurrency);
                me.changeCurrencyComboBox.select(me.currencyModel);
                me.currencyModel.set('selected', 1);
            } else {
                // No currency flagged as default: fall back to the first record.
                me.changeCurrencyComboBox.select(me.currencyStore.first());
                me.currencyStore.first().set('selected', 1);
            }
        });
        me.customerSearchField.on('valueselect', function () {
            me.openCustomerButton.setDisabled(false);
        });
        //selects and loads the language sub shops
        var customerStore = me.subApplication.getStore('Customer');
        customerStore.on('load', function () {
            if (typeof customerStore.getAt(0) !== 'undefined') {
                var shopName = '',
                    customerModel = customerStore.getAt(0);
                var languageId = customerModel.get('languageId');
                // Prefer the customer's language sub shop; fall back to the main shop.
                var index = customerModel.languageSubShop().findExact('id', languageId);
                if (index >= 0) {
                    shopName = customerModel.languageSubShop().getAt(index).get('name');
                } else {
                    index = customerModel.shop().findExact('id', languageId);
                    shopName = customerModel.shop().getAt(index).get('name');
                }
                me.shopLabel.setText(me.snippets.shop.default + shopName);
                me.fireEvent('changeCustomer');
                me.getLanguageShops(customerModel.shop().getAt(0).get('id'), customerStore.getAt(0).get('languageId'));
            }
        });
        me.callParent(arguments);
    },
    /**
     * register the events
     */
    registerEvents: function () {
        this.addEvents(
            'changeSearchField'
        ); // FIX: missing statement terminator
    },
    /**
     * creates the top toolbar items
     *
     * @returns []
     */
    createToolbarItems: function () {
        var me = this;
        me.customerSearchField = me.createCustomerSearch('customerName', 'id', 'email');
        me.createCustomerButton = Ext.create('Ext.button.Button', {
            text: me.snippets.buttons.createCustomer,
            handler: function () {
                me.fireEvent('createCustomer', false);
            }
        });
        me.createGuestButton = Ext.create('Ext.button.Button', {
            text: me.snippets.buttons.createGuest,
            handler: function () {
                me.fireEvent('createCustomer', true);
            }
        });
        me.openCustomerButton = Ext.create('Ext.button.Button', {
            text: me.snippets.buttons.openCustomer,
            disabled: true,
            margin: '0 30 0 0',
            handler: function () {
                me.fireEvent('openCustomer');
            }
        });
        me.shopLabel = Ext.create('Ext.form.Label', {
            text: me.snippets.shop.noCustomer,
            style: {
                fontWeight: 'bold'
            }
        });
        me.languageComboBox = Ext.create('Ext.form.field.ComboBox', {
            fieldLabel: me.snippets.languageLabel,
            labelWidth: 65,
            store: me.languageStore,
            queryMode: 'local',
            displayField: 'name',
            width: '20%',
            valueField: 'id',
            listeners: {
                change: {
                    fn: function (comboBox, newValue, oldValue, eOpts) {
                        me.fireEvent('changeLanguage', newValue);
                    }
                }
            }
        });
        me.changeCurrencyComboBox = Ext.create('Ext.form.field.ComboBox', {
            fieldLabel: me.snippets.currencyLabel,
            store: me.currencyStore, // FIX: config key was misspelled "stores" and silently ignored
            queryMode: 'local',
            displayField: 'currency',
            width: '20%',
            valueField: 'id',
            listeners: {
                change: {
                    fn: function (comboBox, newValue, oldValue, eOpts) {
                        me.fireEvent('changeCurrency', comboBox, newValue, oldValue, eOpts);
                    }
                }
            }
        });
        return [
            me.changeCurrencyComboBox, me.languageComboBox, me.shopLabel, '->',
            me.createCustomerButton, me.createGuestButton, me.openCustomerButton, me.customerSearchField
        ];
    },
    /**
     * Creates the customer live-search field.
     *
     * @param returnValue
     * @param hiddenReturnValue
     * @param name
     * @return Shopware.form.field.ArticleSearch
     */
    createCustomerSearch: function (returnValue, hiddenReturnValue, name) {
        var me = this;
        me.customerStore = me.subApplication.getStore('Customer');
        return Ext.create('Shopware.apps.SwagBackendOrder.view.main.CustomerSearch', {
            name: name,
            subApplication: me.subApplication,
            returnValue: returnValue,
            hiddenReturnValue: hiddenReturnValue,
            articleStore: me.customerStore,
            allowBlank: false,
            getValue: function () {
                me.store.getAt(me.record.rowIdx).set(name, this.getSearchField().getValue());
                return this.getSearchField().getValue();
            },
            setValue: function (value) {
                this.getSearchField().setValue(value);
            }
        });
    },
    /**
     * Loads the language sub shops of the given main shop via AJAX and
     * pre-selects the customer's language in the language combo box.
     *
     * @param mainShopId
     * @param languageId
     */
    getLanguageShops: function (mainShopId, languageId) {
        var me = this;
        Ext.Ajax.request({
            url: '{url action="getLanguageSubShops"}',
            params: {
                mainShopId: mainShopId
            },
            success: function (response) {
                me.languageStore.removeAll();
                var languageSubShops = Ext.JSON.decode(response.responseText);
                languageSubShops.data.forEach(function (record) {
                    me.languageStore.add(record);
                });
                me.languageComboBox.bindStore(me.languageStore);
                // FIX: removed an unused "languageIndex" lookup of the default shop.
                me.languageComboBox.setValue(languageId);
            }
        });
    }
});
//
//{/block}
// | GerDner/luck-docker | engine/Shopware/Plugins/Default/Backend/SwagBackendOrder/Views/backend/swag_backend_order/view/main/toolbar.js | JavaScript | agpl-3.0 | 8,411 |
/**
* This file is part of mycollab-services.
*
* mycollab-services is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-services is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-services. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.crm.service.ibatis;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.esofthead.mycollab.common.interceptor.aspect.Auditable;
import com.esofthead.mycollab.common.interceptor.aspect.Traceable;
import com.esofthead.mycollab.core.persistence.ICrudGenericDAO;
import com.esofthead.mycollab.core.persistence.ISearchableDAO;
import com.esofthead.mycollab.core.persistence.service.DefaultService;
import com.esofthead.mycollab.module.crm.dao.ProductMapper;
import com.esofthead.mycollab.module.crm.dao.ProductMapperExt;
import com.esofthead.mycollab.module.crm.domain.Product;
import com.esofthead.mycollab.module.crm.domain.criteria.ProductSearchCriteria;
import com.esofthead.mycollab.module.crm.service.ProductService;
/**
 * CRUD and search service for CRM products. All generic behaviour is inherited
 * from {@code DefaultService}; this class only supplies the product-specific
 * MyBatis mappers.
 */
@Service
@Transactional
public class ProductServiceImpl extends DefaultService<Integer, Product, ProductSearchCriteria>
        implements ProductService {

    @Autowired
    private ProductMapper productMapper;

    @Autowired
    private ProductMapperExt productMapperExt;

    /** @return the mapper used by the inherited CRUD operations. */
    @Override
    public ICrudGenericDAO<Integer, Product> getCrudMapper() {
        return productMapper;
    }

    /** @return the mapper used by the inherited search operations. */
    @Override
    public ISearchableDAO<ProductSearchCriteria> getSearchMapper() {
        return productMapperExt;
    }
}
| uniteddiversity/mycollab | mycollab-services/src/main/java/com/esofthead/mycollab/module/crm/service/ibatis/ProductServiceImpl.java | Java | agpl-3.0 | 2,122 |
/*
* jHears, acoustic fingerprinting framework.
* Copyright (C) 2009-2010 Juha Heljoranta.
*
* This file is part of jHears.
*
* jHears is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* jHears is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with jHears. If not, see <http://www.gnu.org/licenses/>.
*/
/**
*
*/
package org.jhears.server;
import java.util.Map;
/**
 * Read-only view of a jHears user account.
 */
public interface IUser {

    /** @return the user's display name. */
    String getName();

    /** @return the user's unique identifier. */
    Long getId();

    /** @return arbitrary key/value properties attached to this user. */
    Map<String, String> getProperties();
}
<?php
/*********************************************************************************
* SugarCRM Community Edition is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2012 SugarCRM Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo. If the display of the logo is not reasonably feasible for
* technical reasons, the Appropriate Legal Notices must display the words
* "Powered by SugarCRM".
********************************************************************************/
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
// VarDefHandler builds select-ready option arrays (field/module key => display
// label) from a module's field vardefs, optionally filtered through a named
// meta array loaded from include/VarDefHandler/vardef_meta_arrays.php.
class VarDefHandler {
    var $meta_array_name;
    // Filter definition pulled from $vardef_meta_array; false means "no filtering".
    var $target_meta_array = false;
    // When true, an empty "none" entry is prepended to the generated options.
    var $start_none = false;
    var $extra_array = array(); //used to add custom items
    var $options_array = array();
    // The module bean whose field_defs are scanned.
    var $module_object;
    // Optional label for the "none" entry; falls back to $app_strings['LBL_NONE'].
    var $start_none_lbl = null;

    // PHP4-style constructor (method named after the class).
    // NOTE(review): deprecated in modern PHP -- verify platform before renaming.
    function VarDefHandler(& $module, $meta_array_name=null){
        $this->module_object = $module;
        if($meta_array_name!=null){
            global $vardef_meta_array;
            include("include/VarDefHandler/vardef_meta_arrays.php");
            $this->target_meta_array = $vardef_meta_array[$meta_array_name];
        }
        //end function setup
    }

    // Builds and returns the options array from the module's field_defs.
    // $use_singular    - run the result through convert_module_to_singular()
    // $remove_dups     - skip entries whose label already exists in the result
    // $use_field_name  - for link fields, index by field name instead of module
    // $use_field_label - for link fields, label by vname instead of module name
    function get_vardef_array($use_singular=false, $remove_dups = false, $use_field_name = false, $use_field_label = false){
        global $dictionary;
        global $current_language;
        global $app_strings;
        global $app_list_strings;
        $temp_module_strings = return_module_language($current_language, $this->module_object->module_dir);
        $base_array = $this->module_object->field_defs;
        //$base_array = $dictionary[$this->module_object->object_name]['fields'];
        ///Include empty none entry or not
        if($this->start_none==true){
            if(!empty($this->start_none_lbl)){
                $this->options_array[''] = $this->start_none_lbl;
            } else {
                $this->options_array[''] = $app_strings['LBL_NONE'];
            }
        }
        ///used for special one off items added to filter array ex. would be href link for alert templates
        if(!empty($this->extra_array)){
            foreach($this->extra_array as $key => $value){
                $this->options_array[$key] = $value;
            }
        }
        /////////end special one off//////////////////////////////////
        foreach($base_array as $key => $value_array){
            $compare_results = $this->compare_type($value_array);
            if($compare_results == true){
                $label_name = '';
                // Link fields are labelled with the related module's name
                // (translated through moduleList when available).
                if($value_array['type'] == 'link' && !$use_field_label){
                    $this->module_object->load_relationship($value_array['name']);
                    if(!empty($app_list_strings['moduleList'][$this->module_object->$value_array['name']->getRelatedModuleName()])){
                        $label_name = $app_list_strings['moduleList'][$this->module_object->$value_array['name']->getRelatedModuleName()];
                    }else{
                        $label_name = $this->module_object->$value_array['name']->getRelatedModuleName();
                    }
                }
                else if(!empty($value_array['vname'])){
                    $label_name = $value_array['vname'];
                } else {
                    $label_name = $value_array['name'];
                }
                $label_name = get_label($label_name, $temp_module_strings);
                // NOTE(review): $column_table is computed but never used below --
                // confirm whether it was meant to feed into the result.
                if(!empty($value_array['table'])){
                    //Custom Field
                    $column_table = $value_array['table'];
                } else {
                    //Non-Custom Field
                    $column_table = $this->module_object->table_name;
                }
                if($value_array['type'] == 'link'){
                    if($use_field_name){
                        $index = $value_array['name'];
                    }else{
                        $index = $this->module_object->$key->getRelatedModuleName();
                    }
                }else{
                    $index = $key;
                }
                $value = trim($label_name, ':');
                if($remove_dups){
                    if(!in_array($value, $this->options_array))
                        $this->options_array[$index] = $value;
                }
                else
                    $this->options_array[$index] = $value;
                //end if field is included
            }
            //end foreach
        }
        if($use_singular == true){
            return convert_module_to_singular($this->options_array);
        } else {
            return $this->options_array;
        }
        //end get_vardef_array
    }

    // Decides whether a single field vardef passes the target_meta_array filter.
    // Evaluation order: inc_override (force include), ex_override (force
    // exclude), inclusion (must match), exclusion (must not match).
    function compare_type($value_array){
        //Filter nothing?
        if(!is_array($this->target_meta_array)){
            return true;
        }
        ////////Use the $target_meta_array;
        if(isset($this->target_meta_array['inc_override'])){
            foreach($this->target_meta_array['inc_override'] as $attribute => $value){
                foreach($value as $actual_value){
                    if(isset($value_array[$attribute]) && $value_array[$attribute] == $actual_value) return true;
                }
                if(isset($value_array[$attribute]) && $value_array[$attribute] == $value) return true;
            }
        }
        if(isset($this->target_meta_array['ex_override'])){
            foreach($this->target_meta_array['ex_override'] as $attribute => $value){
                foreach($value as $actual_value){
                    if(isset($value_array[$attribute]) && $value_array[$attribute] == $actual_value) return false;
                    // NOTE(review): comparing a scalar attribute against the whole
                    // $value array inside the inner loop -- in inc_override the
                    // analogous check sits outside the loop; confirm intent.
                    if(isset($value_array[$attribute]) && $value_array[$attribute] == $value) return false;
                }
                //end foreach inclusion array
            }
        }
        if(isset($this->target_meta_array['inclusion'])){
            foreach($this->target_meta_array['inclusion'] as $attribute => $value){
                if($attribute=="type"){
                    // NOTE(review): this rejects the field unless its type equals
                    // EVERY listed value -- only sensible for single-element
                    // lists; verify against vardef_meta_arrays.php before changing.
                    foreach($value as $actual_value){
                        if(isset($value_array[$attribute]) && $value_array[$attribute] != $actual_value) return false;
                    }
                } else {
                    if(isset($value_array[$attribute]) && $value_array[$attribute] != $value) return false;
                }
                //end foreach inclusion array
            }
        }
        if(isset($this->target_meta_array['exclusion'])){
            foreach($this->target_meta_array['exclusion'] as $attribute => $value){
                foreach($value as $actual_value){
                    // Normalize string booleans for the reportable flag, which is
                    // stored as 0/1 in the vardefs.
                    if ( $attribute == 'reportable' ) {
                        if ( $actual_value == 'true' ) $actual_value = 1;
                        if ( $actual_value == 'false' ) $actual_value = 0;
                    }
                    if(isset($value_array[$attribute]) && $value_array[$attribute] == $actual_value) return false;
                }
                //end foreach inclusion array
            }
        }
        return true;
        //end function compare_type
    }
    //end class VarDefHandler
}
?>
| minh10huy/HiringBossCRM | include/VarDefHandler/VarDefHandler.php | PHP | agpl-3.0 | 7,729 |
/*
* Copyright (C) 2000 - 2021 Silverpeas
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* As a special exception to the terms and conditions of version 3.0 of
* the GPL, you may redistribute this Program in connection with Free/Libre
* Open Source Software ("FLOSS") applications as described in Silverpeas's
* FLOSS exception. You should have received a copy of the text describing
* the FLOSS exception, and it is also available here:
* "https://www.silverpeas.org/legal/floss_exception.html"
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.silverpeas.core.pdc.pdc.model;
import org.silverpeas.core.pdc.pdc.service.PdcManager;
import org.silverpeas.core.pdc.tree.model.TreeNode;
import org.silverpeas.core.persistence.datasource.model.CompositeEntityIdentifier;
import org.silverpeas.core.persistence.datasource.model.jpa.BasicJpaEntity;
import javax.persistence.Entity;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Transient;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.Spliterator;
import java.util.function.Consumer;
/**
* A value of one of the PdC's axis. A value belongs to an axis. An axis represents a given concept
* for which it defines an hierarchic tree of semantic terms belonging to the concept. A value of
* an axis is then the path from the axis origin down to a given node of the tree, where each node
* is a term refining or specifying the parent term a little more. For example, for an axis
* representing the concept of geography, one possible value can be
* "France / Rhônes-Alpes / Isère / Grenoble" where France, Rhônes-Alpes, Isère and Grenoble are
* each a term (thus a node) in the axis.
* "France" is another value, parent of the above one, and that is also a base value of the axis as
* it has no parent (one of the root values of the axis).
*/
@Entity
@Table(name = "pdcaxisvalue")
@NamedQuery(name = "findByAxisId", query = "from PdcAxisValue where axisId = :axisId")
public class PdcAxisValue extends BasicJpaEntity<PdcAxisValue, PdcAxisValuePk> {
private static final long serialVersionUID = 2345886411781136417L;
// Cached tree node backing this value; lazily loaded via the PdC manager.
@Transient
private transient TreeNode treeNode;
// Cached chain of parent nodes, from the axis root down to the direct parent.
@Transient
private transient TreeNodeList treeNodeParents = new TreeNodeList();

/** Required by JPA; build instances through the static factory methods. */
protected PdcAxisValue() {
}

/**
 * Creates a value of a PdC's axis from the specified tree node. Currently, an axis of the PdC is
 * persisted as an hierarchical tree in which each node is a value of the axis. The parent chain
 * is resolved eagerly through the PdC manager.
 * @param treeNode the current persistence representation of the axis value.
 * @return a PdC axis value.
 * @throws PdcRuntimeException wrapping any {@code PdcException} from the manager.
 */
public static PdcAxisValue aPdcAxisValueFromTreeNode(final TreeNode treeNode) {
    try {
        List<? extends TreeNode> parents = null;
        if (treeNode.hasFather()) {
            PdcManager pdcManager = getPdcManager();
            parents = pdcManager.getFullPath(treeNode.getFatherId(), treeNode.getTreeId());
        }
        return new PdcAxisValue().fromTreeNode(treeNode).withAsTreeNodeParents(parents).
            inAxisId(treeNode.getTreeId());
    } catch (PdcException ex) {
        throw new PdcRuntimeException(ex);
    }
}

/**
 * Creates a value of a PdC's axis from the specified identifiers. The composite id is built as
 * "valueId&lt;SEP&gt;axisId"; no tree node is loaded at this point (lazy).
 * @param valueId the unique identifier of the existing value.
 * @param axisId the unique identifier of the axis the value belongs to.
 * @return a PdC axis value.
 */
public static PdcAxisValue aPdcAxisValue(String valueId, String axisId) {
    return new PdcAxisValue().setId(
        valueId + CompositeEntityIdentifier.COMPOSITE_SEPARATOR + axisId);
}
/**
 * Gets the unique identifier of the axis to which this value belongs.
 * @return the axis identifier as a string.
 */
public String getAxisId() {
    return getNativeId().getAxisId().toString();
}

/**
 * Gets the unique value identifier (within its axis).
 * @return the value identifier as a string.
 */
public String getValueId() {
    return getNativeId().getValueId().toString();
}

/**
 * Gets all the values into which this one can be refined. These values are the children of this
 * one in the semantic tree represented by the axis; they are fetched lazily from the PdC manager.
 * @return an unmodifiable set of child values. If this value is a leaf, an empty set is returned.
 * @throws PdcRuntimeException wrapping any {@code PdcException} from the manager.
 */
public Set<PdcAxisValue> getChildValues() {
    try {
        Set<PdcAxisValue> children = new HashSet<>();
        List<String> childNodeIds = getPdcManager().getDaughterValues(getAxisId(), getValueId());
        for (String aNodeId : childNodeIds) {
            children.add(aPdcAxisValue(aNodeId, getAxisId()));
        }
        return Collections.unmodifiableSet(children);
    } catch (PdcException ex) {
        throw new PdcRuntimeException(ex);
    }
}
/**
 * Gets the value this one refines a little more, i.e. its parent in the semantic tree of the
 * axis. The parent is rebuilt from the cached parent-node chain (last element = direct parent).
 * @return the parent axis value, or null if this value has no parent (it is a base value).
 */
public PdcAxisValue getParentValue() {
    final PdcAxisValue parent;
    TreeNode node = getTreeNode();
    if (node.hasFather()) {
        int lastNodeIndex = treeNodeParents.size() - 1;
        TreeNode aTreeNode = treeNodeParents.get(lastNodeIndex);
        String valueId = aTreeNode.getPK().getId();
        String axisId = getAxisId();
        PdcAxisValue pdcAxisValue = new PdcAxisValue().setId(
            valueId + CompositeEntityIdentifier.COMPOSITE_SEPARATOR + axisId);
        // The parent's own parent chain is this chain minus its last element.
        parent =
            pdcAxisValue.fromTreeNode(aTreeNode).inAxisId(getAxisId())
                .withAsTreeNodeParents(treeNodeParents.subList(0, lastNodeIndex));
    } else {
        parent = null;
    }
    return parent;
}

/**
 * Gets the term carried by this value (the underlying tree node's name).
 * @return the term of the value.
 */
public String getTerm() {
    return getTreeNode().getName();
}

/**
 * Gets the term carried by this value, translated in the specified language.
 * @param language the language in which the term should be translated.
 * @return the translated term; falls back to the default term when no translation exists.
 */
public String getTermTranslatedIn(String language) {
    return getTreeNode().getName(language);
}

/**
 * Is this value a base one (a root value of the axis)?
 * @return true if this value is an axis base value.
 */
public boolean isBaseValue() {
    // as the root in the tree represents the axis itself, a base value is a direct child of the
    // root, hence at most one parent in the chain.
    return getTreeNodeParents().size() <= 1;
}
/**
 * Gets the meaning carried by this value: the complete slash-separated path of terms from the
 * base value down to this one. For example, in a geography axis the meaning of the value
 * "Isère" could be "France / Rhônes-Alpes / Isère".
 * @return the complete term path of this value.
 */
public String getMeaning() {
    return getMeaningTranslatedIn("");
}

/**
 * Gets the meaning carried by this value with each term translated in the specified language.
 * Built recursively by prepending the parent's translated meaning.
 * @param language the translation language; null or "" yields the default terms.
 * @return the translated term path; equivalent to getMeaning() when no translations exist.
 */
public String getMeaningTranslatedIn(String language) {
    final String meaning;
    final String theLanguage = (language == null ? "" : language);
    PdcAxisValue theParent = getParentValue();
    if (theParent != null) {
        meaning = theParent.getMeaningTranslatedIn(theLanguage) + " / ";
    } else {
        meaning = "";
    }
    return meaning + getTerm();
}

/**
 * Gets the path of this value from the axis root, made up of the identifiers of each parent
 * value followed by this value's own id; for example: /0/2/3
 * @return the identifier path of this value.
 */
public String getValuePath() {
    return getTreeNode().getPath() + getValueId();
}
/**
 * Copies this value into another one. The copy shares the cached tree node and parent chain
 * with this instance (shallow copy of the transient caches).
 * @return a copy of this PdC axis value.
 */
protected PdcAxisValue copy() {
    PdcAxisValue copy = PdcAxisValue.aPdcAxisValue(getValueId(), getAxisId());
    copy.treeNode = treeNode;
    copy.treeNodeParents = treeNodeParents;
    return copy;
}

/**
 * Gets the axis to which this value belongs, configured for use in content classification.
 * The axis header is fetched and attached to the returned UsedAxis.
 * @return a configured UsedAxis.
 * @throws PdcRuntimeException wrapping any {@code PdcException} from the manager.
 */
protected UsedAxis getUsedAxis() {
    try {
        PdcManager pdc = getPdcManager();
        UsedAxis usedAxis = pdc.getUsedAxis(getAxisId());
        AxisHeader axisHeader = pdc.getAxisHeader(getAxisId());
        usedAxis._setAxisHeader(axisHeader);
        usedAxis._setAxisName(axisHeader.getName());
        return usedAxis;
    } catch (PdcException ex) {
        throw new PdcRuntimeException(ex);
    }
}

/**
 * Gets the persisted representation of this axis value, loading it (and the parent chain)
 * lazily on first access.
 * @return the tree node backing this axis value.
 */
protected TreeNode getTreeNode() {
    if (this.treeNode == null || (this.treeNodeParents == null && this.treeNode.hasFather())) {
        loadTreeNodes();
    }
    return this.treeNode;
}

// Sets the value part of the composite identifier.
protected void setId(long id) {
    getNativeId().setValueId(id);
}

// Fluent setter for the value identifier.
protected PdcAxisValue withId(String id) {
    getNativeId().setValueId(Long.valueOf(id));
    return this;
}

// Fluent setter for the axis identifier.
protected PdcAxisValue inAxisId(String axisId) {
    getNativeId().setAxisId(Long.valueOf(axisId));
    return this;
}

// Fluent setter binding this value to its persisted tree node (also sets the value id).
protected PdcAxisValue fromTreeNode(final TreeNode treeNode) {
    getNativeId().setValueId(Long.valueOf(treeNode.getPK().getId()));
    this.treeNode = treeNode;
    return this;
}

// Fluent setter for the cached chain of parent tree nodes.
protected PdcAxisValue withAsTreeNodeParents(final List<? extends TreeNode> parents) {
    this.treeNodeParents.setAll(parents);
    return this;
}
/**
 * Two axis values are equal when they belong to the same class and both the value identifier
 * and the axis identifier of their composite ids match (null-safe comparison).
 */
@Override
public boolean equals(Object obj) {
    if (obj == null) {
        return false;
    }
    if (getClass() != obj.getClass()) {
        return false;
    }
    final PdcAxisValue other = (PdcAxisValue) obj;
    if (this.getNativeId().getValueId() != other.getNativeId().getValueId() &&
        (this.getNativeId().getValueId() == null ||
        !this.getNativeId().getValueId().equals(other.getNativeId().getValueId()))) {
        return false;
    }
    // FIX: the axis identifiers must MATCH for equality. The previous code returned true when
    // they were different (inverted negation), so values from distinct axes compared equal
    // while equal non-identical axis ids compared unequal — breaking the equals contract and
    // the consistency with hashCode().
    return this.getNativeId().getAxisId() == other.getNativeId().getAxisId() ||
        (this.getNativeId().getAxisId() != null &&
        this.getNativeId().getAxisId().equals(other.getNativeId().getAxisId()));
}
@Override
public int hashCode() {
int hash = 5;
hash = 89 * hash +
(this.getNativeId().getValueId() != null ? this.getNativeId().getValueId().hashCode() : 0);
hash = 89 * hash +
(this.getNativeId().getAxisId() != null ? this.getNativeId().getAxisId().hashCode() : 0);
return hash;
}
  /**
   * Returns the empty string.
   * NOTE(review): this looks like a stub — presumably the value's term or path was
   * meant to be rendered here; confirm before relying on toString() output anywhere.
   */
  @Override
  public String toString() {
    return "";
  }
/**
* Converts this PdC axis value to a ClassifyValue instance. This method is for compatibility
* with the old way to manage the classification.
* @return a ClassifyValue instance.
* @throws PdcException if an error occurs while transforming this value into a ClassifyValue
* instance.
*/
public ClassifyValue toClassifyValue() {
ClassifyValue value = new ClassifyValue(Integer.valueOf(getAxisId()), getValuePath() + "/");
List<Value> fullPath = new ArrayList<>();
for (TreeNode aTreeNode : getTreeNodeParents()) {
fullPath.add(new Value(aTreeNode.getPK().getId(), aTreeNode.getTreeId(), aTreeNode.getName(),
aTreeNode.getDescription(), aTreeNode.getCreationDate(), aTreeNode.getCreatorId(),
aTreeNode.getPath(), aTreeNode.getLevelNumber(), aTreeNode.
getOrderNumber(), aTreeNode.getFatherId()));
}
TreeNode lastValue = getTreeNode();
fullPath.add(new Value(lastValue.getPK().getId(), lastValue.getTreeId(), lastValue.getName(),
lastValue.getDescription(), lastValue.getCreationDate(), lastValue.getCreatorId(),
lastValue.getPath(), lastValue.getLevelNumber(), lastValue.getOrderNumber(),
lastValue.getFatherId()));
value.setFullPath(fullPath);
return value;
}
protected TreeNodeList getTreeNodeParents() {
if (this.treeNodeParents == null) {
loadTreeNodes();
}
return this.treeNodeParents;
}
private void loadTreeNodes() {
try {
PdcManager pdc = getPdcManager();
String treeId = pdc.getTreeId(getAxisId());
List<? extends TreeNode> paths = pdc.getFullPath(getValueId(), treeId);
int lastNodeIndex = paths.size() - 1;
this.treeNode = paths.get(lastNodeIndex);
this.treeNodeParents.setAll(paths.subList(0, lastNodeIndex));
} catch (PdcException ex) {
throw new PdcRuntimeException(ex);
}
}
  /**
   * Gets the PdcManager service through which the PdC is queried.
   * @return the current PdcManager instance.
   */
  private static PdcManager getPdcManager() {
    return PdcManager.get();
  }
private class TreeNodeList implements Iterable<TreeNode> {
private final List<TreeNode> treeNodes = new ArrayList<>();
public int size() {
return treeNodes.size();
}
public TreeNode get(final int index) {
return treeNodes.get(index);
}
public List<TreeNode> subList(final int fromIndex, final int toIndex) {
return treeNodes.subList(fromIndex, toIndex);
}
public void setAll(final Collection<? extends TreeNode> nodes) {
this.treeNodes.clear();
this.treeNodes.addAll(nodes);
}
@Override
public Iterator<TreeNode> iterator() {
return this.treeNodes.iterator();
}
@Override
public void forEach(final Consumer<? super TreeNode> action) {
this.treeNodes.forEach(action);
}
@Override
public Spliterator<TreeNode> spliterator() {
return this.treeNodes.spliterator();
}
}
}
| SilverDav/Silverpeas-Core | core-services/pdc/src/main/java/org/silverpeas/core/pdc/pdc/model/PdcAxisValue.java | Java | agpl-3.0 | 15,496 |
package io.github.jhg543.mellex.operation;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import io.github.jhg543.mellex.ASTHelper.*;
import io.github.jhg543.mellex.antlrparser.DefaultSQLBaseListener;
import io.github.jhg543.mellex.antlrparser.DefaultSQLLexer;
import io.github.jhg543.mellex.antlrparser.DefaultSQLParser;
import io.github.jhg543.mellex.antlrparser.DefaultSQLParser.Sql_stmtContext;
import io.github.jhg543.mellex.inputsource.BasicTableDefinitionProvider;
import io.github.jhg543.mellex.inputsource.TableDefinitionProvider;
import io.github.jhg543.mellex.listeners.ColumnDataFlowListener;
import io.github.jhg543.mellex.util.Misc;
import io.github.jhg543.nyallas.graphmodel.*;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Predicate;
/**
 * Runs column-level data-flow analysis over a tree of SQL/Perl scripts and writes the
 * resulting column lineage edges to disk: per script a raw edge list ("out") and a list
 * with volatile (temporary) tables collapsed out of the graph ("novt"), plus global
 * statistics and a script-number index.
 */
public class StringEdgePrinter {

    private static final Logger log = LoggerFactory.getLogger(StringEdgePrinter.class);

    // Per-script return codes of printSingleFile(); they also index the stats[] histogram.
    private static final int ERR_NOSQL = 1;    // no sql could be extracted from the file
    private static final int ERR_PARSE = 2;    // antlr failed to parse the extracted sql
    private static final int ERR_SEMANTIC = 3; // data-flow (semantic) analysis failed
    private static final int ERR_OK = 0;

    /**
     * Analyses one script file and writes its artifacts under dstdir: "log" (diagnostics),
     * "sql" (the extracted sql), "out" (raw column edges) and "novt" (edges after volatile
     * tables have been removed).
     *
     * @param srcdir the script file to analyse (a file path, despite the name).
     * @param dstdir directory receiving the artifacts; created if missing.
     * @param scriptNumber unique number identifying this script in the global outputs.
     * @param tp table definition provider shared across scripts; its volatile tables are
     *           cleared at the end of each successful run.
     * @return one of the ERR_* codes.
     */
    private static int printSingleFile(Path srcdir, Path dstdir, int scriptNumber, TableDefinitionProvider tp) {
        // generate a hash to mark vt table names
        String srcHash = Integer.toHexString(srcdir.hashCode());
        // create destination dir
        try {
            Files.createDirectories(dstdir);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        try (PrintWriter err = new PrintWriter(dstdir.resolve("log").toAbsolutePath().toString(), "utf-8")) {
            // trim perl code
            String sql = Misc.trimPerlScript(srcdir, StandardCharsets.UTF_8);
            if (sql == null) {
                err.println("Can't extract sql from file " + srcdir.toString());
                return ERR_NOSQL;
            }
            // log actual sql statement (for correct line numbers in later diagnostics)
            try (PrintWriter writer = new PrintWriter(dstdir.resolve("sql").toAbsolutePath().toString(), "utf-8")) {
                writer.append(sql);
            }
            // antlr parse
            AtomicInteger errorCount = new AtomicInteger();
            ANTLRInputStream in = new ANTLRInputStream(sql);
            DefaultSQLLexer lexer = new DefaultSQLLexer(in);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            DefaultSQLParser parser = new DefaultSQLParser(tokens);
            parser.removeErrorListeners();
            parser.addErrorListener(new BaseErrorListener() {
                @Override
                public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine,
                        String msg, RecognitionException e) {
                    err.println("line" + line + ":" + charPositionInLine + "at" + offendingSymbol + ":" + msg);
                    errorCount.incrementAndGet();
                }
            });
            err.println("-------Parse start---------");
            ParseTree tree = null;
            try {
                tree = parser.parse();
                if (errorCount.get() > 0) {
                    return ERR_PARSE;
                }
            } catch (Exception e) {
                e.printStackTrace(err);
                return ERR_PARSE;
            }
            err.println("-------Parse OK, Semantic Analysis start --------");
            ParseTreeWalker w = new ParseTreeWalker();
            try {
                ColumnDataFlowListener s = new ColumnDataFlowListener(tp, tokens);
                w.walk(s, tree);
            } catch (Exception e) {
                e.printStackTrace(err);
                return ERR_SEMANTIC;
            }
            err.println("-------Semantic OK, Writing result --------");
            // Remove volatile tables
            VolatileTableRemover graph = new VolatileTableRemover();
            // DAG dag = new DAG();
            // ZeroBasedStringIdGenerator ids = new
            // ZeroBasedStringIdGenerator();
            Map<String, Vertex<String, Integer>> vmap = new HashMap<>();
            // Output result and initialize volatile tables removal process
            try (PrintWriter out = new PrintWriter(dstdir.resolve("out").toAbsolutePath().toString(), "utf-8")) {
                out.println("ScriptID StmtID StmtType DestCol SrcCol ConnectionType");
                String template = "%d %d %s %s.%s %s.%s %d\n";
                DefaultSQLBaseListener pr = new DefaultSQLBaseListener() {
                    int stmtNumber = 0;

                    @Override
                    public void exitSql_stmt(Sql_stmtContext ctx) {
                        super.exitSql_stmt(ctx);
                        // Classify the statement: Insert, Create-table-as, View or Update.
                        String stmtType = null;
                        SubQuery q = null;
                        if (ctx.insert_stmt() != null) {
                            stmtType = "I";
                            q = ctx.insert_stmt().stmt;
                        }
                        if (ctx.create_table_stmt() != null) {
                            if (ctx.create_table_stmt().insert != null) {
                                stmtType = "C";
                                q = ctx.create_table_stmt().insert;
                            }
                        }
                        if (ctx.create_view_stmt() != null) {
                            stmtType = "V";
                            q = ctx.create_view_stmt().insert;
                        }
                        if (ctx.update_stmt() != null) {
                            stmtType = "U";
                            q = ctx.update_stmt().q;
                        }
                        if (q != null) {
                            // what's vt's scope?
                            Set<String> vts = tp.getVolatileTables().keySet();
                            String dstTable = q.dbobj.toDotString();
                            boolean isDstVT = vts.contains(dstTable);
                            if (isDstVT) {
                                dstTable = "VT_" + srcHash + "_" + dstTable;
                            }
                            for (ResultColumn c : q.columns) {
                                for (InfSource source : c.inf.getSources()) {
                                    ObjectName srcname = source.getSourceObject();
                                    String srcTable = srcname.toDotStringExceptLast();
                                    boolean isSrcVT = vts.contains(srcTable);
                                    if (isSrcVT) {
                                        srcTable = "VT_" + srcHash + "_" + srcTable;
                                    }
                                    out.append(String.format(template, scriptNumber, stmtNumber, stmtType, dstTable, c.name,
                                            srcTable, srcname.toDotStringLast(), source.getConnectionType().getMarker()));
                                    // collapse volatile table
                                    String dst = dstTable + "." + c.name;
                                    String src = srcTable + "." + srcname.toDotStringLast();
                                    // Integer dstnum = ids.queryNumber(dst);
                                    // Integer srcnum = ids.queryNumber(src);
                                    Vertex<String, Integer> srcv;
                                    srcv = vmap.get(src);
                                    if (srcv == null) {
                                        srcv = graph.addVertex(BasicVertex::new);
                                        vmap.put(src, srcv);
                                        srcv.setVertexData(src);
                                        if (isSrcVT) {
                                            // marker 0 flags the vertex as a volatile table column
                                            srcv.setMarker(0);
                                        }
                                    }
                                    Vertex<String, Integer> dstv;
                                    dstv = vmap.get(dst);
                                    if (dstv == null) {
                                        dstv = graph.addVertex(BasicVertex::new);
                                        vmap.put(dst, dstv);
                                        dstv.setVertexData(dst);
                                        if (isDstVT) {
                                            dstv.setMarker(0);
                                        }
                                    }
                                    Edge<String, Integer> edge = new BasicEdge<String, Integer>(srcv, dstv);
                                    edge.setEdgeData(source.getConnectionType().getMarker());
                                    graph.addEdge(edge);
                                }
                            }
                        } else {
                            // log.warn("query null for sm " + stmtNumber);
                        }
                        stmtNumber++;
                    }
                };
                w.walk(pr, tree);
            }
            // Int2ObjectMap<Node> collapsed = dag.collapse(scriptNumber);
            graph.remove();
            // write result (with volatile tables removed)
            try (PrintWriter out = new PrintWriter(dstdir.resolve("novt").toAbsolutePath().toString(), "utf-8")) {
                out.println("scriptid,dstsch,dsttbl,dstcol,srcsch,srctbl,srccol,contype");
                String template = "%d,%s,%s,%s,%s,%s,%s,%d\n";
                for (Vertex<String, Integer> v : graph.getVertexes()) {
                    for (Edge<String, Integer> e : v.getOutgoingEdges()) {
                        String dst = e.getTarget().getVertexData();
                        String src = e.getSource().getVertexData();
                        // A 2-part name has no schema; prepend a synthetic per-script one.
                        List<String> t1 = Splitter.on('.').splitToList(dst);
                        if (t1.size() == 2) {
                            t1 = new ArrayList<String>(t1);
                            t1.add(0, "3X_NOSCHEMA_" + scriptNumber);
                        }
                        List<String> t2 = Splitter.on('.').splitToList(src);
                        if (t2.size() == 2) {
                            // BUG FIX: this previously copied t1 (the destination parts), so
                            // schema-less SOURCE columns were written with the DESTINATION's
                            // table/column name. Copy the source parts instead.
                            t2 = new ArrayList<String>(t2);
                            t2.add(0, "3X_NOSCHEMA_" + scriptNumber);
                        }
                        out.append(String.format(template, scriptNumber, t1.get(0), t1.get(1), t1.get(2), t2.get(0), t2.get(1),
                                t2.get(2), e.getEdgeData()));
                    }
                }
            }
            tp.clearVolatileTables();
            err.println("-------Success --------");
            return 0;
        } catch (FileNotFoundException | UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Walks srcdir, analyses every script accepted by filefilter and writes global outputs
     * under dstdir: "stats" (per-error-code counts and failed scripts), "cols" (all known
     * permanent table columns) and "number" (script-number to path index).
     *
     * @param srcdir root directory of the scripts; must exist.
     * @param dstdir output directory; created if missing.
     * @param filefilter selects which files are analysed.
     * @param scriptNumberStart first script number to assign.
     * @param caseSensitive whether SQL identifiers are treated case-sensitively.
     * @return histogram of return codes, indexed by the ERR_* constants.
     */
    public static int[] printStringEdge(Path srcdir, Path dstdir, Predicate<Path> filefilter, int scriptNumberStart,
            boolean caseSensitive) {
        // ensure directories exist
        Preconditions.checkState(Files.isDirectory(srcdir));
        try {
            Files.createDirectories(dstdir);
        } catch (IOException e1) {
            throw new RuntimeException(e1);
        }
        // set up variables
        GlobalSettings.setCaseSensitive(caseSensitive);
        AtomicInteger scriptNumber = new AtomicInteger(scriptNumberStart);
        TableDefinitionProvider tp = new BasicTableDefinitionProvider(Misc::nameSym);
        int[] stats = new int[10];
        // open global output files
        try (PrintWriter out = new PrintWriter(dstdir.resolve("stats").toAbsolutePath().toString(), "utf-8");
                PrintWriter cols = new PrintWriter(dstdir.resolve("cols").toAbsolutePath().toString(), "utf-8");
                PrintWriter numbers = new PrintWriter(dstdir.resolve("number").toAbsolutePath().toString(), "utf-8")) {
            // for each file
            Files.walk(srcdir).filter(filefilter).sorted().forEach(path -> {
                int sn = scriptNumber.getAndIncrement();
                numbers.println("" + sn + " " + path.toString());
                String srcHash = Integer.toHexString(path.hashCode());
                Path workdir = dstdir.resolve(path.getFileName()).resolve(srcHash);
                // deal with single files.
                int retcode = printSingleFile(path, workdir, sn, tp);
                if (retcode > 0) {
                    // only failures are listed individually in the stats file
                    out.println(String.format("%s %d %d", path.toString(), retcode, sn));
                }
                stats[retcode]++;
            });
            out.println("OK=" + stats[ERR_OK]);
            out.println("NOSQL=" + stats[ERR_NOSQL]);
            out.println("PARSE=" + stats[ERR_PARSE]);
            out.println("SEMANTIC=" + stats[ERR_SEMANTIC]);
            tp.getPermanentTables().forEach((name, stmt) -> {
                stmt.columns.forEach(colname -> cols.println(name + "." + colname.name));
            });
            return stats;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    public static void main(String[] args) throws Exception {
        // Accept only .sql/.pl files that live directly under a BIN directory
        // (note: the path check is Windows-specific because of the '\\' separator).
        Predicate<Path> filefilter = x -> Files.isRegularFile(x)
                && (x.getFileName().toString().toLowerCase().endsWith(".sql") || x.getFileName().toString().toLowerCase()
                        .endsWith(".pl"))
                && x.toString().toUpperCase().endsWith("BIN\\" + x.getFileName().toString().toUpperCase());
        // printStringEdge(Paths.get("d:/dataflow/work1/script/mafixed"),
        // Paths.get("d:/dataflow/work2/mares"), filefilter, 0, false);
        printStringEdge(Paths.get("d:/dataflow/work1/debug"), Paths.get("d:/dataflow/work2/debugres"), filefilter, 0, false);
        // printStringEdge(Paths.get("d:/dataflow/work1/f1/sor"),
        // Paths.get("d:/dataflow/work2/result2/sor"), filefilter, 0, false);
    }
}
| jhg543/mellex | src/main/java/io/github/jhg543/mellex/operation/StringEdgePrinter.java | Java | agpl-3.0 | 11,043 |
require 'open_food_network/referer_parser'
module Admin
  # Admin CRUD and bulk management for enterprises. Extends Spree's ResourceController,
  # overriding resource lookup (permalink-based) and adding registration, bulk update
  # and order-cycle-specific listings. Most before_filters strip parameters that the
  # current user is not allowed to change.
  class EnterprisesController < ResourceController
    before_filter :load_enterprise_set, :only => :index
    before_filter :load_countries, :except => [:index, :register, :check_permalink]
    before_filter :load_methods_and_fees, :only => [:new, :edit, :update, :create]
    before_filter :load_groups, :only => [:new, :edit, :update, :create]
    before_filter :load_taxons, :only => [:new, :edit, :update, :create]
    before_filter :check_can_change_sells, only: :update
    before_filter :check_can_change_bulk_sells, only: :bulk_update
    before_filter :override_owner, only: :create
    before_filter :override_sells, only: :create
    before_filter :check_can_change_owner, only: :update
    before_filter :check_can_change_bulk_owner, only: :bulk_update
    before_filter :check_can_change_managers, only: :update
    before_filter :strip_new_properties, only: [:create, :update]
    before_filter :load_properties, only: [:edit, :update]
    before_filter :setup_property, only: [:edit]

    helper 'spree/products'
    include ActionView::Helpers::TextHelper
    include OrderCyclesHelper

    # Lists editable enterprises; JSON is used by the admin front-end.
    def index
      respond_to do |format|
        format.html
        format.json do
          render json: @collection, each_serializer: Api::Admin::IndexEnterpriseSerializer, spree_current_user: spree_current_user
        end
      end
    end

    # Post-signup landing page, rendered without the admin chrome.
    def welcome
      render layout: "spree/layouts/bare_admin"
    end

    # Same flow as Spree's ResourceController#update, plus JSON responses for
    # both the success and the validation-failure case.
    def update
      invoke_callbacks(:update, :before)
      if @object.update_attributes(params[object_name])
        invoke_callbacks(:update, :after)
        flash[:success] = flash_message_for(@object, :successfully_updated)
        respond_with(@object) do |format|
          format.html { redirect_to location_after_save }
          format.js   { render :layout => false }
          format.json { render json: @object, serializer: Api::Admin::IndexEnterpriseSerializer, spree_current_user: spree_current_user }
        end
      else
        invoke_callbacks(:update, :fails)
        respond_with(@object) do |format|
          format.json { render json: { errors: @object.errors.messages }, status: :unprocessable_entity }
        end
      end
    end

    # Completes registration by recording the chosen package ('sells' value).
    # Choosing a selling package also starts the shop trial, if not already started.
    def register
      if params[:sells] == 'unspecified'
        flash[:error] = "Please select a package"
        return render :welcome, layout: "spree/layouts/bare_admin"
      end

      attributes = { sells: params[:sells], visible: true }
      if ['own', 'any'].include? params[:sells]
        attributes[:shop_trial_start_date] = @enterprise.shop_trial_start_date || Time.now
      end

      if @enterprise.update_attributes(attributes)
        flash[:success] = "Congratulations! Registration for #{@enterprise.name} is complete!"
        redirect_to admin_path
      else
        flash[:error] = "Could not complete registration for #{@enterprise.name}"
        render :welcome, layout: "spree/layouts/bare_admin"
      end
    end

    # Saves the whole enterprise list in one form submission (admin index page).
    def bulk_update
      @enterprise_set = EnterpriseSet.new(collection, params[:enterprise_set])
      touched_enterprises = @enterprise_set.collection.select(&:changed?)
      if @enterprise_set.save
        flash[:success] = "Enterprises updated successfully"

        # 18-3-2015: It seems that the form for this action sometimes loads bogus values for
        # the 'sells' field, and submitting that form results in a bunch of enterprises with
        # values that have mysteriously changed. This statement is here to help debug that
        # issue, and should be removed (along with its display in index.html.haml) when the
        # issue has been resolved.
        flash[:action] = "Updated #{pluralize(touched_enterprises.count, 'enterprise')}: #{touched_enterprises.map(&:name).join(', ')}"

        redirect_to main_app.admin_enterprises_path
      else
        # Re-render only the rows the user actually changed.
        @enterprise_set.collection.select! { |e| touched_enterprises.include? e }
        flash[:error] = 'Update failed'
        render :index
      end
    end

    # JSON list of the enterprises visible to the user for a given order cycle
    # (existing cycle, or a new one for a given coordinator).
    def for_order_cycle
      respond_to do |format|
        format.json do
          render json: ActiveModel::ArraySerializer.new( @collection,
            each_serializer: Api::Admin::ForOrderCycle::EnterpriseSerializer, spree_current_user: spree_current_user
          ).to_json
        end
      end
    end

    protected

    # Ensures a new enterprise comes with an address preset to the default country.
    def build_resource_with_address
      enterprise = build_resource_without_address
      enterprise.address = Spree::Address.new
      enterprise.address.country = Spree::Country.find_by_id(Spree::Config[:default_country_id])
      enterprise
    end
    alias_method_chain :build_resource, :address

    # Overriding method on Spree's resource controller,
    # so that resources are found using permalink
    def find_resource
      Enterprise.find_by_permalink(params[:id])
    end

    private

    def load_enterprise_set
      @enterprise_set = EnterpriseSet.new(collection) if spree_current_user.admin?
    end

    def load_countries
      @countries = Spree::Country.order(:name)
    end

    # Scopes the collection per action according to the user's permissions.
    def collection
      case action
      when :for_order_cycle
        order_cycle = OrderCycle.find_by_id(params[:order_cycle_id]) if params[:order_cycle_id]
        coordinator = Enterprise.find_by_id(params[:coordinator_id]) if params[:coordinator_id]
        order_cycle = OrderCycle.new(coordinator: coordinator) if order_cycle.nil? && coordinator.present?
        return OpenFoodNetwork::OrderCyclePermissions.new(spree_current_user, order_cycle).visible_enterprises
      when :index
        if spree_current_user.admin?
          OpenFoodNetwork::Permissions.new(spree_current_user).
            editable_enterprises.
            order('is_primary_producer ASC, name')
        elsif json_request?
          OpenFoodNetwork::Permissions.new(spree_current_user).editable_enterprises
        else
          # NOTE(review): the trailing `unless json_request?` is always true in this
          # branch (the elsif above already handled json) — presumably leftover; confirm.
          Enterprise.where("1=0") unless json_request?
        end
      else
        # TODO was ordered with is_distributor DESC as well, not sure why or how we want to sort this now
        OpenFoodNetwork::Permissions.new(spree_current_user).
          editable_enterprises.
          order('is_primary_producer ASC, name')
      end
    end

    def collection_actions
      [:index, :for_order_cycle, :bulk_update]
    end

    # Sorts the enterprise's own methods/fees first so they top the checkbox lists.
    def load_methods_and_fees
      @payment_methods = Spree::PaymentMethod.managed_by(spree_current_user).sort_by!{ |pm| [(@enterprise.payment_methods.include? pm) ? 0 : 1, pm.name] }
      @shipping_methods = Spree::ShippingMethod.managed_by(spree_current_user).sort_by!{ |sm| [(@enterprise.shipping_methods.include? sm) ? 0 : 1, sm.name] }
      @enterprise_fees = EnterpriseFee.managed_by(spree_current_user).for_enterprise(@enterprise).order(:fee_type, :name).all
    end

    def load_groups
      @groups = EnterpriseGroup.managed_by(spree_current_user) | @enterprise.groups
    end

    def load_taxons
      @taxons = Spree::Taxon.order(:name)
    end

    # Non-admins may only change 'sells' on enterprises they own.
    def check_can_change_bulk_sells
      unless spree_current_user.admin?
        params[:enterprise_set][:collection_attributes].each do |i, enterprise_params|
          enterprise_params.delete :sells unless spree_current_user == Enterprise.find_by_id(enterprise_params[:id]).owner
        end
      end
    end

    def check_can_change_sells
      unless spree_current_user.admin? || spree_current_user == @enterprise.owner
        params[:enterprise].delete :sells
      end
    end

    # Non-admins always own the enterprises they create.
    def override_owner
      params[:enterprise][:owner_id] = spree_current_user.id unless spree_current_user.admin?
    end

    # Non-admins: new non-producer enterprises of existing hub owners sell 'any';
    # everything else starts as 'none'.
    def override_sells
      unless spree_current_user.admin?
        has_hub = spree_current_user.owned_enterprises.is_hub.any?
        new_enterprise_is_producer = Enterprise.new(params[:enterprise]).is_primary_producer
        params[:enterprise][:sells] = (has_hub && !new_enterprise_is_producer) ? 'any' : 'none'
      end
    end

    def check_can_change_owner
      unless ( spree_current_user == @enterprise.owner ) || spree_current_user.admin?
        params[:enterprise].delete :owner_id
      end
    end

    def check_can_change_bulk_owner
      unless spree_current_user.admin?
        params[:enterprise_set][:collection_attributes].each do |i, enterprise_params|
          enterprise_params.delete :owner_id
        end
      end
    end

    def check_can_change_managers
      unless ( spree_current_user == @enterprise.owner ) || spree_current_user.admin?
        params[:enterprise].delete :user_ids
      end
    end

    # Non-admins may only assign producer properties that already exist.
    def strip_new_properties
      unless spree_current_user.admin? || params[:enterprise][:producer_properties_attributes].nil?
        names = Spree::Property.pluck(:name)
        params[:enterprise][:producer_properties_attributes].each do |key, property|
          params[:enterprise][:producer_properties_attributes].delete key unless names.include? property[:property_name]
        end
      end
    end

    def load_properties
      @properties = Spree::Property.pluck(:name)
    end

    # Seeds an empty producer property so the edit form renders a blank row.
    def setup_property
      @enterprise.producer_properties.build
    end

    # Overriding method on Spree's resource controller
    # Stay on the edit page when properties were submitted from it; otherwise go to the list.
    def location_after_save
      referer_path = OpenFoodNetwork::RefererParser::path(request.referer)
      refered_from_edit = referer_path == main_app.edit_admin_enterprise_path(@enterprise)
      if params[:enterprise].key?(:producer_properties_attributes) && !refered_from_edit
        main_app.admin_enterprises_path
      else
        main_app.edit_admin_enterprise_path(@enterprise)
      end
    end
  end
end
| stveep/openfoodnetwork | app/controllers/admin/enterprises_controller.rb | Ruby | agpl-3.0 | 9,547 |
package com.neverwinterdp.scribengin.dataflow.example.wire;
import java.util.Properties;
import com.neverwinterdp.message.Message;
import com.neverwinterdp.scribengin.dataflow.DataSet;
import com.neverwinterdp.scribengin.dataflow.Dataflow;
import com.neverwinterdp.scribengin.dataflow.DataflowDescriptor;
import com.neverwinterdp.scribengin.dataflow.DataflowSubmitter;
import com.neverwinterdp.scribengin.dataflow.KafkaDataSet;
import com.neverwinterdp.scribengin.dataflow.KafkaWireDataSetFactory;
import com.neverwinterdp.scribengin.dataflow.Operator;
import com.neverwinterdp.scribengin.shell.ScribenginShell;
import com.neverwinterdp.storage.kafka.KafkaStorageConfig;
import com.neverwinterdp.util.JSONSerializer;
import com.neverwinterdp.vm.client.VMClient;
/**
 * Builds and submits an example Scribengin dataflow that demonstrates "wire" datasets:
 * a splitter operator fans messages out to an odd and an even operator, which both
 * persist to the same Kafka output topic. Intermediate hops between operators are
 * backed by Kafka topics created through a KafkaWireDataSetFactory.
 */
public class ExampleWireDataflowSubmitter {
  // Unique id of the dataflow (required by Scribengin).
  private String dataflowID;
  // Kafka replication factor for the dataflow's topics.
  private int defaultReplication;
  // Number of DataStreams to deploy.
  private int defaultParallelism;
  // Number of workers (YARN containers).
  private int numOfWorker;
  // Executors (threads) per worker.
  private int numOfExecutorPerWorker;
  // Kafka input and output topic names.
  private String inputTopic;
  private String outputTopic;

  // Shell used to talk to Scribengin; submitter created on submitDataflow().
  private ScribenginShell shell;
  private DataflowSubmitter submitter;

  // Local path of the example app and the DFS path it is uploaded to.
  private String localAppHome;
  private String dfsAppHome;

  /** Convenience constructor using all-default configuration properties. */
  public ExampleWireDataflowSubmitter(ScribenginShell shell){
    this(shell, new Properties());
  }

  /**
   * Constructor - sets shell to access Scribengin and configuration properties
   * @param shell ScribenginShell to connect to Scribengin with
   * @param props Properties to configure the dataflow
   */
  public ExampleWireDataflowSubmitter(ScribenginShell shell, Properties props){
    //This it the shell to communicate with Scribengin with
    this.shell = shell;

    //The dataflow's ID.  All dataflows require a unique ID when running
    dataflowID = props.getProperty("dataflow.id", "WireDataflow");

    //The default replication factor for Kafka
    defaultReplication = Integer.parseInt(props.getProperty("dataflow.replication", "1"));
    //The number of DataStreams to deploy
    defaultParallelism = Integer.parseInt(props.getProperty("dataflow.parallelism", "2"));

    //The number of workers to deploy (i.e. YARN containers)
    numOfWorker = Integer.parseInt(props.getProperty("dataflow.numWorker", "5"));
    //The number of executors per worker (i.e. threads per YARN container)
    numOfExecutorPerWorker = Integer.parseInt(props.getProperty("dataflow.numExecutorPerWorker", "5"));

    //The kafka input topic
    inputTopic = props.getProperty("dataflow.inputTopic", "input.topic");
    //The kafka output topic
    outputTopic = props.getProperty("dataflow.outputTopic", "output.topic");

    //The example hdfs dataflow local location
    localAppHome = props.getProperty("dataflow.localapphome", "N/A");

    //DFS location to upload the example dataflow
    dfsAppHome = props.getProperty("dataflow.dfsAppHome", "/applications/dataflow/splitterexample");
  }

  /**
   * The logic to submit the dataflow
   * @param kafkaZkConnect [host]:[port] of Kafka's Zookeeper conenction
   * @throws Exception
   */
  public void submitDataflow(String kafkaZkConnect) throws Exception{
    //Upload the dataflow to HDFS
    VMClient vmClient = shell.getScribenginClient().getVMClient();
    vmClient.uploadApp(localAppHome, dfsAppHome);

    Dataflow<Message, Message> dfl = buildDataflow(kafkaZkConnect);
    //Get the dataflow's descriptor
    DataflowDescriptor dflDescriptor = dfl.buildDataflowDescriptor();
    //Output the descriptor in human-readable JSON
    System.out.println(JSONSerializer.INSTANCE.toString(dflDescriptor));

    //Ensure all your sources and sinks are up and running first, then...

    //Submit the dataflow and wait until it starts running
    submitter = new DataflowSubmitter(shell.getScribenginClient(), dfl).submit().waitForDataflowRunning(60000);
  }

  /**
   * Wait for the dataflow to complete within the given timeout
   * @param timeout Timeout in ms
   * @throws Exception
   */
  public void waitForDataflowCompletion(int timeout) throws Exception{
    submitter.waitForDataflowStop(timeout);
  }

  /**
   * The logic to build the dataflow configuration
   * The main takeaway between this dataflow and the ExampleSimpleDataflowSubmitter
   * is the use of dfl.useWireDataSetFactory()
   * This factory allows us to tie together operators
   * with Kafka topics between them
   * @param kafkaZkConnect [host]:[port] of Kafka's Zookeeper conenction
   * @return
   */
  public Dataflow buildDataflow(String kafkaZkConnect){
    //Create the new Dataflow object
    // <Message,Message> pertains to the <input,output> object for the data
    Dataflow dfl = new Dataflow(dataflowID);

    //Example of how to set the KafkaWireDataSetFactory
    dfl.
      setDefaultParallelism(defaultParallelism).
      setDefaultReplication(defaultReplication).
      useWireDataSetFactory(new KafkaWireDataSetFactory(kafkaZkConnect));

    dfl.getWorkerDescriptor().setNumOfInstances(numOfWorker);
    dfl.getWorkerDescriptor().setNumOfExecutor(numOfExecutorPerWorker);

    //Define our input source - set name, ZK host:port, and input topic name
    KafkaDataSet<Message> inputDs =
        dfl.createInput(new KafkaStorageConfig("input", kafkaZkConnect, inputTopic));

    //Define our output sink - set name, ZK host:port, and output topic name
    DataSet<Message> outputDs =
        dfl.createOutput(new KafkaStorageConfig("output", kafkaZkConnect, outputTopic));

    //Define which operators to use.
    //This will be the logic that ties the datasets and operators together
    Operator splitter = dfl.createOperator("splitteroperator", SplitterDataStreamOperator.class);
    Operator odd      = dfl.createOperator("oddoperator", PersisterDataStreamOperator.class);
    Operator even     = dfl.createOperator("evenoperator", PersisterDataStreamOperator.class);

    //Send all input to the splitter operator
    inputDs.useRawReader().connect(splitter);

    //The splitter operator then connects to the odd and even operators
    splitter.connect(odd)
            .connect(even);

    //Both the odd and even operator connect to the output dataset
    //  This is arbitrary, we could connect them to any dataset or operator we wanted
    odd.connect(outputDs);
    even.connect(outputDs);

    return dfl;
  }

  // Simple accessors for the configured ids and topic names.
  public String getDataflowID() { return dataflowID; }

  public String getInputTopic() { return inputTopic; }

  public String getOutputTopic() { return outputTopic; }
} | DemandCube/NeverwinterDP | scribengin/dataflow/example/src/main/java/com/neverwinterdp/scribengin/dataflow/example/wire/ExampleWireDataflowSubmitter.java | Java | agpl-3.0 | 6,521 |
package com.gmail.nossr50.commands.party;
import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import com.gmail.nossr50.mcMMO;
import com.gmail.nossr50.commands.CommandHelper;
import com.gmail.nossr50.datatypes.PlayerProfile;
import com.gmail.nossr50.events.chat.McMMOPartyChatEvent;
import com.gmail.nossr50.locale.LocaleLoader;
import com.gmail.nossr50.party.Party;
import com.gmail.nossr50.party.PartyManager;
import com.gmail.nossr50.util.Users;
/**
 * Handles the /p command: with no arguments it toggles party chat for a player; with
 * arguments it sends a chat message to a party, either as a player (to their own party)
 * or from the console (to a named party).
 */
public class PCommand implements CommandExecutor {
    private final mcMMO plugin;

    public PCommand (mcMMO plugin) {
        this.plugin = plugin;
    }

    @Override
    public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
        PlayerProfile profile;
        String usage = ChatColor.RED + "Proper usage is /p <party-name> <message>"; //TODO: Needs more locale.

        if (CommandHelper.noCommandPermissions(sender, "mcmmo.commands.party")) {
            return true;
        }

        switch (args.length) {
        case 0:
            // No arguments: a player toggles party chat (admin chat is mutually exclusive).
            if (sender instanceof Player) {
                profile = Users.getProfile((Player) sender);

                if (profile.getAdminChatMode()) {
                    profile.toggleAdminChat();
                }

                profile.togglePartyChat();

                if (profile.getPartyChatMode()) {
                    sender.sendMessage(LocaleLoader.getString("Commands.Party.Chat.On"));
                }
                else {
                    sender.sendMessage(LocaleLoader.getString("Commands.Party.Chat.Off"));
                }
            }
            else {
                sender.sendMessage(usage);
            }
            return true;

        default:
            if (sender instanceof Player) {
                // A player chats to their own party: all args form the message.
                Player player = (Player) sender;
                Party party = Users.getProfile(player).getParty();

                if (party == null) {
                    player.sendMessage(LocaleLoader.getString("Commands.Party.None"));
                    return true;
                }

                String message = dispatchChatEvent(player.getName(), party.getName(), joinArgs(args, 0));
                if (message == null) {
                    return true; // event was cancelled by another plugin
                }

                String prefix = ChatColor.GREEN + "(" + ChatColor.WHITE + player.getName() + ChatColor.GREEN + ") ";

                plugin.getLogger().info("[P](" + party.getName() + ")" + "<" + player.getName() + "> " + message);

                for (Player member : party.getOnlineMembers()) {
                    member.sendMessage(prefix + message);
                }
            }
            else {
                // Console chats to a named party: args[0] is the party, the rest the message.
                if (args.length < 2) {
                    sender.sendMessage(usage);
                    return true;
                }

                if (!PartyManager.getInstance().isParty(args[0])) {
                    sender.sendMessage(LocaleLoader.getString("Party.InvalidName"));
                    return true;
                }

                String message = dispatchChatEvent("Console", args[0], joinArgs(args, 1));
                if (message == null) {
                    return true; // event was cancelled by another plugin
                }

                String prefix = ChatColor.GREEN + "(" + ChatColor.WHITE + "*Console*" + ChatColor.GREEN + ") ";

                plugin.getLogger().info("[P](" + args[0] + ")" + "<*Console*> " + message);

                for (Player member : PartyManager.getInstance().getOnlineMembers(args[0])) {
                    member.sendMessage(prefix + message);
                }
            }
            return true;
        }
    }

    /**
     * Joins args[start..] into a single space-separated message.
     * (StringBuilder replaces the previous synchronized StringBuffer - this code
     * runs on a single thread.)
     */
    private static String joinArgs(String[] args, int start) {
        StringBuilder buffer = new StringBuilder(args[start]);

        for (int i = start + 1; i < args.length; i++) {
            buffer.append(" ");
            buffer.append(args[i]);
        }

        return buffer.toString();
    }

    /**
     * Fires a McMMOPartyChatEvent so other plugins can veto or rewrite the message.
     * @return the (possibly modified) message, or null if the event was cancelled.
     */
    private String dispatchChatEvent(String senderName, String partyName, String message) {
        McMMOPartyChatEvent chatEvent = new McMMOPartyChatEvent(senderName, partyName, message);
        plugin.getServer().getPluginManager().callEvent(chatEvent);

        if (chatEvent.isCancelled()) {
            return null;
        }

        return chatEvent.getMessage();
    }
}
| javalangSystemwin/mcMMOPlus | src/main/java/com/gmail/nossr50/commands/party/PCommand.java | Java | agpl-3.0 | 4,799 |
import { TYPES } from 'core/app/types'
import { StateManager } from 'core/dialog'
import { ContainerModule, interfaces } from 'inversify'
import { DecisionEngine } from './decision-engine'
import { DialogEngine } from './dialog-engine'
import { FlowService } from './flow/flow-service'
import { FlowNavigator } from './flow/navigator'
import { InstructionFactory } from './instruction/factory'
import { InstructionProcessor } from './instruction/processor'
import { ActionStrategy, TransitionStrategy } from './instruction/strategy'
import { DialogJanitor } from './janitor'
/**
 * Registers the dialog subsystem's services with the Inversify container.
 * Engines, flow helpers, the janitor and the state manager are process-wide
 * singletons; the instruction strategies are rebuilt per resolution request.
 */
export const DialogContainerModule = new ContainerModule((bind: interfaces.Bind) => {
  // Long-lived singletons shared by every dialog session.
  bind<DialogEngine>(TYPES.DialogEngine).to(DialogEngine).inSingletonScope()
  bind<DecisionEngine>(TYPES.DecisionEngine).to(DecisionEngine).inSingletonScope()
  bind<FlowNavigator>(TYPES.FlowNavigator).to(FlowNavigator).inSingletonScope()
  bind<FlowService>(TYPES.FlowService).to(FlowService).inSingletonScope()
  bind<InstructionFactory>(TYPES.InstructionFactory).to(InstructionFactory).inSingletonScope()
  bind<InstructionProcessor>(TYPES.InstructionProcessor).to(InstructionProcessor).inSingletonScope()

  // Strategies carry per-invocation state, so a fresh instance is created
  // each time they are resolved (request scope).
  bind<ActionStrategy>(TYPES.ActionStrategy).to(ActionStrategy).inRequestScope()
  bind<TransitionStrategy>(TYPES.TransitionStrategy).to(TransitionStrategy).inRequestScope()

  bind<DialogJanitor>(TYPES.DialogJanitorRunner).to(DialogJanitor).inSingletonScope()
  bind<StateManager>(TYPES.StateManager).to(StateManager).inSingletonScope()
})
| botpress/botpress | packages/bp/src/core/dialog/dialog.inversify.ts | TypeScript | agpl-3.0 | 1,613 |
/*
uSquare 1.0 - Universal Responsive Grid
Copyright (c) 2012 Br0 (shindiristudio.com)
Project site: http://codecanyon.net/
Project demo: http://shindiristudio.com/usquare/
*/
(function($) {
// One grid cell. Caches the DOM pieces of a single block and wires its
// trigger (open), close button and the shared overlay (close) to handlers.
function uSquareItem(element, options) {
    this.$item = $(element);
    this.$parent = options.$parent;
    this.options = options;
    this.$trigger = this.$(options.trigger);
    this.$close = this.$('.close');
    this.$info = this.$(options.moreInfo);
    this.$trigger_text = this.$trigger.find('.usquare_square_text_wrapper');
    this.$usquare_about = this.$info.find('.usquare_about');
    // Open on square click; close on the X button or a click on the overlay.
    this.$trigger.on('click', $.proxy(this.show, this));
    this.$close.on('click', $.proxy(this.close, this));
    options.$overlay.on('click', $.proxy(this.close, this));
};
// Per-item behaviour: expanding ("show") and collapsing ("close") the
// extended-info panel, including scroll-aware repositioning of the block.
uSquareItem.prototype = {
    // Expand this item's info panel. Re-entrancy is guarded by the parent's
    // 'in_trans' flag (an animation is running) and the item's 'showed' flag.
    show: function(e) {
        e.preventDefault();
        if (!this.$parent.data('in_trans'))
        {
            if (!this.$item.data('showed'))
            {
                this.$parent.data('in_trans', 1);
                this.$item.data('showed', 1);
                if (this.options.before_item_opening_callback) this.options.before_item_opening_callback(this.$item);
                var item_position = this.$item.position();
                var trigger_text_position;
                var this_backup=this;
                var moving=0;
                // If the item sits below the first row, slide it upward so the
                // expanded panel stays in view. Rows hidden above the viewport
                // (page scrolled) shorten the travel distance.
                if (item_position.top>0) // && this.$parent.width()>=640)
                {
                    var parent_position=this.$parent.offset();
                    var parent_top = parent_position.top;
                    var non_visible_area=$(window).scrollTop()-parent_top;
                    var going_to=item_position.top;
                    if (non_visible_area>0)
                    {
                        var non_visible_row=Math.floor(non_visible_area/this.$item.height())+1;
                        going_to=this.$item.height()*non_visible_row;
                        going_to=item_position.top-going_to;
                    }
                    if (going_to>0) moving=1;
                    if (moving)
                    {
                        // Remember the offset so close() can undo the move.
                        // Animation speed scales with the travel distance.
                        this.$item.data('moved', going_to);
                        var top_string='-'+going_to+'px';
                        var speed=this.options.opening_speed+(going_to/160)*100;
                        this.$item.animate({top: top_string}, speed, this.options.easing, function(){
                            // Once in place, roll the caption from bottom to top.
                            trigger_text_position = this_backup.$item.height() - this_backup.$trigger_text.height();
                            this_backup.$trigger_text.data('top', trigger_text_position);
                            this_backup.$trigger_text.css('top', trigger_text_position);
                            this_backup.$trigger_text.css('bottom', 'auto');
                            this_backup.$trigger_text.animate({'top': 0}, 'slow');
                        });
                    }
                }
                if (!moving)
                {
                    // No repositioning needed: roll the caption immediately.
                    trigger_text_position = this_backup.$item.height() - this_backup.$trigger_text.height();
                    this_backup.$trigger_text.data('top', trigger_text_position);
                    this_backup.$trigger_text.css('top', trigger_text_position);
                    this_backup.$trigger_text.css('bottom', 'auto');
                    this_backup.$trigger_text.animate({'top': 0}, 'slow');
                }
                this.$item.addClass('usquare_block_selected');
                // Unroll the info panel: measure its natural height, collapse
                // to 0, then animate back to the measured height.
                var height_backup=this.$info.css('height');
                this.$info.css('height', 0);
                this.$info.show();
                this.$usquare_about.mCustomScrollbar("update");
                if (this.options.before_info_rolling_callback) this.options.before_info_rolling_callback(this.$item);
                this.$info.animate({height:height_backup}, 'slow', this.options.easing, function()
                {
                    // Transition finished: allow other items to open/close.
                    this_backup.$parent.data('in_trans', 0);
                    if (this_backup.options.after_info_rolling_callback) this_backup.options.after_info_rolling_callback(this_backup.$item);
                });
            }
        }
    },
    // Collapse the info panel and slide the item back if show() had moved it.
    close: function(e) {
        e.preventDefault();
        if (!this.$parent.data('in_trans'))
        {
            if (this.$item.data('showed'))
            {
                var this_backup=this;
                this.$info.hide();
                var trigger_text_position_top = this_backup.$item.height() - this_backup.$trigger_text.height();
                this_backup.$item.removeClass('usquare_block_selected');
                if (this.$item.data('moved'))
                {
                    // Undo the upward slide recorded by show(), then restore
                    // the caption to its resting position.
                    var top_backup=this.$item.data('moved');
                    var speed=this.options.closing_speed+(top_backup/160)*100;
                    this.$item.data('moved', 0);
                    this.$item.animate({'top': 0}, speed, this.options.easing, function()
                    {
                        this_backup.$trigger_text.animate({'top': trigger_text_position_top}, 'slow');
                    });
                }
                else
                {
                    this_backup.$trigger_text.animate({'top': trigger_text_position_top}, 'slow');
                }
                this.$item.data('showed', 0);
            }
        }
    },
    // Scoped DOM lookup within this item's root element.
    $: function (selector) {
        return this.$item.find(selector);
    }
};
// Container controller: merges options with the defaults, wires the shared
// shade overlay, and builds one uSquareItem per grid block.
function uSquare(element, options) {
    var self = this;
    this.options = $.extend({}, $.fn.uSquare.defaults, options);
    this.$element = $(element);
    this.$overlay = this.$('.usquare_module_shade');
    this.$items = this.$(this.options.block);
    this.$triggers = this.$(this.options.trigger);
    this.$closes = this.$('.close');
    this.$triggers.on('click', $.proxy(this.overlayShow, this));
    this.$closes.on('click', $.proxy(this.overlayHide, this));
    this.$overlay.on('click', $.proxy(this.overlayHide, this));
    // NOTE(review): $.extend(self.options, ...) mutates the shared options
    // object on every iteration; all items end up referencing the same
    // extended object. Works here because the overlay/parent are shared —
    // confirm before reusing these options elsewhere.
    $.each( this.$items, function(i, element) {
        new uSquareItem(element, $.extend(self.options, {$overlay: self.$overlay, $parent: self.$element }) );
    });
};
// Container-level behaviour: the shade overlay shown behind an open item.
uSquare.prototype = {
    // Scoped DOM lookup within the module root element.
    $: function (selector) {
        return this.$element.find(selector);
    },
    // Fade the shade overlay in (settling at 50% opacity).
    overlayShow: function() {
        this.$overlay.fadeIn('slow', function(){
            $(this).css({opacity : 0.5});
        })
    },
    // Fade the overlay out, unless an open/close animation is still running.
    overlayHide: function() {
        if (!this.$element.data('in_trans'))
        {
            this.$overlay.fadeOut('slow');
        }
    }
};
// jQuery plugin entry point. Instantiates uSquare once per matched element
// (cached via .data()) and supports method invocation by name, e.g.
// $(el).uSquare('overlayHide').
$.fn.uSquare = function ( option ) {
    return this.each(function () {
        var $this = $(this),
            data = $this.data('tooltip'),
            options = typeof option == 'object' && option;
        // NOTE(review): the instance is cached under the 'tooltip' data key —
        // presumably a leftover from the Bootstrap tooltip plugin this was
        // modelled on. It is observable via .data('tooltip'); confirm no
        // caller relies on it before renaming.
        data || $this.data('tooltip', (data = new uSquare(this, options)));
        (typeof option == 'string') && data[option]();
    });
};
// Expose the constructor so the plugin can be extended externally.
$.fn.uSquare.Constructor = uSquare;
// Default plugin options; any of these may be overridden per call.
$.fn.uSquare.defaults = {
    block: '.usquare_block',             // selector for one grid item
    trigger: '.usquare_square',          // clickable face of an item
    moreInfo: '.usquare_block_extended', // expanded info panel
    opening_speed: 300,                  // base ms for the open animation
    closing_speed: 500,                  // base ms for the close animation
    easing: 'swing',                     // jQuery easing name
    before_item_opening_callback: null,  // hook($item) before an item opens
    before_info_rolling_callback: null,  // hook($item) before the info unrolls
    after_info_rolling_callback: null    // hook($item) after the info unrolls
};
})(jQuery);
// Initialise the custom scrollbar on every "about" pane once all assets have
// loaded. jQuery 3 removed the .load() event shorthand, so bind through
// .on('load', ...), which is behaviour-identical on older jQuery versions.
$(window).on('load', function() {
    $(".usquare_about").mCustomScrollbar();
});
| AhoraMadrid/ahoramadrid.org | js/vendor/jquery.usquare.js | JavaScript | agpl-3.0 | 6,490 |
// Doxygen-generated search index for identifiers starting with "b".
// Each entry: [lowercased term, [display name, [target page, in-frame flag, tooltip], ...]].
// NOTE(review): machine-generated file — do not edit entries by hand.
var searchData=
[
  ['backtrace',['backtrace',['../class_logger.html#a5deb9b10c43285287a9113f280ee8fab',1,'Logger']]],
  ['baseexception',['BaseException',['../class_base_exception.html',1,'']]],
  ['baseexception_2ephp',['BaseException.php',['../_base_exception_8php.html',1,'']]],
  ['basic_2ephp',['Basic.php',['../_menu_2_basic_8php.html',1,'']]],
  ['basic_2ephp',['Basic.php',['../_paginator_2_basic_8php.html',1,'']]],
  ['basic_2ephp',['Basic.php',['../_auth_2_basic_8php.html',1,'']]],
  ['basic_2ephp',['Basic.php',['../_t_mail_2_basic_8php.html',1,'']]],
  ['basic_2ephp',['Basic.php',['../_form_2_basic_8php.html',1,'']]],
  ['basic_2ephp',['Basic.php',['../_grid_2_basic_8php.html',1,'']]],
  ['basicauth',['BasicAuth',['../class_basic_auth.html',1,'']]],
  ['basicauth_2ephp',['BasicAuth.php',['../_basic_auth_8php.html',1,'']]],
  ['beforedelete',['beforeDelete',['../class_s_q_l___relation.html#a44c9d7a3b22619b53d4f49f1070d5235',1,'SQL_Relation']]],
  ['beforefield',['beforeField',['../class_form___field.html#aa4bbfb40048e1c3fe939621179652be1',1,'Form_Field']]],
  ['beforeinsert',['beforeInsert',['../class_s_q_l___relation.html#ada6a7f2abf3ba1c19e4ba3711da1a61e',1,'SQL_Relation']]],
  ['beforeload',['beforeLoad',['../class_s_q_l___relation.html#a665492752f54f9cbc3fd2cae51ca4373',1,'SQL_Relation']]],
  ['beforemodify',['beforeModify',['../class_s_q_l___relation.html#a3ad587772d12f99af11a3db64d879210',1,'SQL_Relation']]],
  ['beforesave',['beforeSave',['../class_s_q_l___relation.html#ab9e4fb36c177d9633b81fc184f7bd933',1,'SQL_Relation']]],
  ['begintransaction',['beginTransaction',['../class_d_b.html#af3380f3b13931d581fa973a382946b32',1,'DB\beginTransaction()'],['../class_d_blite__mysql.html#a06fdc3063ff49b8de811683aae3483e6',1,'DBlite_mysql\beginTransaction()']]],
  ['belowfield',['belowField',['../class_form___field.html#a27cd7c6e75ed8c09aae8af32905a888d',1,'Form_Field']]],
  ['box_2ephp',['Box.php',['../_box_8php.html',1,'']]],
  ['breakhook',['breakHook',['../class_abstract_object.html#a446b3f8327b3272c838ae46f40a9da06',1,'AbstractObject']]],
  ['bt',['bt',['../class_d_b__dsql.html#aa374d1bfaabf3f546fe8862d09f4a096',1,'DB_dsql']]],
  ['button',['Button',['../class_button.html',1,'']]],
  ['button_2ephp',['Button.php',['../_button_8php.html',1,'']]],
  ['button_2ephp',['Button.php',['../_form_2_button_8php.html',1,'']]],
  ['button_2ephp',['Button.php',['../_view_2_button_8php.html',1,'']]],
  ['buttonset',['ButtonSet',['../class_button_set.html',1,'']]],
  ['buttonset_2ephp',['ButtonSet.php',['../_button_set_8php.html',1,'']]],
  ['buttonset_2ephp',['ButtonSet.php',['../_view_2_button_set_8php.html',1,'']]]
];
| atk4/atk4-web | dox/html/search/all_62.js | JavaScript | agpl-3.0 | 2,662 |
package org.demo.jdk.utilapis;
/**
 * A simple {@link Flyable} implementation used to demonstrate interface
 * dispatch in the util-APIs demo package.
 */
public class Bird implements Flyable {
    /** Flight speed reported by {@link #fly()}; never reassigned, so final. */
    private final int speed = 15;

    /**
     * Prints this bird's speed to standard output.
     */
    @Override
    public void fly() {
        System.out.println("I'm Bird, my speed is " + speed + ".");
    }
}
| William-Hai/SimpleDemo | src/org/demo/jdk/utilapis/Bird.java | Java | agpl-3.0 | 220 |
<?php
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
// See the LICENCE file in the repository root for full licence text.
namespace App\Libraries;
use App\Exceptions\UserVerificationException;
use App\Mail\UserVerification as UserVerificationMail;
use App\Models\Country;
use App\Models\LoginAttempt;
use Datadog;
use Mail;
/**
 * Orchestrates the e-mail based user verification flow: issuing verification
 * keys, rendering the verification prompt, and validating submitted keys.
 */
class UserVerification
{
    // Current HTTP request (presumably Illuminate\Http\Request — set from request()).
    private $request;
    // Issued/verified state backing store (UserVerificationState).
    private $state;
    // The user being verified (from auth()->user()).
    private $user;

    /**
     * Builds the verification helper for the current request/user, memoizing
     * it on the request attributes so repeated calls share one instance.
     *
     * @return static
     */
    public static function fromCurrentRequest()
    {
        $verification = request()->attributes->get('user_verification');
        if ($verification === null) {
            $verification = new static(
                auth()->user(),
                request(),
                UserVerificationState::fromCurrentRequest()
            );
            request()->attributes->set('user_verification', $verification);
        }
        return $verification;
    }

    /**
     * Records a verification attempt metric in Datadog.
     *
     * @param string $source where the attempt came from
     * @param string $type attempt type (e.g. 'new', 'success', 'fail')
     * @param string|null $reason failure reason, when applicable
     */
    public static function logAttempt(string $source, string $type, string $reason = null): void
    {
        Datadog::increment(
            config('datadog-helper.prefix_web').'.verification.attempts',
            1,
            compact('reason', 'source', 'type')
        );
    }

    private function __construct($user, $request, $state)
    {
        $this->user = $user;
        $this->request = $request;
        $this->state = $state;
    }

    /**
     * Starts the verification flow: issues a key if none is outstanding and
     * returns the verification prompt (JSON for ajax, full page otherwise)
     * with a 401 status.
     */
    public function initiate()
    {
        $statusCode = 401;
        app('route-section')->setError("{$statusCode}-verification");
        // Workaround race condition causing $this->issue() to be called in parallel.
        // Mainly observed when logging in as privileged user.
        if ($this->request->ajax()) {
            $routeData = app('route-section')->getOriginal();
            if ($routeData['controller'] === 'notifications_controller' && $routeData['action'] === 'index') {
                return response(['error' => 'verification'], $statusCode);
            }
        }
        $email = $this->user->user_email;
        if (!$this->state->issued()) {
            static::logAttempt('input', 'new');
            $this->issue();
        }
        if ($this->request->ajax()) {
            return response([
                'authentication' => 'verify',
                'box' => view(
                    'users._verify_box',
                    compact('email')
                )->render(),
            ], $statusCode);
        } else {
            return ext_view('users.verify', compact('email'), null, $statusCode);
        }
    }

    /**
     * Whether the current session has already completed verification.
     */
    public function isDone()
    {
        return $this->state->isDone();
    }

    /**
     * Issues a new verification key, logs the attempt, and queues the
     * verification e-mail. No-op when the user has no e-mail address.
     */
    public function issue()
    {
        $user = $this->user;
        if (!present($user->user_email)) {
            return;
        }
        $keys = $this->state->issue();
        LoginAttempt::logAttempt($this->request->getClientIp(), $this->user, 'verify');
        // Country name shown in the mail so users can spot suspicious logins.
        $requestCountry = Country
            ::where('acronym', request_country($this->request))
            ->pluck('name')
            ->first();
        Mail::to($user)
            ->queue(new UserVerificationMail(
                compact('keys', 'user', 'requestCountry')
            ));
    }

    /**
     * Marks the session verified and returns an empty 200 response.
     */
    public function markVerifiedAndRespond()
    {
        $this->state->markVerified();
        return response([], 200);
    }

    /**
     * Re-sends the verification key (or short-circuits if already verified).
     */
    public function reissue()
    {
        if ($this->state->isDone()) {
            return $this->markVerifiedAndRespond();
        }
        $this->issue();
        return response(['message' => trans('user_verification.errors.reissued')], 200);
    }

    /**
     * Validates the key submitted by the user. Spaces are stripped so users
     * can paste keys copied with formatting. On failure, logs the reason,
     * records mismatches, and optionally reissues a fresh key.
     */
    public function verify()
    {
        $key = str_replace(' ', '', $this->request->input('verification_key'));
        try {
            $this->state->verify($key);
        } catch (UserVerificationException $e) {
            static::logAttempt('input', 'fail', $e->reasonKey());
            if ($e->reasonKey() === 'incorrect_key') {
                LoginAttempt::logAttempt($this->request->getClientIp(), $this->user, 'verify-mismatch', $key);
            }
            if ($e->shouldReissue()) {
                $this->issue();
            }
            return error_popup($e->getMessage());
        }
        static::logAttempt('input', 'success');
        return $this->markVerifiedAndRespond();
    }
}
| omkelderman/osu-web | app/Libraries/UserVerification.php | PHP | agpl-3.0 | 4,310 |
/*
Copyright (C) 2015 Jack Fagner
This file is part of OpenTidl.
OpenTidl is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenTidl is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with OpenTidl. If not, see <http://www.gnu.org/licenses/>.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using OpenTidl.Models;
using OpenTidl.Models.Base;
using OpenTidl.Transport;
using OpenTidl.Enums;
using System.IO;
namespace OpenTidl
{
public partial class OpenTidlClient
{
#region image methods
/// <summary>
/// Helper method to retrieve a stream with an album cover image
/// </summary>
public Stream GetAlbumCover(AlbumModel model, AlbumCoverSize size)
{
return GetAlbumCover(model.Cover, model.Id, size);
}
/// <summary>
/// Helper method to retrieve a stream with an album cover image
/// </summary>
public Stream GetAlbumCover(String cover, Int32 albumId, AlbumCoverSize size)
{
var w = 750;
var h = 750;
if (!RestUtility.ParseImageSize(size.ToString(), out w, out h))
throw new ArgumentException("Invalid image size", "size");
String url = null;
if (!String.IsNullOrEmpty(cover))
url = String.Format("http://resources.wimpmusic.com/images/{0}/{1}x{2}.jpg", cover.Replace('-', '/'), w, h);
else
url = String.Format("http://images.tidalhifi.com/im/im?w={1}&h={2}&albumid={0}&noph", albumId, w, h);
return RestClient.GetStream(url);
}
/// <summary>
/// Helper method to retrieve a stream with an artists picture
/// </summary>
public Stream GetArtistPicture(ArtistModel model, ArtistPictureSize size)
{
return GetArtistPicture(model.Picture, model.Id, size);
}
/// <summary>
/// Helper method to retrieve a stream with an artists picture
/// </summary>
public Stream GetArtistPicture(String picture, Int32 artistId, ArtistPictureSize size)
{
var w = 750;
var h = 500;
if (!RestUtility.ParseImageSize(size.ToString(), out w, out h))
throw new ArgumentException("Invalid image size", "size");
String url = null;
if (!String.IsNullOrEmpty(picture))
url = String.Format("http://resources.wimpmusic.com/images/{0}/{1}x{2}.jpg", picture.Replace('-', '/'), w, h);
else
url = String.Format("http://images.tidalhifi.com/im/im?w={1}&h={2}&artistid={0}&noph", artistId, w, h);
return RestClient.GetStream(url);
}
/// <summary>
/// Helper method to retrieve a stream with a playlist image
/// </summary>
public Stream GetPlaylistImage(PlaylistModel model, PlaylistImageSize size)
{
return GetPlaylistImage(model.Image, model.Uuid, size);
}
/// <summary>
/// Helper method to retrieve a stream with a playlist image
/// </summary>
public Stream GetPlaylistImage(String image, String playlistUuid, PlaylistImageSize size)
{
var w = 750;
var h = 500;
if (!RestUtility.ParseImageSize(size.ToString(), out w, out h))
throw new ArgumentException("Invalid image size", "size");
String url = null;
if (!String.IsNullOrEmpty(image))
url = String.Format("http://resources.wimpmusic.com/images/{0}/{1}x{2}.jpg", image.Replace('-', '/'), w, h);
else
url = String.Format("http://images.tidalhifi.com/im/im?w={1}&h={2}&uuid={0}&rows=2&cols=3&noph", playlistUuid, w, h);
return RestClient.GetStream(url);
}
/// <summary>
/// Helper method to retrieve a stream with a video conver image
/// </summary>
public Stream GetVideoImage(VideoModel model, VideoImageSize size)
{
return GetVideoImage(model.ImageId, model.ImagePath, size);
}
/// <summary>
/// Helper method to retrieve a stream with a video conver image
/// </summary>
public Stream GetVideoImage(String imageId, String imagePath, VideoImageSize size)
{
var w = 750;
var h = 500;
if (!RestUtility.ParseImageSize(size.ToString(), out w, out h))
throw new ArgumentException("Invalid image size", "size");
String url = null;
if (!String.IsNullOrEmpty(imageId))
url = String.Format("http://resources.wimpmusic.com/images/{0}/{1}x{2}.jpg", imageId.Replace('-', '/'), w, h);
else
url = String.Format("http://images.tidalhifi.com/im/im?w={1}&h={2}&img={0}&noph", imagePath, w, h);
return RestClient.GetStream(url);
}
#endregion
#region track/video methods
/// <summary>
/// Helper method to retrieve the audio/video stream with correct user-agent, etc.
/// </summary>
public Stream GetStream(String streamUrl)
{
return RestClient.GetStream(streamUrl);
}
#endregion
}
}
| iamsi/OpenTidl | OpenTidl/Methods/OpenTidlStreamMethods.cs | C# | agpl-3.0 | 5,835 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Infrastructure
# Copyright (C) 2014 Ingenieria ADHOC
# No email
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
from openerp import netsvc
from openerp.osv import osv, fields
class database_type(osv.osv):
    """Catalogue of database types used by the infrastructure module.

    Each record drives naming (prefixes), protection, automatic
    deactivation/drop policy and suggested backup policies for databases
    of that type. Uses the legacy OpenERP 7 ``osv`` API.
    """
    _name = 'infrastructure.database_type'
    _description = 'database_type'
    # Legacy-API column declarations (fields.* from openerp.osv).
    _columns = {
        'name': fields.char(string='Name', required=True),
        'prefix': fields.char(string='Prefix', required=True, size=4),
        'url_prefix': fields.char(string='URL Prefix'),
        'automatic_drop': fields.boolean(string='Automatic Drop'),
        'automatic_drop_days': fields.integer(string='Automatic Drop Days'),
        'protect_db': fields.boolean(string='Protect DBs?'),
        'color': fields.integer(string='Color'),
        'automatic_deactivation': fields.boolean(string='Atumatic Deactivation?'),
        'auto_deactivation_days': fields.integer(string='Automatic Drop Days'),
        'url_example': fields.char(string='URL Example'),
        'bd_name_example': fields.char(string='BD Name Example'),
        'db_back_up_policy_ids': fields.many2many('infrastructure.db_back_up_policy', 'infrastructure_database_type_ids_db_back_up_policy_ids_rel', 'database_type_id', 'db_back_up_policy_id', string='Suggested Backup Policies'),
    }
    _defaults = {
    }
    _constraints = [
    ]
# Instantiation registers the model with the ORM (required by the old osv API).
database_type()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| zhaohuaw/odoo-infrastructure | addons/infrastructure/database_type.py | Python | agpl-3.0 | 2,270 |
# frozen_string_literal: true
require 'rails_helper'
# Controller specs for StudentsController: classroom membership, account
# settings, the public demo accounts, and account updates.
describe StudentsController do
  let(:user) { create(:student) }

  # Every action runs as a signed-in student unless a nested block overrides it.
  before { allow(controller).to receive(:current_user) { user } }
  it { should use_before_action :authorize! }
  describe '#index' do
    let!(:classroom) { create(:classroom) }
    let!(:students_classrooms) { create(:students_classrooms, student_id: user.id, classroom_id: classroom.id) }
    it 'should set the current user and js file' do
      get :index
      expect(assigns(:current_user)).to eq user
      expect(assigns(:js_file)).to eq "student"
    end
    it 'should find the classroom and set flash' do
      get :index, params: { joined: "success", classroom: classroom.id }
      expect(flash["join-class-notification"]).to eq "You have joined #{classroom.name} 🎉🎊"
    end
  end
  describe '#join_classroom' do
    let(:student) { create(:student) }
    before { allow(controller).to receive(:current_user) { student } }
    it 'should redirect for an invalid class_code' do
      get :join_classroom, params: { classcode: 'nonsense_doesnt_exist' }
      expect(response).to redirect_to '/classes'
      expect(flash[:error]).to match("Oops! There is no class with the code nonsense_doesnt_exist. Ask your teacher for help.")
    end
    it 'should redirect for a valid class_code' do
      classroom = create(:classroom, code: 'existing_code')
      get :join_classroom, params: { classcode: classroom.code }
      expect(response).to redirect_to "/classrooms/#{classroom.id}?joined=success"
    end
  end
  describe '#account_settings' do
    it 'should set the current user and js file' do
      get :account_settings
      expect(assigns(:current_user)).to eq user
      expect(assigns(:js_file)).to eq "student"
    end
  end
  # The demo endpoints sign the visitor in as a pre-seeded demo account,
  # rebuilding the demo data when the account is missing.
  describe '#student_demo' do
    context 'when Angie Thomas exists' do
      let!(:angie) { create(:user, email: 'angie_thomas_demo@quill.org') }
      it 'should sign in angie and redirect to profile' do
        get :student_demo
        expect(session[:user_id]).to eq angie.id
        expect(response).to redirect_to '/classes'
      end
    end
    context 'when angie thomas does not exist' do
      it 'should destroy recreate the demo and redirect to student demo' do
        expect(Demo::ReportDemoDestroyer).to receive(:destroy_demo).with(nil)
        expect(Demo::ReportDemoCreator).to receive(:create_demo).with(nil)
        get :student_demo
        expect(response).to redirect_to "/student_demo"
      end
    end
  end
  describe '#student_demo_ap' do
    context 'when bell hooks exists' do
      let!(:bell) { create(:user, email: 'bell_hooks_demo@quill.org') }
      it 'should sign in bell and redirect to profile' do
        get :demo_ap
        expect(session[:user_id]).to eq bell.id
        expect(response).to redirect_to '/classes'
      end
    end
    context 'when bell hooks does not exist' do
      it 'should recreate the demo and redirect to student demo' do
        expect(Demo::ReportDemoAPCreator).to receive(:create_demo).with(nil)
        get :demo_ap
        expect(response).to redirect_to "/student_demo_ap"
      end
    end
  end
  describe '#update_account' do
    let!(:user) { create(:user, name: "Maya Angelou", email: 'maya_angelou_demo@quill.org', username: "maya-angelou", role: "student") }
    let!(:second_user) { create(:user, name: "Harvey Milk", email: 'harvey@quill.org', username: "harvey-milk", role: "student") }
    it 'should update the name, email and username' do
      put :update_account, params: { email: "pablo@quill.org", username: "pabllo-vittar", name: "Pabllo Vittar" }
      expect(user.reload.email).to eq "pablo@quill.org"
      expect(user.reload.username).to eq "pabllo-vittar"
      expect(user.reload.name).to eq "Pabllo Vittar"
    end
    it 'should update only the fields that are changed' do
      put :update_account, params: { email: "pablo@quill.org", username: "rainha-do-carnaval", name: "Pabllo Vittar" }
      expect(user.reload.email).to eq "pablo@quill.org"
      expect(user.reload.username).to eq "rainha-do-carnaval"
      expect(user.reload.name).to eq "Pabllo Vittar"
    end
    it 'should not update the email or username if already taken' do
      put :update_account, params: { email: "harvey@quill.org", username: "pabllo-vittar", name: "Pabllo Vittar" }
      expect(user.reload.errors.messages[:email].first).to eq "That email is taken. Try another."
      put :update_account, params: { email: "pablo@quill.org", username: "harvey-milk", name: "Pabllo Vittar" }
      expect(user.reload.errors.messages[:username].first).to eq "That username is taken. Try another."
    end
  end
end
| empirical-org/Empirical-Core | services/QuillLMS/spec/controllers/students_controller_spec.rb | Ruby | agpl-3.0 | 4,686 |
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
SepiaSearch (Videos)
"""
from json import loads
from dateutil import parser, relativedelta
from urllib.parse import urlencode
from datetime import datetime
# about
about = {
    "website": 'https://sepiasearch.org',
    "wikidata_id": None,
    "official_api_documentation": "https://framagit.org/framasoft/peertube/search-index/-/tree/master/server/controllers/api", # NOQA
    "use_official_api": True,
    "require_api_key": False,
    "results": 'JSON',
}
# engine traits consumed by the searx core
categories = ['videos']
paging = True
time_range_support = True
safesearch = True
# languages accepted by the SepiaSearch languageOneOf[] filter
supported_languages = [
    'en', 'fr', 'ja', 'eu', 'ca', 'cs', 'eo', 'el',
    'de', 'it', 'nl', 'es', 'oc', 'gd', 'zh', 'pt',
    'sv', 'pl', 'fi', 'ru'
]
base_url = 'https://sepiasearch.org/api/v1/search/videos'
# maps searx safesearch levels (0/1/2) to the API's nsfw parameter
safesearch_table = {
    0: 'both',
    1: 'false',
    2: 'false'
}
# offsets subtracted from "now" to build the startDate filter
time_range_table = {
    'day': relativedelta.relativedelta(),
    'week': relativedelta.relativedelta(weeks=-1),
    'month': relativedelta.relativedelta(months=-1),
    'year': relativedelta.relativedelta(years=-1)
}
# template for the embeddable player iframe shown in results
embedded_url = '<iframe width="540" height="304" src="{url}" frameborder="0" allowfullscreen></iframe>'
def minute_to_hm(minute):
    """Format an integer duration as ``"H:MM"``; return ``None`` otherwise."""
    if not isinstance(minute, int):
        return None
    hours, minutes = divmod(minute, 60)
    return "%d:%02d" % (hours, minutes)
def request(query, params):
    """Build the SepiaSearch video-search URL into ``params['url']``."""
    query_args = {
        'search': query,
        'start': (params['pageno'] - 1) * 10,
        'count': 10,
        'sort': '-match',
        'nsfw': safesearch_table[params['safesearch']],
    }
    query_url = base_url + '?' + urlencode(query_args)

    # Only forward the language when the index supports it.
    lang = params['language'].split('-')[0]
    if lang in supported_languages:
        query_url += '&languageOneOf[]=' + lang

    # Translate searx time ranges into an absolute startDate filter.
    if params['time_range'] in time_range_table:
        start_date = datetime.now().date() + time_range_table[params['time_range']]
        query_url += '&startDate=' + start_date.isoformat()

    params['url'] = query_url
    return params
def response(resp):
    """Parse the SepiaSearch JSON reply into searx video result dicts."""
    payload = loads(resp.text)
    if 'data' not in payload:
        return []

    results = []
    for item in payload['data']:
        results.append({
            'url': item['url'],
            'title': item['name'],
            'content': item['description'],
            'author': item.get('account', {}).get('displayName'),
            'length': minute_to_hm(item.get('duration')),
            'template': 'videos.html',
            'publishedDate': parser.parse(item['publishedAt']),
            'embedded': embedded_url.format(url=item.get('embedUrl')),
            'thumbnail': item['thumbnailUrl'],
        })
    return results
| dalf/searx | searx/engines/sepiasearch.py | Python | agpl-3.0 | 2,928 |
<?php
/*
Kevin Froman - Easy IPFS: easily interact with IPFS in php via this simple API
Copyright (C) 2017 Kevin Froman
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
/**
 * Minimal wrapper around the local IPFS HTTP API (127.0.0.1:5001).
 */
class IPFS {
	/**
	 * Smoke-test helper.
	 * @return string always 'bar'
	 */
	function foo(){
		return 'bar';
	}

	/**
	 * Fetch the raw contents of an IPFS object.
	 * @param string $hash IPFS multihash of the object
	 * @return string|false object data, or false on cURL failure
	 */
	function cat($hash){
		$server = curl_init();
		curl_setopt($server, CURLOPT_URL, 'http://127.0.0.1:5001/api/v0/cat?arg=' . $hash);
		curl_setopt($server, CURLOPT_RETURNTRANSFER, true);
		curl_setopt($server, CURLOPT_BINARYTRANSFER, false);
		$gotData = curl_exec($server);
		curl_close($server);
		return $gotData;
	}

	/**
	 * Resolve an IPNS name to the hash it currently points at.
	 * @param string $name IPNS name (peer id)
	 * @return string resolved hash, or the string 'invalid' on failure
	 */
	function resolve($name){
		$server = curl_init();
		curl_setopt($server, CURLOPT_URL, 'http://127.0.0.1:5001/api/v0/name/resolve?arg=' . $name);
		curl_setopt($server, CURLOPT_RETURNTRANSFER, true);
		$gotData = curl_exec($server);
		curl_close($server);
		$gotData = json_decode($gotData, true);
		if (isset($gotData['Path'])){
			// API returns e.g. "/ipfs/Qm..."; strip the prefix to expose the bare hash.
			return str_replace('/ipfs/', '', $gotData['Path']);
		}
		return 'invalid';
	}

	/**
	 * Add a local file to IPFS.
	 * @param string $file path to the file to add
	 * @return string|false raw JSON API response (contains Name/Hash), or false on failure
	 */
	function dataAdd($file){
		$server = curl_init();
		curl_setopt($server, CURLOPT_POST, 1);
		curl_setopt($server, CURLOPT_RETURNTRANSFER, true);
		$data = array('file' => new CurlFile($file));
		// NOTE(review): CURLOPT_SAFE_UPLOAD handling changed in PHP 7 — confirm
		// this option is still accepted on the target PHP version.
		curl_setopt($server, CURLOPT_SAFE_UPLOAD, false); // required as of PHP 5.6.0
		curl_setopt($server, CURLOPT_POSTFIELDS, $data);
		curl_setopt($server, CURLOPT_URL, 'http://127.0.0.1:5001/api/v0/add');
		$data = curl_exec($server);
		curl_close($server);
		return $data;
	}

	/**
	 * Add a file and publish its hash under this node's IPNS name.
	 * @param string $data path to the file to add and publish
	 * @return bool true when a hash was published, false if the add failed
	 */
	function namePublish($data) {
		// Bug fix: dataAdd() is a method on this class — the original bare
		// function call raised a fatal "undefined function" error. The JSON
		// add-response is also decoded so the publish endpoint receives the
		// bare hash instead of the raw JSON blob.
		$added = json_decode($this->dataAdd($data), true);
		if (!isset($added['Hash'])){
			return false;
		}
		$server = curl_init();
		curl_setopt($server, CURLOPT_URL, 'http://127.0.0.1:5001/api/v0/name/publish?arg=' . $added['Hash'] . '&lifetime=1m&');
		curl_exec($server);
		curl_close($server);
		return true;
	}
}
?>
| beardog108/seedbin | easy-ipfs.php | PHP | agpl-3.0 | 2,545 |
import React from 'react';
import { render, waitForElement } from 'react-testing-library';
import FakeDataProvider from '@olimat/web/utils/test/FakeDataProvider';
import MockErrorProvider from '@olimat/web/utils/test/MockErrorProvider';
import MockNextContext from '@olimat/web/utils/test/MockNextContext';
import { renderApollo } from '@olimat/web/utils/test/test-utils';
import ExamDetails from './Details';
// Renders <ExamDetails /> inside a stubbed Next.js router context with a
// fixed exam id, mirroring how the page receives its query parameter.
const MockExamDetails = () => {
  return (
    <MockNextContext router={{ query: { id: 'theExamId1' } }}>
      <ExamDetails />
    </MockNextContext>
  );
};
// Perhaps a better solution would be to create a mock for the Next.js context
// https://github.com/zeit/next.js/issues/5205
// Another option is to export the component without wrapping it in the HoCs, and pass the mocks in
// https://stackoverflow.com/questions/44204828
// Integration-style tests for <ExamDetails />: loading, data and error states
// rendered through a mocked Apollo/GraphQL layer.
describe('<ExamDetails />', () => {
  // NOTE(review): skipped — reason not recorded in the repo; presumably the
  // loading state is too transient to assert on reliably. Confirm before
  // re-enabling.
  test.skip('renders loading state initially', () => {
    const { getByText } = renderApollo(<MockExamDetails />);
    getByText(/loading/i);
  });
  test('renders the details of an exam', async () => {
    // Overrides the fake GraphQL resolver so the rendered title is predictable.
    const customResolvers = {
      // We need to update the GraphQL API as well
      Exam: () => ({
        title: '2017 - Fase 3 - Ano 5',
      }),
    };
    const { getByText, getByTestId } = render(
      <FakeDataProvider customResolvers={customResolvers}>
        <MockExamDetails />
      </FakeDataProvider>,
    );
    // Wait until the mocked query resolves and the title appears.
    await waitForElement(() => getByText(customResolvers.Exam().title));
    const questionListNode = getByTestId('questionList');
    expect(questionListNode).toBeInTheDocument();
    // toBe(10) couples the test with the mocked server
    // https://youtu.be/K445DtQ5oHY?t=1476
    expect(questionListNode.children.length).toBe(10);
  });
  test('renders error message', async () => {
    const errorMsg = 'Que pena';
    const { getByText } = render(
      <MockErrorProvider graphqlErrors={[{ message: errorMsg }]}>
        <MockExamDetails />
      </MockErrorProvider>,
    );
    await waitForElement(() => getByText(errorMsg, { exact: false }));
  });
});
| iquabius/olimat | packages/web/src/components/Exam/Details.test.tsx | TypeScript | agpl-3.0 | 1,976 |
/*
Classe gerada automaticamente pelo MSTech Code Creator
*/
namespace MSTech.GestaoEscolar.BLL
{
using MSTech.Business.Common;
using MSTech.GestaoEscolar.Entities;
using MSTech.GestaoEscolar.DAL;
using System.Data;
using MSTech.Data.Common;
using System.Collections.Generic;
using System.Linq;
using MSTech.Validation.Exceptions;
/// <summary>
/// CLS_AlunoAvaliacaoTurmaQualidade Business Object.
/// Reads and persists the "quality" marks recorded for a student's
/// evaluation within a class (turma).
/// </summary>
public class CLS_AlunoAvaliacaoTurmaQualidadeBO : BusinessBase<CLS_AlunoAvaliacaoTurmaQualidadeDAO, CLS_AlunoAvaliacaoTurmaQualidade>
{
    #region Consultas

    /// <summary>
    /// Selects the quality types for a student's class enrollment.
    /// </summary>
    /// <param name="tur_id">Class (turma) ID.</param>
    /// <param name="alu_id">Student ID.</param>
    /// <param name="mtu_id">Student's class-enrollment (matrícula turma) ID.</param>
    /// <param name="fav_id">Evaluation format ID.</param>
    /// <param name="ava_id">Evaluation ID.</param>
    /// <returns>DataTable with the matching quality rows.</returns>
    public static DataTable SelecionaPorMatriculaTurma(long tur_id, long alu_id, int mtu_id, int fav_id, int ava_id)
    {
        return new CLS_AlunoAvaliacaoTurmaQualidadeDAO().SelecionaPorMatriculaTurma(tur_id, alu_id, mtu_id, fav_id, ava_id);
    }

    /// <summary>
    /// Selects a list of quality entities for a student's class enrollment.
    /// </summary>
    /// <param name="tur_id">Class (turma) ID.</param>
    /// <param name="alu_id">Student ID.</param>
    /// <param name="mtu_id">Student's class-enrollment (matrícula turma) ID.</param>
    /// <param name="fav_id">Evaluation format ID.</param>
    /// <param name="ava_id">Evaluation ID.</param>
    /// <param name="banco">Optional open transaction; when null a fresh DAO (and connection) is used.</param>
    /// <returns>List of matching quality entities.</returns>
    public static List<CLS_AlunoAvaliacaoTurmaQualidade> SelecionaListaPorMatriculaTurma(long tur_id, long alu_id, int mtu_id, int fav_id, int ava_id, TalkDBTransaction banco = null)
    {
        // Reuse the caller's transaction when one is supplied.
        CLS_AlunoAvaliacaoTurmaQualidadeDAO dao = banco == null ?
            new CLS_AlunoAvaliacaoTurmaQualidadeDAO() :
            new CLS_AlunoAvaliacaoTurmaQualidadeDAO { _Banco = banco };

        // Materialize each DataRow into an entity instance.
        return (from DataRow dr in dao.SelecionaPorMatriculaTurma(tur_id, alu_id, mtu_id, fav_id, ava_id).Rows
                select dao.DataRowToEntity(dr, new CLS_AlunoAvaliacaoTurmaQualidade())).ToList();
    }

    #endregion

    #region Saves

    /// <summary>
    /// Saves the qualities selected for the student and deletes the ones that
    /// were unchecked, so the stored set ends up mirroring <paramref name="lista"/>.
    /// </summary>
    /// <param name="tur_id">Class (turma) ID.</param>
    /// <param name="alu_id">Student ID.</param>
    /// <param name="mtu_id">Student's class-enrollment (matrícula turma) ID.</param>
    /// <param name="fav_id">Evaluation format ID.</param>
    /// <param name="ava_id">Evaluation ID.</param>
    /// <param name="lista">Qualities currently selected for the student.</param>
    /// <param name="banco">Transaction in which all reads and writes run.</param>
    /// <returns>true when every insert/delete succeeded.</returns>
    public static bool Salvar(long tur_id, long alu_id, int mtu_id, int fav_id, int ava_id, List<CLS_AlunoAvaliacaoTurmaQualidade> lista, TalkDBTransaction banco)
    {
        bool retorno = true;

        // Current state in the database, read inside the same transaction.
        List<CLS_AlunoAvaliacaoTurmaQualidade> listaCadastrados = SelecionaListaPorMatriculaTurma(tur_id, alu_id, mtu_id, fav_id, ava_id, banco);

        if (lista.Any())
        {
            // NOTE(review): Contains relies on the entity type's equality
            // semantics — confirm CLS_AlunoAvaliacaoTurmaQualidade compares
            // by key, otherwise the diff below never matches anything.

            // Rows stored but no longer selected -> delete.
            List<CLS_AlunoAvaliacaoTurmaQualidade> listaExcluir = !listaCadastrados.Any() ?
                new List<CLS_AlunoAvaliacaoTurmaQualidade>() : listaCadastrados.Where(p => !lista.Contains(p)).ToList();

            // Rows selected but not stored yet -> insert.
            List<CLS_AlunoAvaliacaoTurmaQualidade> listaSalvar = listaCadastrados.Any() ?
                lista.Where(p => !listaCadastrados.Contains(p)).ToList() : lista;

            // The non-short-circuiting '&' inside Aggregate is intentional:
            // every row is processed even after one of them fails, and the
            // overall result only stays true if all operations succeeded.
            retorno &= !listaExcluir.Any() ? retorno : listaExcluir.Aggregate(true, (excluiu, qualidade) => excluiu & Delete(qualidade, banco));
            retorno &= !listaSalvar.Any() ? retorno : listaSalvar.Aggregate(true, (salvou, qualidade) => salvou & Save(qualidade, banco));
        }
        else
        {
            // Nothing selected: remove every stored row for this enrollment.
            retorno &= !listaCadastrados.Any() ? retorno : listaCadastrados.Aggregate(true, (excluiu, qualidade) => excluiu & Delete(qualidade, banco));
        }

        return retorno;
    }

    /// <summary>
    /// Saves one CLS_AlunoAvaliacaoTurmaQualidade record inside the given transaction.
    /// </summary>
    /// <param name="entity">CLS_AlunoAvaliacaoTurmaQualidade entity to persist.</param>
    /// <param name="banco">Transaction in which the write runs.</param>
    /// <returns>true when the row was saved.</returns>
    /// <exception cref="ValidationException">Thrown when the entity fails validation.</exception>
    public static new bool Save(CLS_AlunoAvaliacaoTurmaQualidade entity, TalkDBTransaction banco)
    {
        if (entity.Validate())
        {
            return new CLS_AlunoAvaliacaoTurmaQualidadeDAO { _Banco = banco }.Salvar(entity);
        }

        throw new ValidationException(GestaoEscolarUtilBO.ErrosValidacao(entity));
    }

    /// <summary>
    /// Saves one CLS_AlunoAvaliacaoTurmaQualidade record using a fresh connection.
    /// </summary>
    /// <param name="entity">CLS_AlunoAvaliacaoTurmaQualidade entity to persist.</param>
    /// <returns>true when the row was saved.</returns>
    /// <exception cref="ValidationException">Thrown when the entity fails validation.</exception>
    public static new bool Save(CLS_AlunoAvaliacaoTurmaQualidade entity)
    {
        if (entity.Validate())
        {
            return new CLS_AlunoAvaliacaoTurmaQualidadeDAO().Salvar(entity);
        }

        throw new ValidationException(GestaoEscolarUtilBO.ErrosValidacao(entity));
    }

    #endregion
}
} | prefeiturasp/SME-SGP | Src/MSTech.GestaoEscolar.BLL/CLS_AlunoAvaliacaoTurmaQualidadeBO.cs | C# | agpl-3.0 | 5,771 |
module Laser
  # A value that is nothing but a name. Used in place of Symbols, because
  # Symbols can collide with values that appear in user code.
  class PlaceholderObject
    def initialize(name)
      @name = name
    end

    # Renders the placeholder as its bare name.
    def to_s
      @name
    end
    alias_method :inspect, :to_s
  end
end
| michaeledgar/laser | lib/laser/support/placeholder_object.rb | Ruby | agpl-3.0 | 269 |
/**
* Copyright (c) 2002-2011 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.graphdb.index;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.Transaction;
/**
 * An {@link Iterator} with additional {@link #size()} and {@link #close()}
 * methods on it, used for iterating over index query results. It is first and
 * foremost an {@link Iterator}, but also an {@link Iterable} JUST so that it
 * can be used in a for-each loop. The <code>iterator()</code> method
 * <i>always</i> returns <code>this</code>.
 *
 * The size is calculated before-hand so that calling it is always fast.
 *
 * When you're done with the result and haven't reached the end of the
 * iteration {@link #close()} must be called. Results which are looped through
 * entirely closes automatically. Typical use:
 *
 * <pre>
 * IndexHits<Node> hits = index.get( "key", "value" );
 * try
 * {
 *     for ( Node node : hits )
 *     {
 *         // do something with the hit
 *     }
 * }
 * finally
 * {
 *     hits.close();
 * }
 * </pre>
 *
 * @param <T> the type of items in the Iterator.
 */
public interface IndexHits<T> extends Iterator<T>, Iterable<T>
{
    /**
     * Returns the size of this iterable, in most scenarios this value is accurate
     * while in some scenarios near-accurate.
     *
     * There's no cost in calling this method. It's considered near-accurate if this
     * {@link IndexHits} object has been returned when inside a {@link Transaction}
     * which has index modifications, of a certain nature. Also entities
     * ({@link Node}s/{@link Relationship}s) which have been deleted from the graph,
     * but are still in the index will also affect the accuracy of the returned size.
     *
     * @return the near-accurate size of this iterable.
     */
    int size();

    /**
     * Closes the underlying search result. This method should be called
     * whenever you've got what you wanted from the result and won't use it
     * anymore. It's necessary to call it so that underlying indexes can dispose
     * of allocated resources for this search result.
     *
     * You can however skip to call this method if you loop through the whole
     * result, then close() will be called automatically. Even if you loop
     * through the entire result and then call this method it will silently
     * ignore any consecutive call (for convenience).
     */
    void close();

    /**
     * Returns the first and only item from the result iterator, or {@code null}
     * if there was none. If there were more than one item in the result a
     * {@link NoSuchElementException} will be thrown. This method must be called
     * first in the iteration and will grab the first item from the iteration,
     * so the result is considered broken after this call.
     *
     * @return the first and only item, or {@code null} if none.
     */
    T getSingle();

    /**
     * Returns the score of the most recently fetched item from this iterator
     * (from {@link #next()}). The range of the returned values is up to the
     * {@link Index} implementation to dictate.
     * @return the score of the most recently fetched item from this iterator.
     */
    float currentScore();
}
| neo4j-attic/graphdb | kernel/src/main/java/org/neo4j/graphdb/index/IndexHits.java | Java | agpl-3.0 | 4,080 |
import { createSlice, createEntityAdapter, Reducer, AnyAction, PayloadAction } from '@reduxjs/toolkit';
import { fetchAll, fetchDetails, install, uninstall, loadPluginDashboards, panelPluginLoaded } from './actions';
import { CatalogPlugin, PluginListDisplayMode, ReducerState, RequestStatus } from '../types';
import { STATE_PREFIX } from '../constants';
import { PanelPlugin } from '@grafana/data';
export const pluginsAdapter = createEntityAdapter<CatalogPlugin>();

// Builds a predicate matching async-thunk actions in the given lifecycle
// state ('pending' | 'fulfilled' | 'rejected') under our state prefix.
const matchesRequestState = (state: string) => {
  const pattern = new RegExp(`${STATE_PREFIX}\/(.*)\/${state}`);
  return (action: AnyAction) => pattern.test(action.type);
};

const isPendingRequest = matchesRequestState('pending');
const isFulfilledRequest = matchesRequestState('fulfilled');
const isRejectedRequest = matchesRequestState('rejected');

// Drop the trailing '/pending', '/rejected', or '/fulfilled' segment to
// recover the original action type.
const getOriginalActionType = (type: string) => type.split('/').slice(0, -1).join('/');
// Redux Toolkit slice holding the plugin-catalog state: the normalized list
// of plugins, per-request lifecycle tracking, and display settings.
const slice = createSlice({
  name: 'plugins',
  initialState: {
    items: pluginsAdapter.getInitialState(),
    requests: {},
    settings: {
      displayMode: PluginListDisplayMode.Grid,
    },
    // Backwards compatibility
    // (we need to have the following fields in the store as well to be backwards compatible with other parts of Grafana)
    // TODO<remove once the "plugin_admin_enabled" feature flag is removed>
    plugins: [],
    errors: [],
    searchQuery: '',
    hasFetched: false,
    dashboards: [],
    isLoadingPluginDashboards: false,
    panels: {},
  } as ReducerState,
  reducers: {
    // Toggles between grid and list rendering of the plugin catalog.
    setDisplayMode(state, action: PayloadAction<PluginListDisplayMode>) {
      state.settings.displayMode = action.payload;
    },
  },
  extraReducers: (builder) =>
    builder
      // Fetch All
      .addCase(fetchAll.fulfilled, (state, action) => {
        pluginsAdapter.upsertMany(state.items, action.payload);
      })
      // Fetch Details
      .addCase(fetchDetails.fulfilled, (state, action) => {
        pluginsAdapter.updateOne(state.items, action.payload);
      })
      // Install
      .addCase(install.fulfilled, (state, action) => {
        pluginsAdapter.updateOne(state.items, action.payload);
      })
      // Uninstall
      .addCase(uninstall.fulfilled, (state, action) => {
        pluginsAdapter.updateOne(state.items, action.payload);
      })
      // Load a panel plugin (backward-compatibility)
      // TODO<remove once the "plugin_admin_enabled" feature flag is removed>
      .addCase(panelPluginLoaded, (state, action: PayloadAction<PanelPlugin>) => {
        state.panels[action.payload.meta.id] = action.payload;
      })
      // Start loading panel dashboards (backward-compatibility)
      // TODO<remove once the "plugin_admin_enabled" feature flag is removed>
      .addCase(loadPluginDashboards.pending, (state, action) => {
        state.isLoadingPluginDashboards = true;
        state.dashboards = [];
      })
      // Load panel dashboards (backward-compatibility)
      // TODO<remove once the "plugin_admin_enabled" feature flag is removed>
      .addCase(loadPluginDashboards.fulfilled, (state, action) => {
        state.isLoadingPluginDashboards = false;
        state.dashboards = action.payload;
      })
      // The three matchers below record the lifecycle of every async request
      // under STATE_PREFIX so the UI can render loading/error states generically.
      .addMatcher(isPendingRequest, (state, action) => {
        state.requests[getOriginalActionType(action.type)] = {
          status: RequestStatus.Pending,
        };
      })
      .addMatcher(isFulfilledRequest, (state, action) => {
        state.requests[getOriginalActionType(action.type)] = {
          status: RequestStatus.Fulfilled,
        };
      })
      .addMatcher(isRejectedRequest, (state, action) => {
        state.requests[getOriginalActionType(action.type)] = {
          status: RequestStatus.Rejected,
          error: action.payload,
        };
      }),
});

export const { setDisplayMode } = slice.actions;

export const reducer: Reducer<ReducerState, AnyAction> = slice.reducer;
| grafana/grafana | public/app/features/plugins/admin/state/reducer.ts | TypeScript | agpl-3.0 | 4,001 |
# -*- coding: utf-8 -*-
#
# SPDX-FileCopyrightText: 2013-2021 Agora Voting SL <contact@nvotes.com>
#
# SPDX-License-Identifier: AGPL-3.0-only
#
import pickle
import base64
import json
import re
from datetime import datetime
from flask import Blueprint, request, make_response, abort
from frestq.utils import loads, dumps
from frestq.tasks import SimpleTask, TaskError
from frestq.app import app, db
from models import Election, Authority, QueryQueue
from create_election.performer_jobs import check_election_data
from taskqueue import queue_task, apply_task, dequeue_task
# Flask blueprint under which every public REST endpoint is registered.
public_api = Blueprint('public_api', __name__)


def error(status, message=""):
    '''
    Build an error response with the given HTTP status. The body is a JSON
    object carrying the message, or empty when no message is supplied.
    '''
    data = json.dumps(dict(message=message)) if message else ""
    return make_response(data, status)
@public_api.route('/dequeue', methods=['GET'])
def dequeue():
    '''
    GET /dequeue

    Pops and processes the next task waiting in the queue. Always answers
    202 Accepted; on failure the body carries the error description.
    '''
    try:
        dequeue_task()
    except Exception as e:
        # Bug fix: BaseException.message does not exist in Python 3 (it was
        # deprecated in 2.6 and removed in 3.0), so the previous `e.message`
        # raised AttributeError while handling the original error.
        return make_response(dumps(dict(status=str(e))), 202)
    return make_response(dumps(dict(status="ok")), 202)
@public_api.route('/election', methods=['POST'])
def post_election():
    '''
    POST /election

    Creates an election, with the given input data. This involves communicating
    with the different election authorities to generate the joint public key.

    Example request:
    POST /election
    {
        "id": 1110,
        "title": "Votación de candidatos",
        "description": "Selecciona los documentos político, ético y organizativo con los que Podemos",
        "director": "wadobo-auth1",
        "authorities": "openkratio-authority",
        "layout": "pcandidates-election",
        "presentation": {
            "share_text": "lo que sea",
            "theme": "foo",
            "urls": [
                {
                    "title": "",
                    "url": ""
                }
            ],
            "theme_css": "whatever"
        },
        "end_date": "2013-12-09T18:17:14.457000",
        "start_date": "2013-12-06T18:17:14.457000",
        "questions": [
            {
                "description": "",
                "layout": "pcandidates-election",
                "max": 1,
                "min": 0,
                "num_winners": 1,
                "title": "Secretaría General",
                "randomize_answer_order": true,
                "tally_type": "plurality-at-large",
                "answer_total_votes_percentage": "over-total-valid-votes",
                "answers": [
                    {
                        "id": 0,
                        "category": "Equipo de Enfermeras",
                        "details": "",
                        "sort_order": 1,
                        "urls": [
                            {
                                "title": "",
                                "url": ""
                            }
                        ],
                        "text": "Fulanita de tal",
                    }
                ]
            }
        ],
        "authorities": [
            {
                "name": "Asociación Sugus GNU/Linux",
                "orchestra_url": "https://sugus.eii.us.es/orchestra",
                "ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
            },
            {
                "name": "Agora Ciudadana",
                "orchestra_url": "https://agoravoting.com:6874/orchestra",
                "ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
            },
            {
                "name": "Wadobo Labs",
                "orchestra_url": "https://wadobo.com:6874/orchestra",
                "ssl_cert": "-----BEGIN CERTIFICATE-----\nMIIFATCCA+mgAwIBAgIQAOli4NZQEWpKZeYX25jjwDANBgkqhkiG9w0BAQUFADBz\n8YOltJ6QfO7jNHU9jh/AxeiRf6MibZn6fvBHvFCrVBvDD43M0gdhMkVEDVNkPaak\nC7AHA/waXZ2EwW57Chr2hlZWAkwkFvsWxNt9BgJAJJt4CIVhN/iau/SaXD0l0t1N\nT0ye54QPYl38Eumvc439Yd1CeVS/HYbP0ISIfpNkkFA5TiQdoA==\n-----END CERTIFICATE-----"
            }
        ]
    }

    On success, response is empty with status 202 Accepted and returns something
    like:

    {
        "task_id": "ba83ee09-aa83-1901-bb11-e645b52fc558",
    }

    When the election finally gets processed, the callback_url is called with a
    POST containing the protInfo.xml file generated jointly by each
    authority, following this example response:

    {
        "status": "finished",
        "reference": {
            "election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
            "action": "POST /election"
        },
        "session_data": [{
            "session_id": "deadbeef-03fa-4890-aa83-2fc558e645b5",
            "publickey": ["<pubkey codified in hexadecimal>"]
        }]
    }

    Note that this protInfo.xml will contain the election public key, but
    also some other information. In particular, it's worth noting that
    the http and hint servers' urls for each authority could change later,
    if election-orchestra needs it.

    If there was an error, then the callback will be called following this
    example format:

    {
        "status": "error",
        "reference": {
            "session_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
            "action": "POST /election"
        },
        "data": {
            "message": "error message"
        }
    }
    '''
    # Parse the request body; silent=True yields None on malformed JSON
    # instead of raising.
    data = request.get_json(force=True, silent=True)
    # Serialize the payload for the queue as base64-encoded pickle.
    # NOTE(review): make sure only trusted workers unpickle this queue —
    # unpickling untrusted bytes can execute arbitrary code.
    d = base64.b64encode(pickle.dumps(data)).decode('utf-8')
    queueid = queue_task(task='election', data=d)
    return make_response(dumps(dict(queue_id=queueid)), 202)
@public_api.route('/tally', methods=['POST'])
def post_tally():
    '''
    POST /tally

    Tallies an election, with the given input data. This involves communicating
    with the different election authorities to do the tally.

    Example request:
    POST /tally
    {
        "election_id": 111,
        "callback_url": "https://127.0.0.1:5000/public_api/receive_tally",
        "votes_url": "https://127.0.0.1:5000/public_data/vota4/encrypted_ciphertexts",
        "votes_hash": "ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk"
    }

    On success, response is empty with status 202 Accepted and returns something
    like:

    {
        "task_id": "ba83ee09-aa83-1901-bb11-e645b52fc558",
    }

    When the election finally gets processed, the callback_url is called with POST
    similar to the following example:

    {
        "status": "finished",
        "reference": {
            "election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
            "action": "POST /tally"
        },
        "data": {
            "votes_url": "https://127.0.0.1:5000/public_data/vota4/tally.tar.bz2",
            "votes_hash": "ni:///sha-256;f4OxZX_x_FO5LcGBSKHWXfwtSx-j1ncoSt3SABJtkGk"
        }
    }

    If there was an error, then the callback will be called following this
    example format:

    {
        "status": "error",
        "reference": {
            "election_id": "d9e5ee09-03fa-4890-aa83-2fc558e645b5",
            "action": "POST /tally"
        },
        "data": {
            "message": "error message"
        }
    }
    '''
    # first of all, parse input data; silent=True yields None on malformed
    # JSON instead of raising.
    data = request.get_json(force=True, silent=True)
    # Serialize the payload for the queue as base64-encoded pickle.
    # NOTE(review): make sure only trusted workers unpickle this queue —
    # unpickling untrusted bytes can execute arbitrary code.
    d = base64.b64encode(pickle.dumps(data)).decode('utf-8')
    queueid = queue_task(task='tally', data=d)
    return make_response(dumps(dict(queue_id=queueid)), 202)
@public_api.route('/receive_election', methods=['POST'])
def receive_election():
    '''
    Test endpoint used to check that election callbacks are correctly sent.
    '''
    payload = request.get_json(force=True, silent=True)
    print("ATTENTION received election callback: ")
    print(payload)
    return make_response("", 202)
@public_api.route('/receive_tally', methods=['POST'])
def receive_tally():
    '''
    Test endpoint used to check that tally callbacks are correctly sent.
    '''
    payload = request.get_json(force=True, silent=True)
    print("ATTENTION received tally callback: ")
    print(payload)
    return make_response("", 202)
| agoravoting/election-orchestra | public_api.py | Python | agpl-3.0 | 8,209 |
class ReportsController < ApplicationController
  skip_load_and_authorize_resource

  # GET /reports/expenses
  #
  # Renders the expense report. When both 'by_type' and 'by_group' params
  # are present the report is computed (HTML paginated, XLSX downloaded);
  # otherwise only the empty filter form is shown.
  def expenses
    @filter = params[:filter]
    if (@type = params[:by_type]) && (@group = params[:by_group])
      # Scope the report to what the current user is allowed to see.
      @expenses = ExpenseReport.by(@type, @group).accessible_by(current_ability)
      if @filter
        # Apply each non-blank filter entry as a named scope on the relation.
        # NOTE(review): the filter keys come straight from params and are
        # dispatched via #send — confirm they are restricted upstream,
        # otherwise arbitrary methods can be invoked on the relation.
        @filter.each { |k,v| @expenses = @expenses.send(k, v) unless v.blank? }
      end
      @expenses = @expenses.order("sum_amount desc")
      respond_to do |format|
        format.html {
          init_form
          @expenses = @expenses.page(params[:page] || 1).per(20)
        }
        format.xlsx { render :xlsx => "expenses", :disposition => "attachment", :filename => "expenses.xlsx" }
      end
    else
      respond_to do |format|
        format.html { init_form }
        # An XLSX export without report parameters makes no sense: go back to HTML.
        format.xlsx { redirect_to expenses_report_path(:format => :html) }
      end
    end
  end

  protected

  # Populates the collections the filter form needs (select options for
  # report types, groupings, request/reimbursement states and countries).
  def init_form
    @by_type_options = %w(estimated approved total authorized)
    @by_group_options = ExpenseReport.groups.map(&:to_s)
    #@events = Event.order(:name)
    @request_states = Request.state_machines[:state].states.map {|s| [ s.value, s.human_name] }
    @reimbursement_states = Reimbursement.state_machines[:state].states.map {|s| [ s.value, s.human_name] }
    @countries = I18n.t(:countries).map {|k,v| [k.to_s,v]}.sort_by(&:last)
  end

  # Breadcrumb shown on every reports page.
  def set_breadcrumbs
    @breadcrumbs = [{:label => :breadcrumb_reports}]
  end
end
| karthiksenthil/travel-support-program | app/controllers/reports_controller.rb | Ruby | agpl-3.0 | 1,429 |
/*
* Claudia Project
* http://claudia.morfeo-project.org
*
* (C) Copyright 2010 Telefonica Investigacion y Desarrollo
* S.A.Unipersonal (Telefonica I+D)
*
* See CREDITS file for info about members and contributors.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the Affero GNU General Public License (AGPL) as
* published by the Free Software Foundation; either version 3 of the License,
* or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the Affero GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* If you want to use this software an plan to distribute a
* proprietary application in any way, and you are not licensing and
* distributing your source code under AGPL, you probably need to
* purchase a commercial license of the product. Please contact
* claudia-support@lists.morfeo-project.org for more information.
*/
package com.telefonica.claudia.smi.deployment;
import java.io.IOException;
import java.io.StringReader;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.log4j.Logger;
import org.restlet.Context;
import org.restlet.data.MediaType;
import org.restlet.data.Request;
import org.restlet.data.Response;
import org.restlet.data.Status;
import org.restlet.resource.DomRepresentation;
import org.restlet.resource.Representation;
import org.restlet.resource.Resource;
import org.restlet.resource.ResourceException;
import org.restlet.resource.Variant;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import com.telefonica.claudia.smi.Main;
import com.telefonica.claudia.smi.URICreation;
/**
 * REST resource exposing a single deployed service (vApp) identified by the
 * org/vdc/vapp triple in the request URL. Supports GET (service description
 * as XML) and DELETE (undeploy).
 */
public class ServiceItemResource extends Resource {

    private static Logger log = Logger.getLogger("com.telefonica.claudia.smi.ServiceItemResource");

    // URL path segments; together they form the service's fully qualified
    // name (FQN) used when talking to the deployment driver.
    String vdcId;
    String vappId;
    String orgId;

    /**
     * Extracts the identifiers from the request URL. The resource is only
     * available (and modifiable) when all three identifiers are present.
     */
    public ServiceItemResource(Context context, Request request, Response response) {
        super(context, request, response);

        this.vappId = (String) getRequest().getAttributes().get("vapp-id");
        this.vdcId = (String) getRequest().getAttributes().get("vdc-id");
        this.orgId = (String) getRequest().getAttributes().get("org-id");

        // Get the item directly from the "persistence layer".
        if (this.orgId != null && this.vdcId != null && this.vappId != null) {
            // Define the supported variant.
            getVariants().add(new Variant(MediaType.TEXT_XML));
            // By default a resource cannot be updated.
            setModifiable(true);
        } else {
            // This resource is not available.
            setAvailable(false);
        }
    }

    /**
     * Handles GET requests: fetches the service description from the
     * deployment driver, expands the @HOSTNAME macro and returns the result
     * as a TEXT_XML representation. Answers 500 on any failure.
     */
    @Override
    public Representation represent(Variant variant) throws ResourceException {
        // Generate the right representation according to its media type.
        if (MediaType.TEXT_XML.equals(variant.getMediaType())) {
            try {
                DeploymentDriver actualDriver = (DeploymentDriver) getContext().getAttributes().get(DeploymentApplication.ATTR_PLUGIN_DEPLOYMENT);

                String serviceInfo = actualDriver.getService(URICreation.getFQN(orgId, vdcId, vappId));

                // Bug fix: the null check must run before the macro
                // substitution below. Previously replace() was invoked first,
                // so a null response from the SM threw NullPointerException
                // and this branch was unreachable.
                if (serviceInfo == null) {
                    log.error("Null response from the SM.");
                    getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
                    return null;
                }

                // Substitute the macros in the description
                serviceInfo = serviceInfo.replace("@HOSTNAME", "http://" + Main.serverHost + ":" + Main.serverPort);

                // NOTE(review): the parser is created with default settings,
                // so external entities are not disabled — confirm the SM
                // response is trusted, otherwise this is XXE-prone.
                DocumentBuilderFactory dbf =
                    DocumentBuilderFactory.newInstance();
                DocumentBuilder db = dbf.newDocumentBuilder();
                InputSource is = new InputSource();
                is.setCharacterStream(new StringReader(serviceInfo));

                Document doc = db.parse(is);

                DomRepresentation representation = new DomRepresentation(
                        MediaType.TEXT_XML, doc);

                log.info("Data returned for service " + URICreation.getFQN(orgId, vdcId, vappId) + ": \n\n" + serviceInfo);

                // Returns the XML representation of this document.
                return representation;
            } catch (IOException e) {
                log.error("Time out waiting for the Lifecycle Controller.");
                getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
                return null;
            } catch (SAXException e) {
                log.error("Retrieved data was not in XML format: " + e.getMessage());
                getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
                return null;
            } catch (ParserConfigurationException e) {
                log.error("Error trying to configure parser.");
                getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
                return null;
            }
        }
        return null;
    }

    /**
     * Handles DELETE requests: asks the deployment driver to undeploy the
     * service identified by the FQN. Answers 204 on success, 500 on timeout.
     */
    @Override
    public void removeRepresentations() throws ResourceException {
        DeploymentDriver actualDriver = (DeploymentDriver) getContext().getAttributes().get(DeploymentApplication.ATTR_PLUGIN_DEPLOYMENT);

        try {
            actualDriver.undeploy(URICreation.getFQN(orgId, vdcId, vappId));
        } catch (IOException e) {
            log.error("Time out waiting for the Lifecycle Controller.");
            getResponse().setStatus(Status.SERVER_ERROR_INTERNAL);
            return;
        }

        // Tells the client that the request has been successfully fulfilled.
        getResponse().setStatus(Status.SUCCESS_NO_CONTENT);
    }
}
| StratusLab/claudia | tcloud-server/src/main/java/com/telefonica/claudia/smi/deployment/ServiceItemResource.java | Java | agpl-3.0 | 6,138 |
// Copyright (c) 2012-present The upper.io/db authors. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package db
import (
"reflect"
"time"
)
// Comparison defines methods for representing comparison operators in a
// portable way across databases.
type Comparison interface {
	// Operator identifies which comparison this value represents.
	Operator() ComparisonOperator

	// Value returns the right-hand side operand of the comparison.
	Value() interface{}
}

// ComparisonOperator is a type we use to label comparison operators.
type ComparisonOperator uint8

// Comparison operators.
//
// NOTE: the values are assigned with iota, so the declaration order below is
// part of the type's meaning — do not reorder or insert entries in the middle.
const (
	ComparisonOperatorNone ComparisonOperator = iota
	ComparisonOperatorEqual
	ComparisonOperatorNotEqual
	ComparisonOperatorLessThan
	ComparisonOperatorGreaterThan
	ComparisonOperatorLessThanOrEqualTo
	ComparisonOperatorGreaterThanOrEqualTo
	ComparisonOperatorBetween
	ComparisonOperatorNotBetween
	ComparisonOperatorIn
	ComparisonOperatorNotIn
	ComparisonOperatorIs
	ComparisonOperatorIsNot
	ComparisonOperatorLike
	ComparisonOperatorNotLike
	ComparisonOperatorRegExp
	ComparisonOperatorNotRegExp
	ComparisonOperatorAfter
	ComparisonOperatorBefore
	ComparisonOperatorOnOrAfter
	ComparisonOperatorOnOrBefore
)
// dbComparisonOperator is the concrete Comparison implementation produced by
// the package-level constructor helpers (Eq, Gt, In, Op, ...).
type dbComparisonOperator struct {
	t  ComparisonOperator
	op string
	v  interface{}
}

// CustomOperator returns the raw operator string supplied via Op.
func (d *dbComparisonOperator) CustomOperator() string { return d.op }

// Operator identifies which comparison this value represents.
func (d *dbComparisonOperator) Operator() ComparisonOperator { return d.t }

// Value returns the right-hand side operand of the comparison.
func (d *dbComparisonOperator) Value() interface{} { return d.v }
// Gte indicates whether the reference is greater than or equal to the given
// argument.
func Gte(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorGreaterThanOrEqualTo, v: v}
	return &cmp
}

// Lte indicates whether the reference is less than or equal to the given
// argument.
func Lte(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorLessThanOrEqualTo, v: v}
	return &cmp
}

// Eq indicates whether the constraint is equal to the given argument.
func Eq(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorEqual, v: v}
	return &cmp
}

// NotEq indicates whether the constraint is not equal to the given argument.
func NotEq(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorNotEqual, v: v}
	return &cmp
}

// Gt indicates whether the constraint is greater than the given argument.
func Gt(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorGreaterThan, v: v}
	return &cmp
}

// Lt indicates whether the constraint is less than the given argument.
func Lt(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorLessThan, v: v}
	return &cmp
}

// In indicates whether the argument is part of the reference.
func In(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorIn, v: toInterfaceArray(v)}
	return &cmp
}

// NotIn indicates whether the argument is not part of the reference.
func NotIn(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorNotIn, v: toInterfaceArray(v)}
	return &cmp
}

// After indicates whether the reference is after the given time.
func After(t time.Time) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorGreaterThan, v: t}
	return &cmp
}

// Before indicates whether the reference is before the given time.
func Before(t time.Time) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorLessThan, v: t}
	return &cmp
}

// OnOrAfter indicates whether the reference is after or equal to the given
// time value.
func OnOrAfter(t time.Time) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorGreaterThanOrEqualTo, v: t}
	return &cmp
}

// OnOrBefore indicates whether the reference is before or equal to the given
// time value.
func OnOrBefore(t time.Time) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorLessThanOrEqualTo, v: t}
	return &cmp
}

// Between indicates whether the reference is contained between the two given
// values.
func Between(a interface{}, b interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorBetween, v: []interface{}{a, b}}
	return &cmp
}

// NotBetween indicates whether the reference is not contained between the two
// given values.
func NotBetween(a interface{}, b interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorNotBetween, v: []interface{}{a, b}}
	return &cmp
}

// Is indicates whether the reference is nil, true or false.
func Is(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorIs, v: v}
	return &cmp
}

// IsNot indicates whether the reference is not nil, true nor false.
func IsNot(v interface{}) Comparison {
	cmp := dbComparisonOperator{t: ComparisonOperatorIsNot, v: v}
	return &cmp
}

// IsNull indicates whether the reference is a NULL value.
func IsNull() Comparison {
	return Is(nil)
}

// IsNotNull indicates whether the reference is not a NULL value.
func IsNotNull() Comparison {
	return IsNot(nil)
}
/*
// IsDistinctFrom indicates whether the reference is different from
// the given value, including NULL values.
func IsDistinctFrom(v interface{}) Comparison {
return &dbComparisonOperator{
t: ComparisonOperatorIsDistinctFrom,
v: v,
}
}
// IsNotDistinctFrom indicates whether the reference is not different from the
// given value, including NULL values.
func IsNotDistinctFrom(v interface{}) Comparison {
return &dbComparisonOperator{
t: ComparisonOperatorIsNotDistinctFrom,
v: v,
}
}
*/
// Like indicates whether the reference matches the wildcard value.
func Like(v string) Comparison {
return &dbComparisonOperator{
t: ComparisonOperatorLike,
v: v,
}
}
// NotLike indicates whether the reference does not match the wildcard value.
func NotLike(v string) Comparison {
return &dbComparisonOperator{
t: ComparisonOperatorNotLike,
v: v,
}
}
/*
// ILike indicates whether the reference matches the wildcard value (case
// insensitive).
func ILike(v string) Comparison {
return &dbComparisonOperator{
t: ComparisonOperatorILike,
v: v,
}
}
// NotILike indicates whether the reference does not match the wildcard value
// (case insensitive).
func NotILike(v string) Comparison {
return &dbComparisonOperator{
t: ComparisonOperatorNotILike,
v: v,
}
}
*/
// RegExp indicates whether the reference matches the regexp pattern.
func RegExp(v string) Comparison {
	cmp := &dbComparisonOperator{
		t: ComparisonOperatorRegExp,
		v: v,
	}
	return cmp
}
// NotRegExp indicates whether the reference does not match the regexp
// pattern.
func NotRegExp(v string) Comparison {
	cmp := &dbComparisonOperator{
		t: ComparisonOperatorNotRegExp,
		v: v,
	}
	return cmp
}
// Op represents a custom comparison operator against the reference; the
// operator keyword is supplied verbatim by the caller.
func Op(customOperator string, v interface{}) Comparison {
	cmp := &dbComparisonOperator{
		op: customOperator,
		t:  ComparisonOperatorNone,
		v:  v,
	}
	return cmp
}
// toInterfaceArray normalises an arbitrary value into a flat []interface{}:
// pointers are dereferenced (recursively), slices are expanded element by
// element, and any other value is wrapped in a single-element slice.
// Note: reflect.Value.Type panics on an untyped nil argument, exactly as in
// the previous implementation.
func toInterfaceArray(v interface{}) []interface{} {
	value := reflect.ValueOf(v)
	switch value.Type().Kind() {
	case reflect.Ptr:
		// Unwrap the pointer and normalise whatever it points at.
		return toInterfaceArray(value.Elem().Interface())
	case reflect.Slice:
		// Expand every slice element into the generic argument list.
		out := make([]interface{}, value.Len())
		for i := range out {
			out[i] = value.Index(i).Interface()
		}
		return out
	}
	// Scalar (or anything else): wrap as a single argument.
	return []interface{}{v}
}

// Compile-time assertion that dbComparisonOperator implements Comparison.
var _ Comparison = &dbComparisonOperator{}
| admpub/nging | vendor/github.com/webx-top/db/comparison.go | GO | agpl-3.0 | 8,541 |
<?php
/**
* plentymarkets shopware connector
* Copyright © 2013-2014 plentymarkets GmbH
*
* According to our dual licensing model, this program can be used either
* under the terms of the GNU Affero General Public License, version 3,
* or under a proprietary license.
*
* The texts of the GNU Affero General Public License, supplemented by an additional
* permission, and of our proprietary license can be found
* in the LICENSE file you have received along with this program.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* "plentymarkets" is a registered trademark of plentymarkets GmbH.
* "shopware" is a registered trademark of shopware AG.
* The licensing of the program under the AGPLv3 does not imply a
* trademark license. Therefore any rights, titles and interests in the
* above trademarks remain entirely with the trademark owners.
*
* @copyright Copyright (c) 2014, plentymarkets GmbH (http://www.plentymarkets.com)
* @author Daniel Bächtle <daniel.baechtle@plentymarkets.com>
*/
/**
* I am a generated class and am required for communicating with plentymarkets.
*/
class PlentySoapObject_DeliveryRow
{
	/**
	 * Identifier of the order row this delivery row belongs to.
	 *
	 * @var int
	 */
	public $OrderRowID;

	/**
	 * Delivered quantity for the referenced order row.
	 *
	 * @var float
	 */
	public $Quantity;
}
| naturdrogerie/plentymarkets-shopware-connector | Components/Soap/Models/PlentySoapObject/DeliveryRow.php | PHP | agpl-3.0 | 1,450 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the boolean ``drive_auth`` column
    (default ``False``) to the ``user_project`` table."""

    def forwards(self, orm):
        """Apply the migration: create ``user_project.drive_auth``."""
        # Adding field 'UserProject.drive_auth'
        db.add_column(u'user_project', 'drive_auth',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

    def backwards(self, orm):
        """Revert the migration: drop ``user_project.drive_auth``."""
        # Deleting field 'UserProject.drive_auth'
        db.delete_column(u'user_project', 'drive_auth')

    # Frozen ORM snapshot used by South to reconstruct the model state at the
    # time of this migration (auto-generated -- do not edit by hand).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'home.category': {
            'Meta': {'object_name': 'Category', 'db_table': "u'category'"},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
        },
        'projects.project': {
            'Meta': {'object_name': 'Project', 'db_table': "u'project'"},
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['home.Category']", 'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'image_original_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'licence': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
            'tags': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'type_field': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True', 'db_column': "'type'", 'blank': 'True'})
        },
        'projects.projectpart': {
            'Meta': {'object_name': 'ProjectPart', 'db_table': "u'project_part'"},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
            'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projectpart_created_user'", 'to': "orm['auth.User']"}),
            'drive_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
            'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'projectpart_modified_user'", 'null': 'True', 'to': "orm['auth.User']"}),
            'order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']"}),
            'project_part': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.ProjectPart']", 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'projects.userproject': {
            'Meta': {'object_name': 'UserProject', 'db_table': "u'user_project'"},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 2, 27, 0, 0)', 'null': 'True', 'blank': 'True'}),
            'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'userproject_created_user'", 'to': "orm['auth.User']"}),
            'drive_auth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'modified_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'userproject_modified_user'", 'null': 'True', 'to': "orm['auth.User']"}),
            'permission': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '255'}),
            'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']", 'db_column': "'project_id'"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }
complete_apps = ['projects'] | taikoa/wevolver-server | wevolve/projects/migrations/0006_auto__add_field_userproject_drive_auth.py | Python | agpl-3.0 | 8,173 |
// --- ace module "ace/mode/jsx" (minified vendor code; formatting preserved) ---
// Defines the JSX editor Mode: wires the JSX highlight rules, matching-brace
// outdent, C-style typing behaviour and C-style code folding, and implements
// line-comment ("//"), block-comment ("/* */") and auto-indent support.
ace.define("ace/mode/jsx", ["require", "exports", "module", "ace/lib/oop", "ace/mode/text", "ace/tokenizer", "ace/mode/jsx_highlight_rules", "ace/mode/matching_brace_outdent", "ace/mode/behaviour/cstyle", "ace/mode/folding/cstyle"], function (e, t, n) {
    function l() {
        this.HighlightRules = o, this.$outdent = new u, this.$behaviour = new a, this.foldingRules = new f
    }

    var r = e("../lib/oop"), i = e("./text").Mode, s = e("../tokenizer").Tokenizer, o = e("./jsx_highlight_rules").JsxHighlightRules, u = e("./matching_brace_outdent").MatchingBraceOutdent, a = e("./behaviour/cstyle").CstyleBehaviour, f = e("./folding/cstyle").FoldMode;
    r.inherits(l, i), function () {
        // Indent one level further after a line ending in an opening bracket,
        // unless the line ends inside a comment.
        this.lineCommentStart = "//", this.blockComment = {start: "/*", end: "*/"}, this.getNextLineIndent = function (e, t, n) {
            var r = this.$getIndent(t), i = this.getTokenizer().getLineTokens(t, e), s = i.tokens;
            if (s.length && s[s.length - 1].type == "comment")return r;
            if (e == "start") {
                var o = t.match(/^.*[\{\(\[]\s*$/);
                o && (r += n)
            }
            return r
        }, this.checkOutdent = function (e, t, n) {
            return this.$outdent.checkOutdent(t, n)
        }, this.autoOutdent = function (e, t, n) {
            this.$outdent.autoOutdent(t, n)
        }, this.$id = "ace/mode/jsx"
    }.call(l.prototype), t.Mode = l
// --- ace module "ace/mode/jsx_highlight_rules" (minified vendor code) ---
// Tokenizer rules for JSX: keyword/constant word lists, string/regexp/number
// literals, comments (incl. embedded doc-comment rules) and operators.
}), ace.define("ace/mode/jsx_highlight_rules", ["require", "exports", "module", "ace/lib/oop", "ace/lib/lang", "ace/mode/doc_comment_highlight_rules", "ace/mode/text_highlight_rules"], function (e, t, n) {
    var r = e("../lib/oop"), i = e("../lib/lang"), s = e("./doc_comment_highlight_rules").DocCommentHighlightRules, o = e("./text_highlight_rules").TextHighlightRules, u = function () {
        var e = i.arrayToMap("break|do|instanceof|typeof|case|else|new|var|catch|finally|return|void|continue|for|switch|default|while|function|this|if|throw|delete|in|try|class|extends|super|import|from|into|implements|interface|static|mixin|override|abstract|final|number|int|string|boolean|variant|log|assert".split("|")), t = i.arrayToMap("null|true|false|NaN|Infinity|__FILE__|__LINE__|undefined".split("|")), n = i.arrayToMap("debugger|with|const|export|let|private|public|yield|protected|extern|native|as|operator|__fake__|__readonly__".split("|")), r = "[a-zA-Z_][a-zA-Z0-9_]*\\b";
        this.$rules = {start: [
            {token: "comment", regex: "\\/\\/.*$"},
            s.getStartRule("doc-start"),
            {token: "comment", regex: "\\/\\*", next: "comment"},
            {token: "string.regexp", regex: "[/](?:(?:\\[(?:\\\\]|[^\\]])+\\])|(?:\\\\/|[^\\]/]))*[/]\\w*\\s*(?=[).,;]|$)"},
            {token: "string", regex: '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]'},
            {token: "string", regex: "['](?:(?:\\\\.)|(?:[^'\\\\]))*?[']"},
            {token: "constant.numeric", regex: "0[xX][0-9a-fA-F]+\\b"},
            {token: "constant.numeric", regex: "[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"},
            {token: "constant.language.boolean", regex: "(?:true|false)\\b"},
            {token: ["storage.type", "text", "entity.name.function"], regex: "(function)(\\s+)(" + r + ")"},
            {token: function (r) {
                return r == "this" ? "variable.language" : r == "function" ? "storage.type" : e.hasOwnProperty(r) || n.hasOwnProperty(r) ? "keyword" : t.hasOwnProperty(r) ? "constant.language" : /^_?[A-Z][a-zA-Z0-9_]*$/.test(r) ? "language.support.class" : "identifier"
            }, regex: r},
            {token: "keyword.operator", regex: "!|%|&|\\*|\\-\\-|\\-|\\+\\+|\\+|~|==|=|!=|<=|>=|<<=|>>=|>>>=|<>|<|>|!|&&|\\|\\||\\?\\:|\\*=|%=|\\+=|\\-=|&=|\\^=|\\b(?:in|instanceof|new|delete|typeof|void)"},
            {token: "punctuation.operator", regex: "\\?|\\:|\\,|\\;|\\."},
            {token: "paren.lparen", regex: "[[({<]"},
            {token: "paren.rparen", regex: "[\\])}>]"},
            {token: "text", regex: "\\s+"}
        ], comment: [
            {token: "comment", regex: ".*?\\*\\/", next: "start"},
            {token: "comment", regex: ".+"}
        ]}, this.embedRules(s, "doc-", [s.getEndRule("start")])
    };
    r.inherits(u, o), t.JsxHighlightRules = u
// --- ace module "ace/mode/doc_comment_highlight_rules" (minified vendor code) ---
// Highlight rules for "/** ... */" doc comments: @tags, TODO markers, plus
// the start/end rules other modes use to embed doc-comment highlighting.
}), ace.define("ace/mode/doc_comment_highlight_rules", ["require", "exports", "module", "ace/lib/oop", "ace/mode/text_highlight_rules"], function (e, t, n) {
    var r = e("../lib/oop"), i = e("./text_highlight_rules").TextHighlightRules, s = function () {
        this.$rules = {start: [
            {token: "comment.doc.tag", regex: "@[\\w\\d_]+"},
            {token: "comment.doc.tag", regex: "\\bTODO\\b"},
            {defaultToken: "comment.doc"}
        ]}
    };
    r.inherits(s, i), s.getStartRule = function (e) {
        return{token: "comment.doc", regex: "\\/\\*(?=\\*)", next: e}
    }, s.getEndRule = function (e) {
        return{token: "comment.doc", regex: "\\*\\/", next: e}
    }, t.DocCommentHighlightRules = s
// --- ace module "ace/mode/matching_brace_outdent" (minified vendor code) ---
// When the user types "}" on an otherwise-blank line, re-indents that line
// to match the indentation of the line holding the matching opening brace.
}), ace.define("ace/mode/matching_brace_outdent", ["require", "exports", "module", "ace/range"], function (e, t, n) {
    var r = e("../range").Range, i = function () {
    };
    (function () {
        this.checkOutdent = function (e, t) {
            return/^\s+$/.test(e) ? /^\s*\}/.test(t) : !1
        }, this.autoOutdent = function (e, t) {
            var n = e.getLine(t), i = n.match(/^(\s*\})/);
            if (!i)return 0;
            var s = i[1].length, o = e.findMatchingBracket({row: t, column: s});
            if (!o || o.row == t)return 0;
            var u = this.$getIndent(e.getLine(o.row));
            e.replace(new r(t, 0, t, s - 1), u)
        }, this.$getIndent = function (e) {
            return e.match(/^\s*/)[0]
        }
    }).call(i.prototype), t.MatchingBraceOutdent = i
// --- ace module "ace/mode/behaviour/cstyle" (minified vendor code) ---
// Typing behaviours for C-style languages: auto-inserts/removes the matching
// partner for braces, parens, brackets and quotes, tracks what was
// auto-inserted (per multi-select session) so typing the closing character
// "passes over" it instead of duplicating, and auto-indents between braces
// on newline.
}), ace.define("ace/mode/behaviour/cstyle", ["require", "exports", "module", "ace/lib/oop", "ace/mode/behaviour", "ace/token_iterator", "ace/lib/lang"], function (e, t, n) {
    var r = e("../../lib/oop"), i = e("../behaviour").Behaviour, s = e("../../token_iterator").TokenIterator, o = e("../../lib/lang"), u = ["text", "paren.rparen", "punctuation.operator"], a = ["text", "paren.rparen", "punctuation.operator", "comment"], f, l = {}, c = function (e) {
        var t = -1;
        e.multiSelect && (t = e.selection.id, l.rangeCount != e.multiSelect.rangeCount && (l = {rangeCount: e.multiSelect.rangeCount}));
        if (l[t])return f = l[t];
        f = l[t] = {autoInsertedBrackets: 0, autoInsertedRow: -1, autoInsertedLineEnd: "", maybeInsertedBrackets: 0, maybeInsertedRow: -1, maybeInsertedLineStart: "", maybeInsertedLineEnd: ""}
    }, h = function () {
        this.add("braces", "insertion", function (e, t, n, r, i) {
            var s = n.getCursorPosition(), u = r.doc.getLine(s.row);
            if (i == "{") {
                c(n);
                var a = n.getSelectionRange(), l = r.doc.getTextRange(a);
                if (l !== "" && l !== "{" && n.getWrapBehavioursEnabled())return{text: "{" + l + "}", selection: !1};
                if (h.isSaneInsertion(n, r))return/[\]\}\)]/.test(u[s.column]) || n.inMultiSelectMode ? (h.recordAutoInsert(n, r, "}"), {text: "{}", selection: [1, 1]}) : (h.recordMaybeInsert(n, r, "{"), {text: "{", selection: [1, 1]})
            } else if (i == "}") {
                c(n);
                var p = u.substring(s.column, s.column + 1);
                if (p == "}") {
                    var d = r.$findOpeningBracket("}", {column: s.column + 1, row: s.row});
                    if (d !== null && h.isAutoInsertedClosing(s, u, i))return h.popAutoInsertedClosing(), {text: "", selection: [1, 1]}
                }
            } else {
                if (i == "\n" || i == "\r\n") {
                    c(n);
                    var v = "";
                    h.isMaybeInsertedClosing(s, u) && (v = o.stringRepeat("}", f.maybeInsertedBrackets), h.clearMaybeInsertedClosing());
                    var p = u.substring(s.column, s.column + 1);
                    if (p === "}") {
                        var m = r.findMatchingBracket({row: s.row, column: s.column + 1}, "}");
                        if (!m)return null;
                        var g = this.$getIndent(r.getLine(m.row))
                    } else {
                        if (!v) {
                            h.clearMaybeInsertedClosing();
                            return
                        }
                        var g = this.$getIndent(u)
                    }
                    var y = g + r.getTabString();
                    return{text: "\n" + y + "\n" + g + v, selection: [1, y.length, 1, y.length]}
                }
                h.clearMaybeInsertedClosing()
            }
        }), this.add("braces", "deletion", function (e, t, n, r, i) {
            var s = r.doc.getTextRange(i);
            if (!i.isMultiLine() && s == "{") {
                c(n);
                var o = r.doc.getLine(i.start.row), u = o.substring(i.end.column, i.end.column + 1);
                if (u == "}")return i.end.column++, i;
                f.maybeInsertedBrackets--
            }
        }), this.add("parens", "insertion", function (e, t, n, r, i) {
            if (i == "(") {
                c(n);
                var s = n.getSelectionRange(), o = r.doc.getTextRange(s);
                if (o !== "" && n.getWrapBehavioursEnabled())return{text: "(" + o + ")", selection: !1};
                if (h.isSaneInsertion(n, r))return h.recordAutoInsert(n, r, ")"), {text: "()", selection: [1, 1]}
            } else if (i == ")") {
                c(n);
                var u = n.getCursorPosition(), a = r.doc.getLine(u.row), f = a.substring(u.column, u.column + 1);
                if (f == ")") {
                    var l = r.$findOpeningBracket(")", {column: u.column + 1, row: u.row});
                    if (l !== null && h.isAutoInsertedClosing(u, a, i))return h.popAutoInsertedClosing(), {text: "", selection: [1, 1]}
                }
            }
        }), this.add("parens", "deletion", function (e, t, n, r, i) {
            var s = r.doc.getTextRange(i);
            if (!i.isMultiLine() && s == "(") {
                c(n);
                var o = r.doc.getLine(i.start.row), u = o.substring(i.start.column + 1, i.start.column + 2);
                if (u == ")")return i.end.column++, i
            }
        }), this.add("brackets", "insertion", function (e, t, n, r, i) {
            if (i == "[") {
                c(n);
                var s = n.getSelectionRange(), o = r.doc.getTextRange(s);
                if (o !== "" && n.getWrapBehavioursEnabled())return{text: "[" + o + "]", selection: !1};
                if (h.isSaneInsertion(n, r))return h.recordAutoInsert(n, r, "]"), {text: "[]", selection: [1, 1]}
            } else if (i == "]") {
                c(n);
                var u = n.getCursorPosition(), a = r.doc.getLine(u.row), f = a.substring(u.column, u.column + 1);
                if (f == "]") {
                    var l = r.$findOpeningBracket("]", {column: u.column + 1, row: u.row});
                    if (l !== null && h.isAutoInsertedClosing(u, a, i))return h.popAutoInsertedClosing(), {text: "", selection: [1, 1]}
                }
            }
        }), this.add("brackets", "deletion", function (e, t, n, r, i) {
            var s = r.doc.getTextRange(i);
            if (!i.isMultiLine() && s == "[") {
                c(n);
                var o = r.doc.getLine(i.start.row), u = o.substring(i.start.column + 1, i.start.column + 2);
                if (u == "]")return i.end.column++, i
            }
        }), this.add("string_dquotes", "insertion", function (e, t, n, r, i) {
            if (i == '"' || i == "'") {
                c(n);
                var s = i, o = n.getSelectionRange(), u = r.doc.getTextRange(o);
                if (u !== "" && u !== "'" && u != '"' && n.getWrapBehavioursEnabled())return{text: s + u + s, selection: !1};
                var a = n.getCursorPosition(), f = r.doc.getLine(a.row), l = f.substring(a.column - 1, a.column);
                if (l == "\\")return null;
                var p = r.getTokens(o.start.row), d = 0, v, m = -1;
                for (var g = 0; g < p.length; g++) {
                    v = p[g], v.type == "string" ? m = -1 : m < 0 && (m = v.value.indexOf(s));
                    if (v.value.length + d > o.start.column)break;
                    d += p[g].value.length
                }
                if (!v || m < 0 && v.type !== "comment" && (v.type !== "string" || o.start.column !== v.value.length + d - 1 && v.value.lastIndexOf(s) === v.value.length - 1)) {
                    if (!h.isSaneInsertion(n, r))return;
                    return{text: s + s, selection: [1, 1]}
                }
                if (v && v.type === "string") {
                    var y = f.substring(a.column, a.column + 1);
                    if (y == s)return{text: "", selection: [1, 1]}
                }
            }
        }), this.add("string_dquotes", "deletion", function (e, t, n, r, i) {
            var s = r.doc.getTextRange(i);
            if (!i.isMultiLine() && (s == '"' || s == "'")) {
                c(n);
                var o = r.doc.getLine(i.start.row), u = o.substring(i.start.column + 1, i.start.column + 2);
                if (u == s)return i.end.column++, i
            }
        })
    };
    // Static helpers: validity check for auto-insertion at the cursor, and the
    // bookkeeping for auto-/maybe-inserted closing characters.
    h.isSaneInsertion = function (e, t) {
        var n = e.getCursorPosition(), r = new s(t, n.row, n.column);
        if (!this.$matchTokenType(r.getCurrentToken() || "text", u)) {
            var i = new s(t, n.row, n.column + 1);
            if (!this.$matchTokenType(i.getCurrentToken() || "text", u))return!1
        }
        return r.stepForward(), r.getCurrentTokenRow() !== n.row || this.$matchTokenType(r.getCurrentToken() || "text", a)
    }, h.$matchTokenType = function (e, t) {
        return t.indexOf(e.type || e) > -1
    }, h.recordAutoInsert = function (e, t, n) {
        var r = e.getCursorPosition(), i = t.doc.getLine(r.row);
        this.isAutoInsertedClosing(r, i, f.autoInsertedLineEnd[0]) || (f.autoInsertedBrackets = 0), f.autoInsertedRow = r.row, f.autoInsertedLineEnd = n + i.substr(r.column), f.autoInsertedBrackets++
    }, h.recordMaybeInsert = function (e, t, n) {
        var r = e.getCursorPosition(), i = t.doc.getLine(r.row);
        this.isMaybeInsertedClosing(r, i) || (f.maybeInsertedBrackets = 0), f.maybeInsertedRow = r.row, f.maybeInsertedLineStart = i.substr(0, r.column) + n, f.maybeInsertedLineEnd = i.substr(r.column), f.maybeInsertedBrackets++
    }, h.isAutoInsertedClosing = function (e, t, n) {
        return f.autoInsertedBrackets > 0 && e.row === f.autoInsertedRow && n === f.autoInsertedLineEnd[0] && t.substr(e.column) === f.autoInsertedLineEnd
    }, h.isMaybeInsertedClosing = function (e, t) {
        return f.maybeInsertedBrackets > 0 && e.row === f.maybeInsertedRow && t.substr(e.column) === f.maybeInsertedLineEnd && t.substr(0, e.column) == f.maybeInsertedLineStart
    }, h.popAutoInsertedClosing = function () {
        f.autoInsertedLineEnd = f.autoInsertedLineEnd.substr(1), f.autoInsertedBrackets--
    }, h.clearMaybeInsertedClosing = function () {
        f && (f.maybeInsertedBrackets = 0, f.maybeInsertedRow = -1)
    }, r.inherits(h, i), t.CstyleBehaviour = h
// --- ace module "ace/mode/folding/cstyle" (minified vendor code) ---
// Code folding for C-style languages: folds brace/bracket blocks and block
// comments; getSectionRange extends a fold over an indentation-based section.
}), ace.define("ace/mode/folding/cstyle", ["require", "exports", "module", "ace/lib/oop", "ace/range", "ace/mode/folding/fold_mode"], function (e, t, n) {
    var r = e("../../lib/oop"), i = e("../../range").Range, s = e("./fold_mode").FoldMode, o = t.FoldMode = function (e) {
        e && (this.foldingStartMarker = new RegExp(this.foldingStartMarker.source.replace(/\|[^|]*?$/, "|" + e.start)), this.foldingStopMarker = new RegExp(this.foldingStopMarker.source.replace(/\|[^|]*?$/, "|" + e.end)))
    };
    r.inherits(o, s), function () {
        this.foldingStartMarker = /(\{|\[)[^\}\]]*$|^\s*(\/\*)/, this.foldingStopMarker = /^[^\[\{]*(\}|\])|^[\s\*]*(\*\/)/, this.getFoldWidgetRange = function (e, t, n, r) {
            var i = e.getLine(n), s = i.match(this.foldingStartMarker);
            if (s) {
                var o = s.index;
                if (s[1])return this.openingBracketBlock(e, s[1], n, o);
                var u = e.getCommentFoldRange(n, o + s[0].length, 1);
                return u && !u.isMultiLine() && (r ? u = this.getSectionRange(e, n) : t != "all" && (u = null)), u
            }
            if (t === "markbegin")return;
            var s = i.match(this.foldingStopMarker);
            if (s) {
                var o = s.index + s[0].length;
                return s[1] ? this.closingBracketBlock(e, s[1], n, o) : e.getCommentFoldRange(n, o, -1)
            }
        }, this.getSectionRange = function (e, t) {
            var n = e.getLine(t), r = n.search(/\S/), s = t, o = n.length;
            t += 1;
            var u = t, a = e.getLength();
            while (++t < a) {
                n = e.getLine(t);
                var f = n.search(/\S/);
                if (f === -1)continue;
                if (r > f)break;
                var l = this.getFoldWidgetRange(e, "all", t);
                if (l) {
                    if (l.start.row <= s)break;
                    if (l.isMultiLine())t = l.end.row; else if (r == f)break
                }
                u = t
            }
            return new i(s, o, u, e.getLine(u).length)
        }
    }.call(o.prototype)
}) | ahammer/MySaasa | server/src/main/webapp/ace/src-min-noconflict/mode-jsx.js | JavaScript | agpl-3.0 | 17,411 |
<?php
/**
 * Minimal data holder for one Ext JS tree node; instances are serialized
 * to JSON (via G::json_encode) and consumed by the client-side tree.
 */
class TreeNode {
    public $text = "";
    public $id = "";
    public $iconCls = "";
    public $leaf = true;
    public $draggable = false;
    public $href = "#";
    public $hrefTarget = "";

    /**
     * @param string $id         Node identifier.
     * @param string $text       Display label.
     * @param string $iconCls    CSS class used for the node icon.
     * @param bool   $leaf       True when the node has no children.
     * @param bool   $draggable  Whether the node can be dragged.
     * @param string $href       Link opened when the node is clicked.
     * @param string $hrefTarget Target frame/window for the link.
     */
    function __construct($id,$text,$iconCls,$leaf,$draggable,$href,$hrefTarget) {
        $this->id = $id;
        $this->text = $text;
        $this->iconCls = $iconCls;
        $this->leaf = $leaf;
        $this->draggable = $draggable;
        $this->href = $href;
        $this->hrefTarget = $hrefTarget;
    }

    /** @return string JSON representation of this node. */
    function toJson() {
        return G::json_encode($this);
    }
}
/**
 * Tree node that can hold child nodes; children are included when the node
 * is serialized to JSON for the Ext JS tree.
 */
class ExtJsTreeNode extends TreeNode {
    public $children = array();

    /**
     * Append a child node (or any serializable object) to this node.
     *
     * @param mixed $object Child node to attach.
     */
    function add($object) {
        $this->children[] = $object;
    }

    /** @return string JSON representation of this node including children. */
    function toJson() {
        return G::json_encode($this);
    }
}
// Builds the JSON array consumed by the "case to revise" Ext JS tree: one
// branch of dynaform steps and one branch of input-document steps for the
// case given by $_GET['APP_UID'] / $_GET['DEL_INDEX'].
// NOTE(review): $_GET values are interpolated into the generated hrefs and
// echoed without escaping or validation -- potential XSS / parameter
// injection; confirm whether upstream sanitizes request parameters.
G::LoadClass('case');
$o = new Cases();
$PRO_UID = $_SESSION['PROCESS'];
$treeArray = array();
//if (isset($_GET['action'])&&$_GET['action']=='test'){
echo "[";
// dynaforms assemble
$extTreeDynaforms = new ExtJsTreeNode("node-dynaforms", G::loadtranslation('ID_DYNAFORMS'), "", false, false, "", "");
$i = 0;
$APP_UID = $_GET['APP_UID'];
$DEL_INDEX = $_GET['DEL_INDEX'];
$steps = $o->getAllDynaformsStepsToRevise($_GET['APP_UID']);
$steps->next();
// One leaf node per dynaform step, linking to the step-revision page.
while ($step = $steps->getRow()) {
    require_once 'classes/model/Dynaform.php';
    $od = new Dynaform();
    $dynaformF = $od->Load($step['STEP_UID_OBJ']);
    $n = $step['STEP_POSITION'];
    $TITLE = " - ".$dynaformF['DYN_TITLE'];
    $DYN_UID = $dynaformF['DYN_UID'];
    $href = "cases_StepToRevise?type=DYNAFORM&ex=$i&PRO_UID=$PRO_UID&DYN_UID=$DYN_UID&APP_UID=$APP_UID&position=".$step['STEP_POSITION']."&DEL_INDEX=$DEL_INDEX";
    $extTreeDynaforms->add(new TreeNode($DYN_UID,$TITLE,"datasource",true,false,$href,"openCaseFrame"));
    $i++;
    $steps->next();
}
echo $extTreeDynaforms->toJson();
// end the dynaforms tree menu
echo ",";
// assembling the input documents tree menu
$extTreeInputDocs = new ExtJsTreeNode("node-input-documents", G::loadtranslation('ID_REQUEST_DOCUMENTS'), "", false, false, "", "");
$i = 0;
$APP_UID = $_GET['APP_UID'];
$DEL_INDEX = $_GET['DEL_INDEX'];
$steps = $o->getAllInputsStepsToRevise($_GET['APP_UID']);
$steps->next();
// One leaf node per input-document step, linking to the input-revision page.
while ($step = $steps->getRow()) {
    require_once 'classes/model/InputDocument.php';
    $od = new InputDocument();
    $IDF = $od->Load($step['STEP_UID_OBJ']);
    $n = $step['STEP_POSITION'];
    $TITLE = " - ".$IDF['INP_DOC_TITLE'];
    $INP_DOC_UID = $IDF['INP_DOC_UID'];
    $href = "cases_StepToReviseInputs?type=INPUT_DOCUMENT&ex=$i&PRO_UID=$PRO_UID&INP_DOC_UID=$INP_DOC_UID&APP_UID=$APP_UID&position=".$step['STEP_POSITION']."&DEL_INDEX=$DEL_INDEX";
    $extTreeInputDocs->add(new TreeNode($INP_DOC_UID,$TITLE,"datasource",true,false,$href,"openCaseFrame"));
    $i++;
    $steps->next();
}
echo $extTreeInputDocs->toJson();
echo "]"; | carbonadona/pm | workflow/engine/methods/cases/casesToReviseTreeContent.php | PHP | agpl-3.0 | 2,947 |
<?php
namespace CloudDataService\NHSNumberValidation\Test;
use CloudDataService\NHSNumberValidation\Test\TestCase;
use CloudDataService\NHSNumberValidation\Validator;
/**
 * Unit tests for the NHS-number Validator.
 *
 * Fixes over the previous version:
 *  - expected-exception tests now call $this->fail() when the exception is
 *    NOT thrown; previously they passed silently in that case.
 *  - tests for valid numbers now fail explicitly when an unexpected
 *    exception is thrown, instead of `return false` (which PHPUnit ignores,
 *    silently passing the test).
 *  - the "too short" input is passed as the string '0123'; the previous
 *    int literal 0123 was octal (value 83), not the digits intended.
 */
class ValidatorTest extends TestCase
{
    public function testInit()
    {
        $validator = new Validator;
        $this->assertTrue(is_object($validator));
    }

    public function testHasFunction()
    {
        $validator = new Validator;
        $this->assertTrue(
            method_exists($validator, 'validate'),
            'Class does not have method validate'
        );
    }

    public function testValidateNoNumber()
    {
        $validator = new Validator;
        try {
            $validator->validate();
            $this->fail('Expected InvalidNumberException for missing number');
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            // expected
        }
    }

    public function testValidateNumberTooShort()
    {
        $validator = new Validator;
        try {
            // Passed as a string: the former int literal 0123 was octal (83).
            $validator->validate('0123');
            $this->fail('Expected InvalidNumberException for a too-short number');
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            // expected
        }
    }

    public function testValidateNumberTooLong()
    {
        $validator = new Validator;
        try {
            $validator->validate('01234567890');
            $this->fail('Expected InvalidNumberException for a too-long number');
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            // expected
        }
    }

    public function testValidNumber()
    {
        $validator = new Validator;
        try {
            $valid = $validator->validate(4010232137);
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            $this->fail('Valid number was rejected: ' . $e->getMessage());
        }
        $this->assertEquals(4010232137, $valid);
    }

    public function testValidNumberWithBadChecksum()
    {
        $validator = new Validator;
        try {
            $validator->validate(4010232138);
            $this->fail('Expected InvalidNumberException for a bad checksum');
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            // expected
        }
    }

    public function testValidNumberWithBadChecksumEqualsTen()
    {
        $validator = new Validator;
        try {
            $validator->validate(1000000010);
            $this->fail('Expected InvalidNumberException when check digit computes to 10');
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            // expected
        }
    }

    public function testValidNumberWithBadChecksumEqualsEleven()
    {
        $validator = new Validator;
        try {
            $validator->validate(1000000060);
            $this->fail('Expected InvalidNumberException when check digit computes to 11');
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            // expected
        }
    }

    public function testValidNumberWithSpaces()
    {
        $validator = new Validator;
        try {
            $valid = $validator->validate("401 023 2137");
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            $this->fail('Valid number with spaces was rejected: ' . $e->getMessage());
        }
        $this->assertEquals(4010232137, $valid);
    }

    public function testValidNumberWithNonAlphaNumeric()
    {
        $validator = new Validator;
        try {
            $valid = $validator->validate("401-023-2137");
        } catch (\CloudDataService\NHSNumberValidation\InvalidNumberException $e) {
            $this->fail('Valid number with separators was rejected: ' . $e->getMessage());
        }
        $this->assertEquals(4010232137, $valid);
    }
}
| CloudDataService/nhs-number-validation | tests/ValidatorTest.php | PHP | agpl-3.0 | 3,767 |
# Copyright (C) 2021 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
apartment controller manages the apartment objects that are known in the system
"""
import logging
from gateway.events import EsafeEvent, EventError
from gateway.exceptions import ItemDoesNotExistException, StateException
from gateway.models import Apartment, Database
from gateway.mappers import ApartmentMapper
from gateway.dto import ApartmentDTO
from gateway.pubsub import PubSub
from ioc import INJECTED, Inject, Injectable, Singleton
if False: # MyPy
from typing import List, Optional, Dict, Any
from esafe.rebus import RebusController
logger = logging.getLogger(__name__)
@Injectable.named('apartment_controller')
@Singleton
class ApartmentController(object):
    def __init__(self):
        """Create the controller; the rebus controller is attached later
        via :meth:`set_rebus_controller`."""
        self.rebus_controller = None  # type: Optional[RebusController]
    def set_rebus_controller(self, rebus_controller):
        # type: (Optional[RebusController]) -> None
        """Attach the rebus controller used to verify mailbox/doorbell devices."""
        self.rebus_controller = rebus_controller
    @staticmethod
    @Inject
    def send_config_change_event(msg, error=EventError.ErrorTypes.NO_ERROR, pubsub=INJECTED):
        # type: (str, Dict[str, Any], PubSub) -> None
        # NOTE(review): the second element of the type comment (Dict[str, Any])
        # does not match `error`, whose default is EventError.ErrorTypes.NO_ERROR
        # -- confirm the intended type and correct the annotation.
        """Publish an apartment CONFIG_CHANGE event on the eSafe CONFIG topic."""
        event = EsafeEvent(EsafeEvent.Types.CONFIG_CHANGE, {'type': 'apartment', 'msg': msg}, error=error)
        pubsub.publish_esafe_event(PubSub.EsafeTopics.CONFIG, event)
@staticmethod
def load_apartment(apartment_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.id == apartment_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartment_by_mailbox_id(mailbox_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.mailbox_rebus_id == mailbox_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartment_by_doorbell_id(doorbell_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.doorbell_rebus_id == doorbell_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartments():
# type: () -> List[ApartmentDTO]
apartments = []
for apartment_orm in Apartment.select():
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
apartments.append(apartment_dto)
return apartments
@staticmethod
def get_apartment_count():
# type: () -> int
return Apartment.select().count()
@staticmethod
def apartment_id_exists(apartment_id):
# type: (int) -> bool
apartments = ApartmentController.load_apartments()
ids = (x.id for x in apartments)
return apartment_id in ids
def _check_rebus_ids(self, apartment_dto):
if self.rebus_controller is None:
raise StateException("Cannot save apartment: Rebus Controller is None")
if 'doorbell_rebus_id' in apartment_dto.loaded_fields and \
not self.rebus_controller.verify_device_exists(apartment_dto.doorbell_rebus_id):
raise ItemDoesNotExistException("Cannot save apartment: doorbell ({}) does not exists".format(apartment_dto.doorbell_rebus_id))
if 'mailbox_rebus_id' in apartment_dto.loaded_fields and \
not self.rebus_controller.verify_device_exists(apartment_dto.mailbox_rebus_id):
raise ItemDoesNotExistException("Cannot save apartment: mailbox ({}) does not exists".format(apartment_dto.mailbox_rebus_id))
def save_apartment(self, apartment_dto, send_event=True):
# type: (ApartmentDTO, bool) -> ApartmentDTO
self._check_rebus_ids(apartment_dto)
apartment_orm = ApartmentMapper.dto_to_orm(apartment_dto)
apartment_orm.save()
if send_event:
ApartmentController.send_config_change_event('save')
return ApartmentMapper.orm_to_dto(apartment_orm)
def save_apartments(self, apartments_dto):
apartments_dtos = []
for apartment in apartments_dto:
apartment_saved = self.save_apartment(apartment, send_event=False)
apartments_dtos.append(apartment_saved)
self.send_config_change_event('save')
return apartments_dtos
def update_apartment(self, apartment_dto, send_event=True):
# type: (ApartmentDTO, bool) -> ApartmentDTO
self._check_rebus_ids(apartment_dto)
if 'id' not in apartment_dto.loaded_fields or apartment_dto.id is None:
raise RuntimeError('cannot update an apartment without the id being set')
try:
apartment_orm = Apartment.get_by_id(apartment_dto.id)
loaded_apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
for field in apartment_dto.loaded_fields:
if field == 'id':
continue
if hasattr(apartment_dto, field):
setattr(loaded_apartment_dto, field, getattr(apartment_dto, field))
apartment_orm = ApartmentMapper.dto_to_orm(loaded_apartment_dto)
apartment_orm.save()
if send_event:
ApartmentController.send_config_change_event('update')
return ApartmentMapper.orm_to_dto(apartment_orm)
except Exception as e:
raise RuntimeError('Could not update the user: {}'.format(e))
def update_apartments(self, apartment_dtos):
# type: (List[ApartmentDTO]) -> Optional[List[ApartmentDTO]]
apartments = []
with Database.get_db().transaction() as transaction:
try:
# First clear all the rebus fields in order to be able to swap 2 fields
for apartment in apartment_dtos:
apartment_orm = Apartment.get_by_id(apartment.id) # type: Apartment
if 'mailbox_rebus_id' in apartment.loaded_fields:
apartment_orm.mailbox_rebus_id = None
if 'doorbell_rebus_id' in apartment.loaded_fields:
apartment_orm.doorbell_rebus_id = None
apartment_orm.save()
# Then check if there is already an apartment with an mailbox or doorbell rebus id that is passed
# This is needed for when an doorbell or mailbox gets assigned to another apartment. Then the first assignment needs to be deleted.
for apartment_orm in Apartment.select():
for apartment_dto in apartment_dtos:
if apartment_orm.mailbox_rebus_id == apartment_dto.mailbox_rebus_id and apartment_orm.mailbox_rebus_id is not None:
apartment_orm.mailbox_rebus_id = None
apartment_orm.save()
if apartment_orm.doorbell_rebus_id == apartment_dto.doorbell_rebus_id and apartment_orm.doorbell_rebus_id is not None:
apartment_orm.doorbell_rebus_id = None
apartment_orm.save()
for apartment in apartment_dtos:
updated = self.update_apartment(apartment, send_event=False)
if updated is not None:
apartments.append(updated)
self.send_config_change_event('update')
except Exception as ex:
logger.error('Could not update apartments: {}: {}'.format(type(ex).__name__, ex))
transaction.rollback()
return None
return apartments
@staticmethod
def delete_apartment(apartment_dto):
# type: (ApartmentDTO) -> None
if "id" in apartment_dto.loaded_fields and apartment_dto.id is not None:
Apartment.delete_by_id(apartment_dto.id)
elif "name" in apartment_dto.loaded_fields:
# First check if there is only one:
if Apartment.select().where(Apartment.name == apartment_dto.name).count() <= 1:
Apartment.delete().where(Apartment.name == apartment_dto.name).execute()
ApartmentController.send_config_change_event('delete')
else:
raise RuntimeError('More than one apartment with the given name: {}'.format(apartment_dto.name))
else:
raise RuntimeError('Could not find an apartment with the name {} to delete'.format(apartment_dto.name))
| openmotics/gateway | src/gateway/apartment_controller.py | Python | agpl-3.0 | 9,287 |
<?php
/**
* BaseContact class
*
* @author Carlos Palma <chonwil@gmail.com>
*/
abstract class BaseContact extends ContentDataObject {

	// -------------------------------------------------------
	//  Column accessors
	//
	//  Every method below is a thin wrapper around
	//  ContentDataObject::getColumnValue()/setColumnValue() for a single
	//  database column. All setters return the boolean result of
	//  setColumnValue().
	// -------------------------------------------------------

	// --- identity ---

	/** @return integer value of the 'object_id' column */
	function getObjectId() { return $this->getColumnValue('object_id'); }

	/** @param integer $value @return boolean */
	function setObjectId($value) { return $this->setColumnValue('object_id', $value); }

	// --- personal information ---

	/** @return string value of the 'first_name' column */
	function getFirstName() { return $this->getColumnValue('first_name'); }

	/** @param string $value @return boolean */
	function setFirstName($value) { return $this->setColumnValue('first_name', $value); }

	/** @return string value of the 'surname' column */
	function getSurname() { return $this->getColumnValue('surname'); }

	/** @param string $value @return boolean */
	function setSurname($value) { return $this->setColumnValue('surname', $value); }

	/** @return datetimevalue value of the 'birthday' column */
	function getBirthday() { return $this->getColumnValue('birthday'); }

	/** @param datetimevalue $value @return boolean */
	function setBirthday($value) { return $this->setColumnValue('birthday', $value); }

	// --- company & job ---

	/** @return integer value of the 'company_id' column */
	function getCompanyId() { return $this->getColumnValue('company_id'); }

	/** @param integer $value @return boolean */
	function setCompanyId($value) { return $this->setColumnValue('company_id', $value); }

	/** @return boolean value of the 'is_company' column */
	function getIsCompany() { return $this->getColumnValue('is_company'); }

	/** @param boolean $value @return boolean */
	function setIsCompany($value) { return $this->setColumnValue('is_company', $value); }

	/** @return string value of the 'department' column */
	function getDepartment() { return $this->getColumnValue('department'); }

	/** @param string $value @return boolean */
	function setDepartment($value) { return $this->setColumnValue('department', $value); }

	/** @return string value of the 'job_title' column */
	function getJobTitle() { return $this->getColumnValue('job_title'); }

	/** @param string $value @return boolean */
	function setJobTitle($value) { return $this->setColumnValue('job_title', $value); }

	// --- user account ---

	/** @return boolean value of the 'user_type' column */
	function getUserType() { return $this->getColumnValue('user_type'); }

	/** @param boolean $value @return boolean */
	function setUserType($value) { return $this->setColumnValue('user_type', $value); }

	/** @return float value of the 'timezone' column */
	function getTimezone() { return $this->getColumnValue('timezone'); }

	/** @param float $value @return boolean */
	function setTimezone($value) { return $this->setColumnValue('timezone', $value); }

	/** @return boolean value of the 'is_active_user' column */
	function getIsActiveUser() { return $this->getColumnValue('is_active_user'); }

	/** @param boolean $value @return boolean */
	function setIsActiveUser($value) { return $this->setColumnValue('is_active_user', $value); }

	/** @return string value of the 'token' column */
	function getToken() { return $this->getColumnValue('token'); }

	/** @param string $value @return boolean */
	function setToken($value) { return $this->setColumnValue('token', $value); }

	/** @return string value of the 'salt' column */
	function getSalt() { return $this->getColumnValue('salt'); }

	/** @param string $value @return boolean */
	function setSalt($value) { return $this->setColumnValue('salt', $value); }

	/** @return string value of the 'twister' column */
	function getTwister() { return $this->getColumnValue('twister'); }

	/** @param string $value @return boolean */
	function setTwister($value) { return $this->setColumnValue('twister', $value); }

	/** @return string value of the 'display_name' column */
	function getDisplayName() { return $this->getColumnValue('display_name'); }

	/** @param string $value @return boolean */
	function setDisplayName($value) { return $this->setColumnValue('display_name', $value); }

	/** @return integer value of the 'permission_group_id' column */
	function getPermissionGroupId() { return $this->getColumnValue('permission_group_id'); }

	/** @param integer $value @return boolean */
	function setPermissionGroupId($value) { return $this->setColumnValue('permission_group_id', $value); }

	/** @return string value of the 'username' column */
	function getUsername() { return $this->getColumnValue('username'); }

	/** @param string $value @return boolean */
	function setUsername($value) { return $this->setColumnValue('username', $value); }

	/** @return string value of the 'contact_passwords_id' column */
	function getContactPasswordsId() { return $this->getColumnValue('contact_passwords_id'); }

	/** @param string $value @return boolean */
	function setContactPasswordsId($value) { return $this->setColumnValue('contact_passwords_id', $value); }

	// --- misc profile data ---

	/** @return string value of the 'comments' column */
	function getCommentsField() { return $this->getColumnValue('comments'); }

	/** @param string $value @return boolean */
	function setCommentsField($value) { return $this->setColumnValue('comments', $value); }

	/** @return string value of the 'picture_file' column */
	function getPictureFile() { return $this->getColumnValue('picture_file'); }

	/** @param string $value @return boolean */
	function setPictureFile($value) { return $this->setColumnValue('picture_file', $value); }

	/** @return string value of the 'picture_file_small' column */
	function getPictureFileSmall() { return $this->getColumnValue('picture_file_small'); }

	/** @param string $value @return boolean */
	function setPictureFileSmall($value) { return $this->setColumnValue('picture_file_small', $value); }

	/** @return string value of the 'picture_file_medium' column */
	function getPictureFileMedium() { return $this->getColumnValue('picture_file_medium'); }

	/** @param string $value @return boolean */
	function setPictureFileMedium($value) { return $this->setColumnValue('picture_file_medium', $value); }

	/** @return string value of the 'avatar_file' column */
	function getAvatarFile() { return $this->getColumnValue('avatar_file'); }

	/** @param string $value @return boolean */
	function setAvatarFile($value) { return $this->setColumnValue('avatar_file', $value); }

	// --- activity tracking ---

	/** @return DateTimeValue value of the 'last_login' column */
	function getLastLogin() { return $this->getColumnValue('last_login'); }

	/**
	 * Note: unlike the other setters this one type-hints its argument.
	 * @param DateTimeValue $value @return boolean
	 */
	function setLastLogin(DateTimeValue $value) { return $this->setColumnValue('last_login', $value); }

	/** @return DateTimeValue value of the 'last_visit' column */
	function getLastVisit() { return $this->getColumnValue('last_visit'); }

	/** @param DateTimeValue $value @return boolean */
	function setLastVisit($value) { return $this->setColumnValue('last_visit', $value); }

	/** @return DateTimeValue value of the 'last_activity' column */
	function getLastActivity() { return $this->getColumnValue('last_activity'); }

	/** @param DateTimeValue $value @return boolean */
	function setLastActivity($value) { return $this->setColumnValue('last_activity', $value); }

	// --- account state & links ---

	/** @return integer value of the 'personal_member_id' column */
	function getPersonalMemberId() { return $this->getColumnValue('personal_member_id'); }

	/** @param integer $value @return boolean */
	function setPersonalMemberId($value) { return $this->setColumnValue('personal_member_id', $value); }

	/** @return integer value of the 'disabled' column */
	function getDisabled() { return $this->getColumnValue('disabled'); }

	/** @param integer $value @return boolean */
	function setDisabled($value) { return $this->setColumnValue('disabled', $value); }

	/** @return string value of the 'token_disabled' column */
	function getTokenDisabled() { return $this->getColumnValue('token_disabled'); }

	/** @param string $value @return boolean */
	function setTokenDisabled($value) { return $this->setColumnValue('token_disabled', $value); }

	/** @return integer value of the 'default_billing_id' column */
	function getDefaultBillingId() { return $this->getColumnValue('default_billing_id'); }

	/** @param integer $value @return boolean */
	function setDefaultBillingId($value) { return $this->setColumnValue('default_billing_id', $value); }

	/** @return mixed value of the 'user_timezone_id' column */
	function getUserTimezoneId() { return $this->getColumnValue('user_timezone_id'); }

	/** @param mixed $value @return boolean */
	function setUserTimezoneId($value) { return $this->setColumnValue('user_timezone_id', $value); }

	/**
	 * Lazily instantiates and returns the Contacts manager singleton.
	 *
	 * @return Contacts
	 */
	function manager() {
		if (!($this->manager instanceof Contacts)) {
			$this->manager = Contacts::instance();
		}
		return $this->manager;
	}

} // BaseContact
?> | fengoffice/fengoffice | application/models/contacts/base/BaseContact.class.php | PHP | agpl-3.0 | 15,547 |
<?php
/*********************************************************************************
* SugarCRM Community Edition is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2011 SugarCRM Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo. If the display of the logo is not reasonably feasible for
* technical reasons, the Appropriate Legal Notices must display the words
* "Powered by SugarCRM".
********************************************************************************/
require_once('data/SugarBean.php');
require_once('modules/Contacts/Contact.php');
require_once('include/SubPanel/SubPanelDefinitions.php');
class Bug41738Test extends Sugar_PHPUnit_Framework_TestCase
{
    protected $bean;

    /**
     * Prepares SugarCRM global state and the Opportunity bean under test.
     */
    public function setUp()
    {
        global $moduleList, $beanList, $beanFiles;
        require('include/modules.php');

        $GLOBALS['current_user'] = SugarTestUserUtilities::createAnonymousUser();
        $GLOBALS['modListHeader'] = query_module_access_list($GLOBALS['current_user']);
        $GLOBALS['modules_exempt_from_availability_check']['Calls'] = 'Calls';
        $GLOBALS['modules_exempt_from_availability_check']['Meetings'] = 'Meetings';

        $this->bean = new Opportunity();
    }

    /**
     * Removes the anonymous user and globals created in setUp().
     */
    public function tearDown()
    {
        SugarTestUserUtilities::removeAllCreatedAnonymousUsers();
        unset($GLOBALS['current_user']);
    }

    /**
     * Builds one collection_list entry for the history collection subpanel.
     *
     * @param string $module  source module of the member subpanel
     * @param string $queryFn name of the free function that supplies the query
     * @return array
     */
    private function makeHistoryEntry($module, $queryFn)
    {
        return array(
            'module' => $module,
            'subpanel_name' => 'ForHistory',
            'get_subpanel_data' => 'function:' . $queryFn,
            'generate_select' => false,
            'function_parameters' => array(
                'bean_id' => $this->bean->id,
                'import_function_file' => __FILE__,
            ),
        );
    }

    /**
     * Regression test for bug 41738: a collection subpanel whose members use
     * custom query-provider functions must still produce an executable
     * union query.
     */
    public function testSubpanelCollectionWithSpecificQuery()
    {
        $definition = array(
            'order' => 20,
            'sort_order' => 'desc',
            'sort_by' => 'date_entered',
            'type' => 'collection',
            'subpanel_name' => 'history', // not backed by a physical subpanel file
            'top_buttons' => array(),
            'collection_list' => array(
                'meetings' => $this->makeHistoryEntry('Meetings', 'subpanelCollectionWithSpecificQueryMeetings'),
                'tasks' => $this->makeHistoryEntry('Tasks', 'subpanelCollectionWithSpecificQueryTasks'),
            ),
        );

        $panel = new aSubPanel("testpanel", $definition, $this->bean);
        $query = $this->bean->get_union_related_list($this->bean, "", '', "", 0, 5, -1, 0, $panel);

        $result = $this->bean->db->query($query["query"]);
        $this->assertTrue($result != false, "Bad query: {$query['query']}");
    }
}
/**
 * Query provider for the 'meetings' member of the test's history collection.
 *
 * Selects held/not-held meetings whose parent opportunity is linked to the
 * contact given in $params['bean_id']. Column aliases mirror the standard
 * history-subpanel result shape (panel_name, assigned_user_name, ...).
 *
 * NOTE(review): $params['bean_id'] is interpolated directly into the SQL -
 * acceptable in this test fixture where the value is generated locally, but
 * not a pattern to copy for user-supplied input.
 *
 * @param array $params expects key 'bean_id'
 * @return string SQL query
 */
function subpanelCollectionWithSpecificQueryMeetings($params)
{
    $query = "SELECT meetings.id , meetings.name , meetings.status , 0 reply_to_status , ' ' contact_name , ' ' contact_id , ' ' contact_name_owner , ' ' contact_name_mod , meetings.parent_id , meetings.parent_type , meetings.date_modified , jt1.user_name assigned_user_name , jt1.created_by assigned_user_name_owner , 'Users' assigned_user_name_mod, ' ' filename , meetings.assigned_user_id , 'meetings' panel_name
            FROM meetings
            LEFT JOIN users jt1 ON jt1.id= meetings.assigned_user_id AND jt1.deleted=0 AND jt1.deleted=0
            WHERE ( meetings.parent_type = 'Opportunities'
                    AND meetings.deleted=0
                    AND (meetings.status='Held' OR meetings.status='Not Held')
                    AND meetings.parent_id IN(
                        SELECT o.id
                        FROM opportunities o
                        INNER JOIN opportunities_contacts oc on o.id = oc.opportunity_id
                        AND oc.contact_id = '".$params['bean_id']."')
                    )";
    return $query ;
}
/**
 * Query provider for the 'tasks' member of the test's history collection.
 *
 * Selects completed/deferred tasks whose parent opportunity is linked to the
 * contact given in $params['bean_id']. Mirrors
 * subpanelCollectionWithSpecificQueryMeetings() for the Tasks module.
 *
 * NOTE(review): $params['bean_id'] is interpolated directly into the SQL -
 * fine for this locally generated test value, not for untrusted input.
 *
 * @param array $params expects key 'bean_id'
 * @return string SQL query
 */
function subpanelCollectionWithSpecificQueryTasks($params)
{
    $query = "SELECT tasks.id , tasks.name , tasks.status , 0 reply_to_status , ' ' contact_name , ' ' contact_id , ' ' contact_name_owner , ' ' contact_name_mod , tasks.parent_id , tasks.parent_type , tasks.date_modified , jt1.user_name assigned_user_name , jt1.created_by assigned_user_name_owner , 'Users' assigned_user_name_mod, ' ' filename , tasks.assigned_user_id , 'tasks' panel_name
            FROM tasks
            LEFT JOIN users jt1 ON jt1.id= tasks.assigned_user_id AND jt1.deleted=0 AND jt1.deleted=0
            WHERE ( tasks.parent_type = 'Opportunities'
                    AND tasks.deleted=0
                    AND (tasks.status='Completed' OR tasks.status='Deferred')
                    AND tasks.parent_id IN(
                        SELECT o.id
                        FROM opportunities o
                        INNER JOIN opportunities_contacts oc on o.id = oc.opportunity_id
                        AND oc.contact_id = '".$params['bean_id']."')
                    )";
    return $query ;
}
| firstmoversadvantage/sugarcrm | tests/include/SubPanel/Bug41738Test.php | PHP | agpl-3.0 | 6,370 |
<?php
class MetadataPlugin extends KalturaPlugin
{
const PLUGIN_NAME = 'metadata';
const METADATA_FLOW_MANAGER_CLASS = 'kMetadataFlowManager';
const METADATA_COPY_HANDLER_CLASS = 'kMetadataObjectCopiedHandler';
const METADATA_DELETE_HANDLER_CLASS = 'kMetadataObjectDeletedHandler';
const BULK_UPLOAD_COLUMN_PROFILE_ID = 'metadataProfileId';
const BULK_UPLOAD_COLUMN_XML = 'metadataXml';
const BULK_UPLOAD_COLUMN_URL = 'metadataUrl';
const BULK_UPLOAD_COLUMN_FIELD_PREFIX = 'metadataField_';
const BULK_UPLOAD_MULTI_VALUES_DELIMITER = '|,|';
const BULK_UPLOAD_DATE_FORMAT = '%Y-%m-%dT%H:%i:%s';
/**
* @return array<string,string> in the form array[serviceName] = serviceClass
*/
public static function getServicesMap()
{
$map = array(
'metadata' => 'MetadataService',
'metadataProfile' => 'MetadataProfileService',
'metadataBatch' => 'MetadataBatchService',
);
return $map;
}
/**
* @return string - the path to services.ct
*/
public static function getServiceConfig()
{
return realpath(dirname(__FILE__).'/config/metadata.ct');
}
/**
* @return array
*/
public static function getEventConsumers()
{
return array(
self::METADATA_FLOW_MANAGER_CLASS,
self::METADATA_COPY_HANDLER_CLASS,
self::METADATA_DELETE_HANDLER_CLASS,
);
}
/**
* @param KalturaPluginManager::OBJECT_TYPE $objectType
* @param string $enumValue
* @param array $constructorArgs
* @return object
*/
public static function loadObject($objectType, $enumValue, array $constructorArgs = null)
{
if($objectType != KalturaPluginManager::OBJECT_TYPE_SYNCABLE)
return null;
if(!isset($constructorArgs['objectId']))
return null;
$objectId = $constructorArgs['objectId'];
switch($enumValue)
{
case FileSync::FILE_SYNC_OBJECT_TYPE_METADATA:
MetadataPeer::setUseCriteriaFilter ( false );
$object = MetadataPeer::retrieveByPK( $objectId );
MetadataPeer::setUseCriteriaFilter ( true );
return $object;
case FileSync::FILE_SYNC_OBJECT_TYPE_METADATA_PROFILE:
MetadataProfilePeer::setUseCriteriaFilter ( false );
$object = MetadataProfilePeer::retrieveByPK( $objectId );
MetadataProfilePeer::setUseCriteriaFilter ( true );
return $object;
}
return null;
}
/**
* @param array $fields
* @return string
*/
private static function getDateFormatRegex(&$fields = null)
{
$replace = array(
'%Y' => '([1-2][0-9]{3})',
'%m' => '([0-1][0-9])',
'%d' => '([0-3][0-9])',
'%H' => '([0-2][0-9])',
'%i' => '([0-5][0-9])',
'%s' => '([0-5][0-9])',
// '%T' => '([A-Z]{3})',
);
$fields = array();
$arr = null;
// if(!preg_match_all('/%([YmdTHis])/', self::BULK_UPLOAD_DATE_FORMAT, $arr))
if(!preg_match_all('/%([YmdHis])/', self::BULK_UPLOAD_DATE_FORMAT, $arr))
return false;
$fields = $arr[1];
return '/' . str_replace(array_keys($replace), $replace, self::BULK_UPLOAD_DATE_FORMAT) . '/';
}
/**
* @param string $str
* @return int
*/
private static function parseFormatedDate($str)
{
KalturaLog::debug("parseFormatedDate($str)");
if(function_exists('strptime'))
{
$ret = strptime($str, self::BULK_UPLOAD_DATE_FORMAT);
if($ret)
{
KalturaLog::debug("Formated Date [$ret] " . date('Y-m-d\TH:i:s', $ret));
return $ret;
}
}
$fields = null;
$regex = self::getDateFormatRegex($fields);
$values = null;
if(!preg_match($regex, $str, $values))
return null;
$hour = 0;
$minute = 0;
$second = 0;
$month = 0;
$day = 0;
$year = 0;
$is_dst = 0;
foreach($fields as $index => $field)
{
$value = $values[$index + 1];
switch($field)
{
case 'Y':
$year = intval($value);
break;
case 'm':
$month = intval($value);
break;
case 'd':
$day = intval($value);
break;
case 'H':
$hour = intval($value);
break;
case 'i':
$minute = intval($value);
break;
case 's':
$second = intval($value);
break;
// case 'T':
// $date = date_parse($value);
// $hour -= ($date['zone'] / 60);
// break;
}
}
KalturaLog::debug("gmmktime($hour, $minute, $second, $month, $day, $year)");
$ret = gmmktime($hour, $minute, $second, $month, $day, $year);
if($ret)
{
KalturaLog::debug("Formated Date [$ret] " . date('Y-m-d\TH:i:s', $ret));
return $ret;
}
KalturaLog::debug("Formated Date [null]");
return null;
}
/**
* @param string $entryId the new created entry
* @param array $data key => value pairs
*/
public static function handleBulkUploadData($entryId, array $data)
{
KalturaLog::debug("Handle metadata bulk upload data:\n" . print_r($data, true));
if(!isset($data[self::BULK_UPLOAD_COLUMN_PROFILE_ID]))
return;
$metadataProfileId = $data[self::BULK_UPLOAD_COLUMN_PROFILE_ID];
$xmlData = null;
$entry = entryPeer::retrieveByPK($entryId);
if(!$entry)
return;
$metadataProfile = MetadataProfilePeer::retrieveById($metadataProfileId);
if(!$metadataProfile)
{
KalturaLog::info("Metadata profile [$metadataProfileId] not found");
return;
}
if(isset($data[self::BULK_UPLOAD_COLUMN_URL]))
{
try{
$xmlData = file_get_contents($data[self::BULK_UPLOAD_COLUMN_URL]);
KalturaLog::debug("Metadata downloaded [" . $data[self::BULK_UPLOAD_COLUMN_URL] . "]");
}
catch(Exception $e)
{
KalturaLog::err("Download metadata[" . $data[self::BULK_UPLOAD_COLUMN_URL] . "] error: " . $e->getMessage());
$xmlData = null;
}
}
elseif(isset($data[self::BULK_UPLOAD_COLUMN_XML]))
{
$xmlData = $data[self::BULK_UPLOAD_COLUMN_XML];
}
else
{
$metadataProfileFields = array();
MetadataProfileFieldPeer::setUseCriteriaFilter(false);
$tmpMetadataProfileFields = MetadataProfileFieldPeer::retrieveByMetadataProfileId($metadataProfileId);
MetadataProfileFieldPeer::setUseCriteriaFilter(true);
foreach($tmpMetadataProfileFields as $metadataProfileField)
$metadataProfileFields[$metadataProfileField->getKey()] = $metadataProfileField;
KalturaLog::debug("Found fields [" . count($metadataProfileFields) . "] for metadata profile [$metadataProfileId]");
$xml = new DOMDocument();
$dataFound = false;
foreach($data as $key => $value)
{
if(!$value || !strlen($value))
continue;
if(!preg_match('/^' . self::BULK_UPLOAD_COLUMN_FIELD_PREFIX . '(.+)$/', $key, $matches))
continue;
$key = $matches[1];
if(!isset($metadataProfileFields[$key]))
{
KalturaLog::debug("No field found for key[$key]");
continue;
}
$metadataProfileField = $metadataProfileFields[$key];
KalturaLog::debug("Found field [" . $metadataProfileField->getXpath() . "] for value [$value]");
if($metadataProfileField->getType() == MetadataSearchFilter::KMC_FIELD_TYPE_DATE && !is_numeric($value))
{
$value = self::parseFormatedDate($value);
if(!$value || !strlen($value))
continue;
}
$fieldValues = explode(self::BULK_UPLOAD_MULTI_VALUES_DELIMITER, $value);
foreach($fieldValues as $fieldValue)
self::addXpath($xml, $metadataProfileField->getXpath(), $fieldValue);
$dataFound = true;
}
if($dataFound)
{
$xmlData = $xml->saveXML($xml->firstChild);
$xmlData = trim($xmlData, " \n\r\t");
}
}
if(!$xmlData)
return;
$dbMetadata = new Metadata();
$dbMetadata->setPartnerId($entry->getPartnerId());
$dbMetadata->setMetadataProfileId($metadataProfileId);
$dbMetadata->setMetadataProfileVersion($metadataProfile->getVersion());
$dbMetadata->setObjectType(Metadata::TYPE_ENTRY);
$dbMetadata->setObjectId($entryId);
$dbMetadata->setStatus(Metadata::STATUS_INVALID);
$dbMetadata->save();
KalturaLog::debug("Metadata [" . $dbMetadata->getId() . "] saved [$xmlData]");
$key = $dbMetadata->getSyncKey(Metadata::FILE_SYNC_METADATA_DATA);
kFileSyncUtils::file_put_contents($key, $xmlData);
$errorMessage = '';
$status = kMetadataManager::validateMetadata($dbMetadata, $errorMessage);
if($status == Metadata::STATUS_VALID)
{
kMetadataManager::updateSearchIndex($dbMetadata);
}
else
{
$bulkUploadResult = BulkUploadResultPeer::retrieveByEntryId($entryId, $entry->getBulkUploadId());
if($bulkUploadResult)
{
$msg = $bulkUploadResult->getDescription();
if($msg)
$msg .= "\n";
$msg .= $errorMessage;
$bulkUploadResult->setDescription($msg);
$bulkUploadResult->save();
}
}
}
	/**
	 * Appends a value to the XML document at the location described by an XPath
	 * expression of the form /*[local-name()='a']/*[local-name()='b']/...,
	 * creating any missing elements along the way.
	 *
	 * @param DOMDocument $xml document to modify (by-reference is redundant for
	 *        objects in PHP 5+, kept as-is)
	 * @param string $xPath slash-separated path; every non-empty segment must
	 *        match *[local-name()='...'] or the function aborts
	 * @param string $value value given to the leaf element when it is created
	 * @return bool|null false when a segment does not match the expected pattern;
	 *         implicit null on success. NOTE(review): callers appear to ignore
	 *         the return value, so failures are only visible in the log — confirm.
	 */
	protected static function addXpath(DOMDocument &$xml, $xPath, $value)
	{
		KalturaLog::debug("add value [$value] to xPath [$xPath]");
		$xPaths = explode('/', $xPath);
		// Walk the path one segment at a time, reusing nodes that already exist
		// and creating the missing tail. $currentNode is the deepest node reached.
		$currentNode = $xml;
		$currentXPath = '';
		foreach($xPaths as $index => $xPath)
		{
			// A leading '/' yields an empty first segment: that is the document root.
			if(!strlen($xPath))
			{
				KalturaLog::debug("xPath [/] already exists");
				continue;
			}
			$currentXPath .= "/$xPath";
			// Re-created each iteration because the previous iteration may have
			// mutated the document.
			$domXPath = new DOMXPath($xml);
			$nodeList = $domXPath->query($currentXPath);
			if($nodeList && $nodeList->length)
			{
				// Element already present: descend into the first match.
				$currentNode = $nodeList->item(0);
				KalturaLog::debug("xPath [$xPath] already exists");
				continue;
			}
			// Only namespace-agnostic segments (*[local-name()='name']) can be
			// created; anything else aborts the whole insertion.
			if(!preg_match('/\*\[\s*local-name\(\)\s*=\s*\'([^\']+)\'\s*\]/', $xPath, $matches))
			{
				KalturaLog::err("Xpath [$xPath] doesn't match");
				return false;
			}
			$nodeName = $matches[1];
			if($index + 1 == count($xPaths))
			{
				// Leaf segment: create the element carrying the supplied value.
				KalturaLog::debug("Creating node [$nodeName] xPath [$xPath] with value [$value]");
				$valueNode = $xml->createElement($nodeName, $value);
			}
			else
			{
				// Intermediate segment: create an empty container element.
				KalturaLog::debug("Creating node [$nodeName] xPath [$xPath]");
				$valueNode = $xml->createElement($nodeName);
			}
			KalturaLog::debug("Appending node [$nodeName] to current node [$currentNode->localName]");
			$currentNode->appendChild($valueNode);
			$currentNode = $valueNode;
		}
	}
// /**
// * @return array<KalturaAdminConsolePlugin>
// */
// public static function getAdminConsolePages()
// {
// $metadata = new MetadataProfilesAction('Metadata', 'metadata');
// $metadataProfiles = new MetadataProfilesAction('Profiles Management', 'profiles', 'Metadata');
// $metadataObjects = new MetadataObjectsAction('Objects Management', 'objects', 'Metadata');
// return array($metadata, $metadataProfiles, $metadataObjects);
// }
}
| MimocomMedia/kaltura | package/app/app/plugins/metadata/MetadataPlugin.php | PHP | agpl-3.0 | 10,744 |
import React from 'react';
import PropTypes from 'prop-types';
import ManaUsageGraph from './ManaUsageGraph';
/**
 * Renders a healing-per-second graph overlaid with mana usage and mana level,
 * bucketed into fixed-size time frames, via the ManaUsageGraph component.
 */
class HealingDoneGraph extends React.PureComponent {
  static propTypes = {
    start: PropTypes.number.isRequired,
    end: PropTypes.number.isRequired,
    offset: PropTypes.number.isRequired,
    healingBySecond: PropTypes.object.isRequired,
    manaUpdates: PropTypes.array.isRequired,
  };

  /**
   * Buckets per-second healing entries into frames of `interval` seconds.
   * Assumes each healing entry exposes add(regular, absorbed, overheal)
   * returning the merged value — TODO confirm against the healing value class.
   */
  groupHealingBySeconds(healingBySecond, interval) {
    return Object.keys(healingBySecond)
      .reduce((obj, second) => {
        const healing = healingBySecond[second];
        // `second` is a string object-key; the division coerces it to a number.
        const index = Math.floor(second / interval);
        if (obj[index]) {
          // Merge into the frame's existing accumulated healing.
          obj[index] = obj[index].add(healing.regular, healing.absorbed, healing.overheal);
        } else {
          obj[index] = healing;
        }
        return obj;
      }, {});
  }
  render() {
    const { start, end, offset, healingBySecond, manaUpdates } = this.props;
    // TODO: move this to vega-lite window transform
    // e.g. { window: [{op: 'mean', field: 'hps', as: 'hps'}], frame: [-2, 2] }
    const interval = 5; // frame width in seconds
    const healingPerFrame = this.groupHealingBySeconds(healingBySecond, interval);
    // Peak effective HPS across all frames; used to scale the mana series so
    // both can share one y-axis.
    let max = 0;
    Object.keys(healingPerFrame)
      .map(k => healingPerFrame[k])
      .forEach((healingDone) => {
        const current = healingDone.effective;
        if (current > max) {
          max = current;
        }
      });
    max /= interval;
    const manaUsagePerFrame = {
      0: 0,
    };
    const manaLevelPerFrame = {
      0: 1, // assume full mana at pull
    };
    manaUpdates.forEach((item) => {
      const frame = Math.floor((item.timestamp - start) / 1000 / interval);
      // Fraction of the mana pool spent, accumulated per frame.
      manaUsagePerFrame[frame] = (manaUsagePerFrame[frame] || 0) + item.used / item.max;
      // NOTE(review): this keeps the LAST update that falls in the frame, not
      // the lowest value as the original comment claimed — confirm intent.
      manaLevelPerFrame[frame] = item.current / item.max;
    });
    const fightDurationSec = Math.ceil((end - start) / 1000);
    // Build one data point per frame; missing frames are filled with zeros
    // (healing/usage) or null (mana level, carried forward below).
    const labels = [];
    for (let i = 0; i <= fightDurationSec / interval; i += 1) {
      labels.push(Math.ceil(offset/1000) + i * interval);
      healingPerFrame[i] = healingPerFrame[i] !== undefined ? healingPerFrame[i].effective : 0;
      manaUsagePerFrame[i] = manaUsagePerFrame[i] !== undefined ? manaUsagePerFrame[i] : 0;
      manaLevelPerFrame[i] = manaLevelPerFrame[i] !== undefined ? manaLevelPerFrame[i] : null;
    }
    // Carry the last known mana level across frames without updates, scaled to
    // the healing axis (max).
    let lastKnown = null;
    const mana = Object.values(manaLevelPerFrame).map((value, i) => {
      if (value !== null) {
        lastKnown = value;
      }
      return {
        x: labels[i],
        y: lastKnown * max,
      };
    });
    const healing = Object.values(healingPerFrame).map((value, i) => ({ x: labels[i], y: value / interval }));
    const manaUsed = Object.values(manaUsagePerFrame).map((value, i) => ({ x: labels[i], y: value * max }));
    return (
      <div className="graph-container" style={{ marginBottom: 20 }}>
        <ManaUsageGraph
          mana={mana}
          healing={healing}
          manaUsed={manaUsed}
        />
      </div>
    );
  }
}
export default HealingDoneGraph;
| yajinni/WoWAnalyzer | src/parser/shared/modules/resources/mana/ManaUsageChartComponent.js | JavaScript | agpl-3.0 | 3,101 |
/*
* Copyright (c) 2015 - 2016 Memorial Sloan-Kettering Cancer Center.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS
* FOR A PARTICULAR PURPOSE. The software and documentation provided hereunder
* is on an "as is" basis, and Memorial Sloan-Kettering Cancer Center has no
* obligations to provide maintenance, support, updates, enhancements or
* modifications. In no event shall Memorial Sloan-Kettering Cancer Center be
* liable to any party for direct, indirect, special, incidental or
* consequential damages, including lost profits, arising out of the use of this
* software and its documentation, even if Memorial Sloan-Kettering Cancer
* Center has been advised of the possibility of such damage.
*/
/*
* This file is part of cBioPortal.
*
* cBioPortal is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.mskcc.cbio.portal.util;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import org.cbioportal.persistence.GenePanelRepository;
import org.cbioportal.model.GenePanel;
import org.mskcc.cbio.portal.model.GeneticAlterationType;
import org.mskcc.cbio.portal.model.GeneticProfile;
/**
* Genetic Profile Util Class.
*
*/
public class GeneticProfileUtil {

    /**
     * Gets the GeneticProfile with the specified stable profile ID.
     *
     * @param profileId GeneticProfile stable ID.
     * @param profileList List of Genetic Profiles.
     * @return the matching GeneticProfile, or null when none matches.
     */
    public static GeneticProfile getProfile(String profileId,
            ArrayList<GeneticProfile> profileList) {
        for (GeneticProfile profile : profileList) {
            if (profile.getStableId().equals(profileId)) {
                return profile;
            }
        }
        return null;
    }

    /**
     * Returns true if any of the profiles selected by the user refer to mRNA
     * expression outlier profiles.
     *
     * @param geneticProfileIdSet Set of chosen profile IDs.
     * @param profileList List of Genetic Profiles.
     * @return true when at least one selected profile is an mRNA expression
     *         profile whose name contains "outlier" (case-insensitive).
     */
    public static boolean outlierExpressionSelected(HashSet<String> geneticProfileIdSet,
            ArrayList<GeneticProfile> profileList) {
        // Enhanced for-loop replaces the manual Iterator plumbing of the
        // previous implementation; behavior is unchanged.
        for (String geneticProfileId : geneticProfileIdSet) {
            GeneticProfile geneticProfile = getProfile(geneticProfileId, profileList);
            if (geneticProfile == null
                    || geneticProfile.getGeneticAlterationType() != GeneticAlterationType.MRNA_EXPRESSION) {
                continue;
            }
            String profileName = geneticProfile.getProfileName();
            if (profileName != null && profileName.toLowerCase().contains("outlier")) {
                return true;
            }
        }
        return false;
    }

    /**
     * Resolves the internal database ID of a gene panel from its stable ID.
     *
     * @param panelId stable ID of the gene panel.
     * @return internal ID of the gene panel.
     *         NOTE(review): assumes the stable ID exists; get(0) throws
     *         IndexOutOfBoundsException otherwise — confirm callers guarantee this.
     */
    public static int getGenePanelId(String panelId) {
        GenePanelRepository genePanelRepository = SpringUtil.getGenePanelRepository();
        GenePanel genePanel = genePanelRepository.getGenePanelByStableId(panelId).get(0);
        return genePanel.getInternalId();
    }
}
| bihealth/cbioportal | core/src/main/java/org/mskcc/cbio/portal/util/GeneticProfileUtil.java | Java | agpl-3.0 | 3,897 |
<?php
require_once __BASE__.'/model/Storable.php';
/**
 * SMTP account model: persists server credentials, daily quotas and sending
 * statistics, and selects the best account to use for the next delivery.
 */
class AccountSMTP extends Storable
{
    public $id = MYSQL_PRIMARY_KEY;
    public $code = '';
    public $created = MYSQL_DATETIME;
    public $last_edit = MYSQL_DATETIME;
    public $name = '';
    public $host = '';
    public $port = '';
    public $connection = '';
    public $username = '';
    public $password = '';
    public $sender_name = '';
    public $sender_mail = '';
    public $replyTo = '';
    public $max_mail = 0;
    public $ever = ['day', 'week', 'month', 'year', 'onetime'];
    public $max_mail_day = 0;
    public $send = 0;
    public $last_send = MYSQL_DATETIME;
    public $perc = 0.0;
    public $total_send = 0;
    public $active = 1;

    ##

    /**
     * Picks the active SMTP account to use next: the least saturated one
     * (lowest perc), returning as soon as a completely idle account is seen.
     *
     * @return AccountSMTP|null null when no active account exists.
     */
    public static function findServer()
    {
        $servers = self::query(
            [
                'active' => 1,
            ]);

        $use = null;  // fix: avoid an undefined variable when nothing qualifies
        $perc = 110;  // sentinel above any real saturation percentage
        foreach ($servers as $server) {
            if ($server->perc < $perc) { // keep the least saturated account so far
                $use = $server;
                $perc = $server->perc;   // fix: the running minimum was never updated
            }
            if ($server->perc == 0) {    // idle account: no better candidate possible
                return $use;
            }
        }

        return $use;
    }

    ##

    /**
     * Total daily sending capacity, for one account or for all of them.
     *
     * NOTE(review): $account is interpolated into the SQL string; callers must
     * ensure it is a trusted ID — confirm schemadb offers parameter binding.
     *
     * @param string|int $account account ID, or 'all' for every account.
     * @return int|null sum of max_mail_day.
     */
    public static function getMaxMail($account = 'all')
    {
        $append = $account != 'all' ? ' WHERE id = "'.$account.'"' : ' ';
        $sql = 'SELECT SUM(max_mail_day) AS maxmail FROM '.self::table().$append;
        $res = schemadb::execute('row', $sql);
        return $res['maxmail'];
    }

    ##

    /**
     * All-time number of mails sent, for one account or for all of them.
     *
     * @param string|int $account account ID, or 'all' for every account.
     * @return int|null sum of total_send.
     */
    public static function getSenderMail($account = 'all')
    {
        $append = $account != 'all' ? ' WHERE id = "'.$account.'"' : ' ';
        $sql = 'SELECT SUM(total_send) AS mailtotali FROM '.self::table().$append;
        $res = schemadb::execute('row', $sql);
        return $res['mailtotali'];
    }

    ##

    /**
     * Number of mails sent in the current quota window, for one account or all.
     *
     * @param string|int $account account ID, or 'all' for every account.
     * @return int|null sum of send.
     */
    public static function getInviateMail($account = 'all')
    {
        $append = $account != 'all' ? ' WHERE id = "'.$account.'"' : ' ';
        $sql = 'SELECT SUM(send) AS inviate FROM '.self::table().$append;
        $res = schemadb::execute('row', $sql);
        return $res['inviate'];
    }

    ##

    /**
     * Remaining daily capacity (max - sent), for one account or all.
     *
     * @param string|int $account account ID, or 'all' for every account.
     * @return int remaining mails that may still be sent.
     */
    public static function getRemainMail($account = 'all')
    {
        // fix: forward $account instead of always computing the global remainder
        $remain = self::getMaxMail($account) - self::getInviateMail($account);

        return $remain;
    }
}
AccountSMTP::schemadb_update();
| ctlr/MailCtlr | module/config/model/AccountSMTP.php | PHP | agpl-3.0 | 2,329 |
# frozen_string_literal: true
require "spec_helper"
# System spec: editing an initiative through the public front-end form,
# covering who is allowed to update it (author, committee member, admin)
# and who is not (published initiatives, unrelated users).
describe "Edit initiative", type: :system do
  let(:organization) { create(:organization) }
  let(:user) { create(:user, :confirmed, organization: organization) }
  let(:initiative_title) { translated(initiative.title) }
  let(:new_title) { "This is my initiative new title" }
  let!(:initiative_type) { create(:initiatives_type, :online_signature_enabled, organization: organization) }
  let!(:scoped_type) { create(:initiatives_type_scope, type: initiative_type) }
  # A second type/scope exists so the edit form has alternatives to pick from.
  let!(:other_initiative_type) { create(:initiatives_type, organization: organization) }
  let!(:other_scoped_type) { create(:initiatives_type_scope, type: initiative_type) }
  let(:initiative_path) { decidim_initiatives.initiative_path(initiative) }
  let(:edit_initiative_path) { decidim_initiatives.edit_initiative_path(initiative) }

  # Shared happy-path: the current user can open the edit form, change the
  # title and see the updated title rendered.
  shared_examples "manage update" do
    it "can be updated" do
      visit initiative_path
      click_link("Edit", href: edit_initiative_path)

      expect(page).to have_content "EDIT INITIATIVE"

      within "form.edit_initiative" do
        fill_in :initiative_title, with: new_title
        click_button "Update"
      end

      expect(page).to have_content(new_title)
    end
  end

  before do
    switch_to_host(organization.host)
    login_as user, scope: :user
  end

  describe "when user is initiative author" do
    let(:initiative) { create(:initiative, :created, author: user, scoped_type: scoped_type, organization: organization) }

    it_behaves_like "manage update"

    # Once published, even the author loses edit access.
    context "when initiative is published" do
      let(:initiative) { create(:initiative, author: user, scoped_type: scoped_type, organization: organization) }

      it "can't be updated" do
        visit decidim_initiatives.initiative_path(initiative)

        expect(page).not_to have_content "Edit initiative"

        visit edit_initiative_path

        expect(page).to have_content("not authorized")
      end
    end
  end

  describe "when author is a committee member" do
    let(:initiative) { create(:initiative, :created, scoped_type: scoped_type, organization: organization) }

    before do
      create(:initiatives_committee_member, user: user, initiative: initiative)
    end

    it_behaves_like "manage update"
  end

  describe "when user is admin" do
    let(:user) { create(:user, :confirmed, :admin, organization: organization) }
    let(:initiative) { create(:initiative, :created, scoped_type: scoped_type, organization: organization) }

    it_behaves_like "manage update"
  end

  # A user unrelated to the initiative gets neither the link nor direct access.
  describe "when author is not a committee member" do
    let(:initiative) { create(:initiative, :created, scoped_type: scoped_type, organization: organization) }

    it "renders an error" do
      visit decidim_initiatives.initiative_path(initiative)

      expect(page).to have_no_content("Edit initiative")

      visit edit_initiative_path

      expect(page).to have_content("not authorized")
    end
  end
end
| decidim/decidim | decidim-initiatives/spec/system/edit_initiative_spec.rb | Ruby | agpl-3.0 | 2,976 |
from odoo import fields, models
class Job(models.Model):
    """Extend CRM sales teams (``crm.team``) with an interview survey link."""

    _inherit = "crm.team"

    # Survey used as the interview form for this sales team.
    survey_id = fields.Many2one(
        'survey.survey', "Interview Form",
        help="Choose an interview form")

    def action_print_survey(self):
        """Open the print action of the linked survey.

        Delegates to ``survey.survey.action_print_survey``; assumes a survey
        is set on the record -- TODO confirm callers guarantee this.
        """
        return self.survey_id.action_print_survey()
| ingadhoc/sale | crm_survey/models/crm_job.py | Python | agpl-3.0 | 291 |
package com.alessiodp.parties.bukkit.addons.external.skript.expressions;
import ch.njol.skript.classes.Changer;
import ch.njol.skript.doc.Description;
import ch.njol.skript.doc.Examples;
import ch.njol.skript.doc.Name;
import ch.njol.skript.doc.Since;
import ch.njol.skript.expressions.base.SimplePropertyExpression;
import ch.njol.util.coll.CollectionUtils;
import com.alessiodp.parties.api.interfaces.Party;
import org.bukkit.event.Event;
@Name("Party Name")
@Description("Get the name of the given party.")
@Examples({"send \"%name of party with name \"test\"%\"",
"send \"%name of event-party%\""})
@Since("3.0.0")
public class ExprPartyName extends SimplePropertyExpression<Party, String> {
	static {
		// Registers "name of party" as a Skript property expression.
		register(ExprPartyName.class, String.class, "name", "party");
	}

	@Override
	public Class<? extends String> getReturnType() {
		return String.class;
	}

	@Override
	protected String getPropertyName() {
		return "name";
	}

	/**
	 * Converts a party into its name.
	 *
	 * @param party the party to read; never null (supplied by Skript)
	 * @return the party's name
	 */
	@Override
	public String convert(Party party) {
		return party.getName();
	}

	/**
	 * Applies a SET (rename) or DELETE (clear name) change to the party.
	 * Other change modes are ignored.
	 */
	@Override
	public void change(Event e, Object[] delta, Changer.ChangeMode mode) {
		if (delta == null)
			return;
		Party party = getExpr().getSingle(e);
		if (party == null)
			return; // robustness fix: the expression may not resolve to a party for this event
		switch (mode) {
			case SET:
				party.rename((String) delta[0]);
				break;
			case DELETE:
				party.rename(null);
				break;
			default:
				break;
		}
	}

	@Override
	public Class<?>[] acceptChange(final Changer.ChangeMode mode) {
		// Only SET and DELETE are supported, both taking a String.
		return (mode == Changer.ChangeMode.SET || mode == Changer.ChangeMode.DELETE) ? CollectionUtils.array(String.class) : null;
	}
}
| AlessioDP/Parties | bukkit/src/main/java/com/alessiodp/parties/bukkit/addons/external/skript/expressions/ExprPartyName.java | Java | agpl-3.0 | 1,576 |
# Migration: adds the color_mappings table, associating a collage and a tag
# with a hex color code.
class CreateColorMappings < ActiveRecord::Migration
  def self.up
    create_table :color_mappings do |t|
      t.references :collage # collage this mapping belongs to
      t.references :tag     # tag being colored
      t.string :hex         # color as a hex string

      t.timestamps
    end
  end

  def self.down
    drop_table :color_mappings
  end
end
| emmalemma/h2o | db/migrate/20121005144035_create_color_mappings.rb | Ruby | agpl-3.0 | 269 |
##############################################################################
# For copyright and license notices, see __manifest__.py file in module root
# directory
##############################################################################
from . import account_move
from . import account_move_line
from . import account_master_port
| ingadhoc/multi-company | account_multic_fix/models/__init__.py | Python | agpl-3.0 | 340 |
/**
* Copyright (C) 2001-2020 by RapidMiner and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.gui.new_plotter.configuration;
import com.rapidminer.gui.new_plotter.listener.events.LineFormatChangeEvent;
import com.rapidminer.gui.new_plotter.utility.DataStructureUtils;
import com.rapidminer.tools.I18N;
import java.awt.BasicStroke;
import java.awt.Color;
import java.lang.ref.WeakReference;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
* @author Marius Helf
* @deprecated since 9.2.0
*/
@Deprecated
public class LineFormat implements Cloneable {

	/** Builds the BasicStroke prototypes (width 1) backing each {@link LineStyle}. */
	private static class StrokeFactory {

		static public BasicStroke getSolidStroke() {
			return new BasicStroke(1, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND);
		}

		static public BasicStroke getDottedStroke() {
			return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 1f, 1f }, 0.0f);
		}

		static public BasicStroke getShortDashedStroke() {
			return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 4f, 2f }, 0.0f);
		}

		static public BasicStroke getLongDashedStroke() {
			return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 7f, 3f }, 0.0f);
		}

		static public BasicStroke getDashDotStroke() {
			return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 6f, 2f, 1f, 2f },
					0.0f);
		}

		static public BasicStroke getStripedStroke() {
			return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 0.2f, 0.2f }, 0.0f);
		}
	}

	/** Available line styles, each carrying its prototype stroke and a localized label. */
	public enum LineStyle {
		NONE(null, I18N.getGUILabel("plotter.linestyle.NONE.label")), SOLID(StrokeFactory.getSolidStroke(), I18N
				.getGUILabel("plotter.linestyle.SOLID.label")), DOTS(StrokeFactory.getDottedStroke(), I18N
				.getGUILabel("plotter.linestyle.DOTS.label")), SHORT_DASHES(StrokeFactory.getShortDashedStroke(), I18N
				.getGUILabel("plotter.linestyle.SHORT_DASHES.label")), LONG_DASHES(StrokeFactory.getLongDashedStroke(), I18N
				.getGUILabel("plotter.linestyle.LONG_DASHES.label")), DASH_DOT(StrokeFactory.getDashDotStroke(), I18N
				.getGUILabel("plotter.linestyle.DASH_DOT.label")), STRIPES(StrokeFactory.getStripedStroke(), I18N
				.getGUILabel("plotter.linestyle.STRIPES.label"));

		private final BasicStroke stroke;
		private final String name;

		/** @return the prototype stroke, or null for {@link #NONE}. */
		public BasicStroke getStroke() {
			return stroke;
		}

		/** @return the localized display label. */
		public String getName() {
			return name;
		}

		private LineStyle(BasicStroke stroke, String name) {
			this.stroke = stroke;
			this.name = name;
		}
	}

	// Listeners are held weakly; dead references are pruned lazily while firing.
	private List<WeakReference<LineFormatListener>> listeners = new LinkedList<WeakReference<LineFormatListener>>();

	private LineStyle style = LineStyle.NONE; // dashed, solid...
	private Color color = Color.GRAY;
	private float width = 1.0f;

	public LineStyle getStyle() {
		return style;
	}

	/** Sets the style and notifies listeners only when the value actually changes. */
	public void setStyle(LineStyle style) {
		if (style != this.style) {
			this.style = style;
			fireStyleChanged();
		}
	}

	public Color getColor() {
		return color;
	}

	/** Sets the color (null allowed) and notifies listeners only on change. */
	public void setColor(Color color) {
		if (color == null ? this.color != null : !color.equals(this.color)) {
			this.color = color;
			fireColorChanged();
		}
	}

	public float getWidth() {
		return width;
	}

	/** Sets the line width and notifies listeners only on change. */
	public void setWidth(float width) {
		if (width != this.width) {
			this.width = width;
			fireWidthChanged();
		}
	}

	private void fireWidthChanged() {
		fireLineFormatChanged(new LineFormatChangeEvent(this, width));
	}

	private void fireColorChanged() {
		fireLineFormatChanged(new LineFormatChangeEvent(this, color));
	}

	private void fireStyleChanged() {
		fireLineFormatChanged(new LineFormatChangeEvent(this, style));
	}

	/** Delivers the event to live listeners and removes garbage-collected entries. */
	private void fireLineFormatChanged(LineFormatChangeEvent e) {
		Iterator<WeakReference<LineFormatListener>> it = listeners.iterator();
		while (it.hasNext()) {
			LineFormatListener l = it.next().get();
			if (l != null) {
				l.lineFormatChanged(e);
			} else {
				it.remove();
			}
		}
	}

	/**
	 * Copies color, style and width; listeners are not copied.
	 * NOTE(review): does not call super.clone() as Cloneable convention suggests —
	 * presumably intentional for this deprecated class, confirm before changing.
	 */
	@Override
	public LineFormat clone() {
		LineFormat clone = new LineFormat();
		clone.color = new Color(color.getRGB(), true);
		clone.style = style;
		clone.width = width;
		return clone;
	}

	/**
	 * Builds a stroke for the current style scaled to the configured width,
	 * including a proportionally scaled dash pattern; null for style NONE.
	 */
	public BasicStroke getStroke() {
		BasicStroke stroke = style.getStroke();
		if (stroke != null) {
			float[] scaledDashArray = getScaledDashArray();
			BasicStroke scaledStroke = new BasicStroke(this.getWidth(), stroke.getEndCap(), stroke.getLineJoin(),
					stroke.getMiterLimit(), scaledDashArray, stroke.getDashPhase());
			return scaledStroke;
		} else {
			return null;
		}
	}

	/**
	 * Returns the style's dash pattern multiplied by the line width (so dashes
	 * keep their proportions on thick lines); null for solid/NONE styles.
	 * A non-positive width falls back to a scaling factor of 1.
	 */
	float[] getScaledDashArray() {
		BasicStroke stroke = getStyle().getStroke();
		if (stroke == null) {
			return null;
		}
		float[] dashArray = stroke.getDashArray();
		float[] scaledDashArray;
		if (dashArray != null) {
			float scalingFactor = getWidth();
			if (scalingFactor <= 0) {
				scalingFactor = 1;
			}
			if (scalingFactor != 1) {
				scaledDashArray = DataStructureUtils.cloneAndMultiplyArray(dashArray, scalingFactor);
			} else {
				scaledDashArray = dashArray;
			}
		} else {
			scaledDashArray = dashArray;
		}
		return scaledDashArray;
	}

	/** Registers a listener; held via WeakReference, so callers must keep a strong reference. */
	public void addLineFormatListener(LineFormatListener l) {
		listeners.add(new WeakReference<LineFormatListener>(l));
	}

	/**
	 * Removes the given listener.
	 * NOTE(review): passing null removes EVERY listener, and stale (collected)
	 * entries are only pruned in that null branch or while firing — confirm
	 * this asymmetry is intended.
	 */
	public void removeLineFormatListener(LineFormatListener l) {
		Iterator<WeakReference<LineFormatListener>> it = listeners.iterator();
		while (it.hasNext()) {
			LineFormatListener listener = it.next().get();
			if (l != null) {
				if (listener != null && listener.equals(l)) {
					it.remove();
				}
			} else {
				it.remove();
			}
		}
	}
}
| rapidminer/rapidminer-studio | src/main/java/com/rapidminer/gui/new_plotter/configuration/LineFormat.java | Java | agpl-3.0 | 6,597 |